| code (string, lengths 75-104k) | code_sememe (string, lengths 47-309k) | token_type (string, lengths 215-214k) | code_dependency (string, lengths 75-155k) |
|---|---|---|---|
def parse_radia_multi_alt(infile, outfile):
"""
    This function will parse the vcf to detect sites having multiple alt alleles and pick out only
    the most likely ones.
INFILE : open file handle for the input vcf
OUTFILE : open file handle for the output vcf
The columns in INFILE are
[0] CHROM
[1] POS
[2] ID
[3] REF
[4] ALT
[5] QUAL
[6] FILTER
[7] INFO
[8] FORMAT
[9] DNA_NORMAL
[10] DNA_TUMOR
[11] RNA_TUMOR - Not always present
"""
for line in infile:
        # Print header to outfile
if line.startswith('#'):
print(line.strip(), file=outfile)
continue
line = line.strip().split('\t')
        # If there is just one ALT allele (no comma in the ALT field), print and continue
        if ',' not in line[4]:
print('\t'.join(line), file=outfile)
# If not, process
else:
seq_field_indeces = [9, 10]
alleles = [line[3]] + line[4].split(',') # all alleles, incl. REF
# collect tumor, normal and (if present) rna AD and AFs
# AD = Depth of reads supporting each allele
# AF = Fraction of reads supporting each allele
normal_AD = line[9].split(':')[5].split(',')
normal_AF = line[9].split(':')[6].split(',')
tumor_AD = line[10].split(':')[5].split(',')
tumor_AF = line[10].split(':')[6].split(',')
            if len(line) > 11 and len(line[11]) > 1:
rna_AD = line[11].split(':')[5].split(',')
rna_AF = line[11].split(':')[6].split(',')
seq_field_indeces += [11] # append rna since it is present
else:
                # If rna is missing, set rna_AD and rna_AF to zero-filled lists that slot
                # into the logic in the following code
rna_AD = rna_AF = [0, 0, 0, 0]
            # Initialise variables to store the probable ALT alleles and their index values
            # with respect to AD and AF
out_alleles = set([])
out_AF_AD_index = {0}
# parse AD and AF to get most probable ALT alleles
for i in range(1, len(normal_AF)):
                # Criteria for selection: AD >= 4 and AF >= 0.1 in either tumor or RNA, given normal
# AF < 0.1
if ((float(tumor_AF[i]) >= 0.1 and int(tumor_AD[i]) >= 4) \
or (float(rna_AF[i]) >= 0.1 and int(rna_AD[i]) >= 4)) \
and (float(normal_AF[i]) < 0.1):
out_alleles.add(alleles[i])
out_AF_AD_index.add(i)
            # If the number of probable alleles is greater than 0, then print to outfile with the
            # modified allele fraction representing reads corresponding to all alleles
if len(out_alleles) > 0:
line[4] = ','.join(out_alleles) # set alt alleles
# Modify the AD and AF values in the TUMOR/NORMAL/RNA fields
# one at a time. Seq fields contain
# [0] GT* - Genotype
# [1] DP - Read depth at this position in the sample
# [2] INDEL - Number of indels
# [3] START - Number of reads starting at this position
# [4] STOP - Number of reads stopping at this position
# [5] AD* - Depth of reads supporting alleles
# [6] AF* - Fraction of reads supporting alleles
# [7] BQ* - Avg base quality for reads supporting alleles
# [8] SB* - Strand Bias for reads supporting alleles
                # Fields marked with *s are the ones that contain info for each seq field and need
# to be modified
for seq_field_index in seq_field_indeces:
# Get the details for seq_field
deets = line[seq_field_index].split(':')
                    # modify fields 5 through 8 to hold only info for the probable
# alleles
for field_index in range(5, 9):
field = deets[field_index].split(",")
deets[field_index] = ",".join([x for i, x in enumerate(field)
if i in out_AF_AD_index])
# Modify DP to hold the new total of reads
deets[1] = str(sum([int(x) for x in deets[5].split(",")]))
# get the most likely genotypes based on AD and AF
GT_by_AD = set([i for i, x in enumerate(deets[5].split(",")) if int(x) >= 4])
GT_by_AF = set([i for i, x in enumerate(deets[6].split(",")) \
if float(x) >= 0.1])
# Get the consensus genotype
GT = GT_by_AD.intersection(GT_by_AF)
if len(GT) == 0:
deets[0] = "0/0"
elif len(GT) == 1:
deets[0] = "/".join([str(x) for x in GT] + [str(x) for x in GT])
elif len(GT) == 2:
deets[0] = "/".join([str(x) for x in GT])
else:
print("ERROR : triple genotype detected", file=sys.stderr)
print(line, file=sys.stdout)
# Rejoin the details line
line[seq_field_index] = ":".join(deets)
# Print the modified line to output
print("\t".join(line), file=outfile)
# Else do nothing
else:
pass
|
def function[parse_radia_multi_alt, parameter[infile, outfile]]:
constant[
    This function will parse the vcf to detect sites having multiple alt alleles and pick out only
    the most likely ones.
INFILE : open file handle for the input vcf
OUTFILE : open file handle for the output vcf
The columns in INFILE are
[0] CHROM
[1] POS
[2] ID
[3] REF
[4] ALT
[5] QUAL
[6] FILTER
[7] INFO
[8] FORMAT
[9] DNA_NORMAL
[10] DNA_TUMOR
[11] RNA_TUMOR - Not always present
]
for taget[name[line]] in starred[name[infile]] begin[:]
if call[name[line].startswith, parameter[constant[#]]] begin[:]
call[name[print], parameter[call[name[line].strip, parameter[]]]]
continue
variable[line] assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[ ]]]
if compare[call[name[len], parameter[call[name[line]][constant[4]]]] equal[==] constant[1]] begin[:]
call[name[print], parameter[call[constant[ ].join, parameter[name[line]]]]]
|
keyword[def] identifier[parse_radia_multi_alt] ( identifier[infile] , identifier[outfile] ):
literal[string]
keyword[for] identifier[line] keyword[in] identifier[infile] :
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[print] ( identifier[line] . identifier[strip] (), identifier[file] = identifier[outfile] )
keyword[continue]
identifier[line] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[line] [ literal[int] ])== literal[int] :
identifier[print] ( literal[string] . identifier[join] ( identifier[line] ), identifier[file] = identifier[outfile] )
keyword[else] :
identifier[seq_field_indeces] =[ literal[int] , literal[int] ]
identifier[alleles] =[ identifier[line] [ literal[int] ]]+ identifier[line] [ literal[int] ]. identifier[split] ( literal[string] )
identifier[normal_AD] = identifier[line] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
identifier[normal_AF] = identifier[line] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
identifier[tumor_AD] = identifier[line] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
identifier[tumor_AF] = identifier[line] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[line] [ literal[int] ])> literal[int] :
identifier[rna_AD] = identifier[line] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
identifier[rna_AF] = identifier[line] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
identifier[seq_field_indeces] +=[ literal[int] ]
keyword[else] :
identifier[rna_AD] = identifier[rna_AF] =[ literal[int] , literal[int] , literal[int] , literal[int] ]
identifier[out_alleles] = identifier[set] ([])
identifier[out_AF_AD_index] ={ literal[int] }
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[normal_AF] )):
keyword[if] (( identifier[float] ( identifier[tumor_AF] [ identifier[i] ])>= literal[int] keyword[and] identifier[int] ( identifier[tumor_AD] [ identifier[i] ])>= literal[int] ) keyword[or] ( identifier[float] ( identifier[rna_AF] [ identifier[i] ])>= literal[int] keyword[and] identifier[int] ( identifier[rna_AD] [ identifier[i] ])>= literal[int] )) keyword[and] ( identifier[float] ( identifier[normal_AF] [ identifier[i] ])< literal[int] ):
identifier[out_alleles] . identifier[add] ( identifier[alleles] [ identifier[i] ])
identifier[out_AF_AD_index] . identifier[add] ( identifier[i] )
keyword[if] identifier[len] ( identifier[out_alleles] )> literal[int] :
identifier[line] [ literal[int] ]= literal[string] . identifier[join] ( identifier[out_alleles] )
keyword[for] identifier[seq_field_index] keyword[in] identifier[seq_field_indeces] :
identifier[deets] = identifier[line] [ identifier[seq_field_index] ]. identifier[split] ( literal[string] )
keyword[for] identifier[field_index] keyword[in] identifier[range] ( literal[int] , literal[int] ):
identifier[field] = identifier[deets] [ identifier[field_index] ]. identifier[split] ( literal[string] )
identifier[deets] [ identifier[field_index] ]= literal[string] . identifier[join] ([ identifier[x] keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[field] )
keyword[if] identifier[i] keyword[in] identifier[out_AF_AD_index] ])
identifier[deets] [ literal[int] ]= identifier[str] ( identifier[sum] ([ identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[deets] [ literal[int] ]. identifier[split] ( literal[string] )]))
identifier[GT_by_AD] = identifier[set] ([ identifier[i] keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[deets] [ literal[int] ]. identifier[split] ( literal[string] )) keyword[if] identifier[int] ( identifier[x] )>= literal[int] ])
identifier[GT_by_AF] = identifier[set] ([ identifier[i] keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[deets] [ literal[int] ]. identifier[split] ( literal[string] )) keyword[if] identifier[float] ( identifier[x] )>= literal[int] ])
identifier[GT] = identifier[GT_by_AD] . identifier[intersection] ( identifier[GT_by_AF] )
keyword[if] identifier[len] ( identifier[GT] )== literal[int] :
identifier[deets] [ literal[int] ]= literal[string]
keyword[elif] identifier[len] ( identifier[GT] )== literal[int] :
identifier[deets] [ literal[int] ]= literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[GT] ]+[ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[GT] ])
keyword[elif] identifier[len] ( identifier[GT] )== literal[int] :
identifier[deets] [ literal[int] ]= literal[string] . identifier[join] ([ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[GT] ])
keyword[else] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( identifier[line] , identifier[file] = identifier[sys] . identifier[stdout] )
identifier[line] [ identifier[seq_field_index] ]= literal[string] . identifier[join] ( identifier[deets] )
identifier[print] ( literal[string] . identifier[join] ( identifier[line] ), identifier[file] = identifier[outfile] )
keyword[else] :
keyword[pass]
|
def parse_radia_multi_alt(infile, outfile):
"""
    This function will parse the vcf to detect sites having multiple alt alleles and pick out only
    the most likely ones.
INFILE : open file handle for the input vcf
OUTFILE : open file handle for the output vcf
The columns in INFILE are
[0] CHROM
[1] POS
[2] ID
[3] REF
[4] ALT
[5] QUAL
[6] FILTER
[7] INFO
[8] FORMAT
[9] DNA_NORMAL
[10] DNA_TUMOR
[11] RNA_TUMOR - Not always present
"""
for line in infile:
        # Print header to outfile
if line.startswith('#'):
print(line.strip(), file=outfile)
continue # depends on [control=['if'], data=[]]
line = line.strip().split('\t')
        # If there is just one ALT allele (no comma in the ALT field), print and continue
        if ',' not in line[4]:
print('\t'.join(line), file=outfile) # depends on [control=['if'], data=[]]
else:
# If not, process
seq_field_indeces = [9, 10]
alleles = [line[3]] + line[4].split(',') # all alleles, incl. REF
# collect tumor, normal and (if present) rna AD and AFs
# AD = Depth of reads supporting each allele
# AF = Fraction of reads supporting each allele
normal_AD = line[9].split(':')[5].split(',')
normal_AF = line[9].split(':')[6].split(',')
tumor_AD = line[10].split(':')[5].split(',')
tumor_AF = line[10].split(':')[6].split(',')
            if len(line) > 11 and len(line[11]) > 1:
rna_AD = line[11].split(':')[5].split(',')
rna_AF = line[11].split(':')[6].split(',')
seq_field_indeces += [11] # append rna since it is present # depends on [control=['if'], data=[]]
else:
            # If rna is missing, set rna_AD and rna_AF to zero-filled lists that slot
            # into the logic in the following code
rna_AD = rna_AF = [0, 0, 0, 0]
            # Initialise variables to store the probable ALT alleles and their index values
            # with respect to AD and AF
out_alleles = set([])
out_AF_AD_index = {0}
# parse AD and AF to get most probable ALT alleles
for i in range(1, len(normal_AF)):
                # Criteria for selection: AD >= 4 and AF >= 0.1 in either tumor or RNA, given normal
# AF < 0.1
if (float(tumor_AF[i]) >= 0.1 and int(tumor_AD[i]) >= 4 or (float(rna_AF[i]) >= 0.1 and int(rna_AD[i]) >= 4)) and float(normal_AF[i]) < 0.1:
out_alleles.add(alleles[i])
out_AF_AD_index.add(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
            # If the number of probable alleles is greater than 0, then print to outfile with the
            # modified allele fraction representing reads corresponding to all alleles
if len(out_alleles) > 0:
line[4] = ','.join(out_alleles) # set alt alleles
# Modify the AD and AF values in the TUMOR/NORMAL/RNA fields
# one at a time. Seq fields contain
# [0] GT* - Genotype
# [1] DP - Read depth at this position in the sample
# [2] INDEL - Number of indels
# [3] START - Number of reads starting at this position
# [4] STOP - Number of reads stopping at this position
# [5] AD* - Depth of reads supporting alleles
# [6] AF* - Fraction of reads supporting alleles
# [7] BQ* - Avg base quality for reads supporting alleles
# [8] SB* - Strand Bias for reads supporting alleles
                # Fields marked with *s are the ones that contain info for each seq field and need
# to be modified
for seq_field_index in seq_field_indeces:
# Get the details for seq_field
deets = line[seq_field_index].split(':')
                    # modify fields 5 through 8 to hold only info for the probable
# alleles
for field_index in range(5, 9):
field = deets[field_index].split(',')
deets[field_index] = ','.join([x for (i, x) in enumerate(field) if i in out_AF_AD_index]) # depends on [control=['for'], data=['field_index']]
# Modify DP to hold the new total of reads
deets[1] = str(sum([int(x) for x in deets[5].split(',')]))
# get the most likely genotypes based on AD and AF
GT_by_AD = set([i for (i, x) in enumerate(deets[5].split(',')) if int(x) >= 4])
GT_by_AF = set([i for (i, x) in enumerate(deets[6].split(',')) if float(x) >= 0.1])
# Get the consensus genotype
GT = GT_by_AD.intersection(GT_by_AF)
if len(GT) == 0:
deets[0] = '0/0' # depends on [control=['if'], data=[]]
elif len(GT) == 1:
deets[0] = '/'.join([str(x) for x in GT] + [str(x) for x in GT]) # depends on [control=['if'], data=[]]
elif len(GT) == 2:
deets[0] = '/'.join([str(x) for x in GT]) # depends on [control=['if'], data=[]]
else:
print('ERROR : triple genotype detected', file=sys.stderr)
print(line, file=sys.stdout)
# Rejoin the details line
line[seq_field_index] = ':'.join(deets) # depends on [control=['for'], data=['seq_field_index']]
# Print the modified line to output
print('\t'.join(line), file=outfile) # depends on [control=['if'], data=[]]
else:
# Else do nothing
pass # depends on [control=['for'], data=['line']]
|
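A quick smoke test for the first row's `parse_radia_multi_alt`, assuming the function above is in scope. The record below is a hypothetical RADIA-style line (all sample values invented) with two ALT alleles; G passes the AF/AD filter and T is dropped:

```python
import io

header = '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tDNA_NORMAL\tDNA_TUMOR\tRNA_TUMOR'
record = '\t'.join([
    'chr1', '100', '.', 'A', 'G,T', '.', 'PASS', '.',
    'GT:DP:INDEL:START:STOP:AD:AF:BQ:SB',
    '0/0:30:0:0:0:30,0,0:1.0,0.0,0.0:30,0,0:0.5,0.0,0.0',       # normal: no ALT support
    '0/1:40:0:0:0:20,18,2:0.50,0.45,0.05:30,28,5:0.5,0.5,0.5',  # tumor: G supported
    '0/1:35:0:0:0:15,18,2:0.43,0.51,0.06:30,29,6:0.5,0.5,0.5',  # rna: G supported
])
out = io.StringIO()
parse_radia_multi_alt(io.StringIO(header + '\n' + record + '\n'), out)
print(out.getvalue())  # ALT collapses to 'G'; AD/AF/BQ/SB keep only the REF and G entries
```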
def _relabel_to_obo(d):
"""Change the keys of ``d`` to use Obo labels.
"""
return {
owl_to_obo.get(old_k, old_k): old_v
for old_k, old_v in six.iteritems(d)
}
|
def function[_relabel_to_obo, parameter[d]]:
    constant[Change the keys of ``d`` to use OBO labels.
]
return[<ast.DictComp object at 0x7da1b138ac50>]
|
keyword[def] identifier[_relabel_to_obo] ( identifier[d] ):
literal[string]
keyword[return] {
identifier[owl_to_obo] . identifier[get] ( identifier[old_k] , identifier[old_k] ): identifier[old_v]
keyword[for] identifier[old_k] , identifier[old_v] keyword[in] identifier[six] . identifier[iteritems] ( identifier[d] )
}
|
def _relabel_to_obo(d):
"""Change the keys of ``d`` to use Obo labels.
"""
return {owl_to_obo.get(old_k, old_k): old_v for (old_k, old_v) in six.iteritems(d)}
|
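A minimal, self-contained illustration of the second row's `_relabel_to_obo`; the `owl_to_obo` dict here is a stand-in for the module-level mapping the real function closes over:

```python
import six

owl_to_obo = {'partOf': 'part_of'}  # stand-in for the module-level mapping

def _relabel_to_obo(d):
    return {owl_to_obo.get(old_k, old_k): old_v
            for old_k, old_v in six.iteritems(d)}

print(_relabel_to_obo({'partOf': 3, 'is_a': 7}))
# {'part_of': 3, 'is_a': 7}; keys missing from the map pass through unchanged
```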
def RegisterAnyElement(cls):
    '''If a registered TypeCode instance is found, add the Wrapper class
    to the TypeCode class serialmap and re-run RegisterType. Provides
    Any serialization of any instances of the Wrapper.
    '''
for k,v in cls.types_dict.items():
what = Any.serialmap.get(k)
if what is None: continue
if v in what.__class__.seriallist: continue
what.__class__.seriallist.append(v)
RegisterType(what.__class__, clobber=1, **what.__dict__)
|
def function[RegisterAnyElement, parameter[cls]]:
    constant[If a registered TypeCode instance is found, add the Wrapper class
    to the TypeCode class serialmap and re-run RegisterType. Provides
    Any serialization of any instances of the Wrapper.
]
for taget[tuple[[<ast.Name object at 0x7da2047e9390>, <ast.Name object at 0x7da2047ea890>]]] in starred[call[name[cls].types_dict.items, parameter[]]] begin[:]
variable[what] assign[=] call[name[Any].serialmap.get, parameter[name[k]]]
if compare[name[what] is constant[None]] begin[:]
continue
if compare[name[v] in name[what].__class__.seriallist] begin[:]
continue
call[name[what].__class__.seriallist.append, parameter[name[v]]]
call[name[RegisterType], parameter[name[what].__class__]]
|
keyword[def] identifier[RegisterAnyElement] ( identifier[cls] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[cls] . identifier[types_dict] . identifier[items] ():
identifier[what] = identifier[Any] . identifier[serialmap] . identifier[get] ( identifier[k] )
keyword[if] identifier[what] keyword[is] keyword[None] : keyword[continue]
keyword[if] identifier[v] keyword[in] identifier[what] . identifier[__class__] . identifier[seriallist] : keyword[continue]
identifier[what] . identifier[__class__] . identifier[seriallist] . identifier[append] ( identifier[v] )
identifier[RegisterType] ( identifier[what] . identifier[__class__] , identifier[clobber] = literal[int] ,** identifier[what] . identifier[__dict__] )
|
def RegisterAnyElement(cls):
"""If find registered TypeCode instance, add Wrapper class
to TypeCode class serialmap and Re-RegisterType. Provides
Any serialzation of any instances of the Wrapper.
"""
for (k, v) in cls.types_dict.items():
what = Any.serialmap.get(k)
if what is None:
continue # depends on [control=['if'], data=[]]
if v in what.__class__.seriallist:
continue # depends on [control=['if'], data=[]]
what.__class__.seriallist.append(v)
RegisterType(what.__class__, clobber=1, **what.__dict__) # depends on [control=['for'], data=[]]
|
def pa11y_counts(results):
"""
Given a list of pa11y results, return three integers:
number of errors, number of warnings, and number of notices.
"""
num_error = 0
num_warning = 0
num_notice = 0
for result in results:
if result['type'] == 'error':
num_error += 1
elif result['type'] == 'warning':
num_warning += 1
elif result['type'] == 'notice':
num_notice += 1
return num_error, num_warning, num_notice
|
def function[pa11y_counts, parameter[results]]:
constant[
Given a list of pa11y results, return three integers:
number of errors, number of warnings, and number of notices.
]
variable[num_error] assign[=] constant[0]
variable[num_warning] assign[=] constant[0]
variable[num_notice] assign[=] constant[0]
for taget[name[result]] in starred[name[results]] begin[:]
if compare[call[name[result]][constant[type]] equal[==] constant[error]] begin[:]
<ast.AugAssign object at 0x7da20c6ab0d0>
return[tuple[[<ast.Name object at 0x7da20c6a9de0>, <ast.Name object at 0x7da20c6abbe0>, <ast.Name object at 0x7da20c6a8910>]]]
|
keyword[def] identifier[pa11y_counts] ( identifier[results] ):
literal[string]
identifier[num_error] = literal[int]
identifier[num_warning] = literal[int]
identifier[num_notice] = literal[int]
keyword[for] identifier[result] keyword[in] identifier[results] :
keyword[if] identifier[result] [ literal[string] ]== literal[string] :
identifier[num_error] += literal[int]
keyword[elif] identifier[result] [ literal[string] ]== literal[string] :
identifier[num_warning] += literal[int]
keyword[elif] identifier[result] [ literal[string] ]== literal[string] :
identifier[num_notice] += literal[int]
keyword[return] identifier[num_error] , identifier[num_warning] , identifier[num_notice]
|
def pa11y_counts(results):
"""
Given a list of pa11y results, return three integers:
number of errors, number of warnings, and number of notices.
"""
num_error = 0
num_warning = 0
num_notice = 0
for result in results:
if result['type'] == 'error':
num_error += 1 # depends on [control=['if'], data=[]]
elif result['type'] == 'warning':
num_warning += 1 # depends on [control=['if'], data=[]]
elif result['type'] == 'notice':
num_notice += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['result']]
return (num_error, num_warning, num_notice)
|
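The `pa11y_counts` row is easy to exercise directly; assuming the function is in scope, a hand-made result list confirms the tallies:

```python
results = [
    {'type': 'error'},
    {'type': 'warning'},
    {'type': 'notice'},
    {'type': 'error'},
]
assert pa11y_counts(results) == (2, 1, 1)
```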
def find_valid_random_signature(s: int) -> Tuple[int, int]:
"""
Find v and r valid values for a given s
:param s: random value
:return: v, r
"""
for _ in range(10000):
r = int(os.urandom(31).hex(), 16)
v = (r % 2) + 27
if r < secpk1n:
tx = Transaction(0, 1, 21000, b'', 0, b'', v=v, r=r, s=s)
try:
                tx.sender  # accessing the property validates the signature
return v, r
except (InvalidTransaction, ValueError):
logger.debug('Cannot find signature with v=%d r=%d s=%d', v, r, s)
    raise ValueError('Valid signature not found with s=%d' % s)
|
def function[find_valid_random_signature, parameter[s]]:
constant[
Find v and r valid values for a given s
:param s: random value
:return: v, r
]
for taget[name[_]] in starred[call[name[range], parameter[constant[10000]]]] begin[:]
variable[r] assign[=] call[name[int], parameter[call[call[name[os].urandom, parameter[constant[31]]].hex, parameter[]], constant[16]]]
variable[v] assign[=] binary_operation[binary_operation[name[r] <ast.Mod object at 0x7da2590d6920> constant[2]] + constant[27]]
if compare[name[r] less[<] name[secpk1n]] begin[:]
variable[tx] assign[=] call[name[Transaction], parameter[constant[0], constant[1], constant[21000], constant[b''], constant[0], constant[b'']]]
<ast.Try object at 0x7da18bc73520>
<ast.Raise object at 0x7da18bc72650>
|
keyword[def] identifier[find_valid_random_signature] ( identifier[s] : identifier[int] )-> identifier[Tuple] [ identifier[int] , identifier[int] ]:
literal[string]
keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] ):
identifier[r] = identifier[int] ( identifier[os] . identifier[urandom] ( literal[int] ). identifier[hex] (), literal[int] )
identifier[v] =( identifier[r] % literal[int] )+ literal[int]
keyword[if] identifier[r] < identifier[secpk1n] :
identifier[tx] = identifier[Transaction] ( literal[int] , literal[int] , literal[int] , literal[string] , literal[int] , literal[string] , identifier[v] = identifier[v] , identifier[r] = identifier[r] , identifier[s] = identifier[s] )
keyword[try] :
identifier[tx] . identifier[sender]
keyword[return] identifier[v] , identifier[r]
keyword[except] ( identifier[InvalidTransaction] , identifier[ValueError] ):
identifier[logger] . identifier[debug] ( literal[string] , identifier[v] , identifier[r] , identifier[s] )
keyword[raise] identifier[ValueError] ( literal[string] , identifier[s] )
|
def find_valid_random_signature(s: int) -> Tuple[int, int]:
"""
Find v and r valid values for a given s
:param s: random value
:return: v, r
"""
for _ in range(10000):
r = int(os.urandom(31).hex(), 16)
v = r % 2 + 27
if r < secpk1n:
tx = Transaction(0, 1, 21000, b'', 0, b'', v=v, r=r, s=s)
try:
tx.sender
return (v, r) # depends on [control=['try'], data=[]]
except (InvalidTransaction, ValueError):
logger.debug('Cannot find signature with v=%d r=%d s=%d', v, r, s) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['r']] # depends on [control=['for'], data=[]]
    raise ValueError('Valid signature not found with s=%d' % s)
|
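The sampling step in `find_valid_random_signature`, shown in isolation: `r` is a random 31-byte integer (248 bits, so it always falls below the secp256k1 group order that `secpk1n` holds) and `v` encodes its parity as 27 or 28, the pre-EIP-155 recovery values:

```python
import os

r = int(os.urandom(31).hex(), 16)  # 248-bit value, always below the curve order
v = (r % 2) + 27                   # pre-EIP-155 recovery id from r's parity
assert v in (27, 28)
```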
def parse_octal(self, text, i):
"""Parse octal value."""
value = int(text, 8)
if value > 0xFF and self.is_bytes:
# Re fails on octal greater than `0o377` or `0xFF`
raise ValueError("octal escape value outside of range 0-0o377!")
else:
single = self.get_single_stack()
if self.span_stack:
text = self.convert_case(chr(value), self.span_stack[-1])
value = ord(self.convert_case(text, single)) if single is not None else ord(text)
elif single:
value = ord(self.convert_case(chr(value), single))
if self.use_format and value in _CURLY_BRACKETS_ORD:
self.handle_format(chr(value), i)
elif value <= 0xFF:
self.result.append('\\%03o' % value)
else:
self.result.append(chr(value))
|
def function[parse_octal, parameter[self, text, i]]:
constant[Parse octal value.]
variable[value] assign[=] call[name[int], parameter[name[text], constant[8]]]
if <ast.BoolOp object at 0x7da1b04ec6a0> begin[:]
<ast.Raise object at 0x7da1b04efa00>
|
keyword[def] identifier[parse_octal] ( identifier[self] , identifier[text] , identifier[i] ):
literal[string]
identifier[value] = identifier[int] ( identifier[text] , literal[int] )
keyword[if] identifier[value] > literal[int] keyword[and] identifier[self] . identifier[is_bytes] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
identifier[single] = identifier[self] . identifier[get_single_stack] ()
keyword[if] identifier[self] . identifier[span_stack] :
identifier[text] = identifier[self] . identifier[convert_case] ( identifier[chr] ( identifier[value] ), identifier[self] . identifier[span_stack] [- literal[int] ])
identifier[value] = identifier[ord] ( identifier[self] . identifier[convert_case] ( identifier[text] , identifier[single] )) keyword[if] identifier[single] keyword[is] keyword[not] keyword[None] keyword[else] identifier[ord] ( identifier[text] )
keyword[elif] identifier[single] :
identifier[value] = identifier[ord] ( identifier[self] . identifier[convert_case] ( identifier[chr] ( identifier[value] ), identifier[single] ))
keyword[if] identifier[self] . identifier[use_format] keyword[and] identifier[value] keyword[in] identifier[_CURLY_BRACKETS_ORD] :
identifier[self] . identifier[handle_format] ( identifier[chr] ( identifier[value] ), identifier[i] )
keyword[elif] identifier[value] <= literal[int] :
identifier[self] . identifier[result] . identifier[append] ( literal[string] % identifier[value] )
keyword[else] :
identifier[self] . identifier[result] . identifier[append] ( identifier[chr] ( identifier[value] ))
|
def parse_octal(self, text, i):
"""Parse octal value."""
value = int(text, 8)
if value > 255 and self.is_bytes:
# Re fails on octal greater than `0o377` or `0xFF`
raise ValueError('octal escape value outside of range 0-0o377!') # depends on [control=['if'], data=[]]
else:
single = self.get_single_stack()
if self.span_stack:
text = self.convert_case(chr(value), self.span_stack[-1])
value = ord(self.convert_case(text, single)) if single is not None else ord(text) # depends on [control=['if'], data=[]]
elif single:
value = ord(self.convert_case(chr(value), single)) # depends on [control=['if'], data=[]]
if self.use_format and value in _CURLY_BRACKETS_ORD:
self.handle_format(chr(value), i) # depends on [control=['if'], data=[]]
elif value <= 255:
self.result.append('\\%03o' % value) # depends on [control=['if'], data=['value']]
else:
self.result.append(chr(value))
|
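Two Python building blocks `parse_octal` relies on, shown standalone: base-8 parsing via `int(text, 8)`, the `\%03o` escape it emits for values at or below 0xFF, and plain `chr()` for anything larger:

```python
assert int('377', 8) == 0xFF == 255   # the bytes-mode ceiling
assert '\\%03o' % 65 == '\\101'       # small values are re-escaped as octal
print(chr(0x2603))                    # larger values are appended as characters
```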
def list_nodes(call=None):
'''
Return a list of the VMs that are on the provider
'''
if call == 'action':
raise SaltCloudSystemExit(
'The list_nodes function must be called with -f or --function.'
)
ret = {}
nodes = list_nodes_full()
if 'error' in nodes:
raise SaltCloudSystemExit(
'An error occurred while listing nodes: {0}'.format(
nodes['error']['Errors']['Error']['Message']
)
)
for node in nodes:
ret[node] = {
'id': nodes[node]['hostname'],
'ram': nodes[node]['maxMemory'],
'cpus': nodes[node]['maxCpu'],
}
if 'primaryIpAddress' in nodes[node]:
ret[node]['public_ips'] = nodes[node]['primaryIpAddress']
if 'primaryBackendIpAddress' in nodes[node]:
ret[node]['private_ips'] = nodes[node]['primaryBackendIpAddress']
if 'status' in nodes[node]:
ret[node]['state'] = six.text_type(nodes[node]['status']['name'])
return ret
|
def function[list_nodes, parameter[call]]:
constant[
Return a list of the VMs that are on the provider
]
if compare[name[call] equal[==] constant[action]] begin[:]
<ast.Raise object at 0x7da1b1c37cd0>
variable[ret] assign[=] dictionary[[], []]
variable[nodes] assign[=] call[name[list_nodes_full], parameter[]]
if compare[constant[error] in name[nodes]] begin[:]
<ast.Raise object at 0x7da1b1c37a00>
for taget[name[node]] in starred[name[nodes]] begin[:]
call[name[ret]][name[node]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2135930>, <ast.Constant object at 0x7da1b2137e80>, <ast.Constant object at 0x7da1b21360b0>], [<ast.Subscript object at 0x7da1b2137d60>, <ast.Subscript object at 0x7da1b2134d60>, <ast.Subscript object at 0x7da1b2135b70>]]
if compare[constant[primaryIpAddress] in call[name[nodes]][name[node]]] begin[:]
call[call[name[ret]][name[node]]][constant[public_ips]] assign[=] call[call[name[nodes]][name[node]]][constant[primaryIpAddress]]
if compare[constant[primaryBackendIpAddress] in call[name[nodes]][name[node]]] begin[:]
call[call[name[ret]][name[node]]][constant[private_ips]] assign[=] call[call[name[nodes]][name[node]]][constant[primaryBackendIpAddress]]
if compare[constant[status] in call[name[nodes]][name[node]]] begin[:]
call[call[name[ret]][name[node]]][constant[state]] assign[=] call[name[six].text_type, parameter[call[call[call[name[nodes]][name[node]]][constant[status]]][constant[name]]]]
return[name[ret]]
|
keyword[def] identifier[list_nodes] ( identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] == literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
identifier[ret] ={}
identifier[nodes] = identifier[list_nodes_full] ()
keyword[if] literal[string] keyword[in] identifier[nodes] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string] . identifier[format] (
identifier[nodes] [ literal[string] ][ literal[string] ][ literal[string] ][ literal[string] ]
)
)
keyword[for] identifier[node] keyword[in] identifier[nodes] :
identifier[ret] [ identifier[node] ]={
literal[string] : identifier[nodes] [ identifier[node] ][ literal[string] ],
literal[string] : identifier[nodes] [ identifier[node] ][ literal[string] ],
literal[string] : identifier[nodes] [ identifier[node] ][ literal[string] ],
}
keyword[if] literal[string] keyword[in] identifier[nodes] [ identifier[node] ]:
identifier[ret] [ identifier[node] ][ literal[string] ]= identifier[nodes] [ identifier[node] ][ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[nodes] [ identifier[node] ]:
identifier[ret] [ identifier[node] ][ literal[string] ]= identifier[nodes] [ identifier[node] ][ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[nodes] [ identifier[node] ]:
identifier[ret] [ identifier[node] ][ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[nodes] [ identifier[node] ][ literal[string] ][ literal[string] ])
keyword[return] identifier[ret]
|
def list_nodes(call=None):
"""
Return a list of the VMs that are on the provider
"""
if call == 'action':
raise SaltCloudSystemExit('The list_nodes function must be called with -f or --function.') # depends on [control=['if'], data=[]]
ret = {}
nodes = list_nodes_full()
if 'error' in nodes:
raise SaltCloudSystemExit('An error occurred while listing nodes: {0}'.format(nodes['error']['Errors']['Error']['Message'])) # depends on [control=['if'], data=['nodes']]
for node in nodes:
ret[node] = {'id': nodes[node]['hostname'], 'ram': nodes[node]['maxMemory'], 'cpus': nodes[node]['maxCpu']}
if 'primaryIpAddress' in nodes[node]:
ret[node]['public_ips'] = nodes[node]['primaryIpAddress'] # depends on [control=['if'], data=[]]
if 'primaryBackendIpAddress' in nodes[node]:
ret[node]['private_ips'] = nodes[node]['primaryBackendIpAddress'] # depends on [control=['if'], data=[]]
if 'status' in nodes[node]:
ret[node]['state'] = six.text_type(nodes[node]['status']['name']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
return ret
|
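For the `list_nodes` row, the reshaping it performs can be previewed against a stand-in for `list_nodes_full()`'s output (all values hypothetical):

```python
nodes = {
    'web1': {
        'hostname': 'web1', 'maxMemory': 1024, 'maxCpu': 2,
        'primaryIpAddress': '203.0.113.5',
        'status': {'name': 'Running'},
    },
}
# list_nodes() reduces each entry to:
# {'web1': {'id': 'web1', 'ram': 1024, 'cpus': 2,
#           'public_ips': '203.0.113.5', 'state': 'Running'}}
```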
def enforce_reset(self):
"""enforce parameter bounds on the ensemble by resetting
violating vals to bound
"""
ub = (self.ubnd * (1.0+self.bound_tol)).to_dict()
lb = (self.lbnd * (1.0 - self.bound_tol)).to_dict()
#for iname,name in enumerate(self.columns):
#self.loc[self.loc[:,name] > ub[name],name] = ub[name] * (1.0 + self.bound_tol)
#self.loc[self.loc[:,name] < lb[name],name] = lb[name].copy() * (1.0 - self.bound_tol)
# self.loc[self.loc[:,name] > ub[name],name] = ub[name]
# self.loc[self.loc[:,name] < lb[name],name] = lb[name]
val_arr = self.values
for iname, name in enumerate(self.columns):
val_arr[val_arr[:,iname] > ub[name],iname] = ub[name]
val_arr[val_arr[:, iname] < lb[name],iname] = lb[name]
|
def function[enforce_reset, parameter[self]]:
constant[enforce parameter bounds on the ensemble by resetting
violating vals to bound
]
variable[ub] assign[=] call[binary_operation[name[self].ubnd * binary_operation[constant[1.0] + name[self].bound_tol]].to_dict, parameter[]]
variable[lb] assign[=] call[binary_operation[name[self].lbnd * binary_operation[constant[1.0] - name[self].bound_tol]].to_dict, parameter[]]
variable[val_arr] assign[=] name[self].values
for taget[tuple[[<ast.Name object at 0x7da1b23edd50>, <ast.Name object at 0x7da1b23ed960>]]] in starred[call[name[enumerate], parameter[name[self].columns]]] begin[:]
call[name[val_arr]][tuple[[<ast.Compare object at 0x7da1b23ee620>, <ast.Name object at 0x7da1b2407e50>]]] assign[=] call[name[ub]][name[name]]
call[name[val_arr]][tuple[[<ast.Compare object at 0x7da1b24042e0>, <ast.Name object at 0x7da1b23402e0>]]] assign[=] call[name[lb]][name[name]]
|
keyword[def] identifier[enforce_reset] ( identifier[self] ):
literal[string]
identifier[ub] =( identifier[self] . identifier[ubnd] *( literal[int] + identifier[self] . identifier[bound_tol] )). identifier[to_dict] ()
identifier[lb] =( identifier[self] . identifier[lbnd] *( literal[int] - identifier[self] . identifier[bound_tol] )). identifier[to_dict] ()
identifier[val_arr] = identifier[self] . identifier[values]
keyword[for] identifier[iname] , identifier[name] keyword[in] identifier[enumerate] ( identifier[self] . identifier[columns] ):
identifier[val_arr] [ identifier[val_arr] [:, identifier[iname] ]> identifier[ub] [ identifier[name] ], identifier[iname] ]= identifier[ub] [ identifier[name] ]
identifier[val_arr] [ identifier[val_arr] [:, identifier[iname] ]< identifier[lb] [ identifier[name] ], identifier[iname] ]= identifier[lb] [ identifier[name] ]
|
def enforce_reset(self):
"""enforce parameter bounds on the ensemble by resetting
violating vals to bound
"""
ub = (self.ubnd * (1.0 + self.bound_tol)).to_dict()
lb = (self.lbnd * (1.0 - self.bound_tol)).to_dict()
#for iname,name in enumerate(self.columns):
#self.loc[self.loc[:,name] > ub[name],name] = ub[name] * (1.0 + self.bound_tol)
#self.loc[self.loc[:,name] < lb[name],name] = lb[name].copy() * (1.0 - self.bound_tol)
# self.loc[self.loc[:,name] > ub[name],name] = ub[name]
# self.loc[self.loc[:,name] < lb[name],name] = lb[name]
val_arr = self.values
for (iname, name) in enumerate(self.columns):
val_arr[val_arr[:, iname] > ub[name], iname] = ub[name]
val_arr[val_arr[:, iname] < lb[name], iname] = lb[name] # depends on [control=['for'], data=[]]
|
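The column-wise clamp inside `enforce_reset` is ordinary NumPy boolean indexing; here it is on a toy array with hypothetical per-column bounds:

```python
import numpy as np

val_arr = np.array([[0.5, 9.0],
                    [2.0, 3.0]])
ub = {'a': 1.0, 'b': 5.0}
lb = {'a': 0.8, 'b': 4.0}
for iname, name in enumerate(['a', 'b']):
    val_arr[val_arr[:, iname] > ub[name], iname] = ub[name]
    val_arr[val_arr[:, iname] < lb[name], iname] = lb[name]
print(val_arr)  # [[0.8 5.] [1. 4.]]
```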
def _on_entry(self, event):
"""
Callback for the Entry widget, sets the Scale variable to the appropriate value.
:param event: Tkinter event
"""
contents = self._entry.get()
if contents == "":
return
try:
value = self._variable.set(int(contents))
except ValueError:
value = None
if not value:
self._on_scale(None)
|
def function[_on_entry, parameter[self, event]]:
constant[
Callback for the Entry widget, sets the Scale variable to the appropriate value.
:param event: Tkinter event
]
variable[contents] assign[=] call[name[self]._entry.get, parameter[]]
if compare[name[contents] equal[==] constant[]] begin[:]
return[None]
<ast.Try object at 0x7da1b2381ba0>
if <ast.UnaryOp object at 0x7da1b2383f70> begin[:]
call[name[self]._on_scale, parameter[constant[None]]]
|
keyword[def] identifier[_on_entry] ( identifier[self] , identifier[event] ):
literal[string]
identifier[contents] = identifier[self] . identifier[_entry] . identifier[get] ()
keyword[if] identifier[contents] == literal[string] :
keyword[return]
keyword[try] :
identifier[value] = identifier[self] . identifier[_variable] . identifier[set] ( identifier[int] ( identifier[contents] ))
keyword[except] identifier[ValueError] :
identifier[value] = keyword[None]
keyword[if] keyword[not] identifier[value] :
identifier[self] . identifier[_on_scale] ( keyword[None] )
|
def _on_entry(self, event):
"""
Callback for the Entry widget, sets the Scale variable to the appropriate value.
:param event: Tkinter event
"""
contents = self._entry.get()
if contents == '':
return # depends on [control=['if'], data=[]]
try:
value = self._variable.set(int(contents)) # depends on [control=['try'], data=[]]
except ValueError:
value = None # depends on [control=['except'], data=[]]
if not value:
self._on_scale(None) # depends on [control=['if'], data=[]]
|
def _generate_csv_header_line(*, header_names, header_prefix='', header=True, sep=',', newline='\n'):
"""
Helper function to generate a CSV header line depending on
the combination of arguments provided.
"""
if isinstance(header, str): # user-provided header line
header_line = header + newline
else:
if not (header is None or isinstance(header, bool)):
raise ValueError(f"Invalid value for argument `header`: {header}")
else:
if header:
header_line = header_prefix + sep.join(header_names) + newline
else:
header_line = ""
return header_line
|
def function[_generate_csv_header_line, parameter[]]:
constant[
Helper function to generate a CSV header line depending on
the combination of arguments provided.
]
if call[name[isinstance], parameter[name[header], name[str]]] begin[:]
variable[header_line] assign[=] binary_operation[name[header] + name[newline]]
return[name[header_line]]
|
keyword[def] identifier[_generate_csv_header_line] (*, identifier[header_names] , identifier[header_prefix] = literal[string] , identifier[header] = keyword[True] , identifier[sep] = literal[string] , identifier[newline] = literal[string] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[header] , identifier[str] ):
identifier[header_line] = identifier[header] + identifier[newline]
keyword[else] :
keyword[if] keyword[not] ( identifier[header] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[header] , identifier[bool] )):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[else] :
keyword[if] identifier[header] :
identifier[header_line] = identifier[header_prefix] + identifier[sep] . identifier[join] ( identifier[header_names] )+ identifier[newline]
keyword[else] :
identifier[header_line] = literal[string]
keyword[return] identifier[header_line]
|
def _generate_csv_header_line(*, header_names, header_prefix='', header=True, sep=',', newline='\n'):
"""
Helper function to generate a CSV header line depending on
the combination of arguments provided.
"""
if isinstance(header, str): # user-provided header line
header_line = header + newline # depends on [control=['if'], data=[]]
elif not (header is None or isinstance(header, bool)):
raise ValueError(f'Invalid value for argument `header`: {header}') # depends on [control=['if'], data=[]]
elif header:
header_line = header_prefix + sep.join(header_names) + newline # depends on [control=['if'], data=[]]
else:
header_line = ''
return header_line
|
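Assuming `_generate_csv_header_line` from the row above is in scope, its header modes behave like this:

```python
names = ['id', 'value']
assert _generate_csv_header_line(header_names=names) == 'id,value\n'
assert _generate_csv_header_line(header_names=names, header_prefix='#') == '#id,value\n'
assert _generate_csv_header_line(header_names=names, header='my,header') == 'my,header\n'
assert _generate_csv_header_line(header_names=names, header=False) == ''
```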
def is_widget_required_attribute(widget):
"""
Is this widget required?
"""
if not widget.is_required:
return False
if isinstance(widget, WIDGETS_NO_REQUIRED):
return False
return True
|
def function[is_widget_required_attribute, parameter[widget]]:
constant[
Is this widget required?
]
if <ast.UnaryOp object at 0x7da1b1d39cc0> begin[:]
return[constant[False]]
if call[name[isinstance], parameter[name[widget], name[WIDGETS_NO_REQUIRED]]] begin[:]
return[constant[False]]
return[constant[True]]
|
keyword[def] identifier[is_widget_required_attribute] ( identifier[widget] ):
literal[string]
keyword[if] keyword[not] identifier[widget] . identifier[is_required] :
keyword[return] keyword[False]
keyword[if] identifier[isinstance] ( identifier[widget] , identifier[WIDGETS_NO_REQUIRED] ):
keyword[return] keyword[False]
keyword[return] keyword[True]
|
def is_widget_required_attribute(widget):
"""
Is this widget required?
"""
if not widget.is_required:
return False # depends on [control=['if'], data=[]]
if isinstance(widget, WIDGETS_NO_REQUIRED):
return False # depends on [control=['if'], data=[]]
return True
|
def update_value(self, id_number, new_value, metadata=None):
"""
Update a canned value
:type id_number: int
:param id_number: canned value ID number
:type new_value: str
:param new_value: New canned value value
:type metadata: str
:param metadata: Optional metadata
:rtype: dict
:return: an empty dictionary
"""
data = {
'id': id_number,
'new_value': new_value
}
if metadata is not None:
data['metadata'] = metadata
return self.post('updateValue', data)
|
def function[update_value, parameter[self, id_number, new_value, metadata]]:
constant[
Update a canned value
:type id_number: int
:param id_number: canned value ID number
:type new_value: str
:param new_value: New canned value value
:type metadata: str
:param metadata: Optional metadata
:rtype: dict
:return: an empty dictionary
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b254f340>, <ast.Constant object at 0x7da1b254c850>], [<ast.Name object at 0x7da1b254e4a0>, <ast.Name object at 0x7da1b254cdc0>]]
if compare[name[metadata] is_not constant[None]] begin[:]
call[name[data]][constant[metadata]] assign[=] name[metadata]
return[call[name[self].post, parameter[constant[updateValue], name[data]]]]
|
keyword[def] identifier[update_value] ( identifier[self] , identifier[id_number] , identifier[new_value] , identifier[metadata] = keyword[None] ):
literal[string]
identifier[data] ={
literal[string] : identifier[id_number] ,
literal[string] : identifier[new_value]
}
keyword[if] identifier[metadata] keyword[is] keyword[not] keyword[None] :
identifier[data] [ literal[string] ]= identifier[metadata]
keyword[return] identifier[self] . identifier[post] ( literal[string] , identifier[data] )
|
def update_value(self, id_number, new_value, metadata=None):
"""
Update a canned value
:type id_number: int
:param id_number: canned value ID number
:type new_value: str
:param new_value: New canned value value
:type metadata: str
:param metadata: Optional metadata
:rtype: dict
:return: an empty dictionary
"""
data = {'id': id_number, 'new_value': new_value}
if metadata is not None:
data['metadata'] = metadata # depends on [control=['if'], data=['metadata']]
return self.post('updateValue', data)
|
def with_git(repo,
target_dir=None,
limit=None,
refspec="HEAD",
clone=True,
rev_list_args=None,
version_filter=lambda version: True):
"""
Decorate a project class with git-based version information.
This adds two attributes to a project class:
- A `versions` method that returns a list of available versions
for this project.
- A `repository` attribute that provides a repository string to
download from later.
We use the `git rev-list` subcommand to list available versions.
Args:
repo (str): Repository to download from, this will be stored
in the `repository` attribute of the decorated class.
target_dir (str): An optional path where we should put the clone.
If unspecified, we will use the `SRC_FILE` attribute of
the decorated class.
limit (int): Limit the number of commits to consider for available
versions. Versions are 'ordered' from latest to oldest.
refspec (str): A git refspec string to start listing the versions from.
clone (bool): Should we clone the repo if it isn't already available
in our tmp dir? Defaults to `True`. You can set this to False to
            avoid time-consuming clones when the project has not been accessed
            at least once in your installation.
        rev_list_args (list of str): Additional arguments you want to pass to
`git rev-list`.
version_filter (class filter): Filter function to remove unwanted
project versions.
"""
if not rev_list_args:
rev_list_args = []
def git_decorator(cls):
from benchbuild.utils.cmd import git
@staticmethod
def versions_impl():
"""Return a list of versions from the git hashes up to :limit:."""
directory = cls.SRC_FILE if target_dir is None else target_dir
repo_prefix = local.path(str(CFG["tmp_dir"]))
repo_loc = local.path(repo_prefix) / directory
if source_required(repo_loc):
if not clone:
return []
git("clone", repo, repo_loc)
update_hash(repo_loc)
with local.cwd(repo_loc):
rev_list = git("rev-list", "--abbrev-commit", "--abbrev=10",
refspec, *rev_list_args).strip().split('\n')
latest = git("rev-parse", "--short=10",
refspec).strip().split('\n')
cls.VERSION = latest[0]
if limit:
return list(filter(version_filter, rev_list))[:limit]
return list(filter(version_filter, rev_list))
def download_impl(self):
"""Download the selected version."""
nonlocal target_dir, git
directory = cls.SRC_FILE if target_dir is None else target_dir
Git(self.repository, directory)
with local.cwd(directory):
git("checkout", self.version)
cls.versions = versions_impl
cls.download = download_impl
cls.repository = repo
return cls
return git_decorator
|
def function[with_git, parameter[repo, target_dir, limit, refspec, clone, rev_list_args, version_filter]]:
constant[
Decorate a project class with git-based version information.
This adds two attributes to a project class:
- A `versions` method that returns a list of available versions
for this project.
- A `repository` attribute that provides a repository string to
download from later.
We use the `git rev-list` subcommand to list available versions.
Args:
repo (str): Repository to download from, this will be stored
in the `repository` attribute of the decorated class.
target_dir (str): An optional path where we should put the clone.
If unspecified, we will use the `SRC_FILE` attribute of
the decorated class.
limit (int): Limit the number of commits to consider for available
versions. Versions are 'ordered' from latest to oldest.
refspec (str): A git refspec string to start listing the versions from.
clone (bool): Should we clone the repo if it isn't already available
in our tmp dir? Defaults to `True`. You can set this to False to
            avoid time-consuming clones when the project has not been accessed
            at least once in your installation.
        rev_list_args (list of str): Additional arguments you want to pass to
`git rev-list`.
version_filter (class filter): Filter function to remove unwanted
project versions.
]
if <ast.UnaryOp object at 0x7da18f09fe50> begin[:]
variable[rev_list_args] assign[=] list[[]]
def function[git_decorator, parameter[cls]]:
from relative_module[benchbuild.utils.cmd] import module[git]
def function[versions_impl, parameter[]]:
constant[Return a list of versions from the git hashes up to :limit:.]
variable[directory] assign[=] <ast.IfExp object at 0x7da18f09f910>
variable[repo_prefix] assign[=] call[name[local].path, parameter[call[name[str], parameter[call[name[CFG]][constant[tmp_dir]]]]]]
variable[repo_loc] assign[=] binary_operation[call[name[local].path, parameter[name[repo_prefix]]] / name[directory]]
if call[name[source_required], parameter[name[repo_loc]]] begin[:]
if <ast.UnaryOp object at 0x7da18f09ecb0> begin[:]
return[list[[]]]
call[name[git], parameter[constant[clone], name[repo], name[repo_loc]]]
call[name[update_hash], parameter[name[repo_loc]]]
with call[name[local].cwd, parameter[name[repo_loc]]] begin[:]
variable[rev_list] assign[=] call[call[call[name[git], parameter[constant[rev-list], constant[--abbrev-commit], constant[--abbrev=10], name[refspec], <ast.Starred object at 0x7da18f09ebc0>]].strip, parameter[]].split, parameter[constant[
]]]
variable[latest] assign[=] call[call[call[name[git], parameter[constant[rev-parse], constant[--short=10], name[refspec]]].strip, parameter[]].split, parameter[constant[
]]]
name[cls].VERSION assign[=] call[name[latest]][constant[0]]
if name[limit] begin[:]
return[call[call[name[list], parameter[call[name[filter], parameter[name[version_filter], name[rev_list]]]]]][<ast.Slice object at 0x7da18f09f6a0>]]
return[call[name[list], parameter[call[name[filter], parameter[name[version_filter], name[rev_list]]]]]]
def function[download_impl, parameter[self]]:
constant[Download the selected version.]
<ast.Nonlocal object at 0x7da18f09ef50>
variable[directory] assign[=] <ast.IfExp object at 0x7da18f09c400>
call[name[Git], parameter[name[self].repository, name[directory]]]
with call[name[local].cwd, parameter[name[directory]]] begin[:]
call[name[git], parameter[constant[checkout], name[self].version]]
name[cls].versions assign[=] name[versions_impl]
name[cls].download assign[=] name[download_impl]
name[cls].repository assign[=] name[repo]
return[name[cls]]
return[name[git_decorator]]
|
keyword[def] identifier[with_git] ( identifier[repo] ,
identifier[target_dir] = keyword[None] ,
identifier[limit] = keyword[None] ,
identifier[refspec] = literal[string] ,
identifier[clone] = keyword[True] ,
identifier[rev_list_args] = keyword[None] ,
identifier[version_filter] = keyword[lambda] identifier[version] : keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[rev_list_args] :
identifier[rev_list_args] =[]
keyword[def] identifier[git_decorator] ( identifier[cls] ):
keyword[from] identifier[benchbuild] . identifier[utils] . identifier[cmd] keyword[import] identifier[git]
@ identifier[staticmethod]
keyword[def] identifier[versions_impl] ():
literal[string]
identifier[directory] = identifier[cls] . identifier[SRC_FILE] keyword[if] identifier[target_dir] keyword[is] keyword[None] keyword[else] identifier[target_dir]
identifier[repo_prefix] = identifier[local] . identifier[path] ( identifier[str] ( identifier[CFG] [ literal[string] ]))
identifier[repo_loc] = identifier[local] . identifier[path] ( identifier[repo_prefix] )/ identifier[directory]
keyword[if] identifier[source_required] ( identifier[repo_loc] ):
keyword[if] keyword[not] identifier[clone] :
keyword[return] []
identifier[git] ( literal[string] , identifier[repo] , identifier[repo_loc] )
identifier[update_hash] ( identifier[repo_loc] )
keyword[with] identifier[local] . identifier[cwd] ( identifier[repo_loc] ):
identifier[rev_list] = identifier[git] ( literal[string] , literal[string] , literal[string] ,
identifier[refspec] ,* identifier[rev_list_args] ). identifier[strip] (). identifier[split] ( literal[string] )
identifier[latest] = identifier[git] ( literal[string] , literal[string] ,
identifier[refspec] ). identifier[strip] (). identifier[split] ( literal[string] )
identifier[cls] . identifier[VERSION] = identifier[latest] [ literal[int] ]
keyword[if] identifier[limit] :
keyword[return] identifier[list] ( identifier[filter] ( identifier[version_filter] , identifier[rev_list] ))[: identifier[limit] ]
keyword[return] identifier[list] ( identifier[filter] ( identifier[version_filter] , identifier[rev_list] ))
keyword[def] identifier[download_impl] ( identifier[self] ):
literal[string]
keyword[nonlocal] identifier[target_dir] , identifier[git]
identifier[directory] = identifier[cls] . identifier[SRC_FILE] keyword[if] identifier[target_dir] keyword[is] keyword[None] keyword[else] identifier[target_dir]
identifier[Git] ( identifier[self] . identifier[repository] , identifier[directory] )
keyword[with] identifier[local] . identifier[cwd] ( identifier[directory] ):
identifier[git] ( literal[string] , identifier[self] . identifier[version] )
identifier[cls] . identifier[versions] = identifier[versions_impl]
identifier[cls] . identifier[download] = identifier[download_impl]
identifier[cls] . identifier[repository] = identifier[repo]
keyword[return] identifier[cls]
keyword[return] identifier[git_decorator]
|
def with_git(repo, target_dir=None, limit=None, refspec='HEAD', clone=True, rev_list_args=None, version_filter=lambda version: True):
"""
Decorate a project class with git-based version information.
This adds two attributes to a project class:
- A `versions` method that returns a list of available versions
for this project.
- A `repository` attribute that provides a repository string to
download from later.
We use the `git rev-list` subcommand to list available versions.
Args:
repo (str): Repository to download from, this will be stored
in the `repository` attribute of the decorated class.
target_dir (str): An optional path where we should put the clone.
If unspecified, we will use the `SRC_FILE` attribute of
the decorated class.
limit (int): Limit the number of commits to consider for available
versions. Versions are 'ordered' from latest to oldest.
refspec (str): A git refspec string to start listing the versions from.
clone (bool): Should we clone the repo if it isn't already available
in our tmp dir? Defaults to `True`. You can set this to False to
            avoid time-consuming clones when the project has not been accessed
            at least once in your installation.
        rev_list_args (list of str): Additional arguments you want to pass to
`git rev-list`.
version_filter (class filter): Filter function to remove unwanted
project versions.
"""
if not rev_list_args:
rev_list_args = [] # depends on [control=['if'], data=[]]
def git_decorator(cls):
from benchbuild.utils.cmd import git
@staticmethod
def versions_impl():
"""Return a list of versions from the git hashes up to :limit:."""
directory = cls.SRC_FILE if target_dir is None else target_dir
repo_prefix = local.path(str(CFG['tmp_dir']))
repo_loc = local.path(repo_prefix) / directory
if source_required(repo_loc):
if not clone:
return [] # depends on [control=['if'], data=[]]
git('clone', repo, repo_loc)
update_hash(repo_loc) # depends on [control=['if'], data=[]]
with local.cwd(repo_loc):
rev_list = git('rev-list', '--abbrev-commit', '--abbrev=10', refspec, *rev_list_args).strip().split('\n')
latest = git('rev-parse', '--short=10', refspec).strip().split('\n')
cls.VERSION = latest[0] # depends on [control=['with'], data=[]]
if limit:
return list(filter(version_filter, rev_list))[:limit] # depends on [control=['if'], data=[]]
return list(filter(version_filter, rev_list))
def download_impl(self):
"""Download the selected version."""
nonlocal target_dir, git
directory = cls.SRC_FILE if target_dir is None else target_dir
Git(self.repository, directory)
with local.cwd(directory):
git('checkout', self.version) # depends on [control=['with'], data=[]]
cls.versions = versions_impl
cls.download = download_impl
cls.repository = repo
return cls
return git_decorator
|
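How the `with_git` decorator is meant to be applied, sketched from its docstring; the repository URL and project class are hypothetical, benchbuild must be importable, and nothing is cloned until `versions()` or `download()` runs:

```python
@with_git('https://github.com/example/project.git', limit=5)
class MyProject:
    SRC_FILE = 'project'

# MyProject.repository == 'https://github.com/example/project.git'
# MyProject.versions()  -> up to 5 abbreviated commit hashes from HEAD
# instance.download()   -> clone (if needed) plus checkout of instance.version
```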
def get_actions(self, request):
"""Disable the "delete selected" admin action.
Otherwise the action is present even though has_delete_permission is False, it just doesn't
work.
"""
actions = super(CertificateMixin, self).get_actions(request)
actions.pop('delete_selected', '')
return actions
|
def function[get_actions, parameter[self, request]]:
constant[Disable the "delete selected" admin action.
Otherwise the action is present even though has_delete_permission is False, it just doesn't
work.
]
variable[actions] assign[=] call[call[name[super], parameter[name[CertificateMixin], name[self]]].get_actions, parameter[name[request]]]
call[name[actions].pop, parameter[constant[delete_selected], constant[]]]
return[name[actions]]
|
keyword[def] identifier[get_actions] ( identifier[self] , identifier[request] ):
literal[string]
identifier[actions] = identifier[super] ( identifier[CertificateMixin] , identifier[self] ). identifier[get_actions] ( identifier[request] )
identifier[actions] . identifier[pop] ( literal[string] , literal[string] )
keyword[return] identifier[actions]
|
def get_actions(self, request):
"""Disable the "delete selected" admin action.
Otherwise the action is present even though has_delete_permission is False, it just doesn't
work.
"""
actions = super(CertificateMixin, self).get_actions(request)
actions.pop('delete_selected', '')
return actions
|
def artist_top_tracks(self, spotify_id, country):
"""Get an artists top tracks per country with their ID.
Parameters
----------
spotify_id : str
The spotify_id to search by.
country : COUNTRY_TP
COUNTRY
"""
route = Route('GET', '/artists/{spotify_id}/top-tracks', spotify_id=spotify_id)
payload = {'country': country}
return self.request(route, params=payload)
|
def function[artist_top_tracks, parameter[self, spotify_id, country]]:
    constant[Get an artist's top tracks per country with their ID.
Parameters
----------
spotify_id : str
The spotify_id to search by.
country : COUNTRY_TP
COUNTRY
]
variable[route] assign[=] call[name[Route], parameter[constant[GET], constant[/artists/{spotify_id}/top-tracks]]]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da204620f70>], [<ast.Name object at 0x7da204620640>]]
return[call[name[self].request, parameter[name[route]]]]
|
keyword[def] identifier[artist_top_tracks] ( identifier[self] , identifier[spotify_id] , identifier[country] ):
literal[string]
identifier[route] = identifier[Route] ( literal[string] , literal[string] , identifier[spotify_id] = identifier[spotify_id] )
identifier[payload] ={ literal[string] : identifier[country] }
keyword[return] identifier[self] . identifier[request] ( identifier[route] , identifier[params] = identifier[payload] )
|
def artist_top_tracks(self, spotify_id, country):
"""Get an artists top tracks per country with their ID.
Parameters
----------
spotify_id : str
The spotify_id to search by.
country : COUNTRY_TP
COUNTRY
"""
route = Route('GET', '/artists/{spotify_id}/top-tracks', spotify_id=spotify_id)
payload = {'country': country}
return self.request(route, params=payload)
|
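As a rough standalone equivalent of the client method above: the Route/self.request machinery belongs to the original wrapper, so this sketch talks to the public Spotify Web API with requests directly. The endpoint string and bearer-token handling are assumptions based on Spotify's documented API, not code from the original project.

import requests

def artist_top_tracks(token, spotify_id, country):
    # `country` is a two-letter ISO 3166-1 code, e.g. "US".
    url = f"https://api.spotify.com/v1/artists/{spotify_id}/top-tracks"
    resp = requests.get(url,
                        headers={"Authorization": f"Bearer {token}"},
                        params={"country": country})
    resp.raise_for_status()
    return resp.json()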
def _update_data(self, *data_dict, **kwargs):
"""
A private method to process and update entity values correctly.
:param data: A dictionary of values to be updated for the entity
:param kwargs: keyword arguments with key-value pairs to be updated
"""
# Load each of the fields given in the data dictionary
self.errors = {}
for data in data_dict:
if not isinstance(data, dict):
raise AssertionError(
f'Positional argument "{data}" passed must be a dict. '
f'This argument serves as a template for loading common '
f'values.'
)
for field_name, val in data.items():
setattr(self, field_name, val)
# Now load against the keyword arguments
for field_name, val in kwargs.items():
setattr(self, field_name, val)
# Raise any errors found during update
if self.errors:
raise ValidationError(self.errors)
|
def function[_update_data, parameter[self]]:
constant[
A private method to process and update entity values correctly.
:param data: A dictionary of values to be updated for the entity
:param kwargs: keyword arguments with key-value pairs to be updated
]
name[self].errors assign[=] dictionary[[], []]
for taget[name[data]] in starred[name[data_dict]] begin[:]
if <ast.UnaryOp object at 0x7da20c76d690> begin[:]
<ast.Raise object at 0x7da20c76cdf0>
for taget[tuple[[<ast.Name object at 0x7da20c76dff0>, <ast.Name object at 0x7da20c76df30>]]] in starred[call[name[data].items, parameter[]]] begin[:]
call[name[setattr], parameter[name[self], name[field_name], name[val]]]
for taget[tuple[[<ast.Name object at 0x7da1b1b0ce20>, <ast.Name object at 0x7da1b1b0cd90>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
call[name[setattr], parameter[name[self], name[field_name], name[val]]]
if name[self].errors begin[:]
<ast.Raise object at 0x7da1b1b0c0d0>
|
keyword[def] identifier[_update_data] ( identifier[self] ,* identifier[data_dict] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[errors] ={}
keyword[for] identifier[data] keyword[in] identifier[data_dict] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[dict] ):
keyword[raise] identifier[AssertionError] (
literal[string]
literal[string]
literal[string]
)
keyword[for] identifier[field_name] , identifier[val] keyword[in] identifier[data] . identifier[items] ():
identifier[setattr] ( identifier[self] , identifier[field_name] , identifier[val] )
keyword[for] identifier[field_name] , identifier[val] keyword[in] identifier[kwargs] . identifier[items] ():
identifier[setattr] ( identifier[self] , identifier[field_name] , identifier[val] )
keyword[if] identifier[self] . identifier[errors] :
keyword[raise] identifier[ValidationError] ( identifier[self] . identifier[errors] )
|
def _update_data(self, *data_dict, **kwargs):
"""
A private method to process and update entity values correctly.
:param data: A dictionary of values to be updated for the entity
:param kwargs: keyword arguments with key-value pairs to be updated
"""
# Load each of the fields given in the data dictionary
self.errors = {}
for data in data_dict:
if not isinstance(data, dict):
raise AssertionError(f'Positional argument "{data}" passed must be a dict. This argument serves as a template for loading common values.') # depends on [control=['if'], data=[]]
for (field_name, val) in data.items():
setattr(self, field_name, val) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['data']]
# Now load against the keyword arguments
for (field_name, val) in kwargs.items():
setattr(self, field_name, val) # depends on [control=['for'], data=[]]
# Raise any errors found during update
if self.errors:
raise ValidationError(self.errors) # depends on [control=['if'], data=[]]
|
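A compact sketch of the update pattern above: positional dicts act as templates of common values, keyword arguments override them, and validation errors are collected and raised once at the end. Entity, FIELDS, and ValidationError below are minimal stand-ins, not the original framework's classes.

class ValidationError(Exception):
    pass

class Entity:
    FIELDS = {"name", "age"}

    def _update_data(self, *data_dict, **kwargs):
        self.errors = {}
        for data in data_dict:
            if not isinstance(data, dict):
                raise AssertionError(f'Positional argument "{data}" must be a dict. '
                                     f'It serves as a template of common values.')
            for field_name, val in data.items():
                self._set_checked(field_name, val)
        for field_name, val in kwargs.items():   # kwargs win over templates
            self._set_checked(field_name, val)
        if self.errors:                          # raise everything at once
            raise ValidationError(self.errors)

    def _set_checked(self, field_name, val):
        if field_name not in self.FIELDS:
            self.errors[field_name] = "unknown field"
        else:
            setattr(self, field_name, val)

# e = Entity()
# e._update_data({"name": "a", "age": 1}, age=2)   # age ends up 2
# e._update_data(bogus=1)                          # raises ValidationError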
def run_iter(self, mine=False, jid=None):
'''
Execute and yield returns as they come in, do not print to the display
mine
The Single objects will use mine_functions defined in the roster,
pillar, or master config (they will be checked in that order) and
will modify the argv with the arguments from mine_functions
'''
fstr = '{0}.prep_jid'.format(self.opts['master_job_cache'])
jid = self.returners[fstr](passed_jid=jid or self.opts.get('jid', None))
# Save the invocation information
argv = self.opts['argv']
if self.opts.get('raw_shell', False):
fun = 'ssh._raw'
args = argv
else:
fun = argv[0] if argv else ''
args = argv[1:]
job_load = {
'jid': jid,
'tgt_type': self.tgt_type,
'tgt': self.opts['tgt'],
'user': self.opts['user'],
'fun': fun,
'arg': args,
}
# save load to the master job cache
if self.opts['master_job_cache'] == 'local_cache':
self.returners['{0}.save_load'.format(self.opts['master_job_cache'])](jid, job_load, minions=self.targets.keys())
else:
self.returners['{0}.save_load'.format(self.opts['master_job_cache'])](jid, job_load)
for ret in self.handle_ssh(mine=mine):
host = next(six.iterkeys(ret))
self.cache_job(jid, host, ret[host], fun)
if self.event:
id_, data = next(six.iteritems(ret))
if isinstance(data, six.text_type):
data = {'return': data}
if 'id' not in data:
data['id'] = id_
data['jid'] = jid # make the jid in the payload the same as the jid in the tag
self.event.fire_event(
data,
salt.utils.event.tagify(
[jid, 'ret', host],
'job'))
yield ret
|
def function[run_iter, parameter[self, mine, jid]]:
constant[
Execute and yield returns as they come in, do not print to the display
mine
The Single objects will use mine_functions defined in the roster,
pillar, or master config (they will be checked in that order) and
will modify the argv with the arguments from mine_functions
]
variable[fstr] assign[=] call[constant[{0}.prep_jid].format, parameter[call[name[self].opts][constant[master_job_cache]]]]
variable[jid] assign[=] call[call[name[self].returners][name[fstr]], parameter[]]
variable[argv] assign[=] call[name[self].opts][constant[argv]]
if call[name[self].opts.get, parameter[constant[raw_shell], constant[False]]] begin[:]
variable[fun] assign[=] constant[ssh._raw]
variable[args] assign[=] name[argv]
variable[job_load] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc91e0>, <ast.Constant object at 0x7da18bcc9db0>, <ast.Constant object at 0x7da18bcc9b10>, <ast.Constant object at 0x7da18bccbdc0>, <ast.Constant object at 0x7da18bcc85e0>, <ast.Constant object at 0x7da18bcc8a60>], [<ast.Name object at 0x7da18bcc8580>, <ast.Attribute object at 0x7da18bcca500>, <ast.Subscript object at 0x7da18bcc9c00>, <ast.Subscript object at 0x7da18bcc9d80>, <ast.Name object at 0x7da18bcca800>, <ast.Name object at 0x7da18bccb010>]]
if compare[call[name[self].opts][constant[master_job_cache]] equal[==] constant[local_cache]] begin[:]
call[call[name[self].returners][call[constant[{0}.save_load].format, parameter[call[name[self].opts][constant[master_job_cache]]]]], parameter[name[jid], name[job_load]]]
for taget[name[ret]] in starred[call[name[self].handle_ssh, parameter[]]] begin[:]
variable[host] assign[=] call[name[next], parameter[call[name[six].iterkeys, parameter[name[ret]]]]]
call[name[self].cache_job, parameter[name[jid], name[host], call[name[ret]][name[host]], name[fun]]]
if name[self].event begin[:]
<ast.Tuple object at 0x7da18c4cccd0> assign[=] call[name[next], parameter[call[name[six].iteritems, parameter[name[ret]]]]]
if call[name[isinstance], parameter[name[data], name[six].text_type]] begin[:]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cf100>], [<ast.Name object at 0x7da18c4cf0d0>]]
if compare[constant[id] <ast.NotIn object at 0x7da2590d7190> name[data]] begin[:]
call[name[data]][constant[id]] assign[=] name[id_]
call[name[data]][constant[jid]] assign[=] name[jid]
call[name[self].event.fire_event, parameter[name[data], call[name[salt].utils.event.tagify, parameter[list[[<ast.Name object at 0x7da18c4ccb50>, <ast.Constant object at 0x7da18c4cdc30>, <ast.Name object at 0x7da18c4cf7f0>]], constant[job]]]]]
<ast.Yield object at 0x7da18c4cdb40>
|
keyword[def] identifier[run_iter] ( identifier[self] , identifier[mine] = keyword[False] , identifier[jid] = keyword[None] ):
literal[string]
identifier[fstr] = literal[string] . identifier[format] ( identifier[self] . identifier[opts] [ literal[string] ])
identifier[jid] = identifier[self] . identifier[returners] [ identifier[fstr] ]( identifier[passed_jid] = identifier[jid] keyword[or] identifier[self] . identifier[opts] . identifier[get] ( literal[string] , keyword[None] ))
identifier[argv] = identifier[self] . identifier[opts] [ literal[string] ]
keyword[if] identifier[self] . identifier[opts] . identifier[get] ( literal[string] , keyword[False] ):
identifier[fun] = literal[string]
identifier[args] = identifier[argv]
keyword[else] :
identifier[fun] = identifier[argv] [ literal[int] ] keyword[if] identifier[argv] keyword[else] literal[string]
identifier[args] = identifier[argv] [ literal[int] :]
identifier[job_load] ={
literal[string] : identifier[jid] ,
literal[string] : identifier[self] . identifier[tgt_type] ,
literal[string] : identifier[self] . identifier[opts] [ literal[string] ],
literal[string] : identifier[self] . identifier[opts] [ literal[string] ],
literal[string] : identifier[fun] ,
literal[string] : identifier[args] ,
}
keyword[if] identifier[self] . identifier[opts] [ literal[string] ]== literal[string] :
identifier[self] . identifier[returners] [ literal[string] . identifier[format] ( identifier[self] . identifier[opts] [ literal[string] ])]( identifier[jid] , identifier[job_load] , identifier[minions] = identifier[self] . identifier[targets] . identifier[keys] ())
keyword[else] :
identifier[self] . identifier[returners] [ literal[string] . identifier[format] ( identifier[self] . identifier[opts] [ literal[string] ])]( identifier[jid] , identifier[job_load] )
keyword[for] identifier[ret] keyword[in] identifier[self] . identifier[handle_ssh] ( identifier[mine] = identifier[mine] ):
identifier[host] = identifier[next] ( identifier[six] . identifier[iterkeys] ( identifier[ret] ))
identifier[self] . identifier[cache_job] ( identifier[jid] , identifier[host] , identifier[ret] [ identifier[host] ], identifier[fun] )
keyword[if] identifier[self] . identifier[event] :
identifier[id_] , identifier[data] = identifier[next] ( identifier[six] . identifier[iteritems] ( identifier[ret] ))
keyword[if] identifier[isinstance] ( identifier[data] , identifier[six] . identifier[text_type] ):
identifier[data] ={ literal[string] : identifier[data] }
keyword[if] literal[string] keyword[not] keyword[in] identifier[data] :
identifier[data] [ literal[string] ]= identifier[id_]
identifier[data] [ literal[string] ]= identifier[jid]
identifier[self] . identifier[event] . identifier[fire_event] (
identifier[data] ,
identifier[salt] . identifier[utils] . identifier[event] . identifier[tagify] (
[ identifier[jid] , literal[string] , identifier[host] ],
literal[string] ))
keyword[yield] identifier[ret]
|
def run_iter(self, mine=False, jid=None):
"""
Execute and yield returns as they come in, do not print to the display
mine
The Single objects will use mine_functions defined in the roster,
pillar, or master config (they will be checked in that order) and
will modify the argv with the arguments from mine_functions
"""
fstr = '{0}.prep_jid'.format(self.opts['master_job_cache'])
jid = self.returners[fstr](passed_jid=jid or self.opts.get('jid', None))
# Save the invocation information
argv = self.opts['argv']
if self.opts.get('raw_shell', False):
fun = 'ssh._raw'
args = argv # depends on [control=['if'], data=[]]
else:
fun = argv[0] if argv else ''
args = argv[1:]
job_load = {'jid': jid, 'tgt_type': self.tgt_type, 'tgt': self.opts['tgt'], 'user': self.opts['user'], 'fun': fun, 'arg': args}
# save load to the master job cache
if self.opts['master_job_cache'] == 'local_cache':
self.returners['{0}.save_load'.format(self.opts['master_job_cache'])](jid, job_load, minions=self.targets.keys()) # depends on [control=['if'], data=[]]
else:
self.returners['{0}.save_load'.format(self.opts['master_job_cache'])](jid, job_load)
for ret in self.handle_ssh(mine=mine):
host = next(six.iterkeys(ret))
self.cache_job(jid, host, ret[host], fun)
if self.event:
(id_, data) = next(six.iteritems(ret))
if isinstance(data, six.text_type):
data = {'return': data} # depends on [control=['if'], data=[]]
if 'id' not in data:
data['id'] = id_ # depends on [control=['if'], data=['data']]
data['jid'] = jid # make the jid in the payload the same as the jid in the tag
self.event.fire_event(data, salt.utils.event.tagify([jid, 'ret', host], 'job')) # depends on [control=['if'], data=[]]
yield ret # depends on [control=['for'], data=['ret']]
|
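Stripped of Salt specifics, run_iter has a simple shape: describe the job once, then stream per-host results through a generator, caching each and firing an event before yielding it. The sketch below keeps only that control flow; cache_job, fire_event, and the targets dict standing in for handle_ssh() are injected stand-ins, not Salt's API.

def run_iter(jid, targets, fun, args, cache_job, fire_event):
    job_load = {"jid": jid, "fun": fun, "arg": args, "tgt": list(targets)}
    # ... persist job_load to the master job cache here ...
    for host, data in targets.items():           # stands in for handle_ssh()
        cache_job(jid, host, data, fun)
        if isinstance(data, str):                # normalise bare returns
            data = {"return": data}
        data.setdefault("id", host)
        data["jid"] = jid                        # tag the payload with the jid
        fire_event(data, f"salt/job/{jid}/ret/{host}")
        yield {host: data}

# for ret in run_iter("20240101", {"web1": "ok"}, "test.ping", [],
#                     cache_job=lambda *a: None,
#                     fire_event=lambda d, t: print(t, d)):
#     print(ret)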
def getQCAnalyses(self):
"""
Return the Quality Control analyses.
:returns: a list of QC analyses
:rtype: List of ReferenceAnalysis/DuplicateAnalysis
"""
qc_types = ['ReferenceAnalysis', 'DuplicateAnalysis']
analyses = self.getAnalyses()
return [a for a in analyses if a.portal_type in qc_types]
|
def function[getQCAnalyses, parameter[self]]:
constant[
Return the Quality Control analyses.
:returns: a list of QC analyses
:rtype: List of ReferenceAnalysis/DuplicateAnalysis
]
variable[qc_types] assign[=] list[[<ast.Constant object at 0x7da1b2344fd0>, <ast.Constant object at 0x7da1b2344100>]]
variable[analyses] assign[=] call[name[self].getAnalyses, parameter[]]
return[<ast.ListComp object at 0x7da1b2347520>]
|
keyword[def] identifier[getQCAnalyses] ( identifier[self] ):
literal[string]
identifier[qc_types] =[ literal[string] , literal[string] ]
identifier[analyses] = identifier[self] . identifier[getAnalyses] ()
keyword[return] [ identifier[a] keyword[for] identifier[a] keyword[in] identifier[analyses] keyword[if] identifier[a] . identifier[portal_type] keyword[in] identifier[qc_types] ]
|
def getQCAnalyses(self):
"""
Return the Quality Control analyses.
:returns: a list of QC analyses
:rtype: List of ReferenceAnalysis/DuplicateAnalysis
"""
qc_types = ['ReferenceAnalysis', 'DuplicateAnalysis']
analyses = self.getAnalyses()
return [a for a in analyses if a.portal_type in qc_types]
|
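The method above is a plain membership filter over portal_type. In isolation it looks like the sketch below, where Analysis is a stand-in for the Plone content objects the original iterates over.

from collections import namedtuple

Analysis = namedtuple("Analysis", "id portal_type")
QC_TYPES = {"ReferenceAnalysis", "DuplicateAnalysis"}

def get_qc_analyses(analyses):
    return [a for a in analyses if a.portal_type in QC_TYPES]

# sample = [Analysis("a1", "Analysis"), Analysis("a2", "ReferenceAnalysis")]
# get_qc_analyses(sample)  # -> [Analysis(id='a2', portal_type='ReferenceAnalysis')]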
def _lookup_qrz_callsign(self, callsign=None, apikey=None, apiv="1.3.3"):
""" Performs the callsign lookup against the QRZ.com XML API:
"""
if apikey is None:
raise AttributeError("Session Key Missing")
callsign = callsign.upper()
response = self._request_callsign_info_from_qrz(callsign, apikey, apiv)
root = BeautifulSoup(response.text, "html.parser")
lookup = {}
if root.error:
if re.search('Not found', root.error.text, re.I): #No data available for callsign
raise KeyError(root.error.text)
#try to get a new session key and try to request again
elif re.search('Session Timeout', root.error.text, re.I) or re.search('Invalid session key', root.error.text, re.I):
apikey = self._get_qrz_session_key(self._username, self._pwd)
response = self._request_callsign_info_from_qrz(callsign, apikey, apiv)
root = BeautifulSoup(response.text, "html.parser")
#if this fails again, raise error
if root.error:
if re.search('Not found', root.error.text, re.I): #No data available for callsign
raise KeyError(root.error.text)
else:
raise AttributeError(root.error.text) #most likely session key invalid
else:
#update API Key ob Lookup object
self._apikey = apikey
else:
raise AttributeError(root.error.text) #most likely session key missing
if root.callsign is None:
raise ValueError
if root.callsign.call:
lookup[const.CALLSIGN] = root.callsign.call.text
if root.callsign.xref:
lookup[const.XREF] = root.callsign.xref.text
if root.callsign.aliases:
lookup[const.ALIASES] = root.callsign.aliases.text.split(',')
if root.callsign.dxcc:
lookup[const.ADIF] = int(root.callsign.dxcc.text)
if root.callsign.fname:
lookup[const.FNAME] = root.callsign.fname.text
if root.callsign.find("name"):
lookup[const.NAME] = root.callsign.find('name').get_text()
if root.callsign.addr1:
lookup[const.ADDR1] = root.callsign.addr1.text
if root.callsign.addr2:
lookup[const.ADDR2] = root.callsign.addr2.text
if root.callsign.state:
lookup[const.STATE] = root.callsign.state.text
if root.callsign.zip:
lookup[const.ZIPCODE] = root.callsign.zip.text
if root.callsign.country:
lookup[const.COUNTRY] = root.callsign.country.text
if root.callsign.ccode:
lookup[const.CCODE] = int(root.callsign.ccode.text)
if root.callsign.lat:
lookup[const.LATITUDE] = float(root.callsign.lat.text)
if root.callsign.lon:
lookup[const.LONGITUDE] = float(root.callsign.lon.text)
if root.callsign.grid:
lookup[const.LOCATOR] = root.callsign.grid.text
if root.callsign.county:
lookup[const.COUNTY] = root.callsign.county.text
if root.callsign.fips:
lookup[const.FIPS] = int(root.callsign.fips.text) # check type
if root.callsign.land:
lookup[const.LAND] = root.callsign.land.text
if root.callsign.efdate:
try:
lookup[const.EFDATE] = datetime.strptime(root.callsign.efdate.text, '%Y-%m-%d').replace(tzinfo=UTC)
except ValueError:
self._logger.debug("[QRZ.com] efdate: Invalid DateTime; " + callsign + " " + root.callsign.efdate.text)
if root.callsign.expdate:
try:
lookup[const.EXPDATE] = datetime.strptime(root.callsign.expdate.text, '%Y-%m-%d').replace(tzinfo=UTC)
except ValueError:
self._logger.debug("[QRZ.com] expdate: Invalid DateTime; " + callsign + " " + root.callsign.expdate.text)
if root.callsign.p_call:
lookup[const.P_CALL] = root.callsign.p_call.text
if root.callsign.find('class'):
lookup[const.LICENSE_CLASS] = root.callsign.find('class').get_text()
if root.callsign.codes:
lookup[const.CODES] = root.callsign.codes.text
if root.callsign.qslmgr:
lookup[const.QSLMGR] = root.callsign.qslmgr.text
if root.callsign.email:
lookup[const.EMAIL] = root.callsign.email.text
if root.callsign.url:
lookup[const.URL] = root.callsign.url.text
if root.callsign.u_views:
lookup[const.U_VIEWS] = int(root.callsign.u_views.text)
if root.callsign.bio:
lookup[const.BIO] = root.callsign.bio.text
if root.callsign.biodate:
try:
lookup[const.BIODATE] = datetime.strptime(root.callsign.biodate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=UTC)
except ValueError:
self._logger.warning("[QRZ.com] biodate: Invalid DateTime; " + callsign)
if root.callsign.image:
lookup[const.IMAGE] = root.callsign.image.text
if root.callsign.imageinfo:
lookup[const.IMAGE_INFO] = root.callsign.imageinfo.text
if root.callsign.serial:
lookup[const.SERIAL] = int(root.callsign.serial.text)  # int() works on Python 2 and 3; long() is Python 2 only
if root.callsign.moddate:
try:
lookup[const.MODDATE] = datetime.strptime(root.callsign.moddate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=UTC)
except ValueError:
self._logger.warning("[QRZ.com] moddate: Invalid DateTime; " + callsign)
if root.callsign.MSA:
lookup[const.MSA] = int(root.callsign.MSA.text)
if root.callsign.AreaCode:
lookup[const.AREACODE] = int(root.callsign.AreaCode.text)
if root.callsign.TimeZone:
lookup[const.TIMEZONE] = int(root.callsign.TimeZone.text)
if root.callsign.GMTOffset:
lookup[const.GMTOFFSET] = float(root.callsign.GMTOffset.text)
if root.callsign.DST:
if root.callsign.DST.text == "Y":
lookup[const.DST] = True
else:
lookup[const.DST] = False
if root.callsign.eqsl:
if root.callsign.eqsl.text == "1":
lookup[const.EQSL] = True
else:
lookup[const.EQSL] = False
if root.callsign.mqsl:
if root.callsign.mqsl.text == "1":
lookup[const.MQSL] = True
else:
lookup[const.MQSL] = False
if root.callsign.cqzone:
lookup[const.CQZ] = int(root.callsign.cqzone.text)
if root.callsign.ituzone:
lookup[const.ITUZ] = int(root.callsign.ituzone.text)
if root.callsign.born:
lookup[const.BORN] = int(root.callsign.born.text)
if root.callsign.user:
lookup[const.USER_MGR] = root.callsign.user.text
if root.callsign.lotw:
if root.callsign.lotw.text == "1":
lookup[const.LOTW] = True
else:
lookup[const.LOTW] = False
if root.callsign.iota:
lookup[const.IOTA] = root.callsign.iota.text
if root.callsign.geoloc:
lookup[const.GEOLOC] = root.callsign.geoloc.text
# if sys.version_info >= (2,):
# for item in lookup:
# if isinstance(lookup[item], unicode):
# print item, repr(lookup[item])
return lookup
|
def function[_lookup_qrz_callsign, parameter[self, callsign, apikey, apiv]]:
constant[ Performs the callsign lookup against the QRZ.com XML API:
]
if compare[name[apikey] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0f59ed0>
variable[callsign] assign[=] call[name[callsign].upper, parameter[]]
variable[response] assign[=] call[name[self]._request_callsign_info_from_qrz, parameter[name[callsign], name[apikey], name[apiv]]]
variable[root] assign[=] call[name[BeautifulSoup], parameter[name[response].text, constant[html.parser]]]
variable[lookup] assign[=] dictionary[[], []]
if name[root].error begin[:]
if call[name[re].search, parameter[constant[Not found], name[root].error.text, name[re].I]] begin[:]
<ast.Raise object at 0x7da1b0f59750>
if compare[name[root].callsign is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0f58670>
if name[root].callsign.call begin[:]
call[name[lookup]][name[const].CALLSIGN] assign[=] name[root].callsign.call.text
if name[root].callsign.xref begin[:]
call[name[lookup]][name[const].XREF] assign[=] name[root].callsign.xref.text
if name[root].callsign.aliases begin[:]
call[name[lookup]][name[const].ALIASES] assign[=] call[name[root].callsign.aliases.text.split, parameter[constant[,]]]
if name[root].callsign.dxcc begin[:]
call[name[lookup]][name[const].ADIF] assign[=] call[name[int], parameter[name[root].callsign.dxcc.text]]
if name[root].callsign.fname begin[:]
call[name[lookup]][name[const].FNAME] assign[=] name[root].callsign.fname.text
if call[name[root].callsign.find, parameter[constant[name]]] begin[:]
call[name[lookup]][name[const].NAME] assign[=] call[call[name[root].callsign.find, parameter[constant[name]]].get_text, parameter[]]
if name[root].callsign.addr1 begin[:]
call[name[lookup]][name[const].ADDR1] assign[=] name[root].callsign.addr1.text
if name[root].callsign.addr2 begin[:]
call[name[lookup]][name[const].ADDR2] assign[=] name[root].callsign.addr2.text
if name[root].callsign.state begin[:]
call[name[lookup]][name[const].STATE] assign[=] name[root].callsign.state.text
if name[root].callsign.zip begin[:]
call[name[lookup]][name[const].ZIPCODE] assign[=] name[root].callsign.zip.text
if name[root].callsign.country begin[:]
call[name[lookup]][name[const].COUNTRY] assign[=] name[root].callsign.country.text
if name[root].callsign.ccode begin[:]
call[name[lookup]][name[const].CCODE] assign[=] call[name[int], parameter[name[root].callsign.ccode.text]]
if name[root].callsign.lat begin[:]
call[name[lookup]][name[const].LATITUDE] assign[=] call[name[float], parameter[name[root].callsign.lat.text]]
if name[root].callsign.lon begin[:]
call[name[lookup]][name[const].LONGITUDE] assign[=] call[name[float], parameter[name[root].callsign.lon.text]]
if name[root].callsign.grid begin[:]
call[name[lookup]][name[const].LOCATOR] assign[=] name[root].callsign.grid.text
if name[root].callsign.county begin[:]
call[name[lookup]][name[const].COUNTY] assign[=] name[root].callsign.county.text
if name[root].callsign.fips begin[:]
call[name[lookup]][name[const].FIPS] assign[=] call[name[int], parameter[name[root].callsign.fips.text]]
if name[root].callsign.land begin[:]
call[name[lookup]][name[const].LAND] assign[=] name[root].callsign.land.text
if name[root].callsign.efdate begin[:]
<ast.Try object at 0x7da1b0f1c5b0>
if name[root].callsign.expdate begin[:]
<ast.Try object at 0x7da1b0f1d180>
if name[root].callsign.p_call begin[:]
call[name[lookup]][name[const].P_CALL] assign[=] name[root].callsign.p_call.text
if call[name[root].callsign.find, parameter[constant[class]]] begin[:]
call[name[lookup]][name[const].LICENSE_CLASS] assign[=] call[call[name[root].callsign.find, parameter[constant[class]]].get_text, parameter[]]
if name[root].callsign.codes begin[:]
call[name[lookup]][name[const].CODES] assign[=] name[root].callsign.codes.text
if name[root].callsign.qslmgr begin[:]
call[name[lookup]][name[const].QSLMGR] assign[=] name[root].callsign.qslmgr.text
if name[root].callsign.email begin[:]
call[name[lookup]][name[const].EMAIL] assign[=] name[root].callsign.email.text
if name[root].callsign.url begin[:]
call[name[lookup]][name[const].URL] assign[=] name[root].callsign.url.text
if name[root].callsign.u_views begin[:]
call[name[lookup]][name[const].U_VIEWS] assign[=] call[name[int], parameter[name[root].callsign.u_views.text]]
if name[root].callsign.bio begin[:]
call[name[lookup]][name[const].BIO] assign[=] name[root].callsign.bio.text
if name[root].callsign.biodate begin[:]
<ast.Try object at 0x7da1b0f1fa30>
if name[root].callsign.image begin[:]
call[name[lookup]][name[const].IMAGE] assign[=] name[root].callsign.image.text
if name[root].callsign.imageinfo begin[:]
call[name[lookup]][name[const].IMAGE_INFO] assign[=] name[root].callsign.imageinfo.text
if name[root].callsign.serial begin[:]
call[name[lookup]][name[const].SERIAL] assign[=] call[name[int], parameter[name[root].callsign.serial.text]]
if name[root].callsign.moddate begin[:]
<ast.Try object at 0x7da1b10be710>
if name[root].callsign.MSA begin[:]
call[name[lookup]][name[const].MSA] assign[=] call[name[int], parameter[name[root].callsign.MSA.text]]
if name[root].callsign.AreaCode begin[:]
call[name[lookup]][name[const].AREACODE] assign[=] call[name[int], parameter[name[root].callsign.AreaCode.text]]
if name[root].callsign.TimeZone begin[:]
call[name[lookup]][name[const].TIMEZONE] assign[=] call[name[int], parameter[name[root].callsign.TimeZone.text]]
if name[root].callsign.GMTOffset begin[:]
call[name[lookup]][name[const].GMTOFFSET] assign[=] call[name[float], parameter[name[root].callsign.GMTOffset.text]]
if name[root].callsign.DST begin[:]
if compare[name[root].callsign.DST.text equal[==] constant[Y]] begin[:]
call[name[lookup]][name[const].DST] assign[=] constant[True]
if name[root].callsign.eqsl begin[:]
if compare[name[root].callsign.eqsl.text equal[==] constant[1]] begin[:]
call[name[lookup]][name[const].EQSL] assign[=] constant[True]
if name[root].callsign.mqsl begin[:]
if compare[name[root].callsign.mqsl.text equal[==] constant[1]] begin[:]
call[name[lookup]][name[const].MQSL] assign[=] constant[True]
if name[root].callsign.cqzone begin[:]
call[name[lookup]][name[const].CQZ] assign[=] call[name[int], parameter[name[root].callsign.cqzone.text]]
if name[root].callsign.ituzone begin[:]
call[name[lookup]][name[const].ITUZ] assign[=] call[name[int], parameter[name[root].callsign.ituzone.text]]
if name[root].callsign.born begin[:]
call[name[lookup]][name[const].BORN] assign[=] call[name[int], parameter[name[root].callsign.born.text]]
if name[root].callsign.user begin[:]
call[name[lookup]][name[const].USER_MGR] assign[=] name[root].callsign.user.text
if name[root].callsign.lotw begin[:]
if compare[name[root].callsign.lotw.text equal[==] constant[1]] begin[:]
call[name[lookup]][name[const].LOTW] assign[=] constant[True]
if name[root].callsign.iota begin[:]
call[name[lookup]][name[const].IOTA] assign[=] name[root].callsign.iota.text
if name[root].callsign.geoloc begin[:]
call[name[lookup]][name[const].GEOLOC] assign[=] name[root].callsign.geoloc.text
return[name[lookup]]
|
keyword[def] identifier[_lookup_qrz_callsign] ( identifier[self] , identifier[callsign] = keyword[None] , identifier[apikey] = keyword[None] , identifier[apiv] = literal[string] ):
literal[string]
keyword[if] identifier[apikey] keyword[is] keyword[None] :
keyword[raise] identifier[AttributeError] ( literal[string] )
identifier[callsign] = identifier[callsign] . identifier[upper] ()
identifier[response] = identifier[self] . identifier[_request_callsign_info_from_qrz] ( identifier[callsign] , identifier[apikey] , identifier[apiv] )
identifier[root] = identifier[BeautifulSoup] ( identifier[response] . identifier[text] , literal[string] )
identifier[lookup] ={}
keyword[if] identifier[root] . identifier[error] :
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[root] . identifier[error] . identifier[text] , identifier[re] . identifier[I] ):
keyword[raise] identifier[KeyError] ( identifier[root] . identifier[error] . identifier[text] )
keyword[elif] identifier[re] . identifier[search] ( literal[string] , identifier[root] . identifier[error] . identifier[text] , identifier[re] . identifier[I] ) keyword[or] identifier[re] . identifier[search] ( literal[string] , identifier[root] . identifier[error] . identifier[text] , identifier[re] . identifier[I] ):
identifier[apikey] = identifier[self] . identifier[_get_qrz_session_key] ( identifier[self] . identifier[_username] , identifier[self] . identifier[_pwd] )
identifier[response] = identifier[self] . identifier[_request_callsign_info_from_qrz] ( identifier[callsign] , identifier[apikey] , identifier[apiv] )
identifier[root] = identifier[BeautifulSoup] ( identifier[response] . identifier[text] , literal[string] )
keyword[if] identifier[root] . identifier[error] :
keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[root] . identifier[error] . identifier[text] , identifier[re] . identifier[I] ):
keyword[raise] identifier[KeyError] ( identifier[root] . identifier[error] . identifier[text] )
keyword[else] :
keyword[raise] identifier[AttributeError] ( identifier[root] . identifier[error] . identifier[text] )
keyword[else] :
identifier[self] . identifier[_apikey] = identifier[apikey]
keyword[else] :
keyword[raise] identifier[AttributeError] ( identifier[root] . identifier[error] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError]
keyword[if] identifier[root] . identifier[callsign] . identifier[call] :
identifier[lookup] [ identifier[const] . identifier[CALLSIGN] ]= identifier[root] . identifier[callsign] . identifier[call] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[xref] :
identifier[lookup] [ identifier[const] . identifier[XREF] ]= identifier[root] . identifier[callsign] . identifier[xref] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[aliases] :
identifier[lookup] [ identifier[const] . identifier[ALIASES] ]= identifier[root] . identifier[callsign] . identifier[aliases] . identifier[text] . identifier[split] ( literal[string] )
keyword[if] identifier[root] . identifier[callsign] . identifier[dxcc] :
identifier[lookup] [ identifier[const] . identifier[ADIF] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[dxcc] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[fname] :
identifier[lookup] [ identifier[const] . identifier[FNAME] ]= identifier[root] . identifier[callsign] . identifier[fname] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[find] ( literal[string] ):
identifier[lookup] [ identifier[const] . identifier[NAME] ]= identifier[root] . identifier[callsign] . identifier[find] ( literal[string] ). identifier[get_text] ()
keyword[if] identifier[root] . identifier[callsign] . identifier[addr1] :
identifier[lookup] [ identifier[const] . identifier[ADDR1] ]= identifier[root] . identifier[callsign] . identifier[addr1] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[addr2] :
identifier[lookup] [ identifier[const] . identifier[ADDR2] ]= identifier[root] . identifier[callsign] . identifier[addr2] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[state] :
identifier[lookup] [ identifier[const] . identifier[STATE] ]= identifier[root] . identifier[callsign] . identifier[state] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[zip] :
identifier[lookup] [ identifier[const] . identifier[ZIPCODE] ]= identifier[root] . identifier[callsign] . identifier[zip] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[country] :
identifier[lookup] [ identifier[const] . identifier[COUNTRY] ]= identifier[root] . identifier[callsign] . identifier[country] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[ccode] :
identifier[lookup] [ identifier[const] . identifier[CCODE] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[ccode] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[lat] :
identifier[lookup] [ identifier[const] . identifier[LATITUDE] ]= identifier[float] ( identifier[root] . identifier[callsign] . identifier[lat] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[lon] :
identifier[lookup] [ identifier[const] . identifier[LONGITUDE] ]= identifier[float] ( identifier[root] . identifier[callsign] . identifier[lon] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[grid] :
identifier[lookup] [ identifier[const] . identifier[LOCATOR] ]= identifier[root] . identifier[callsign] . identifier[grid] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[county] :
identifier[lookup] [ identifier[const] . identifier[COUNTY] ]= identifier[root] . identifier[callsign] . identifier[county] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[fips] :
identifier[lookup] [ identifier[const] . identifier[FIPS] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[fips] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[land] :
identifier[lookup] [ identifier[const] . identifier[LAND] ]= identifier[root] . identifier[callsign] . identifier[land] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[efdate] :
keyword[try] :
identifier[lookup] [ identifier[const] . identifier[EFDATE] ]= identifier[datetime] . identifier[strptime] ( identifier[root] . identifier[callsign] . identifier[efdate] . identifier[text] , literal[string] ). identifier[replace] ( identifier[tzinfo] = identifier[UTC] )
keyword[except] identifier[ValueError] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[callsign] + literal[string] + identifier[root] . identifier[callsign] . identifier[efdate] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[expdate] :
keyword[try] :
identifier[lookup] [ identifier[const] . identifier[EXPDATE] ]= identifier[datetime] . identifier[strptime] ( identifier[root] . identifier[callsign] . identifier[expdate] . identifier[text] , literal[string] ). identifier[replace] ( identifier[tzinfo] = identifier[UTC] )
keyword[except] identifier[ValueError] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] + identifier[callsign] + literal[string] + identifier[root] . identifier[callsign] . identifier[expdate] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[p_call] :
identifier[lookup] [ identifier[const] . identifier[P_CALL] ]= identifier[root] . identifier[callsign] . identifier[p_call] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[find] ( literal[string] ):
identifier[lookup] [ identifier[const] . identifier[LICENSE_CLASS] ]= identifier[root] . identifier[callsign] . identifier[find] ( literal[string] ). identifier[get_text] ()
keyword[if] identifier[root] . identifier[callsign] . identifier[codes] :
identifier[lookup] [ identifier[const] . identifier[CODES] ]= identifier[root] . identifier[callsign] . identifier[codes] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[qslmgr] :
identifier[lookup] [ identifier[const] . identifier[QSLMGR] ]= identifier[root] . identifier[callsign] . identifier[qslmgr] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[email] :
identifier[lookup] [ identifier[const] . identifier[EMAIL] ]= identifier[root] . identifier[callsign] . identifier[email] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[url] :
identifier[lookup] [ identifier[const] . identifier[URL] ]= identifier[root] . identifier[callsign] . identifier[url] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[u_views] :
identifier[lookup] [ identifier[const] . identifier[U_VIEWS] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[u_views] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[bio] :
identifier[lookup] [ identifier[const] . identifier[BIO] ]= identifier[root] . identifier[callsign] . identifier[bio] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[biodate] :
keyword[try] :
identifier[lookup] [ identifier[const] . identifier[BIODATE] ]= identifier[datetime] . identifier[strptime] ( identifier[root] . identifier[callsign] . identifier[biodate] . identifier[text] , literal[string] ). identifier[replace] ( identifier[tzinfo] = identifier[UTC] )
keyword[except] identifier[ValueError] :
identifier[self] . identifier[_logger] . identifier[warning] ( literal[string] + identifier[callsign] )
keyword[if] identifier[root] . identifier[callsign] . identifier[image] :
identifier[lookup] [ identifier[const] . identifier[IMAGE] ]= identifier[root] . identifier[callsign] . identifier[image] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[imageinfo] :
identifier[lookup] [ identifier[const] . identifier[IMAGE_INFO] ]= identifier[root] . identifier[callsign] . identifier[imageinfo] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[serial] :
identifier[lookup] [ identifier[const] . identifier[SERIAL] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[serial] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[moddate] :
keyword[try] :
identifier[lookup] [ identifier[const] . identifier[MODDATE] ]= identifier[datetime] . identifier[strptime] ( identifier[root] . identifier[callsign] . identifier[moddate] . identifier[text] , literal[string] ). identifier[replace] ( identifier[tzinfo] = identifier[UTC] )
keyword[except] identifier[ValueError] :
identifier[self] . identifier[_logger] . identifier[warning] ( literal[string] + identifier[callsign] )
keyword[if] identifier[root] . identifier[callsign] . identifier[MSA] :
identifier[lookup] [ identifier[const] . identifier[MSA] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[MSA] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[AreaCode] :
identifier[lookup] [ identifier[const] . identifier[AREACODE] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[AreaCode] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[TimeZone] :
identifier[lookup] [ identifier[const] . identifier[TIMEZONE] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[TimeZone] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[GMTOffset] :
identifier[lookup] [ identifier[const] . identifier[GMTOFFSET] ]= identifier[float] ( identifier[root] . identifier[callsign] . identifier[GMTOffset] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[DST] :
keyword[if] identifier[root] . identifier[callsign] . identifier[DST] . identifier[text] == literal[string] :
identifier[lookup] [ identifier[const] . identifier[DST] ]= keyword[True]
keyword[else] :
identifier[lookup] [ identifier[const] . identifier[DST] ]= keyword[False]
keyword[if] identifier[root] . identifier[callsign] . identifier[eqsl] :
keyword[if] identifier[root] . identifier[callsign] . identifier[eqsl] . identifier[text] == literal[string] :
identifier[lookup] [ identifier[const] . identifier[EQSL] ]= keyword[True]
keyword[else] :
identifier[lookup] [ identifier[const] . identifier[EQSL] ]= keyword[False]
keyword[if] identifier[root] . identifier[callsign] . identifier[mqsl] :
keyword[if] identifier[root] . identifier[callsign] . identifier[mqsl] . identifier[text] == literal[string] :
identifier[lookup] [ identifier[const] . identifier[MQSL] ]= keyword[True]
keyword[else] :
identifier[lookup] [ identifier[const] . identifier[MQSL] ]= keyword[False]
keyword[if] identifier[root] . identifier[callsign] . identifier[cqzone] :
identifier[lookup] [ identifier[const] . identifier[CQZ] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[cqzone] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[ituzone] :
identifier[lookup] [ identifier[const] . identifier[ITUZ] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[ituzone] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[born] :
identifier[lookup] [ identifier[const] . identifier[BORN] ]= identifier[int] ( identifier[root] . identifier[callsign] . identifier[born] . identifier[text] )
keyword[if] identifier[root] . identifier[callsign] . identifier[user] :
identifier[lookup] [ identifier[const] . identifier[USER_MGR] ]= identifier[root] . identifier[callsign] . identifier[user] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[lotw] :
keyword[if] identifier[root] . identifier[callsign] . identifier[lotw] . identifier[text] == literal[string] :
identifier[lookup] [ identifier[const] . identifier[LOTW] ]= keyword[True]
keyword[else] :
identifier[lookup] [ identifier[const] . identifier[LOTW] ]= keyword[False]
keyword[if] identifier[root] . identifier[callsign] . identifier[iota] :
identifier[lookup] [ identifier[const] . identifier[IOTA] ]= identifier[root] . identifier[callsign] . identifier[iota] . identifier[text]
keyword[if] identifier[root] . identifier[callsign] . identifier[geoloc] :
identifier[lookup] [ identifier[const] . identifier[GEOLOC] ]= identifier[root] . identifier[callsign] . identifier[geoloc] . identifier[text]
keyword[return] identifier[lookup]
|
def _lookup_qrz_callsign(self, callsign=None, apikey=None, apiv='1.3.3'):
""" Performs the callsign lookup against the QRZ.com XML API:
"""
if apikey is None:
raise AttributeError('Session Key Missing') # depends on [control=['if'], data=[]]
callsign = callsign.upper()
response = self._request_callsign_info_from_qrz(callsign, apikey, apiv)
root = BeautifulSoup(response.text, 'html.parser')
lookup = {}
if root.error:
if re.search('Not found', root.error.text, re.I): #No data available for callsign
raise KeyError(root.error.text) # depends on [control=['if'], data=[]]
#try to get a new session key and try to request again
elif re.search('Session Timeout', root.error.text, re.I) or re.search('Invalid session key', root.error.text, re.I):
apikey = self._get_qrz_session_key(self._username, self._pwd)
response = self._request_callsign_info_from_qrz(callsign, apikey, apiv)
root = BeautifulSoup(response.text, 'html.parser')
#if this fails again, raise error
if root.error:
if re.search('Not found', root.error.text, re.I): #No data available for callsign
raise KeyError(root.error.text) # depends on [control=['if'], data=[]]
else:
raise AttributeError(root.error.text) #most likely session key invalid # depends on [control=['if'], data=[]]
else:
#update API Key ob Lookup object
self._apikey = apikey # depends on [control=['if'], data=[]]
else:
raise AttributeError(root.error.text) #most likely session key missing # depends on [control=['if'], data=[]]
if root.callsign is None:
raise ValueError # depends on [control=['if'], data=[]]
if root.callsign.call:
lookup[const.CALLSIGN] = root.callsign.call.text # depends on [control=['if'], data=[]]
if root.callsign.xref:
lookup[const.XREF] = root.callsign.xref.text # depends on [control=['if'], data=[]]
if root.callsign.aliases:
lookup[const.ALIASES] = root.callsign.aliases.text.split(',') # depends on [control=['if'], data=[]]
if root.callsign.dxcc:
lookup[const.ADIF] = int(root.callsign.dxcc.text) # depends on [control=['if'], data=[]]
if root.callsign.fname:
lookup[const.FNAME] = root.callsign.fname.text # depends on [control=['if'], data=[]]
if root.callsign.find('name'):
lookup[const.NAME] = root.callsign.find('name').get_text() # depends on [control=['if'], data=[]]
if root.callsign.addr1:
lookup[const.ADDR1] = root.callsign.addr1.text # depends on [control=['if'], data=[]]
if root.callsign.addr2:
lookup[const.ADDR2] = root.callsign.addr2.text # depends on [control=['if'], data=[]]
if root.callsign.state:
lookup[const.STATE] = root.callsign.state.text # depends on [control=['if'], data=[]]
if root.callsign.zip:
lookup[const.ZIPCODE] = root.callsign.zip.text # depends on [control=['if'], data=[]]
if root.callsign.country:
lookup[const.COUNTRY] = root.callsign.country.text # depends on [control=['if'], data=[]]
if root.callsign.ccode:
lookup[const.CCODE] = int(root.callsign.ccode.text) # depends on [control=['if'], data=[]]
if root.callsign.lat:
lookup[const.LATITUDE] = float(root.callsign.lat.text) # depends on [control=['if'], data=[]]
if root.callsign.lon:
lookup[const.LONGITUDE] = float(root.callsign.lon.text) # depends on [control=['if'], data=[]]
if root.callsign.grid:
lookup[const.LOCATOR] = root.callsign.grid.text # depends on [control=['if'], data=[]]
if root.callsign.county:
lookup[const.COUNTY] = root.callsign.county.text # depends on [control=['if'], data=[]]
if root.callsign.fips:
lookup[const.FIPS] = int(root.callsign.fips.text) # check type # depends on [control=['if'], data=[]]
if root.callsign.land:
lookup[const.LAND] = root.callsign.land.text # depends on [control=['if'], data=[]]
if root.callsign.efdate:
try:
lookup[const.EFDATE] = datetime.strptime(root.callsign.efdate.text, '%Y-%m-%d').replace(tzinfo=UTC) # depends on [control=['try'], data=[]]
except ValueError:
self._logger.debug('[QRZ.com] efdate: Invalid DateTime; ' + callsign + ' ' + root.callsign.efdate.text) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if root.callsign.expdate:
try:
lookup[const.EXPDATE] = datetime.strptime(root.callsign.expdate.text, '%Y-%m-%d').replace(tzinfo=UTC) # depends on [control=['try'], data=[]]
except ValueError:
self._logger.debug('[QRZ.com] expdate: Invalid DateTime; ' + callsign + ' ' + root.callsign.expdate.text) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if root.callsign.p_call:
lookup[const.P_CALL] = root.callsign.p_call.text # depends on [control=['if'], data=[]]
if root.callsign.find('class'):
lookup[const.LICENSE_CLASS] = root.callsign.find('class').get_text() # depends on [control=['if'], data=[]]
if root.callsign.codes:
lookup[const.CODES] = root.callsign.codes.text # depends on [control=['if'], data=[]]
if root.callsign.qslmgr:
lookup[const.QSLMGR] = root.callsign.qslmgr.text # depends on [control=['if'], data=[]]
if root.callsign.email:
lookup[const.EMAIL] = root.callsign.email.text # depends on [control=['if'], data=[]]
if root.callsign.url:
lookup[const.URL] = root.callsign.url.text # depends on [control=['if'], data=[]]
if root.callsign.u_views:
lookup[const.U_VIEWS] = int(root.callsign.u_views.text) # depends on [control=['if'], data=[]]
if root.callsign.bio:
lookup[const.BIO] = root.callsign.bio.text # depends on [control=['if'], data=[]]
if root.callsign.biodate:
try:
lookup[const.BIODATE] = datetime.strptime(root.callsign.biodate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=UTC) # depends on [control=['try'], data=[]]
except ValueError:
self._logger.warning('[QRZ.com] biodate: Invalid DateTime; ' + callsign) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if root.callsign.image:
lookup[const.IMAGE] = root.callsign.image.text # depends on [control=['if'], data=[]]
if root.callsign.imageinfo:
lookup[const.IMAGE_INFO] = root.callsign.imageinfo.text # depends on [control=['if'], data=[]]
if root.callsign.serial:
lookup[const.SERIAL] = int(root.callsign.serial.text) # depends on [control=['if'], data=[]]
if root.callsign.moddate:
try:
lookup[const.MODDATE] = datetime.strptime(root.callsign.moddate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=UTC) # depends on [control=['try'], data=[]]
except ValueError:
self._logger.warning('[QRZ.com] moddate: Invalid DateTime; ' + callsign) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if root.callsign.MSA:
lookup[const.MSA] = int(root.callsign.MSA.text) # depends on [control=['if'], data=[]]
if root.callsign.AreaCode:
lookup[const.AREACODE] = int(root.callsign.AreaCode.text) # depends on [control=['if'], data=[]]
if root.callsign.TimeZone:
lookup[const.TIMEZONE] = int(root.callsign.TimeZone.text) # depends on [control=['if'], data=[]]
if root.callsign.GMTOffset:
lookup[const.GMTOFFSET] = float(root.callsign.GMTOffset.text) # depends on [control=['if'], data=[]]
if root.callsign.DST:
if root.callsign.DST.text == 'Y':
lookup[const.DST] = True # depends on [control=['if'], data=[]]
else:
lookup[const.DST] = False # depends on [control=['if'], data=[]]
if root.callsign.eqsl:
if root.callsign.eqsl.text == '1':
lookup[const.EQSL] = True # depends on [control=['if'], data=[]]
else:
lookup[const.EQSL] = False # depends on [control=['if'], data=[]]
if root.callsign.mqsl:
if root.callsign.mqsl.text == '1':
lookup[const.MQSL] = True # depends on [control=['if'], data=[]]
else:
lookup[const.MQSL] = False # depends on [control=['if'], data=[]]
if root.callsign.cqzone:
lookup[const.CQZ] = int(root.callsign.cqzone.text) # depends on [control=['if'], data=[]]
if root.callsign.ituzone:
lookup[const.ITUZ] = int(root.callsign.ituzone.text) # depends on [control=['if'], data=[]]
if root.callsign.born:
lookup[const.BORN] = int(root.callsign.born.text) # depends on [control=['if'], data=[]]
if root.callsign.user:
lookup[const.USER_MGR] = root.callsign.user.text # depends on [control=['if'], data=[]]
if root.callsign.lotw:
if root.callsign.lotw.text == '1':
lookup[const.LOTW] = True # depends on [control=['if'], data=[]]
else:
lookup[const.LOTW] = False # depends on [control=['if'], data=[]]
if root.callsign.iota:
lookup[const.IOTA] = root.callsign.iota.text # depends on [control=['if'], data=[]]
if root.callsign.geoloc:
lookup[const.GEOLOC] = root.callsign.geoloc.text # depends on [control=['if'], data=[]]
# if sys.version_info >= (2,):
# for item in lookup:
# if isinstance(lookup[item], unicode):
# print item, repr(lookup[item])
return lookup
|
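The lookup method above is long but follows one strategy throughout: parse the XML with BeautifulSoup, raise on an error element, then copy tag text into a dict with per-field type conversion (int, float, datetime, "1"/"Y" to bool). A trimmed, runnable sketch of that strategy follows; the sample payload and the handful of fields shown are illustrative, not a verbatim QRZ.com response.

import re
from bs4 import BeautifulSoup

SAMPLE = """
<QRZDatabase><Callsign>
  <call>DH1TW</call><dxcc>230</dxcc><lat>48.1</lat><lotw>1</lotw>
</Callsign></QRZDatabase>
"""

def parse_callsign(xml_text):
    # html.parser lowercases tag names, so <Callsign> is reachable as root.callsign.
    root = BeautifulSoup(xml_text, "html.parser")
    if root.error:
        if re.search("Not found", root.error.text, re.I):
            raise KeyError(root.error.text)      # no data for this callsign
        raise AttributeError(root.error.text)    # likely a session-key problem
    if root.callsign is None:
        raise ValueError("no <Callsign> element in response")
    lookup = {}
    if root.callsign.call:
        lookup["callsign"] = root.callsign.call.text
    if root.callsign.dxcc:
        lookup["adif"] = int(root.callsign.dxcc.text)
    if root.callsign.lat:
        lookup["latitude"] = float(root.callsign.lat.text)
    if root.callsign.lotw:
        lookup["lotw"] = root.callsign.lotw.text == "1"
    return lookup

# parse_callsign(SAMPLE)
# -> {'callsign': 'DH1TW', 'adif': 230, 'latitude': 48.1, 'lotw': True}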
def _update_dprx(self):
"""Update `dprx`."""
if 'beta' in self.freeparams:
for r in range(self.nsites):
self.dprx['beta'][r] = self.prx[r] * (self.ln_pi_codon[r]
- scipy.dot(self.ln_pi_codon[r], self.prx[r]))
if 'eta' in self.freeparams:
boolterm = scipy.ndarray(N_CODON, dtype='float')
with scipy.errstate(divide='raise', under='raise', over='raise',
invalid='raise'):
for i in range(N_NT - 1):
boolterm.fill(0)
for j in range(3):
boolterm += ((i <= CODON_NT_INDEX[j]).astype('float') /
(self.eta[i] - (i == CODON_NT_INDEX[j]).astype(
'float')))
for r in range(self.nsites):
self.dprx['eta'][i][r] = self.prx[r] * (boolterm -
scipy.dot(boolterm, self.prx[r]) / self.prx[r].sum())
|
def function[_update_dprx, parameter[self]]:
constant[Update `dprx`.]
if compare[constant[beta] in name[self].freeparams] begin[:]
for taget[name[r]] in starred[call[name[range], parameter[name[self].nsites]]] begin[:]
call[call[name[self].dprx][constant[beta]]][name[r]] assign[=] binary_operation[call[name[self].prx][name[r]] * binary_operation[call[name[self].ln_pi_codon][name[r]] - call[name[scipy].dot, parameter[call[name[self].ln_pi_codon][name[r]], call[name[self].prx][name[r]]]]]]
if compare[constant[eta] in name[self].freeparams] begin[:]
variable[boolterm] assign[=] call[name[scipy].ndarray, parameter[name[N_CODON]]]
with call[name[scipy].errstate, parameter[]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[N_NT] - constant[1]]]]] begin[:]
call[name[boolterm].fill, parameter[constant[0]]]
for taget[name[j]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
<ast.AugAssign object at 0x7da20e9b20e0>
for taget[name[r]] in starred[call[name[range], parameter[name[self].nsites]]] begin[:]
call[call[call[name[self].dprx][constant[eta]]][name[i]]][name[r]] assign[=] binary_operation[call[name[self].prx][name[r]] * binary_operation[name[boolterm] - binary_operation[call[name[scipy].dot, parameter[name[boolterm], call[name[self].prx][name[r]]]] / call[call[name[self].prx][name[r]].sum, parameter[]]]]]
|
keyword[def] identifier[_update_dprx] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[self] . identifier[freeparams] :
keyword[for] identifier[r] keyword[in] identifier[range] ( identifier[self] . identifier[nsites] ):
identifier[self] . identifier[dprx] [ literal[string] ][ identifier[r] ]= identifier[self] . identifier[prx] [ identifier[r] ]*( identifier[self] . identifier[ln_pi_codon] [ identifier[r] ]
- identifier[scipy] . identifier[dot] ( identifier[self] . identifier[ln_pi_codon] [ identifier[r] ], identifier[self] . identifier[prx] [ identifier[r] ]))
keyword[if] literal[string] keyword[in] identifier[self] . identifier[freeparams] :
identifier[boolterm] = identifier[scipy] . identifier[ndarray] ( identifier[N_CODON] , identifier[dtype] = literal[string] )
keyword[with] identifier[scipy] . identifier[errstate] ( identifier[divide] = literal[string] , identifier[under] = literal[string] , identifier[over] = literal[string] ,
identifier[invalid] = literal[string] ):
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[N_NT] - literal[int] ):
identifier[boolterm] . identifier[fill] ( literal[int] )
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ):
identifier[boolterm] +=(( identifier[i] <= identifier[CODON_NT_INDEX] [ identifier[j] ]). identifier[astype] ( literal[string] )/
( identifier[self] . identifier[eta] [ identifier[i] ]-( identifier[i] == identifier[CODON_NT_INDEX] [ identifier[j] ]). identifier[astype] (
literal[string] )))
keyword[for] identifier[r] keyword[in] identifier[range] ( identifier[self] . identifier[nsites] ):
identifier[self] . identifier[dprx] [ literal[string] ][ identifier[i] ][ identifier[r] ]= identifier[self] . identifier[prx] [ identifier[r] ]*( identifier[boolterm] -
identifier[scipy] . identifier[dot] ( identifier[boolterm] , identifier[self] . identifier[prx] [ identifier[r] ])/ identifier[self] . identifier[prx] [ identifier[r] ]. identifier[sum] ())
|
def _update_dprx(self):
"""Update `dprx`."""
if 'beta' in self.freeparams:
for r in range(self.nsites):
self.dprx['beta'][r] = self.prx[r] * (self.ln_pi_codon[r] - scipy.dot(self.ln_pi_codon[r], self.prx[r])) # depends on [control=['for'], data=['r']] # depends on [control=['if'], data=[]]
if 'eta' in self.freeparams:
boolterm = scipy.ndarray(N_CODON, dtype='float')
with scipy.errstate(divide='raise', under='raise', over='raise', invalid='raise'):
for i in range(N_NT - 1):
boolterm.fill(0)
for j in range(3):
boolterm += (i <= CODON_NT_INDEX[j]).astype('float') / (self.eta[i] - (i == CODON_NT_INDEX[j]).astype('float')) # depends on [control=['for'], data=['j']]
for r in range(self.nsites):
self.dprx['eta'][i][r] = self.prx[r] * (boolterm - scipy.dot(boolterm, self.prx[r]) / self.prx[r].sum()) # depends on [control=['for'], data=['r']] # depends on [control=['for'], data=['i']] # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]]
|
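For the 'beta' branch above, the underlying math is worth stating: if prx[r] is a normalized vector proportional to pi[r]**beta, then d prx/d beta = prx * (ln pi - <ln pi, prx>), which is exactly the expression in the code. The standalone NumPy sketch below reproduces it and checks it against a finite difference; the toy vector of 4 states is illustrative, while the original operates over the N_CODON = 61 codon states.

import numpy as np

def dprx_dbeta(ln_pi_codon, prx):
    """ln_pi_codon, prx: arrays of shape (nsites, nstates)."""
    inner = np.einsum("rc,rc->r", ln_pi_codon, prx)   # per-site dot product
    return prx * (ln_pi_codon - inner[:, None])

# Quick finite-difference check for a single site:
# pi = np.array([0.1, 0.2, 0.3, 0.4]); beta = 1.3; h = 1e-6
# f = lambda b: pi**b / (pi**b).sum()
# analytic = dprx_dbeta(np.log(pi)[None, :], f(beta)[None, :])[0]
# numeric = (f(beta + h) - f(beta - h)) / (2 * h)
# np.allclose(analytic, numeric)   # -> True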
def find(**criteria):
"""
Find a single *component* class with the given criteria.
Finds classes indexed with :meth:`register`
:raises SearchMultipleFoundError: if more than one result found
:raises SearchNoneFoundError: if nothing found
::
from cqparts.search import find
import cqparts_motors # example of a 3rd party lib
# get a specific motor class
motor_class = find(type='motor', part_number='ABC123X')
motor = motor_class(shaft_diameter=6.0)
"""
# Find all parts that match the given criteria
results = search(**criteria)
# error cases
if len(results) > 1:
raise SearchMultipleFoundError("%i results found" % len(results))
elif not results:
raise SearchNoneFoundError("%i results found" % len(results))
# return found Part|Assembly class
return results.pop()
|
def function[find, parameter[]]:
constant[
Find a single *component* class with the given criteria.
Finds classes indexed with :meth:`register`
:raises SearchMultipleFoundError: if more than one result found
:raises SearchNoneFoundError: if nothing found
::
from cqparts.search import find
import cqparts_motors # example of a 3rd party lib
# get a specific motor class
motor_class = find(type='motor', part_number='ABC123X')
motor = motor_class(shaft_diameter=6.0)
]
variable[results] assign[=] call[name[search], parameter[]]
if compare[call[name[len], parameter[name[results]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da18f811f60>
return[call[name[results].pop, parameter[]]]
|
keyword[def] identifier[find] (** identifier[criteria] ):
literal[string]
identifier[results] = identifier[search] (** identifier[criteria] )
keyword[if] identifier[len] ( identifier[results] )> literal[int] :
keyword[raise] identifier[SearchMultipleFoundError] ( literal[string] % identifier[len] ( identifier[results] ))
keyword[elif] keyword[not] identifier[results] :
keyword[raise] identifier[SearchNoneFoundError] ( literal[string] % identifier[len] ( identifier[results] ))
keyword[return] identifier[results] . identifier[pop] ()
|
def find(**criteria):
"""
Find a single *component* class with the given criteria.
Finds classes indexed with :meth:`register`
:raises SearchMultipleFoundError: if more than one result found
:raises SearchNoneFoundError: if nothing found
::
from cqparts.search import find
import cqparts_motors # example of a 3rd party lib
# get a specific motor class
motor_class = find(type='motor', part_number='ABC123X')
motor = motor_class(shaft_diameter=6.0)
"""
# Find all parts that match the given criteria
results = search(**criteria)
# error cases
if len(results) > 1:
raise SearchMultipleFoundError('%i results found' % len(results)) # depends on [control=['if'], data=[]]
elif not results:
raise SearchNoneFoundError('%i results found' % len(results)) # depends on [control=['if'], data=[]]
# return found Part|Assembly class
return results.pop()
|
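A minimal sketch of the registry that search() presumably consults: register() indexes classes under (key, value) criteria pairs and search() intersects the matching sets. The index name and exact criteria format here are assumptions for illustration, not cqparts' actual internals.

from collections import defaultdict

# Hypothetical index: (criterion key, value) -> set of registered classes.
_class_index = defaultdict(set)

def register(**criteria):
    def decorator(cls):
        for item in criteria.items():
            _class_index[item].add(cls)
        return cls
    return decorator

def search(**criteria):
    # Intersection of the class sets for every criterion supplied.
    sets = [_class_index[item] for item in criteria.items()]
    return set.intersection(*sets) if sets else set()

@register(type='motor', part_number='ABC123X')
class StepperMotor(object):
    pass

assert search(type='motor', part_number='ABC123X') == {StepperMotor}
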
def vec_angle(a, b):
"""
Calculate angle between two vectors
"""
cosang = np.dot(a, b)
sinang = fast_norm(np.cross(a, b))
return np.arctan2(sinang, cosang)
|
def function[vec_angle, parameter[a, b]]:
constant[
Calculate angle between two vectors
]
variable[cosang] assign[=] call[name[np].dot, parameter[name[a], name[b]]]
variable[sinang] assign[=] call[name[fast_norm], parameter[call[name[np].cross, parameter[name[a], name[b]]]]]
return[call[name[np].arctan2, parameter[name[sinang], name[cosang]]]]
|
keyword[def] identifier[vec_angle] ( identifier[a] , identifier[b] ):
literal[string]
identifier[cosang] = identifier[np] . identifier[dot] ( identifier[a] , identifier[b] )
identifier[sinang] = identifier[fast_norm] ( identifier[np] . identifier[cross] ( identifier[a] , identifier[b] ))
keyword[return] identifier[np] . identifier[arctan2] ( identifier[sinang] , identifier[cosang] )
|
def vec_angle(a, b):
"""
Calculate angle between two vectors
"""
cosang = np.dot(a, b)
sinang = fast_norm(np.cross(a, b))
return np.arctan2(sinang, cosang)
|
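A note on the arctan2 formulation above: computing the angle as arctan2(|a x b|, a . b) stays accurate for nearly parallel vectors, where arccos of the normalized dot product loses precision. A runnable sketch (fast_norm is assumed to be equivalent to the Euclidean norm):

import numpy as np

def fast_norm(v):
    # Stand-in for the module's fast_norm; assumed to be the 2-norm.
    return np.linalg.norm(v)

def vec_angle(a, b):
    cosang = np.dot(a, b)
    sinang = fast_norm(np.cross(a, b))
    return np.arctan2(sinang, cosang)

a = np.array([1.0, 0.0, 0.0])
b = np.array([1.0, 1e-8, 0.0])
print(vec_angle(a, b))  # ~1e-8 radians, recovered accurately
# The arccos form degrades here because cos(theta) ~ 1 has poor resolution:
print(np.arccos(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b))))
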
def mainline(self):
""" Returns the main line of the game (variation A) as a 'GameTree'."""
if self.variations:
return GameTree(self.data + self.variations[0].mainline())
else:
return self
|
def function[mainline, parameter[self]]:
constant[ Returns the main line of the game (variation A) as a 'GameTree'.]
if name[self].variations begin[:]
return[call[name[GameTree], parameter[binary_operation[name[self].data + call[call[name[self].variations][constant[0]].mainline, parameter[]]]]]]
|
keyword[def] identifier[mainline] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[variations] :
keyword[return] identifier[GameTree] ( identifier[self] . identifier[data] + identifier[self] . identifier[variations] [ literal[int] ]. identifier[mainline] ())
keyword[else] :
keyword[return] identifier[self]
|
def mainline(self):
""" Returns the main line of the game (variation A) as a 'GameTree'."""
if self.variations:
return GameTree(self.data + self.variations[0].mainline()) # depends on [control=['if'], data=[]]
else:
return self
|
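Since data concatenates with another GameTree via +, the real GameTree is presumably list-like; the toy stand-in below (an assumption, not the library's class) makes the recursion concrete:

class GameTree(list):
    # Toy stand-in: data holds this node's moves, variations holds subtrees.
    def __init__(self, data, variations=None):
        super(GameTree, self).__init__(data)
        self.data = data
        self.variations = variations or []

    def mainline(self):
        if self.variations:
            return GameTree(self.data + self.variations[0].mainline())
        else:
            return self

root = GameTree(['e4'], variations=[GameTree(['e5']), GameTree(['c5'])])
print(root.mainline())  # ['e4', 'e5'] -- variation A followed at each node
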
def overlap(args):
"""
%prog overlap <a|a.fasta> <b|b.fasta>
    Check overlaps between two FASTA records. The arguments can be GenBank IDs
instead of FASTA files. In case of IDs, the sequences will be downloaded
first.
"""
from jcvi.formats.blast import chain_HSPs
p = OptionParser(overlap.__doc__)
p.add_option("--dir", default=os.getcwd(),
help="Download sequences to dir [default: %default]")
p.add_option("--suffix", default="fasta",
help="Suffix of the sequence file in dir [default: %default]")
p.add_option("--qreverse", default=False, action="store_true",
help="Reverse seq a [default: %default]")
p.add_option("--nochain", default=False, action="store_true",
help="Do not chain adjacent HSPs [default: chain HSPs]")
p.set_align(pctid=GoodPct, hitlen=GoodOverlap, evalue=.01)
p.set_outfile(outfile=None)
opts, args = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help())
afasta, bfasta = args
dir = opts.dir
chain = not opts.nochain
suffix = opts.suffix
evalue = opts.evalue
pctid = opts.pctid
hitlen = opts.hitlen
cutoff = Cutoff(pctid, hitlen)
    # Check first whether it is a file or an accession name
if not op.exists(afasta):
af = op.join(dir, ".".join((afasta, suffix)))
if not op.exists(af): # Check to avoid redownload
entrez([afasta, "--skipcheck", "--outdir=" + dir])
afasta = af
if not op.exists(bfasta):
bf = op.join(dir, ".".join((bfasta, suffix)))
if not op.exists(bf):
entrez([bfasta, "--skipcheck", "--outdir=" + dir])
bfasta = bf
assert op.exists(afasta) and op.exists(bfasta)
cmd = "blastn -dust no"
cmd += " -query {0} -subject {1}".format(afasta, bfasta)
cmd += " -evalue {0} -outfmt 6 -perc_identity {1}".format(evalue, pctid)
fp = popen(cmd)
hsps = fp.readlines()
hsps = [BlastLine(x) for x in hsps]
hsps = [x for x in hsps if x.hitlen >= hitlen]
if chain:
logging.debug("Chain HSPs in the Blast output.")
dist = 2 * hitlen # Distance to chain the HSPs
hsps = chain_HSPs(hsps, xdist=dist, ydist=dist)
if len(hsps) == 0:
print("No match found.", file=sys.stderr)
return None
besthsp = hsps[0]
aid, asize = next(Fasta(afasta).itersizes())
bid, bsize = next(Fasta(bfasta).itersizes())
o = Overlap(besthsp, asize, bsize, cutoff, qreverse=opts.qreverse)
o.print_graphic()
if opts.outfile:
fw = must_open(opts.outfile, "w")
print(str(o), file=fw)
fw.close()
return o
|
def function[overlap, parameter[args]]:
constant[
%prog overlap <a|a.fasta> <b|b.fasta>
    Check overlaps between two FASTA records. The arguments can be GenBank IDs
instead of FASTA files. In case of IDs, the sequences will be downloaded
first.
]
from relative_module[jcvi.formats.blast] import module[chain_HSPs]
variable[p] assign[=] call[name[OptionParser], parameter[name[overlap].__doc__]]
call[name[p].add_option, parameter[constant[--dir]]]
call[name[p].add_option, parameter[constant[--suffix]]]
call[name[p].add_option, parameter[constant[--qreverse]]]
call[name[p].add_option, parameter[constant[--nochain]]]
call[name[p].set_align, parameter[]]
call[name[p].set_outfile, parameter[]]
<ast.Tuple object at 0x7da20c76e410> assign[=] call[name[p].parse_args, parameter[name[args]]]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20c76ee60>]]
<ast.Tuple object at 0x7da20c76e9b0> assign[=] name[args]
variable[dir] assign[=] name[opts].dir
variable[chain] assign[=] <ast.UnaryOp object at 0x7da20c76cf70>
variable[suffix] assign[=] name[opts].suffix
variable[evalue] assign[=] name[opts].evalue
variable[pctid] assign[=] name[opts].pctid
variable[hitlen] assign[=] name[opts].hitlen
variable[cutoff] assign[=] call[name[Cutoff], parameter[name[pctid], name[hitlen]]]
if <ast.UnaryOp object at 0x7da20c76c670> begin[:]
variable[af] assign[=] call[name[op].join, parameter[name[dir], call[constant[.].join, parameter[tuple[[<ast.Name object at 0x7da20c76f8e0>, <ast.Name object at 0x7da20c76e4a0>]]]]]]
if <ast.UnaryOp object at 0x7da20c76c7c0> begin[:]
call[name[entrez], parameter[list[[<ast.Name object at 0x7da20c76c6a0>, <ast.Constant object at 0x7da20c76d300>, <ast.BinOp object at 0x7da20c76da50>]]]]
variable[afasta] assign[=] name[af]
if <ast.UnaryOp object at 0x7da20c76d660> begin[:]
variable[bf] assign[=] call[name[op].join, parameter[name[dir], call[constant[.].join, parameter[tuple[[<ast.Name object at 0x7da20c76c070>, <ast.Name object at 0x7da20c76fa60>]]]]]]
if <ast.UnaryOp object at 0x7da20c76ec20> begin[:]
call[name[entrez], parameter[list[[<ast.Name object at 0x7da20c76e9e0>, <ast.Constant object at 0x7da20c76cbe0>, <ast.BinOp object at 0x7da20c76dba0>]]]]
variable[bfasta] assign[=] name[bf]
assert[<ast.BoolOp object at 0x7da20c76eaa0>]
variable[cmd] assign[=] constant[blastn -dust no]
<ast.AugAssign object at 0x7da20c76cfd0>
<ast.AugAssign object at 0x7da20c76fe20>
variable[fp] assign[=] call[name[popen], parameter[name[cmd]]]
variable[hsps] assign[=] call[name[fp].readlines, parameter[]]
variable[hsps] assign[=] <ast.ListComp object at 0x7da20c76f3a0>
variable[hsps] assign[=] <ast.ListComp object at 0x7da20c76c490>
if name[chain] begin[:]
call[name[logging].debug, parameter[constant[Chain HSPs in the Blast output.]]]
variable[dist] assign[=] binary_operation[constant[2] * name[hitlen]]
variable[hsps] assign[=] call[name[chain_HSPs], parameter[name[hsps]]]
if compare[call[name[len], parameter[name[hsps]]] equal[==] constant[0]] begin[:]
call[name[print], parameter[constant[No match found.]]]
return[constant[None]]
variable[besthsp] assign[=] call[name[hsps]][constant[0]]
<ast.Tuple object at 0x7da20c76ebf0> assign[=] call[name[next], parameter[call[call[name[Fasta], parameter[name[afasta]]].itersizes, parameter[]]]]
<ast.Tuple object at 0x7da20c76f700> assign[=] call[name[next], parameter[call[call[name[Fasta], parameter[name[bfasta]]].itersizes, parameter[]]]]
variable[o] assign[=] call[name[Overlap], parameter[name[besthsp], name[asize], name[bsize], name[cutoff]]]
call[name[o].print_graphic, parameter[]]
if name[opts].outfile begin[:]
variable[fw] assign[=] call[name[must_open], parameter[name[opts].outfile, constant[w]]]
call[name[print], parameter[call[name[str], parameter[name[o]]]]]
call[name[fw].close, parameter[]]
return[name[o]]
|
keyword[def] identifier[overlap] ( identifier[args] ):
literal[string]
keyword[from] identifier[jcvi] . identifier[formats] . identifier[blast] keyword[import] identifier[chain_HSPs]
identifier[p] = identifier[OptionParser] ( identifier[overlap] . identifier[__doc__] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = identifier[os] . identifier[getcwd] (),
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[set_align] ( identifier[pctid] = identifier[GoodPct] , identifier[hitlen] = identifier[GoodOverlap] , identifier[evalue] = literal[int] )
identifier[p] . identifier[set_outfile] ( identifier[outfile] = keyword[None] )
identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] )
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[afasta] , identifier[bfasta] = identifier[args]
identifier[dir] = identifier[opts] . identifier[dir]
identifier[chain] = keyword[not] identifier[opts] . identifier[nochain]
identifier[suffix] = identifier[opts] . identifier[suffix]
identifier[evalue] = identifier[opts] . identifier[evalue]
identifier[pctid] = identifier[opts] . identifier[pctid]
identifier[hitlen] = identifier[opts] . identifier[hitlen]
identifier[cutoff] = identifier[Cutoff] ( identifier[pctid] , identifier[hitlen] )
keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[afasta] ):
identifier[af] = identifier[op] . identifier[join] ( identifier[dir] , literal[string] . identifier[join] (( identifier[afasta] , identifier[suffix] )))
keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[af] ):
identifier[entrez] ([ identifier[afasta] , literal[string] , literal[string] + identifier[dir] ])
identifier[afasta] = identifier[af]
keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[bfasta] ):
identifier[bf] = identifier[op] . identifier[join] ( identifier[dir] , literal[string] . identifier[join] (( identifier[bfasta] , identifier[suffix] )))
keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[bf] ):
identifier[entrez] ([ identifier[bfasta] , literal[string] , literal[string] + identifier[dir] ])
identifier[bfasta] = identifier[bf]
keyword[assert] identifier[op] . identifier[exists] ( identifier[afasta] ) keyword[and] identifier[op] . identifier[exists] ( identifier[bfasta] )
identifier[cmd] = literal[string]
identifier[cmd] += literal[string] . identifier[format] ( identifier[afasta] , identifier[bfasta] )
identifier[cmd] += literal[string] . identifier[format] ( identifier[evalue] , identifier[pctid] )
identifier[fp] = identifier[popen] ( identifier[cmd] )
identifier[hsps] = identifier[fp] . identifier[readlines] ()
identifier[hsps] =[ identifier[BlastLine] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[hsps] ]
identifier[hsps] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[hsps] keyword[if] identifier[x] . identifier[hitlen] >= identifier[hitlen] ]
keyword[if] identifier[chain] :
identifier[logging] . identifier[debug] ( literal[string] )
identifier[dist] = literal[int] * identifier[hitlen]
identifier[hsps] = identifier[chain_HSPs] ( identifier[hsps] , identifier[xdist] = identifier[dist] , identifier[ydist] = identifier[dist] )
keyword[if] identifier[len] ( identifier[hsps] )== literal[int] :
identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[return] keyword[None]
identifier[besthsp] = identifier[hsps] [ literal[int] ]
identifier[aid] , identifier[asize] = identifier[next] ( identifier[Fasta] ( identifier[afasta] ). identifier[itersizes] ())
identifier[bid] , identifier[bsize] = identifier[next] ( identifier[Fasta] ( identifier[bfasta] ). identifier[itersizes] ())
identifier[o] = identifier[Overlap] ( identifier[besthsp] , identifier[asize] , identifier[bsize] , identifier[cutoff] , identifier[qreverse] = identifier[opts] . identifier[qreverse] )
identifier[o] . identifier[print_graphic] ()
keyword[if] identifier[opts] . identifier[outfile] :
identifier[fw] = identifier[must_open] ( identifier[opts] . identifier[outfile] , literal[string] )
identifier[print] ( identifier[str] ( identifier[o] ), identifier[file] = identifier[fw] )
identifier[fw] . identifier[close] ()
keyword[return] identifier[o]
|
def overlap(args):
"""
%prog overlap <a|a.fasta> <b|b.fasta>
    Check overlaps between two FASTA records. The arguments can be GenBank IDs
instead of FASTA files. In case of IDs, the sequences will be downloaded
first.
"""
from jcvi.formats.blast import chain_HSPs
p = OptionParser(overlap.__doc__)
p.add_option('--dir', default=os.getcwd(), help='Download sequences to dir [default: %default]')
p.add_option('--suffix', default='fasta', help='Suffix of the sequence file in dir [default: %default]')
p.add_option('--qreverse', default=False, action='store_true', help='Reverse seq a [default: %default]')
p.add_option('--nochain', default=False, action='store_true', help='Do not chain adjacent HSPs [default: chain HSPs]')
p.set_align(pctid=GoodPct, hitlen=GoodOverlap, evalue=0.01)
p.set_outfile(outfile=None)
(opts, args) = p.parse_args(args)
if len(args) != 2:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
(afasta, bfasta) = args
dir = opts.dir
chain = not opts.nochain
suffix = opts.suffix
evalue = opts.evalue
pctid = opts.pctid
hitlen = opts.hitlen
cutoff = Cutoff(pctid, hitlen)
    # Check first whether it is a file or an accession name
if not op.exists(afasta):
af = op.join(dir, '.'.join((afasta, suffix)))
if not op.exists(af): # Check to avoid redownload
entrez([afasta, '--skipcheck', '--outdir=' + dir]) # depends on [control=['if'], data=[]]
afasta = af # depends on [control=['if'], data=[]]
if not op.exists(bfasta):
bf = op.join(dir, '.'.join((bfasta, suffix)))
if not op.exists(bf):
entrez([bfasta, '--skipcheck', '--outdir=' + dir]) # depends on [control=['if'], data=[]]
bfasta = bf # depends on [control=['if'], data=[]]
assert op.exists(afasta) and op.exists(bfasta)
cmd = 'blastn -dust no'
cmd += ' -query {0} -subject {1}'.format(afasta, bfasta)
cmd += ' -evalue {0} -outfmt 6 -perc_identity {1}'.format(evalue, pctid)
fp = popen(cmd)
hsps = fp.readlines()
hsps = [BlastLine(x) for x in hsps]
hsps = [x for x in hsps if x.hitlen >= hitlen]
if chain:
logging.debug('Chain HSPs in the Blast output.')
dist = 2 * hitlen # Distance to chain the HSPs
hsps = chain_HSPs(hsps, xdist=dist, ydist=dist) # depends on [control=['if'], data=[]]
if len(hsps) == 0:
print('No match found.', file=sys.stderr)
return None # depends on [control=['if'], data=[]]
besthsp = hsps[0]
(aid, asize) = next(Fasta(afasta).itersizes())
(bid, bsize) = next(Fasta(bfasta).itersizes())
o = Overlap(besthsp, asize, bsize, cutoff, qreverse=opts.qreverse)
o.print_graphic()
if opts.outfile:
fw = must_open(opts.outfile, 'w')
print(str(o), file=fw)
fw.close() # depends on [control=['if'], data=[]]
return o
|
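For concreteness, the blastn invocation assembled above looks like the following for two local files; 98 is a placeholder for GoodPct, whose value is not shown in this excerpt:

afasta, bfasta = 'a.fasta', 'b.fasta'
evalue, pctid = 0.01, 98  # pctid default (GoodPct) is an assumption here
cmd = 'blastn -dust no'
cmd += ' -query {0} -subject {1}'.format(afasta, bfasta)
cmd += ' -evalue {0} -outfmt 6 -perc_identity {1}'.format(evalue, pctid)
print(cmd)
# blastn -dust no -query a.fasta -subject b.fasta -evalue 0.01 -outfmt 6 -perc_identity 98
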
def hardware_flexport_flexport_type_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hardware = ET.SubElement(config, "hardware", xmlns="urn:brocade.com:mgmt:brocade-hardware")
flexport = ET.SubElement(hardware, "flexport")
id_key = ET.SubElement(flexport, "id")
id_key.text = kwargs.pop('id')
flexport_type = ET.SubElement(flexport, "flexport_type")
instance = ET.SubElement(flexport_type, "instance")
instance.text = kwargs.pop('instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
def function[hardware_flexport_flexport_type_instance, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[hardware] assign[=] call[name[ET].SubElement, parameter[name[config], constant[hardware]]]
variable[flexport] assign[=] call[name[ET].SubElement, parameter[name[hardware], constant[flexport]]]
variable[id_key] assign[=] call[name[ET].SubElement, parameter[name[flexport], constant[id]]]
name[id_key].text assign[=] call[name[kwargs].pop, parameter[constant[id]]]
variable[flexport_type] assign[=] call[name[ET].SubElement, parameter[name[flexport], constant[flexport_type]]]
variable[instance] assign[=] call[name[ET].SubElement, parameter[name[flexport_type], constant[instance]]]
name[instance].text assign[=] call[name[kwargs].pop, parameter[constant[instance]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]]
|
keyword[def] identifier[hardware_flexport_flexport_type_instance] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[hardware] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[flexport] = identifier[ET] . identifier[SubElement] ( identifier[hardware] , literal[string] )
identifier[id_key] = identifier[ET] . identifier[SubElement] ( identifier[flexport] , literal[string] )
identifier[id_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[flexport_type] = identifier[ET] . identifier[SubElement] ( identifier[flexport] , literal[string] )
identifier[instance] = identifier[ET] . identifier[SubElement] ( identifier[flexport_type] , literal[string] )
identifier[instance] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] )
|
def hardware_flexport_flexport_type_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
hardware = ET.SubElement(config, 'hardware', xmlns='urn:brocade.com:mgmt:brocade-hardware')
flexport = ET.SubElement(hardware, 'flexport')
id_key = ET.SubElement(flexport, 'id')
id_key.text = kwargs.pop('id')
flexport_type = ET.SubElement(flexport, 'flexport_type')
instance = ET.SubElement(flexport_type, 'instance')
instance.text = kwargs.pop('instance')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
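Serialized, the element tree built above yields a payload like the one below; this sketch uses the standard library directly, with made-up values for the id and instance kwargs:

import xml.etree.ElementTree as ET

config = ET.Element('config')
hardware = ET.SubElement(config, 'hardware',
                         xmlns='urn:brocade.com:mgmt:brocade-hardware')
flexport = ET.SubElement(hardware, 'flexport')
ET.SubElement(flexport, 'id').text = '1'            # example id
ftype = ET.SubElement(flexport, 'flexport_type')
ET.SubElement(ftype, 'instance').text = '0'         # example instance
print(ET.tostring(config).decode())
# <config><hardware xmlns="urn:brocade.com:mgmt:brocade-hardware"><flexport>
# <id>1</id><flexport_type><instance>0</instance></flexport_type></flexport>
# </hardware></config>
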
def _load_config_include(self, include_directory):
"""Load included configuration files.
Args:
include_directory (str): The name of the config include directory.
Returns:
list: A list of all profiles for the current App.
"""
include_directory = os.path.join(self.app_path, include_directory)
if not os.path.isdir(include_directory):
msg = 'Provided include directory does not exist ({}).'.format(include_directory)
sys.exit(msg)
profiles = []
for filename in sorted(os.listdir(include_directory)):
if filename.endswith('.json'):
self.log.info('Loading config: {}'.format(filename))
print('Include File: {}{}{}'.format(c.Style.BRIGHT, c.Fore.MAGENTA, filename))
config_file = os.path.join(include_directory, filename)
with open(config_file) as data_file:
try:
profiles.extend(json.load(data_file))
except ValueError as e:
print('Invalid JSON file: {}{}{}'.format(c.Style.BRIGHT, c.Fore.RED, e))
sys.exit(1)
return profiles
|
def function[_load_config_include, parameter[self, include_directory]]:
constant[Load included configuration files.
Args:
include_directory (str): The name of the config include directory.
Returns:
list: A list of all profiles for the current App.
]
variable[include_directory] assign[=] call[name[os].path.join, parameter[name[self].app_path, name[include_directory]]]
if <ast.UnaryOp object at 0x7da18f58cac0> begin[:]
variable[msg] assign[=] call[constant[Provided include directory does not exist ({}).].format, parameter[name[include_directory]]]
call[name[sys].exit, parameter[name[msg]]]
variable[profiles] assign[=] list[[]]
for taget[name[filename]] in starred[call[name[sorted], parameter[call[name[os].listdir, parameter[name[include_directory]]]]]] begin[:]
if call[name[filename].endswith, parameter[constant[.json]]] begin[:]
call[name[self].log.info, parameter[call[constant[Loading config: {}].format, parameter[name[filename]]]]]
call[name[print], parameter[call[constant[Include File: {}{}{}].format, parameter[name[c].Style.BRIGHT, name[c].Fore.MAGENTA, name[filename]]]]]
variable[config_file] assign[=] call[name[os].path.join, parameter[name[include_directory], name[filename]]]
with call[name[open], parameter[name[config_file]]] begin[:]
<ast.Try object at 0x7da18bc73790>
return[name[profiles]]
|
keyword[def] identifier[_load_config_include] ( identifier[self] , identifier[include_directory] ):
literal[string]
identifier[include_directory] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[app_path] , identifier[include_directory] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[include_directory] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[include_directory] )
identifier[sys] . identifier[exit] ( identifier[msg] )
identifier[profiles] =[]
keyword[for] identifier[filename] keyword[in] identifier[sorted] ( identifier[os] . identifier[listdir] ( identifier[include_directory] )):
keyword[if] identifier[filename] . identifier[endswith] ( literal[string] ):
identifier[self] . identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[filename] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[c] . identifier[Style] . identifier[BRIGHT] , identifier[c] . identifier[Fore] . identifier[MAGENTA] , identifier[filename] ))
identifier[config_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[include_directory] , identifier[filename] )
keyword[with] identifier[open] ( identifier[config_file] ) keyword[as] identifier[data_file] :
keyword[try] :
identifier[profiles] . identifier[extend] ( identifier[json] . identifier[load] ( identifier[data_file] ))
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
identifier[print] ( literal[string] . identifier[format] ( identifier[c] . identifier[Style] . identifier[BRIGHT] , identifier[c] . identifier[Fore] . identifier[RED] , identifier[e] ))
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[profiles]
|
def _load_config_include(self, include_directory):
"""Load included configuration files.
Args:
include_directory (str): The name of the config include directory.
Returns:
list: A list of all profiles for the current App.
"""
include_directory = os.path.join(self.app_path, include_directory)
if not os.path.isdir(include_directory):
msg = 'Provided include directory does not exist ({}).'.format(include_directory)
sys.exit(msg) # depends on [control=['if'], data=[]]
profiles = []
for filename in sorted(os.listdir(include_directory)):
if filename.endswith('.json'):
self.log.info('Loading config: {}'.format(filename))
print('Include File: {}{}{}'.format(c.Style.BRIGHT, c.Fore.MAGENTA, filename))
config_file = os.path.join(include_directory, filename)
with open(config_file) as data_file:
try:
profiles.extend(json.load(data_file)) # depends on [control=['try'], data=[]]
except ValueError as e:
print('Invalid JSON file: {}{}{}'.format(c.Style.BRIGHT, c.Fore.RED, e))
sys.exit(1) # depends on [control=['except'], data=['e']] # depends on [control=['with'], data=['data_file']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']]
return profiles
|
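Because the loader calls profiles.extend(json.load(...)), each include file must hold a JSON array of profile objects. A plausible include file is sketched below; the field names are illustrative, not the real profile schema:

import json

# Hypothetical contents of an include file such as tcex_example.json.
sample = '''
[
  {"profile_name": "example-success", "exit_codes": [0]},
  {"profile_name": "example-failure", "exit_codes": [1]}
]
'''
profiles = []
profiles.extend(json.loads(sample))   # mirrors json.load(data_file)
print(len(profiles))                  # 2
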
def transform_model(cls) -> None:
"""
Anything that uses the ModelMeta needs _meta and id.
    Also keep track of relationships and create them in the related model class.
"""
if cls.name != "Model":
appname = "models"
for mcls in cls.get_children():
if isinstance(mcls, ClassDef):
for attr in mcls.get_children():
if isinstance(attr, Assign):
if attr.targets[0].name == "app":
appname = attr.value.value
mname = "{}.{}".format(appname, cls.name)
MODELS[mname] = cls
for relname, relval in FUTURE_RELATIONS.get(mname, []):
cls.locals[relname] = relval
for attr in cls.get_children():
if isinstance(attr, Assign):
try:
attrname = attr.value.func.attrname
except AttributeError:
pass
else:
if attrname in ["ForeignKeyField", "ManyToManyField"]:
tomodel = attr.value.args[0].value
relname = ""
if attr.value.keywords:
for keyword in attr.value.keywords:
if keyword.arg == "related_name":
relname = keyword.value.value
if not relname:
relname = cls.name.lower() + "s"
# Injected model attributes need to also have the relation manager
if attrname == "ManyToManyField":
relval = [
attr.value.func,
MANAGER.ast_from_module_name("tortoise.fields").lookup(
"ManyToManyRelationManager"
)[1][0],
]
else:
relval = [
attr.value.func,
MANAGER.ast_from_module_name("tortoise.fields").lookup(
"RelationQueryContainer"
)[1][0],
]
if tomodel in MODELS:
MODELS[tomodel].locals[relname] = relval
else:
FUTURE_RELATIONS.setdefault(tomodel, []).append((relname, relval))
cls.locals["_meta"] = [
MANAGER.ast_from_module_name("tortoise.models").lookup("MetaInfo")[1][0].instantiate_class()
]
if "id" not in cls.locals:
cls.locals["id"] = [nodes.ClassDef("id", None)]
|
def function[transform_model, parameter[cls]]:
constant[
Anything that uses the ModelMeta needs _meta and id.
    Also keep track of relationships and create them in the related model class.
]
if compare[name[cls].name not_equal[!=] constant[Model]] begin[:]
variable[appname] assign[=] constant[models]
for taget[name[mcls]] in starred[call[name[cls].get_children, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[mcls], name[ClassDef]]] begin[:]
for taget[name[attr]] in starred[call[name[mcls].get_children, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[attr], name[Assign]]] begin[:]
if compare[call[name[attr].targets][constant[0]].name equal[==] constant[app]] begin[:]
variable[appname] assign[=] name[attr].value.value
variable[mname] assign[=] call[constant[{}.{}].format, parameter[name[appname], name[cls].name]]
call[name[MODELS]][name[mname]] assign[=] name[cls]
for taget[tuple[[<ast.Name object at 0x7da20c990610>, <ast.Name object at 0x7da20c9935e0>]]] in starred[call[name[FUTURE_RELATIONS].get, parameter[name[mname], list[[]]]]] begin[:]
call[name[cls].locals][name[relname]] assign[=] name[relval]
for taget[name[attr]] in starred[call[name[cls].get_children, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[attr], name[Assign]]] begin[:]
<ast.Try object at 0x7da207f02ec0>
call[name[cls].locals][constant[_meta]] assign[=] list[[<ast.Call object at 0x7da18bc73f10>]]
if compare[constant[id] <ast.NotIn object at 0x7da2590d7190> name[cls].locals] begin[:]
call[name[cls].locals][constant[id]] assign[=] list[[<ast.Call object at 0x7da1b17f91b0>]]
|
keyword[def] identifier[transform_model] ( identifier[cls] )-> keyword[None] :
literal[string]
keyword[if] identifier[cls] . identifier[name] != literal[string] :
identifier[appname] = literal[string]
keyword[for] identifier[mcls] keyword[in] identifier[cls] . identifier[get_children] ():
keyword[if] identifier[isinstance] ( identifier[mcls] , identifier[ClassDef] ):
keyword[for] identifier[attr] keyword[in] identifier[mcls] . identifier[get_children] ():
keyword[if] identifier[isinstance] ( identifier[attr] , identifier[Assign] ):
keyword[if] identifier[attr] . identifier[targets] [ literal[int] ]. identifier[name] == literal[string] :
identifier[appname] = identifier[attr] . identifier[value] . identifier[value]
identifier[mname] = literal[string] . identifier[format] ( identifier[appname] , identifier[cls] . identifier[name] )
identifier[MODELS] [ identifier[mname] ]= identifier[cls]
keyword[for] identifier[relname] , identifier[relval] keyword[in] identifier[FUTURE_RELATIONS] . identifier[get] ( identifier[mname] ,[]):
identifier[cls] . identifier[locals] [ identifier[relname] ]= identifier[relval]
keyword[for] identifier[attr] keyword[in] identifier[cls] . identifier[get_children] ():
keyword[if] identifier[isinstance] ( identifier[attr] , identifier[Assign] ):
keyword[try] :
identifier[attrname] = identifier[attr] . identifier[value] . identifier[func] . identifier[attrname]
keyword[except] identifier[AttributeError] :
keyword[pass]
keyword[else] :
keyword[if] identifier[attrname] keyword[in] [ literal[string] , literal[string] ]:
identifier[tomodel] = identifier[attr] . identifier[value] . identifier[args] [ literal[int] ]. identifier[value]
identifier[relname] = literal[string]
keyword[if] identifier[attr] . identifier[value] . identifier[keywords] :
keyword[for] identifier[keyword] keyword[in] identifier[attr] . identifier[value] . identifier[keywords] :
keyword[if] identifier[keyword] . identifier[arg] == literal[string] :
identifier[relname] = identifier[keyword] . identifier[value] . identifier[value]
keyword[if] keyword[not] identifier[relname] :
identifier[relname] = identifier[cls] . identifier[name] . identifier[lower] ()+ literal[string]
keyword[if] identifier[attrname] == literal[string] :
identifier[relval] =[
identifier[attr] . identifier[value] . identifier[func] ,
identifier[MANAGER] . identifier[ast_from_module_name] ( literal[string] ). identifier[lookup] (
literal[string]
)[ literal[int] ][ literal[int] ],
]
keyword[else] :
identifier[relval] =[
identifier[attr] . identifier[value] . identifier[func] ,
identifier[MANAGER] . identifier[ast_from_module_name] ( literal[string] ). identifier[lookup] (
literal[string]
)[ literal[int] ][ literal[int] ],
]
keyword[if] identifier[tomodel] keyword[in] identifier[MODELS] :
identifier[MODELS] [ identifier[tomodel] ]. identifier[locals] [ identifier[relname] ]= identifier[relval]
keyword[else] :
identifier[FUTURE_RELATIONS] . identifier[setdefault] ( identifier[tomodel] ,[]). identifier[append] (( identifier[relname] , identifier[relval] ))
identifier[cls] . identifier[locals] [ literal[string] ]=[
identifier[MANAGER] . identifier[ast_from_module_name] ( literal[string] ). identifier[lookup] ( literal[string] )[ literal[int] ][ literal[int] ]. identifier[instantiate_class] ()
]
keyword[if] literal[string] keyword[not] keyword[in] identifier[cls] . identifier[locals] :
identifier[cls] . identifier[locals] [ literal[string] ]=[ identifier[nodes] . identifier[ClassDef] ( literal[string] , keyword[None] )]
|
def transform_model(cls) -> None:
"""
Anything that uses the ModelMeta needs _meta and id.
    Also keep track of relationships and create them in the related model class.
"""
if cls.name != 'Model':
appname = 'models'
for mcls in cls.get_children():
if isinstance(mcls, ClassDef):
for attr in mcls.get_children():
if isinstance(attr, Assign):
if attr.targets[0].name == 'app':
appname = attr.value.value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mcls']]
mname = '{}.{}'.format(appname, cls.name)
MODELS[mname] = cls
for (relname, relval) in FUTURE_RELATIONS.get(mname, []):
cls.locals[relname] = relval # depends on [control=['for'], data=[]]
for attr in cls.get_children():
if isinstance(attr, Assign):
try:
attrname = attr.value.func.attrname # depends on [control=['try'], data=[]]
except AttributeError:
pass # depends on [control=['except'], data=[]]
else:
if attrname in ['ForeignKeyField', 'ManyToManyField']:
tomodel = attr.value.args[0].value
relname = ''
if attr.value.keywords:
for keyword in attr.value.keywords:
if keyword.arg == 'related_name':
relname = keyword.value.value # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['keyword']] # depends on [control=['if'], data=[]]
if not relname:
relname = cls.name.lower() + 's' # depends on [control=['if'], data=[]]
# Injected model attributes need to also have the relation manager
if attrname == 'ManyToManyField':
relval = [attr.value.func, MANAGER.ast_from_module_name('tortoise.fields').lookup('ManyToManyRelationManager')[1][0]] # depends on [control=['if'], data=[]]
else:
relval = [attr.value.func, MANAGER.ast_from_module_name('tortoise.fields').lookup('RelationQueryContainer')[1][0]]
if tomodel in MODELS:
MODELS[tomodel].locals[relname] = relval # depends on [control=['if'], data=['tomodel', 'MODELS']]
else:
FUTURE_RELATIONS.setdefault(tomodel, []).append((relname, relval)) # depends on [control=['if'], data=['attrname']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['attr']] # depends on [control=['if'], data=[]]
cls.locals['_meta'] = [MANAGER.ast_from_module_name('tortoise.models').lookup('MetaInfo')[1][0].instantiate_class()]
if 'id' not in cls.locals:
cls.locals['id'] = [nodes.ClassDef('id', None)] # depends on [control=['if'], data=[]]
|
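transform_model reads like an astroid transform for a pylint plugin; such functions are typically wired up with MANAGER.register_transform as shown below. The predicate is a hypothetical illustration of the hookup, not part of the original module:

from astroid import MANAGER, nodes

def _looks_like_model(cls):
    # Hypothetical predicate: only transform classes whose bases mention Model.
    return any(base.as_string().endswith('Model') for base in cls.bases)

def register(linter):
    # Conventional pylint plugin entry point.
    MANAGER.register_transform(nodes.ClassDef, transform_model,
                               _looks_like_model)
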
def get_user(self, id):
"""
Returns details about the user for the given id.
Use get_user_by_email() or get_user_by_username() for help
    identifying the id.
"""
self.assert_has_permission('scim.read')
return self._get(self.uri + '/Users/%s' % (id))
|
def function[get_user, parameter[self, id]]:
constant[
Returns details about the user for the given id.
Use get_user_by_email() or get_user_by_username() for help
    identifying the id.
]
call[name[self].assert_has_permission, parameter[constant[scim.read]]]
return[call[name[self]._get, parameter[binary_operation[name[self].uri + binary_operation[constant[/Users/%s] <ast.Mod object at 0x7da2590d6920> name[id]]]]]]
|
keyword[def] identifier[get_user] ( identifier[self] , identifier[id] ):
literal[string]
identifier[self] . identifier[assert_has_permission] ( literal[string] )
keyword[return] identifier[self] . identifier[_get] ( identifier[self] . identifier[uri] + literal[string] %( identifier[id] ))
|
def get_user(self, id):
"""
Returns details about the user for the given id.
Use get_user_by_email() or get_user_by_username() for help
    identifying the id.
"""
self.assert_has_permission('scim.read')
return self._get(self.uri + '/Users/%s' % id)
|
def _process_json_response(self, response):
    ''' Process a given response and return the parsed JSON. '''
json_response = self._get_json_response(response)
if self.response_callback is not None:
json_response = self.response_callback(json_response)
response._content = json.dumps(json_response)
self.http_status_code = response.status_code
self._check_error(response, json_response)
self._check_warnings(json_response)
return json_response
|
def function[_process_json_response, parameter[self, response]]:
    constant[ Process a given response and return the parsed JSON. ]
variable[json_response] assign[=] call[name[self]._get_json_response, parameter[name[response]]]
if compare[name[self].response_callback is_not constant[None]] begin[:]
variable[json_response] assign[=] call[name[self].response_callback, parameter[name[json_response]]]
name[response]._content assign[=] call[name[json].dumps, parameter[name[json_response]]]
name[self].http_status_code assign[=] name[response].status_code
call[name[self]._check_error, parameter[name[response], name[json_response]]]
call[name[self]._check_warnings, parameter[name[json_response]]]
return[name[json_response]]
|
keyword[def] identifier[_process_json_response] ( identifier[self] , identifier[response] ):
literal[string]
identifier[json_response] = identifier[self] . identifier[_get_json_response] ( identifier[response] )
keyword[if] identifier[self] . identifier[response_callback] keyword[is] keyword[not] keyword[None] :
identifier[json_response] = identifier[self] . identifier[response_callback] ( identifier[json_response] )
identifier[response] . identifier[_content] = identifier[json] . identifier[dumps] ( identifier[json_response] )
identifier[self] . identifier[http_status_code] = identifier[response] . identifier[status_code]
identifier[self] . identifier[_check_error] ( identifier[response] , identifier[json_response] )
identifier[self] . identifier[_check_warnings] ( identifier[json_response] )
keyword[return] identifier[json_response]
|
def _process_json_response(self, response):
""" Process a given response """
json_response = self._get_json_response(response)
if self.response_callback is not None:
json_response = self.response_callback(json_response)
response._content = json.dumps(json_response) # depends on [control=['if'], data=[]]
self.http_status_code = response.status_code
self._check_error(response, json_response)
self._check_warnings(json_response)
return json_response
|
def runDynTask(task):
'''
Run a dynamic task and return the result.
Example:
foo = runDynTask( ('baz.faz.Foo', (), {} ) )
'''
func = getDynLocal(task[0])
if func is None:
raise s_exc.NoSuchFunc(name=task[0])
return func(*task[1], **task[2])
|
def function[runDynTask, parameter[task]]:
constant[
Run a dynamic task and return the result.
Example:
foo = runDynTask( ('baz.faz.Foo', (), {} ) )
]
variable[func] assign[=] call[name[getDynLocal], parameter[call[name[task]][constant[0]]]]
if compare[name[func] is constant[None]] begin[:]
<ast.Raise object at 0x7da20c76d390>
return[call[name[func], parameter[<ast.Starred object at 0x7da207f031c0>]]]
|
keyword[def] identifier[runDynTask] ( identifier[task] ):
literal[string]
identifier[func] = identifier[getDynLocal] ( identifier[task] [ literal[int] ])
keyword[if] identifier[func] keyword[is] keyword[None] :
keyword[raise] identifier[s_exc] . identifier[NoSuchFunc] ( identifier[name] = identifier[task] [ literal[int] ])
keyword[return] identifier[func] (* identifier[task] [ literal[int] ],** identifier[task] [ literal[int] ])
|
def runDynTask(task):
"""
Run a dynamic task and return the result.
Example:
foo = runDynTask( ('baz.faz.Foo', (), {} ) )
"""
func = getDynLocal(task[0])
if func is None:
raise s_exc.NoSuchFunc(name=task[0]) # depends on [control=['if'], data=[]]
return func(*task[1], **task[2])
|
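getDynLocal is not shown in this excerpt; it presumably resolves a dotted path to a callable. A minimal importlib-based equivalent (an assumption, not synapse's actual implementation):

import importlib

def getDynLocal(name):
    # Split 'pkg.mod.Func' into module path and attribute name.
    modname, _, attr = name.rpartition('.')
    try:
        mod = importlib.import_module(modname)
    except ImportError:
        return None
    return getattr(mod, attr, None)

# runDynTask(('os.path.join', ('a', 'b'), {})) would then return 'a/b'.
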
def compress_pdf(pdf_fpath, output_fname=None):
""" uses ghostscript to write a pdf """
import utool as ut
ut.assertpath(pdf_fpath)
suffix = '_' + ut.get_datestamp(False) + '_compressed'
print('pdf_fpath = %r' % (pdf_fpath,))
output_pdf_fpath = ut.augpath(pdf_fpath, suffix, newfname=output_fname)
print('output_pdf_fpath = %r' % (output_pdf_fpath,))
gs_exe = find_ghostscript_exe()
cmd_list = (
gs_exe,
'-sDEVICE=pdfwrite',
'-dCompatibilityLevel=1.4',
'-dNOPAUSE',
'-dQUIET',
'-dBATCH',
'-sOutputFile=' + output_pdf_fpath,
pdf_fpath
)
ut.cmd(*cmd_list)
return output_pdf_fpath
|
def function[compress_pdf, parameter[pdf_fpath, output_fname]]:
    constant[ Uses Ghostscript to write a compressed PDF. ]
import module[utool] as alias[ut]
call[name[ut].assertpath, parameter[name[pdf_fpath]]]
variable[suffix] assign[=] binary_operation[binary_operation[constant[_] + call[name[ut].get_datestamp, parameter[constant[False]]]] + constant[_compressed]]
call[name[print], parameter[binary_operation[constant[pdf_fpath = %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b24e81c0>]]]]]
variable[output_pdf_fpath] assign[=] call[name[ut].augpath, parameter[name[pdf_fpath], name[suffix]]]
call[name[print], parameter[binary_operation[constant[output_pdf_fpath = %r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b24eb1f0>]]]]]
variable[gs_exe] assign[=] call[name[find_ghostscript_exe], parameter[]]
variable[cmd_list] assign[=] tuple[[<ast.Name object at 0x7da1b24ea6e0>, <ast.Constant object at 0x7da1b24e95a0>, <ast.Constant object at 0x7da1b24eb820>, <ast.Constant object at 0x7da1b24e9180>, <ast.Constant object at 0x7da1b24e8a00>, <ast.Constant object at 0x7da1b24e8130>, <ast.BinOp object at 0x7da1b24e8f70>, <ast.Name object at 0x7da1b24e9060>]]
call[name[ut].cmd, parameter[<ast.Starred object at 0x7da1b24e8b50>]]
return[name[output_pdf_fpath]]
|
keyword[def] identifier[compress_pdf] ( identifier[pdf_fpath] , identifier[output_fname] = keyword[None] ):
literal[string]
keyword[import] identifier[utool] keyword[as] identifier[ut]
identifier[ut] . identifier[assertpath] ( identifier[pdf_fpath] )
identifier[suffix] = literal[string] + identifier[ut] . identifier[get_datestamp] ( keyword[False] )+ literal[string]
identifier[print] ( literal[string] %( identifier[pdf_fpath] ,))
identifier[output_pdf_fpath] = identifier[ut] . identifier[augpath] ( identifier[pdf_fpath] , identifier[suffix] , identifier[newfname] = identifier[output_fname] )
identifier[print] ( literal[string] %( identifier[output_pdf_fpath] ,))
identifier[gs_exe] = identifier[find_ghostscript_exe] ()
identifier[cmd_list] =(
identifier[gs_exe] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] + identifier[output_pdf_fpath] ,
identifier[pdf_fpath]
)
identifier[ut] . identifier[cmd] (* identifier[cmd_list] )
keyword[return] identifier[output_pdf_fpath]
|
def compress_pdf(pdf_fpath, output_fname=None):
""" uses ghostscript to write a pdf """
import utool as ut
ut.assertpath(pdf_fpath)
suffix = '_' + ut.get_datestamp(False) + '_compressed'
print('pdf_fpath = %r' % (pdf_fpath,))
output_pdf_fpath = ut.augpath(pdf_fpath, suffix, newfname=output_fname)
print('output_pdf_fpath = %r' % (output_pdf_fpath,))
gs_exe = find_ghostscript_exe()
cmd_list = (gs_exe, '-sDEVICE=pdfwrite', '-dCompatibilityLevel=1.4', '-dNOPAUSE', '-dQUIET', '-dBATCH', '-sOutputFile=' + output_pdf_fpath, pdf_fpath)
ut.cmd(*cmd_list)
return output_pdf_fpath
|
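The ut.cmd call amounts to invoking Ghostscript directly; a standard-library equivalent looks like this (the binary name varies per platform, which is presumably why find_ghostscript_exe exists):

import subprocess

subprocess.check_call([
    'gs',                            # e.g. gswin64c.exe on Windows
    '-sDEVICE=pdfwrite',
    '-dCompatibilityLevel=1.4',
    '-dNOPAUSE', '-dQUIET', '-dBATCH',
    '-sOutputFile=paper_2024_01_01_compressed.pdf',
    'paper.pdf',
])
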
def set_fixed_parameters(self, parameters):
""" Set parameters of transform """
if not isinstance(parameters, np.ndarray):
parameters = np.asarray(parameters)
libfn = utils.get_lib_fn('setTransformFixedParameters%s'%self._libsuffix)
libfn(self.pointer, parameters.tolist())
|
def function[set_fixed_parameters, parameter[self, parameters]]:
    constant[ Set the fixed parameters of the transform ]
if <ast.UnaryOp object at 0x7da1b151a560> begin[:]
variable[parameters] assign[=] call[name[np].asarray, parameter[name[parameters]]]
variable[libfn] assign[=] call[name[utils].get_lib_fn, parameter[binary_operation[constant[setTransformFixedParameters%s] <ast.Mod object at 0x7da2590d6920> name[self]._libsuffix]]]
call[name[libfn], parameter[name[self].pointer, call[name[parameters].tolist, parameter[]]]]
|
keyword[def] identifier[set_fixed_parameters] ( identifier[self] , identifier[parameters] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[parameters] , identifier[np] . identifier[ndarray] ):
identifier[parameters] = identifier[np] . identifier[asarray] ( identifier[parameters] )
identifier[libfn] = identifier[utils] . identifier[get_lib_fn] ( literal[string] % identifier[self] . identifier[_libsuffix] )
identifier[libfn] ( identifier[self] . identifier[pointer] , identifier[parameters] . identifier[tolist] ())
|
def set_fixed_parameters(self, parameters):
""" Set parameters of transform """
if not isinstance(parameters, np.ndarray):
parameters = np.asarray(parameters) # depends on [control=['if'], data=[]]
libfn = utils.get_lib_fn('setTransformFixedParameters%s' % self._libsuffix)
libfn(self.pointer, parameters.tolist())
|
def _read_feather_columns(path, columns, num_splits): # pragma: no cover
"""Use a Ray task to read columns from Feather into a Pandas DataFrame.
Note: Ray functions are not detected by codecov (thus pragma: no cover)
Args:
path: The path of the Feather file.
columns: The list of column names to read.
num_splits: The number of partitions to split the column into.
Returns:
A list containing the split Pandas DataFrames and the Index as the last
        element. If no `index_col` is set, then we just return the length.
This is used to determine the total length of the DataFrame to build a
default Index.
"""
from pyarrow import feather
df = feather.read_feather(path, columns=columns)
# Append the length of the index here to build it externally
return _split_result_for_readers(0, num_splits, df) + [len(df.index)]
|
def function[_read_feather_columns, parameter[path, columns, num_splits]]:
constant[Use a Ray task to read columns from Feather into a Pandas DataFrame.
Note: Ray functions are not detected by codecov (thus pragma: no cover)
Args:
path: The path of the Feather file.
columns: The list of column names to read.
num_splits: The number of partitions to split the column into.
Returns:
A list containing the split Pandas DataFrames and the Index as the last
        element. If no `index_col` is set, then we just return the length.
This is used to determine the total length of the DataFrame to build a
default Index.
]
from relative_module[pyarrow] import module[feather]
variable[df] assign[=] call[name[feather].read_feather, parameter[name[path]]]
return[binary_operation[call[name[_split_result_for_readers], parameter[constant[0], name[num_splits], name[df]]] + list[[<ast.Call object at 0x7da20c76fa00>]]]]
|
keyword[def] identifier[_read_feather_columns] ( identifier[path] , identifier[columns] , identifier[num_splits] ):
literal[string]
keyword[from] identifier[pyarrow] keyword[import] identifier[feather]
identifier[df] = identifier[feather] . identifier[read_feather] ( identifier[path] , identifier[columns] = identifier[columns] )
keyword[return] identifier[_split_result_for_readers] ( literal[int] , identifier[num_splits] , identifier[df] )+[ identifier[len] ( identifier[df] . identifier[index] )]
|
def _read_feather_columns(path, columns, num_splits): # pragma: no cover
    'Use a Ray task to read columns from Feather into a Pandas DataFrame.\n\n Note: Ray functions are not detected by codecov (thus pragma: no cover)\n\n Args:\n path: The path of the Feather file.\n columns: The list of column names to read.\n num_splits: The number of partitions to split the column into.\n\n Returns:\n A list containing the split Pandas DataFrames and the Index as the last\n element. If no `index_col` is set, then we just return the length.\n This is used to determine the total length of the DataFrame to build a\n default Index.\n '
from pyarrow import feather
df = feather.read_feather(path, columns=columns)
# Append the length of the index here to build it externally
return _split_result_for_readers(0, num_splits, df) + [len(df.index)]
|
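_split_result_for_readers is defined elsewhere; conceptually it partitions the frame into num_splits pieces along the given axis, e.g. via np.array_split. A sketch of the assumed behavior, not Modin's exact code:

import numpy as np
import pandas as pd

def _split_result_for_readers(axis, num_splits, df):
    # Assumed behavior: chunk df into num_splits pieces along `axis`.
    return np.array_split(df, num_splits, axis=axis)

df = pd.DataFrame({'a': range(10)})
parts = _split_result_for_readers(0, 4, df) + [len(df.index)]
print([len(p) for p in parts[:-1]], parts[-1])   # [3, 3, 2, 2] 10
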
def imdb(limit=None, shuffle=True):
"""Downloads (and caches) IMDB Moview Reviews. 25k training data, 25k test data
Args:
limit: get only first N items for each class
Returns:
[X_train, y_train, X_test, y_test]
"""
movie_review_url = 'http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz'
# download and extract, thus remove the suffix '.tar.gz'
path = keras.utils.get_file(
'aclImdb.tar.gz', movie_review_url, extract=True)[:-7]
X_train, y_train = read_pos_neg_data(path, 'train', limit)
X_test, y_test = read_pos_neg_data(path, 'test', limit)
if shuffle:
X_train, y_train = sklearn.utils.shuffle(X_train, y_train)
X_test, y_test = sklearn.utils.shuffle(X_test, y_test)
return X_train, X_test, y_train, y_test
|
def function[imdb, parameter[limit, shuffle]]:
    constant[Downloads (and caches) IMDB Movie Reviews. 25k training reviews, 25k test reviews
    Args:
        limit: get only the first N items for each class
    Returns:
        [X_train, X_test, y_train, y_test]
]
variable[movie_review_url] assign[=] constant[http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz]
variable[path] assign[=] call[call[name[keras].utils.get_file, parameter[constant[aclImdb.tar.gz], name[movie_review_url]]]][<ast.Slice object at 0x7da1b0f47370>]
<ast.Tuple object at 0x7da1b0f44df0> assign[=] call[name[read_pos_neg_data], parameter[name[path], constant[train], name[limit]]]
<ast.Tuple object at 0x7da1b0f44400> assign[=] call[name[read_pos_neg_data], parameter[name[path], constant[test], name[limit]]]
if name[shuffle] begin[:]
<ast.Tuple object at 0x7da1b0f47670> assign[=] call[name[sklearn].utils.shuffle, parameter[name[X_train], name[y_train]]]
<ast.Tuple object at 0x7da1b0f47250> assign[=] call[name[sklearn].utils.shuffle, parameter[name[X_test], name[y_test]]]
return[tuple[[<ast.Name object at 0x7da1b11a2a10>, <ast.Name object at 0x7da1b11a0d30>, <ast.Name object at 0x7da1b11a1c60>, <ast.Name object at 0x7da1b11a2680>]]]
|
keyword[def] identifier[imdb] ( identifier[limit] = keyword[None] , identifier[shuffle] = keyword[True] ):
literal[string]
identifier[movie_review_url] = literal[string]
identifier[path] = identifier[keras] . identifier[utils] . identifier[get_file] (
literal[string] , identifier[movie_review_url] , identifier[extract] = keyword[True] )[:- literal[int] ]
identifier[X_train] , identifier[y_train] = identifier[read_pos_neg_data] ( identifier[path] , literal[string] , identifier[limit] )
identifier[X_test] , identifier[y_test] = identifier[read_pos_neg_data] ( identifier[path] , literal[string] , identifier[limit] )
keyword[if] identifier[shuffle] :
identifier[X_train] , identifier[y_train] = identifier[sklearn] . identifier[utils] . identifier[shuffle] ( identifier[X_train] , identifier[y_train] )
identifier[X_test] , identifier[y_test] = identifier[sklearn] . identifier[utils] . identifier[shuffle] ( identifier[X_test] , identifier[y_test] )
keyword[return] identifier[X_train] , identifier[X_test] , identifier[y_train] , identifier[y_test]
|
def imdb(limit=None, shuffle=True):
"""Downloads (and caches) IMDB Moview Reviews. 25k training data, 25k test data
Args:
limit: get only first N items for each class
Returns:
[X_train, y_train, X_test, y_test]
"""
movie_review_url = 'http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz'
# download and extract, thus remove the suffix '.tar.gz'
path = keras.utils.get_file('aclImdb.tar.gz', movie_review_url, extract=True)[:-7]
(X_train, y_train) = read_pos_neg_data(path, 'train', limit)
(X_test, y_test) = read_pos_neg_data(path, 'test', limit)
if shuffle:
(X_train, y_train) = sklearn.utils.shuffle(X_train, y_train)
(X_test, y_test) = sklearn.utils.shuffle(X_test, y_test) # depends on [control=['if'], data=[]]
return (X_train, X_test, y_train, y_test)
|
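Usage sketch for the function above; the first call downloads the archive via keras.utils.get_file, and with limit=500 each split presumably holds 1000 reviews (assuming read_pos_neg_data returns `limit` positive and `limit` negative items):

X_train, X_test, y_train, y_test = imdb(limit=500)
print(len(X_train), len(X_test))   # 1000 1000, if the limit applies per class
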
def mul(lhs: Any, rhs: Any, default: Any = RaiseTypeErrorIfNotProvided) -> Any:
"""Returns lhs * rhs, or else a default if the operator is not implemented.
This method is mostly used by __pow__ methods trying to return
NotImplemented instead of causing a TypeError.
Args:
lhs: Left hand side of the multiplication.
rhs: Right hand side of the multiplication.
default: Default value to return if the multiplication is not defined.
            If no default is specified, a TypeError is raised when the
multiplication fails.
Returns:
The product of the two inputs, or else the default value if the product
is not defined, or else raises a TypeError if no default is defined.
Raises:
TypeError:
lhs doesn't have __mul__ or it returned NotImplemented
            AND rhs doesn't have __rmul__ or it returned NotImplemented
AND a default value isn't specified.
"""
# Use left-hand-side's __mul__.
left_mul = getattr(lhs, '__mul__', None)
result = NotImplemented if left_mul is None else left_mul(rhs)
# Fallback to right-hand-side's __rmul__.
if result is NotImplemented:
right_mul = getattr(rhs, '__rmul__', None)
result = NotImplemented if right_mul is None else right_mul(lhs)
# Don't build up factors of 1.0 vs sympy Symbols.
if lhs == 1 and is_parameterized(rhs):
result = rhs
if rhs == 1 and is_parameterized(lhs):
result = lhs
if lhs == -1 and is_parameterized(rhs):
result = -rhs
if rhs == -1 and is_parameterized(lhs):
result = -lhs
# Output.
if result is not NotImplemented:
return result
if default is not RaiseTypeErrorIfNotProvided:
return default
raise TypeError("unsupported operand type(s) for *: '{}' and '{}'".format(
type(lhs), type(rhs)))
|
def function[mul, parameter[lhs, rhs, default]]:
constant[Returns lhs * rhs, or else a default if the operator is not implemented.
This method is mostly used by __pow__ methods trying to return
NotImplemented instead of causing a TypeError.
Args:
lhs: Left hand side of the multiplication.
rhs: Right hand side of the multiplication.
default: Default value to return if the multiplication is not defined.
            If no default is specified, a TypeError is raised when the
multiplication fails.
Returns:
The product of the two inputs, or else the default value if the product
is not defined, or else raises a TypeError if no default is defined.
Raises:
TypeError:
lhs doesn't have __mul__ or it returned NotImplemented
            AND rhs doesn't have __rmul__ or it returned NotImplemented
AND a default value isn't specified.
]
variable[left_mul] assign[=] call[name[getattr], parameter[name[lhs], constant[__mul__], constant[None]]]
variable[result] assign[=] <ast.IfExp object at 0x7da2046234c0>
if compare[name[result] is name[NotImplemented]] begin[:]
variable[right_mul] assign[=] call[name[getattr], parameter[name[rhs], constant[__rmul__], constant[None]]]
variable[result] assign[=] <ast.IfExp object at 0x7da2046206a0>
if <ast.BoolOp object at 0x7da204622230> begin[:]
variable[result] assign[=] name[rhs]
if <ast.BoolOp object at 0x7da204622140> begin[:]
variable[result] assign[=] name[lhs]
if <ast.BoolOp object at 0x7da1b21ed870> begin[:]
variable[result] assign[=] <ast.UnaryOp object at 0x7da1b21ee290>
if <ast.BoolOp object at 0x7da1b21ed4b0> begin[:]
variable[result] assign[=] <ast.UnaryOp object at 0x7da18dc982b0>
if compare[name[result] is_not name[NotImplemented]] begin[:]
return[name[result]]
if compare[name[default] is_not name[RaiseTypeErrorIfNotProvided]] begin[:]
return[name[default]]
<ast.Raise object at 0x7da1b1b019f0>
|
keyword[def] identifier[mul] ( identifier[lhs] : identifier[Any] , identifier[rhs] : identifier[Any] , identifier[default] : identifier[Any] = identifier[RaiseTypeErrorIfNotProvided] )-> identifier[Any] :
literal[string]
identifier[left_mul] = identifier[getattr] ( identifier[lhs] , literal[string] , keyword[None] )
identifier[result] = identifier[NotImplemented] keyword[if] identifier[left_mul] keyword[is] keyword[None] keyword[else] identifier[left_mul] ( identifier[rhs] )
keyword[if] identifier[result] keyword[is] identifier[NotImplemented] :
identifier[right_mul] = identifier[getattr] ( identifier[rhs] , literal[string] , keyword[None] )
identifier[result] = identifier[NotImplemented] keyword[if] identifier[right_mul] keyword[is] keyword[None] keyword[else] identifier[right_mul] ( identifier[lhs] )
keyword[if] identifier[lhs] == literal[int] keyword[and] identifier[is_parameterized] ( identifier[rhs] ):
identifier[result] = identifier[rhs]
keyword[if] identifier[rhs] == literal[int] keyword[and] identifier[is_parameterized] ( identifier[lhs] ):
identifier[result] = identifier[lhs]
keyword[if] identifier[lhs] ==- literal[int] keyword[and] identifier[is_parameterized] ( identifier[rhs] ):
identifier[result] =- identifier[rhs]
keyword[if] identifier[rhs] ==- literal[int] keyword[and] identifier[is_parameterized] ( identifier[lhs] ):
identifier[result] =- identifier[lhs]
keyword[if] identifier[result] keyword[is] keyword[not] identifier[NotImplemented] :
keyword[return] identifier[result]
keyword[if] identifier[default] keyword[is] keyword[not] identifier[RaiseTypeErrorIfNotProvided] :
keyword[return] identifier[default]
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] (
identifier[type] ( identifier[lhs] ), identifier[type] ( identifier[rhs] )))
|
def mul(lhs: Any, rhs: Any, default: Any=RaiseTypeErrorIfNotProvided) -> Any:
"""Returns lhs * rhs, or else a default if the operator is not implemented.
This method is mostly used by __pow__ methods trying to return
NotImplemented instead of causing a TypeError.
Args:
lhs: Left hand side of the multiplication.
rhs: Right hand side of the multiplication.
default: Default value to return if the multiplication is not defined.
            If no default is specified, a TypeError is raised when the
multiplication fails.
Returns:
The product of the two inputs, or else the default value if the product
is not defined, or else raises a TypeError if no default is defined.
Raises:
TypeError:
lhs doesn't have __mul__ or it returned NotImplemented
AND lhs doesn't have __rmul__ or it returned NotImplemented
AND a default value isn't specified.
"""
# Use left-hand-side's __mul__.
left_mul = getattr(lhs, '__mul__', None)
result = NotImplemented if left_mul is None else left_mul(rhs)
# Fallback to right-hand-side's __rmul__.
if result is NotImplemented:
right_mul = getattr(rhs, '__rmul__', None)
result = NotImplemented if right_mul is None else right_mul(lhs) # depends on [control=['if'], data=['result', 'NotImplemented']]
# Don't build up factors of 1.0 vs sympy Symbols.
if lhs == 1 and is_parameterized(rhs):
result = rhs # depends on [control=['if'], data=[]]
if rhs == 1 and is_parameterized(lhs):
result = lhs # depends on [control=['if'], data=[]]
if lhs == -1 and is_parameterized(rhs):
result = -rhs # depends on [control=['if'], data=[]]
if rhs == -1 and is_parameterized(lhs):
result = -lhs # depends on [control=['if'], data=[]]
# Output.
if result is not NotImplemented:
return result # depends on [control=['if'], data=['result']]
if default is not RaiseTypeErrorIfNotProvided:
return default # depends on [control=['if'], data=['default']]
raise TypeError("unsupported operand type(s) for *: '{}' and '{}'".format(type(lhs), type(rhs)))
|
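A minimal usage sketch for mul() above. Half is a made-up class that exists only to exercise the __rmul__ fallback, and the calls assume mul() plus its module-level helpers (RaiseTypeErrorIfNotProvided, is_parameterized) are importable from the surrounding module.

class Half:
    # No __mul__ of its own: int.__mul__(Half()) returns NotImplemented,
    # so mul() falls through to this __rmul__.
    def __rmul__(self, other):
        return other * 0.5

assert mul(2, 3) == 6                                 # left-hand __mul__ wins
assert mul(4, Half()) == 2.0                          # __rmul__ fallback path
assert mul(object(), object(), default=None) is None  # default instead of TypeError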
def password_present(name, password):
'''
Ensures the given password is set on the ESXi host. Passwords cannot be obtained from
host, so if a password is set in this state, the ``vsphere.update_host_password``
function will always run (except when using test=True functionality) and the state's
changes dictionary will always be populated.
The username for which the password will change is the same username that is used to
authenticate against the ESXi host via the Proxy Minion. For example, if the pillar
definition for the proxy username is defined as ``root``, then the username that the
password will be updated for via this state is ``root``.
name
Name of the state.
password
The new password to change on the host.
Example:
.. code-block:: yaml
configure-host-password:
esxi.password_present:
- password: 'new-bad-password'
'''
ret = {'name': name,
'result': True,
'changes': {'old': 'unknown',
'new': '********'},
'comment': 'Host password was updated.'}
esxi_cmd = 'esxi.cmd'
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'Host password will change.'
return ret
else:
try:
__salt__[esxi_cmd]('update_host_password',
new_password=password)
except CommandExecutionError as err:
ret['result'] = False
ret['comment'] = 'Error: {0}'.format(err)
return ret
return ret
|
def function[password_present, parameter[name, password]]:
constant[
Ensures the given password is set on the ESXi host. Passwords cannot be obtained from
host, so if a password is set in this state, the ``vsphere.update_host_password``
function will always run (except when using test=True functionality) and the state's
changes dictionary will always be populated.
The username for which the password will change is the same username that is used to
authenticate against the ESXi host via the Proxy Minion. For example, if the pillar
definition for the proxy username is defined as ``root``, then the username that the
password will be updated for via this state is ``root``.
name
Name of the state.
password
The new password to change on the host.
Example:
.. code-block:: yaml
configure-host-password:
esxi.password_present:
- password: 'new-bad-password'
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9a950>, <ast.Constant object at 0x7da18dc986d0>, <ast.Constant object at 0x7da18dc9a080>, <ast.Constant object at 0x7da18dc9a560>], [<ast.Name object at 0x7da18dc98910>, <ast.Constant object at 0x7da18dc9b520>, <ast.Dict object at 0x7da18dc98a90>, <ast.Constant object at 0x7da18dc9ac50>]]
variable[esxi_cmd] assign[=] constant[esxi.cmd]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret]][constant[result]] assign[=] constant[None]
call[name[ret]][constant[comment]] assign[=] constant[Host password will change.]
return[name[ret]]
return[name[ret]]
|
keyword[def] identifier[password_present] ( identifier[name] , identifier[password] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] : keyword[True] ,
literal[string] :{ literal[string] : literal[string] ,
literal[string] : literal[string] },
literal[string] : literal[string] }
identifier[esxi_cmd] = literal[string]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= keyword[None]
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[else] :
keyword[try] :
identifier[__salt__] [ identifier[esxi_cmd] ]( literal[string] ,
identifier[new_password] = identifier[password] )
keyword[except] identifier[CommandExecutionError] keyword[as] identifier[err] :
identifier[ret] [ literal[string] ]= keyword[False]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[err] )
keyword[return] identifier[ret]
keyword[return] identifier[ret]
|
def password_present(name, password):
"""
Ensures the given password is set on the ESXi host. Passwords cannot be obtained from
host, so if a password is set in this state, the ``vsphere.update_host_password``
function will always run (except when using test=True functionality) and the state's
changes dictionary will always be populated.
The username for which the password will change is the same username that is used to
authenticate against the ESXi host via the Proxy Minion. For example, if the pillar
definition for the proxy username is defined as ``root``, then the username that the
password will be updated for via this state is ``root``.
name
Name of the state.
password
The new password to change on the host.
Example:
.. code-block:: yaml
configure-host-password:
esxi.password_present:
- password: 'new-bad-password'
"""
ret = {'name': name, 'result': True, 'changes': {'old': 'unknown', 'new': '********'}, 'comment': 'Host password was updated.'}
esxi_cmd = 'esxi.cmd'
if __opts__['test']:
ret['result'] = None
ret['comment'] = 'Host password will change.'
return ret # depends on [control=['if'], data=[]]
else:
try:
__salt__[esxi_cmd]('update_host_password', new_password=password) # depends on [control=['try'], data=[]]
except CommandExecutionError as err:
ret['result'] = False
ret['comment'] = 'Error: {0}'.format(err)
return ret # depends on [control=['except'], data=['err']]
return ret
|
def process_exception_message(exception):
"""
Process an exception message.
Args:
exception: The exception to process.
Returns:
A filtered string summarizing the exception.
"""
exception_message = str(exception)
for replace_char in ['\t', '\n', '\\n']:
exception_message = exception_message.replace(replace_char, '' if replace_char != '\t' else ' ')
return exception_message.replace('section', 'alias')
|
def function[process_exception_message, parameter[exception]]:
constant[
Process an exception message.
Args:
exception: The exception to process.
Returns:
A filtered string summarizing the exception.
]
variable[exception_message] assign[=] call[name[str], parameter[name[exception]]]
for taget[name[replace_char]] in starred[list[[<ast.Constant object at 0x7da2054a7be0>, <ast.Constant object at 0x7da2054a6a40>, <ast.Constant object at 0x7da2054a72e0>]]] begin[:]
variable[exception_message] assign[=] call[name[exception_message].replace, parameter[name[replace_char], <ast.IfExp object at 0x7da2054a52d0>]]
return[call[name[exception_message].replace, parameter[constant[section], constant[alias]]]]
|
keyword[def] identifier[process_exception_message] ( identifier[exception] ):
literal[string]
identifier[exception_message] = identifier[str] ( identifier[exception] )
keyword[for] identifier[replace_char] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
identifier[exception_message] = identifier[exception_message] . identifier[replace] ( identifier[replace_char] , literal[string] keyword[if] identifier[replace_char] != literal[string] keyword[else] literal[string] )
keyword[return] identifier[exception_message] . identifier[replace] ( literal[string] , literal[string] )
|
def process_exception_message(exception):
"""
Process an exception message.
Args:
exception: The exception to process.
Returns:
A filtered string summarizing the exception.
"""
exception_message = str(exception)
for replace_char in ['\t', '\n', '\\n']:
exception_message = exception_message.replace(replace_char, '' if replace_char != '\t' else ' ') # depends on [control=['for'], data=['replace_char']]
return exception_message.replace('section', 'alias')
|
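A quick check of the filtering: tabs become spaces, real and escaped newlines are dropped, and 'section' is rewritten to 'alias'.

exc = ValueError("missing section:\tfoo\n")
assert process_exception_message(exc) == "missing alias: foo"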
def filter_time_frame(start, delta):
"""Filter :class:`.Line` objects by their connection time.
:param start: a time expression (see -s argument on --help for its format)
to filter log lines that are before this time.
:type start: string
    :param delta: a relative time expression (see -d argument on --help for
its format) to limit the amount of time log lines will be considered.
:type delta: string
:returns: a function that filters by the time a request is made.
:rtype: function
"""
start_value = start
delta_value = delta
end_value = None
    if start_value != '':
        start_value = _date_str_to_datetime(start_value)
    if delta_value != '':
        delta_value = _delta_str_to_timedelta(delta_value)
    if start_value != '' and delta_value != '':
end_value = start_value + delta_value
def filter_func(log_line):
        if start_value == '':
return True
elif start_value > log_line.accept_date:
return False
if end_value is None:
return True
elif end_value < log_line.accept_date:
return False
return True
return filter_func
|
def function[filter_time_frame, parameter[start, delta]]:
constant[Filter :class:`.Line` objects by their connection time.
:param start: a time expression (see -s argument on --help for its format)
to filter log lines that are before this time.
:type start: string
    :param delta: a relative time expression (see -d argument on --help for
its format) to limit the amount of time log lines will be considered.
:type delta: string
:returns: a function that filters by the time a request is made.
:rtype: function
]
variable[start_value] assign[=] name[start]
variable[delta_value] assign[=] name[delta]
variable[end_value] assign[=] constant[None]
    if compare[name[start_value] not_equal[!=] constant[]] begin[:]
variable[start_value] assign[=] call[name[_date_str_to_datetime], parameter[name[start_value]]]
    if compare[name[delta_value] not_equal[!=] constant[]] begin[:]
variable[delta_value] assign[=] call[name[_delta_str_to_timedelta], parameter[name[delta_value]]]
if <ast.BoolOp object at 0x7da1b10e5ba0> begin[:]
variable[end_value] assign[=] binary_operation[name[start_value] + name[delta_value]]
def function[filter_func, parameter[log_line]]:
        if compare[name[start_value] equal[==] constant[]] begin[:]
return[constant[True]]
if compare[name[end_value] is constant[None]] begin[:]
return[constant[True]]
return[constant[True]]
return[name[filter_func]]
|
keyword[def] identifier[filter_time_frame] ( identifier[start] , identifier[delta] ):
literal[string]
identifier[start_value] = identifier[start]
identifier[delta_value] = identifier[delta]
identifier[end_value] = keyword[None]
    keyword[if] identifier[start_value] != literal[string] :
identifier[start_value] = identifier[_date_str_to_datetime] ( identifier[start_value] )
    keyword[if] identifier[delta_value] != literal[string] :
identifier[delta_value] = identifier[_delta_str_to_timedelta] ( identifier[delta_value] )
    keyword[if] identifier[start_value] != literal[string] keyword[and] identifier[delta_value] != literal[string] :
identifier[end_value] = identifier[start_value] + identifier[delta_value]
keyword[def] identifier[filter_func] ( identifier[log_line] ):
        keyword[if] identifier[start_value] == literal[string] :
keyword[return] keyword[True]
keyword[elif] identifier[start_value] > identifier[log_line] . identifier[accept_date] :
keyword[return] keyword[False]
keyword[if] identifier[end_value] keyword[is] keyword[None] :
keyword[return] keyword[True]
keyword[elif] identifier[end_value] < identifier[log_line] . identifier[accept_date] :
keyword[return] keyword[False]
keyword[return] keyword[True]
keyword[return] identifier[filter_func]
|
def filter_time_frame(start, delta):
"""Filter :class:`.Line` objects by their connection time.
:param start: a time expression (see -s argument on --help for its format)
to filter log lines that are before this time.
:type start: string
    :param delta: a relative time expression (see -d argument on --help for
its format) to limit the amount of time log lines will be considered.
:type delta: string
:returns: a function that filters by the time a request is made.
:rtype: function
"""
start_value = start
delta_value = delta
end_value = None
    if start_value != '':
        start_value = _date_str_to_datetime(start_value) # depends on [control=['if'], data=['start_value']]
    if delta_value != '':
        delta_value = _delta_str_to_timedelta(delta_value) # depends on [control=['if'], data=['delta_value']]
    if start_value != '' and delta_value != '':
end_value = start_value + delta_value # depends on [control=['if'], data=[]]
def filter_func(log_line):
        if start_value == '':
return True # depends on [control=['if'], data=[]]
elif start_value > log_line.accept_date:
return False # depends on [control=['if'], data=[]]
if end_value is None:
return True # depends on [control=['if'], data=[]]
elif end_value < log_line.accept_date:
return False # depends on [control=['if'], data=[]]
return True
return filter_func
|
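A smoke test of the unbounded case. Line is a stand-in for the library's log-line class, of which only accept_date is used; the bounded branches go through the private _date_str_to_datetime/_delta_str_to_timedelta helpers, which are not part of this row.

from collections import namedtuple
from datetime import datetime

Line = namedtuple('Line', 'accept_date')

accept_all = filter_time_frame('', '')   # no start and no delta: everything passes
assert accept_all(Line(datetime(2024, 1, 1)))
assert accept_all(Line(datetime(1999, 12, 31)))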
def match(version, match_expr):
"""Compare two versions through a comparison
:param str version: a version string
:param str match_expr: operator and version; valid operators are
< smaller than
> greater than
        >= greater than or equal
        <= smaller than or equal
== equal
!= not equal
:return: True if the expression matches the version, otherwise False
:rtype: bool
>>> import semver
>>> semver.match("2.0.0", ">=1.0.0")
True
>>> semver.match("1.0.0", ">1.0.0")
False
"""
prefix = match_expr[:2]
if prefix in ('>=', '<=', '==', '!='):
match_version = match_expr[2:]
elif prefix and prefix[0] in ('>', '<'):
prefix = prefix[0]
match_version = match_expr[1:]
else:
raise ValueError("match_expr parameter should be in format <op><ver>, "
"where <op> is one of "
"['<', '>', '==', '<=', '>=', '!=']. "
"You provided: %r" % match_expr)
possibilities_dict = {
'>': (1,),
'<': (-1,),
'==': (0,),
'!=': (-1, 1),
'>=': (0, 1),
'<=': (-1, 0)
}
possibilities = possibilities_dict[prefix]
cmp_res = compare(version, match_version)
return cmp_res in possibilities
|
def function[match, parameter[version, match_expr]]:
constant[Compare two versions through a comparison
:param str version: a version string
:param str match_expr: operator and version; valid operators are
< smaller than
> greater than
        >= greater than or equal
        <= smaller than or equal
== equal
!= not equal
:return: True if the expression matches the version, otherwise False
:rtype: bool
>>> import semver
>>> semver.match("2.0.0", ">=1.0.0")
True
>>> semver.match("1.0.0", ">1.0.0")
False
]
variable[prefix] assign[=] call[name[match_expr]][<ast.Slice object at 0x7da2054a7850>]
if compare[name[prefix] in tuple[[<ast.Constant object at 0x7da2054a7130>, <ast.Constant object at 0x7da2054a52d0>, <ast.Constant object at 0x7da2054a5cc0>, <ast.Constant object at 0x7da2054a7f10>]]] begin[:]
variable[match_version] assign[=] call[name[match_expr]][<ast.Slice object at 0x7da2054a60e0>]
variable[possibilities_dict] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c6350>, <ast.Constant object at 0x7da20c6c6860>, <ast.Constant object at 0x7da20c6c7fd0>, <ast.Constant object at 0x7da20c6c41c0>, <ast.Constant object at 0x7da20c6c6da0>, <ast.Constant object at 0x7da20c6c5cc0>], [<ast.Tuple object at 0x7da2041d87c0>, <ast.Tuple object at 0x7da2041dbcd0>, <ast.Tuple object at 0x7da2041d8730>, <ast.Tuple object at 0x7da2041da590>, <ast.Tuple object at 0x7da2041d8310>, <ast.Tuple object at 0x7da2041dbdf0>]]
variable[possibilities] assign[=] call[name[possibilities_dict]][name[prefix]]
variable[cmp_res] assign[=] call[name[compare], parameter[name[version], name[match_version]]]
return[compare[name[cmp_res] in name[possibilities]]]
|
keyword[def] identifier[match] ( identifier[version] , identifier[match_expr] ):
literal[string]
identifier[prefix] = identifier[match_expr] [: literal[int] ]
keyword[if] identifier[prefix] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ):
identifier[match_version] = identifier[match_expr] [ literal[int] :]
keyword[elif] identifier[prefix] keyword[and] identifier[prefix] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ):
identifier[prefix] = identifier[prefix] [ literal[int] ]
identifier[match_version] = identifier[match_expr] [ literal[int] :]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string]
literal[string] % identifier[match_expr] )
identifier[possibilities_dict] ={
literal[string] :( literal[int] ,),
literal[string] :(- literal[int] ,),
literal[string] :( literal[int] ,),
literal[string] :(- literal[int] , literal[int] ),
literal[string] :( literal[int] , literal[int] ),
literal[string] :(- literal[int] , literal[int] )
}
identifier[possibilities] = identifier[possibilities_dict] [ identifier[prefix] ]
identifier[cmp_res] = identifier[compare] ( identifier[version] , identifier[match_version] )
keyword[return] identifier[cmp_res] keyword[in] identifier[possibilities]
|
def match(version, match_expr):
"""Compare two versions through a comparison
:param str version: a version string
:param str match_expr: operator and version; valid operators are
< smaller than
> greater than
        >= greater than or equal
        <= smaller than or equal
== equal
!= not equal
:return: True if the expression matches the version, otherwise False
:rtype: bool
>>> import semver
>>> semver.match("2.0.0", ">=1.0.0")
True
>>> semver.match("1.0.0", ">1.0.0")
False
"""
prefix = match_expr[:2]
if prefix in ('>=', '<=', '==', '!='):
match_version = match_expr[2:] # depends on [control=['if'], data=[]]
elif prefix and prefix[0] in ('>', '<'):
prefix = prefix[0]
match_version = match_expr[1:] # depends on [control=['if'], data=[]]
else:
raise ValueError("match_expr parameter should be in format <op><ver>, where <op> is one of ['<', '>', '==', '<=', '>=', '!=']. You provided: %r" % match_expr)
possibilities_dict = {'>': (1,), '<': (-1,), '==': (0,), '!=': (-1, 1), '>=': (0, 1), '<=': (-1, 0)}
possibilities = possibilities_dict[prefix]
cmp_res = compare(version, match_version)
return cmp_res in possibilities
|
def _set_snps(self, snps, build=37):
""" Set `_snps` and `_build` properties of this ``Individual``.
Notes
-----
Intended to be used internally to `lineage`.
Parameters
----------
snps : pandas.DataFrame
individual's genetic data normalized for use with `lineage`
build : int
build of this ``Individual``'s SNPs
"""
self._snps = snps
self._build = build
|
def function[_set_snps, parameter[self, snps, build]]:
constant[ Set `_snps` and `_build` properties of this ``Individual``.
Notes
-----
Intended to be used internally to `lineage`.
Parameters
----------
snps : pandas.DataFrame
individual's genetic data normalized for use with `lineage`
build : int
build of this ``Individual``'s SNPs
]
name[self]._snps assign[=] name[snps]
name[self]._build assign[=] name[build]
|
keyword[def] identifier[_set_snps] ( identifier[self] , identifier[snps] , identifier[build] = literal[int] ):
literal[string]
identifier[self] . identifier[_snps] = identifier[snps]
identifier[self] . identifier[_build] = identifier[build]
|
def _set_snps(self, snps, build=37):
""" Set `_snps` and `_build` properties of this ``Individual``.
Notes
-----
Intended to be used internally to `lineage`.
Parameters
----------
snps : pandas.DataFrame
individual's genetic data normalized for use with `lineage`
build : int
build of this ``Individual``'s SNPs
"""
self._snps = snps
self._build = build
|
def list_backends(_):
"""List all available backends."""
backends = [b.__name__ for b in available_backends()]
print('\n'.join(backends))
|
def function[list_backends, parameter[_]]:
constant[List all available backends.]
variable[backends] assign[=] <ast.ListComp object at 0x7da18eb54070>
call[name[print], parameter[call[constant[
].join, parameter[name[backends]]]]]
|
keyword[def] identifier[list_backends] ( identifier[_] ):
literal[string]
identifier[backends] =[ identifier[b] . identifier[__name__] keyword[for] identifier[b] keyword[in] identifier[available_backends] ()]
identifier[print] ( literal[string] . identifier[join] ( identifier[backends] ))
|
def list_backends(_):
"""List all available backends."""
backends = [b.__name__ for b in available_backends()]
print('\n'.join(backends))
|
def generic_is_view_attribute(parents, attrs):
"""Generates is_X_attribute function for given parents and attrs."""
def is_attribute(node):
return _attribute_is_magic(node, attrs, parents)
return is_attribute
|
def function[generic_is_view_attribute, parameter[parents, attrs]]:
constant[Generates is_X_attribute function for given parents and attrs.]
def function[is_attribute, parameter[node]]:
return[call[name[_attribute_is_magic], parameter[name[node], name[attrs], name[parents]]]]
return[name[is_attribute]]
|
keyword[def] identifier[generic_is_view_attribute] ( identifier[parents] , identifier[attrs] ):
literal[string]
keyword[def] identifier[is_attribute] ( identifier[node] ):
keyword[return] identifier[_attribute_is_magic] ( identifier[node] , identifier[attrs] , identifier[parents] )
keyword[return] identifier[is_attribute]
|
def generic_is_view_attribute(parents, attrs):
"""Generates is_X_attribute function for given parents and attrs."""
def is_attribute(node):
return _attribute_is_magic(node, attrs, parents)
return is_attribute
|
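The factory simply freezes parents and attrs into a closure. A sketch with _attribute_is_magic stubbed out; the real helper in pylint-django inspects astroid nodes rather than plain strings.

def _attribute_is_magic(node, attrs, parents):     # stub for this sketch only
    return node in attrs

is_response_attribute = generic_is_view_attribute(
    ('django.http.response.HttpResponse',), ('content', 'status_code'))
assert is_response_attribute('content')
assert not is_response_attribute('body')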
def configure_logging(verbose, logger):
"""Configures the logging used."""
if not verbose:
log_level = logging.WARNING
elif verbose == 1:
log_level = logging.INFO
else:
log_level = logging.DEBUG
logger.setLevel(log_level)
ch = colorlog.StreamHandler()
ch.setLevel(log_level)
formatter = colorlog.ColoredFormatter(
'%(log_color)s%(asctime)s %(name)s %(levelname)s: %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
def function[configure_logging, parameter[verbose, logger]]:
constant[Configures the logging used.]
if <ast.UnaryOp object at 0x7da1b19cd3f0> begin[:]
variable[log_level] assign[=] name[logging].WARNING
call[name[logger].setLevel, parameter[name[log_level]]]
variable[ch] assign[=] call[name[colorlog].StreamHandler, parameter[]]
call[name[ch].setLevel, parameter[name[log_level]]]
variable[formatter] assign[=] call[name[colorlog].ColoredFormatter, parameter[constant[%(log_color)s%(asctime)s %(name)s %(levelname)s: %(message)s]]]
call[name[ch].setFormatter, parameter[name[formatter]]]
call[name[logger].addHandler, parameter[name[ch]]]
|
keyword[def] identifier[configure_logging] ( identifier[verbose] , identifier[logger] ):
literal[string]
keyword[if] keyword[not] identifier[verbose] :
identifier[log_level] = identifier[logging] . identifier[WARNING]
keyword[elif] identifier[verbose] == literal[int] :
identifier[log_level] = identifier[logging] . identifier[INFO]
keyword[else] :
identifier[log_level] = identifier[logging] . identifier[DEBUG]
identifier[logger] . identifier[setLevel] ( identifier[log_level] )
identifier[ch] = identifier[colorlog] . identifier[StreamHandler] ()
identifier[ch] . identifier[setLevel] ( identifier[log_level] )
identifier[formatter] = identifier[colorlog] . identifier[ColoredFormatter] (
literal[string] )
identifier[ch] . identifier[setFormatter] ( identifier[formatter] )
identifier[logger] . identifier[addHandler] ( identifier[ch] )
|
def configure_logging(verbose, logger):
"""Configures the logging used."""
if not verbose:
log_level = logging.WARNING # depends on [control=['if'], data=[]]
elif verbose == 1:
log_level = logging.INFO # depends on [control=['if'], data=[]]
else:
log_level = logging.DEBUG
logger.setLevel(log_level)
ch = colorlog.StreamHandler()
ch.setLevel(log_level)
formatter = colorlog.ColoredFormatter('%(log_color)s%(asctime)s %(name)s %(levelname)s: %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
|
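Typical wiring passes an argparse verbosity count straight through; colorlog must be installed, which configure_logging already assumes.

import logging

log = logging.getLogger('demo')
configure_logging(2, log)            # 0 -> WARNING, 1 -> INFO, >=2 -> DEBUG
log.debug('only shown at verbosity 2 or higher')
log.warning('always shown')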
def PreprocessSources(
self, artifacts_registry_object, source_path_specs,
resolver_context=None):
"""Preprocesses the sources.
Args:
artifacts_registry_object (artifacts.ArtifactDefinitionsRegistry):
artifact definitions registry.
source_path_specs (list[dfvfs.PathSpec]): path specifications of
the sources to process.
resolver_context (Optional[dfvfs.Context]): resolver context.
"""
detected_operating_systems = []
for source_path_spec in source_path_specs:
try:
file_system, mount_point = self.GetSourceFileSystem(
source_path_spec, resolver_context=resolver_context)
except (RuntimeError, dfvfs_errors.BackEndError) as exception:
logger.error(exception)
continue
try:
searcher = file_system_searcher.FileSystemSearcher(
file_system, mount_point)
operating_system = self._DetermineOperatingSystem(searcher)
if operating_system != definitions.OPERATING_SYSTEM_FAMILY_UNKNOWN:
preprocess_manager.PreprocessPluginsManager.RunPlugins(
artifacts_registry_object, file_system, mount_point,
self.knowledge_base)
detected_operating_systems.append(operating_system)
finally:
file_system.Close()
if detected_operating_systems:
logger.info('Preprocessing detected operating systems: {0:s}'.format(
', '.join(detected_operating_systems)))
self.knowledge_base.SetValue(
'operating_system', detected_operating_systems[0])
|
def function[PreprocessSources, parameter[self, artifacts_registry_object, source_path_specs, resolver_context]]:
constant[Preprocesses the sources.
Args:
artifacts_registry_object (artifacts.ArtifactDefinitionsRegistry):
artifact definitions registry.
source_path_specs (list[dfvfs.PathSpec]): path specifications of
the sources to process.
resolver_context (Optional[dfvfs.Context]): resolver context.
]
variable[detected_operating_systems] assign[=] list[[]]
for taget[name[source_path_spec]] in starred[name[source_path_specs]] begin[:]
<ast.Try object at 0x7da207f006a0>
<ast.Try object at 0x7da18eb560b0>
if name[detected_operating_systems] begin[:]
call[name[logger].info, parameter[call[constant[Preprocessing detected operating systems: {0:s}].format, parameter[call[constant[, ].join, parameter[name[detected_operating_systems]]]]]]]
call[name[self].knowledge_base.SetValue, parameter[constant[operating_system], call[name[detected_operating_systems]][constant[0]]]]
|
keyword[def] identifier[PreprocessSources] (
identifier[self] , identifier[artifacts_registry_object] , identifier[source_path_specs] ,
identifier[resolver_context] = keyword[None] ):
literal[string]
identifier[detected_operating_systems] =[]
keyword[for] identifier[source_path_spec] keyword[in] identifier[source_path_specs] :
keyword[try] :
identifier[file_system] , identifier[mount_point] = identifier[self] . identifier[GetSourceFileSystem] (
identifier[source_path_spec] , identifier[resolver_context] = identifier[resolver_context] )
keyword[except] ( identifier[RuntimeError] , identifier[dfvfs_errors] . identifier[BackEndError] ) keyword[as] identifier[exception] :
identifier[logger] . identifier[error] ( identifier[exception] )
keyword[continue]
keyword[try] :
identifier[searcher] = identifier[file_system_searcher] . identifier[FileSystemSearcher] (
identifier[file_system] , identifier[mount_point] )
identifier[operating_system] = identifier[self] . identifier[_DetermineOperatingSystem] ( identifier[searcher] )
keyword[if] identifier[operating_system] != identifier[definitions] . identifier[OPERATING_SYSTEM_FAMILY_UNKNOWN] :
identifier[preprocess_manager] . identifier[PreprocessPluginsManager] . identifier[RunPlugins] (
identifier[artifacts_registry_object] , identifier[file_system] , identifier[mount_point] ,
identifier[self] . identifier[knowledge_base] )
identifier[detected_operating_systems] . identifier[append] ( identifier[operating_system] )
keyword[finally] :
identifier[file_system] . identifier[Close] ()
keyword[if] identifier[detected_operating_systems] :
identifier[logger] . identifier[info] ( literal[string] . identifier[format] (
literal[string] . identifier[join] ( identifier[detected_operating_systems] )))
identifier[self] . identifier[knowledge_base] . identifier[SetValue] (
literal[string] , identifier[detected_operating_systems] [ literal[int] ])
|
def PreprocessSources(self, artifacts_registry_object, source_path_specs, resolver_context=None):
"""Preprocesses the sources.
Args:
artifacts_registry_object (artifacts.ArtifactDefinitionsRegistry):
artifact definitions registry.
source_path_specs (list[dfvfs.PathSpec]): path specifications of
the sources to process.
resolver_context (Optional[dfvfs.Context]): resolver context.
"""
detected_operating_systems = []
for source_path_spec in source_path_specs:
try:
(file_system, mount_point) = self.GetSourceFileSystem(source_path_spec, resolver_context=resolver_context) # depends on [control=['try'], data=[]]
except (RuntimeError, dfvfs_errors.BackEndError) as exception:
logger.error(exception)
continue # depends on [control=['except'], data=['exception']]
try:
searcher = file_system_searcher.FileSystemSearcher(file_system, mount_point)
operating_system = self._DetermineOperatingSystem(searcher)
if operating_system != definitions.OPERATING_SYSTEM_FAMILY_UNKNOWN:
preprocess_manager.PreprocessPluginsManager.RunPlugins(artifacts_registry_object, file_system, mount_point, self.knowledge_base)
detected_operating_systems.append(operating_system) # depends on [control=['if'], data=['operating_system']] # depends on [control=['try'], data=[]]
finally:
file_system.Close() # depends on [control=['for'], data=['source_path_spec']]
if detected_operating_systems:
logger.info('Preprocessing detected operating systems: {0:s}'.format(', '.join(detected_operating_systems)))
self.knowledge_base.SetValue('operating_system', detected_operating_systems[0]) # depends on [control=['if'], data=[]]
|
def broadcast_status(self, status):
"""Broadcast transient status information to all listeners"""
self._broadcast(
"transient.status",
json.dumps(status),
headers={"expires": str(int((15 + time.time()) * 1000))},
)
|
def function[broadcast_status, parameter[self, status]]:
constant[Broadcast transient status information to all listeners]
call[name[self]._broadcast, parameter[constant[transient.status], call[name[json].dumps, parameter[name[status]]]]]
|
keyword[def] identifier[broadcast_status] ( identifier[self] , identifier[status] ):
literal[string]
identifier[self] . identifier[_broadcast] (
literal[string] ,
identifier[json] . identifier[dumps] ( identifier[status] ),
identifier[headers] ={ literal[string] : identifier[str] ( identifier[int] (( literal[int] + identifier[time] . identifier[time] ())* literal[int] ))},
)
|
def broadcast_status(self, status):
"""Broadcast transient status information to all listeners"""
self._broadcast('transient.status', json.dumps(status), headers={'expires': str(int((15 + time.time()) * 1000))})
|
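The only subtlety above is the expires header: STOMP-style epoch milliseconds, 15 seconds ahead. The arithmetic in isolation (_broadcast itself lives elsewhere in the class):

import json
import time

status = {'state': 'idle'}
headers = {'expires': str(int((15 + time.time()) * 1000))}
print(json.dumps(status), headers)   # JSON body plus a ms-epoch expiry 15 s out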
def cublasSspr(handle, uplo, n, alpha, x, incx, AP):
"""
Rank-1 operation on real symmetric-packed matrix.
"""
status = _libcublas.cublasSspr_v2(handle,
_CUBLAS_FILL_MODE[uplo], n,
ctypes.byref(ctypes.c_float(alpha)),
int(x), incx, int(AP))
cublasCheckStatus(status)
|
def function[cublasSspr, parameter[handle, uplo, n, alpha, x, incx, AP]]:
constant[
Rank-1 operation on real symmetric-packed matrix.
]
variable[status] assign[=] call[name[_libcublas].cublasSspr_v2, parameter[name[handle], call[name[_CUBLAS_FILL_MODE]][name[uplo]], name[n], call[name[ctypes].byref, parameter[call[name[ctypes].c_float, parameter[name[alpha]]]]], call[name[int], parameter[name[x]]], name[incx], call[name[int], parameter[name[AP]]]]]
call[name[cublasCheckStatus], parameter[name[status]]]
|
keyword[def] identifier[cublasSspr] ( identifier[handle] , identifier[uplo] , identifier[n] , identifier[alpha] , identifier[x] , identifier[incx] , identifier[AP] ):
literal[string]
identifier[status] = identifier[_libcublas] . identifier[cublasSspr_v2] ( identifier[handle] ,
identifier[_CUBLAS_FILL_MODE] [ identifier[uplo] ], identifier[n] ,
identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_float] ( identifier[alpha] )),
identifier[int] ( identifier[x] ), identifier[incx] , identifier[int] ( identifier[AP] ))
identifier[cublasCheckStatus] ( identifier[status] )
|
def cublasSspr(handle, uplo, n, alpha, x, incx, AP):
"""
Rank-1 operation on real symmetric-packed matrix.
"""
status = _libcublas.cublasSspr_v2(handle, _CUBLAS_FILL_MODE[uplo], n, ctypes.byref(ctypes.c_float(alpha)), int(x), incx, int(AP))
cublasCheckStatus(status)
|
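A hedged end-to-end sketch, assuming the wrapper above is scikit-cuda's skcuda.cublas.cublasSspr; it needs an NVIDIA GPU plus pycuda and scikit-cuda installed. SSPR performs AP := alpha * x * x^T + AP on a symmetric matrix in packed storage.

import numpy as np
import pycuda.autoinit                 # noqa: F401 -- creates a CUDA context
import pycuda.gpuarray as gpuarray
from skcuda import cublas

n = 3
x = np.random.rand(n).astype(np.float32)
AP = np.zeros(n * (n + 1) // 2, dtype=np.float32)   # packed lower triangle
x_gpu, AP_gpu = gpuarray.to_gpu(x), gpuarray.to_gpu(AP)

handle = cublas.cublasCreate()
cublas.cublasSspr(handle, 'l', n, 1.0, x_gpu.gpudata, 1, AP_gpu.gpudata)
cublas.cublasDestroy(handle)
print(AP_gpu.get())                    # x * x^T in packed form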
def set_panel_label(label, ax=None, x=0.05, y=0.9):
"""Add a panel label to the figure/axes, by default in the top-left corner
Parameters
----------
label : str
text to be added as panel label
ax : matplotlib.Axes, optional
panel to which to add the panel label
x : number, default 0.05
relative location of label to x-axis
y : number, default 0.9
relative location of label to y-axis
"""
def _lim_loc(lim, loc):
return lim[0] + (lim[1] - lim[0]) * loc
if ax is not None:
ax.text(_lim_loc(ax.get_xlim(), x), _lim_loc(ax.get_ylim(), y), label)
else:
plt.text(_lim_loc(plt.xlim(), x), _lim_loc(plt.ylim(), y), label)
|
def function[set_panel_label, parameter[label, ax, x, y]]:
constant[Add a panel label to the figure/axes, by default in the top-left corner
Parameters
----------
label : str
text to be added as panel label
ax : matplotlib.Axes, optional
panel to which to add the panel label
x : number, default 0.05
relative location of label to x-axis
y : number, default 0.9
relative location of label to y-axis
]
def function[_lim_loc, parameter[lim, loc]]:
return[binary_operation[call[name[lim]][constant[0]] + binary_operation[binary_operation[call[name[lim]][constant[1]] - call[name[lim]][constant[0]]] * name[loc]]]]
if compare[name[ax] is_not constant[None]] begin[:]
call[name[ax].text, parameter[call[name[_lim_loc], parameter[call[name[ax].get_xlim, parameter[]], name[x]]], call[name[_lim_loc], parameter[call[name[ax].get_ylim, parameter[]], name[y]]], name[label]]]
|
keyword[def] identifier[set_panel_label] ( identifier[label] , identifier[ax] = keyword[None] , identifier[x] = literal[int] , identifier[y] = literal[int] ):
literal[string]
keyword[def] identifier[_lim_loc] ( identifier[lim] , identifier[loc] ):
keyword[return] identifier[lim] [ literal[int] ]+( identifier[lim] [ literal[int] ]- identifier[lim] [ literal[int] ])* identifier[loc]
keyword[if] identifier[ax] keyword[is] keyword[not] keyword[None] :
identifier[ax] . identifier[text] ( identifier[_lim_loc] ( identifier[ax] . identifier[get_xlim] (), identifier[x] ), identifier[_lim_loc] ( identifier[ax] . identifier[get_ylim] (), identifier[y] ), identifier[label] )
keyword[else] :
identifier[plt] . identifier[text] ( identifier[_lim_loc] ( identifier[plt] . identifier[xlim] (), identifier[x] ), identifier[_lim_loc] ( identifier[plt] . identifier[ylim] (), identifier[y] ), identifier[label] )
|
def set_panel_label(label, ax=None, x=0.05, y=0.9):
"""Add a panel label to the figure/axes, by default in the top-left corner
Parameters
----------
label : str
text to be added as panel label
ax : matplotlib.Axes, optional
panel to which to add the panel label
x : number, default 0.05
relative location of label to x-axis
y : number, default 0.9
relative location of label to y-axis
"""
def _lim_loc(lim, loc):
return lim[0] + (lim[1] - lim[0]) * loc
if ax is not None:
ax.text(_lim_loc(ax.get_xlim(), x), _lim_loc(ax.get_ylim(), y), label) # depends on [control=['if'], data=['ax']]
else:
plt.text(_lim_loc(plt.xlim(), x), _lim_loc(plt.ylim(), y), label)
|
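Typical use on a grid of panels; the defaults put each label near the top-left corner of its axes.

import matplotlib.pyplot as plt

fig, axes = plt.subplots(1, 2)
for ax, label in zip(axes, ('(a)', '(b)')):
    ax.plot([0, 1], [0, 1])
    set_panel_label(label, ax=ax)
plt.show()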
def _parse_ND_at_response(self, packet_info):
"""
If the given packet is a successful AT response for an ND
command, parse the parameter field.
"""
if packet_info['id'] == 'at_response' and \
packet_info['command'].lower() == b'nd' and \
packet_info['status'] == b'\x00':
result = {}
# Parse each field directly
result['source_addr'] = packet_info['parameter'][0:2]
result['source_addr_long'] = packet_info['parameter'][2:10]
# Parse the null-terminated node identifier field
null_terminator_index = 10
while packet_info['parameter'][null_terminator_index:
null_terminator_index+1] != b'\x00':
null_terminator_index += 1
# Parse each field thereafter directly
result['node_identifier'] = \
packet_info['parameter'][10:null_terminator_index]
result['parent_address'] = \
packet_info['parameter'][null_terminator_index+1:
null_terminator_index+3]
result['device_type'] = \
packet_info['parameter'][null_terminator_index+3:
null_terminator_index+4]
result['status'] = \
packet_info['parameter'][null_terminator_index+4:
null_terminator_index+5]
result['profile_id'] = \
packet_info['parameter'][null_terminator_index+5:
null_terminator_index+7]
result['manufacturer'] = \
packet_info['parameter'][null_terminator_index+7:
null_terminator_index+9]
# Simple check to ensure a good parse
if null_terminator_index+9 != len(packet_info['parameter']):
raise ValueError("Improper ND response length: expected {0}, "
"read {1} bytes".format(
len(packet_info['parameter']),
null_terminator_index+9)
)
return result
else:
return packet_info['parameter']
|
def function[_parse_ND_at_response, parameter[self, packet_info]]:
constant[
If the given packet is a successful AT response for an ND
command, parse the parameter field.
]
if <ast.BoolOp object at 0x7da1b1b86a70> begin[:]
variable[result] assign[=] dictionary[[], []]
call[name[result]][constant[source_addr]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b855a0>]
call[name[result]][constant[source_addr_long]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b86080>]
variable[null_terminator_index] assign[=] constant[10]
while compare[call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b841c0>] not_equal[!=] constant[b'\x00']] begin[:]
<ast.AugAssign object at 0x7da1b1b84430>
call[name[result]][constant[node_identifier]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b876a0>]
call[name[result]][constant[parent_address]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b853f0>]
call[name[result]][constant[device_type]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b84850>]
call[name[result]][constant[status]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b85990>]
call[name[result]][constant[profile_id]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b84070>]
call[name[result]][constant[manufacturer]] assign[=] call[call[name[packet_info]][constant[parameter]]][<ast.Slice object at 0x7da1b1b85090>]
if compare[binary_operation[name[null_terminator_index] + constant[9]] not_equal[!=] call[name[len], parameter[call[name[packet_info]][constant[parameter]]]]] begin[:]
<ast.Raise object at 0x7da1b1b87be0>
return[name[result]]
|
keyword[def] identifier[_parse_ND_at_response] ( identifier[self] , identifier[packet_info] ):
literal[string]
keyword[if] identifier[packet_info] [ literal[string] ]== literal[string] keyword[and] identifier[packet_info] [ literal[string] ]. identifier[lower] ()== literal[string] keyword[and] identifier[packet_info] [ literal[string] ]== literal[string] :
identifier[result] ={}
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ literal[int] : literal[int] ]
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ literal[int] : literal[int] ]
identifier[null_terminator_index] = literal[int]
keyword[while] identifier[packet_info] [ literal[string] ][ identifier[null_terminator_index] :
identifier[null_terminator_index] + literal[int] ]!= literal[string] :
identifier[null_terminator_index] += literal[int]
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ literal[int] : identifier[null_terminator_index] ]
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ identifier[null_terminator_index] + literal[int] :
identifier[null_terminator_index] + literal[int] ]
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ identifier[null_terminator_index] + literal[int] :
identifier[null_terminator_index] + literal[int] ]
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ identifier[null_terminator_index] + literal[int] :
identifier[null_terminator_index] + literal[int] ]
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ identifier[null_terminator_index] + literal[int] :
identifier[null_terminator_index] + literal[int] ]
identifier[result] [ literal[string] ]= identifier[packet_info] [ literal[string] ][ identifier[null_terminator_index] + literal[int] :
identifier[null_terminator_index] + literal[int] ]
keyword[if] identifier[null_terminator_index] + literal[int] != identifier[len] ( identifier[packet_info] [ literal[string] ]):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] (
identifier[len] ( identifier[packet_info] [ literal[string] ]),
identifier[null_terminator_index] + literal[int] )
)
keyword[return] identifier[result]
keyword[else] :
keyword[return] identifier[packet_info] [ literal[string] ]
|
def _parse_ND_at_response(self, packet_info):
"""
If the given packet is a successful AT response for an ND
command, parse the parameter field.
"""
if packet_info['id'] == 'at_response' and packet_info['command'].lower() == b'nd' and (packet_info['status'] == b'\x00'):
result = {}
# Parse each field directly
result['source_addr'] = packet_info['parameter'][0:2]
result['source_addr_long'] = packet_info['parameter'][2:10]
# Parse the null-terminated node identifier field
null_terminator_index = 10
while packet_info['parameter'][null_terminator_index:null_terminator_index + 1] != b'\x00':
null_terminator_index += 1 # depends on [control=['while'], data=[]]
# Parse each field thereafter directly
result['node_identifier'] = packet_info['parameter'][10:null_terminator_index]
result['parent_address'] = packet_info['parameter'][null_terminator_index + 1:null_terminator_index + 3]
result['device_type'] = packet_info['parameter'][null_terminator_index + 3:null_terminator_index + 4]
result['status'] = packet_info['parameter'][null_terminator_index + 4:null_terminator_index + 5]
result['profile_id'] = packet_info['parameter'][null_terminator_index + 5:null_terminator_index + 7]
result['manufacturer'] = packet_info['parameter'][null_terminator_index + 7:null_terminator_index + 9]
# Simple check to ensure a good parse
if null_terminator_index + 9 != len(packet_info['parameter']):
raise ValueError('Improper ND response length: expected {0}, read {1} bytes'.format(len(packet_info['parameter']), null_terminator_index + 9)) # depends on [control=['if'], data=[]]
return result # depends on [control=['if'], data=[]]
else:
return packet_info['parameter']
|
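A synthetic ND response exercising the parse. Field widths follow the slicing above; device is a hypothetical instance of whatever class owns the method (an XBee API object in the original library).

param = (b'\x12\x34'                            # 16-bit source address
         + b'\x00\x13\xa2\x00\x40\x01\x02\x03'  # 64-bit source address
         + b'NODE1\x00'                         # null-terminated node identifier
         + b'\xff\xfe'                          # parent address
         + b'\x01'                              # device type
         + b'\x00'                              # status
         + b'\xc1\x05'                          # profile id
         + b'\x10\x1e')                         # manufacturer id
packet = {'id': 'at_response', 'command': b'ND',
          'status': b'\x00', 'parameter': param}

result = device._parse_ND_at_response(packet)
assert result['node_identifier'] == b'NODE1'
assert result['source_addr_long'] == b'\x00\x13\xa2\x00\x40\x01\x02\x03'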
def process_pkcs7(self, data, name):
"""
Process PKCS7 signature with certificate in it.
:param data:
:param name:
:return:
"""
from cryptography.hazmat.backends.openssl.backend import backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
# DER conversion
is_pem = startswith(data, '-----')
if self.re_match(r'^[a-zA-Z0-9-\s+=/]+$', data):
is_pem = True
try:
der = data
if is_pem:
data = data.decode('utf8')
data = re.sub(r'\s*-----\s*BEGIN\s+PKCS7\s*-----', '', data)
data = re.sub(r'\s*-----\s*END\s+PKCS7\s*-----', '', data)
der = base64.b64decode(data)
bio = backend._bytes_to_bio(der)
pkcs7 = backend._lib.d2i_PKCS7_bio(bio.bio, backend._ffi.NULL)
backend.openssl_assert(pkcs7 != backend._ffi.NULL)
signers = backend._lib.PKCS7_get0_signers(pkcs7, backend._ffi.NULL, 0)
backend.openssl_assert(signers != backend._ffi.NULL)
backend.openssl_assert(backend._lib.sk_X509_num(signers) > 0)
x509_ptr = backend._lib.sk_X509_value(signers, 0)
backend.openssl_assert(x509_ptr != backend._ffi.NULL)
x509_ptr = backend._ffi.gc(x509_ptr, backend._lib.X509_free)
x509 = _Certificate(backend, x509_ptr)
self.num_pkcs7_cert += 1
return [self.process_x509(x509, name=name, pem=False, source='pkcs7-cert', aux='')]
except Exception as e:
logger.debug('Error in PKCS7 processing %s: %s' % (name, e))
self.trace_logger.log(e)
|
def function[process_pkcs7, parameter[self, data, name]]:
constant[
Process PKCS7 signature with certificate in it.
:param data:
:param name:
:return:
]
from relative_module[cryptography.hazmat.backends.openssl.backend] import module[backend]
from relative_module[cryptography.hazmat.backends.openssl.x509] import module[_Certificate]
variable[is_pem] assign[=] call[name[startswith], parameter[name[data], constant[-----]]]
if call[name[self].re_match, parameter[constant[^[a-zA-Z0-9-\s+=/]+$], name[data]]] begin[:]
variable[is_pem] assign[=] constant[True]
<ast.Try object at 0x7da20c7c9690>
|
keyword[def] identifier[process_pkcs7] ( identifier[self] , identifier[data] , identifier[name] ):
literal[string]
keyword[from] identifier[cryptography] . identifier[hazmat] . identifier[backends] . identifier[openssl] . identifier[backend] keyword[import] identifier[backend]
keyword[from] identifier[cryptography] . identifier[hazmat] . identifier[backends] . identifier[openssl] . identifier[x509] keyword[import] identifier[_Certificate]
identifier[is_pem] = identifier[startswith] ( identifier[data] , literal[string] )
keyword[if] identifier[self] . identifier[re_match] ( literal[string] , identifier[data] ):
identifier[is_pem] = keyword[True]
keyword[try] :
identifier[der] = identifier[data]
keyword[if] identifier[is_pem] :
identifier[data] = identifier[data] . identifier[decode] ( literal[string] )
identifier[data] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[data] )
identifier[data] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[data] )
identifier[der] = identifier[base64] . identifier[b64decode] ( identifier[data] )
identifier[bio] = identifier[backend] . identifier[_bytes_to_bio] ( identifier[der] )
identifier[pkcs7] = identifier[backend] . identifier[_lib] . identifier[d2i_PKCS7_bio] ( identifier[bio] . identifier[bio] , identifier[backend] . identifier[_ffi] . identifier[NULL] )
identifier[backend] . identifier[openssl_assert] ( identifier[pkcs7] != identifier[backend] . identifier[_ffi] . identifier[NULL] )
identifier[signers] = identifier[backend] . identifier[_lib] . identifier[PKCS7_get0_signers] ( identifier[pkcs7] , identifier[backend] . identifier[_ffi] . identifier[NULL] , literal[int] )
identifier[backend] . identifier[openssl_assert] ( identifier[signers] != identifier[backend] . identifier[_ffi] . identifier[NULL] )
identifier[backend] . identifier[openssl_assert] ( identifier[backend] . identifier[_lib] . identifier[sk_X509_num] ( identifier[signers] )> literal[int] )
identifier[x509_ptr] = identifier[backend] . identifier[_lib] . identifier[sk_X509_value] ( identifier[signers] , literal[int] )
identifier[backend] . identifier[openssl_assert] ( identifier[x509_ptr] != identifier[backend] . identifier[_ffi] . identifier[NULL] )
identifier[x509_ptr] = identifier[backend] . identifier[_ffi] . identifier[gc] ( identifier[x509_ptr] , identifier[backend] . identifier[_lib] . identifier[X509_free] )
identifier[x509] = identifier[_Certificate] ( identifier[backend] , identifier[x509_ptr] )
identifier[self] . identifier[num_pkcs7_cert] += literal[int]
keyword[return] [ identifier[self] . identifier[process_x509] ( identifier[x509] , identifier[name] = identifier[name] , identifier[pem] = keyword[False] , identifier[source] = literal[string] , identifier[aux] = literal[string] )]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger] . identifier[debug] ( literal[string] %( identifier[name] , identifier[e] ))
identifier[self] . identifier[trace_logger] . identifier[log] ( identifier[e] )
|
def process_pkcs7(self, data, name):
"""
Process PKCS7 signature with certificate in it.
:param data:
:param name:
:return:
"""
from cryptography.hazmat.backends.openssl.backend import backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
# DER conversion
is_pem = startswith(data, '-----')
if self.re_match('^[a-zA-Z0-9-\\s+=/]+$', data):
is_pem = True # depends on [control=['if'], data=[]]
try:
der = data
if is_pem:
data = data.decode('utf8')
data = re.sub('\\s*-----\\s*BEGIN\\s+PKCS7\\s*-----', '', data)
data = re.sub('\\s*-----\\s*END\\s+PKCS7\\s*-----', '', data)
der = base64.b64decode(data) # depends on [control=['if'], data=[]]
bio = backend._bytes_to_bio(der)
pkcs7 = backend._lib.d2i_PKCS7_bio(bio.bio, backend._ffi.NULL)
backend.openssl_assert(pkcs7 != backend._ffi.NULL)
signers = backend._lib.PKCS7_get0_signers(pkcs7, backend._ffi.NULL, 0)
backend.openssl_assert(signers != backend._ffi.NULL)
backend.openssl_assert(backend._lib.sk_X509_num(signers) > 0)
x509_ptr = backend._lib.sk_X509_value(signers, 0)
backend.openssl_assert(x509_ptr != backend._ffi.NULL)
x509_ptr = backend._ffi.gc(x509_ptr, backend._lib.X509_free)
x509 = _Certificate(backend, x509_ptr)
self.num_pkcs7_cert += 1
return [self.process_x509(x509, name=name, pem=False, source='pkcs7-cert', aux='')] # depends on [control=['try'], data=[]]
except Exception as e:
logger.debug('Error in PKCS7 processing %s: %s' % (name, e))
self.trace_logger.log(e) # depends on [control=['except'], data=['e']]
|
def format(self, record):
"""
Calls the standard formatter, but will indent all of the log messages
by our current indentation level.
"""
formatted = super(IndentingFormatter, self).format(record)
prefix = ''
if self.add_timestamp:
prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ")
prefix += " " * get_indentation()
formatted = "".join([
prefix + line
for line in formatted.splitlines(True)
])
return formatted
|
def function[format, parameter[self, record]]:
constant[
Calls the standard formatter, but will indent all of the log messages
by our current indentation level.
]
variable[formatted] assign[=] call[call[name[super], parameter[name[IndentingFormatter], name[self]]].format, parameter[name[record]]]
variable[prefix] assign[=] constant[]
if name[self].add_timestamp begin[:]
variable[prefix] assign[=] call[name[self].formatTime, parameter[name[record], constant[%Y-%m-%dT%H:%M:%S ]]]
<ast.AugAssign object at 0x7da18bcc87c0>
variable[formatted] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da18bccb4c0>]]
return[name[formatted]]
|
keyword[def] identifier[format] ( identifier[self] , identifier[record] ):
literal[string]
identifier[formatted] = identifier[super] ( identifier[IndentingFormatter] , identifier[self] ). identifier[format] ( identifier[record] )
identifier[prefix] = literal[string]
keyword[if] identifier[self] . identifier[add_timestamp] :
identifier[prefix] = identifier[self] . identifier[formatTime] ( identifier[record] , literal[string] )
identifier[prefix] += literal[string] * identifier[get_indentation] ()
identifier[formatted] = literal[string] . identifier[join] ([
identifier[prefix] + identifier[line]
keyword[for] identifier[line] keyword[in] identifier[formatted] . identifier[splitlines] ( keyword[True] )
])
keyword[return] identifier[formatted]
|
def format(self, record):
"""
Calls the standard formatter, but will indent all of the log messages
by our current indentation level.
"""
formatted = super(IndentingFormatter, self).format(record)
prefix = ''
if self.add_timestamp:
prefix = self.formatTime(record, '%Y-%m-%dT%H:%M:%S ') # depends on [control=['if'], data=[]]
prefix += ' ' * get_indentation()
formatted = ''.join([prefix + line for line in formatted.splitlines(True)])
return formatted
|
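A sketch of the indentation behavior. get_indentation() belongs to the surrounding module (this looks like pip's logging utilities), so it is stubbed here, and add_timestamp is set manually because the class's __init__ is not part of this row.

import logging

def get_indentation():                 # stub for the module-level helper
    return 2

fmt = IndentingFormatter('%(message)s')
fmt.add_timestamp = False
record = logging.LogRecord('demo', logging.INFO, __file__, 1,
                           'first line\nsecond line', None, None)
print(fmt.format(record))
# prints each line indented by two spaces:
#   first line
#   second line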
def ranked_attributes(self):
"""
Returns the matrix of ranked attributes from the last run.
:return: the Numpy matrix
:rtype: ndarray
"""
matrix = javabridge.call(self.jobject, "rankedAttributes", "()[[D")
if matrix is None:
return None
else:
return arrays.double_matrix_to_ndarray(matrix)
|
def function[ranked_attributes, parameter[self]]:
constant[
Returns the matrix of ranked attributes from the last run.
:return: the Numpy matrix
:rtype: ndarray
]
variable[matrix] assign[=] call[name[javabridge].call, parameter[name[self].jobject, constant[rankedAttributes], constant[()[[D]]]
if compare[name[matrix] is constant[None]] begin[:]
return[constant[None]]
|
keyword[def] identifier[ranked_attributes] ( identifier[self] ):
literal[string]
identifier[matrix] = identifier[javabridge] . identifier[call] ( identifier[self] . identifier[jobject] , literal[string] , literal[string] )
keyword[if] identifier[matrix] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[arrays] . identifier[double_matrix_to_ndarray] ( identifier[matrix] )
|
def ranked_attributes(self):
"""
Returns the matrix of ranked attributes from the last run.
:return: the Numpy matrix
:rtype: ndarray
"""
matrix = javabridge.call(self.jobject, 'rankedAttributes', '()[[D')
if matrix is None:
return None # depends on [control=['if'], data=[]]
else:
return arrays.double_matrix_to_ndarray(matrix)
|
def setup(self):
"""Continue the run process blocking on MasterControlProgram.run"""
# If the app was invoked to specified to prepend the path, do so now
if self.args.prepend_path:
self._prepend_python_path(self.args.prepend_path)
|
def function[setup, parameter[self]]:
constant[Continue the run process blocking on MasterControlProgram.run]
if name[self].args.prepend_path begin[:]
call[name[self]._prepend_python_path, parameter[name[self].args.prepend_path]]
|
keyword[def] identifier[setup] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[args] . identifier[prepend_path] :
identifier[self] . identifier[_prepend_python_path] ( identifier[self] . identifier[args] . identifier[prepend_path] )
|
def setup(self):
"""Continue the run process blocking on MasterControlProgram.run"""
# If the app was invoked to specified to prepend the path, do so now
if self.args.prepend_path:
self._prepend_python_path(self.args.prepend_path) # depends on [control=['if'], data=[]]
|
def _makeService(self):
"""
Construct a service for the endpoint as described.
"""
if self._endpointService is None:
_service = service
else:
_service = self._endpointService
return _service(
self.description.encode('ascii'), self.factory.getFactory())
|
def function[_makeService, parameter[self]]:
constant[
Construct a service for the endpoint as described.
]
if compare[name[self]._endpointService is constant[None]] begin[:]
variable[_service] assign[=] name[service]
return[call[name[_service], parameter[call[name[self].description.encode, parameter[constant[ascii]]], call[name[self].factory.getFactory, parameter[]]]]]
|
keyword[def] identifier[_makeService] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_endpointService] keyword[is] keyword[None] :
identifier[_service] = identifier[service]
keyword[else] :
identifier[_service] = identifier[self] . identifier[_endpointService]
keyword[return] identifier[_service] (
identifier[self] . identifier[description] . identifier[encode] ( literal[string] ), identifier[self] . identifier[factory] . identifier[getFactory] ())
|
def _makeService(self):
"""
Construct a service for the endpoint as described.
"""
if self._endpointService is None:
_service = service # depends on [control=['if'], data=[]]
else:
_service = self._endpointService
return _service(self.description.encode('ascii'), self.factory.getFactory())
|
def struct_types(self):
"""
Return an iterator over the struct types defined in
the module. The iterator will yield a TypeRef.
"""
it = ffi.lib.LLVMPY_ModuleTypesIter(self)
return _TypesIterator(it, dict(module=self))
|
def function[struct_types, parameter[self]]:
constant[
Return an iterator over the struct types defined in
the module. The iterator will yield a TypeRef.
]
variable[it] assign[=] call[name[ffi].lib.LLVMPY_ModuleTypesIter, parameter[name[self]]]
return[call[name[_TypesIterator], parameter[name[it], call[name[dict], parameter[]]]]]
|
keyword[def] identifier[struct_types] ( identifier[self] ):
literal[string]
identifier[it] = identifier[ffi] . identifier[lib] . identifier[LLVMPY_ModuleTypesIter] ( identifier[self] )
keyword[return] identifier[_TypesIterator] ( identifier[it] , identifier[dict] ( identifier[module] = identifier[self] ))
|
def struct_types(self):
"""
Return an iterator over the struct types defined in
the module. The iterator will yield a TypeRef.
"""
it = ffi.lib.LLVMPY_ModuleTypesIter(self)
return _TypesIterator(it, dict(module=self))
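
# A minimal sketch of the wrapper pattern above (not llvmlite's actual
# _TypesIterator class): the iterator keeps a reference to its owning module
# so the module outlives any in-flight iteration over the FFI handle.
class TypesIteratorSketch:
    def __init__(self, it, parents):
        self._it = it
        self._parents = parents  # e.g. dict(module=<Module>) pins the owner

    def __iter__(self):
        return self

    def __next__(self):
        return next(self._it)

it_demo = TypesIteratorSketch(iter(['%struct.Foo', '%struct.Bar']), dict(module=object()))
print(list(it_demo))  # ['%struct.Foo', '%struct.Bar']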
|
def lock(self):
'''Lock card with SCardBeginTransaction.'''
component = self.component
while True:
if isinstance(
component,
smartcard.pcsc.PCSCCardConnection.PCSCCardConnection):
hresult = SCardBeginTransaction(component.hcard)
if 0 != hresult:
raise CardConnectionException(
'Failed to lock with SCardBeginTransaction: ' +
SCardGetErrorMessage(hresult))
else:
# print('locked')
pass
break
if hasattr(component, 'component'):
component = component.component
else:
break
|
def function[lock, parameter[self]]:
constant[Lock card with SCardBeginTransaction.]
variable[component] assign[=] name[self].component
while constant[True] begin[:]
if call[name[isinstance], parameter[name[component], name[smartcard].pcsc.PCSCCardConnection.PCSCCardConnection]] begin[:]
variable[hresult] assign[=] call[name[SCardBeginTransaction], parameter[name[component].hcard]]
if compare[constant[0] not_equal[!=] name[hresult]] begin[:]
<ast.Raise object at 0x7da1b246d090>
break
if call[name[hasattr], parameter[name[component], constant[component]]] begin[:]
variable[component] assign[=] name[component].component
|
keyword[def] identifier[lock] ( identifier[self] ):
literal[string]
identifier[component] = identifier[self] . identifier[component]
keyword[while] keyword[True] :
keyword[if] identifier[isinstance] (
identifier[component] ,
identifier[smartcard] . identifier[pcsc] . identifier[PCSCCardConnection] . identifier[PCSCCardConnection] ):
identifier[hresult] = identifier[SCardBeginTransaction] ( identifier[component] . identifier[hcard] )
keyword[if] literal[int] != identifier[hresult] :
keyword[raise] identifier[CardConnectionException] (
literal[string] +
identifier[SCardGetErrorMessage] ( identifier[hresult] ))
keyword[else] :
keyword[pass]
keyword[break]
keyword[if] identifier[hasattr] ( identifier[component] , literal[string] ):
identifier[component] = identifier[component] . identifier[component]
keyword[else] :
keyword[break]
|
def lock(self):
"""Lock card with SCardBeginTransaction."""
component = self.component
while True:
if isinstance(component, smartcard.pcsc.PCSCCardConnection.PCSCCardConnection):
hresult = SCardBeginTransaction(component.hcard)
if 0 != hresult:
raise CardConnectionException('Failed to lock with SCardBeginTransaction: ' + SCardGetErrorMessage(hresult)) # depends on [control=['if'], data=['hresult']]
else:
# print('locked')
pass
break # depends on [control=['if'], data=[]]
if hasattr(component, 'component'):
component = component.component # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
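
# A toy model of the unwrap loop above; WrapperSketch and InnerConnection are
# invented stand-ins for smartcard's connection decorators, each of which
# exposes the next layer via a `component` attribute.
class WrapperSketch:
    def __init__(self, component):
        self.component = component

class InnerConnection:
    pass

component = WrapperSketch(WrapperSketch(InnerConnection()))
while True:
    if isinstance(component, InnerConnection):
        print('reached the innermost connection')
        break
    if hasattr(component, 'component'):
        component = component.component
    else:
        break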
|
def prepare_extensible(self):
"""
        This function finishes initialization; it must be called once all field descriptors and tags have been filled.
"""
# see if extensible and store cycle len
for k in self._tags:
if "extensible" in k:
cycle_len = int(k.split(":")[1])
break
else:
# not extensible
return
# find cycle start and prepare patterns
cycle_start = None
cycle_patterns = []
for i, field_descriptor in enumerate(self._field_descriptors):
# quit if finished
if (cycle_start is not None) and (i >= (cycle_start + cycle_len)):
break
# set cycle start if not set yet
if (cycle_start is None) and ("begin-extensible" in field_descriptor.tags):
cycle_start = i
# leave if cycle start not reached yet
if cycle_start is None:
continue
# store pattern
cycle_patterns.append(field_descriptor.ref.replace("1", r"(\d+)"))
else:
raise RuntimeError("cycle start not found")
# detach unnecessary field descriptors
self._field_descriptors = self._field_descriptors[:cycle_start + cycle_len]
# store cycle info
self.extensible_info = (cycle_start, cycle_len, tuple(cycle_patterns))
# set field descriptor cycle_start index (for error messages while serialization)
for i, fd in enumerate(self._field_descriptors[cycle_start:]):
fd.set_extensible_info(cycle_start, cycle_len, cycle_patterns[i])
|
def function[prepare_extensible, parameter[self]]:
constant[
    This function finishes initialization; it must be called once all field descriptors and tags have been filled.
]
for taget[name[k]] in starred[name[self]._tags] begin[:]
if compare[constant[extensible] in name[k]] begin[:]
variable[cycle_len] assign[=] call[name[int], parameter[call[call[name[k].split, parameter[constant[:]]]][constant[1]]]]
break
variable[cycle_start] assign[=] constant[None]
variable[cycle_patterns] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c795930>, <ast.Name object at 0x7da20c795d20>]]] in starred[call[name[enumerate], parameter[name[self]._field_descriptors]]] begin[:]
if <ast.BoolOp object at 0x7da20c7951e0> begin[:]
break
if <ast.BoolOp object at 0x7da20c795960> begin[:]
variable[cycle_start] assign[=] name[i]
if compare[name[cycle_start] is constant[None]] begin[:]
continue
call[name[cycle_patterns].append, parameter[call[name[field_descriptor].ref.replace, parameter[constant[1], constant[(\d+)]]]]]
name[self]._field_descriptors assign[=] call[name[self]._field_descriptors][<ast.Slice object at 0x7da20c796560>]
name[self].extensible_info assign[=] tuple[[<ast.Name object at 0x7da20c796470>, <ast.Name object at 0x7da20c794100>, <ast.Call object at 0x7da20c796830>]]
for taget[tuple[[<ast.Name object at 0x7da20c796440>, <ast.Name object at 0x7da1b02a7820>]]] in starred[call[name[enumerate], parameter[call[name[self]._field_descriptors][<ast.Slice object at 0x7da1b02a62f0>]]]] begin[:]
call[name[fd].set_extensible_info, parameter[name[cycle_start], name[cycle_len], call[name[cycle_patterns]][name[i]]]]
|
keyword[def] identifier[prepare_extensible] ( identifier[self] ):
literal[string]
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_tags] :
keyword[if] literal[string] keyword[in] identifier[k] :
identifier[cycle_len] = identifier[int] ( identifier[k] . identifier[split] ( literal[string] )[ literal[int] ])
keyword[break]
keyword[else] :
keyword[return]
identifier[cycle_start] = keyword[None]
identifier[cycle_patterns] =[]
keyword[for] identifier[i] , identifier[field_descriptor] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_field_descriptors] ):
keyword[if] ( identifier[cycle_start] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[i] >=( identifier[cycle_start] + identifier[cycle_len] )):
keyword[break]
keyword[if] ( identifier[cycle_start] keyword[is] keyword[None] ) keyword[and] ( literal[string] keyword[in] identifier[field_descriptor] . identifier[tags] ):
identifier[cycle_start] = identifier[i]
keyword[if] identifier[cycle_start] keyword[is] keyword[None] :
keyword[continue]
identifier[cycle_patterns] . identifier[append] ( identifier[field_descriptor] . identifier[ref] . identifier[replace] ( literal[string] , literal[string] ))
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[_field_descriptors] = identifier[self] . identifier[_field_descriptors] [: identifier[cycle_start] + identifier[cycle_len] ]
identifier[self] . identifier[extensible_info] =( identifier[cycle_start] , identifier[cycle_len] , identifier[tuple] ( identifier[cycle_patterns] ))
keyword[for] identifier[i] , identifier[fd] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_field_descriptors] [ identifier[cycle_start] :]):
identifier[fd] . identifier[set_extensible_info] ( identifier[cycle_start] , identifier[cycle_len] , identifier[cycle_patterns] [ identifier[i] ])
|
def prepare_extensible(self):
"""
        This function finishes initialization; it must be called once all field descriptors and tags have been filled.
"""
# see if extensible and store cycle len
for k in self._tags:
if 'extensible' in k:
cycle_len = int(k.split(':')[1])
break # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=['k']]
else:
# not extensible
return
# find cycle start and prepare patterns
cycle_start = None
cycle_patterns = []
for (i, field_descriptor) in enumerate(self._field_descriptors):
# quit if finished
if cycle_start is not None and i >= cycle_start + cycle_len:
break # depends on [control=['if'], data=[]]
# set cycle start if not set yet
if cycle_start is None and 'begin-extensible' in field_descriptor.tags:
cycle_start = i # depends on [control=['if'], data=[]]
# leave if cycle start not reached yet
if cycle_start is None:
continue # depends on [control=['if'], data=[]]
# store pattern
cycle_patterns.append(field_descriptor.ref.replace('1', '(\\d+)')) # depends on [control=['for'], data=[]]
else:
raise RuntimeError('cycle start not found')
# detach unnecessary field descriptors
self._field_descriptors = self._field_descriptors[:cycle_start + cycle_len]
# store cycle info
self.extensible_info = (cycle_start, cycle_len, tuple(cycle_patterns))
# set field descriptor cycle_start index (for error messages while serialization)
for (i, fd) in enumerate(self._field_descriptors[cycle_start:]):
fd.set_extensible_info(cycle_start, cycle_len, cycle_patterns[i]) # depends on [control=['for'], data=[]]
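
# A toy demonstration of the pattern trick above (the field ref is made up):
# replacing the "1" in a cycle-start ref yields a regex that matches the same
# field at any cycle index.
import re

ref = 'vertex_x_coordinate_1'
pattern = re.compile(ref.replace('1', r'(\d+)'))

for candidate in ['vertex_x_coordinate_1', 'vertex_x_coordinate_17', 'vertex_y_coordinate_2']:
    m = pattern.fullmatch(candidate)
    print(candidate, '->', m.group(1) if m else 'no match')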
|
def route_to_route_network(gtfs, walking_threshold, start_time, end_time):
"""
    Creates a networkx graph where the nodes are bus routes and an edge indicates that it is possible to transfer
    between the routes.
:param gtfs:
:param walking_threshold:
:param start_time:
:param end_time:
:return:
"""
graph = networkx.Graph()
routes = gtfs.get_table("routes")
for i in routes.itertuples():
graph.add_node(i.route_id, attr_dict={"type": i.type, "color": route_types.ROUTE_TYPE_TO_COLOR[i.type]})
query = """SELECT stop1.route_id AS route_id1, stop1.type, stop2.route_id AS route_id2, stop2.type FROM
(SELECT * FROM stop_distances WHERE d_walk < %s) sd,
(SELECT * FROM stop_times, trips, routes
WHERE stop_times.trip_I=trips.trip_I AND trips.route_I=routes.route_I
AND stop_times.dep_time_ds > %s AND stop_times.dep_time_ds < %s) stop1,
(SELECT * FROM stop_times, trips, routes
WHERE stop_times.trip_I=trips.trip_I AND trips.route_I=routes.route_I
AND stop_times.dep_time_ds > %s AND stop_times.dep_time_ds < %s) stop2
WHERE sd.from_stop_I = stop1.stop_I AND sd.to_stop_I = stop2.stop_I AND stop1.route_id != stop2.route_id
GROUP BY stop1.route_id, stop2.route_id""" % (walking_threshold, start_time, end_time, start_time,
end_time)
df = gtfs.execute_custom_query_pandas(query)
for items in df.itertuples():
graph.add_edge(items.route_id1, items.route_id2)
graph.remove_nodes_from(networkx.isolates(graph))
return graph
|
def function[route_to_route_network, parameter[gtfs, walking_threshold, start_time, end_time]]:
constant[
    Creates a networkx graph where the nodes are bus routes and an edge indicates that it is possible to transfer
    between the routes.
:param gtfs:
:param walking_threshold:
:param start_time:
:param end_time:
:return:
]
variable[graph] assign[=] call[name[networkx].Graph, parameter[]]
variable[routes] assign[=] call[name[gtfs].get_table, parameter[constant[routes]]]
for taget[name[i]] in starred[call[name[routes].itertuples, parameter[]]] begin[:]
call[name[graph].add_node, parameter[name[i].route_id]]
variable[query] assign[=] binary_operation[constant[SELECT stop1.route_id AS route_id1, stop1.type, stop2.route_id AS route_id2, stop2.type FROM
(SELECT * FROM stop_distances WHERE d_walk < %s) sd,
(SELECT * FROM stop_times, trips, routes
WHERE stop_times.trip_I=trips.trip_I AND trips.route_I=routes.route_I
AND stop_times.dep_time_ds > %s AND stop_times.dep_time_ds < %s) stop1,
(SELECT * FROM stop_times, trips, routes
WHERE stop_times.trip_I=trips.trip_I AND trips.route_I=routes.route_I
AND stop_times.dep_time_ds > %s AND stop_times.dep_time_ds < %s) stop2
WHERE sd.from_stop_I = stop1.stop_I AND sd.to_stop_I = stop2.stop_I AND stop1.route_id != stop2.route_id
GROUP BY stop1.route_id, stop2.route_id] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c9932e0>, <ast.Name object at 0x7da20c990160>, <ast.Name object at 0x7da20c993190>, <ast.Name object at 0x7da20c991c60>, <ast.Name object at 0x7da20c992ad0>]]]
variable[df] assign[=] call[name[gtfs].execute_custom_query_pandas, parameter[name[query]]]
for taget[name[items]] in starred[call[name[df].itertuples, parameter[]]] begin[:]
call[name[graph].add_edge, parameter[name[items].route_id1, name[items].route_id2]]
call[name[graph].remove_nodes_from, parameter[call[name[networkx].isolates, parameter[name[graph]]]]]
return[name[graph]]
|
keyword[def] identifier[route_to_route_network] ( identifier[gtfs] , identifier[walking_threshold] , identifier[start_time] , identifier[end_time] ):
literal[string]
identifier[graph] = identifier[networkx] . identifier[Graph] ()
identifier[routes] = identifier[gtfs] . identifier[get_table] ( literal[string] )
keyword[for] identifier[i] keyword[in] identifier[routes] . identifier[itertuples] ():
identifier[graph] . identifier[add_node] ( identifier[i] . identifier[route_id] , identifier[attr_dict] ={ literal[string] : identifier[i] . identifier[type] , literal[string] : identifier[route_types] . identifier[ROUTE_TYPE_TO_COLOR] [ identifier[i] . identifier[type] ]})
identifier[query] = literal[string] %( identifier[walking_threshold] , identifier[start_time] , identifier[end_time] , identifier[start_time] ,
identifier[end_time] )
identifier[df] = identifier[gtfs] . identifier[execute_custom_query_pandas] ( identifier[query] )
keyword[for] identifier[items] keyword[in] identifier[df] . identifier[itertuples] ():
identifier[graph] . identifier[add_edge] ( identifier[items] . identifier[route_id1] , identifier[items] . identifier[route_id2] )
identifier[graph] . identifier[remove_nodes_from] ( identifier[networkx] . identifier[isolates] ( identifier[graph] ))
keyword[return] identifier[graph]
|
def route_to_route_network(gtfs, walking_threshold, start_time, end_time):
"""
    Creates a networkx graph where the nodes are bus routes and an edge indicates that it is possible to transfer
    between the routes.
:param gtfs:
:param walking_threshold:
:param start_time:
:param end_time:
:return:
"""
graph = networkx.Graph()
routes = gtfs.get_table('routes')
for i in routes.itertuples():
graph.add_node(i.route_id, attr_dict={'type': i.type, 'color': route_types.ROUTE_TYPE_TO_COLOR[i.type]}) # depends on [control=['for'], data=['i']]
query = 'SELECT stop1.route_id AS route_id1, stop1.type, stop2.route_id AS route_id2, stop2.type FROM\n (SELECT * FROM stop_distances WHERE d_walk < %s) sd,\n (SELECT * FROM stop_times, trips, routes \n WHERE stop_times.trip_I=trips.trip_I AND trips.route_I=routes.route_I \n AND stop_times.dep_time_ds > %s AND stop_times.dep_time_ds < %s) stop1,\n (SELECT * FROM stop_times, trips, routes \n WHERE stop_times.trip_I=trips.trip_I AND trips.route_I=routes.route_I \n AND stop_times.dep_time_ds > %s AND stop_times.dep_time_ds < %s) stop2\n WHERE sd.from_stop_I = stop1.stop_I AND sd.to_stop_I = stop2.stop_I AND stop1.route_id != stop2.route_id\n GROUP BY stop1.route_id, stop2.route_id' % (walking_threshold, start_time, end_time, start_time, end_time)
df = gtfs.execute_custom_query_pandas(query)
for items in df.itertuples():
graph.add_edge(items.route_id1, items.route_id2) # depends on [control=['for'], data=['items']]
graph.remove_nodes_from(networkx.isolates(graph))
return graph
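
# A minimal sketch of the same construction with hand-made data; the route
# ids, types and transfer pairs are invented for illustration.
import networkx

toy = networkx.Graph()
for route_id, rtype in [('1A', 3), ('2B', 3), ('M1', 1), ('X9', 3)]:
    toy.add_node(route_id, type=rtype)
for r1, r2 in [('1A', '2B'), ('2B', 'M1')]:
    toy.add_edge(r1, r2)
# Materialize isolates() first: in networkx 2.x it is a lazy generator, and
# removing nodes while it runs would fail.
toy.remove_nodes_from(list(networkx.isolates(toy)))  # drops 'X9'
print(sorted(toy.nodes()))  # ['1A', '2B', 'M1']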
|
def future_datetime(self, end_date='+30d', tzinfo=None):
"""
        Get a DateTime object based on a random date between 1 second from now
        and a given date.
        Accepts date strings that can be recognized by strtotime().
        :param end_date: Defaults to "+30d"
:param tzinfo: timezone, instance of datetime.tzinfo subclass
:example DateTime('1999-02-02 11:42:52')
:return DateTime
"""
return self.date_time_between(
start_date='+1s', end_date=end_date, tzinfo=tzinfo,
)
|
def function[future_datetime, parameter[self, end_date, tzinfo]]:
constant[
    Get a DateTime object based on a random date between 1 second from now
    and a given date.
    Accepts date strings that can be recognized by strtotime().
    :param end_date: Defaults to "+30d"
:param tzinfo: timezone, instance of datetime.tzinfo subclass
:example DateTime('1999-02-02 11:42:52')
:return DateTime
]
return[call[name[self].date_time_between, parameter[]]]
|
keyword[def] identifier[future_datetime] ( identifier[self] , identifier[end_date] = literal[string] , identifier[tzinfo] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[date_time_between] (
identifier[start_date] = literal[string] , identifier[end_date] = identifier[end_date] , identifier[tzinfo] = identifier[tzinfo] ,
)
|
def future_datetime(self, end_date='+30d', tzinfo=None):
"""
        Get a DateTime object based on a random date between 1 second from now
        and a given date.
        Accepts date strings that can be recognized by strtotime().
        :param end_date: Defaults to "+30d"
:param tzinfo: timezone, instance of datetime.tzinfo subclass
:example DateTime('1999-02-02 11:42:52')
:return DateTime
"""
return self.date_time_between(start_date='+1s', end_date=end_date, tzinfo=tzinfo)
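
# Usage is a one-liner through a Faker instance; the printed value below is
# illustrative, since the draw is random.
from faker import Faker

fake = Faker()
dt = fake.future_datetime(end_date='+30d')  # strictly after "now", at most 30 days out
print(dt)  # e.g. datetime.datetime(2024, 5, 17, 9, 41, 3)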
|
def filter_results(source, results, aggressive):
"""Filter out spurious reports from pycodestyle.
If aggressive is True, we allow possibly unsafe fixes (E711, E712).
"""
non_docstring_string_line_numbers = multiline_string_lines(
source, include_docstrings=False)
all_string_line_numbers = multiline_string_lines(
source, include_docstrings=True)
commented_out_code_line_numbers = commented_out_code_lines(source)
has_e901 = any(result['id'].lower() == 'e901' for result in results)
for r in results:
issue_id = r['id'].lower()
if r['line'] in non_docstring_string_line_numbers:
if issue_id.startswith(('e1', 'e501', 'w191')):
continue
if r['line'] in all_string_line_numbers:
if issue_id in ['e501']:
continue
# We must offset by 1 for lines that contain the trailing contents of
# multiline strings.
if not aggressive and (r['line'] + 1) in all_string_line_numbers:
            # Do not modify multiline strings in non-aggressive mode.
            # Removing trailing whitespace could break doctests.
if issue_id.startswith(('w29', 'w39')):
continue
if aggressive <= 0:
if issue_id.startswith(('e711', 'e72', 'w6')):
continue
if aggressive <= 1:
if issue_id.startswith(('e712', 'e713', 'e714')):
continue
if aggressive <= 2:
            if issue_id.startswith('e704'):
continue
if r['line'] in commented_out_code_line_numbers:
if issue_id.startswith(('e26', 'e501')):
continue
# Do not touch indentation if there is a token error caused by
# incomplete multi-line statement. Otherwise, we risk screwing up the
# indentation.
if has_e901:
if issue_id.startswith(('e1', 'e7')):
continue
yield r
|
def function[filter_results, parameter[source, results, aggressive]]:
constant[Filter out spurious reports from pycodestyle.
If aggressive is True, we allow possibly unsafe fixes (E711, E712).
]
variable[non_docstring_string_line_numbers] assign[=] call[name[multiline_string_lines], parameter[name[source]]]
variable[all_string_line_numbers] assign[=] call[name[multiline_string_lines], parameter[name[source]]]
variable[commented_out_code_line_numbers] assign[=] call[name[commented_out_code_lines], parameter[name[source]]]
variable[has_e901] assign[=] call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b21dba60>]]
for taget[name[r]] in starred[name[results]] begin[:]
variable[issue_id] assign[=] call[call[name[r]][constant[id]].lower, parameter[]]
if compare[call[name[r]][constant[line]] in name[non_docstring_string_line_numbers]] begin[:]
if call[name[issue_id].startswith, parameter[tuple[[<ast.Constant object at 0x7da1b21daa10>, <ast.Constant object at 0x7da1b21da860>, <ast.Constant object at 0x7da1b21dbe80>]]]] begin[:]
continue
if compare[call[name[r]][constant[line]] in name[all_string_line_numbers]] begin[:]
if compare[name[issue_id] in list[[<ast.Constant object at 0x7da1b21db5e0>]]] begin[:]
continue
if <ast.BoolOp object at 0x7da1b21db880> begin[:]
if call[name[issue_id].startswith, parameter[tuple[[<ast.Constant object at 0x7da1b1b12710>, <ast.Constant object at 0x7da1b1b12d40>]]]] begin[:]
continue
if compare[name[aggressive] less_or_equal[<=] constant[0]] begin[:]
if call[name[issue_id].startswith, parameter[tuple[[<ast.Constant object at 0x7da1b1b13010>, <ast.Constant object at 0x7da1b1b12590>, <ast.Constant object at 0x7da1b1b13eb0>]]]] begin[:]
continue
if compare[name[aggressive] less_or_equal[<=] constant[1]] begin[:]
if call[name[issue_id].startswith, parameter[tuple[[<ast.Constant object at 0x7da1b1b11000>, <ast.Constant object at 0x7da1b1b12980>, <ast.Constant object at 0x7da1b1b13370>]]]] begin[:]
continue
if compare[name[aggressive] less_or_equal[<=] constant[2]] begin[:]
if call[name[issue_id].startswith, parameter[constant[e704]]] begin[:]
continue
if compare[call[name[r]][constant[line]] in name[commented_out_code_line_numbers]] begin[:]
if call[name[issue_id].startswith, parameter[tuple[[<ast.Constant object at 0x7da1b1b10760>, <ast.Constant object at 0x7da1b1b10700>]]]] begin[:]
continue
if name[has_e901] begin[:]
if call[name[issue_id].startswith, parameter[tuple[[<ast.Constant object at 0x7da2044c2d40>, <ast.Constant object at 0x7da2044c32b0>]]]] begin[:]
continue
<ast.Yield object at 0x7da2044c2da0>
|
keyword[def] identifier[filter_results] ( identifier[source] , identifier[results] , identifier[aggressive] ):
literal[string]
identifier[non_docstring_string_line_numbers] = identifier[multiline_string_lines] (
identifier[source] , identifier[include_docstrings] = keyword[False] )
identifier[all_string_line_numbers] = identifier[multiline_string_lines] (
identifier[source] , identifier[include_docstrings] = keyword[True] )
identifier[commented_out_code_line_numbers] = identifier[commented_out_code_lines] ( identifier[source] )
identifier[has_e901] = identifier[any] ( identifier[result] [ literal[string] ]. identifier[lower] ()== literal[string] keyword[for] identifier[result] keyword[in] identifier[results] )
keyword[for] identifier[r] keyword[in] identifier[results] :
identifier[issue_id] = identifier[r] [ literal[string] ]. identifier[lower] ()
keyword[if] identifier[r] [ literal[string] ] keyword[in] identifier[non_docstring_string_line_numbers] :
keyword[if] identifier[issue_id] . identifier[startswith] (( literal[string] , literal[string] , literal[string] )):
keyword[continue]
keyword[if] identifier[r] [ literal[string] ] keyword[in] identifier[all_string_line_numbers] :
keyword[if] identifier[issue_id] keyword[in] [ literal[string] ]:
keyword[continue]
keyword[if] keyword[not] identifier[aggressive] keyword[and] ( identifier[r] [ literal[string] ]+ literal[int] ) keyword[in] identifier[all_string_line_numbers] :
keyword[if] identifier[issue_id] . identifier[startswith] (( literal[string] , literal[string] )):
keyword[continue]
keyword[if] identifier[aggressive] <= literal[int] :
keyword[if] identifier[issue_id] . identifier[startswith] (( literal[string] , literal[string] , literal[string] )):
keyword[continue]
keyword[if] identifier[aggressive] <= literal[int] :
keyword[if] identifier[issue_id] . identifier[startswith] (( literal[string] , literal[string] , literal[string] )):
keyword[continue]
keyword[if] identifier[aggressive] <= literal[int] :
keyword[if] identifier[issue_id] . identifier[startswith] (( literal[string] )):
keyword[continue]
keyword[if] identifier[r] [ literal[string] ] keyword[in] identifier[commented_out_code_line_numbers] :
keyword[if] identifier[issue_id] . identifier[startswith] (( literal[string] , literal[string] )):
keyword[continue]
keyword[if] identifier[has_e901] :
keyword[if] identifier[issue_id] . identifier[startswith] (( literal[string] , literal[string] )):
keyword[continue]
keyword[yield] identifier[r]
|
def filter_results(source, results, aggressive):
"""Filter out spurious reports from pycodestyle.
If aggressive is True, we allow possibly unsafe fixes (E711, E712).
"""
non_docstring_string_line_numbers = multiline_string_lines(source, include_docstrings=False)
all_string_line_numbers = multiline_string_lines(source, include_docstrings=True)
commented_out_code_line_numbers = commented_out_code_lines(source)
has_e901 = any((result['id'].lower() == 'e901' for result in results))
for r in results:
issue_id = r['id'].lower()
if r['line'] in non_docstring_string_line_numbers:
if issue_id.startswith(('e1', 'e501', 'w191')):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if r['line'] in all_string_line_numbers:
if issue_id in ['e501']:
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# We must offset by 1 for lines that contain the trailing contents of
# multiline strings.
if not aggressive and r['line'] + 1 in all_string_line_numbers:
            # Do not modify multiline strings in non-aggressive mode.
            # Removing trailing whitespace could break doctests.
if issue_id.startswith(('w29', 'w39')):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if aggressive <= 0:
if issue_id.startswith(('e711', 'e72', 'w6')):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if aggressive <= 1:
if issue_id.startswith(('e712', 'e713', 'e714')):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if aggressive <= 2:
if issue_id.startswith('e704'):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if r['line'] in commented_out_code_line_numbers:
if issue_id.startswith(('e26', 'e501')):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Do not touch indentation if there is a token error caused by
# incomplete multi-line statement. Otherwise, we risk screwing up the
# indentation.
if has_e901:
if issue_id.startswith(('e1', 'e7')):
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
yield r # depends on [control=['for'], data=['r']]
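
# A simplified, self-contained stand-in for the multiline_string_lines helper
# relied on above: it collects every line number occupied by a string literal
# that spans more than one line (docstrings included).
import io
import tokenize

def multiline_string_lines_sketch(source):
    lines = set()
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.STRING and tok.start[0] != tok.end[0]:
            lines.update(range(tok.start[0], tok.end[0] + 1))
    return lines

src = 'x = 1\ns = """\nspans lines\n"""\n'
print(sorted(multiline_string_lines_sketch(src)))  # [2, 3, 4]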
|
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
|
def function[i2c_slave_last_transmit_size, parameter[self]]:
constant[Returns the number of bytes transmitted by the slave.]
variable[ret] assign[=] call[name[api].py_aa_i2c_slave_write_stats, parameter[name[self].handle]]
call[name[_raise_error_if_negative], parameter[name[ret]]]
return[name[ret]]
|
keyword[def] identifier[i2c_slave_last_transmit_size] ( identifier[self] ):
literal[string]
identifier[ret] = identifier[api] . identifier[py_aa_i2c_slave_write_stats] ( identifier[self] . identifier[handle] )
identifier[_raise_error_if_negative] ( identifier[ret] )
keyword[return] identifier[ret]
|
def i2c_slave_last_transmit_size(self):
"""Returns the number of bytes transmitted by the slave."""
ret = api.py_aa_i2c_slave_write_stats(self.handle)
_raise_error_if_negative(ret)
return ret
|
def union_conforms(element: Union, etype, namespace: Dict[str, Any], conforms: Callable) -> bool:
""" Determine whether element conforms to at least one of the types in etype
:param element: element to test
:param etype: type to test against
:param namespace: Namespace to use for resolving forward references
:param conforms: conformance test function
:return: True if element conforms to at least one type in etype
"""
return any(conforms(element, t, namespace) for t in etype.__args__)
|
def function[union_conforms, parameter[element, etype, namespace, conforms]]:
constant[ Determine whether element conforms to at least one of the types in etype
:param element: element to test
:param etype: type to test against
:param namespace: Namespace to use for resolving forward references
:param conforms: conformance test function
:return: True if element conforms to at least one type in etype
]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b25b2830>]]]
|
keyword[def] identifier[union_conforms] ( identifier[element] : identifier[Union] , identifier[etype] , identifier[namespace] : identifier[Dict] [ identifier[str] , identifier[Any] ], identifier[conforms] : identifier[Callable] )-> identifier[bool] :
literal[string]
keyword[return] identifier[any] ( identifier[conforms] ( identifier[element] , identifier[t] , identifier[namespace] ) keyword[for] identifier[t] keyword[in] identifier[etype] . identifier[__args__] )
|
def union_conforms(element: Union, etype, namespace: Dict[str, Any], conforms: Callable) -> bool:
""" Determine whether element conforms to at least one of the types in etype
:param element: element to test
:param etype: type to test against
:param namespace: Namespace to use for resolving forward references
:param conforms: conformance test function
:return: True if element conforms to at least one type in etype
"""
return any((conforms(element, t, namespace) for t in etype.__args__))
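
# Usage sketch, assuming union_conforms above is in scope; simple_conforms is
# an invented minimal conformance test (a bare isinstance check that ignores
# forward references).
from typing import Any, Dict, Union

def simple_conforms(element, etype, namespace: Dict[str, Any]) -> bool:
    return isinstance(element, etype)

print(union_conforms('hi', Union[int, str], {}, simple_conforms))  # True
print(union_conforms(3.5, Union[int, str], {}, simple_conforms))   # False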
|
def dircolorize(path, name_only=True):
"""Use user dircolors settings to colorize a string which is a path.
If name_only is True, it does this by the name rules (*.x) only; it
will not check the filesystem to colorize things like pipes, block devs,
doors, etc."""
if not name_only:
        raise NotImplementedError("Filesystem checking not implemented.")
    for k, regex in colorremap.iteritems():
if regex.match(path):
return '\x1b[%(color)sm%(path)s\x1b[00m' % {'color': k, 'path': path}
return path
|
def function[dircolorize, parameter[path, name_only]]:
constant[Use user dircolors settings to colorize a string which is a path.
If name_only is True, it does this by the name rules (*.x) only; it
will not check the filesystem to colorize things like pipes, block devs,
doors, etc.]
if <ast.UnaryOp object at 0x7da1b0a818d0> begin[:]
<ast.Raise object at 0x7da1b0a81a20>
for taget[tuple[[<ast.Name object at 0x7da1b0a80160>, <ast.Name object at 0x7da1b0a80220>]]] in starred[call[name[colorremap].iteritems, parameter[]]] begin[:]
if call[name[regex].match, parameter[name[path]]] begin[:]
return[binary_operation[constant[[%(color)sm%(path)s[00m] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b0a80280>, <ast.Constant object at 0x7da1b0a80130>], [<ast.Name object at 0x7da1b0a811b0>, <ast.Name object at 0x7da1b0a81cf0>]]]]
return[name[path]]
|
keyword[def] identifier[dircolorize] ( identifier[path] , identifier[name_only] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[name_only] :
        keyword[raise] identifier[NotImplementedError] ( literal[string] )
keyword[for] identifier[k] , identifier[regex] keyword[in] identifier[colorremap] . identifier[iteritems] ():
keyword[if] identifier[regex] . identifier[match] ( identifier[path] ):
keyword[return] literal[string] %{ literal[string] : identifier[k] , literal[string] : identifier[path] }
keyword[return] identifier[path]
|
def dircolorize(path, name_only=True):
"""Use user dircolors settings to colorize a string which is a path.
If name_only is True, it does this by the name rules (*.x) only; it
will not check the filesystem to colorize things like pipes, block devs,
doors, etc."""
if not name_only:
        raise NotImplementedError('Filesystem checking not implemented.') # depends on [control=['if'], data=[]]
for (k, regex) in colorremap.iteritems():
if regex.match(path):
return '\x1b[%(color)sm%(path)s\x1b[00m' % {'color': k, 'path': path} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return path
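
# An invented example of the module-level `colorremap` the function reads:
# ANSI SGR codes keyed by compiled filename patterns. The loop mirrors the
# function body (Python 3 spelling, items() instead of iteritems()).
import re

colorremap_demo = {
    '01;31': re.compile(r'.*\.(tar|zip|gz)$'),   # archives: bold red
    '01;35': re.compile(r'.*\.(jpg|png|gif)$'),  # images: bold magenta
}

for k, regex in colorremap_demo.items():
    if regex.match('photo.png'):
        print('\x1b[%(color)sm%(path)s\x1b[00m' % {'color': k, 'path': 'photo.png'})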
|
def portfolio(self) -> List[PortfolioItem]:
"""
List of portfolio items of the default account.
"""
account = self.wrapper.accounts[0]
return [v for v in self.wrapper.portfolio[account].values()]
|
def function[portfolio, parameter[self]]:
constant[
List of portfolio items of the default account.
]
variable[account] assign[=] call[name[self].wrapper.accounts][constant[0]]
return[<ast.ListComp object at 0x7da20c794f10>]
|
keyword[def] identifier[portfolio] ( identifier[self] )-> identifier[List] [ identifier[PortfolioItem] ]:
literal[string]
identifier[account] = identifier[self] . identifier[wrapper] . identifier[accounts] [ literal[int] ]
keyword[return] [ identifier[v] keyword[for] identifier[v] keyword[in] identifier[self] . identifier[wrapper] . identifier[portfolio] [ identifier[account] ]. identifier[values] ()]
|
def portfolio(self) -> List[PortfolioItem]:
"""
List of portfolio items of the default account.
"""
account = self.wrapper.accounts[0]
return [v for v in self.wrapper.portfolio[account].values()]
|
def facets(self):
"""Provides facets support. REQUIRES VALID HTTPS!"""
self.verify_integrity()
if self.__facets_enabled:
data = json.dumps({
'trustedFacets' : [{
'version': { 'major': 1, 'minor' : 0 },
'ids': self.__facets_list
}]
}, sort_keys=True, indent=2, separators=(',', ': '))
mime = 'application/fido.trusted-apps+json'
resp = Response(data, mimetype=mime)
return resp, 200
else:
return jsonify({}), 404
|
def function[facets, parameter[self]]:
constant[Provides facets support. REQUIRES VALID HTTPS!]
call[name[self].verify_integrity, parameter[]]
if name[self].__facets_enabled begin[:]
variable[data] assign[=] call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da18f00e440>], [<ast.List object at 0x7da18f00df30>]]]]
variable[mime] assign[=] constant[application/fido.trusted-apps+json]
variable[resp] assign[=] call[name[Response], parameter[name[data]]]
return[tuple[[<ast.Name object at 0x7da18f00dcc0>, <ast.Constant object at 0x7da18f00f160>]]]
|
keyword[def] identifier[facets] ( identifier[self] ):
literal[string]
identifier[self] . identifier[verify_integrity] ()
keyword[if] identifier[self] . identifier[__facets_enabled] :
identifier[data] = identifier[json] . identifier[dumps] ({
literal[string] :[{
literal[string] :{ literal[string] : literal[int] , literal[string] : literal[int] },
literal[string] : identifier[self] . identifier[__facets_list]
}]
}, identifier[sort_keys] = keyword[True] , identifier[indent] = literal[int] , identifier[separators] =( literal[string] , literal[string] ))
identifier[mime] = literal[string]
identifier[resp] = identifier[Response] ( identifier[data] , identifier[mimetype] = identifier[mime] )
keyword[return] identifier[resp] , literal[int]
keyword[else] :
keyword[return] identifier[jsonify] ({}), literal[int]
|
def facets(self):
"""Provides facets support. REQUIRES VALID HTTPS!"""
self.verify_integrity()
if self.__facets_enabled:
data = json.dumps({'trustedFacets': [{'version': {'major': 1, 'minor': 0}, 'ids': self.__facets_list}]}, sort_keys=True, indent=2, separators=(',', ': '))
mime = 'application/fido.trusted-apps+json'
resp = Response(data, mimetype=mime)
return (resp, 200) # depends on [control=['if'], data=[]]
else:
return (jsonify({}), 404)
|
def tomof(self, indent=0, maxline=MAX_MOF_LINE):
"""
Return a MOF string with the declaration of this CIM method for use in
a CIM class declaration.
The order of parameters and qualifiers is preserved.
Parameters:
indent (:term:`integer`): Number of spaces to indent each line of
the returned string, counted in the line with the method name.
Returns:
:term:`unicode string`: MOF string.
"""
mof = []
if self.qualifiers:
mof.append(_qualifiers_tomof(self.qualifiers, indent + MOF_INDENT,
maxline))
mof.append(_indent_str(indent))
# return_type is ensured not to be None or reference
mof.append(moftype(self.return_type, None))
mof.append(u' ')
mof.append(self.name)
if self.parameters.values():
mof.append(u'(\n')
mof_parms = []
for p in self.parameters.itervalues():
mof_parms.append(p.tomof(indent + MOF_INDENT, maxline))
mof.append(u',\n'.join(mof_parms))
mof.append(u');\n')
else:
mof.append(u'();\n')
return u''.join(mof)
|
def function[tomof, parameter[self, indent, maxline]]:
constant[
Return a MOF string with the declaration of this CIM method for use in
a CIM class declaration.
The order of parameters and qualifiers is preserved.
Parameters:
indent (:term:`integer`): Number of spaces to indent each line of
the returned string, counted in the line with the method name.
Returns:
:term:`unicode string`: MOF string.
]
variable[mof] assign[=] list[[]]
if name[self].qualifiers begin[:]
call[name[mof].append, parameter[call[name[_qualifiers_tomof], parameter[name[self].qualifiers, binary_operation[name[indent] + name[MOF_INDENT]], name[maxline]]]]]
call[name[mof].append, parameter[call[name[_indent_str], parameter[name[indent]]]]]
call[name[mof].append, parameter[call[name[moftype], parameter[name[self].return_type, constant[None]]]]]
call[name[mof].append, parameter[constant[ ]]]
call[name[mof].append, parameter[name[self].name]]
if call[name[self].parameters.values, parameter[]] begin[:]
call[name[mof].append, parameter[constant[(
]]]
variable[mof_parms] assign[=] list[[]]
for taget[name[p]] in starred[call[name[self].parameters.itervalues, parameter[]]] begin[:]
call[name[mof_parms].append, parameter[call[name[p].tomof, parameter[binary_operation[name[indent] + name[MOF_INDENT]], name[maxline]]]]]
call[name[mof].append, parameter[call[constant[,
].join, parameter[name[mof_parms]]]]]
call[name[mof].append, parameter[constant[);
]]]
return[call[constant[].join, parameter[name[mof]]]]
|
keyword[def] identifier[tomof] ( identifier[self] , identifier[indent] = literal[int] , identifier[maxline] = identifier[MAX_MOF_LINE] ):
literal[string]
identifier[mof] =[]
keyword[if] identifier[self] . identifier[qualifiers] :
identifier[mof] . identifier[append] ( identifier[_qualifiers_tomof] ( identifier[self] . identifier[qualifiers] , identifier[indent] + identifier[MOF_INDENT] ,
identifier[maxline] ))
identifier[mof] . identifier[append] ( identifier[_indent_str] ( identifier[indent] ))
identifier[mof] . identifier[append] ( identifier[moftype] ( identifier[self] . identifier[return_type] , keyword[None] ))
identifier[mof] . identifier[append] ( literal[string] )
identifier[mof] . identifier[append] ( identifier[self] . identifier[name] )
keyword[if] identifier[self] . identifier[parameters] . identifier[values] ():
identifier[mof] . identifier[append] ( literal[string] )
identifier[mof_parms] =[]
keyword[for] identifier[p] keyword[in] identifier[self] . identifier[parameters] . identifier[itervalues] ():
identifier[mof_parms] . identifier[append] ( identifier[p] . identifier[tomof] ( identifier[indent] + identifier[MOF_INDENT] , identifier[maxline] ))
identifier[mof] . identifier[append] ( literal[string] . identifier[join] ( identifier[mof_parms] ))
identifier[mof] . identifier[append] ( literal[string] )
keyword[else] :
identifier[mof] . identifier[append] ( literal[string] )
keyword[return] literal[string] . identifier[join] ( identifier[mof] )
|
def tomof(self, indent=0, maxline=MAX_MOF_LINE):
"""
Return a MOF string with the declaration of this CIM method for use in
a CIM class declaration.
The order of parameters and qualifiers is preserved.
Parameters:
indent (:term:`integer`): Number of spaces to indent each line of
the returned string, counted in the line with the method name.
Returns:
:term:`unicode string`: MOF string.
"""
mof = []
if self.qualifiers:
mof.append(_qualifiers_tomof(self.qualifiers, indent + MOF_INDENT, maxline)) # depends on [control=['if'], data=[]]
mof.append(_indent_str(indent))
# return_type is ensured not to be None or reference
mof.append(moftype(self.return_type, None))
mof.append(u' ')
mof.append(self.name)
if self.parameters.values():
mof.append(u'(\n')
mof_parms = []
for p in self.parameters.itervalues():
mof_parms.append(p.tomof(indent + MOF_INDENT, maxline)) # depends on [control=['for'], data=['p']]
mof.append(u',\n'.join(mof_parms))
mof.append(u');\n') # depends on [control=['if'], data=[]]
else:
mof.append(u'();\n')
return u''.join(mof)
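
# A hand-rolled illustration of the list-and-join MOF construction style used
# above; _indent_str is re-sketched here and the method and parameter names
# are invented.
MOF_INDENT_DEMO = 3

def _indent_str_sketch(indent):
    return u' ' * indent

mof_demo = []
mof_demo.append(_indent_str_sketch(MOF_INDENT_DEMO))
mof_demo.append(u'uint32 ')
mof_demo.append(u'RequestStateChange')
mof_demo.append(u'(\n')
mof_demo.append(u',\n'.join([_indent_str_sketch(2 * MOF_INDENT_DEMO) + u'uint16 RequestedState',
                             _indent_str_sketch(2 * MOF_INDENT_DEMO) + u'datetime TimeoutPeriod']))
mof_demo.append(u');\n')
print(u''.join(mof_demo))
#    uint32 RequestStateChange(
#       uint16 RequestedState,
#       datetime TimeoutPeriod);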
|
def query_repos(gh_session, orgs=None, repos=None, public_only=True):
"""
Yields GitHub3.py repo objects for provided orgs and repo names
    If orgs and repos are BOTH empty, execute a special mode of getting ALL
    repositories from the GitHub Server.
    If public_only is True, only repos that are marked as public are
    returned. Set this to False to return all repositories that the session
    has permission to access.
"""
if orgs is None:
orgs = []
if repos is None:
repos = []
if public_only:
privacy = 'public'
else:
privacy = 'all'
_check_api_limits(gh_session, 10)
for org_name in orgs:
org = gh_session.organization(org_name)
num_repos = org.public_repos_count
_check_api_limits(gh_session, _num_requests_needed(num_repos))
for repo in org.repositories(type=privacy):
_check_api_limits(gh_session, 10)
yield repo
for repo_name in repos:
_check_api_limits(gh_session, 10)
org, name = repo_name.split('/')
yield gh_session.repository(org, name)
if not (orgs or repos):
for repo in gh_session.all_repositories():
yield repo
|
def function[query_repos, parameter[gh_session, orgs, repos, public_only]]:
constant[
Yields GitHub3.py repo objects for provided orgs and repo names
    If orgs and repos are BOTH empty, execute a special mode of getting ALL
    repositories from the GitHub Server.
    If public_only is True, only repos that are marked as public are
    returned. Set this to False to return all repositories that the session
    has permission to access.
]
if compare[name[orgs] is constant[None]] begin[:]
variable[orgs] assign[=] list[[]]
if compare[name[repos] is constant[None]] begin[:]
variable[repos] assign[=] list[[]]
if name[public_only] begin[:]
variable[privacy] assign[=] constant[public]
call[name[_check_api_limits], parameter[name[gh_session], constant[10]]]
for taget[name[org_name]] in starred[name[orgs]] begin[:]
variable[org] assign[=] call[name[gh_session].organization, parameter[name[org_name]]]
variable[num_repos] assign[=] name[org].public_repos_count
call[name[_check_api_limits], parameter[name[gh_session], call[name[_num_requests_needed], parameter[name[num_repos]]]]]
for taget[name[repo]] in starred[call[name[org].repositories, parameter[]]] begin[:]
call[name[_check_api_limits], parameter[name[gh_session], constant[10]]]
<ast.Yield object at 0x7da1b05c6e90>
for taget[name[repo_name]] in starred[name[repos]] begin[:]
call[name[_check_api_limits], parameter[name[gh_session], constant[10]]]
<ast.Tuple object at 0x7da1b05c4be0> assign[=] call[name[repo_name].split, parameter[constant[/]]]
<ast.Yield object at 0x7da1b05c5450>
if <ast.UnaryOp object at 0x7da1b05c4ac0> begin[:]
for taget[name[repo]] in starred[call[name[gh_session].all_repositories, parameter[]]] begin[:]
<ast.Yield object at 0x7da1b05c6e00>
|
keyword[def] identifier[query_repos] ( identifier[gh_session] , identifier[orgs] = keyword[None] , identifier[repos] = keyword[None] , identifier[public_only] = keyword[True] ):
literal[string]
keyword[if] identifier[orgs] keyword[is] keyword[None] :
identifier[orgs] =[]
keyword[if] identifier[repos] keyword[is] keyword[None] :
identifier[repos] =[]
keyword[if] identifier[public_only] :
identifier[privacy] = literal[string]
keyword[else] :
identifier[privacy] = literal[string]
identifier[_check_api_limits] ( identifier[gh_session] , literal[int] )
keyword[for] identifier[org_name] keyword[in] identifier[orgs] :
identifier[org] = identifier[gh_session] . identifier[organization] ( identifier[org_name] )
identifier[num_repos] = identifier[org] . identifier[public_repos_count]
identifier[_check_api_limits] ( identifier[gh_session] , identifier[_num_requests_needed] ( identifier[num_repos] ))
keyword[for] identifier[repo] keyword[in] identifier[org] . identifier[repositories] ( identifier[type] = identifier[privacy] ):
identifier[_check_api_limits] ( identifier[gh_session] , literal[int] )
keyword[yield] identifier[repo]
keyword[for] identifier[repo_name] keyword[in] identifier[repos] :
identifier[_check_api_limits] ( identifier[gh_session] , literal[int] )
identifier[org] , identifier[name] = identifier[repo_name] . identifier[split] ( literal[string] )
keyword[yield] identifier[gh_session] . identifier[repository] ( identifier[org] , identifier[name] )
keyword[if] keyword[not] ( identifier[orgs] keyword[or] identifier[repos] ):
keyword[for] identifier[repo] keyword[in] identifier[gh_session] . identifier[all_repositories] ():
keyword[yield] identifier[repo]
|
def query_repos(gh_session, orgs=None, repos=None, public_only=True):
"""
Yields GitHub3.py repo objects for provided orgs and repo names
    If orgs and repos are BOTH empty, execute a special mode of getting ALL
    repositories from the GitHub Server.
    If public_only is True, only repos that are marked as public are
    returned. Set this to False to return all repositories that the session
    has permission to access.
"""
if orgs is None:
orgs = [] # depends on [control=['if'], data=['orgs']]
if repos is None:
repos = [] # depends on [control=['if'], data=['repos']]
if public_only:
privacy = 'public' # depends on [control=['if'], data=[]]
else:
privacy = 'all'
_check_api_limits(gh_session, 10)
for org_name in orgs:
org = gh_session.organization(org_name)
num_repos = org.public_repos_count
_check_api_limits(gh_session, _num_requests_needed(num_repos))
for repo in org.repositories(type=privacy):
_check_api_limits(gh_session, 10)
yield repo # depends on [control=['for'], data=['repo']] # depends on [control=['for'], data=['org_name']]
for repo_name in repos:
_check_api_limits(gh_session, 10)
(org, name) = repo_name.split('/')
yield gh_session.repository(org, name) # depends on [control=['for'], data=['repo_name']]
if not (orgs or repos):
for repo in gh_session.all_repositories():
yield repo # depends on [control=['for'], data=['repo']] # depends on [control=['if'], data=[]]
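
# Hedged usage sketch: assumes a valid personal access token, that
# query_repos above is importable along with its _check_api_limits and
# _num_requests_needed helpers, and that the org/repo names (placeholders
# here) exist.
import github3

gh = github3.login(token='<personal-access-token>')
for repo in query_repos(gh, orgs=['octocat-org'], repos=['octocat/Hello-World']):
    print(repo.full_name)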
|
def absolute_url(base_url, base_ref, parent_url):
"""
Search for the absolute url to detect the link type. This does not
join any url fragments together!
@param base_url: base url from a link tag
@type base_url: string or None
@param base_ref: base url from <base> tag
@type base_ref: string or None
@param parent_url: url of parent document
@type parent_url: string or None
"""
if base_url and urlutil.url_is_absolute(base_url):
return base_url
elif base_ref and urlutil.url_is_absolute(base_ref):
return base_ref
elif parent_url and urlutil.url_is_absolute(parent_url):
return parent_url
return u""
|
def function[absolute_url, parameter[base_url, base_ref, parent_url]]:
constant[
Search for the absolute url to detect the link type. This does not
join any url fragments together!
@param base_url: base url from a link tag
@type base_url: string or None
@param base_ref: base url from <base> tag
@type base_ref: string or None
@param parent_url: url of parent document
@type parent_url: string or None
]
if <ast.BoolOp object at 0x7da1b2344c70> begin[:]
return[name[base_url]]
return[constant[]]
|
keyword[def] identifier[absolute_url] ( identifier[base_url] , identifier[base_ref] , identifier[parent_url] ):
literal[string]
keyword[if] identifier[base_url] keyword[and] identifier[urlutil] . identifier[url_is_absolute] ( identifier[base_url] ):
keyword[return] identifier[base_url]
keyword[elif] identifier[base_ref] keyword[and] identifier[urlutil] . identifier[url_is_absolute] ( identifier[base_ref] ):
keyword[return] identifier[base_ref]
keyword[elif] identifier[parent_url] keyword[and] identifier[urlutil] . identifier[url_is_absolute] ( identifier[parent_url] ):
keyword[return] identifier[parent_url]
keyword[return] literal[string]
|
def absolute_url(base_url, base_ref, parent_url):
"""
Search for the absolute url to detect the link type. This does not
join any url fragments together!
@param base_url: base url from a link tag
@type base_url: string or None
@param base_ref: base url from <base> tag
@type base_ref: string or None
@param parent_url: url of parent document
@type parent_url: string or None
"""
if base_url and urlutil.url_is_absolute(base_url):
return base_url # depends on [control=['if'], data=[]]
elif base_ref and urlutil.url_is_absolute(base_ref):
return base_ref # depends on [control=['if'], data=[]]
elif parent_url and urlutil.url_is_absolute(parent_url):
return parent_url # depends on [control=['if'], data=[]]
return u''
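
# Precedence check with made-up URLs, assuming absolute_url above is in
# scope; the class below is a minimal stand-in for the urlutil module it
# consults.
from urllib.parse import urlparse

class urlutil:
    @staticmethod
    def url_is_absolute(url):
        return bool(urlparse(url).scheme)

print(absolute_url('https://a.example/x', None, 'https://c.example/'))
# -> 'https://a.example/x' (the link's own base wins)
print(absolute_url('relative/path', None, 'https://c.example/doc.html'))
# -> 'https://c.example/doc.html' (falls back to the parent document)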
|
def produce_projection_explorer(corpus,
category,
word2vec_model=None,
projection_model=None,
embeddings=None,
term_acceptance_re=re.compile('[a-z]{3,}'),
show_axes=False,
**kwargs):
'''
Parameters
----------
corpus : ParsedCorpus
It is highly recommended to use a stoplisted, unigram corpus-- `corpus.get_stoplisted_unigram_corpus()`
category : str
word2vec_model : Word2Vec
        A gensim word2vec model. If None, a default model will be used
        instead. See Word2VecFromParsedCorpus for the default model.
projection_model : sklearn-style dimensionality reduction model.
By default: umap.UMAP(min_dist=0.5, metric='cosine')
You could also use, e.g., sklearn.manifold.TSNE(perplexity=10, n_components=2, init='pca', n_iter=2500, random_state=23)
embeddings : array[len(corpus.get_terms()), X]
        Word embeddings. If None (default), will train them using the word2vec model.
term_acceptance_re : SRE_Pattern,
Regular expression to identify valid terms
show_axes : bool, default False
Show the ticked axes on the plot. If false, show inner axes as a crosshair.
kwargs : dict
        Remaining keyword arguments are passed through to produce_scattertext_explorer (e.g., get_tooltip_content).
Returns
-------
str
HTML of visualization
'''
embeddings_resolover = EmbeddingsResolver(corpus)
if embeddings is not None:
embeddings_resolover.set_embeddings(embeddings)
else:
embeddings_resolover.set_embeddings_model(word2vec_model, term_acceptance_re)
corpus, word_axes = embeddings_resolover.project_embeddings(projection_model, x_dim=0, y_dim=1)
html = produce_scattertext_explorer(
corpus=corpus,
category=category,
minimum_term_frequency=0,
sort_by_dist=False,
x_coords=scale(word_axes['x']),
y_coords=scale(word_axes['y']),
y_label='',
x_label='',
show_axes=show_axes,
**kwargs
)
return html
|
def function[produce_projection_explorer, parameter[corpus, category, word2vec_model, projection_model, embeddings, term_acceptance_re, show_axes]]:
constant[
Parameters
----------
corpus : ParsedCorpus
It is highly recommended to use a stoplisted, unigram corpus-- `corpus.get_stoplisted_unigram_corpus()`
category : str
word2vec_model : Word2Vec
        A gensim word2vec model. If None, a default model will be used
        instead. See Word2VecFromParsedCorpus for the default model.
projection_model : sklearn-style dimensionality reduction model.
By default: umap.UMAP(min_dist=0.5, metric='cosine')
You could also use, e.g., sklearn.manifold.TSNE(perplexity=10, n_components=2, init='pca', n_iter=2500, random_state=23)
embeddings : array[len(corpus.get_terms()), X]
        Word embeddings. If None (default), will train them using the word2vec model.
term_acceptance_re : SRE_Pattern,
Regular expression to identify valid terms
show_axes : bool, default False
Show the ticked axes on the plot. If false, show inner axes as a crosshair.
kwargs : dict
        Remaining keyword arguments are passed through to produce_scattertext_explorer (e.g., get_tooltip_content).
Returns
-------
str
HTML of visualization
]
variable[embeddings_resolover] assign[=] call[name[EmbeddingsResolver], parameter[name[corpus]]]
if compare[name[embeddings] is_not constant[None]] begin[:]
call[name[embeddings_resolover].set_embeddings, parameter[name[embeddings]]]
<ast.Tuple object at 0x7da1b1a129e0> assign[=] call[name[embeddings_resolover].project_embeddings, parameter[name[projection_model]]]
variable[html] assign[=] call[name[produce_scattertext_explorer], parameter[]]
return[name[html]]
|
keyword[def] identifier[produce_projection_explorer] ( identifier[corpus] ,
identifier[category] ,
identifier[word2vec_model] = keyword[None] ,
identifier[projection_model] = keyword[None] ,
identifier[embeddings] = keyword[None] ,
identifier[term_acceptance_re] = identifier[re] . identifier[compile] ( literal[string] ),
identifier[show_axes] = keyword[False] ,
** identifier[kwargs] ):
literal[string]
identifier[embeddings_resolover] = identifier[EmbeddingsResolver] ( identifier[corpus] )
keyword[if] identifier[embeddings] keyword[is] keyword[not] keyword[None] :
identifier[embeddings_resolover] . identifier[set_embeddings] ( identifier[embeddings] )
keyword[else] :
identifier[embeddings_resolover] . identifier[set_embeddings_model] ( identifier[word2vec_model] , identifier[term_acceptance_re] )
identifier[corpus] , identifier[word_axes] = identifier[embeddings_resolover] . identifier[project_embeddings] ( identifier[projection_model] , identifier[x_dim] = literal[int] , identifier[y_dim] = literal[int] )
identifier[html] = identifier[produce_scattertext_explorer] (
identifier[corpus] = identifier[corpus] ,
identifier[category] = identifier[category] ,
identifier[minimum_term_frequency] = literal[int] ,
identifier[sort_by_dist] = keyword[False] ,
identifier[x_coords] = identifier[scale] ( identifier[word_axes] [ literal[string] ]),
identifier[y_coords] = identifier[scale] ( identifier[word_axes] [ literal[string] ]),
identifier[y_label] = literal[string] ,
identifier[x_label] = literal[string] ,
identifier[show_axes] = identifier[show_axes] ,
** identifier[kwargs]
)
keyword[return] identifier[html]
|
def produce_projection_explorer(corpus, category, word2vec_model=None, projection_model=None, embeddings=None, term_acceptance_re=re.compile('[a-z]{3,}'), show_axes=False, **kwargs):
"""
Parameters
----------
corpus : ParsedCorpus
It is highly recommended to use a stoplisted, unigram corpus-- `corpus.get_stoplisted_unigram_corpus()`
category : str
word2vec_model : Word2Vec
        A gensim word2vec model. If None, a default model will be used
        instead. See Word2VecFromParsedCorpus for the default model.
projection_model : sklearn-style dimensionality reduction model.
By default: umap.UMAP(min_dist=0.5, metric='cosine')
You could also use, e.g., sklearn.manifold.TSNE(perplexity=10, n_components=2, init='pca', n_iter=2500, random_state=23)
embeddings : array[len(corpus.get_terms()), X]
        Word embeddings. If None (default), will train them using the word2vec model.
term_acceptance_re : SRE_Pattern,
Regular expression to identify valid terms
show_axes : bool, default False
Show the ticked axes on the plot. If false, show inner axes as a crosshair.
kwargs : dict
        Remaining keyword arguments are passed through to produce_scattertext_explorer (e.g., get_tooltip_content).
Returns
-------
str
HTML of visualization
"""
embeddings_resolover = EmbeddingsResolver(corpus)
if embeddings is not None:
embeddings_resolover.set_embeddings(embeddings) # depends on [control=['if'], data=['embeddings']]
else:
embeddings_resolover.set_embeddings_model(word2vec_model, term_acceptance_re)
(corpus, word_axes) = embeddings_resolover.project_embeddings(projection_model, x_dim=0, y_dim=1)
html = produce_scattertext_explorer(corpus=corpus, category=category, minimum_term_frequency=0, sort_by_dist=False, x_coords=scale(word_axes['x']), y_coords=scale(word_axes['y']), y_label='', x_label='', show_axes=show_axes, **kwargs)
return html
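
# A plausible min-max scaler standing in for the `scale` used above (its
# definition is not shown here): it maps raw projection coordinates into
# [0, 1] for plotting.
import numpy as np

def scale_sketch(vec):
    vec = np.asarray(vec, dtype=float)
    return (vec - vec.min()) / (vec.max() - vec.min())

print(scale_sketch([2.0, 4.0, 10.0]))  # [0.   0.25 1.  ]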
|
def read_data(self, size):
"""Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int)
"""
result = list()
while size > 0:
count = min(size, 8)
buf = self.hid.read(count)
if len(buf) < count:
raise IOError(
'pywws.device_cython_hidapi.USBDevice.read_data failed')
result += buf
size -= count
return result
|
def function[read_data, parameter[self, size]]:
constant[Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int)
]
variable[result] assign[=] call[name[list], parameter[]]
while compare[name[size] greater[>] constant[0]] begin[:]
variable[count] assign[=] call[name[min], parameter[name[size], constant[8]]]
variable[buf] assign[=] call[name[self].hid.read, parameter[name[count]]]
if compare[call[name[len], parameter[name[buf]]] less[<] name[count]] begin[:]
<ast.Raise object at 0x7da18bcc8040>
<ast.AugAssign object at 0x7da18bccbbe0>
<ast.AugAssign object at 0x7da18bcc9bd0>
return[name[result]]
|
keyword[def] identifier[read_data] ( identifier[self] , identifier[size] ):
literal[string]
identifier[result] = identifier[list] ()
keyword[while] identifier[size] > literal[int] :
identifier[count] = identifier[min] ( identifier[size] , literal[int] )
identifier[buf] = identifier[self] . identifier[hid] . identifier[read] ( identifier[count] )
keyword[if] identifier[len] ( identifier[buf] )< identifier[count] :
keyword[raise] identifier[IOError] (
literal[string] )
identifier[result] += identifier[buf]
identifier[size] -= identifier[count]
keyword[return] identifier[result]
|
def read_data(self, size):
"""Receive data from the device.
If the read fails for any reason, an :obj:`IOError` exception
is raised.
:param size: the number of bytes to read.
:type size: int
:return: the data received.
:rtype: list(int)
"""
result = list()
while size > 0:
count = min(size, 8)
buf = self.hid.read(count)
if len(buf) < count:
raise IOError('pywws.device_cython_hidapi.USBDevice.read_data failed') # depends on [control=['if'], data=[]]
result += buf
size -= count # depends on [control=['while'], data=['size']]
return result
|
def BSR(cpu, dest, src):
"""
Bit scan reverse.
Searches the source operand (second operand) for the most significant
set bit (1 bit). If a most significant 1 bit is found, its bit index is
stored in the destination operand (first operand). The source operand
can be a register or a memory location; the destination operand is a register.
The bit index is an unsigned offset from bit 0 of the source operand.
If the contents of the source operand are 0, the contents of the
destination operand are undefined::
IF SRC = 0
THEN
ZF = 1;
DEST is undefined;
ELSE
ZF = 0;
temp = OperandSize - 1;
WHILE Bit(SRC, temp) = 0
DO
temp = temp - 1;
DEST = temp;
OD;
FI;
:param cpu: current CPU.
:param dest: destination operand.
:param src: source operand.
"""
value = src.read()
flag = Operators.EXTRACT(value, src.size - 1, 1) == 1
res = 0
for pos in reversed(range(0, src.size)):
res = Operators.ITEBV(dest.size, flag, res, pos)
flag = Operators.OR(flag, (Operators.EXTRACT(value, pos, 1) == 1))
cpu.PF = cpu._calculate_parity_flag(res)
cpu.ZF = value == 0
dest.write(Operators.ITEBV(dest.size, cpu.ZF, dest.read(), res))
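# A concrete-arithmetic sketch (not from the source) of the BSR semantics for
# plain Python ints: the result is the index of the most significant set bit,
# and ZF is set when the source is zero (DEST is then left unchanged).
def bsr_concrete(value):
    zf = value == 0
    if zf:
        return zf, None                   # DEST undefined on zero input
    return zf, value.bit_length() - 1     # index of the highest set bit

print(bsr_concrete(0b00010100))  # (False, 4)
print(bsr_concrete(0))           # (True, None)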
|
def function[BSR, parameter[cpu, dest, src]]:
constant[
Bit scan reverse.
Searches the source operand (second operand) for the most significant
set bit (1 bit). If a most significant 1 bit is found, its bit index is
stored in the destination operand (first operand). The source operand
can be a register or a memory location; the destination operand is a register.
The bit index is an unsigned offset from bit 0 of the source operand.
If the contents of the source operand are 0, the contents of the
destination operand are undefined::
IF SRC = 0
THEN
ZF = 1;
DEST is undefined;
ELSE
ZF = 0;
temp = OperandSize - 1;
WHILE Bit(SRC, temp) = 0
DO
temp = temp - 1;
DEST = temp;
OD;
FI;
:param cpu: current CPU.
:param dest: destination operand.
:param src: source operand.
]
variable[value] assign[=] call[name[src].read, parameter[]]
variable[flag] assign[=] compare[call[name[Operators].EXTRACT, parameter[name[value], binary_operation[name[src].size - constant[1]], constant[1]]] equal[==] constant[1]]
variable[res] assign[=] constant[0]
for taget[name[pos]] in starred[call[name[reversed], parameter[call[name[range], parameter[constant[0], name[src].size]]]]] begin[:]
variable[res] assign[=] call[name[Operators].ITEBV, parameter[name[dest].size, name[flag], name[res], name[pos]]]
variable[flag] assign[=] call[name[Operators].OR, parameter[name[flag], compare[call[name[Operators].EXTRACT, parameter[name[value], name[pos], constant[1]]] equal[==] constant[1]]]]
name[cpu].PF assign[=] call[name[cpu]._calculate_parity_flag, parameter[name[res]]]
name[cpu].ZF assign[=] compare[name[value] equal[==] constant[0]]
call[name[dest].write, parameter[call[name[Operators].ITEBV, parameter[name[dest].size, name[cpu].ZF, call[name[dest].read, parameter[]], name[res]]]]]
|
keyword[def] identifier[BSR] ( identifier[cpu] , identifier[dest] , identifier[src] ):
literal[string]
identifier[value] = identifier[src] . identifier[read] ()
identifier[flag] = identifier[Operators] . identifier[EXTRACT] ( identifier[value] , identifier[src] . identifier[size] - literal[int] , literal[int] )== literal[int]
identifier[res] = literal[int]
keyword[for] identifier[pos] keyword[in] identifier[reversed] ( identifier[range] ( literal[int] , identifier[src] . identifier[size] )):
identifier[res] = identifier[Operators] . identifier[ITEBV] ( identifier[dest] . identifier[size] , identifier[flag] , identifier[res] , identifier[pos] )
identifier[flag] = identifier[Operators] . identifier[OR] ( identifier[flag] ,( identifier[Operators] . identifier[EXTRACT] ( identifier[value] , identifier[pos] , literal[int] )== literal[int] ))
identifier[cpu] . identifier[PF] = identifier[cpu] . identifier[_calculate_parity_flag] ( identifier[res] )
identifier[cpu] . identifier[ZF] = identifier[value] == literal[int]
identifier[dest] . identifier[write] ( identifier[Operators] . identifier[ITEBV] ( identifier[dest] . identifier[size] , identifier[cpu] . identifier[ZF] , identifier[dest] . identifier[read] (), identifier[res] ))
|
def BSR(cpu, dest, src):
"""
Bit scan reverse.
Searches the source operand (second operand) for the most significant
set bit (1 bit). If a most significant 1 bit is found, its bit index is
stored in the destination operand (first operand). The source operand
can be a register or a memory location; the destination operand is a register.
The bit index is an unsigned offset from bit 0 of the source operand.
If the contents of the source operand are 0, the contents of the
destination operand are undefined::
IF SRC = 0
THEN
ZF = 1;
DEST is undefined;
ELSE
ZF = 0;
temp = OperandSize - 1;
WHILE Bit(SRC, temp) = 0
DO
temp = temp - 1;
DEST = temp;
OD;
FI;
:param cpu: current CPU.
:param dest: destination operand.
:param src: source operand.
"""
value = src.read()
flag = Operators.EXTRACT(value, src.size - 1, 1) == 1
res = 0
for pos in reversed(range(0, src.size)):
res = Operators.ITEBV(dest.size, flag, res, pos)
flag = Operators.OR(flag, Operators.EXTRACT(value, pos, 1) == 1) # depends on [control=['for'], data=['pos']]
cpu.PF = cpu._calculate_parity_flag(res)
cpu.ZF = value == 0
dest.write(Operators.ITEBV(dest.size, cpu.ZF, dest.read(), res))
|
def build_and_filter(base_folder, size_pyramid, factor, thresh_factor=1):
"""Build a filtered pyramid of contact maps
Build a fragment pyramid for multi-scale analysis and remove high-sparsity
(i.e. low-coverage) and short fragments.
Parameters
----------
base_folder : str or pathlib.Path
Where to create the hdf5 files containing the matrices.
size_pyramid : int
How many levels (contact maps of decreasing resolution) to generate.
factor : int
Subsampling factor (binning) from one level to the next.
thresh_factor : float, optional
Number of standard deviations below the mean coverage beyond which
lesser covered fragments will be discarded. Default is 1.
Returns
-------
obj_pyramid : Pyramid
The pyramid object containing all the levels.
"""
min_bin_per_contig = 1
fact_sub_sampling = factor
all_pyramid_folder = os.path.join(base_folder, "pyramids")
if not (os.path.exists(all_pyramid_folder)):
os.mkdir(all_pyramid_folder)
init_pyramid_folder = os.path.join(
all_pyramid_folder, "pyramid_" + str(1) + "_no_thresh"
)
if not (os.path.exists(init_pyramid_folder)):
init_size_pyramid = 1
build(base_folder, init_size_pyramid, factor, min_bin_per_contig)
init_pyramid_folder_level_0 = os.path.join(init_pyramid_folder, "level_0")
contig_info = os.path.join(
init_pyramid_folder_level_0, "0_contig_info.txt"
)
fragments_list = os.path.join(
init_pyramid_folder_level_0, "0_fragments_list.txt"
)
init_abs_fragments_contacts = os.path.join(
init_pyramid_folder_level_0, "0_abs_frag_contacts.txt"
)
init_pyramid_file = os.path.join(init_pyramid_folder, "pyramid.hdf5")
pyramid_folder = os.path.join(
all_pyramid_folder, "pyramid_" + str(size_pyramid) + "_thresh_auto"
)
if not (os.path.exists(pyramid_folder)):
os.mkdir(pyramid_folder)
level = 0
pyramid_level_folder = os.path.join(pyramid_folder, "level_" + str(level))
if not (os.path.exists(pyramid_level_folder)):
os.mkdir(pyramid_level_folder)
current_contig_info = os.path.join(
pyramid_level_folder, str(level) + "_contig_info.txt"
)
current_frag_list = os.path.join(
pyramid_level_folder, str(level) + "_fragments_list.txt"
)
current_abs_fragments_contacts = os.path.join(
pyramid_level_folder, str(level) + "_abs_frag_contacts.txt"
)
if not (
os.path.exists(current_contig_info)
and os.path.exists(current_frag_list)
and os.path.exists(current_abs_fragments_contacts)
):
logger.info("start filtering")
pyramid_0 = h5py.File(init_pyramid_file)
remove_problematic_fragments(
contig_info,
fragments_list,
init_abs_fragments_contacts,
current_contig_info,
current_frag_list,
current_abs_fragments_contacts,
pyramid_0,
thresh_factor=thresh_factor,
)
pyramid_0.close()
#
else:
logger.info("filtering already done...")
hdf5_pyramid_file = os.path.join(pyramid_folder, "pyramid.hdf5")
pyramid_handle = h5py.File(hdf5_pyramid_file)
pyramid_level_folder = os.path.join(pyramid_folder, "level_" + str(level))
level_pyramid = str(level) + "_"
sub_2_super_frag_index_file = os.path.join(
pyramid_level_folder, level_pyramid + "sub_2_super_index_frag.txt"
)
for level in range(0, size_pyramid):
pyramid_level_folder = os.path.join(
pyramid_folder, "level_" + str(level)
)
if not (os.path.exists(pyramid_level_folder)):
os.mkdir(pyramid_level_folder)
level_pyramid = str(level) + "_"
new_contig_list_file = os.path.join(
pyramid_level_folder, level_pyramid + "contig_info.txt"
)
new_fragments_list_file = os.path.join(
pyramid_level_folder, level_pyramid + "fragments_list.txt"
)
new_abs_fragments_contacts_file = os.path.join(
pyramid_level_folder, level_pyramid + "abs_frag_contacts.txt"
)
if level > 0:
if (
os.path.exists(new_contig_list_file)
and os.path.exists(new_fragments_list_file)
and os.path.exists(new_abs_fragments_contacts_file)
and os.path.exists((sub_2_super_frag_index_file))
):
logger.info("level already built")
nfrags = file_len(new_fragments_list_file) - 1
else: # this should never happen !!!
logger.info("writing new_files..")
nfrags = subsample_data_set(
current_contig_info,
current_frag_list,
fact_sub_sampling,
current_abs_fragments_contacts,
new_abs_fragments_contacts_file,
min_bin_per_contig,
new_contig_list_file,
new_fragments_list_file,
sub_2_super_frag_index_file,
)
else:
if (
os.path.exists(new_contig_list_file)
and os.path.exists(new_fragments_list_file)
and os.path.exists(new_abs_fragments_contacts_file)
):
logger.info("level already built...")
nfrags = file_len(new_fragments_list_file) - 1
try:
status = pyramid_handle.attrs[str(level)] == "done"
except KeyError:
pyramid_handle.attrs[str(level)] = "pending"
status = False
if not (status):
logger.info("Start filling the pyramid")
# level_to_fill = pyramid_handle.create_dataset(str(level),
# (nfrags,nfrags), 'i')
# fill_pyramid_level(level_to_fill,new_abs_fragments_contacts_file,
# size_chunk,nfrags)
fill_sparse_pyramid_level(
pyramid_handle, level, new_abs_fragments_contacts_file, nfrags
)
pyramid_handle.attrs[str(level)] = "done"
current_frag_list = new_fragments_list_file
current_contig_info = new_contig_list_file
current_abs_fragments_contacts = new_abs_fragments_contacts_file
sub_2_super_frag_index_file = os.path.join(
pyramid_level_folder, level_pyramid + "sub_2_super_index_frag.txt"
)
logger.info("pyramid built.")
obj_pyramid = pyramid(pyramid_folder, size_pyramid)
pyramid_handle.close()
return obj_pyramid
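# A hedged usage sketch (not from the source): the path and parameters are
# illustrative. This would build a 4-level pyramid, binning fragments by a
# factor of 3 at each level, and discard fragments whose coverage falls more
# than one standard deviation below the mean.
def demo_build_pyramid(base_folder='out/my_sample'):
    return build_and_filter(base_folder, size_pyramid=4, factor=3,
                            thresh_factor=1)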
|
def function[build_and_filter, parameter[base_folder, size_pyramid, factor, thresh_factor]]:
constant[Build a filtered pyramid of contact maps
Build a fragment pyramid for multi-scale analysis and remove high-sparsity
(i.e. low-coverage) and short fragments.
Parameters
----------
base_folder : str or pathlib.Path
Where to create the hdf5 files containing the matrices.
size_pyramid : int
How many levels (contact maps of decreasing resolution) to generate.
factor : int
Subsampling factor (binning) from one level to the next.
thresh_factor : float, optional
Number of standard deviations below the mean coverage beyond which
lesser covered fragments will be discarded. Default is 1.
Returns
-------
obj_pyramid : Pyramid
The pyramid object containing all the levels.
]
variable[min_bin_per_contig] assign[=] constant[1]
variable[fact_sub_sampling] assign[=] name[factor]
variable[all_pyramid_folder] assign[=] call[name[os].path.join, parameter[name[base_folder], constant[pyramids]]]
if <ast.UnaryOp object at 0x7da1b055c550> begin[:]
call[name[os].mkdir, parameter[name[all_pyramid_folder]]]
variable[init_pyramid_folder] assign[=] call[name[os].path.join, parameter[name[all_pyramid_folder], binary_operation[binary_operation[constant[pyramid_] + call[name[str], parameter[constant[1]]]] + constant[_no_thresh]]]]
if <ast.UnaryOp object at 0x7da1b055ca60> begin[:]
variable[init_size_pyramid] assign[=] constant[1]
call[name[build], parameter[name[base_folder], name[init_size_pyramid], name[factor], name[min_bin_per_contig]]]
variable[init_pyramid_folder_level_0] assign[=] call[name[os].path.join, parameter[name[init_pyramid_folder], constant[level_0]]]
variable[contig_info] assign[=] call[name[os].path.join, parameter[name[init_pyramid_folder_level_0], constant[0_contig_info.txt]]]
variable[fragments_list] assign[=] call[name[os].path.join, parameter[name[init_pyramid_folder_level_0], constant[0_fragments_list.txt]]]
variable[init_abs_fragments_contacts] assign[=] call[name[os].path.join, parameter[name[init_pyramid_folder_level_0], constant[0_abs_frag_contacts.txt]]]
variable[init_pyramid_file] assign[=] call[name[os].path.join, parameter[name[init_pyramid_folder], constant[pyramid.hdf5]]]
variable[pyramid_folder] assign[=] call[name[os].path.join, parameter[name[all_pyramid_folder], binary_operation[binary_operation[constant[pyramid_] + call[name[str], parameter[name[size_pyramid]]]] + constant[_thresh_auto]]]]
if <ast.UnaryOp object at 0x7da1b055d7e0> begin[:]
call[name[os].mkdir, parameter[name[pyramid_folder]]]
variable[level] assign[=] constant[0]
variable[pyramid_level_folder] assign[=] call[name[os].path.join, parameter[name[pyramid_folder], binary_operation[constant[level_] + call[name[str], parameter[name[level]]]]]]
if <ast.UnaryOp object at 0x7da1b055dd20> begin[:]
call[name[os].mkdir, parameter[name[pyramid_level_folder]]]
variable[current_contig_info] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[call[name[str], parameter[name[level]]] + constant[_contig_info.txt]]]]
variable[current_frag_list] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[call[name[str], parameter[name[level]]] + constant[_fragments_list.txt]]]]
variable[current_abs_fragments_contacts] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[call[name[str], parameter[name[level]]] + constant[_abs_frag_contacts.txt]]]]
if <ast.UnaryOp object at 0x7da1b055e650> begin[:]
call[name[logger].info, parameter[constant[start filtering]]]
variable[pyramid_0] assign[=] call[name[h5py].File, parameter[name[init_pyramid_file]]]
call[name[remove_problematic_fragments], parameter[name[contig_info], name[fragments_list], name[init_abs_fragments_contacts], name[current_contig_info], name[current_frag_list], name[current_abs_fragments_contacts], name[pyramid_0]]]
call[name[pyramid_0].close, parameter[]]
variable[hdf5_pyramid_file] assign[=] call[name[os].path.join, parameter[name[pyramid_folder], constant[pyramid.hdf5]]]
variable[pyramid_handle] assign[=] call[name[h5py].File, parameter[name[hdf5_pyramid_file]]]
variable[pyramid_level_folder] assign[=] call[name[os].path.join, parameter[name[pyramid_folder], binary_operation[constant[level_] + call[name[str], parameter[name[level]]]]]]
variable[level_pyramid] assign[=] binary_operation[call[name[str], parameter[name[level]]] + constant[_]]
variable[sub_2_super_frag_index_file] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[name[level_pyramid] + constant[sub_2_super_index_frag.txt]]]]
for taget[name[level]] in starred[call[name[range], parameter[constant[0], name[size_pyramid]]]] begin[:]
variable[pyramid_level_folder] assign[=] call[name[os].path.join, parameter[name[pyramid_folder], binary_operation[constant[level_] + call[name[str], parameter[name[level]]]]]]
if <ast.UnaryOp object at 0x7da1b055fbe0> begin[:]
call[name[os].mkdir, parameter[name[pyramid_level_folder]]]
variable[level_pyramid] assign[=] binary_operation[call[name[str], parameter[name[level]]] + constant[_]]
variable[new_contig_list_file] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[name[level_pyramid] + constant[contig_info.txt]]]]
variable[new_fragments_list_file] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[name[level_pyramid] + constant[fragments_list.txt]]]]
variable[new_abs_fragments_contacts_file] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[name[level_pyramid] + constant[abs_frag_contacts.txt]]]]
if compare[name[level] greater[>] constant[0]] begin[:]
if <ast.BoolOp object at 0x7da204346bf0> begin[:]
call[name[logger].info, parameter[constant[level already built]]]
variable[nfrags] assign[=] binary_operation[call[name[file_len], parameter[name[new_fragments_list_file]]] - constant[1]]
<ast.Try object at 0x7da18f09d480>
if <ast.UnaryOp object at 0x7da18f09ebf0> begin[:]
call[name[logger].info, parameter[constant[Start filling the pyramid]]]
call[name[fill_sparse_pyramid_level], parameter[name[pyramid_handle], name[level], name[new_abs_fragments_contacts_file], name[nfrags]]]
call[name[pyramid_handle].attrs][call[name[str], parameter[name[level]]]] assign[=] constant[done]
variable[current_frag_list] assign[=] name[new_fragments_list_file]
variable[current_contig_info] assign[=] name[new_contig_list_file]
variable[current_abs_fragments_contacts] assign[=] name[new_abs_fragments_contacts_file]
variable[sub_2_super_frag_index_file] assign[=] call[name[os].path.join, parameter[name[pyramid_level_folder], binary_operation[name[level_pyramid] + constant[sub_2_super_index_frag.txt]]]]
call[name[logger].info, parameter[constant[pyramid built.]]]
variable[obj_pyramid] assign[=] call[name[pyramid], parameter[name[pyramid_folder], name[size_pyramid]]]
call[name[pyramid_handle].close, parameter[]]
return[name[obj_pyramid]]
|
keyword[def] identifier[build_and_filter] ( identifier[base_folder] , identifier[size_pyramid] , identifier[factor] , identifier[thresh_factor] = literal[int] ):
literal[string]
identifier[min_bin_per_contig] = literal[int]
identifier[fact_sub_sampling] = identifier[factor]
identifier[all_pyramid_folder] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_folder] , literal[string] )
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[all_pyramid_folder] )):
identifier[os] . identifier[mkdir] ( identifier[all_pyramid_folder] )
identifier[init_pyramid_folder] = identifier[os] . identifier[path] . identifier[join] (
identifier[all_pyramid_folder] , literal[string] + identifier[str] ( literal[int] )+ literal[string]
)
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[init_pyramid_folder] )):
identifier[init_size_pyramid] = literal[int]
identifier[build] ( identifier[base_folder] , identifier[init_size_pyramid] , identifier[factor] , identifier[min_bin_per_contig] )
identifier[init_pyramid_folder_level_0] = identifier[os] . identifier[path] . identifier[join] ( identifier[init_pyramid_folder] , literal[string] )
identifier[contig_info] = identifier[os] . identifier[path] . identifier[join] (
identifier[init_pyramid_folder_level_0] , literal[string]
)
identifier[fragments_list] = identifier[os] . identifier[path] . identifier[join] (
identifier[init_pyramid_folder_level_0] , literal[string]
)
identifier[init_abs_fragments_contacts] = identifier[os] . identifier[path] . identifier[join] (
identifier[init_pyramid_folder_level_0] , literal[string]
)
identifier[init_pyramid_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[init_pyramid_folder] , literal[string] )
identifier[pyramid_folder] = identifier[os] . identifier[path] . identifier[join] (
identifier[all_pyramid_folder] , literal[string] + identifier[str] ( identifier[size_pyramid] )+ literal[string]
)
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[pyramid_folder] )):
identifier[os] . identifier[mkdir] ( identifier[pyramid_folder] )
identifier[level] = literal[int]
identifier[pyramid_level_folder] = identifier[os] . identifier[path] . identifier[join] ( identifier[pyramid_folder] , literal[string] + identifier[str] ( identifier[level] ))
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[pyramid_level_folder] )):
identifier[os] . identifier[mkdir] ( identifier[pyramid_level_folder] )
identifier[current_contig_info] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[str] ( identifier[level] )+ literal[string]
)
identifier[current_frag_list] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[str] ( identifier[level] )+ literal[string]
)
identifier[current_abs_fragments_contacts] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[str] ( identifier[level] )+ literal[string]
)
keyword[if] keyword[not] (
identifier[os] . identifier[path] . identifier[exists] ( identifier[current_contig_info] )
keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[current_frag_list] )
keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[current_abs_fragments_contacts] )
):
identifier[logger] . identifier[info] ( literal[string] )
identifier[pyramid_0] = identifier[h5py] . identifier[File] ( identifier[init_pyramid_file] )
identifier[remove_problematic_fragments] (
identifier[contig_info] ,
identifier[fragments_list] ,
identifier[init_abs_fragments_contacts] ,
identifier[current_contig_info] ,
identifier[current_frag_list] ,
identifier[current_abs_fragments_contacts] ,
identifier[pyramid_0] ,
identifier[thresh_factor] = identifier[thresh_factor] ,
)
identifier[pyramid_0] . identifier[close] ()
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[hdf5_pyramid_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[pyramid_folder] , literal[string] )
identifier[pyramid_handle] = identifier[h5py] . identifier[File] ( identifier[hdf5_pyramid_file] )
identifier[pyramid_level_folder] = identifier[os] . identifier[path] . identifier[join] ( identifier[pyramid_folder] , literal[string] + identifier[str] ( identifier[level] ))
identifier[level_pyramid] = identifier[str] ( identifier[level] )+ literal[string]
identifier[sub_2_super_frag_index_file] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[level_pyramid] + literal[string]
)
keyword[for] identifier[level] keyword[in] identifier[range] ( literal[int] , identifier[size_pyramid] ):
identifier[pyramid_level_folder] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_folder] , literal[string] + identifier[str] ( identifier[level] )
)
keyword[if] keyword[not] ( identifier[os] . identifier[path] . identifier[exists] ( identifier[pyramid_level_folder] )):
identifier[os] . identifier[mkdir] ( identifier[pyramid_level_folder] )
identifier[level_pyramid] = identifier[str] ( identifier[level] )+ literal[string]
identifier[new_contig_list_file] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[level_pyramid] + literal[string]
)
identifier[new_fragments_list_file] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[level_pyramid] + literal[string]
)
identifier[new_abs_fragments_contacts_file] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[level_pyramid] + literal[string]
)
keyword[if] identifier[level] > literal[int] :
keyword[if] (
identifier[os] . identifier[path] . identifier[exists] ( identifier[new_contig_list_file] )
keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[new_fragments_list_file] )
keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[new_abs_fragments_contacts_file] )
keyword[and] identifier[os] . identifier[path] . identifier[exists] (( identifier[sub_2_super_frag_index_file] ))
):
identifier[logger] . identifier[info] ( literal[string] )
identifier[nfrags] = identifier[file_len] ( identifier[new_fragments_list_file] )- literal[int]
keyword[else] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[nfrags] = identifier[subsample_data_set] (
identifier[current_contig_info] ,
identifier[current_frag_list] ,
identifier[fact_sub_sampling] ,
identifier[current_abs_fragments_contacts] ,
identifier[new_abs_fragments_contacts_file] ,
identifier[min_bin_per_contig] ,
identifier[new_contig_list_file] ,
identifier[new_fragments_list_file] ,
identifier[sub_2_super_frag_index_file] ,
)
keyword[else] :
keyword[if] (
identifier[os] . identifier[path] . identifier[exists] ( identifier[new_contig_list_file] )
keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[new_fragments_list_file] )
keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[new_abs_fragments_contacts_file] )
):
identifier[logger] . identifier[info] ( literal[string] )
identifier[nfrags] = identifier[file_len] ( identifier[new_fragments_list_file] )- literal[int]
keyword[try] :
identifier[status] = identifier[pyramid_handle] . identifier[attrs] [ identifier[str] ( identifier[level] )]== literal[string]
keyword[except] identifier[KeyError] :
identifier[pyramid_handle] . identifier[attrs] [ identifier[str] ( identifier[level] )]= literal[string]
identifier[status] = keyword[False]
keyword[if] keyword[not] ( identifier[status] ):
identifier[logger] . identifier[info] ( literal[string] )
identifier[fill_sparse_pyramid_level] (
identifier[pyramid_handle] , identifier[level] , identifier[new_abs_fragments_contacts_file] , identifier[nfrags]
)
identifier[pyramid_handle] . identifier[attrs] [ identifier[str] ( identifier[level] )]= literal[string]
identifier[current_frag_list] = identifier[new_fragments_list_file]
identifier[current_contig_info] = identifier[new_contig_list_file]
identifier[current_abs_fragments_contacts] = identifier[new_abs_fragments_contacts_file]
identifier[sub_2_super_frag_index_file] = identifier[os] . identifier[path] . identifier[join] (
identifier[pyramid_level_folder] , identifier[level_pyramid] + literal[string]
)
identifier[logger] . identifier[info] ( literal[string] )
identifier[obj_pyramid] = identifier[pyramid] ( identifier[pyramid_folder] , identifier[size_pyramid] )
identifier[pyramid_handle] . identifier[close] ()
keyword[return] identifier[obj_pyramid]
|
def build_and_filter(base_folder, size_pyramid, factor, thresh_factor=1):
"""Build a filtered pyramid of contact maps
Build a fragment pyramid for multi-scale analysis and remove high-sparsity
(i.e. low-coverage) and short fragments.
Parameters
----------
base_folder : str or pathlib.Path
Where to create the hdf5 files containing the matrices.
size_pyramid : int
How many levels (contact maps of decreasing resolution) to generate.
factor : int
Subsampling factor (binning) from one level to the next.
thresh_factor : float, optional
Number of standard deviations below the mean coverage beyond which
lesser covered fragments will be discarded. Default is 1.
Returns
-------
obj_pyramid : Pyramid
The pyramid object containing all the levels.
"""
min_bin_per_contig = 1
fact_sub_sampling = factor
all_pyramid_folder = os.path.join(base_folder, 'pyramids')
if not os.path.exists(all_pyramid_folder):
os.mkdir(all_pyramid_folder) # depends on [control=['if'], data=[]]
init_pyramid_folder = os.path.join(all_pyramid_folder, 'pyramid_' + str(1) + '_no_thresh')
if not os.path.exists(init_pyramid_folder):
init_size_pyramid = 1
build(base_folder, init_size_pyramid, factor, min_bin_per_contig) # depends on [control=['if'], data=[]]
init_pyramid_folder_level_0 = os.path.join(init_pyramid_folder, 'level_0')
contig_info = os.path.join(init_pyramid_folder_level_0, '0_contig_info.txt')
fragments_list = os.path.join(init_pyramid_folder_level_0, '0_fragments_list.txt')
init_abs_fragments_contacts = os.path.join(init_pyramid_folder_level_0, '0_abs_frag_contacts.txt')
init_pyramid_file = os.path.join(init_pyramid_folder, 'pyramid.hdf5')
pyramid_folder = os.path.join(all_pyramid_folder, 'pyramid_' + str(size_pyramid) + '_thresh_auto')
if not os.path.exists(pyramid_folder):
os.mkdir(pyramid_folder) # depends on [control=['if'], data=[]]
level = 0
pyramid_level_folder = os.path.join(pyramid_folder, 'level_' + str(level))
if not os.path.exists(pyramid_level_folder):
os.mkdir(pyramid_level_folder) # depends on [control=['if'], data=[]]
current_contig_info = os.path.join(pyramid_level_folder, str(level) + '_contig_info.txt')
current_frag_list = os.path.join(pyramid_level_folder, str(level) + '_fragments_list.txt')
current_abs_fragments_contacts = os.path.join(pyramid_level_folder, str(level) + '_abs_frag_contacts.txt')
if not (os.path.exists(current_contig_info) and os.path.exists(current_frag_list) and os.path.exists(current_abs_fragments_contacts)):
logger.info('start filtering')
pyramid_0 = h5py.File(init_pyramid_file)
remove_problematic_fragments(contig_info, fragments_list, init_abs_fragments_contacts, current_contig_info, current_frag_list, current_abs_fragments_contacts, pyramid_0, thresh_factor=thresh_factor)
pyramid_0.close() # depends on [control=['if'], data=[]]
else:
#
logger.info('filtering already done...')
hdf5_pyramid_file = os.path.join(pyramid_folder, 'pyramid.hdf5')
pyramid_handle = h5py.File(hdf5_pyramid_file)
pyramid_level_folder = os.path.join(pyramid_folder, 'level_' + str(level))
level_pyramid = str(level) + '_'
sub_2_super_frag_index_file = os.path.join(pyramid_level_folder, level_pyramid + 'sub_2_super_index_frag.txt')
for level in range(0, size_pyramid):
pyramid_level_folder = os.path.join(pyramid_folder, 'level_' + str(level))
if not os.path.exists(pyramid_level_folder):
os.mkdir(pyramid_level_folder) # depends on [control=['if'], data=[]]
level_pyramid = str(level) + '_'
new_contig_list_file = os.path.join(pyramid_level_folder, level_pyramid + 'contig_info.txt')
new_fragments_list_file = os.path.join(pyramid_level_folder, level_pyramid + 'fragments_list.txt')
new_abs_fragments_contacts_file = os.path.join(pyramid_level_folder, level_pyramid + 'abs_frag_contacts.txt')
if level > 0:
if os.path.exists(new_contig_list_file) and os.path.exists(new_fragments_list_file) and os.path.exists(new_abs_fragments_contacts_file) and os.path.exists(sub_2_super_frag_index_file):
logger.info('level already built')
nfrags = file_len(new_fragments_list_file) - 1 # depends on [control=['if'], data=[]]
else: # this should never happen !!!
logger.info('writing new_files..')
nfrags = subsample_data_set(current_contig_info, current_frag_list, fact_sub_sampling, current_abs_fragments_contacts, new_abs_fragments_contacts_file, min_bin_per_contig, new_contig_list_file, new_fragments_list_file, sub_2_super_frag_index_file) # depends on [control=['if'], data=[]]
elif os.path.exists(new_contig_list_file) and os.path.exists(new_fragments_list_file) and os.path.exists(new_abs_fragments_contacts_file):
logger.info('level already built...')
nfrags = file_len(new_fragments_list_file) - 1 # depends on [control=['if'], data=[]]
try:
status = pyramid_handle.attrs[str(level)] == 'done' # depends on [control=['try'], data=[]]
except KeyError:
pyramid_handle.attrs[str(level)] = 'pending'
status = False # depends on [control=['except'], data=[]]
if not status:
logger.info('Start filling the pyramid')
# level_to_fill = pyramid_handle.create_dataset(str(level),
# (nfrags,nfrags), 'i')
# fill_pyramid_level(level_to_fill,new_abs_fragments_contacts_file,
# size_chunk,nfrags)
fill_sparse_pyramid_level(pyramid_handle, level, new_abs_fragments_contacts_file, nfrags)
pyramid_handle.attrs[str(level)] = 'done' # depends on [control=['if'], data=[]]
current_frag_list = new_fragments_list_file
current_contig_info = new_contig_list_file
current_abs_fragments_contacts = new_abs_fragments_contacts_file
sub_2_super_frag_index_file = os.path.join(pyramid_level_folder, level_pyramid + 'sub_2_super_index_frag.txt') # depends on [control=['for'], data=['level']]
logger.info('pyramid built.')
obj_pyramid = pyramid(pyramid_folder, size_pyramid)
pyramid_handle.close()
return obj_pyramid
|
def resp_graph_raw(dataframe, image_name, dir='./'):
"""Response time graph for raw data
:param pandas.DataFrame dataframe: the raw results dataframe
:param str image_name: the output file name
:param str dir: the output directory
:return: None
"""
factor = int(len(dataframe) / 10)
df = dataframe.reset_index()
fig = pygal.Dot(stroke=False,
x_label_rotation=25,
x_title='Elapsed Time In Test (secs)',
y_title='Average Response Time (secs)',
js=('scripts/pygal-tooltip.min.js',))
try:
grp = df.groupby(pd.cut(df.index, np.arange(0, len(df), factor)))
fig.x_labels = [x for x in grp.first()['epoch']]
fig.title = image_name.split('.')[0]
fig.add('Time', [x for x in grp.describe()['scriptrun_time'].unstack()['mean'].round(2)])
except ZeroDivisionError:
print("Not enough data for raw graph")
fig.render_to_file(filename=os.path.join(dir, image_name))
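# A hedged, minimal sketch (not from the source) with fabricated timing data
# in the two columns the function reads, 'epoch' and 'scriptrun_time'.
# Requires pandas, numpy and pygal to actually render the SVG.
import numpy as np
import pandas as pd

df = pd.DataFrame({
    'epoch': np.arange(100),                           # elapsed seconds
    'scriptrun_time': np.random.uniform(0.1, 2.0, 100),
})
resp_graph_raw(df, 'response_times.svg', dir='./')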
|
def function[resp_graph_raw, parameter[dataframe, image_name, dir]]:
constant[Response time graph for raw data
:param pandas.DataFrame dataframe: the raw results dataframe
:param str image_name: the output file name
:param str dir: the output directory
:return: None
]
variable[factor] assign[=] call[name[int], parameter[binary_operation[call[name[len], parameter[name[dataframe]]] / constant[10]]]]
variable[df] assign[=] call[name[dataframe].reset_index, parameter[]]
variable[fig] assign[=] call[name[pygal].Dot, parameter[]]
<ast.Try object at 0x7da204566c80>
call[name[fig].render_to_file, parameter[]]
|
keyword[def] identifier[resp_graph_raw] ( identifier[dataframe] , identifier[image_name] , identifier[dir] = literal[string] ):
literal[string]
identifier[factor] = identifier[int] ( identifier[len] ( identifier[dataframe] )/ literal[int] )
identifier[df] = identifier[dataframe] . identifier[reset_index] ()
identifier[fig] = identifier[pygal] . identifier[Dot] ( identifier[stroke] = keyword[False] ,
identifier[x_label_rotation] = literal[int] ,
identifier[x_title] = literal[string] ,
identifier[y_title] = literal[string] ,
identifier[js] =( literal[string] ,))
keyword[try] :
identifier[grp] = identifier[df] . identifier[groupby] ( identifier[pd] . identifier[cut] ( identifier[df] . identifier[index] , identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[df] ), identifier[factor] )))
identifier[fig] . identifier[x_labels] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[grp] . identifier[first] ()[ literal[string] ]]
identifier[fig] . identifier[title] = identifier[image_name] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[fig] . identifier[add] ( literal[string] ,[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[grp] . identifier[describe] ()[ literal[string] ]. identifier[unstack] ()[ literal[string] ]. identifier[round] ( literal[int] )])
keyword[except] identifier[ZeroDivisionError] :
identifier[print] ( literal[string] )
identifier[fig] . identifier[render_to_file] ( identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir] , identifier[image_name] ))
|
def resp_graph_raw(dataframe, image_name, dir='./'):
"""Response time graph for raw data
:param pandas.DataFrame dataframe: the raw results dataframe
:param str image_name: the output file name
:param str dir: the output directory
:return: None
"""
factor = int(len(dataframe) / 10)
df = dataframe.reset_index()
fig = pygal.Dot(stroke=False, x_label_rotation=25, x_title='Elapsed Time In Test (secs)', y_title='Average Response Time (secs)', js=('scripts/pygal-tooltip.min.js',))
try:
grp = df.groupby(pd.cut(df.index, np.arange(0, len(df), factor)))
fig.x_labels = [x for x in grp.first()['epoch']]
fig.title = image_name.split('.')[0]
fig.add('Time', [x for x in grp.describe()['scriptrun_time'].unstack()['mean'].round(2)]) # depends on [control=['try'], data=[]]
except ZeroDivisionError:
print('Not enough data for raw graph') # depends on [control=['except'], data=[]]
fig.render_to_file(filename=os.path.join(dir, image_name))
|
def prepare(self, node):
"""
Initialise values to prepare typing computation.
Reorder functions to avoid dependency issues and prepare typing
computation setting typing values for Pythonic functions.
"""
def register(name, module):
""" Recursively save function typing and combiners for Pythonic."""
for fname, function in module.items():
if isinstance(function, dict):
register(name + "::" + fname, function)
else:
tname = 'pythonic::{0}::functor::{1}'.format(name, fname)
self.result[function] = self.builder.NamedType(tname)
self.combiners[function] = function
if isinstance(function, Class):
register(name + "::" + fname, function.fields)
for mname, module in MODULES.items():
register(mname, module)
super(Types, self).prepare(node)
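# A standalone toy (not from the source) mirroring how `register` recurses
# through nested module dictionaries and builds pythonic::-style names for
# the leaves; the dictionary contents here are invented.
def walk(name, module, out):
    for fname, function in module.items():
        if isinstance(function, dict):
            walk(name + '::' + fname, function, out)
        else:
            out[function] = 'pythonic::{0}::functor::{1}'.format(name, fname)

result = {}
walk('numpy', {'linalg': {'norm': 'norm_fn'}, 'zeros': 'zeros_fn'}, result)
print(result)
# {'norm_fn': 'pythonic::numpy::linalg::functor::norm',
#  'zeros_fn': 'pythonic::numpy::functor::zeros'}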
|
def function[prepare, parameter[self, node]]:
constant[
Initialise values to prepare typing computation.
Reorder functions to avoid dependency issues and prepare typing
computation setting typing values for Pythonic functions.
]
def function[register, parameter[name, module]]:
constant[ Recursively save function typing and combiners for Pythonic.]
for taget[tuple[[<ast.Name object at 0x7da204960e50>, <ast.Name object at 0x7da204961c00>]]] in starred[call[name[module].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[function], name[dict]]] begin[:]
call[name[register], parameter[binary_operation[binary_operation[name[name] + constant[::]] + name[fname]], name[function]]]
for taget[tuple[[<ast.Name object at 0x7da204963a60>, <ast.Name object at 0x7da204961840>]]] in starred[call[name[MODULES].items, parameter[]]] begin[:]
call[name[register], parameter[name[mname], name[module]]]
call[call[name[super], parameter[name[Types], name[self]]].prepare, parameter[name[node]]]
|
keyword[def] identifier[prepare] ( identifier[self] , identifier[node] ):
literal[string]
keyword[def] identifier[register] ( identifier[name] , identifier[module] ):
literal[string]
keyword[for] identifier[fname] , identifier[function] keyword[in] identifier[module] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[function] , identifier[dict] ):
identifier[register] ( identifier[name] + literal[string] + identifier[fname] , identifier[function] )
keyword[else] :
identifier[tname] = literal[string] . identifier[format] ( identifier[name] , identifier[fname] )
identifier[self] . identifier[result] [ identifier[function] ]= identifier[self] . identifier[builder] . identifier[NamedType] ( identifier[tname] )
identifier[self] . identifier[combiners] [ identifier[function] ]= identifier[function]
keyword[if] identifier[isinstance] ( identifier[function] , identifier[Class] ):
identifier[register] ( identifier[name] + literal[string] + identifier[fname] , identifier[function] . identifier[fields] )
keyword[for] identifier[mname] , identifier[module] keyword[in] identifier[MODULES] . identifier[items] ():
identifier[register] ( identifier[mname] , identifier[module] )
identifier[super] ( identifier[Types] , identifier[self] ). identifier[prepare] ( identifier[node] )
|
def prepare(self, node):
"""
Initialise values to prepare typing computation.
Reorder functions to avoid dependency issues and prepare typing
computation setting typing values for Pythonic functions.
"""
def register(name, module):
""" Recursively save function typing and combiners for Pythonic."""
for (fname, function) in module.items():
if isinstance(function, dict):
register(name + '::' + fname, function) # depends on [control=['if'], data=[]]
else:
tname = 'pythonic::{0}::functor::{1}'.format(name, fname)
self.result[function] = self.builder.NamedType(tname)
self.combiners[function] = function
if isinstance(function, Class):
register(name + '::' + fname, function.fields) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for (mname, module) in MODULES.items():
register(mname, module) # depends on [control=['for'], data=[]]
super(Types, self).prepare(node)
|
def delete(self, drop_database=True):
"""
Delete the site entry
@param drop_database: Drop the site's associated MySQL database
@type drop_database: bool
"""
self.disable()
Session.delete(self)
if drop_database and self.db_name:
mysql = create_engine('mysql://root:secret@localhost')
mysql.execute('DROP DATABASE IF EXISTS `{db}`'.format(db=self.db_name))
try:
mysql.execute('DROP USER `{u}`'.format(u=self.db_user))
except SQLAlchemyError:
pass
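# A hedged usage sketch (not from the source): `site` is assumed to be a
# previously loaded Site entity. Passing drop_database=False removes the
# record but keeps its MySQL database and user for a later restore.
def remove_site(site, keep_database=True):
    site.delete(drop_database=not keep_database)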
|
def function[delete, parameter[self, drop_database]]:
constant[
Delete the site entry
@param drop_database: Drop the site's associated MySQL database
@type drop_database: bool
]
call[name[self].disable, parameter[]]
call[name[Session].delete, parameter[name[self]]]
if <ast.BoolOp object at 0x7da204567d30> begin[:]
variable[mysql] assign[=] call[name[create_engine], parameter[constant[mysql://root:secret@localhost]]]
call[name[mysql].execute, parameter[call[constant[DROP DATABASE IF EXISTS `{db}`].format, parameter[]]]]
<ast.Try object at 0x7da204564d30>
|
keyword[def] identifier[delete] ( identifier[self] , identifier[drop_database] = keyword[True] ):
literal[string]
identifier[self] . identifier[disable] ()
identifier[Session] . identifier[delete] ( identifier[self] )
keyword[if] identifier[drop_database] keyword[and] identifier[self] . identifier[db_name] :
identifier[mysql] = identifier[create_engine] ( literal[string] )
identifier[mysql] . identifier[execute] ( literal[string] . identifier[format] ( identifier[db] = identifier[self] . identifier[db_name] ))
keyword[try] :
identifier[mysql] . identifier[execute] ( literal[string] . identifier[format] ( identifier[u] = identifier[self] . identifier[db_user] ))
keyword[except] identifier[SQLAlchemyError] :
keyword[pass]
|
def delete(self, drop_database=True):
"""
Delete the site entry
@param drop_database: Drop the site's associated MySQL database
@type drop_database: bool
"""
self.disable()
Session.delete(self)
if drop_database and self.db_name:
mysql = create_engine('mysql://root:secret@localhost')
mysql.execute('DROP DATABASE IF EXISTS `{db}`'.format(db=self.db_name))
try:
mysql.execute('DROP USER `{u}`'.format(u=self.db_user)) # depends on [control=['try'], data=[]]
except SQLAlchemyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
|
def chassis_name(self, **kwargs):
"""Get device's chassis name/Model.
Args:
rbridge_id (str): The rbridge ID of the device
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `rbridge_id` is not specified.
Examples:
>>> import pynos.device
>>> conn = ('10.24.39.211', '22')
>>> auth = ('admin', 'password')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.system.chassis_name(rbridge_id='225')
... assert output == 'VDX6740'
"""
namespace = "urn:brocade.com:mgmt:brocade-rbridge"
rbridge_id = kwargs.pop('rbridge_id', '1')
chassis_name = ' '
callback = kwargs.pop('callback', self._callback)
rid_args = dict(rbridge_id=rbridge_id, chassis_name=chassis_name)
rid = getattr(self._rbridge,
'rbridge_id_switch_attributes_chassis_name')
config = rid(**rid_args)
output = callback(config, handler='get_config')
chassis_name = output.data.find('.//{%s}chassis-name' % namespace).text
return chassis_name
|
def function[chassis_name, parameter[self]]:
constant[Get device's chassis name/Model.
Args:
rbridge_id (str): The rbridge ID of the device
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `rbridge_id` is not specified.
Examples:
>>> import pynos.device
>>> conn = ('10.24.39.211', '22')
>>> auth = ('admin', 'password')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.system.chassis_name(rbridge_id='225')
... assert output == 'VDX6740'
]
variable[namespace] assign[=] constant[urn:brocade.com:mgmt:brocade-rbridge]
variable[rbridge_id] assign[=] call[name[kwargs].pop, parameter[constant[rbridge_id], constant[1]]]
variable[chassis_name] assign[=] constant[ ]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
variable[rid_args] assign[=] call[name[dict], parameter[]]
variable[rid] assign[=] call[name[getattr], parameter[name[self]._rbridge, constant[rbridge_id_switch_attributes_chassis_name]]]
variable[config] assign[=] call[name[rid], parameter[]]
variable[output] assign[=] call[name[callback], parameter[name[config]]]
variable[chassis_name] assign[=] call[name[output].data.find, parameter[binary_operation[constant[.//{%s}chassis-name] <ast.Mod object at 0x7da2590d6920> name[namespace]]]].text
return[name[chassis_name]]
|
keyword[def] identifier[chassis_name] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[namespace] = literal[string]
identifier[rbridge_id] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[chassis_name] = literal[string]
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
identifier[rid_args] = identifier[dict] ( identifier[rbridge_id] = identifier[rbridge_id] , identifier[chassis_name] = identifier[chassis_name] )
identifier[rid] = identifier[getattr] ( identifier[self] . identifier[_rbridge] ,
literal[string] )
identifier[config] = identifier[rid] (** identifier[rid_args] )
identifier[output] = identifier[callback] ( identifier[config] , identifier[handler] = literal[string] )
identifier[chassis_name] = identifier[output] . identifier[data] . identifier[find] ( literal[string] % identifier[namespace] ). identifier[text]
keyword[return] identifier[chassis_name]
|
def chassis_name(self, **kwargs):
"""Get device's chassis name/Model.
Args:
rbridge_id (str): The rbridge ID of the device
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `rbridge_id` is not specified.
Examples:
>>> import pynos.device
>>> conn = ('10.24.39.211', '22')
>>> auth = ('admin', 'password')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.system.chassis_name(rbridge_id='225')
... assert output == 'VDX6740'
"""
namespace = 'urn:brocade.com:mgmt:brocade-rbridge'
rbridge_id = kwargs.pop('rbridge_id', '1')
chassis_name = ' '
callback = kwargs.pop('callback', self._callback)
rid_args = dict(rbridge_id=rbridge_id, chassis_name=chassis_name)
rid = getattr(self._rbridge, 'rbridge_id_switch_attributes_chassis_name')
config = rid(**rid_args)
output = callback(config, handler='get_config')
chassis_name = output.data.find('.//{%s}chassis-name' % namespace).text
return chassis_name
|
def enable_global_annotations_decorator(flag=True, retrospective=True):
"""Enables or disables global annotation mode via decorators.
See flag global_annotations_decorator.
In contrast to setting the flag directly, this function provides
a retrospective option. If retrospective is true, this will also
affect already imported modules, not only future imports.
"""
global global_annotations_decorator
global_annotations_decorator = flag
if import_hook_enabled:
_install_import_hook()
if global_annotations_decorator and retrospective:
_catch_up_global_annotations_decorator()
return global_annotations_decorator
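# A self-contained sketch (not from the source) of the same flag-then-catch-up
# pattern, with invented stand-in names for the real import-hook machinery.
_seen_modules = ['already_imported_mod']   # modules imported before enabling
feature_flag = False

def _catch_up():
    for mod in _seen_modules:              # retroactively process old imports
        print('retrofitting', mod)

def enable_feature(flag=True, retrospective=True):
    global feature_flag
    feature_flag = flag
    if feature_flag and retrospective:
        _catch_up()
    return feature_flag

enable_feature()    # prints: retrofitting already_imported_mod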
|
def function[enable_global_annotations_decorator, parameter[flag, retrospective]]:
constant[Enables or disables global annotation mode via decorators.
See flag global_annotations_decorator.
In contrast to setting the flag directly, this function provides
a retrospective option. If retrospective is true, this will also
affect already imported modules, not only future imports.
]
<ast.Global object at 0x7da1b0ddd4b0>
variable[global_annotations_decorator] assign[=] name[flag]
if name[import_hook_enabled] begin[:]
call[name[_install_import_hook], parameter[]]
if <ast.BoolOp object at 0x7da1b0ddd030> begin[:]
call[name[_catch_up_global_annotations_decorator], parameter[]]
return[name[global_annotations_decorator]]
|
keyword[def] identifier[enable_global_annotations_decorator] ( identifier[flag] = keyword[True] , identifier[retrospective] = keyword[True] ):
literal[string]
keyword[global] identifier[global_annotations_decorator]
identifier[global_annotations_decorator] = identifier[flag]
keyword[if] identifier[import_hook_enabled] :
identifier[_install_import_hook] ()
keyword[if] identifier[global_annotations_decorator] keyword[and] identifier[retrospective] :
identifier[_catch_up_global_annotations_decorator] ()
keyword[return] identifier[global_annotations_decorator]
|
def enable_global_annotations_decorator(flag=True, retrospective=True):
"""Enables or disables global annotation mode via decorators.
See flag global_annotations_decorator.
In contrast to setting the flag directly, this function provides
a retrospective option. If retrospective is true, this will also
affect already imported modules, not only future imports.
"""
global global_annotations_decorator
global_annotations_decorator = flag
if import_hook_enabled:
_install_import_hook() # depends on [control=['if'], data=[]]
if global_annotations_decorator and retrospective:
_catch_up_global_annotations_decorator() # depends on [control=['if'], data=[]]
return global_annotations_decorator
|
def pipe(data, *fns):
"""Apply functions recursively on your data
:param data: the data
:param fns: functions
:returns: an object
>>> inc = lambda x: x + 1
>>> pipe(42, inc, str)
'43'
"""
return reduce(lambda acc, f: f(acc), fns, data)
|
def function[pipe, parameter[data]]:
constant[Apply functions in sequence to your data
:param data: the data
:param fns: functions
:returns: an object
>>> inc = lambda x: x + 1
>>> pipe(42, inc, str)
'43'
]
return[call[name[reduce], parameter[<ast.Lambda object at 0x7da1b1418f70>, name[fns], name[data]]]]
|
keyword[def] identifier[pipe] ( identifier[data] ,* identifier[fns] ):
literal[string]
keyword[return] identifier[reduce] ( keyword[lambda] identifier[acc] , identifier[f] : identifier[f] ( identifier[acc] ), identifier[fns] , identifier[data] )
|
def pipe(data, *fns):
"""Apply functions recursively on your data
:param data: the data
:param fns: functions
:returns: an object
>>> inc = lambda x: x + 1
>>> pipe(42, inc, str)
'43'
"""
return reduce(lambda acc, f: f(acc), fns, data)
|
def write_int8(self, value, little_endian=True):
"""
Pack the value as a signed byte and write 1 byte to the stream.
Args:
value (int): the value to pack, in the signed-byte range -128..127.
little_endian (bool): specify the endianness. (Default) Little endian.
Returns:
int: the number of bytes written.
"""
if little_endian:
endian = "<"
else:
endian = ">"
return self.pack('%sb' % endian, value)
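# A concrete check (not from the source) of the packing behaviour: '<b' packs
# one signed byte, so values must lie in -128..127 and -1 becomes 0xff. For a
# single byte the endianness flag has no effect on the output.
import struct

print(struct.pack('<b', -1))    # b'\xff'
print(struct.pack('<b', 127))   # b'\x7f'
# struct.pack('<b', 200) raises struct.error: out of signed-byte range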
|
def function[write_int8, parameter[self, value, little_endian]]:
constant[
Pack the value as a signed byte and write 1 byte to the stream.
Args:
value (int): the value to pack, in the signed-byte range -128..127.
little_endian (bool): specify the endianness. (Default) Little endian.
Returns:
int: the number of bytes written.
]
if name[little_endian] begin[:]
variable[endian] assign[=] constant[<]
return[call[name[self].pack, parameter[binary_operation[constant[%sb] <ast.Mod object at 0x7da2590d6920> name[endian]], name[value]]]]
|
keyword[def] identifier[write_int8] ( identifier[self] , identifier[value] , identifier[little_endian] = keyword[True] ):
literal[string]
keyword[if] identifier[little_endian] :
identifier[endian] = literal[string]
keyword[else] :
identifier[endian] = literal[string]
keyword[return] identifier[self] . identifier[pack] ( literal[string] % identifier[endian] , identifier[value] )
|
def write_int8(self, value, little_endian=True):
"""
Pack the value as a signed byte and write 1 byte to the stream.
Args:
value (int): the value to pack, in the signed-byte range -128..127.
little_endian (bool): specify the endianness. (Default) Little endian.
Returns:
int: the number of bytes written.
"""
if little_endian:
endian = '<' # depends on [control=['if'], data=[]]
else:
endian = '>'
return self.pack('%sb' % endian, value)
|
def _mulf16(ins):
""" Multiplies 2 32bit (16.16) fixed point numbers. The result is pushed onto the stack.
"""
op1, op2 = tuple(ins.quad[2:])
if _f_ops(op1, op2) is not None:
op1, op2 = _f_ops(op1, op2)
if op2 == 1: # A * 1 => A
output = _f16_oper(op1)
output.append('push de')
output.append('push hl')
return output
if op2 == -1:
return _neg32(ins)
output = _f16_oper(op1)
if op2 == 0:
output.append('ld hl, 0')
output.append('ld e, h')
output.append('ld d, l')
output.append('push de')
output.append('push hl')
return output
output = _f16_oper(op1, str(op2))
output.append('call __MULF16')
output.append('push de')
output.append('push hl')
REQUIRES.add('mulf16.asm')
return output
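# A pure-Python sketch (not from the source) of the 16.16 fixed-point multiply
# that the __MULF16 routine performs in assembly: multiply the raw 32-bit
# values and shift right by 16 to drop the surplus fractional bits.
def to_f16(x):
    return int(round(x * 65536))      # encode as 16.16 fixed point

def mulf16(a, b):
    return (a * b) >> 16              # raw product carries 32 fractional bits

a, b = to_f16(1.5), to_f16(2.25)
print(mulf16(a, b) / 65536.0)         # 3.375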
|
def function[_mulf16, parameter[ins]]:
constant[ Multiplies two 32-bit (16.16) fixed-point numbers. The result is pushed onto the stack.
]
<ast.Tuple object at 0x7da1b06f9f30> assign[=] call[name[tuple], parameter[call[name[ins].quad][<ast.Slice object at 0x7da1b06face0>]]]
if compare[call[name[_f_ops], parameter[name[op1], name[op2]]] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b06f98a0> assign[=] call[name[_f_ops], parameter[name[op1], name[op2]]]
if compare[name[op2] equal[==] constant[1]] begin[:]
variable[output] assign[=] call[name[_f16_oper], parameter[name[op1]]]
call[name[output].append, parameter[constant[push de]]]
call[name[output].append, parameter[constant[push hl]]]
return[name[output]]
if compare[name[op2] equal[==] <ast.UnaryOp object at 0x7da1b06f8b20>] begin[:]
return[call[name[_neg32], parameter[name[ins]]]]
variable[output] assign[=] call[name[_f16_oper], parameter[name[op1]]]
if compare[name[op2] equal[==] constant[0]] begin[:]
call[name[output].append, parameter[constant[ld hl, 0]]]
call[name[output].append, parameter[constant[ld e, h]]]
call[name[output].append, parameter[constant[ld d, l]]]
call[name[output].append, parameter[constant[push de]]]
call[name[output].append, parameter[constant[push hl]]]
return[name[output]]
variable[output] assign[=] call[name[_f16_oper], parameter[name[op1], call[name[str], parameter[name[op2]]]]]
call[name[output].append, parameter[constant[call __MULF16]]]
call[name[output].append, parameter[constant[push de]]]
call[name[output].append, parameter[constant[push hl]]]
call[name[REQUIRES].add, parameter[constant[mulf16.asm]]]
return[name[output]]
|
keyword[def] identifier[_mulf16] ( identifier[ins] ):
literal[string]
identifier[op1] , identifier[op2] = identifier[tuple] ( identifier[ins] . identifier[quad] [ literal[int] :])
keyword[if] identifier[_f_ops] ( identifier[op1] , identifier[op2] ) keyword[is] keyword[not] keyword[None] :
identifier[op1] , identifier[op2] = identifier[_f_ops] ( identifier[op1] , identifier[op2] )
keyword[if] identifier[op2] == literal[int] :
identifier[output] = identifier[_f16_oper] ( identifier[op1] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[return] identifier[output]
keyword[if] identifier[op2] ==- literal[int] :
keyword[return] identifier[_neg32] ( identifier[ins] )
identifier[output] = identifier[_f16_oper] ( identifier[op1] )
keyword[if] identifier[op2] == literal[int] :
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
keyword[return] identifier[output]
identifier[output] = identifier[_f16_oper] ( identifier[op1] , identifier[str] ( identifier[op2] ))
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[output] . identifier[append] ( literal[string] )
identifier[REQUIRES] . identifier[add] ( literal[string] )
keyword[return] identifier[output]
|
def _mulf16(ins):
""" Multiplies 2 32bit (16.16) fixed point numbers. The result is pushed onto the stack.
"""
(op1, op2) = tuple(ins.quad[2:])
if _f_ops(op1, op2) is not None:
(op1, op2) = _f_ops(op1, op2)
if op2 == 1: # A * 1 => A
output = _f16_oper(op1)
output.append('push de')
output.append('push hl')
return output # depends on [control=['if'], data=[]]
if op2 == -1:
return _neg32(ins) # depends on [control=['if'], data=[]]
output = _f16_oper(op1)
if op2 == 0:
output.append('ld hl, 0')
output.append('ld e, h')
output.append('ld d, l')
output.append('push de')
output.append('push hl')
return output # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
output = _f16_oper(op1, str(op2))
output.append('call __MULF16')
output.append('push de')
output.append('push hl')
REQUIRES.add('mulf16.asm')
return output
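# A rough reference sketch of the 16.16 fixed-point arithmetic that the
# generated Z80 code and the __MULF16 routine implement; to_f16/from_f16
# are illustrative helpers, not part of the compiler backend.
def to_f16(x):
    # Encode a float as a signed 32-bit 16.16 fixed-point integer.
    return int(round(x * (1 << 16)))

def from_f16(n):
    # Decode a 16.16 fixed-point integer back to a float.
    return n / (1 << 16)

def mulf16(a, b):
    # The raw product of two 16.16 values is 32.32 fixed point, so
    # shift right by 16 bits to renormalize to 16.16.
    return (a * b) >> 16

print(from_f16(mulf16(to_f16(1.5), to_f16(2.25))))  # 3.375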
|
def ParseOptions(cls, options, configuration_object):
"""Parses and validates options.
Args:
options (argparse.Namespace): parser options.
configuration_object (CLITool): object to be configured by the argument
helper.
Raises:
BadConfigObject: when the configuration object is of the wrong type.
"""
if not isinstance(configuration_object, tools.CLITool):
raise errors.BadConfigObject(
'Configuration object is not an instance of CLITool')
storage_file = cls._ParseStringOption(options, 'storage_file')
setattr(configuration_object, '_storage_file_path', storage_file)
|
def function[ParseOptions, parameter[cls, options, configuration_object]]:
constant[Parses and validates options.
Args:
options (argparse.Namespace): parser options.
configuration_object (CLITool): object to be configured by the argument
helper.
Raises:
BadConfigObject: when the configuration object is of the wrong type.
]
if <ast.UnaryOp object at 0x7da2041d9480> begin[:]
<ast.Raise object at 0x7da2046230a0>
variable[storage_file] assign[=] call[name[cls]._ParseStringOption, parameter[name[options], constant[storage_file]]]
call[name[setattr], parameter[name[configuration_object], constant[_storage_file_path], name[storage_file]]]
|
keyword[def] identifier[ParseOptions] ( identifier[cls] , identifier[options] , identifier[configuration_object] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[configuration_object] , identifier[tools] . identifier[CLITool] ):
keyword[raise] identifier[errors] . identifier[BadConfigObject] (
literal[string] )
identifier[storage_file] = identifier[cls] . identifier[_ParseStringOption] ( identifier[options] , literal[string] )
identifier[setattr] ( identifier[configuration_object] , literal[string] , identifier[storage_file] )
|
def ParseOptions(cls, options, configuration_object):
"""Parses and validates options.
Args:
options (argparse.Namespace): parser options.
configuration_object (CLITool): object to be configured by the argument
helper.
Raises:
BadConfigObject: when the configuration object is of the wrong type.
"""
if not isinstance(configuration_object, tools.CLITool):
raise errors.BadConfigObject('Configuration object is not an instance of CLITool') # depends on [control=['if'], data=[]]
storage_file = cls._ParseStringOption(options, 'storage_file')
setattr(configuration_object, '_storage_file_path', storage_file)
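# A self-contained sketch of the same validate-then-configure pattern;
# FakeTool is a hypothetical stand-in for tools.CLITool.
import argparse

class FakeTool:
    pass

def parse_options(options, configuration_object):
    # Validate the target type first, then copy the parsed option onto it.
    if not isinstance(configuration_object, FakeTool):
        raise TypeError('Configuration object is not an instance of FakeTool')
    setattr(configuration_object, '_storage_file_path',
            getattr(options, 'storage_file', None))

tool = FakeTool()
parse_options(argparse.Namespace(storage_file='/tmp/plaso.sqlite'), tool)
print(tool._storage_file_path)  # /tmp/plaso.sqlite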
|
def load_param(params, ctx=None):
"""same as mx.model.load_checkpoint, but do not load symnet and will convert context"""
if ctx is None:
ctx = mx.cpu()
save_dict = mx.nd.load(params)
arg_params = {}
aux_params = {}
for k, v in save_dict.items():
tp, name = k.split(':', 1)
if tp == 'arg':
arg_params[name] = v.as_in_context(ctx)
if tp == 'aux':
aux_params[name] = v.as_in_context(ctx)
return arg_params, aux_params
|
def function[load_param, parameter[params, ctx]]:
    constant[Same as mx.model.load_checkpoint, but does not load symnet and converts the parameters to the given context]
if compare[name[ctx] is constant[None]] begin[:]
variable[ctx] assign[=] call[name[mx].cpu, parameter[]]
variable[save_dict] assign[=] call[name[mx].nd.load, parameter[name[params]]]
variable[arg_params] assign[=] dictionary[[], []]
variable[aux_params] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b2014e50>, <ast.Name object at 0x7da1b2016740>]]] in starred[call[name[save_dict].items, parameter[]]] begin[:]
<ast.Tuple object at 0x7da1b2014730> assign[=] call[name[k].split, parameter[constant[:], constant[1]]]
if compare[name[tp] equal[==] constant[arg]] begin[:]
call[name[arg_params]][name[name]] assign[=] call[name[v].as_in_context, parameter[name[ctx]]]
if compare[name[tp] equal[==] constant[aux]] begin[:]
call[name[aux_params]][name[name]] assign[=] call[name[v].as_in_context, parameter[name[ctx]]]
return[tuple[[<ast.Name object at 0x7da2054a7730>, <ast.Name object at 0x7da2054a61d0>]]]
|
keyword[def] identifier[load_param] ( identifier[params] , identifier[ctx] = keyword[None] ):
literal[string]
keyword[if] identifier[ctx] keyword[is] keyword[None] :
identifier[ctx] = identifier[mx] . identifier[cpu] ()
identifier[save_dict] = identifier[mx] . identifier[nd] . identifier[load] ( identifier[params] )
identifier[arg_params] ={}
identifier[aux_params] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[save_dict] . identifier[items] ():
identifier[tp] , identifier[name] = identifier[k] . identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[tp] == literal[string] :
identifier[arg_params] [ identifier[name] ]= identifier[v] . identifier[as_in_context] ( identifier[ctx] )
keyword[if] identifier[tp] == literal[string] :
identifier[aux_params] [ identifier[name] ]= identifier[v] . identifier[as_in_context] ( identifier[ctx] )
keyword[return] identifier[arg_params] , identifier[aux_params]
|
def load_param(params, ctx=None):
"""same as mx.model.load_checkpoint, but do not load symnet and will convert context"""
if ctx is None:
ctx = mx.cpu() # depends on [control=['if'], data=['ctx']]
save_dict = mx.nd.load(params)
arg_params = {}
aux_params = {}
for (k, v) in save_dict.items():
(tp, name) = k.split(':', 1)
if tp == 'arg':
arg_params[name] = v.as_in_context(ctx) # depends on [control=['if'], data=[]]
if tp == 'aux':
aux_params[name] = v.as_in_context(ctx) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return (arg_params, aux_params)
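# The 'arg:'/'aux:' key convention of MXNet checkpoints, demonstrated
# without mxnet: a plain dict stands in for mx.nd.load and dummy strings
# for NDArrays.
save_dict = {'arg:fc1_weight': 'w', 'arg:fc1_bias': 'b', 'aux:bn_moving_mean': 'm'}
arg_params, aux_params = {}, {}
for k, v in save_dict.items():
    # Split only on the first ':' so parameter names may contain colons.
    tp, name = k.split(':', 1)
    if tp == 'arg':
        arg_params[name] = v
    if tp == 'aux':
        aux_params[name] = v
print(arg_params)  # {'fc1_weight': 'w', 'fc1_bias': 'b'}
print(aux_params)  # {'bn_moving_mean': 'm'}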
|
def getreferingobjs(self, iddgroups=None, fields=None):
"""Get a list of objects that refer to this object"""
return getreferingobjs(self, iddgroups=iddgroups, fields=fields)
|
def function[getreferingobjs, parameter[self, iddgroups, fields]]:
constant[Get a list of objects that refer to this object]
return[call[name[getreferingobjs], parameter[name[self]]]]
|
keyword[def] identifier[getreferingobjs] ( identifier[self] , identifier[iddgroups] = keyword[None] , identifier[fields] = keyword[None] ):
literal[string]
keyword[return] identifier[getreferingobjs] ( identifier[self] , identifier[iddgroups] = identifier[iddgroups] , identifier[fields] = identifier[fields] )
|
def getreferingobjs(self, iddgroups=None, fields=None):
"""Get a list of objects that refer to this object"""
return getreferingobjs(self, iddgroups=iddgroups, fields=fields)
|
def _safe_cache(memory, func, **kwargs):
""" A wrapper for mem.cache that flushes the cache if the version
number of nibabel has changed.
"""
cachedir = memory.cachedir
if cachedir is None or cachedir in __CACHE_CHECKED:
return memory.cache(func, **kwargs)
version_file = os.path.join(cachedir, 'module_versions.json')
versions = dict()
if os.path.exists(version_file):
with open(version_file, 'r') as _version_file:
versions = json.load(_version_file)
modules = (nibabel, )
# Keep only the major + minor version numbers
my_versions = dict((m.__name__, LooseVersion(m.__version__).version[:2])
for m in modules)
commons = set(versions.keys()).intersection(set(my_versions.keys()))
collisions = [m for m in commons if versions[m] != my_versions[m]]
# Flush cache if version collision
if len(collisions) > 0:
if nilearn.CHECK_CACHE_VERSION:
warnings.warn("Incompatible cache in %s: "
"different version of nibabel. Deleting "
"the cache. Put nilearn.CHECK_CACHE_VERSION "
"to false to avoid this behavior."
% cachedir)
try:
tmp_dir = (os.path.split(cachedir)[:-1]
+ ('old_%i' % os.getpid(), ))
tmp_dir = os.path.join(*tmp_dir)
# We use rename + unlink to be more robust to race
# conditions
os.rename(cachedir, tmp_dir)
shutil.rmtree(tmp_dir)
except OSError:
# Another process could have removed this dir
pass
try:
os.makedirs(cachedir)
except OSError:
# File exists?
pass
else:
warnings.warn("Incompatible cache in %s: "
"old version of nibabel." % cachedir)
# Write json files if configuration is different
if versions != my_versions:
with open(version_file, 'w') as _version_file:
json.dump(my_versions, _version_file)
__CACHE_CHECKED[cachedir] = True
return memory.cache(func, **kwargs)
|
def function[_safe_cache, parameter[memory, func]]:
constant[ A wrapper for mem.cache that flushes the cache if the version
number of nibabel has changed.
]
variable[cachedir] assign[=] name[memory].cachedir
if <ast.BoolOp object at 0x7da1b008f4c0> begin[:]
return[call[name[memory].cache, parameter[name[func]]]]
variable[version_file] assign[=] call[name[os].path.join, parameter[name[cachedir], constant[module_versions.json]]]
variable[versions] assign[=] call[name[dict], parameter[]]
if call[name[os].path.exists, parameter[name[version_file]]] begin[:]
with call[name[open], parameter[name[version_file], constant[r]]] begin[:]
variable[versions] assign[=] call[name[json].load, parameter[name[_version_file]]]
variable[modules] assign[=] tuple[[<ast.Name object at 0x7da1b008d930>]]
variable[my_versions] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b008fb20>]]
variable[commons] assign[=] call[call[name[set], parameter[call[name[versions].keys, parameter[]]]].intersection, parameter[call[name[set], parameter[call[name[my_versions].keys, parameter[]]]]]]
variable[collisions] assign[=] <ast.ListComp object at 0x7da1b008c310>
if compare[call[name[len], parameter[name[collisions]]] greater[>] constant[0]] begin[:]
if name[nilearn].CHECK_CACHE_VERSION begin[:]
            call[name[warnings].warn, parameter[binary_operation[constant[Incompatible cache in %s: different version of nibabel. Deleting the cache. Set nilearn.CHECK_CACHE_VERSION to False to avoid this behavior.] <ast.Mod object at 0x7da2590d6920> name[cachedir]]]]
<ast.Try object at 0x7da1b008e470>
<ast.Try object at 0x7da1b0089300>
if compare[name[versions] not_equal[!=] name[my_versions]] begin[:]
with call[name[open], parameter[name[version_file], constant[w]]] begin[:]
call[name[json].dump, parameter[name[my_versions], name[_version_file]]]
call[name[__CACHE_CHECKED]][name[cachedir]] assign[=] constant[True]
return[call[name[memory].cache, parameter[name[func]]]]
|
keyword[def] identifier[_safe_cache] ( identifier[memory] , identifier[func] ,** identifier[kwargs] ):
literal[string]
identifier[cachedir] = identifier[memory] . identifier[cachedir]
keyword[if] identifier[cachedir] keyword[is] keyword[None] keyword[or] identifier[cachedir] keyword[in] identifier[__CACHE_CHECKED] :
keyword[return] identifier[memory] . identifier[cache] ( identifier[func] ,** identifier[kwargs] )
identifier[version_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[cachedir] , literal[string] )
identifier[versions] = identifier[dict] ()
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[version_file] ):
keyword[with] identifier[open] ( identifier[version_file] , literal[string] ) keyword[as] identifier[_version_file] :
identifier[versions] = identifier[json] . identifier[load] ( identifier[_version_file] )
identifier[modules] =( identifier[nibabel] ,)
identifier[my_versions] = identifier[dict] (( identifier[m] . identifier[__name__] , identifier[LooseVersion] ( identifier[m] . identifier[__version__] ). identifier[version] [: literal[int] ])
keyword[for] identifier[m] keyword[in] identifier[modules] )
identifier[commons] = identifier[set] ( identifier[versions] . identifier[keys] ()). identifier[intersection] ( identifier[set] ( identifier[my_versions] . identifier[keys] ()))
identifier[collisions] =[ identifier[m] keyword[for] identifier[m] keyword[in] identifier[commons] keyword[if] identifier[versions] [ identifier[m] ]!= identifier[my_versions] [ identifier[m] ]]
keyword[if] identifier[len] ( identifier[collisions] )> literal[int] :
keyword[if] identifier[nilearn] . identifier[CHECK_CACHE_VERSION] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string]
literal[string]
literal[string]
% identifier[cachedir] )
keyword[try] :
identifier[tmp_dir] =( identifier[os] . identifier[path] . identifier[split] ( identifier[cachedir] )[:- literal[int] ]
+( literal[string] % identifier[os] . identifier[getpid] (),))
identifier[tmp_dir] = identifier[os] . identifier[path] . identifier[join] (* identifier[tmp_dir] )
identifier[os] . identifier[rename] ( identifier[cachedir] , identifier[tmp_dir] )
identifier[shutil] . identifier[rmtree] ( identifier[tmp_dir] )
keyword[except] identifier[OSError] :
keyword[pass]
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[cachedir] )
keyword[except] identifier[OSError] :
keyword[pass]
keyword[else] :
identifier[warnings] . identifier[warn] ( literal[string]
literal[string] % identifier[cachedir] )
keyword[if] identifier[versions] != identifier[my_versions] :
keyword[with] identifier[open] ( identifier[version_file] , literal[string] ) keyword[as] identifier[_version_file] :
identifier[json] . identifier[dump] ( identifier[my_versions] , identifier[_version_file] )
identifier[__CACHE_CHECKED] [ identifier[cachedir] ]= keyword[True]
keyword[return] identifier[memory] . identifier[cache] ( identifier[func] ,** identifier[kwargs] )
|
def _safe_cache(memory, func, **kwargs):
""" A wrapper for mem.cache that flushes the cache if the version
number of nibabel has changed.
"""
cachedir = memory.cachedir
if cachedir is None or cachedir in __CACHE_CHECKED:
return memory.cache(func, **kwargs) # depends on [control=['if'], data=[]]
version_file = os.path.join(cachedir, 'module_versions.json')
versions = dict()
if os.path.exists(version_file):
with open(version_file, 'r') as _version_file:
versions = json.load(_version_file) # depends on [control=['with'], data=['_version_file']] # depends on [control=['if'], data=[]]
modules = (nibabel,)
# Keep only the major + minor version numbers
my_versions = dict(((m.__name__, LooseVersion(m.__version__).version[:2]) for m in modules))
commons = set(versions.keys()).intersection(set(my_versions.keys()))
collisions = [m for m in commons if versions[m] != my_versions[m]]
# Flush cache if version collision
if len(collisions) > 0:
if nilearn.CHECK_CACHE_VERSION:
            warnings.warn('Incompatible cache in %s: different version of nibabel. Deleting the cache. Set nilearn.CHECK_CACHE_VERSION to False to avoid this behavior.' % cachedir)
try:
tmp_dir = os.path.split(cachedir)[:-1] + ('old_%i' % os.getpid(),)
tmp_dir = os.path.join(*tmp_dir)
# We use rename + unlink to be more robust to race
# conditions
os.rename(cachedir, tmp_dir)
shutil.rmtree(tmp_dir) # depends on [control=['try'], data=[]]
except OSError:
# Another process could have removed this dir
pass # depends on [control=['except'], data=[]]
try:
os.makedirs(cachedir) # depends on [control=['try'], data=[]]
except OSError:
# File exists?
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
warnings.warn('Incompatible cache in %s: old version of nibabel.' % cachedir) # depends on [control=['if'], data=[]]
# Write json files if configuration is different
if versions != my_versions:
with open(version_file, 'w') as _version_file:
json.dump(my_versions, _version_file) # depends on [control=['with'], data=['_version_file']] # depends on [control=['if'], data=['my_versions']]
__CACHE_CHECKED[cachedir] = True
return memory.cache(func, **kwargs)
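# The invalidation decision reduces to comparing stored major+minor
# version pairs against the current ones (the real code derives them
# with LooseVersion); a minimal sketch with made-up values.
stored = {'nibabel': [2, 4]}  # as read back from module_versions.json
current = {'nibabel': [int(x) for x in '2.5.1'.split('.')[:2]]}  # [2, 5]

common = set(stored) & set(current)
collisions = [m for m in common if stored[m] != current[m]]
print(collisions)  # ['nibabel'] -> the cache would be flushed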
|
def serve(info, host, port, reload, debugger, eager_loading, with_threads):
'''
Runs a local udata development server.
This local server is recommended for development purposes only but it
can also be used for simple intranet deployments.
By default it will not support any sort of concurrency at all
to simplify debugging.
This can be changed with the --with-threads option which will enable basic
multithreading.
The reloader and debugger are by default enabled if the debug flag of
Flask is enabled and disabled otherwise.
'''
# Werkzeug logger is special and is required
# with this configuration for development server
logger = logging.getLogger('werkzeug')
logger.setLevel(logging.INFO)
logger.handlers = []
debug = current_app.config['DEBUG']
if reload is None:
reload = bool(debug)
if debugger is None:
debugger = bool(debug)
if eager_loading is None:
eager_loading = not reload
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
settings = os.environ.get('UDATA_SETTINGS',
os.path.join(os.getcwd(), 'udata.cfg'))
extra_files = [settings]
if reload:
extra_files.extend(assets.manifests_paths())
run_simple(host, port, app, use_reloader=reload,
use_debugger=debugger, threaded=with_threads,
extra_files=extra_files)
|
def function[serve, parameter[info, host, port, reload, debugger, eager_loading, with_threads]]:
constant[
Runs a local udata development server.
This local server is recommended for development purposes only but it
can also be used for simple intranet deployments.
By default it will not support any sort of concurrency at all
to simplify debugging.
This can be changed with the --with-threads option which will enable basic
multithreading.
The reloader and debugger are by default enabled if the debug flag of
Flask is enabled and disabled otherwise.
]
variable[logger] assign[=] call[name[logging].getLogger, parameter[constant[werkzeug]]]
call[name[logger].setLevel, parameter[name[logging].INFO]]
name[logger].handlers assign[=] list[[]]
variable[debug] assign[=] call[name[current_app].config][constant[DEBUG]]
if compare[name[reload] is constant[None]] begin[:]
variable[reload] assign[=] call[name[bool], parameter[name[debug]]]
if compare[name[debugger] is constant[None]] begin[:]
variable[debugger] assign[=] call[name[bool], parameter[name[debug]]]
if compare[name[eager_loading] is constant[None]] begin[:]
variable[eager_loading] assign[=] <ast.UnaryOp object at 0x7da1b1120880>
variable[app] assign[=] call[name[DispatchingApp], parameter[name[info].load_app]]
variable[settings] assign[=] call[name[os].environ.get, parameter[constant[UDATA_SETTINGS], call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], constant[udata.cfg]]]]]
variable[extra_files] assign[=] list[[<ast.Name object at 0x7da18f09fdc0>]]
if name[reload] begin[:]
call[name[extra_files].extend, parameter[call[name[assets].manifests_paths, parameter[]]]]
call[name[run_simple], parameter[name[host], name[port], name[app]]]
|
keyword[def] identifier[serve] ( identifier[info] , identifier[host] , identifier[port] , identifier[reload] , identifier[debugger] , identifier[eager_loading] , identifier[with_threads] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] )
identifier[logger] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
identifier[logger] . identifier[handlers] =[]
identifier[debug] = identifier[current_app] . identifier[config] [ literal[string] ]
keyword[if] identifier[reload] keyword[is] keyword[None] :
identifier[reload] = identifier[bool] ( identifier[debug] )
keyword[if] identifier[debugger] keyword[is] keyword[None] :
identifier[debugger] = identifier[bool] ( identifier[debug] )
keyword[if] identifier[eager_loading] keyword[is] keyword[None] :
identifier[eager_loading] = keyword[not] identifier[reload]
identifier[app] = identifier[DispatchingApp] ( identifier[info] . identifier[load_app] , identifier[use_eager_loading] = identifier[eager_loading] )
identifier[settings] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] ,
identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), literal[string] ))
identifier[extra_files] =[ identifier[settings] ]
keyword[if] identifier[reload] :
identifier[extra_files] . identifier[extend] ( identifier[assets] . identifier[manifests_paths] ())
identifier[run_simple] ( identifier[host] , identifier[port] , identifier[app] , identifier[use_reloader] = identifier[reload] ,
identifier[use_debugger] = identifier[debugger] , identifier[threaded] = identifier[with_threads] ,
identifier[extra_files] = identifier[extra_files] )
|
def serve(info, host, port, reload, debugger, eager_loading, with_threads):
"""
Runs a local udata development server.
This local server is recommended for development purposes only but it
can also be used for simple intranet deployments.
By default it will not support any sort of concurrency at all
to simplify debugging.
This can be changed with the --with-threads option which will enable basic
multithreading.
The reloader and debugger are by default enabled if the debug flag of
Flask is enabled and disabled otherwise.
"""
# Werkzeug logger is special and is required
# with this configuration for development server
logger = logging.getLogger('werkzeug')
logger.setLevel(logging.INFO)
logger.handlers = []
debug = current_app.config['DEBUG']
if reload is None:
reload = bool(debug) # depends on [control=['if'], data=['reload']]
if debugger is None:
debugger = bool(debug) # depends on [control=['if'], data=['debugger']]
if eager_loading is None:
eager_loading = not reload # depends on [control=['if'], data=['eager_loading']]
app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)
settings = os.environ.get('UDATA_SETTINGS', os.path.join(os.getcwd(), 'udata.cfg'))
extra_files = [settings]
if reload:
extra_files.extend(assets.manifests_paths()) # depends on [control=['if'], data=[]]
run_simple(host, port, app, use_reloader=reload, use_debugger=debugger, threaded=with_threads, extra_files=extra_files)
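# The None-means-derive defaults above, isolated into a small sketch of
# the tri-state pattern: None falls back to the debug flag, explicit
# values win.
def resolve_defaults(debug, reload=None, debugger=None, eager_loading=None):
    if reload is None:
        reload = bool(debug)
    if debugger is None:
        debugger = bool(debug)
    if eager_loading is None:
        eager_loading = not reload
    return reload, debugger, eager_loading

print(resolve_defaults(debug=True))                # (True, True, False)
print(resolve_defaults(debug=True, reload=False))  # (False, True, True)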
|
def _all(self, *args, **kwargs):
'''
        Return the full summary of the particular system.
'''
data = dict()
data['software'] = self._software(**kwargs)
data['system'] = self._system(**kwargs)
data['services'] = self._services(**kwargs)
try:
data['configuration'] = self._configuration(**kwargs)
except InspectorQueryException as ex:
data['configuration'] = 'N/A'
log.error(ex)
data['payload'] = self._payload(**kwargs) or 'N/A'
return data
|
def function[_all, parameter[self]]:
constant[
    Return the full summary of the particular system.
]
variable[data] assign[=] call[name[dict], parameter[]]
call[name[data]][constant[software]] assign[=] call[name[self]._software, parameter[]]
call[name[data]][constant[system]] assign[=] call[name[self]._system, parameter[]]
call[name[data]][constant[services]] assign[=] call[name[self]._services, parameter[]]
<ast.Try object at 0x7da20c7cbe50>
call[name[data]][constant[payload]] assign[=] <ast.BoolOp object at 0x7da1b21e1990>
return[name[data]]
|
keyword[def] identifier[_all] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[data] = identifier[dict] ()
identifier[data] [ literal[string] ]= identifier[self] . identifier[_software] (** identifier[kwargs] )
identifier[data] [ literal[string] ]= identifier[self] . identifier[_system] (** identifier[kwargs] )
identifier[data] [ literal[string] ]= identifier[self] . identifier[_services] (** identifier[kwargs] )
keyword[try] :
identifier[data] [ literal[string] ]= identifier[self] . identifier[_configuration] (** identifier[kwargs] )
keyword[except] identifier[InspectorQueryException] keyword[as] identifier[ex] :
identifier[data] [ literal[string] ]= literal[string]
identifier[log] . identifier[error] ( identifier[ex] )
identifier[data] [ literal[string] ]= identifier[self] . identifier[_payload] (** identifier[kwargs] ) keyword[or] literal[string]
keyword[return] identifier[data]
|
def _all(self, *args, **kwargs):
"""
    Return the full summary of the particular system.
"""
data = dict()
data['software'] = self._software(**kwargs)
data['system'] = self._system(**kwargs)
data['services'] = self._services(**kwargs)
try:
data['configuration'] = self._configuration(**kwargs) # depends on [control=['try'], data=[]]
except InspectorQueryException as ex:
data['configuration'] = 'N/A'
log.error(ex) # depends on [control=['except'], data=['ex']]
data['payload'] = self._payload(**kwargs) or 'N/A'
return data
|
def get_adjusted_value(
self,
assets,
field,
dt,
perspective_dt,
data_frequency):
'''
TODO:
for external data (fetch_csv) support, need to update logic here.
'''
return self.backend.get_spot_value(
assets, field, dt, data_frequency, self.quantopian_compatible
)
|
def function[get_adjusted_value, parameter[self, assets, field, dt, perspective_dt, data_frequency]]:
constant[
TODO:
for external data (fetch_csv) support, need to update logic here.
]
return[call[name[self].backend.get_spot_value, parameter[name[assets], name[field], name[dt], name[data_frequency], name[self].quantopian_compatible]]]
|
keyword[def] identifier[get_adjusted_value] (
identifier[self] ,
identifier[assets] ,
identifier[field] ,
identifier[dt] ,
identifier[perspective_dt] ,
identifier[data_frequency] ):
literal[string]
keyword[return] identifier[self] . identifier[backend] . identifier[get_spot_value] (
identifier[assets] , identifier[field] , identifier[dt] , identifier[data_frequency] , identifier[self] . identifier[quantopian_compatible]
)
|
def get_adjusted_value(self, assets, field, dt, perspective_dt, data_frequency):
"""
TODO:
for external data (fetch_csv) support, need to update logic here.
"""
return self.backend.get_spot_value(assets, field, dt, data_frequency, self.quantopian_compatible)
|
def write_json_flag(flag, fobj, **kwargs):
"""Write a `DataQualityFlag` to a JSON file
Parameters
----------
flag : `DataQualityFlag`
data to write
fobj : `str`, `file`
target file (or filename) to write
**kwargs
other keyword arguments to pass to :func:`json.dump`
See also
--------
json.dump
for details on acceptable keyword arguments
"""
# write to filename
if isinstance(fobj, string_types):
with open(fobj, 'w') as fobj2:
return write_json_flag(flag, fobj2, **kwargs)
# build json packet
data = {}
data['ifo'] = flag.ifo
data['name'] = flag.tag
data['version'] = flag.version
data['active'] = flag.active
data['known'] = flag.known
data['metadata'] = {}
data['metadata']['active_indicates_ifo_badness'] = not flag.isgood
data['metadata']['flag_description'] = flag.description
# write
json.dump(data, fobj, **kwargs)
|
def function[write_json_flag, parameter[flag, fobj]]:
constant[Write a `DataQualityFlag` to a JSON file
Parameters
----------
flag : `DataQualityFlag`
data to write
fobj : `str`, `file`
target file (or filename) to write
**kwargs
other keyword arguments to pass to :func:`json.dump`
See also
--------
json.dump
for details on acceptable keyword arguments
]
if call[name[isinstance], parameter[name[fobj], name[string_types]]] begin[:]
with call[name[open], parameter[name[fobj], constant[w]]] begin[:]
return[call[name[write_json_flag], parameter[name[flag], name[fobj2]]]]
variable[data] assign[=] dictionary[[], []]
call[name[data]][constant[ifo]] assign[=] name[flag].ifo
call[name[data]][constant[name]] assign[=] name[flag].tag
call[name[data]][constant[version]] assign[=] name[flag].version
call[name[data]][constant[active]] assign[=] name[flag].active
call[name[data]][constant[known]] assign[=] name[flag].known
call[name[data]][constant[metadata]] assign[=] dictionary[[], []]
call[call[name[data]][constant[metadata]]][constant[active_indicates_ifo_badness]] assign[=] <ast.UnaryOp object at 0x7da18f09e3e0>
call[call[name[data]][constant[metadata]]][constant[flag_description]] assign[=] name[flag].description
call[name[json].dump, parameter[name[data], name[fobj]]]
|
keyword[def] identifier[write_json_flag] ( identifier[flag] , identifier[fobj] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[fobj] , identifier[string_types] ):
keyword[with] identifier[open] ( identifier[fobj] , literal[string] ) keyword[as] identifier[fobj2] :
keyword[return] identifier[write_json_flag] ( identifier[flag] , identifier[fobj2] ,** identifier[kwargs] )
identifier[data] ={}
identifier[data] [ literal[string] ]= identifier[flag] . identifier[ifo]
identifier[data] [ literal[string] ]= identifier[flag] . identifier[tag]
identifier[data] [ literal[string] ]= identifier[flag] . identifier[version]
identifier[data] [ literal[string] ]= identifier[flag] . identifier[active]
identifier[data] [ literal[string] ]= identifier[flag] . identifier[known]
identifier[data] [ literal[string] ]={}
identifier[data] [ literal[string] ][ literal[string] ]= keyword[not] identifier[flag] . identifier[isgood]
identifier[data] [ literal[string] ][ literal[string] ]= identifier[flag] . identifier[description]
identifier[json] . identifier[dump] ( identifier[data] , identifier[fobj] ,** identifier[kwargs] )
|
def write_json_flag(flag, fobj, **kwargs):
"""Write a `DataQualityFlag` to a JSON file
Parameters
----------
flag : `DataQualityFlag`
data to write
fobj : `str`, `file`
target file (or filename) to write
**kwargs
other keyword arguments to pass to :func:`json.dump`
See also
--------
json.dump
for details on acceptable keyword arguments
"""
# write to filename
if isinstance(fobj, string_types):
with open(fobj, 'w') as fobj2:
return write_json_flag(flag, fobj2, **kwargs) # depends on [control=['with'], data=['fobj2']] # depends on [control=['if'], data=[]]
# build json packet
data = {}
data['ifo'] = flag.ifo
data['name'] = flag.tag
data['version'] = flag.version
data['active'] = flag.active
data['known'] = flag.known
data['metadata'] = {}
data['metadata']['active_indicates_ifo_badness'] = not flag.isgood
data['metadata']['flag_description'] = flag.description
# write
json.dump(data, fobj, **kwargs)
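# A hypothetical usage sketch, assuming write_json_flag and its imports
# are in scope; FakeFlag exposes only the attributes the writer reads,
# with segments as plain lists so json can serialize them.
import io
import json

class FakeFlag:
    ifo = 'L1'
    tag = 'L1:DMT-ANALYSIS_READY:1'
    version = 1
    active = [[0, 10]]
    known = [[0, 100]]
    isgood = True
    description = 'Detector ready for analysis'

buf = io.StringIO()
write_json_flag(FakeFlag(), buf, indent=2)
print(json.loads(buf.getvalue())['metadata'])
# {'active_indicates_ifo_badness': False,
#  'flag_description': 'Detector ready for analysis'}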
|
def _position_autocorrelation_fitting_eqn(t, Gamma, AngTrapFreq):
"""
The value of the fitting equation:
exp(-t*Gamma/2) * (cos(t* sqrt(Omega**2 - Gamma**2 /4)) + Gamma* sin(t* sqrt(Omega**2-Gamma**2 /4))/(2* sqrt(Omega**2 - Gamma**2 /4)))
    [eqn 4.20 taken from DOI: 10.1007/978-1-4614-6031-2]
to be fit to the autocorrelation-exponential decay
Parameters
----------
t : float
time
Gamma : float
Big Gamma (in radians), i.e. damping
AngTrapFreq : float
Angular Trapping Frequency in Radians
Returns
-------
Value : float
The value of the fitting equation
"""
return _np.exp(-t*Gamma/2)* ( _np.cos(t* _np.sqrt(AngTrapFreq**2-Gamma**2/4)) + Gamma* _np.sin(t* _np.sqrt(AngTrapFreq**2-Gamma**2/4))/(2* _np.sqrt(AngTrapFreq**2-Gamma**2/4)) )
|
def function[_position_autocorrelation_fitting_eqn, parameter[t, Gamma, AngTrapFreq]]:
constant[
The value of the fitting equation:
exp(-t*Gamma/2) * (cos(t* sqrt(Omega**2 - Gamma**2 /4)) + Gamma* sin(t* sqrt(Omega**2-Gamma**2 /4))/(2* sqrt(Omega**2 - Gamma**2 /4)))
    [eqn 4.20 taken from DOI: 10.1007/978-1-4614-6031-2]
to be fit to the autocorrelation-exponential decay
Parameters
----------
t : float
time
Gamma : float
Big Gamma (in radians), i.e. damping
AngTrapFreq : float
Angular Trapping Frequency in Radians
Returns
-------
Value : float
The value of the fitting equation
]
return[binary_operation[call[name[_np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b287b160> * name[Gamma]] / constant[2]]]] * binary_operation[call[name[_np].cos, parameter[binary_operation[name[t] * call[name[_np].sqrt, parameter[binary_operation[binary_operation[name[AngTrapFreq] ** constant[2]] - binary_operation[binary_operation[name[Gamma] ** constant[2]] / constant[4]]]]]]]] + binary_operation[binary_operation[name[Gamma] * call[name[_np].sin, parameter[binary_operation[name[t] * call[name[_np].sqrt, parameter[binary_operation[binary_operation[name[AngTrapFreq] ** constant[2]] - binary_operation[binary_operation[name[Gamma] ** constant[2]] / constant[4]]]]]]]]] / binary_operation[constant[2] * call[name[_np].sqrt, parameter[binary_operation[binary_operation[name[AngTrapFreq] ** constant[2]] - binary_operation[binary_operation[name[Gamma] ** constant[2]] / constant[4]]]]]]]]]]
|
keyword[def] identifier[_position_autocorrelation_fitting_eqn] ( identifier[t] , identifier[Gamma] , identifier[AngTrapFreq] ):
literal[string]
keyword[return] identifier[_np] . identifier[exp] (- identifier[t] * identifier[Gamma] / literal[int] )*( identifier[_np] . identifier[cos] ( identifier[t] * identifier[_np] . identifier[sqrt] ( identifier[AngTrapFreq] ** literal[int] - identifier[Gamma] ** literal[int] / literal[int] ))+ identifier[Gamma] * identifier[_np] . identifier[sin] ( identifier[t] * identifier[_np] . identifier[sqrt] ( identifier[AngTrapFreq] ** literal[int] - identifier[Gamma] ** literal[int] / literal[int] ))/( literal[int] * identifier[_np] . identifier[sqrt] ( identifier[AngTrapFreq] ** literal[int] - identifier[Gamma] ** literal[int] / literal[int] )))
|
def _position_autocorrelation_fitting_eqn(t, Gamma, AngTrapFreq):
"""
The value of the fitting equation:
exp(-t*Gamma/2) * (cos(t* sqrt(Omega**2 - Gamma**2 /4)) + Gamma* sin(t* sqrt(Omega**2-Gamma**2 /4))/(2* sqrt(Omega**2 - Gamma**2 /4)))
    [eqn 4.20 taken from DOI: 10.1007/978-1-4614-6031-2]
to be fit to the autocorrelation-exponential decay
Parameters
----------
t : float
time
Gamma : float
Big Gamma (in radians), i.e. damping
AngTrapFreq : float
Angular Trapping Frequency in Radians
Returns
-------
Value : float
The value of the fitting equation
"""
return _np.exp(-t * Gamma / 2) * (_np.cos(t * _np.sqrt(AngTrapFreq ** 2 - Gamma ** 2 / 4)) + Gamma * _np.sin(t * _np.sqrt(AngTrapFreq ** 2 - Gamma ** 2 / 4)) / (2 * _np.sqrt(AngTrapFreq ** 2 - Gamma ** 2 / 4)))
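# A sketch of the equation in use: fit it to synthetic noisy data with
# scipy.optimize.curve_fit (scipy assumed available; the "true"
# parameters below are arbitrary).
import numpy as np
from scipy.optimize import curve_fit

def model(t, Gamma, AngTrapFreq):
    w = np.sqrt(AngTrapFreq**2 - Gamma**2 / 4)
    return np.exp(-t * Gamma / 2) * (np.cos(t * w) + Gamma * np.sin(t * w) / (2 * w))

rng = np.random.default_rng(0)
t = np.linspace(0, 5e-4, 500)
data = model(t, 4e3, 2 * np.pi * 10e3) + 0.02 * rng.standard_normal(t.size)

# Start near the truth; the oscillatory model has many local minima.
popt, _ = curve_fit(model, t, data, p0=[2e3, 2 * np.pi * 9.8e3])
print(popt)  # roughly [4.0e3, 6.28e4]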
|
def generate_validation_function(self, uri, name):
"""
        Generate a validation function for the given uri with the given name.
"""
self._validation_functions_done.add(uri)
self.l('')
with self._resolver.resolving(uri) as definition:
with self.l('def {}(data):', name):
self.generate_func_code_block(definition, 'data', 'data', clear_variables=True)
self.l('return data')
|
def function[generate_validation_function, parameter[self, uri, name]]:
constant[
    Generate a validation function for the given uri with the given name.
]
call[name[self]._validation_functions_done.add, parameter[name[uri]]]
call[name[self].l, parameter[constant[]]]
with call[name[self]._resolver.resolving, parameter[name[uri]]] begin[:]
with call[name[self].l, parameter[constant[def {}(data):], name[name]]] begin[:]
call[name[self].generate_func_code_block, parameter[name[definition], constant[data], constant[data]]]
call[name[self].l, parameter[constant[return data]]]
|
keyword[def] identifier[generate_validation_function] ( identifier[self] , identifier[uri] , identifier[name] ):
literal[string]
identifier[self] . identifier[_validation_functions_done] . identifier[add] ( identifier[uri] )
identifier[self] . identifier[l] ( literal[string] )
keyword[with] identifier[self] . identifier[_resolver] . identifier[resolving] ( identifier[uri] ) keyword[as] identifier[definition] :
keyword[with] identifier[self] . identifier[l] ( literal[string] , identifier[name] ):
identifier[self] . identifier[generate_func_code_block] ( identifier[definition] , literal[string] , literal[string] , identifier[clear_variables] = keyword[True] )
identifier[self] . identifier[l] ( literal[string] )
|
def generate_validation_function(self, uri, name):
"""
        Generate a validation function for the given uri with the given name.
"""
self._validation_functions_done.add(uri)
self.l('')
with self._resolver.resolving(uri) as definition:
with self.l('def {}(data):', name):
self.generate_func_code_block(definition, 'data', 'data', clear_variables=True)
self.l('return data') # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['definition']]
|
def value(self):
"""
Covariance matrix.
Returns
-------
K : ndarray
Matrix K = LLᵀ + ϵI, for a very small positive number ϵ.
"""
K = dot(self.L, self.L.T)
return K + self._epsilon * eye(K.shape[0])
|
def function[value, parameter[self]]:
constant[
Covariance matrix.
Returns
-------
K : ndarray
Matrix K = LLᵀ + ϵI, for a very small positive number ϵ.
]
variable[K] assign[=] call[name[dot], parameter[name[self].L, name[self].L.T]]
return[binary_operation[name[K] + binary_operation[name[self]._epsilon * call[name[eye], parameter[call[name[K].shape][constant[0]]]]]]]
|
keyword[def] identifier[value] ( identifier[self] ):
literal[string]
identifier[K] = identifier[dot] ( identifier[self] . identifier[L] , identifier[self] . identifier[L] . identifier[T] )
keyword[return] identifier[K] + identifier[self] . identifier[_epsilon] * identifier[eye] ( identifier[K] . identifier[shape] [ literal[int] ])
|
def value(self):
"""
Covariance matrix.
Returns
-------
K : ndarray
Matrix K = LLᵀ + ϵI, for a very small positive number ϵ.
"""
K = dot(self.L, self.L.T)
return K + self._epsilon * eye(K.shape[0])
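# A NumPy sketch of K = L L^T + eps*I illustrating the jitter term:
# with a rank-deficient L the bare product is singular, while the
# jittered K still admits a Cholesky factorization.
import numpy as np

L = np.array([[1.0, 0.0],
              [2.0, 0.0]])  # rank 1, so L @ L.T is singular
eps = 1e-8
K = L @ L.T + eps * np.eye(L.shape[0])
np.linalg.cholesky(K)  # succeeds; raises LinAlgError without the jitter
print(K)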
|
def ordinal_encoding(X_in, mapping=None, cols=None, handle_unknown='value', handle_missing='value'):
"""
    Ordinal encoding uses a single column of integers to represent the classes. An optional mapping dict can be passed
    in; in this case we use the knowledge that there is some true order to the classes themselves. Otherwise, the classes
    are assumed to have no true order and integers are assigned in order of appearance.
"""
return_nan_series = pd.Series(data=[np.nan], index=[-2])
X = X_in.copy(deep=True)
if cols is None:
cols = X.columns.values
if mapping is not None:
mapping_out = mapping
for switch in mapping:
column = switch.get('col')
X[column] = X[column].map(switch['mapping'])
try:
X[column] = X[column].astype(int)
except ValueError as e:
X[column] = X[column].astype(float)
if handle_unknown == 'value':
X[column].fillna(-1, inplace=True)
elif handle_unknown == 'error':
missing = X[column].isnull()
if any(missing):
raise ValueError('Unexpected categories found in column %s' % column)
if handle_missing == 'return_nan':
X[column] = X[column].map(return_nan_series).where(X[column] == -2, X[column])
else:
mapping_out = []
for col in cols:
nan_identity = np.nan
if util.is_category(X[col].dtype):
categories = X[col].cat.categories
else:
categories = X[col].unique()
index = pd.Series(categories).fillna(nan_identity).unique()
data = pd.Series(index=index, data=range(1, len(index) + 1))
if handle_missing == 'value' and ~data.index.isnull().any():
data.loc[nan_identity] = -2
elif handle_missing == 'return_nan':
data.loc[nan_identity] = -2
mapping_out.append({'col': col, 'mapping': data, 'data_type': X[col].dtype}, )
return X, mapping_out
|
def function[ordinal_encoding, parameter[X_in, mapping, cols, handle_unknown, handle_missing]]:
constant[
    Ordinal encoding uses a single column of integers to represent the classes. An optional mapping dict can be passed
    in; in this case we use the knowledge that there is some true order to the classes themselves. Otherwise, the classes
    are assumed to have no true order and integers are assigned in order of appearance.
]
variable[return_nan_series] assign[=] call[name[pd].Series, parameter[]]
variable[X] assign[=] call[name[X_in].copy, parameter[]]
if compare[name[cols] is constant[None]] begin[:]
variable[cols] assign[=] name[X].columns.values
if compare[name[mapping] is_not constant[None]] begin[:]
variable[mapping_out] assign[=] name[mapping]
for taget[name[switch]] in starred[name[mapping]] begin[:]
variable[column] assign[=] call[name[switch].get, parameter[constant[col]]]
call[name[X]][name[column]] assign[=] call[call[name[X]][name[column]].map, parameter[call[name[switch]][constant[mapping]]]]
<ast.Try object at 0x7da20c794eb0>
if compare[name[handle_unknown] equal[==] constant[value]] begin[:]
call[call[name[X]][name[column]].fillna, parameter[<ast.UnaryOp object at 0x7da207f03280>]]
if compare[name[handle_missing] equal[==] constant[return_nan]] begin[:]
call[name[X]][name[column]] assign[=] call[call[call[name[X]][name[column]].map, parameter[name[return_nan_series]]].where, parameter[compare[call[name[X]][name[column]] equal[==] <ast.UnaryOp object at 0x7da207f03910>], call[name[X]][name[column]]]]
return[tuple[[<ast.Name object at 0x7da20c76f2e0>, <ast.Name object at 0x7da20c76e2f0>]]]
|
keyword[def] identifier[ordinal_encoding] ( identifier[X_in] , identifier[mapping] = keyword[None] , identifier[cols] = keyword[None] , identifier[handle_unknown] = literal[string] , identifier[handle_missing] = literal[string] ):
literal[string]
identifier[return_nan_series] = identifier[pd] . identifier[Series] ( identifier[data] =[ identifier[np] . identifier[nan] ], identifier[index] =[- literal[int] ])
identifier[X] = identifier[X_in] . identifier[copy] ( identifier[deep] = keyword[True] )
keyword[if] identifier[cols] keyword[is] keyword[None] :
identifier[cols] = identifier[X] . identifier[columns] . identifier[values]
keyword[if] identifier[mapping] keyword[is] keyword[not] keyword[None] :
identifier[mapping_out] = identifier[mapping]
keyword[for] identifier[switch] keyword[in] identifier[mapping] :
identifier[column] = identifier[switch] . identifier[get] ( literal[string] )
identifier[X] [ identifier[column] ]= identifier[X] [ identifier[column] ]. identifier[map] ( identifier[switch] [ literal[string] ])
keyword[try] :
identifier[X] [ identifier[column] ]= identifier[X] [ identifier[column] ]. identifier[astype] ( identifier[int] )
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
identifier[X] [ identifier[column] ]= identifier[X] [ identifier[column] ]. identifier[astype] ( identifier[float] )
keyword[if] identifier[handle_unknown] == literal[string] :
identifier[X] [ identifier[column] ]. identifier[fillna] (- literal[int] , identifier[inplace] = keyword[True] )
keyword[elif] identifier[handle_unknown] == literal[string] :
identifier[missing] = identifier[X] [ identifier[column] ]. identifier[isnull] ()
keyword[if] identifier[any] ( identifier[missing] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[column] )
keyword[if] identifier[handle_missing] == literal[string] :
identifier[X] [ identifier[column] ]= identifier[X] [ identifier[column] ]. identifier[map] ( identifier[return_nan_series] ). identifier[where] ( identifier[X] [ identifier[column] ]==- literal[int] , identifier[X] [ identifier[column] ])
keyword[else] :
identifier[mapping_out] =[]
keyword[for] identifier[col] keyword[in] identifier[cols] :
identifier[nan_identity] = identifier[np] . identifier[nan]
keyword[if] identifier[util] . identifier[is_category] ( identifier[X] [ identifier[col] ]. identifier[dtype] ):
identifier[categories] = identifier[X] [ identifier[col] ]. identifier[cat] . identifier[categories]
keyword[else] :
identifier[categories] = identifier[X] [ identifier[col] ]. identifier[unique] ()
identifier[index] = identifier[pd] . identifier[Series] ( identifier[categories] ). identifier[fillna] ( identifier[nan_identity] ). identifier[unique] ()
identifier[data] = identifier[pd] . identifier[Series] ( identifier[index] = identifier[index] , identifier[data] = identifier[range] ( literal[int] , identifier[len] ( identifier[index] )+ literal[int] ))
keyword[if] identifier[handle_missing] == literal[string] keyword[and] ~ identifier[data] . identifier[index] . identifier[isnull] (). identifier[any] ():
identifier[data] . identifier[loc] [ identifier[nan_identity] ]=- literal[int]
keyword[elif] identifier[handle_missing] == literal[string] :
identifier[data] . identifier[loc] [ identifier[nan_identity] ]=- literal[int]
identifier[mapping_out] . identifier[append] ({ literal[string] : identifier[col] , literal[string] : identifier[data] , literal[string] : identifier[X] [ identifier[col] ]. identifier[dtype] },)
keyword[return] identifier[X] , identifier[mapping_out]
|
def ordinal_encoding(X_in, mapping=None, cols=None, handle_unknown='value', handle_missing='value'):
"""
    Ordinal encoding uses a single column of integers to represent the classes. An optional mapping dict can be passed
    in; in this case we use the knowledge that there is some true order to the classes themselves. Otherwise, the classes
    are assumed to have no true order and integers are assigned in order of appearance.
"""
return_nan_series = pd.Series(data=[np.nan], index=[-2])
X = X_in.copy(deep=True)
if cols is None:
cols = X.columns.values # depends on [control=['if'], data=['cols']]
if mapping is not None:
mapping_out = mapping
for switch in mapping:
column = switch.get('col')
X[column] = X[column].map(switch['mapping'])
try:
X[column] = X[column].astype(int) # depends on [control=['try'], data=[]]
except ValueError as e:
X[column] = X[column].astype(float) # depends on [control=['except'], data=[]]
if handle_unknown == 'value':
X[column].fillna(-1, inplace=True) # depends on [control=['if'], data=[]]
elif handle_unknown == 'error':
missing = X[column].isnull()
if any(missing):
raise ValueError('Unexpected categories found in column %s' % column) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if handle_missing == 'return_nan':
X[column] = X[column].map(return_nan_series).where(X[column] == -2, X[column]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['switch']] # depends on [control=['if'], data=['mapping']]
else:
mapping_out = []
for col in cols:
nan_identity = np.nan
if util.is_category(X[col].dtype):
categories = X[col].cat.categories # depends on [control=['if'], data=[]]
else:
categories = X[col].unique()
index = pd.Series(categories).fillna(nan_identity).unique()
data = pd.Series(index=index, data=range(1, len(index) + 1))
if handle_missing == 'value' and ~data.index.isnull().any():
data.loc[nan_identity] = -2 # depends on [control=['if'], data=[]]
elif handle_missing == 'return_nan':
data.loc[nan_identity] = -2 # depends on [control=['if'], data=[]]
mapping_out.append({'col': col, 'mapping': data, 'data_type': X[col].dtype}) # depends on [control=['for'], data=['col']]
return (X, mapping_out)
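# The core mapping construction of the else branch above, in isolation
# (a small pandas sketch; -2 is the sentinel reserved for missing values).
import numpy as np
import pandas as pd

col = pd.Series(['low', 'high', 'medium', 'low', np.nan])
categories = pd.Series(col.unique())  # ['low', 'high', 'medium', nan]
mapping = pd.Series(range(1, len(categories) + 1), index=categories)
mapping.loc[np.nan] = -2  # sentinel for missing values
print(col.map(mapping).tolist())  # [1, 2, 3, 1, -2]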
|
def event_source_mapping_exists(UUID=None, EventSourceArn=None,
FunctionName=None,
region=None, key=None, keyid=None, profile=None):
'''
Given an event source mapping ID or an event source ARN and FunctionName,
check whether the mapping exists.
    Returns True if the given mapping exists and returns False if the given
    mapping does not exist.
    CLI Example:
    .. code-block:: bash
        salt myminion boto_lambda.event_source_mapping_exists UUID=the-uuid
'''
desc = describe_event_source_mapping(UUID=UUID,
EventSourceArn=EventSourceArn,
FunctionName=FunctionName,
region=region, key=key,
keyid=keyid, profile=profile)
if 'error' in desc:
return desc
return {'exists': bool(desc.get('event_source_mapping'))}
|
def function[event_source_mapping_exists, parameter[UUID, EventSourceArn, FunctionName, region, key, keyid, profile]]:
constant[
Given an event source mapping ID or an event source ARN and FunctionName,
check whether the mapping exists.
    Returns True if the given mapping exists and returns False if the given
    mapping does not exist.
    CLI Example:
    .. code-block:: bash
        salt myminion boto_lambda.event_source_mapping_exists UUID=the-uuid
]
variable[desc] assign[=] call[name[describe_event_source_mapping], parameter[]]
if compare[constant[error] in name[desc]] begin[:]
return[name[desc]]
return[dictionary[[<ast.Constant object at 0x7da207f99c30>], [<ast.Call object at 0x7da207f98880>]]]
|
keyword[def] identifier[event_source_mapping_exists] ( identifier[UUID] = keyword[None] , identifier[EventSourceArn] = keyword[None] ,
identifier[FunctionName] = keyword[None] ,
identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[desc] = identifier[describe_event_source_mapping] ( identifier[UUID] = identifier[UUID] ,
identifier[EventSourceArn] = identifier[EventSourceArn] ,
identifier[FunctionName] = identifier[FunctionName] ,
identifier[region] = identifier[region] , identifier[key] = identifier[key] ,
identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
keyword[if] literal[string] keyword[in] identifier[desc] :
keyword[return] identifier[desc]
keyword[return] { literal[string] : identifier[bool] ( identifier[desc] . identifier[get] ( literal[string] ))}
|
def event_source_mapping_exists(UUID=None, EventSourceArn=None, FunctionName=None, region=None, key=None, keyid=None, profile=None):
"""
Given an event source mapping ID or an event source ARN and FunctionName,
check whether the mapping exists.
    Returns True if the given mapping exists and returns False if the given
    mapping does not exist.
    CLI Example:
    .. code-block:: bash
        salt myminion boto_lambda.event_source_mapping_exists UUID=the-uuid
"""
desc = describe_event_source_mapping(UUID=UUID, EventSourceArn=EventSourceArn, FunctionName=FunctionName, region=region, key=key, keyid=keyid, profile=profile)
if 'error' in desc:
return desc # depends on [control=['if'], data=['desc']]
return {'exists': bool(desc.get('event_source_mapping'))}
|
def _match_greedily(reaction, compound_formula, score_func):
"""Match compounds greedily based on score function.
Args:
reaction: Reaction equation :class:`psamm.reaction.Reaction`.
compound_formula: Dictionary mapping compound IDs to
:class:`psamm.formula.Formula`. Formulas must be flattened.
score_func: Function that takes two :class:`_CompoundInstance` and
returns the score.
"""
uninstantiated_left, uninstantiated_right = _reaction_to_dicts(reaction)
def compound_instances(uninstantiated):
instances = []
for compound, value in iteritems(uninstantiated):
if value > 0:
f = compound_formula[compound.name]
instances.append(_CompoundInstance(compound, value, f))
for inst in instances:
uninstantiated[inst.compound] -= 1
return instances
def instantiate(uninstantiated, compound):
n = uninstantiated[compound]
if n > 0:
f = compound_formula[compound.name]
inst = _CompoundInstance(compound, n, f)
uninstantiated[compound] -= 1
return inst
return None
left = compound_instances(uninstantiated_left)
right = compound_instances(uninstantiated_right)
instances = left + right
pairs = {}
for inst1, inst2 in product(left, right):
result = score_func(inst1, inst2)
if result is not None:
pairs[inst1, inst2] = result
def inst_pair_sort_key(entry):
"""Sort key for finding best match among instance pairs.
Rank by score in general but always match identical compounds first
(these will always have score equal to one but are handled specially
to put them ahead of other compounds with score equal to one). Use
compound names to break ties to produce a deterministic result.
"""
(inst1, inst2), score = entry
c1, c2 = inst1.compound, inst2.compound
same_compound = c1.name == c2.name and c1.compartment != c2.compartment
return same_compound, score, c1.name, c2.name
transfer = {}
while len(pairs) > 0:
(inst1, inst2), _ = max(iteritems(pairs), key=inst_pair_sort_key)
common = inst1.formula & inst2.formula
key = (inst1.compound, inst1.index), (inst2.compound, inst2.index)
if key not in transfer:
transfer[key] = Formula()
transfer[key] |= common
for inst in (inst1, inst2):
inst.formula -= common
to_insert = set()
inst = instantiate(uninstantiated_left, inst1.compound)
if inst is not None:
left.append(inst)
instances.append(inst)
to_insert.add(inst)
inst = instantiate(uninstantiated_right, inst2.compound)
if inst is not None:
right.append(inst)
instances.append(inst)
to_insert.add(inst)
to_update = {inst1, inst2}
to_delete = set()
for inst1, inst2 in pairs:
if inst1 in to_update or inst2 in to_update:
if len(inst1.formula) > 0 and len(inst2.formula) > 0:
result = score_func(inst1, inst2)
if result is None:
to_delete.add((inst1, inst2))
else:
pairs[inst1, inst2] = result
else:
to_delete.add((inst1, inst2))
for pair in to_delete:
del pairs[pair]
for inst1, inst2 in product(left, right):
if inst1 in to_insert or inst2 in to_insert:
result = score_func(inst1, inst2)
if result is not None:
pairs[inst1, inst2] = result
balance = {}
for inst in instances:
if len(inst.formula) > 0:
key = inst.compound, inst.index
balance[key] = inst.formula
return transfer, balance
|
def function[_match_greedily, parameter[reaction, compound_formula, score_func]]:
constant[Match compounds greedily based on score function.
Args:
reaction: Reaction equation :class:`psamm.reaction.Reaction`.
compound_formula: Dictionary mapping compound IDs to
:class:`psamm.formula.Formula`. Formulas must be flattened.
score_func: Function that takes two :class:`_CompoundInstance` and
returns the score.
]
<ast.Tuple object at 0x7da20e962710> assign[=] call[name[_reaction_to_dicts], parameter[name[reaction]]]
def function[compound_instances, parameter[uninstantiated]]:
variable[instances] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20e963610>, <ast.Name object at 0x7da20e963700>]]] in starred[call[name[iteritems], parameter[name[uninstantiated]]]] begin[:]
if compare[name[value] greater[>] constant[0]] begin[:]
variable[f] assign[=] call[name[compound_formula]][name[compound].name]
call[name[instances].append, parameter[call[name[_CompoundInstance], parameter[name[compound], name[value], name[f]]]]]
for taget[name[inst]] in starred[name[instances]] begin[:]
<ast.AugAssign object at 0x7da20e9627d0>
return[name[instances]]
def function[instantiate, parameter[uninstantiated, compound]]:
variable[n] assign[=] call[name[uninstantiated]][name[compound]]
if compare[name[n] greater[>] constant[0]] begin[:]
variable[f] assign[=] call[name[compound_formula]][name[compound].name]
variable[inst] assign[=] call[name[_CompoundInstance], parameter[name[compound], name[n], name[f]]]
<ast.AugAssign object at 0x7da20e9632e0>
return[name[inst]]
return[constant[None]]
variable[left] assign[=] call[name[compound_instances], parameter[name[uninstantiated_left]]]
variable[right] assign[=] call[name[compound_instances], parameter[name[uninstantiated_right]]]
variable[instances] assign[=] binary_operation[name[left] + name[right]]
variable[pairs] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da1b254ccd0>, <ast.Name object at 0x7da1b254cc40>]]] in starred[call[name[product], parameter[name[left], name[right]]]] begin[:]
variable[result] assign[=] call[name[score_func], parameter[name[inst1], name[inst2]]]
if compare[name[result] is_not constant[None]] begin[:]
call[name[pairs]][tuple[[<ast.Name object at 0x7da1b254cf70>, <ast.Name object at 0x7da1b254d210>]]] assign[=] name[result]
def function[inst_pair_sort_key, parameter[entry]]:
constant[Sort key for finding best match among instance pairs.
Rank by score in general but always match identical compounds first
(these will always have score equal to one but are handled specially
to put them ahead of other compounds with score equal to one). Use
compound names to break ties to produce a deterministic result.
]
<ast.Tuple object at 0x7da1b254e9e0> assign[=] name[entry]
<ast.Tuple object at 0x7da1b254d540> assign[=] tuple[[<ast.Attribute object at 0x7da1b254f040>, <ast.Attribute object at 0x7da1b254c5b0>]]
variable[same_compound] assign[=] <ast.BoolOp object at 0x7da1b254fc40>
return[tuple[[<ast.Name object at 0x7da1b254eaa0>, <ast.Name object at 0x7da1b254ed10>, <ast.Attribute object at 0x7da1b254fa90>, <ast.Attribute object at 0x7da1b254c340>]]]
variable[transfer] assign[=] dictionary[[], []]
while compare[call[name[len], parameter[name[pairs]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da1b254e770> assign[=] call[name[max], parameter[call[name[iteritems], parameter[name[pairs]]]]]
variable[common] assign[=] binary_operation[name[inst1].formula <ast.BitAnd object at 0x7da2590d6b60> name[inst2].formula]
variable[key] assign[=] tuple[[<ast.Tuple object at 0x7da1b254e800>, <ast.Tuple object at 0x7da1b254cee0>]]
if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[transfer]] begin[:]
call[name[transfer]][name[key]] assign[=] call[name[Formula], parameter[]]
<ast.AugAssign object at 0x7da1b254cf40>
for taget[name[inst]] in starred[tuple[[<ast.Name object at 0x7da1b254dc90>, <ast.Name object at 0x7da1b254f880>]]] begin[:]
<ast.AugAssign object at 0x7da1b254fee0>
variable[to_insert] assign[=] call[name[set], parameter[]]
variable[inst] assign[=] call[name[instantiate], parameter[name[uninstantiated_left], name[inst1].compound]]
if compare[name[inst] is_not constant[None]] begin[:]
call[name[left].append, parameter[name[inst]]]
call[name[instances].append, parameter[name[inst]]]
call[name[to_insert].add, parameter[name[inst]]]
variable[inst] assign[=] call[name[instantiate], parameter[name[uninstantiated_right], name[inst2].compound]]
if compare[name[inst] is_not constant[None]] begin[:]
call[name[right].append, parameter[name[inst]]]
call[name[instances].append, parameter[name[inst]]]
call[name[to_insert].add, parameter[name[inst]]]
variable[to_update] assign[=] <ast.Set object at 0x7da1b254f1c0>
variable[to_delete] assign[=] call[name[set], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b254ff40>, <ast.Name object at 0x7da1b254f7c0>]]] in starred[name[pairs]] begin[:]
if <ast.BoolOp object at 0x7da1b254e8c0> begin[:]
if <ast.BoolOp object at 0x7da1b254e8f0> begin[:]
variable[result] assign[=] call[name[score_func], parameter[name[inst1], name[inst2]]]
if compare[name[result] is constant[None]] begin[:]
call[name[to_delete].add, parameter[tuple[[<ast.Name object at 0x7da20c76d690>, <ast.Name object at 0x7da20c76f760>]]]]
for taget[name[pair]] in starred[name[to_delete]] begin[:]
<ast.Delete object at 0x7da20c76e080>
for taget[tuple[[<ast.Name object at 0x7da18f720760>, <ast.Name object at 0x7da18f723a30>]]] in starred[call[name[product], parameter[name[left], name[right]]]] begin[:]
if <ast.BoolOp object at 0x7da18f723e20> begin[:]
variable[result] assign[=] call[name[score_func], parameter[name[inst1], name[inst2]]]
if compare[name[result] is_not constant[None]] begin[:]
call[name[pairs]][tuple[[<ast.Name object at 0x7da18f720220>, <ast.Name object at 0x7da18f720070>]]] assign[=] name[result]
variable[balance] assign[=] dictionary[[], []]
for taget[name[inst]] in starred[name[instances]] begin[:]
if compare[call[name[len], parameter[name[inst].formula]] greater[>] constant[0]] begin[:]
variable[key] assign[=] tuple[[<ast.Attribute object at 0x7da18f721a80>, <ast.Attribute object at 0x7da18f7224d0>]]
call[name[balance]][name[key]] assign[=] name[inst].formula
return[tuple[[<ast.Name object at 0x7da18f720310>, <ast.Name object at 0x7da18f721e70>]]]
|
keyword[def] identifier[_match_greedily] ( identifier[reaction] , identifier[compound_formula] , identifier[score_func] ):
literal[string]
identifier[uninstantiated_left] , identifier[uninstantiated_right] = identifier[_reaction_to_dicts] ( identifier[reaction] )
keyword[def] identifier[compound_instances] ( identifier[uninstantiated] ):
identifier[instances] =[]
keyword[for] identifier[compound] , identifier[value] keyword[in] identifier[iteritems] ( identifier[uninstantiated] ):
keyword[if] identifier[value] > literal[int] :
identifier[f] = identifier[compound_formula] [ identifier[compound] . identifier[name] ]
identifier[instances] . identifier[append] ( identifier[_CompoundInstance] ( identifier[compound] , identifier[value] , identifier[f] ))
keyword[for] identifier[inst] keyword[in] identifier[instances] :
identifier[uninstantiated] [ identifier[inst] . identifier[compound] ]-= literal[int]
keyword[return] identifier[instances]
keyword[def] identifier[instantiate] ( identifier[uninstantiated] , identifier[compound] ):
identifier[n] = identifier[uninstantiated] [ identifier[compound] ]
keyword[if] identifier[n] > literal[int] :
identifier[f] = identifier[compound_formula] [ identifier[compound] . identifier[name] ]
identifier[inst] = identifier[_CompoundInstance] ( identifier[compound] , identifier[n] , identifier[f] )
identifier[uninstantiated] [ identifier[compound] ]-= literal[int]
keyword[return] identifier[inst]
keyword[return] keyword[None]
identifier[left] = identifier[compound_instances] ( identifier[uninstantiated_left] )
identifier[right] = identifier[compound_instances] ( identifier[uninstantiated_right] )
identifier[instances] = identifier[left] + identifier[right]
identifier[pairs] ={}
keyword[for] identifier[inst1] , identifier[inst2] keyword[in] identifier[product] ( identifier[left] , identifier[right] ):
identifier[result] = identifier[score_func] ( identifier[inst1] , identifier[inst2] )
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
identifier[pairs] [ identifier[inst1] , identifier[inst2] ]= identifier[result]
keyword[def] identifier[inst_pair_sort_key] ( identifier[entry] ):
literal[string]
( identifier[inst1] , identifier[inst2] ), identifier[score] = identifier[entry]
identifier[c1] , identifier[c2] = identifier[inst1] . identifier[compound] , identifier[inst2] . identifier[compound]
identifier[same_compound] = identifier[c1] . identifier[name] == identifier[c2] . identifier[name] keyword[and] identifier[c1] . identifier[compartment] != identifier[c2] . identifier[compartment]
keyword[return] identifier[same_compound] , identifier[score] , identifier[c1] . identifier[name] , identifier[c2] . identifier[name]
identifier[transfer] ={}
keyword[while] identifier[len] ( identifier[pairs] )> literal[int] :
( identifier[inst1] , identifier[inst2] ), identifier[_] = identifier[max] ( identifier[iteritems] ( identifier[pairs] ), identifier[key] = identifier[inst_pair_sort_key] )
identifier[common] = identifier[inst1] . identifier[formula] & identifier[inst2] . identifier[formula]
identifier[key] =( identifier[inst1] . identifier[compound] , identifier[inst1] . identifier[index] ),( identifier[inst2] . identifier[compound] , identifier[inst2] . identifier[index] )
keyword[if] identifier[key] keyword[not] keyword[in] identifier[transfer] :
identifier[transfer] [ identifier[key] ]= identifier[Formula] ()
identifier[transfer] [ identifier[key] ]|= identifier[common]
keyword[for] identifier[inst] keyword[in] ( identifier[inst1] , identifier[inst2] ):
identifier[inst] . identifier[formula] -= identifier[common]
identifier[to_insert] = identifier[set] ()
identifier[inst] = identifier[instantiate] ( identifier[uninstantiated_left] , identifier[inst1] . identifier[compound] )
keyword[if] identifier[inst] keyword[is] keyword[not] keyword[None] :
identifier[left] . identifier[append] ( identifier[inst] )
identifier[instances] . identifier[append] ( identifier[inst] )
identifier[to_insert] . identifier[add] ( identifier[inst] )
identifier[inst] = identifier[instantiate] ( identifier[uninstantiated_right] , identifier[inst2] . identifier[compound] )
keyword[if] identifier[inst] keyword[is] keyword[not] keyword[None] :
identifier[right] . identifier[append] ( identifier[inst] )
identifier[instances] . identifier[append] ( identifier[inst] )
identifier[to_insert] . identifier[add] ( identifier[inst] )
identifier[to_update] ={ identifier[inst1] , identifier[inst2] }
identifier[to_delete] = identifier[set] ()
keyword[for] identifier[inst1] , identifier[inst2] keyword[in] identifier[pairs] :
keyword[if] identifier[inst1] keyword[in] identifier[to_update] keyword[or] identifier[inst2] keyword[in] identifier[to_update] :
keyword[if] identifier[len] ( identifier[inst1] . identifier[formula] )> literal[int] keyword[and] identifier[len] ( identifier[inst2] . identifier[formula] )> literal[int] :
identifier[result] = identifier[score_func] ( identifier[inst1] , identifier[inst2] )
keyword[if] identifier[result] keyword[is] keyword[None] :
identifier[to_delete] . identifier[add] (( identifier[inst1] , identifier[inst2] ))
keyword[else] :
identifier[pairs] [ identifier[inst1] , identifier[inst2] ]= identifier[result]
keyword[else] :
identifier[to_delete] . identifier[add] (( identifier[inst1] , identifier[inst2] ))
keyword[for] identifier[pair] keyword[in] identifier[to_delete] :
keyword[del] identifier[pairs] [ identifier[pair] ]
keyword[for] identifier[inst1] , identifier[inst2] keyword[in] identifier[product] ( identifier[left] , identifier[right] ):
keyword[if] identifier[inst1] keyword[in] identifier[to_insert] keyword[or] identifier[inst2] keyword[in] identifier[to_insert] :
identifier[result] = identifier[score_func] ( identifier[inst1] , identifier[inst2] )
keyword[if] identifier[result] keyword[is] keyword[not] keyword[None] :
identifier[pairs] [ identifier[inst1] , identifier[inst2] ]= identifier[result]
identifier[balance] ={}
keyword[for] identifier[inst] keyword[in] identifier[instances] :
keyword[if] identifier[len] ( identifier[inst] . identifier[formula] )> literal[int] :
identifier[key] = identifier[inst] . identifier[compound] , identifier[inst] . identifier[index]
identifier[balance] [ identifier[key] ]= identifier[inst] . identifier[formula]
keyword[return] identifier[transfer] , identifier[balance]
|
def _match_greedily(reaction, compound_formula, score_func):
"""Match compounds greedily based on score function.
Args:
reaction: Reaction equation :class:`psamm.reaction.Reaction`.
compound_formula: Dictionary mapping compound IDs to
:class:`psamm.formula.Formula`. Formulas must be flattened.
score_func: Function that takes two :class:`_CompoundInstance` and
returns the score.
"""
(uninstantiated_left, uninstantiated_right) = _reaction_to_dicts(reaction)
def compound_instances(uninstantiated):
instances = []
for (compound, value) in iteritems(uninstantiated):
if value > 0:
f = compound_formula[compound.name]
instances.append(_CompoundInstance(compound, value, f)) # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=[]]
for inst in instances:
uninstantiated[inst.compound] -= 1 # depends on [control=['for'], data=['inst']]
return instances
def instantiate(uninstantiated, compound):
n = uninstantiated[compound]
if n > 0:
f = compound_formula[compound.name]
inst = _CompoundInstance(compound, n, f)
uninstantiated[compound] -= 1
return inst # depends on [control=['if'], data=['n']]
return None
left = compound_instances(uninstantiated_left)
right = compound_instances(uninstantiated_right)
instances = left + right
pairs = {}
for (inst1, inst2) in product(left, right):
result = score_func(inst1, inst2)
if result is not None:
pairs[inst1, inst2] = result # depends on [control=['if'], data=['result']] # depends on [control=['for'], data=[]]
def inst_pair_sort_key(entry):
"""Sort key for finding best match among instance pairs.
Rank by score in general but always match identical compounds first
(these will always have score equal to one but are handled specially
to put them ahead of other compounds with score equal to one). Use
compound names to break ties to produce a deterministic result.
"""
((inst1, inst2), score) = entry
(c1, c2) = (inst1.compound, inst2.compound)
same_compound = c1.name == c2.name and c1.compartment != c2.compartment
return (same_compound, score, c1.name, c2.name)
transfer = {}
while len(pairs) > 0:
((inst1, inst2), _) = max(iteritems(pairs), key=inst_pair_sort_key)
common = inst1.formula & inst2.formula
key = ((inst1.compound, inst1.index), (inst2.compound, inst2.index))
if key not in transfer:
transfer[key] = Formula() # depends on [control=['if'], data=['key', 'transfer']]
transfer[key] |= common
for inst in (inst1, inst2):
inst.formula -= common # depends on [control=['for'], data=['inst']]
to_insert = set()
inst = instantiate(uninstantiated_left, inst1.compound)
if inst is not None:
left.append(inst)
instances.append(inst)
to_insert.add(inst) # depends on [control=['if'], data=['inst']]
inst = instantiate(uninstantiated_right, inst2.compound)
if inst is not None:
right.append(inst)
instances.append(inst)
to_insert.add(inst) # depends on [control=['if'], data=['inst']]
to_update = {inst1, inst2}
to_delete = set()
for (inst1, inst2) in pairs:
if inst1 in to_update or inst2 in to_update:
if len(inst1.formula) > 0 and len(inst2.formula) > 0:
result = score_func(inst1, inst2)
if result is None:
to_delete.add((inst1, inst2)) # depends on [control=['if'], data=[]]
else:
pairs[inst1, inst2] = result # depends on [control=['if'], data=[]]
else:
to_delete.add((inst1, inst2)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
for pair in to_delete:
del pairs[pair] # depends on [control=['for'], data=['pair']]
for (inst1, inst2) in product(left, right):
if inst1 in to_insert or inst2 in to_insert:
result = score_func(inst1, inst2)
if result is not None:
pairs[inst1, inst2] = result # depends on [control=['if'], data=['result']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]]
balance = {}
for inst in instances:
if len(inst.formula) > 0:
key = (inst.compound, inst.index)
balance[key] = inst.formula # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['inst']]
return (transfer, balance)
|
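The greedy loop above is easier to follow on a toy input. Below is a minimal, self-contained sketch of the same idea that swaps psamm's Formula and _CompoundInstance for plain collections.Counter objects (all names here are hypothetical stand-ins, and the careful inst_pair_sort_key tie-breaking is skipped): score all left/right pairs, repeatedly take the best pair, transfer the shared atoms, and rescore only the affected pairs.

from collections import Counter
from itertools import product

def overlap_score(f1, f2):
    # Score a pair by the number of shared atoms; None marks an impossible pair.
    total = sum((f1 & f2).values())
    return total if total > 0 else None

def greedy_transfer(left, right):
    pairs = {}
    for (i, f1), (j, f2) in product(left.items(), right.items()):
        score = overlap_score(f1, f2)
        if score is not None:
            pairs[i, j] = score
    transfer = {}
    while pairs:
        i, j = max(pairs, key=pairs.get)          # best remaining pair
        common = left[i] & right[j]               # atoms they share right now
        transfer[i, j] = transfer.get((i, j), Counter()) + common
        left[i] -= common                         # Counter -= drops zero counts
        right[j] -= common
        for key in list(pairs):                   # rescore affected pairs only
            if key[0] == i or key[1] == j:
                score = overlap_score(left[key[0]], right[key[1]])
                if score is None:
                    del pairs[key]
                else:
                    pairs[key] = score
    return transfer

left = {'glucose': Counter({'C': 6, 'H': 12, 'O': 6})}
right = {'pyruvate': Counter({'C': 3, 'H': 4, 'O': 3}),
         'water': Counter({'H': 2, 'O': 1})}
print(greedy_transfer(left, right))
# {('glucose', 'pyruvate'): Counter({'H': 4, 'C': 3, 'O': 3}),
#  ('glucose', 'water'): Counter({'H': 2, 'O': 1})}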
def reindex(self, newIndexIDs=None, newIndexNames=None, newIndexClassNames=None, newIndexTagNames=None):
'''
reindex - reindex the tree. Optionally, change what fields are indexed.
@param newIndexIDs <bool/None> - None to leave same, otherwise new value to index IDs
    @param newIndexNames <bool/None> - None to leave same, otherwise new value to index names
@param newIndexClassNames <bool/None> - None to leave same, otherwise new value to index class names
@param newIndexTagNames <bool/None> - None to leave same, otherwise new value to index tag names
'''
if newIndexIDs is not None:
self.indexIDs = newIndexIDs
if newIndexNames is not None:
self.indexNames = newIndexNames
    if newIndexClassNames is not None:
        self.indexClassNames = newIndexClassNames
    if newIndexTagNames is not None:
        self.indexTagNames = newIndexTagNames
self._resetIndexInternal()
self._indexTagRecursive(self.root)
|
def function[reindex, parameter[self, newIndexIDs, newIndexNames, newIndexClassNames, newIndexTagNames]]:
constant[
reindex - reindex the tree. Optionally, change what fields are indexed.
@param newIndexIDs <bool/None> - None to leave same, otherwise new value to index IDs
@parma newIndexNames <bool/None> - None to leave same, otherwise new value to index names
@param newIndexClassNames <bool/None> - None to leave same, otherwise new value to index class names
@param newIndexTagNames <bool/None> - None to leave same, otherwise new value to index tag names
]
if compare[name[newIndexIDs] is_not constant[None]] begin[:]
name[self].indexIDs assign[=] name[newIndexIDs]
if compare[name[newIndexNames] is_not constant[None]] begin[:]
name[self].indexNames assign[=] name[newIndexNames]
    if compare[name[newIndexClassNames] is_not constant[None]] begin[:]
        name[self].indexClassNames assign[=] name[newIndexClassNames]
    if compare[name[newIndexTagNames] is_not constant[None]] begin[:]
        name[self].indexTagNames assign[=] name[newIndexTagNames]
call[name[self]._resetIndexInternal, parameter[]]
call[name[self]._indexTagRecursive, parameter[name[self].root]]
|
keyword[def] identifier[reindex] ( identifier[self] , identifier[newIndexIDs] = keyword[None] , identifier[newIndexNames] = keyword[None] , identifier[newIndexClassNames] = keyword[None] , identifier[newIndexTagNames] = keyword[None] ):
literal[string]
keyword[if] identifier[newIndexIDs] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[indexIDs] = identifier[newIndexIDs]
keyword[if] identifier[newIndexNames] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[indexNames] = identifier[newIndexNames]
    keyword[if] identifier[newIndexClassNames] keyword[is] keyword[not] keyword[None] :
        identifier[self] . identifier[indexClassNames] = identifier[newIndexClassNames]
    keyword[if] identifier[newIndexTagNames] keyword[is] keyword[not] keyword[None] :
        identifier[self] . identifier[indexTagNames] = identifier[newIndexTagNames]
identifier[self] . identifier[_resetIndexInternal] ()
identifier[self] . identifier[_indexTagRecursive] ( identifier[self] . identifier[root] )
|
def reindex(self, newIndexIDs=None, newIndexNames=None, newIndexClassNames=None, newIndexTagNames=None):
"""
reindex - reindex the tree. Optionally, change what fields are indexed.
@param newIndexIDs <bool/None> - None to leave same, otherwise new value to index IDs
    @param newIndexNames <bool/None> - None to leave same, otherwise new value to index names
@param newIndexClassNames <bool/None> - None to leave same, otherwise new value to index class names
@param newIndexTagNames <bool/None> - None to leave same, otherwise new value to index tag names
"""
if newIndexIDs is not None:
self.indexIDs = newIndexIDs # depends on [control=['if'], data=['newIndexIDs']]
if newIndexNames is not None:
self.indexNames = newIndexNames # depends on [control=['if'], data=['newIndexNames']]
    if newIndexClassNames is not None:
        self.indexClassNames = newIndexClassNames # depends on [control=['if'], data=['newIndexClassNames']]
    if newIndexTagNames is not None:
        self.indexTagNames = newIndexTagNames # depends on [control=['if'], data=['newIndexTagNames']]
self._resetIndexInternal()
self._indexTagRecursive(self.root)
|
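For context, a hypothetical usage sketch: the IndexedAdvancedHTMLParser import and the parseStr call follow AdvancedHTMLParser conventions but are assumptions, since only reindex itself appears in the row above.

from AdvancedHTMLParser import IndexedAdvancedHTMLParser  # assumed import path

parser = IndexedAdvancedHTMLParser()
parser.parseStr('<div id="main" class="box"><span name="label">hi</span></div>')

# None leaves a setting unchanged; here we keep ID/name/class indexing but
# stop indexing tag names. The whole index is rebuilt either way.
parser.reindex(newIndexTagNames=False)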
def removeTab(self, index):
"""
Removes tab at index ``index``.
    This method emits tab_closed for the removed tab.
:param index: index of the tab to remove.
"""
widget = self.widget(index)
try:
self._widgets.remove(widget)
except ValueError:
pass
self.tab_closed.emit(widget)
self._del_code_edit(widget)
QTabWidget.removeTab(self, index)
if widget == self._current:
self._current = None
|
def function[removeTab, parameter[self, index]]:
constant[
Removes tab at index ``index``.
    This method emits tab_closed for the removed tab.
:param index: index of the tab to remove.
]
variable[widget] assign[=] call[name[self].widget, parameter[name[index]]]
<ast.Try object at 0x7da204622560>
call[name[self].tab_closed.emit, parameter[name[widget]]]
call[name[self]._del_code_edit, parameter[name[widget]]]
call[name[QTabWidget].removeTab, parameter[name[self], name[index]]]
if compare[name[widget] equal[==] name[self]._current] begin[:]
name[self]._current assign[=] constant[None]
|
keyword[def] identifier[removeTab] ( identifier[self] , identifier[index] ):
literal[string]
identifier[widget] = identifier[self] . identifier[widget] ( identifier[index] )
keyword[try] :
identifier[self] . identifier[_widgets] . identifier[remove] ( identifier[widget] )
keyword[except] identifier[ValueError] :
keyword[pass]
identifier[self] . identifier[tab_closed] . identifier[emit] ( identifier[widget] )
identifier[self] . identifier[_del_code_edit] ( identifier[widget] )
identifier[QTabWidget] . identifier[removeTab] ( identifier[self] , identifier[index] )
keyword[if] identifier[widget] == identifier[self] . identifier[_current] :
identifier[self] . identifier[_current] = keyword[None]
|
def removeTab(self, index):
"""
Removes tab at index ``index``.
    This method emits tab_closed for the removed tab.
:param index: index of the tab to remove.
"""
widget = self.widget(index)
try:
self._widgets.remove(widget) # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]]
self.tab_closed.emit(widget)
self._del_code_edit(widget)
QTabWidget.removeTab(self, index)
if widget == self._current:
self._current = None # depends on [control=['if'], data=[]]
|
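A runnable sketch of the contract this override implements, assuming PyQt5. The surrounding TabWidget subclass is hypothetical scaffolding (the real class, its _widgets bookkeeping, and _del_code_edit are not shown in the row), but the removeTab body is the one above.

import sys
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtWidgets import QApplication, QLabel, QTabWidget, QWidget

class TabWidget(QTabWidget):
    tab_closed = pyqtSignal(QWidget)

    def __init__(self, parent=None):
        super().__init__(parent)
        self._widgets = []      # widgets managed by this tab widget
        self._current = None

    def addTab(self, widget, label):
        self._widgets.append(widget)
        return super().addTab(widget, label)

    def _del_code_edit(self, widget):
        widget.deleteLater()    # release the widget once its tab is gone

    def removeTab(self, index):
        # Same body as the row above: notify listeners, then delegate to Qt.
        widget = self.widget(index)
        try:
            self._widgets.remove(widget)
        except ValueError:
            pass
        self.tab_closed.emit(widget)
        self._del_code_edit(widget)
        QTabWidget.removeTab(self, index)
        if widget == self._current:
            self._current = None

app = QApplication(sys.argv)
tabs = TabWidget()
tabs.addTab(QLabel('readme.txt'), 'readme.txt')
tabs.tab_closed.connect(lambda w: print('closed:', w.text()))
tabs.removeTab(0)  # prints: closed: readme.txt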
def createExportNeuroML2 (netParams=None, simConfig=None, reference=None, connections=True, stimulations=True, output=False, format='xml'):
    ''' Sequence of commands to create and export a network to NeuroML2 '''
from .. import sim
import __main__ as top
if not netParams: netParams = top.netParams
if not simConfig: simConfig = top.simConfig
sim.initialize(netParams, simConfig) # create network object and set cfg and net params
pops = sim.net.createPops() # instantiate network populations
cells = sim.net.createCells() # instantiate network cells based on defined populations
conns = sim.net.connectCells() # create connections between cells based on params
stims = sim.net.addStims() # add external stimulation to cells (IClamps etc)
rxd = sim.net.addRxD() # add reaction-diffusion (RxD)
simData = sim.setupRecording() # setup variables to record for each cell (spikes, V traces, etc)
sim.exportNeuroML2(reference,connections,stimulations,format) # export cells and connectivity to NeuroML 2 format
if output: return (pops, cells, conns, stims, rxd, simData)
|
def function[createExportNeuroML2, parameter[netParams, simConfig, reference, connections, stimulations, output, format]]:
    constant[ Sequence of commands to create and export a network to NeuroML2 ]
from relative_module[None] import module[sim]
import module[__main__] as alias[top]
if <ast.UnaryOp object at 0x7da18c4cd360> begin[:]
variable[netParams] assign[=] name[top].netParams
if <ast.UnaryOp object at 0x7da18c4ccf70> begin[:]
variable[simConfig] assign[=] name[top].simConfig
call[name[sim].initialize, parameter[name[netParams], name[simConfig]]]
variable[pops] assign[=] call[name[sim].net.createPops, parameter[]]
variable[cells] assign[=] call[name[sim].net.createCells, parameter[]]
variable[conns] assign[=] call[name[sim].net.connectCells, parameter[]]
variable[stims] assign[=] call[name[sim].net.addStims, parameter[]]
variable[rxd] assign[=] call[name[sim].net.addRxD, parameter[]]
variable[simData] assign[=] call[name[sim].setupRecording, parameter[]]
call[name[sim].exportNeuroML2, parameter[name[reference], name[connections], name[stimulations], name[format]]]
if name[output] begin[:]
return[tuple[[<ast.Name object at 0x7da18c4cca90>, <ast.Name object at 0x7da18c4cefe0>, <ast.Name object at 0x7da18c4cc1f0>, <ast.Name object at 0x7da18c4ce470>, <ast.Name object at 0x7da18c4cfa30>, <ast.Name object at 0x7da18c4cfd90>]]]
|
keyword[def] identifier[createExportNeuroML2] ( identifier[netParams] = keyword[None] , identifier[simConfig] = keyword[None] , identifier[reference] = keyword[None] , identifier[connections] = keyword[True] , identifier[stimulations] = keyword[True] , identifier[output] = keyword[False] , identifier[format] = literal[string] ):
literal[string]
keyword[from] .. keyword[import] identifier[sim]
keyword[import] identifier[__main__] keyword[as] identifier[top]
keyword[if] keyword[not] identifier[netParams] : identifier[netParams] = identifier[top] . identifier[netParams]
keyword[if] keyword[not] identifier[simConfig] : identifier[simConfig] = identifier[top] . identifier[simConfig]
identifier[sim] . identifier[initialize] ( identifier[netParams] , identifier[simConfig] )
identifier[pops] = identifier[sim] . identifier[net] . identifier[createPops] ()
identifier[cells] = identifier[sim] . identifier[net] . identifier[createCells] ()
identifier[conns] = identifier[sim] . identifier[net] . identifier[connectCells] ()
identifier[stims] = identifier[sim] . identifier[net] . identifier[addStims] ()
identifier[rxd] = identifier[sim] . identifier[net] . identifier[addRxD] ()
identifier[simData] = identifier[sim] . identifier[setupRecording] ()
identifier[sim] . identifier[exportNeuroML2] ( identifier[reference] , identifier[connections] , identifier[stimulations] , identifier[format] )
keyword[if] identifier[output] : keyword[return] ( identifier[pops] , identifier[cells] , identifier[conns] , identifier[stims] , identifier[rxd] , identifier[simData] )
|
def createExportNeuroML2(netParams=None, simConfig=None, reference=None, connections=True, stimulations=True, output=False, format='xml'):
""" Sequence of commands to create and export network to NeuroML2 """
from .. import sim
import __main__ as top
if not netParams:
netParams = top.netParams # depends on [control=['if'], data=[]]
if not simConfig:
simConfig = top.simConfig # depends on [control=['if'], data=[]]
sim.initialize(netParams, simConfig) # create network object and set cfg and net params
pops = sim.net.createPops() # instantiate network populations
cells = sim.net.createCells() # instantiate network cells based on defined populations
conns = sim.net.connectCells() # create connections between cells based on params
stims = sim.net.addStims() # add external stimulation to cells (IClamps etc)
rxd = sim.net.addRxD() # add reaction-diffusion (RxD)
simData = sim.setupRecording() # setup variables to record for each cell (spikes, V traces, etc)
sim.exportNeuroML2(reference, connections, stimulations, format) # export cells and connectivity to NeuroML 2 format
if output:
return (pops, cells, conns, stims, rxd, simData) # depends on [control=['if'], data=[]]
|
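A hypothetical end-to-end call, assuming NetPyNE's usual specs objects; the population parameters below are made up for illustration, and the exported file name follows from reference.

from netpyne import sim, specs  # assumed NetPyNE import layout

netParams = specs.NetParams()
netParams.popParams['E'] = {'cellType': 'PYR', 'numCells': 5}  # made-up population
simConfig = specs.SimConfig()
simConfig.duration = 100

# Builds the network, exports it under the given reference (e.g. demo.net.nml),
# and, because output=True, also returns the intermediate objects.
pops, cells, conns, stims, rxd, simData = sim.createExportNeuroML2(
    netParams=netParams, simConfig=simConfig, reference='demo',
    connections=True, stimulations=True, output=True, format='xml')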
def run_hook(hook_name, *args, **kwargs):
"""Runs the passed hook on all registered plugins
The function checks, whether the hook is available in the plugin.
:param hook_name: Name of the hook, corresponds to the function name being called
:param args: Arguments
:param kwargs: Keyword arguments
"""
for module in plugin_dict.values():
if hasattr(module, "hooks") and callable(getattr(module.hooks, hook_name, None)):
getattr(module.hooks, hook_name)(*args, **kwargs)
|
def function[run_hook, parameter[hook_name]]:
    constant[Runs the passed hook on all registered plugins.
    The function checks whether the hook is available in the plugin.
:param hook_name: Name of the hook, corresponds to the function name being called
:param args: Arguments
:param kwargs: Keyword arguments
]
for taget[name[module]] in starred[call[name[plugin_dict].values, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da18eb55450> begin[:]
call[call[name[getattr], parameter[name[module].hooks, name[hook_name]]], parameter[<ast.Starred object at 0x7da18eb57f40>]]
|
keyword[def] identifier[run_hook] ( identifier[hook_name] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[module] keyword[in] identifier[plugin_dict] . identifier[values] ():
keyword[if] identifier[hasattr] ( identifier[module] , literal[string] ) keyword[and] identifier[callable] ( identifier[getattr] ( identifier[module] . identifier[hooks] , identifier[hook_name] , keyword[None] )):
identifier[getattr] ( identifier[module] . identifier[hooks] , identifier[hook_name] )(* identifier[args] ,** identifier[kwargs] )
|
def run_hook(hook_name, *args, **kwargs):
"""Runs the passed hook on all registered plugins
The function checks, whether the hook is available in the plugin.
:param hook_name: Name of the hook, corresponds to the function name being called
:param args: Arguments
:param kwargs: Keyword arguments
"""
for module in plugin_dict.values():
if hasattr(module, 'hooks') and callable(getattr(module.hooks, hook_name, None)):
getattr(module.hooks, hook_name)(*args, **kwargs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['module']]
|
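A self-contained sketch of the dispatch above. Real plugins would be imported modules exposing a hooks submodule; here SimpleNamespace objects stand in for them, and the plugin names are hypothetical.

from types import SimpleNamespace

def run_hook(hook_name, *args, **kwargs):  # same body as the row above
    for module in plugin_dict.values():
        if hasattr(module, 'hooks') and callable(getattr(module.hooks, hook_name, None)):
            getattr(module.hooks, hook_name)(*args, **kwargs)

plugin_dict = {
    'greeter': SimpleNamespace(
        hooks=SimpleNamespace(on_start=lambda name: print('hello,', name))),
    'silent': SimpleNamespace(hooks=SimpleNamespace()),  # no on_start hook: skipped
    'bare': SimpleNamespace(),                           # no hooks module: skipped
}

run_hook('on_start', 'world')  # prints: hello, world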
def count_authors_by_annotation(graph: BELGraph, annotation: str = 'Subgraph') -> Mapping[str, typing.Counter[str]]:
"""Group the author counters by sub-graphs induced by the annotation.
:param graph: A BEL graph
:param annotation: The annotation to use to group the graph
:return: A dictionary of Counters {subgraph name: Counter from {author: frequency}}
"""
authors = group_as_dict(_iter_authors_by_annotation(graph, annotation=annotation))
return count_defaultdict(authors)
|
def function[count_authors_by_annotation, parameter[graph, annotation]]:
constant[Group the author counters by sub-graphs induced by the annotation.
:param graph: A BEL graph
:param annotation: The annotation to use to group the graph
:return: A dictionary of Counters {subgraph name: Counter from {author: frequency}}
]
variable[authors] assign[=] call[name[group_as_dict], parameter[call[name[_iter_authors_by_annotation], parameter[name[graph]]]]]
return[call[name[count_defaultdict], parameter[name[authors]]]]
|
keyword[def] identifier[count_authors_by_annotation] ( identifier[graph] : identifier[BELGraph] , identifier[annotation] : identifier[str] = literal[string] )-> identifier[Mapping] [ identifier[str] , identifier[typing] . identifier[Counter] [ identifier[str] ]]:
literal[string]
identifier[authors] = identifier[group_as_dict] ( identifier[_iter_authors_by_annotation] ( identifier[graph] , identifier[annotation] = identifier[annotation] ))
keyword[return] identifier[count_defaultdict] ( identifier[authors] )
|
def count_authors_by_annotation(graph: BELGraph, annotation: str='Subgraph') -> Mapping[str, typing.Counter[str]]:
"""Group the author counters by sub-graphs induced by the annotation.
:param graph: A BEL graph
:param annotation: The annotation to use to group the graph
:return: A dictionary of Counters {subgraph name: Counter from {author: frequency}}
"""
authors = group_as_dict(_iter_authors_by_annotation(graph, annotation=annotation))
return count_defaultdict(authors)
|
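group_as_dict and count_defaultdict are PyBEL-internal helpers. A plain-Python equivalent of the grouping-and-counting they perform (hypothetical function and data, shown only to illustrate the return shape) looks like this:

from collections import Counter, defaultdict

def count_by_group(pairs):
    # Group (annotation value, author) pairs, then count authors per group.
    grouped = defaultdict(list)
    for key, author in pairs:
        grouped[key].append(author)
    return {key: Counter(authors) for key, authors in grouped.items()}

pairs = [('Apoptosis', 'Smith J'), ('Apoptosis', 'Smith J'),
         ('Apoptosis', 'Doe A'), ('Inflammation', 'Doe A')]
print(count_by_group(pairs))
# {'Apoptosis': Counter({'Smith J': 2, 'Doe A': 1}),
#  'Inflammation': Counter({'Doe A': 1})}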
def get_matches(expr_lst, ts):
"""
Get a list of TimeSeries objects that match the given expression.
    :param list expr_lst: Expressions, each a [key, operator, value] triple
    :param list ts: List of TimeSeries objects to filter
:return list new_ts: Matched time series objects
:return list idxs: Indices of matched objects
"""
logger_ts.info("enter get_matches")
new_ts = []
idxs = []
match = False
try:
for idx, ts_data in enumerate(ts):
for expr in expr_lst:
try:
val = ts_data[expr[0]]
# Check what comparison operator is being used
if expr[1] == 'in':
# "IN" operator can't be used in get_truth. Handle first.
if expr[2] in val:
match = True
elif match_operators(val, expr[1], expr[2]):
# If it's a typical operator, check with the truth test.
match = True
else:
# If one comparison is false, then it can't possibly be a match
match = False
break
except KeyError as e:
logger_ts.warn("get_matches: KeyError: getting value from TimeSeries object, {}, {}".format(expr, e))
match = False
except IndexError as e:
logger_ts.warn("get_matches: IndexError: getting value from TimeSeries object, {}, {}".format(expr, e))
match = False
if match:
idxs.append(idx)
new_ts.append(ts_data)
except AttributeError as e:
logger_ts.debug("get_matches: AttributeError: unable to get expression matches, {}, {}".format(type(ts), e))
print("Error: Timeseries is an invalid data type")
if not new_ts:
print("No matches found for that expression")
else:
print("Found {} matches from {} columns".format(len(new_ts), len(ts)))
logger_ts.info("exit get_matches")
return new_ts, idxs
|
def function[get_matches, parameter[expr_lst, ts]]:
constant[
Get a list of TimeSeries objects that match the given expression.
    :param list expr_lst: Expressions, each a [key, operator, value] triple
    :param list ts: List of TimeSeries objects to filter
:return list new_ts: Matched time series objects
:return list idxs: Indices of matched objects
]
call[name[logger_ts].info, parameter[constant[enter get_matches]]]
variable[new_ts] assign[=] list[[]]
variable[idxs] assign[=] list[[]]
variable[match] assign[=] constant[False]
<ast.Try object at 0x7da18f00d510>
if <ast.UnaryOp object at 0x7da18f00d540> begin[:]
call[name[print], parameter[constant[No matches found for that expression]]]
call[name[logger_ts].info, parameter[constant[exit get_matches]]]
return[tuple[[<ast.Name object at 0x7da18f00f610>, <ast.Name object at 0x7da18f00f460>]]]
|
keyword[def] identifier[get_matches] ( identifier[expr_lst] , identifier[ts] ):
literal[string]
identifier[logger_ts] . identifier[info] ( literal[string] )
identifier[new_ts] =[]
identifier[idxs] =[]
identifier[match] = keyword[False]
keyword[try] :
keyword[for] identifier[idx] , identifier[ts_data] keyword[in] identifier[enumerate] ( identifier[ts] ):
keyword[for] identifier[expr] keyword[in] identifier[expr_lst] :
keyword[try] :
identifier[val] = identifier[ts_data] [ identifier[expr] [ literal[int] ]]
keyword[if] identifier[expr] [ literal[int] ]== literal[string] :
keyword[if] identifier[expr] [ literal[int] ] keyword[in] identifier[val] :
identifier[match] = keyword[True]
keyword[elif] identifier[match_operators] ( identifier[val] , identifier[expr] [ literal[int] ], identifier[expr] [ literal[int] ]):
identifier[match] = keyword[True]
keyword[else] :
identifier[match] = keyword[False]
keyword[break]
keyword[except] identifier[KeyError] keyword[as] identifier[e] :
identifier[logger_ts] . identifier[warn] ( literal[string] . identifier[format] ( identifier[expr] , identifier[e] ))
identifier[match] = keyword[False]
keyword[except] identifier[IndexError] keyword[as] identifier[e] :
identifier[logger_ts] . identifier[warn] ( literal[string] . identifier[format] ( identifier[expr] , identifier[e] ))
identifier[match] = keyword[False]
keyword[if] identifier[match] :
identifier[idxs] . identifier[append] ( identifier[idx] )
identifier[new_ts] . identifier[append] ( identifier[ts_data] )
keyword[except] identifier[AttributeError] keyword[as] identifier[e] :
identifier[logger_ts] . identifier[debug] ( literal[string] . identifier[format] ( identifier[type] ( identifier[ts] ), identifier[e] ))
identifier[print] ( literal[string] )
keyword[if] keyword[not] identifier[new_ts] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[new_ts] ), identifier[len] ( identifier[ts] )))
identifier[logger_ts] . identifier[info] ( literal[string] )
keyword[return] identifier[new_ts] , identifier[idxs]
|
def get_matches(expr_lst, ts):
"""
Get a list of TimeSeries objects that match the given expression.
    :param list expr_lst: Expressions, each a [key, operator, value] triple
    :param list ts: List of TimeSeries objects to filter
:return list new_ts: Matched time series objects
:return list idxs: Indices of matched objects
"""
logger_ts.info('enter get_matches')
new_ts = []
idxs = []
match = False
try:
for (idx, ts_data) in enumerate(ts):
for expr in expr_lst:
try:
val = ts_data[expr[0]]
# Check what comparison operator is being used
if expr[1] == 'in':
# "IN" operator can't be used in get_truth. Handle first.
if expr[2] in val:
match = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif match_operators(val, expr[1], expr[2]):
# If it's a typical operator, check with the truth test.
match = True # depends on [control=['if'], data=[]]
else:
# If one comparison is false, then it can't possibly be a match
match = False
break # depends on [control=['try'], data=[]]
except KeyError as e:
logger_ts.warn('get_matches: KeyError: getting value from TimeSeries object, {}, {}'.format(expr, e))
match = False # depends on [control=['except'], data=['e']]
except IndexError as e:
logger_ts.warn('get_matches: IndexError: getting value from TimeSeries object, {}, {}'.format(expr, e))
match = False # depends on [control=['except'], data=['e']] # depends on [control=['for'], data=['expr']]
if match:
idxs.append(idx)
new_ts.append(ts_data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]]
except AttributeError as e:
logger_ts.debug('get_matches: AttributeError: unable to get expression matches, {}, {}'.format(type(ts), e))
print('Error: Timeseries is an invalid data type') # depends on [control=['except'], data=['e']]
if not new_ts:
print('No matches found for that expression') # depends on [control=['if'], data=[]]
else:
print('Found {} matches from {} columns'.format(len(new_ts), len(ts)))
logger_ts.info('exit get_matches')
return (new_ts, idxs)
|
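The expression format is easiest to see by example. The records and field names below are made up, and match_operators is stubbed with a plausible operator table (the real helper lives in the LiPD utilities, alongside logger_ts):

import operator

def match_operators(val, op, other):
    # Stub for the LiPD helper of the same name, covering common operators.
    ops = {'==': operator.eq, '!=': operator.ne,
           '<': operator.lt, '<=': operator.le,
           '>': operator.gt, '>=': operator.ge}
    return ops[op](val, other)

ts = [
    {'archiveType': 'marine sediment', 'paleoData_variableName': 'sst'},
    {'archiveType': 'ice core', 'paleoData_variableName': 'd18O'},
]
# Each expression is a [key, operator, value] triple; a record matches only
# if every triple holds ('in' does a substring test on the record's value).
expr_lst = [['archiveType', 'in', 'marine'],
            ['paleoData_variableName', '==', 'sst']]
# get_matches(expr_lst, ts) would return ([ts[0]], [0]): only the first
# record both contains 'marine' and has variable name 'sst'.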
def publisher(self_url=None, hub_url=None):
"""This decorator makes it easier to implement a websub publisher. You use
it on an endpoint, and Link headers will automatically be added. To also
include these links in your template html/atom/rss (and you should!) you
can use the following to get the raw links:
- {{ websub_self_url }}
- {{ websub_hub_url }}
And the following to get them wrapped in <link tags>:
- {{ websub_self_link }}
- {{ websub_hub_link }}
If hub_url is not given, the hub needs to be a flask_websub one and the
hub and publisher need to share their application for the url to be
auto-discovered. If that is not the case, you need to set
config['HUB_URL'].
If self_url is not given, the url of the current request will be used. Note
that this includes url query arguments. If this is not what you want,
override it.
"""
def decorator(topic_view):
@functools.wraps(topic_view)
def wrapper(*args, **kwargs):
nonlocal hub_url, self_url
if not self_url:
self_url = request.url
if not hub_url:
try:
hub_url = url_for('websub_hub.endpoint', _external=True)
except BuildError:
hub_url = current_app.config['HUB_URL']
stack.top.websub_self_url = self_url
stack.top.websub_hub_url = hub_url
stack.top.websub_self_link = Markup(SELF_LINK % self_url)
stack.top.websub_hub_link = Markup(HUB_LINK % hub_url)
resp = make_response(topic_view(*args, **kwargs))
resp.headers.add('Link', HEADER_VALUE % (self_url, hub_url))
return resp
return wrapper
return decorator
|
def function[publisher, parameter[self_url, hub_url]]:
constant[This decorator makes it easier to implement a websub publisher. You use
it on an endpoint, and Link headers will automatically be added. To also
include these links in your template html/atom/rss (and you should!) you
can use the following to get the raw links:
- {{ websub_self_url }}
- {{ websub_hub_url }}
And the following to get them wrapped in <link tags>:
- {{ websub_self_link }}
- {{ websub_hub_link }}
If hub_url is not given, the hub needs to be a flask_websub one and the
hub and publisher need to share their application for the url to be
auto-discovered. If that is not the case, you need to set
config['HUB_URL'].
If self_url is not given, the url of the current request will be used. Note
that this includes url query arguments. If this is not what you want,
override it.
]
def function[decorator, parameter[topic_view]]:
def function[wrapper, parameter[]]:
<ast.Nonlocal object at 0x7da18f09cdc0>
if <ast.UnaryOp object at 0x7da18f09d570> begin[:]
variable[self_url] assign[=] name[request].url
if <ast.UnaryOp object at 0x7da18f09da80> begin[:]
<ast.Try object at 0x7da18f09da20>
name[stack].top.websub_self_url assign[=] name[self_url]
name[stack].top.websub_hub_url assign[=] name[hub_url]
name[stack].top.websub_self_link assign[=] call[name[Markup], parameter[binary_operation[name[SELF_LINK] <ast.Mod object at 0x7da2590d6920> name[self_url]]]]
name[stack].top.websub_hub_link assign[=] call[name[Markup], parameter[binary_operation[name[HUB_LINK] <ast.Mod object at 0x7da2590d6920> name[hub_url]]]]
variable[resp] assign[=] call[name[make_response], parameter[call[name[topic_view], parameter[<ast.Starred object at 0x7da18f09c5e0>]]]]
call[name[resp].headers.add, parameter[constant[Link], binary_operation[name[HEADER_VALUE] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f09db40>, <ast.Name object at 0x7da18f09faf0>]]]]]
return[name[resp]]
return[name[wrapper]]
return[name[decorator]]
|
keyword[def] identifier[publisher] ( identifier[self_url] = keyword[None] , identifier[hub_url] = keyword[None] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[topic_view] ):
@ identifier[functools] . identifier[wraps] ( identifier[topic_view] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[nonlocal] identifier[hub_url] , identifier[self_url]
keyword[if] keyword[not] identifier[self_url] :
identifier[self_url] = identifier[request] . identifier[url]
keyword[if] keyword[not] identifier[hub_url] :
keyword[try] :
identifier[hub_url] = identifier[url_for] ( literal[string] , identifier[_external] = keyword[True] )
keyword[except] identifier[BuildError] :
identifier[hub_url] = identifier[current_app] . identifier[config] [ literal[string] ]
identifier[stack] . identifier[top] . identifier[websub_self_url] = identifier[self_url]
identifier[stack] . identifier[top] . identifier[websub_hub_url] = identifier[hub_url]
identifier[stack] . identifier[top] . identifier[websub_self_link] = identifier[Markup] ( identifier[SELF_LINK] % identifier[self_url] )
identifier[stack] . identifier[top] . identifier[websub_hub_link] = identifier[Markup] ( identifier[HUB_LINK] % identifier[hub_url] )
identifier[resp] = identifier[make_response] ( identifier[topic_view] (* identifier[args] ,** identifier[kwargs] ))
identifier[resp] . identifier[headers] . identifier[add] ( literal[string] , identifier[HEADER_VALUE] %( identifier[self_url] , identifier[hub_url] ))
keyword[return] identifier[resp]
keyword[return] identifier[wrapper]
keyword[return] identifier[decorator]
|
def publisher(self_url=None, hub_url=None):
"""This decorator makes it easier to implement a websub publisher. You use
it on an endpoint, and Link headers will automatically be added. To also
include these links in your template html/atom/rss (and you should!) you
can use the following to get the raw links:
- {{ websub_self_url }}
- {{ websub_hub_url }}
And the following to get them wrapped in <link tags>:
- {{ websub_self_link }}
- {{ websub_hub_link }}
If hub_url is not given, the hub needs to be a flask_websub one and the
hub and publisher need to share their application for the url to be
auto-discovered. If that is not the case, you need to set
config['HUB_URL'].
If self_url is not given, the url of the current request will be used. Note
that this includes url query arguments. If this is not what you want,
override it.
"""
def decorator(topic_view):
@functools.wraps(topic_view)
def wrapper(*args, **kwargs):
nonlocal hub_url, self_url
if not self_url:
self_url = request.url # depends on [control=['if'], data=[]]
if not hub_url:
try:
hub_url = url_for('websub_hub.endpoint', _external=True) # depends on [control=['try'], data=[]]
except BuildError:
hub_url = current_app.config['HUB_URL'] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
stack.top.websub_self_url = self_url
stack.top.websub_hub_url = hub_url
stack.top.websub_self_link = Markup(SELF_LINK % self_url)
stack.top.websub_hub_link = Markup(HUB_LINK % hub_url)
resp = make_response(topic_view(*args, **kwargs))
resp.headers.add('Link', HEADER_VALUE % (self_url, hub_url))
return resp
return wrapper
return decorator
|
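A hypothetical endpoint wired through this decorator; the hub URL and template name are placeholders. With a flask_websub hub registered on the same app, HUB_URL discovery via url_for would kick in instead.

from flask import Flask, render_template

app = Flask(__name__)
app.config['HUB_URL'] = 'https://hub.example.com/'  # used when no local hub exists

@app.route('/feed')
@publisher()  # the decorator defined above; self_url defaults to request.url
def feed():
    # Link headers are attached automatically; the template can additionally
    # embed {{ websub_self_link }} and {{ websub_hub_link }} in its markup.
    return render_template('feed.xml')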