# Source: michaelweiss092/LillyMol @ a2b7d1d8a07ef338c754a0a2e3b2624aac694cc9
# Path: contrib/script/py/mmp/mmp_mcss_objects.py
# License: Apache-2.0
###################################################################
""" Summary: Class and Methods for deriving MCSS based MMP's
About: Derive a matched pair based MCSS from a pair molecules
To do: - extend the method enumerate_fragment_properties to also
enumerate self.mol_smi_dict as this would allow the addition
of a flag '-p' that prints out whole molecule props alongside
MCSS and therefore compute %molecule that the MCSS covers
- could use other descriptors from IW code to get MCSS via
bond count not #Atoms or
- Should move iterators in process_mcss_list_to_string to be numeric
and store numeric ID's in self.largest_mcs_mmp_double/single
- could allow further switched to change behaviour of tie break
where single/double or double alone give tie break MCSS
[connected substructures versus disconnected or both/either]
- Extension to triple cut would allow improved search/match e.g.:
N1(C(c2c(cc3c(c2)OCO3)CC1)c4cc(c(c(c4)OC)O)OC)C(=O)OC CHEMBL311765
N1(C(c2c(cc(cc2)O)CC1)c3ccc(cc3)OCCN4CCCC4)C(=O)OCC CHEMBL94080
"""
###################################################################
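# Typical call sequence (a sketch mirroring the unit tests below; file names are illustrative):
#     mmplogger = logging.getLogger('lillymol_file_logger')
#     logging.disable(logging.CRITICAL)
#     my_mcss = MMPbasedMCSSObjectClass(mmplogger)
#     my_mcss.build_from_dicer('input.smi', 'BOTH', 'NONE')    # parse dicer fragmentation of input smiles
#     my_mcss.enumerate_fragment_properties()                  # natoms per fragment via pymo.iwdescr
#     my_mcss.get_largest_mcs_pairs('output.csv', 'BOTH')      # write the largest-MCSS pair per molid pair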
import logging
import csv
import os
import sys
import unittest
import tempfile
from builtins import range
from mmp.mmp_data_objects import MMPDataObjectClass
if 'LILLYMOL_HOME' in os.environ:
import pybase.pyopmo as pymo
else:
import pybase.pymo as pymo
class MMPbasedMCSSObjectClass(MMPDataObjectClass):
def __init__(self, logger_object):
"""
Example usage:
mmplogger = logging.getLogger('lillymol_file_logger')
logging.disable(logging.CRITICAL)
my_mmp_mcss_object = MMPbasedMCSSObjectClass(mmplogger)
"""
MMPDataObjectClass.__init__(self, logger_object)
self.logger = logger_object
if len(logging.Logger.manager.loggerDict) < 1:
# exit with system status 1 and custom error
sys.exit("Invalid or no logger object passed to MMPObjectClass. Please create \
and pass a logger and set to use logging.disable if you don't want logging")
# this is used for storing the largest MCS MMP for given pair
self.largest_mcs_mmp_result = {}
self.ref_smi_props = {}
def clean_out_data_mcss_obj(self):
"""Method to clean out all objects in class"""
self.clean_out_data()
self.mcs_mmp.clear()
def enumerate_fragment_properties(self):
"""Writes out the ref_smi_dict to disk, calculates natoms, returns data to self.ref_smi_props
Some complexities in method such as double cut fragments (iw_descr only calcs largest frag)"""
frag_smi_file = tempfile.NamedTemporaryFile(delete=False, suffix='.smi')
frag_smi_props_out = tempfile.NamedTemporaryFile(delete=False)
with open(frag_smi_file.name, "w") as f:
for item in self.refsmi_dict:
if isinstance(item, int):
# can't see an easy way to do this except string compare, [1H] causes iw_descr to crash out
if self.refsmi_dict[item] != '[1H]':
f.write(self.refsmi_dict[item]+" "+str(item)+"\n")
# run pymo.iwdescr
self.logger.info("Running pymo.iwdescr on %s smi with in:%s, out:%s" %
(len(self.refsmi_dict), frag_smi_file.name, frag_smi_props_out.name))
exit_status = pymo.iwdescr(frag_smi_file.name, frag_smi_props_out.name, params_dict={'-l': '', '-v': ''},
loggero=self.logger)
self.logger.debug("Ran iwdescr with exit status %s" % exit_status)
with open(frag_smi_props_out.name, "r") as csv_file:
reader = csv.reader(csv_file, delimiter=' ')
i = -1
for row in reader:
i += 1
# if header row, append headers
if i == 0:
if row[1] != 'w_natoms':
self.logger.warn("When this was written, NATOMs was in array position 1 (zero indexed) with "
"column title w_natoms. Now it's not, it's: %s" % row[1])
sys.exit("When this was written, NATOMs was in array position 1 (zero indexed) with column "
"title w_natom. Now it's not, it's: %s" % row[1])
continue
# we trust there is only one entry per id
# print row[0], row[1]
self.ref_smi_props[int(row[0])] = int(row[1])
frag_smi_props_out.close()
self.logger.debug("Completed load of %s mol props from dict of %s from file %s" %
(len(self.ref_smi_props), len(self.refsmi_dict)/2, frag_smi_props_out.name))
def get_largest_mcs_pairs(self, out_file, cut_type, mdc_atm_soft=None, mdc_atm_soft_threshold=None,
mdc_atm_hard=None):
"""Method to print out a single smi - smi pair from the input CSV with data differences. Selection of the
exact matched pair for a given smi - smi combination is based on the largest Maximum Common Substructure
which equates to the MMP with the smallest MWT/#Atoms difference across all MMP's for that smi/smi combo
out_file:
The user specified output file
cut_type:
Specifies the type of fragmentation required. Allowed values are SINGLE,
DOUBLE or BOTH. Currently this class does not support anything greater than
double cut fragmentation
mdc_atm_hard:
max double cut atom cutoff (hard)
Never consider double cut context fragments where one half has num_atoms <= mdc_atm_hard
i.e.: this is a hard cutoff filter implemented during dicer parsing
mdc_atm_soft:
max double cut atom cutoff (soft)
* must be used with mdc_atm_soft_threshold
When double cut is greater than single, if one part of double context has num_atoms <= mdc_atm_soft and
total double cut atom <= single cut atoms + mdc_atm_soft_threshold then discard
mdc_atm_soft_threshold:
max double cut atom cutoff threshold (soft)
* must be used with mdc_atm_soft
This gets added to single cut num atoms each comparison that's done, if and when mdc_atm_soft is set
see details of mdc_atm_soft
Example usage:
# give me a CSV named my_output.pairs of all MCS based pairs:
my_mmp_object.get_largest_mcs_pairs('myoutput.csv', 'BOTH', 'DICER')
# give me a CSV of only the DOUBLE cut MCS based pairs with RDKit attachment points:
my_mmp_object.get_largest_mcs_pairs('myoutput.csv', 'DOUBLE', 'RDKIT')
"""
if (mdc_atm_soft is not None and mdc_atm_soft_threshold is None) or\
(mdc_atm_soft is None and mdc_atm_soft_threshold is not None):
sys.exit("Error, mdc_atm_soft and mdc_atm_soft_threshold must be specified together.")
def process_mcss_list_to_string(prefix, input_list):
"""sub method to build a printable string from input list of specific structure"""
out_string = ''
num_of_entries = len(input_list)
if num_of_entries > 4:
for i_ in range(0, num_of_entries, 4):
# integer division so the suffix renders as 1, 2, ... rather than 1.0 under Python 3
out_string = out_string + prefix + "_" + str(i_ // 4 + 1) + "," + str(molid_L) + "," + str(molid_R)
out_string = out_string + "," + str(sum(input_list[0 + i_])) + "," + str(input_list[1 + i_]) + ","
out_string = out_string + str(input_list[2 + i_]) + "," + str(input_list[3 + i_])
out_string += "\n"
else:
if len(input_list[1]) > 1:
ctx_smi = self.refsmi_dict[input_list[1][0]] + "." + self.refsmi_dict[input_list[1][1]]
else:
ctx_smi = self.refsmi_dict[input_list[1][0]]
out_string = prefix + "," + str(molid_L) + "," + str(molid_R) + ","
out_string = out_string + str(sum(input_list[0])) + "," + ctx_smi + ","
out_string = out_string + str(self.refsmi_dict[input_list[2]]) + "," \
+ str(self.refsmi_dict[input_list[3]])
out_string += "\n"
return out_string
def disambiguate_double_list(input_list):
"""sub method to untangle double cut tie break cases"""
num_of_entries = len(input_list)
filtered_list = []
# The tie code should have only saved the example with the largest 'smallest fragment' size
# so now we just take the first example where atom numbering [1 before [2
# Theoretically, if two different examples of a double cut fragmentation pattern exist with the same number
# of atoms *in both parts* of the context, then there is another tie break here. e.g.:
# num_atoms in context = (2,10) should always appear not (1,11) but can't disentangle many (1,11)
# Decided not to handle this and instead just take the first one with the ordered numbering
for i_ in range(0, num_of_entries, 4):
# only use if the isomeric label is the right way round, [1 before [2
if '[1' in self.refsmi_dict[input_list[1 + i_][0]]:
filtered_list = input_list[(0 + i_): (4 + i_)]
else:
continue
return filtered_list
def remove_atom_num_dupes(input_list):
"""sub method to get only 1 example of simple isomeric numbering flip"""
# only use if the isomeric label is the right way round, [1 before [2
if '[1' in self.refsmi_dict[input_list[1][0]]:
# take the first 4 items
output_list = input_list[:4]
else:
# just take the last 4 items
output_list = input_list[-4:]
return output_list
self.logger.info('Opening output file for write: %s' % out_file)
# check cut_type, convert to int
if cut_type.upper() == 'DOUBLE':
# confusing but faster later
cut_type_id = 3
elif cut_type.upper() == 'BOTH':
# confusing but faster later
cut_type_id = 2
elif cut_type.upper() == 'SINGLE':
cut_type_id = 1
else:
self.logger.warning('cut_type specification is incorrect, using single cut: %s' % cut_type.upper())
cut_type_id = 1
# fail if both single_pairs_dict and double_pairs_dict are empty
if (len(self.single_pairs_dict) == 0) and (len(self.double_pairs_dict) == 0):
self.logger.debug('No data found in single_pairs_dict and/or double_pairs_dict, expect no results')
# sys.exit("Error: no data found in single_pairs_dict and/or double_pairs_dict, nothing to find and write")
#
# Here we build data structures of type:
# self.largest_mcs_mmp_result[(molid_L, molid_R)] = [(#atoms, #atoms or None),
# (context_id, context_id or None), frag_Left_id, frag_Right_id]
#
# single - this is easy as we only keep/store the one with the greatest number of atoms
if cut_type_id <= 2:
for molid_L, molid_R, ctx_id, frag_L_id, frag_R_id in \
self.iterator_single_pairs_dict_numeric(inc_attachpt=False):
if (molid_L, molid_R) in self.largest_mcs_mmp_result:
if self.largest_mcs_mmp_result[(molid_L, molid_R)][0][0] <= self.ref_smi_props[ctx_id]:
if self.largest_mcs_mmp_result[(molid_L, molid_R)][0][0] == self.ref_smi_props[ctx_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)].extend(
[(self.ref_smi_props[ctx_id], ), (ctx_id, ), frag_L_id, frag_R_id])
else:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = [
(self.ref_smi_props[ctx_id], ), (ctx_id, ), frag_L_id, frag_R_id]
else:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = [
(self.ref_smi_props[ctx_id], ), (ctx_id, ), frag_L_id, frag_R_id]
# now build the final results on the fly
# double - for each one we compare against what we already have in self.largest_mcs_mmp_result
ctx_natoms = None
if cut_type_id >= 2:
for molid_L, molid_R, ctx1_id, ctx2_id, frag_L_id, frag_R_id in \
self.iterator_double_pairs_dict_numeric(inc_attachpt=False):
#
if ctx1_id in self.ref_smi_props:
ctx_natoms = (self.ref_smi_props[ctx1_id], )
else:
ctx1_smi = self.refsmi_dict[ctx1_id]
ctx1_smi = ctx1_smi.replace("[1", "[9")
ctx1_smi = ctx1_smi.replace("[2", "[1")
ctx1_smi = ctx1_smi.replace("[9", "[2")
try:
ctx_natoms = (self.ref_smi_props[self.refsmi_dict[ctx1_smi]], )
except KeyError:
print("ERR >>>")
print("{} {} {} {} {} {}".format(molid_L, molid_R, ctx1_id, ctx2_id, frag_L_id, frag_R_id))
print("{} {} {}".format(ctx1_id, ctx1_smi, self.refsmi_dict.get(ctx1_smi)))
print("")
if ctx2_id in self.ref_smi_props:
ctx_natoms = ctx_natoms + (self.ref_smi_props[ctx2_id], )
else:
ctx2_smi = self.refsmi_dict[ctx2_id]
ctx2_smi = ctx2_smi.replace("[1", "[9")
ctx2_smi = ctx2_smi.replace("[2", "[1")
ctx2_smi = ctx2_smi.replace("[9", "[2")
ctx_natoms = ctx_natoms + (self.ref_smi_props[self.refsmi_dict[ctx2_smi]], )
# If the indicator flag check_all_context is set to true we need to pre-filter all ctx fragments
# to ensure they are greater than or equal to the specified limit for mdc_atm_hard (maximum double
# cut atoms hard limit). This is a crude filter and could remove valid double cut MCSS.
if mdc_atm_hard is not None:
if ctx_natoms[0] <= mdc_atm_hard:
continue
elif ctx_natoms[1] <= mdc_atm_hard:
continue
#
# Main
# have we seen this smi - smi pair before?
if (molid_L, molid_R) in self.largest_mcs_mmp_result:
# get the number of atoms in the context
num_atoms_existing = self.largest_mcs_mmp_result[(molid_L, molid_R)][0]
if len(num_atoms_existing) > 1:
total_num_atoms_existing = sum(num_atoms_existing)
else:
total_num_atoms_existing = num_atoms_existing[0]
total_num_atoms_new = sum(ctx_natoms)
if total_num_atoms_new > total_num_atoms_existing:
# if it is a double and we have a min fragment setting
if mdc_atm_soft is not None:
# if it falls below the threshold at which we apply this min frag setting
if total_num_atoms_new <= (total_num_atoms_existing + mdc_atm_soft_threshold):
# only keep if both frag sizes are legal
if '[1' in self.refsmi_dict[ctx1_id]:
if (ctx_natoms[0] > mdc_atm_soft) and (ctx_natoms[1] > mdc_atm_soft):
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
# above threshold so keep anyway
else:
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
else:
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
# tie-break
elif total_num_atoms_new == total_num_atoms_existing:
# single always wins over double, so only consider this if existing is double
# double cut tie breaks get disambiguated later using custom function
if len(num_atoms_existing) == 1:
continue
else:
# consider the size of the 'smallest fragment' and add if same, replace if bigger,
# drop if smaller
if min(ctx_natoms) > min(num_atoms_existing):
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = \
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id]
elif min(ctx_natoms) == min(num_atoms_existing):
self.largest_mcs_mmp_result[(molid_L, molid_R)].extend(
[ctx_natoms, (ctx1_id, ctx2_id), frag_L_id, frag_R_id])
else:
# don't store as we have a better context with a larger 'smallest fragment'
continue
# double cut context must be smaller than what we already have so discard this new one
else:
continue
else:
# new result, case where we only have a double cut MCSS so add it!
if '[1' in self.refsmi_dict[ctx1_id]:
self.largest_mcs_mmp_result[(molid_L, molid_R)] = [ctx_natoms, (ctx1_id, ctx2_id),
frag_L_id, frag_R_id]
with open(out_file, "w") as final_out:
final_out.write('CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R\n')
# do single cut first as these take precedence above a double
for (molid_L, molid_R) in self.largest_mcs_mmp_result:
list_length = len(self.largest_mcs_mmp_result[(molid_L, molid_R)])
# the list self.largest_mcs_mmp_result[(molid_L, molid_R)] contains an ordered list of items
# in groups of 4: (1) a tuple of the num_atoms, (2) a tuple of the context id(s),
# (3 & 4) the left and right fragment ids
# Therefore if the list is greater than 8 items it means we have more than one double
# cut that we need to consider, possibly as a double cut tie break. We do not consider the
# case where there are exactly 8 items as we know this will be two identical fragmentation patterns
# with differing isomeric numbering on the atom attachment points, therefore we use >8 not >=8
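# e.g. a tie-broken double cut entry (2 records, 8 items) would look like:
#   [(2, 10), (ctx1_id, ctx2_id), frag_L_id, frag_R_id,
#    (2, 10), (ctx1_id_b, ctx2_id_b), frag_L_id_b, frag_R_id_b]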
if list_length > 8:
if len(self.largest_mcs_mmp_result[(molid_L, molid_R)][0]) == 1:
# disambiguate single cut list
final_out.write(process_mcss_list_to_string('SINGLE', self.largest_mcs_mmp_result[
(molid_L, molid_R)][0:4]))
else:
# print("Double won (a): ", molid_L, molid_R, self.largest_mcs_mmp_result[(molid_L, molid_R)])
new_list = disambiguate_double_list(self.largest_mcs_mmp_result[(molid_L, molid_R)])
final_out.write(process_mcss_list_to_string('DOUBLE', new_list))
elif list_length == 4:
# print("Single won (a): ", molid_L, molid_R, self.largest_mcs_mmp_result[(molid_L, molid_R)])
final_out.write(process_mcss_list_to_string('SINGLE', self.largest_mcs_mmp_result[
(molid_L, molid_R)]))
else:
# print("Double wins (b): ", molid_L, molid_R, self.largest_mcs_mmp_result[(molid_L, molid_R)])
# need to remove atom numbering dupes then print
new_list = remove_atom_num_dupes(self.largest_mcs_mmp_result[(molid_L, molid_R)])
final_out.write(process_mcss_list_to_string('DOUBLE', new_list))
class _TestMMPbasedMCSSObjectClass(unittest.TestCase):
"""Test class for MMPDataObjectClass(object) written to use pythons unittest
Example usage:
python mmp_mcss_objects.py
coverage run mmp_mcss_objects.py
coverage report mmp_mcss_objects.py
"""
def setUp(self):
"""Instantiate temp file names, test data objects that get written to temp files
a silent logger object (needed to instantiate class) and the mmp object we'll test"""
self.maxDiff = None
# setup test data location use tempfile.NamedTemporaryFile(delete=False) to persist data on disk
self.temp_file_input_smi_01 = tempfile.NamedTemporaryFile(delete=False, suffix=".smi",
encoding='utf-8', mode='wt')
self.temp_file_input_smi_03 = tempfile.NamedTemporaryFile(delete=False, suffix=".smi",
encoding='utf-8', mode='wt')
self.temp_file_output_pairs = tempfile.NamedTemporaryFile(delete=False)
# setup a logger object
self.mmplogger = logging.getLogger('mmpobjectclass_testlogger')
# logging.disable(logging.CRITICAL)
# create empty mmp object
self.test_mmp_mcss_object = MMPbasedMCSSObjectClass(self.mmplogger)
# data set for use in testing input
self.test_dataset_goldeninput_smi_01 = {
# The following represent synthetic data, analogues of CHEMBL1382609
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL1382609/
# 1. substituents are added to the pyrazole ring to generate side chain MMPs
# H on CHEMBL1382609 between two methyls is changed to Br, F, C, I to
# visually see the change in the smiles string (avoiding Cl as already present)
# e.g.: N1C(=C(Br)C(=N1)C)C
# 2. core ring system is modified (phenyl to pyridine) to see ring switch MMP's
# Presence/Absence of Pyridine-N and N-positional isomerism in Cl-Ph ring
# e.g.: C2=NC(=CS2)C2=CC=C(Cl)C=C2 + addition of N ->
# C2=NC(=CS2)C2=CN=C(Cl)C=C2 + move N around ring ->
# C2=NC(=CS2)C2=NC=C(Cl)C=C2
# for 1,2 single wins
'001': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(Br)C(=N1)C)C',
'002': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(F)C(=N1)C)C',
# for 2,3 double wins tie
'003': 'N1(C2=NC(=CS2)C2=CN=C(Cl)C=C2)C(=C(F)C(=N1)C)C',
# The following represent synthetic data, analogues of CHEMBL1341352
# for 1341352 and its synthetic unsubstituted analogue there is no double
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL1341352/
'1341352': 'Cc1cc(nn1CC(=O)NCc2ccccc2)C(F)(F)F',
'004': 'c1cc(nn1CC(=O)NCc2ccccc2)',
# more double cut only
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL6211
# https://www.ebi.ac.uk/chembl/compound_report_card/CHEMBL6232
'6211': 'O=C(OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC)CCCCCCC',
'6232': 'O=C(N1C(CN(C(=O)c2cc(c(OC)c(c2)OC)OC)CC1)COC(=O)CC(C)(C)C)c1cc(c(OC)c(OC)c1)OC'
}
self.test_dataset_goldeninput_smi_03 = {
# repeat of above
'001': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(Br)C(=N1)C)C',
'002': 'N1(C2=NC(=CS2)C2=CC=C(Cl)C=C2)C(=C(F)C(=N1)C)C',
}
# all smiles below are outputs from the above input, either a repeat of an input smiles or a fragment of one
self.test_dataset_golden_output_01 = {'CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R': None,
'SINGLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'SINGLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None,
'DOUBLE,2,3,14,[1ClH].Fc1c([n](c2sc[2cH][n]2)[n]c1C)C,[1cH]1cc[2cH]cc1,[n]1[1cH]cc[2cH]c1': None,
'DOUBLE,3,2,14,[1ClH].Fc1c([n](c2sc[2cH][n]2)[n]c1C)C,[n]1[1cH]cc[2cH]c1,[1cH]1cc[2cH]cc1': None,
'SINGLE,1341352,4,11,O=C(NCc1ccccc1)[1CH3],Cc1[1nH][n]c(C(F)(F)F)c1,[1nH]1[n]ccc1': None,
'SINGLE,4,1341352,11,O=C(NCc1ccccc1)[1CH3],[1nH]1[n]ccc1,Cc1[1nH][n]c(C(F)(F)F)c1': None,
'DOUBLE,6211,6232,40,[1CH4].[2CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,[1CH3]CCC[2CH3],C[12CH2]C': None,
'DOUBLE,6232,6211,40,[1CH4].[2CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,C[12CH2]C,[1CH3]CCC[2CH3]': None}
self.test_dataset_golden_output_02 = {'CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R': None,
'SINGLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'SINGLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None,
'SINGLE,2,3,13,Fc1c([n](c2sc[1cH][n]2)[n]c1C)C,Clc1cc[1cH]cc1,Clc1[n]c[1cH]cc1': None,
'SINGLE,3,2,13,Fc1c([n](c2sc[1cH][n]2)[n]c1C)C,Clc1[n]c[1cH]cc1,Clc1cc[1cH]cc1': None,
'SINGLE,1341352,4,11,O=C(NCc1ccccc1)[1CH3],Cc1[1nH][n]c(C(F)(F)F)c1,[1nH]1[n]ccc1': None,
'SINGLE,4,1341352,11,O=C(NCc1ccccc1)[1CH3],[1nH]1[n]ccc1,Cc1[1nH][n]c(C(F)(F)F)c1': None,
'SINGLE,6211,6232,39,[1CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,[1CH3]CCCCC,C[1CH](C)C': None,
'SINGLE,6232,6211,39,[1CH3]C(=O)OCC1N(C(=O)c2cc(c(OC)c(c2)OC)OC)CCN(C1)C(=O)c1cc(c(OC)c(OC)c1)OC,C[1CH](C)C,[1CH3]CCCCC': None}
self.test_dataset_golden_output_03 = {'CUT_TYPE,MOL_ID_L,MOL_ID_R,NATOMS,MCSS,FRAG_L,FRAG_R': None,
'SINGLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'SINGLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None,
'DOUBLE,1,2,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1BrH],[1FH]': None,
'DOUBLE,2,1,19,Clc1ccc(c2csc([n]3[n]c([1cH]c3C)C)[n]2)cc1,[1FH],[1BrH]': None}
# write test data to temp file (smi)
for smi_id, smi in list(self.test_dataset_goldeninput_smi_01.items()):
self.temp_file_input_smi_01.write(smi + " " + smi_id + "\n")
self.temp_file_input_smi_01.close()
# write test data to temp file (smi)
for smi_id, smi in list(self.test_dataset_goldeninput_smi_03.items()):
self.temp_file_input_smi_03.write(smi + " " + smi_id + "\n")
self.temp_file_input_smi_03.close()
# container for results data
self.test_dataset_testresults = {}
def tearDown(self):
"""Tear down object for clean reuse in further tests"""
# clean out the object
self.test_mmp_mcss_object.clean_out_data()
# clean out the temp data store
self.test_dataset_testresults.clear()
os.remove(self.temp_file_input_smi_01.name)
def test_get_largest_mcs_pairs_with_diff(self):
"""Test method to get largest MCS MMP for given smi - smi pair"""
# 6. full build then write of pairs to file, but only for a single named column
self.test_mmp_mcss_object.build_from_dicer(self.temp_file_input_smi_01.name, 'BOTH', 'NONE')
self.test_mmp_mcss_object.enumerate_fragment_properties()
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH')
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
#print(self.test_dataset_testresults)
self.assertEqual(self.test_dataset_golden_output_01, self.test_dataset_testresults)
def test_get_largest_mcs_pairs_mdc_atm_hard(self):
"""Test method to get largest MCS MMP for given smi - smi pair"""
# 6. full build then write of pairs to file, but only for a single named column
self.test_mmp_mcss_object.build_from_dicer(self.temp_file_input_smi_01.name, 'BOTH', 'NONE')
self.test_mmp_mcss_object.enumerate_fragment_properties()
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH', mdc_atm_hard=4)
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
#print(self.test_dataset_testresults)
self.assertEqual(self.test_dataset_golden_output_02, self.test_dataset_testresults)
def test_get_largest_mcs_pairs_mdc_atm_soft(self):
"""Test method to get largest MCS MMP for given smi - smi pair"""
# 6. full build then write of pairs to file, but only for a single named column
self.test_mmp_mcss_object.build_from_dicer(self.temp_file_input_smi_03.name, 'BOTH', 'NONE')
self.test_mmp_mcss_object.enumerate_fragment_properties()
#
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH')
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
self.test_mmp_mcss_object.get_largest_mcs_pairs(self.temp_file_output_pairs.name, 'BOTH', mdc_atm_soft=3,
mdc_atm_soft_threshold=4)
# now read it back into temp object and check it's what we wrote out!
test_results_filehandle = open(self.temp_file_output_pairs.name, 'r')
for line in test_results_filehandle:
line = line.rstrip('\r')
line = line.rstrip('\n')
self.test_dataset_testresults[line] = None
test_results_filehandle.close()
#print(self.test_dataset_testresults)
self.assertEqual(self.test_dataset_golden_output_03, self.test_dataset_testresults)
if __name__ == '__main__':
unittest.main()
# Source: Eerovil/magic-cards @ b4f18ae9656be58e99fc90146c8ac7473371ce55
# Path: scanner/actions/action.py
# License: MIT
import os
import json
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
class Action:
def __init__(self, card, config):
config.pop("action", None)
self.card = card
self.config = config
def env_vars_for_object(self, config, prefix):
env_vars = {}
config.pop("action", None)
config.pop("id", None)
for key, value in config.items():
if value and isinstance(value, dict):
nested_env_vars = self.env_vars_for_object(
value, "{}_{}".format(prefix, key.upper())
)
env_vars = {**env_vars, **nested_env_vars}
else:
env_vars["{}_{}".format(prefix, key.upper())] = value
return env_vars
def env_vars(self):
with open(CURRENT_DIR + "/../../config/config.json", "r") as f:
global_config = json.load(f)
env_vars = self.env_vars_for_object(self.card, "CARD")
env_vars["magic_cards_room"] = global_config["room"]
prefix = self.__class__.__name__.replace("Action", "").upper()
return {**env_vars, **self.env_vars_for_object(self.config, prefix)}
class ChromecastAction(Action):
def __init__(self, card, config, chromecast):
super().__init__(card, config)
self.chromecast = chromecast
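# --- usage sketch (not part of the original file) ---------------------------------
# Shows how env_vars_for_object flattens a nested config dict into prefixed
# environment-variable names; the card/config values are invented for illustration.
if __name__ == "__main__":
    demo = Action({"id": 7, "name": "playlist"}, {"action": "cast", "volume": {"level": 5}})
    # pass a copy because env_vars_for_object pops keys from the dict it is given
    print(demo.env_vars_for_object(dict(demo.config), "CAST"))  # {'CAST_VOLUME_LEVEL': 5}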
# Source: wreiner/Office365-REST-Python-Client @ 476bbce4f5928a140b4f5d33475d0ac9b0783530
# Path: office365/sharepoint/tenant/administration/tenant.py
# License: MIT
from office365.runtime.client_value_collection import ClientValueCollection
from office365.runtime.queries.service_operation_query import ServiceOperationQuery
from office365.runtime.resource_path import ResourcePath
from office365.sharepoint.base_entity import BaseEntity
from office365.sharepoint.tenant.administration.hubSiteProperties import HubSiteProperties
from office365.sharepoint.tenant.administration.secondary_administrators_fields_data import \
SecondaryAdministratorsFieldsData
from office365.sharepoint.tenant.administration.secondary_administrators_info import SecondaryAdministratorsInfo
from office365.sharepoint.tenant.administration.site_properties import SiteProperties
from office365.sharepoint.tenant.administration.site_properties_collection import SitePropertiesCollection
from office365.sharepoint.tenant.administration.sitePropertiesEnumerableFilter import SitePropertiesEnumerableFilter
from office365.sharepoint.tenant.administration.spo_operation import SpoOperation
class Tenant(BaseEntity):
def __init__(self, context):
super().__init__(context, ResourcePath("Microsoft.Online.SharePoint.TenantAdministration.Tenant"),
"Microsoft.Online.SharePoint.TenantAdministration")
def get_site_secondary_administrators(self, site_id):
"""
Gets site collection administrators
:type site_id: str
"""
return_type = ClientValueCollection(SecondaryAdministratorsInfo)
payload = SecondaryAdministratorsFieldsData(site_id)
qry = ServiceOperationQuery(self, "GetSiteSecondaryAdministrators", None, payload,
"secondaryAdministratorsFieldsData", return_type)
self.context.add_query(qry)
return return_type
def set_site_secondary_administrators(self, site_id, emails, names=None):
"""
Sets site collection administrators
:type names: list[str] or None
:type emails: list[str]
:type site_id: str
"""
payload = SecondaryAdministratorsFieldsData(site_id, emails, names)
qry = ServiceOperationQuery(self, "SetSiteSecondaryAdministrators", None, payload,
"secondaryAdministratorsFieldsData", None)
self.context.add_query(qry)
return self
def register_hub_site(self, site_url):
"""
Registers an existing site as a hub site.
:param str site_url:
:return:
"""
return_type = HubSiteProperties(self.context)
params = {"siteUrl": site_url}
qry = ServiceOperationQuery(self, "RegisterHubSite", None, params, None, return_type)
self.context.add_query(qry)
return return_type
def unregister_hub_site(self, siteUrl):
"""
Unregisters a hub site so that it is no longer a hub site.
:param str siteUrl:
:return:
"""
params = {"siteUrl": siteUrl}
qry = ServiceOperationQuery(self, "UnregisterHubSite", None, params, None, None)
self.context.add_query(qry)
return self
def create_site(self, site_create_props):
"""Queues a site collection for creation with the specified properties.
:param SiteCreationProperties site_create_props:
A SiteCreationProperties object that contains the initial properties
of the new site collection.
"""
result = SpoOperation(self.context)
qry = ServiceOperationQuery(self, "CreateSite", None, site_create_props, "siteCreationProperties", result)
self.context.add_query(qry)
return result
def remove_site(self, site_url):
"""Deletes the site with the specified URL
:param str site_url: A string representing the URL of the site.
"""
result = SpoOperation(self.context)
qry = ServiceOperationQuery(self, "removeSite", [site_url], None, None, result)
self.context.add_query(qry)
return result
def remove_deleted_site(self, site_url):
pass
def restore_deleted_site(self, site_url):
pass
def get_site_properties_by_url(self, url, include_detail):
"""
:param str url: A string that represents the site URL.
:param bool include_detail: A Boolean value that indicates whether to include all of the SPSite properties.
"""
site_props = SiteProperties(self.context)
self._sites.add_child(site_props)
payload = {
'url': url,
'includeDetail': include_detail
}
qry = ServiceOperationQuery(self, "getSitePropertiesByUrl", None, payload, None, site_props)
self.context.add_query(qry)
return site_props
def get_site_properties_from_sharepoint_by_filters(self, _filter, start_index=0, include_detail=False):
"""
:param bool include_detail:
:param int start_index:
:param str _filter:
"""
site_props_col = SitePropertiesCollection(self.context)
qry = ServiceOperationQuery(self, "getSitePropertiesFromSharePointByFilters",
None,
SitePropertiesEnumerableFilter(_filter, start_index, include_detail),
"speFilter",
site_props_col)
self.context.add_query(qry)
return site_props_col
@property
def root_site_url(self):
"""
:rtype: str or None
"""
return self.properties.get('RootSiteUrl', None)
@property
def _sites(self):
"""Gets a collection of sites."""
if self.is_property_available('sites'):
return self.properties['sites']
else:
return SitePropertiesCollection(self.context, ResourcePath("sites", self.resource_path))
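# --- usage sketch (not part of the original file) ----------------------------------
# These methods only queue queries on the client context; sending them requires the
# context to execute, per the library's usual pattern. The admin URL and the
# execute_query() call below are assumptions for illustration, not taken from this file:
#
#     tenant = Tenant(admin_ctx)  # admin_ctx bound to https://<tenant>-admin.sharepoint.com
#     props = tenant.get_site_properties_by_url("https://<tenant>.sharepoint.com/sites/team", True)
#     admin_ctx.execute_query()
#     print(props.properties)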
# Source: roeiba/ccxt @ 01060e12b20925fe29f1485dcb21b8cf44326d2a
# Path: python/ccxt/async_support/base/exchange.py
# License: MIT
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
__version__ = '1.18.1333'
# -----------------------------------------------------------------------------
import asyncio
import concurrent
import socket
import time
import math
import random
import certifi
import aiohttp
import ssl
import sys
import yarl
# -----------------------------------------------------------------------------
from ccxt.async_support.base.throttle import throttle
# -----------------------------------------------------------------------------
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import NotSupported
# -----------------------------------------------------------------------------
from ccxt.base.exchange import Exchange as BaseExchange
# -----------------------------------------------------------------------------
__all__ = [
'BaseExchange',
'Exchange',
]
# -----------------------------------------------------------------------------
class Exchange(BaseExchange):
def __init__(self, config={}):
if 'asyncio_loop' in config:
self.asyncio_loop = config['asyncio_loop']
self.asyncio_loop = self.asyncio_loop or asyncio.get_event_loop()
self.aiohttp_trust_env = config.get('aiohttp_trust_env', self.aiohttp_trust_env)
self.verify = config.get('verify', self.verify)
self.own_session = 'session' not in config
self.cafile = config.get('cafile', certifi.where())
self.open()
super(Exchange, self).__init__(config)
self.init_rest_rate_limiter()
def init_rest_rate_limiter(self):
self.throttle = throttle(self.extend({
'loop': self.asyncio_loop,
}, self.tokenBucket))
def __del__(self):
if self.session is not None:
            self.logger.warning(self.id + " requires releasing all resources with an explicit call to the .close() coroutine. If you are using the exchange instance with async coroutines, add `await exchange.close()` to your code in a place where you're done with the exchange and don't need it anymore (at the end of your async coroutine).")
if sys.version_info >= (3, 5):
async def __aenter__(self):
self.open()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
def open(self):
if self.own_session and self.session is None:
# Create our SSL context object with our CA cert file
context = ssl.create_default_context(cafile=self.cafile) if self.verify else self.verify
# Pass this SSL context to aiohttp and create a TCPConnector
connector = aiohttp.TCPConnector(ssl=context, loop=self.asyncio_loop)
self.session = aiohttp.ClientSession(loop=self.asyncio_loop, connector=connector, trust_env=self.aiohttp_trust_env)
async def close(self):
if self.session is not None:
if self.own_session:
await self.session.close()
self.session = None
async def wait_for_token(self):
while self.rateLimitTokens <= 1:
# if self.verbose:
# print('Waiting for tokens: Exchange: {0}'.format(self.id))
self.add_new_tokens()
seconds_delays = [0.001, 0.005, 0.022, 0.106, 0.5]
delay = random.choice(seconds_delays)
await asyncio.sleep(delay)
self.rateLimitTokens -= 1
def add_new_tokens(self):
# if self.verbose:
# print('Adding new tokens: Exchange: {0}'.format(self.id))
now = time.monotonic()
time_since_update = now - self.rateLimitUpdateTime
new_tokens = math.floor((0.8 * 1000.0 * time_since_update) / self.rateLimit)
if new_tokens > 1:
self.rateLimitTokens = min(self.rateLimitTokens + new_tokens, self.rateLimitMaxTokens)
self.rateLimitUpdateTime = now
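    # Worked example for the two legacy token-bucket helpers above: with the
    # default rateLimit of 2000 ms, the refill term
    # 0.8 * 1000.0 * time_since_update / rateLimit adds 0.4 tokens per second
    # of idle time, so a 5-second pause yields
    # floor(0.8 * 1000 * 5 / 2000) = 2 new tokens; refills of a single token
    # or less are skipped by the `new_tokens > 1` guard.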
async def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
"""A better wrapper over request for deferred signing"""
if self.enableRateLimit:
await self.throttle()
self.lastRestRequestTimestamp = self.milliseconds()
request = self.sign(path, api, method, params, headers, body)
return await self.fetch(request['url'], request['method'], request['headers'], request['body'])
async def fetch(self, url, method='GET', headers=None, body=None):
"""Perform a HTTP request and return decoded JSON data"""
request_headers = self.prepare_request_headers(headers)
url = self.proxy + url
if self.verbose:
print("\nRequest:", method, url, headers, body)
self.logger.debug("%s %s, Request: %s %s", method, url, headers, body)
request_body = body
encoded_body = body.encode() if body else None
session_method = getattr(self.session, method.lower())
http_response = None
http_status_code = None
http_status_text = None
json_response = None
try:
async with session_method(yarl.URL(url, encoded=True),
data=encoded_body,
headers=request_headers,
timeout=(self.timeout / 1000),
proxy=self.aiohttp_proxy) as response:
http_response = await response.text()
http_status_code = response.status
http_status_text = response.reason
json_response = self.parse_json(http_response)
headers = response.headers
if self.enableLastHttpResponse:
self.last_http_response = http_response
if self.enableLastResponseHeaders:
self.last_response_headers = headers
if self.enableLastJsonResponse:
self.last_json_response = json_response
if self.verbose:
print("\nResponse:", method, url, http_status_code, headers, http_response)
self.logger.debug("%s %s, Response: %s %s %s", method, url, http_status_code, headers, http_response)
except socket.gaierror as e:
raise ExchangeNotAvailable(method + ' ' + url)
except concurrent.futures._base.TimeoutError as e:
raise RequestTimeout(method + ' ' + url)
except aiohttp.client_exceptions.ClientConnectionError as e:
raise ExchangeNotAvailable(method + ' ' + url)
except aiohttp.client_exceptions.ClientError as e: # base exception class
raise ExchangeError(method + ' ' + url)
self.handle_errors(http_status_code, http_status_text, url, method, headers, http_response, json_response, request_headers, request_body)
self.handle_rest_errors(http_status_code, http_status_text, http_response, url, method)
self.handle_rest_response(http_response, json_response, url, method)
if json_response is not None:
return json_response
return http_response
async def load_markets(self, reload=False, params={}):
if not reload:
if self.markets:
if not self.markets_by_id:
return self.set_markets(self.markets)
return self.markets
currencies = None
if self.has['fetchCurrencies']:
currencies = await self.fetch_currencies()
markets = await self.fetch_markets(params)
return self.set_markets(markets, currencies)
async def fetch_fees(self):
trading = {}
funding = {}
if self.has['fetchTradingFees']:
trading = await self.fetch_trading_fees()
if self.has['fetchFundingFees']:
funding = await self.fetch_funding_fees()
return {
'trading': trading,
'funding': funding,
}
async def load_fees(self, reload=False):
if not reload:
if self.loaded_fees != Exchange.loaded_fees:
return self.loaded_fees
self.loaded_fees = self.deep_extend(self.loaded_fees, await self.fetch_fees())
return self.loaded_fees
async def fetch_markets(self, params={}):
# markets are returned as a list
# currencies are returned as a dict
# this is for historical reasons
# and may be changed for consistency later
return self.to_array(self.markets)
async def fetch_currencies(self, params={}):
# markets are returned as a list
# currencies are returned as a dict
# this is for historical reasons
# and may be changed for consistency later
return self.currencies
async def fetch_status(self, params={}):
if self.has['fetchTime']:
updated = await self.fetch_time(params)
self.status['updated'] = updated
return self.status
async def fetch_order_status(self, id, symbol=None, params={}):
order = await self.fetch_order(id, symbol, params)
return order['status']
async def fetch_partial_balance(self, part, params={}):
balance = await self.fetch_balance(params)
return balance[part]
async def fetch_l2_order_book(self, symbol, limit=None, params={}):
orderbook = await self.fetch_order_book(symbol, limit, params)
return self.extend(orderbook, {
'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
})
async def perform_order_book_request(self, market, limit=None, params={}):
raise NotSupported(self.id + ' performOrderBookRequest not supported yet')
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
orderbook = await self.perform_order_book_request(market, limit, params)
return self.parse_order_book(orderbook, market, limit, params)
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if not self.has['fetchTrades']:
raise NotSupported('fetch_ohlcv() not implemented yet')
await self.load_markets()
trades = await self.fetch_trades(symbol, since, limit, params)
return self.build_ohlcv(trades, timeframe, since, limit)
async def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
return await self.fetch_ohlcv(symbol, timeframe, since, limit, params)
async def fetch_full_tickers(self, symbols=None, params={}):
return await self.fetch_tickers(symbols, params)
async def edit_order(self, id, symbol, *args):
if not self.enableRateLimit:
raise ExchangeError('updateOrder() requires enableRateLimit = true')
await self.cancel_order(id, symbol)
return await self.create_order(symbol, *args)
async def fetch_trading_fees(self, params={}):
raise NotSupported('fetch_trading_fees() not supported yet')
async def fetch_trading_fee(self, symbol, params={}):
if not self.has['fetchTradingFees']:
raise NotSupported('fetch_trading_fee() not supported yet')
return await self.fetch_trading_fees(params)
async def load_trading_limits(self, symbols=None, reload=False, params={}):
if self.has['fetchTradingLimits']:
if reload or not('limitsLoaded' in list(self.options.keys())):
response = await self.fetch_trading_limits(symbols)
for i in range(0, len(symbols)):
symbol = symbols[i]
self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
self.options['limitsLoaded'] = self.milliseconds()
return self.markets
async def load_accounts(self, reload=False, params={}):
if reload:
self.accounts = await self.fetch_accounts(params)
else:
if self.accounts:
return self.accounts
else:
self.accounts = await self.fetch_accounts(params)
self.accountsById = self.index_by(self.accounts, 'id')
return self.accounts
async def fetch_ticker(self, symbol, params={}):
raise NotSupported('fetch_ticker() not supported yet')
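# A minimal usage sketch for the async base class above: the context-manager
# protocol from __aenter__/__aexit__ opens and closes the aiohttp session
# automatically. It assumes the full ccxt package is installed; `binance` is
# only an illustrative concrete subclass.
if __name__ == '__main__':
    import ccxt.async_support as accxt

    async def _demo():
        async with accxt.binance({'enableRateLimit': True}) as exchange:
            markets = await exchange.load_markets()
            print(len(markets), 'markets loaded')

    asyncio.get_event_loop().run_until_complete(_demo())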
| 41.740864
| 355
| 0.610236
|
__version__ = '1.18.1333'
import asyncio
import concurrent
import socket
import time
import math
import random
import certifi
import aiohttp
import ssl
import sys
import yarl
from ccxt.async_support.base.throttle import throttle
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import RequestTimeout
from ccxt.base.errors import NotSupported
from ccxt.base.exchange import Exchange as BaseExchange
__all__ = [
'BaseExchange',
'Exchange',
]
class Exchange(BaseExchange):
def __init__(self, config={}):
if 'asyncio_loop' in config:
self.asyncio_loop = config['asyncio_loop']
self.asyncio_loop = self.asyncio_loop or asyncio.get_event_loop()
self.aiohttp_trust_env = config.get('aiohttp_trust_env', self.aiohttp_trust_env)
self.verify = config.get('verify', self.verify)
self.own_session = 'session' not in config
self.cafile = config.get('cafile', certifi.where())
self.open()
super(Exchange, self).__init__(config)
self.init_rest_rate_limiter()
def init_rest_rate_limiter(self):
self.throttle = throttle(self.extend({
'loop': self.asyncio_loop,
}, self.tokenBucket))
def __del__(self):
if self.session is not None:
            self.logger.warning(self.id + " requires releasing all resources with an explicit call to the .close() coroutine. If you are using the exchange instance with async coroutines, add `await exchange.close()` to your code in a place where you're done with the exchange and don't need it anymore (at the end of your async coroutine).")
if sys.version_info >= (3, 5):
async def __aenter__(self):
self.open()
return self
async def __aexit__(self, exc_type, exc, tb):
await self.close()
def open(self):
if self.own_session and self.session is None:
context = ssl.create_default_context(cafile=self.cafile) if self.verify else self.verify
connector = aiohttp.TCPConnector(ssl=context, loop=self.asyncio_loop)
self.session = aiohttp.ClientSession(loop=self.asyncio_loop, connector=connector, trust_env=self.aiohttp_trust_env)
async def close(self):
if self.session is not None:
if self.own_session:
await self.session.close()
self.session = None
async def wait_for_token(self):
while self.rateLimitTokens <= 1:
self.add_new_tokens()
seconds_delays = [0.001, 0.005, 0.022, 0.106, 0.5]
delay = random.choice(seconds_delays)
await asyncio.sleep(delay)
self.rateLimitTokens -= 1
def add_new_tokens(self):
now = time.monotonic()
time_since_update = now - self.rateLimitUpdateTime
new_tokens = math.floor((0.8 * 1000.0 * time_since_update) / self.rateLimit)
if new_tokens > 1:
self.rateLimitTokens = min(self.rateLimitTokens + new_tokens, self.rateLimitMaxTokens)
self.rateLimitUpdateTime = now
async def fetch2(self, path, api='public', method='GET', params={}, headers=None, body=None):
if self.enableRateLimit:
await self.throttle()
self.lastRestRequestTimestamp = self.milliseconds()
request = self.sign(path, api, method, params, headers, body)
return await self.fetch(request['url'], request['method'], request['headers'], request['body'])
async def fetch(self, url, method='GET', headers=None, body=None):
request_headers = self.prepare_request_headers(headers)
url = self.proxy + url
if self.verbose:
print("\nRequest:", method, url, headers, body)
self.logger.debug("%s %s, Request: %s %s", method, url, headers, body)
request_body = body
encoded_body = body.encode() if body else None
session_method = getattr(self.session, method.lower())
http_response = None
http_status_code = None
http_status_text = None
json_response = None
try:
async with session_method(yarl.URL(url, encoded=True),
data=encoded_body,
headers=request_headers,
timeout=(self.timeout / 1000),
proxy=self.aiohttp_proxy) as response:
http_response = await response.text()
http_status_code = response.status
http_status_text = response.reason
json_response = self.parse_json(http_response)
headers = response.headers
if self.enableLastHttpResponse:
self.last_http_response = http_response
if self.enableLastResponseHeaders:
self.last_response_headers = headers
if self.enableLastJsonResponse:
self.last_json_response = json_response
if self.verbose:
print("\nResponse:", method, url, http_status_code, headers, http_response)
self.logger.debug("%s %s, Response: %s %s %s", method, url, http_status_code, headers, http_response)
except socket.gaierror as e:
raise ExchangeNotAvailable(method + ' ' + url)
except concurrent.futures._base.TimeoutError as e:
raise RequestTimeout(method + ' ' + url)
except aiohttp.client_exceptions.ClientConnectionError as e:
raise ExchangeNotAvailable(method + ' ' + url)
        except aiohttp.client_exceptions.ClientError as e:
            raise ExchangeError(method + ' ' + url)
self.handle_errors(http_status_code, http_status_text, url, method, headers, http_response, json_response, request_headers, request_body)
self.handle_rest_errors(http_status_code, http_status_text, http_response, url, method)
self.handle_rest_response(http_response, json_response, url, method)
if json_response is not None:
return json_response
return http_response
async def load_markets(self, reload=False, params={}):
if not reload:
if self.markets:
if not self.markets_by_id:
return self.set_markets(self.markets)
return self.markets
currencies = None
if self.has['fetchCurrencies']:
currencies = await self.fetch_currencies()
markets = await self.fetch_markets(params)
return self.set_markets(markets, currencies)
async def fetch_fees(self):
trading = {}
funding = {}
if self.has['fetchTradingFees']:
trading = await self.fetch_trading_fees()
if self.has['fetchFundingFees']:
funding = await self.fetch_funding_fees()
return {
'trading': trading,
'funding': funding,
}
async def load_fees(self, reload=False):
if not reload:
if self.loaded_fees != Exchange.loaded_fees:
return self.loaded_fees
self.loaded_fees = self.deep_extend(self.loaded_fees, await self.fetch_fees())
return self.loaded_fees
async def fetch_markets(self, params={}):
return self.to_array(self.markets)
async def fetch_currencies(self, params={}):
return self.currencies
async def fetch_status(self, params={}):
if self.has['fetchTime']:
updated = await self.fetch_time(params)
self.status['updated'] = updated
return self.status
async def fetch_order_status(self, id, symbol=None, params={}):
order = await self.fetch_order(id, symbol, params)
return order['status']
async def fetch_partial_balance(self, part, params={}):
balance = await self.fetch_balance(params)
return balance[part]
async def fetch_l2_order_book(self, symbol, limit=None, params={}):
orderbook = await self.fetch_order_book(symbol, limit, params)
return self.extend(orderbook, {
'bids': self.sort_by(self.aggregate(orderbook['bids']), 0, True),
'asks': self.sort_by(self.aggregate(orderbook['asks']), 0),
})
async def perform_order_book_request(self, market, limit=None, params={}):
raise NotSupported(self.id + ' performOrderBookRequest not supported yet')
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
orderbook = await self.perform_order_book_request(market, limit, params)
return self.parse_order_book(orderbook, market, limit, params)
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if not self.has['fetchTrades']:
raise NotSupported('fetch_ohlcv() not implemented yet')
await self.load_markets()
trades = await self.fetch_trades(symbol, since, limit, params)
return self.build_ohlcv(trades, timeframe, since, limit)
async def fetchOHLCV(self, symbol, timeframe='1m', since=None, limit=None, params={}):
return await self.fetch_ohlcv(symbol, timeframe, since, limit, params)
async def fetch_full_tickers(self, symbols=None, params={}):
return await self.fetch_tickers(symbols, params)
async def edit_order(self, id, symbol, *args):
if not self.enableRateLimit:
raise ExchangeError('updateOrder() requires enableRateLimit = true')
await self.cancel_order(id, symbol)
return await self.create_order(symbol, *args)
async def fetch_trading_fees(self, params={}):
raise NotSupported('fetch_trading_fees() not supported yet')
async def fetch_trading_fee(self, symbol, params={}):
if not self.has['fetchTradingFees']:
raise NotSupported('fetch_trading_fee() not supported yet')
return await self.fetch_trading_fees(params)
async def load_trading_limits(self, symbols=None, reload=False, params={}):
if self.has['fetchTradingLimits']:
if reload or not('limitsLoaded' in list(self.options.keys())):
response = await self.fetch_trading_limits(symbols)
for i in range(0, len(symbols)):
symbol = symbols[i]
self.markets[symbol] = self.deep_extend(self.markets[symbol], response[symbol])
self.options['limitsLoaded'] = self.milliseconds()
return self.markets
async def load_accounts(self, reload=False, params={}):
if reload:
self.accounts = await self.fetch_accounts(params)
else:
if self.accounts:
return self.accounts
else:
self.accounts = await self.fetch_accounts(params)
self.accountsById = self.index_by(self.accounts, 'id')
return self.accounts
async def fetch_ticker(self, symbol, params={}):
raise NotSupported('fetch_ticker() not supported yet')
| true
| true
|
f70219fcb6e5ca2640cb076239567264444a0e4f
| 553
|
py
|
Python
|
src/CSVreader.py
|
Shannon-NJIT/Calculator-Individual
|
c7dcb961c0f1fa364f9eaa2e85fb371e74646e78
|
[
"MIT"
] | null | null | null |
src/CSVreader.py
|
Shannon-NJIT/Calculator-Individual
|
c7dcb961c0f1fa364f9eaa2e85fb371e74646e78
|
[
"MIT"
] | null | null | null |
src/CSVreader.py
|
Shannon-NJIT/Calculator-Individual
|
c7dcb961c0f1fa364f9eaa2e85fb371e74646e78
|
[
"MIT"
] | null | null | null |
import csv
def ClassFactory(class_name, dictionary):
return type(class_name, (object,), dictionary)
class CsvReader:
def __init__(self, filepath):
self.data = []
with open(filepath) as csv_files:
csv_data = csv.DictReader(csv_files, delimiter=',')
for row in csv_data:
self.data.append(row)
def return_data_object(self, class_name):
objects = []
for row in self.data:
objects.append(ClassFactory(class_name, row))
return objects
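# A minimal usage sketch for CsvReader/ClassFactory above. The file name and
# column names are placeholders; any CSV with a header row works the same way.
if __name__ == '__main__':
    with open('people.csv', 'w') as handle:
        handle.write('name,age\nAda,36\n')
    reader = CsvReader('people.csv')
    for person_cls in reader.return_data_object('Person'):
        # ClassFactory turns each row dict into the class dictionary, so the
        # CSV columns read back as attributes on the generated class.
        print(person_cls.name, person_cls.age)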
| 23.041667
| 63
| 0.609403
|
import csv
def ClassFactory(class_name, dictionary):
return type(class_name, (object,), dictionary)
class CsvReader:
def __init__(self, filepath):
self.data = []
with open(filepath) as csv_files:
csv_data = csv.DictReader(csv_files, delimiter=',')
for row in csv_data:
self.data.append(row)
def return_data_object(self, class_name):
objects = []
for row in self.data:
objects.append(ClassFactory(class_name, row))
return objects
| true
| true
|
f7021b2241ee6f40086a774d14a70f8306e2ebd9
| 16,965
|
py
|
Python
|
dashboard/dashboard/pinpoint/models/quest/read_value_test.py
|
stefb965/catapult
|
93f9ef7fc26f0c4904ce691856ec579d2eeef824
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/pinpoint/models/quest/read_value_test.py
|
stefb965/catapult
|
93f9ef7fc26f0c4904ce691856ec579d2eeef824
|
[
"BSD-3-Clause"
] | null | null | null |
dashboard/dashboard/pinpoint/models/quest/read_value_test.py
|
stefb965/catapult
|
93f9ef7fc26f0c4904ce691856ec579d2eeef824
|
[
"BSD-3-Clause"
] | 1
|
2020-07-24T04:42:31.000Z
|
2020-07-24T04:42:31.000Z
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
import mock
from dashboard.pinpoint.models.quest import read_value
from tracing.value import histogram_set
from tracing.value import histogram as histogram_module
from tracing.value.diagnostics import generic_set
from tracing.value.diagnostics import reserved_infos
_BASE_ARGUMENTS_HISTOGRAMS = {'benchmark': 'speedometer'}
_BASE_ARGUMENTS_GRAPH_JSON = {
'chart': 'chart_name',
'trace': 'trace_name',
}
class ReadHistogramsJsonValueQuestTest(unittest.TestCase):
def testMinimumArguments(self):
quest = read_value.ReadHistogramsJsonValue.FromDict(
_BASE_ARGUMENTS_HISTOGRAMS)
expected = read_value.ReadHistogramsJsonValue('chartjson-output.json')
self.assertEqual(quest, expected)
def testAllArguments(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['chart'] = 'timeToFirst'
arguments['tir_label'] = 'pcv1-cold'
arguments['trace'] = 'trace_name'
arguments['statistic'] = 'avg'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', 'timeToFirst',
'pcv1-cold', 'trace_name', 'avg')
self.assertEqual(quest, expected)
def testPerformanceTestSuite(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['target'] = 'performance_test_suite'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'speedometer/perf_results.json')
self.assertEqual(quest, expected)
def testPerformanceTestSuiteWindows(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['dimensions'] = [{'key': 'os', 'value': 'Windows-10'}]
arguments['target'] = 'performance_test_suite'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'speedometer\\perf_results.json')
self.assertEqual(quest, expected)
class ReadGraphJsonValueQuestTest(unittest.TestCase):
def testAllArguments(self):
quest = read_value.ReadGraphJsonValue.FromDict(_BASE_ARGUMENTS_GRAPH_JSON)
expected = read_value.ReadGraphJsonValue('chart_name', 'trace_name')
self.assertEqual(quest, expected)
def testMissingChart(self):
arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
del arguments['chart']
with self.assertRaises(TypeError):
read_value.ReadGraphJsonValue.FromDict(arguments)
def testMissingTrace(self):
arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
del arguments['trace']
with self.assertRaises(TypeError):
read_value.ReadGraphJsonValue.FromDict(arguments)
class _ReadValueExecutionTest(unittest.TestCase):
def setUp(self):
patcher = mock.patch('dashboard.services.isolate.Retrieve')
self._retrieve = patcher.start()
self.addCleanup(patcher.stop)
def SetOutputFileContents(self, contents):
self._retrieve.side_effect = (
'{"files": {"chartjson-output.json": {"h": "output json hash"}}}',
json.dumps(contents),
)
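  # The two side_effect values above mirror the retrieval order asserted in
  # assertRetrievedOutputJson below: the first isolate.Retrieve() call
  # returns the manifest naming "output json hash", and the second returns
  # the JSON-encoded output file itself.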
def assertReadValueError(self, execution):
self.assertTrue(execution.completed)
self.assertTrue(execution.failed)
self.assertIsInstance(execution.exception, basestring)
last_exception_line = execution.exception.splitlines()[-1]
self.assertTrue(last_exception_line.startswith('ReadValueError'))
def assertReadValueSuccess(self, execution):
self.assertTrue(execution.completed)
self.assertFalse(execution.failed)
self.assertEqual(execution.result_arguments, {})
def assertRetrievedOutputJson(self):
expected_calls = [
mock.call('server', 'output hash'),
mock.call('server', 'output json hash'),
]
self.assertEqual(self._retrieve.mock_calls, expected_calls)
class ReadHistogramsJsonValueTest(_ReadValueExecutionTest):
def testReadHistogramsJsonValue(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name, 'tir_label', 'story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatistic(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name,
'tir_label', 'story', statistic='avg')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (1,))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatisticNoSamples(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name,
'tir_label', 'story', statistic='avg')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueMultipleHistograms(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
hist2 = histogram_module.Histogram('hist', 'count')
hist2.AddSample(0)
hist2.AddSample(1)
hist2.AddSample(2)
hist3 = histogram_module.Histogram('some_other_histogram', 'count')
hist3.AddSample(3)
hist3.AddSample(4)
hist3.AddSample(5)
histograms = histogram_set.HistogramSet([hist, hist2, hist3])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name, 'tir_label', 'story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2, 0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsTraceUrls(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url1', 'trace_url2']))
hist2 = histogram_module.Histogram('hist2', 'count')
hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url3']))
hist3 = histogram_module.Histogram('hist3', 'count')
hist3.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url2']))
histograms = histogram_set.HistogramSet([hist, hist2, hist3])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name)
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0,))
self.assertEqual(
{
'completed': True,
'exception': None,
'details': [
{
'key': 'trace',
'value': 'trace_url1',
'url': 'trace_url1',
},
{
'key': 'trace',
'value': 'trace_url2',
'url': 'trace_url2',
},
{
'key': 'trace',
'value': 'trace_url3',
'url': 'trace_url3',
},
],
},
execution.AsDict())
self.assertRetrievedOutputJson()
def testReadHistogramsDiagnosticRefSkipTraceUrls(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url1', 'trace_url2']))
hist2 = histogram_module.Histogram('hist2', 'count')
hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url3']))
hist2.diagnostics[reserved_infos.TRACE_URLS.name].guid = 'foo'
histograms = histogram_set.HistogramSet([hist, hist2])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name)
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0,))
self.assertEqual(
{
'completed': True,
'exception': None,
'details': [
{
'key': 'trace',
'value': 'trace_url1',
'url': 'trace_url1',
},
{
'key': 'trace',
'value': 'trace_url2',
'url': 'trace_url2',
},
],
},
execution.AsDict())
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoTirLabel(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name, tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoStory(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name, story='story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueSummary(self):
samples = []
hists = []
for i in xrange(10):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
hist.diagnostics[reserved_infos.STORIES.name] = (
generic_set.GenericSet(['story%d' % i]))
hists.append(hist)
samples.extend(hist.sample_values)
histograms = histogram_set.HistogramSet(hists)
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hists[0].name, tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, tuple(samples))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithMissingFile(self):
self._retrieve.return_value = '{"files": {}}'
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='metric', tir_label='test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueEmptyHistogramSet(self):
self.SetOutputFileContents([])
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='metric', tir_label='test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithMissingHistogram(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='does_not_exist')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueTirLabelWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart', tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueStoryWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart', story='story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
class ReadGraphJsonValueTest(_ReadValueExecutionTest):
def testReadGraphJsonValue(self):
self.SetOutputFileContents(
{'chart': {'traces': {'trace': ['126444.869721', '0.0']}}})
quest = read_value.ReadGraphJsonValue('chart', 'trace')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (126444.869721,))
self.assertRetrievedOutputJson()
def testReadGraphJsonValueWithMissingFile(self):
self._retrieve.return_value = '{"files": {}}'
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadGraphJsonValueWithMissingChart(self):
self.SetOutputFileContents({})
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadGraphJsonValueWithMissingTrace(self):
self.SetOutputFileContents({'chart': {'traces': {}}})
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
| 35.124224
| 80
| 0.696611
|
import json
import unittest
import mock
from dashboard.pinpoint.models.quest import read_value
from tracing.value import histogram_set
from tracing.value import histogram as histogram_module
from tracing.value.diagnostics import generic_set
from tracing.value.diagnostics import reserved_infos
_BASE_ARGUMENTS_HISTOGRAMS = {'benchmark': 'speedometer'}
_BASE_ARGUMENTS_GRAPH_JSON = {
'chart': 'chart_name',
'trace': 'trace_name',
}
class ReadHistogramsJsonValueQuestTest(unittest.TestCase):
def testMinimumArguments(self):
quest = read_value.ReadHistogramsJsonValue.FromDict(
_BASE_ARGUMENTS_HISTOGRAMS)
expected = read_value.ReadHistogramsJsonValue('chartjson-output.json')
self.assertEqual(quest, expected)
def testAllArguments(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['chart'] = 'timeToFirst'
arguments['tir_label'] = 'pcv1-cold'
arguments['trace'] = 'trace_name'
arguments['statistic'] = 'avg'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', 'timeToFirst',
'pcv1-cold', 'trace_name', 'avg')
self.assertEqual(quest, expected)
def testPerformanceTestSuite(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['target'] = 'performance_test_suite'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'speedometer/perf_results.json')
self.assertEqual(quest, expected)
def testPerformanceTestSuiteWindows(self):
arguments = dict(_BASE_ARGUMENTS_HISTOGRAMS)
arguments['dimensions'] = [{'key': 'os', 'value': 'Windows-10'}]
arguments['target'] = 'performance_test_suite'
quest = read_value.ReadHistogramsJsonValue.FromDict(arguments)
expected = read_value.ReadHistogramsJsonValue(
'speedometer\\perf_results.json')
self.assertEqual(quest, expected)
class ReadGraphJsonValueQuestTest(unittest.TestCase):
def testAllArguments(self):
quest = read_value.ReadGraphJsonValue.FromDict(_BASE_ARGUMENTS_GRAPH_JSON)
expected = read_value.ReadGraphJsonValue('chart_name', 'trace_name')
self.assertEqual(quest, expected)
def testMissingChart(self):
arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
del arguments['chart']
with self.assertRaises(TypeError):
read_value.ReadGraphJsonValue.FromDict(arguments)
def testMissingTrace(self):
arguments = dict(_BASE_ARGUMENTS_GRAPH_JSON)
del arguments['trace']
with self.assertRaises(TypeError):
read_value.ReadGraphJsonValue.FromDict(arguments)
class _ReadValueExecutionTest(unittest.TestCase):
def setUp(self):
patcher = mock.patch('dashboard.services.isolate.Retrieve')
self._retrieve = patcher.start()
self.addCleanup(patcher.stop)
def SetOutputFileContents(self, contents):
self._retrieve.side_effect = (
'{"files": {"chartjson-output.json": {"h": "output json hash"}}}',
json.dumps(contents),
)
def assertReadValueError(self, execution):
self.assertTrue(execution.completed)
self.assertTrue(execution.failed)
self.assertIsInstance(execution.exception, basestring)
last_exception_line = execution.exception.splitlines()[-1]
self.assertTrue(last_exception_line.startswith('ReadValueError'))
def assertReadValueSuccess(self, execution):
self.assertTrue(execution.completed)
self.assertFalse(execution.failed)
self.assertEqual(execution.result_arguments, {})
def assertRetrievedOutputJson(self):
expected_calls = [
mock.call('server', 'output hash'),
mock.call('server', 'output json hash'),
]
self.assertEqual(self._retrieve.mock_calls, expected_calls)
class ReadHistogramsJsonValueTest(_ReadValueExecutionTest):
def testReadHistogramsJsonValue(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name, 'tir_label', 'story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatistic(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name,
'tir_label', 'story', statistic='avg')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (1,))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueStatisticNoSamples(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name,
'tir_label', 'story', statistic='avg')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueMultipleHistograms(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
hist2 = histogram_module.Histogram('hist', 'count')
hist2.AddSample(0)
hist2.AddSample(1)
hist2.AddSample(2)
hist3 = histogram_module.Histogram('some_other_histogram', 'count')
hist3.AddSample(3)
hist3.AddSample(4)
hist3.AddSample(5)
histograms = histogram_set.HistogramSet([hist, hist2, hist3])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist.name, 'tir_label', 'story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2, 0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsTraceUrls(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url1', 'trace_url2']))
hist2 = histogram_module.Histogram('hist2', 'count')
hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url3']))
hist3 = histogram_module.Histogram('hist3', 'count')
hist3.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url2']))
histograms = histogram_set.HistogramSet([hist, hist2, hist3])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name)
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0,))
self.assertEqual(
{
'completed': True,
'exception': None,
'details': [
{
'key': 'trace',
'value': 'trace_url1',
'url': 'trace_url1',
},
{
'key': 'trace',
'value': 'trace_url2',
'url': 'trace_url2',
},
{
'key': 'trace',
'value': 'trace_url3',
'url': 'trace_url3',
},
],
},
execution.AsDict())
self.assertRetrievedOutputJson()
def testReadHistogramsDiagnosticRefSkipTraceUrls(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url1', 'trace_url2']))
hist2 = histogram_module.Histogram('hist2', 'count')
hist2.diagnostics[reserved_infos.TRACE_URLS.name] = (
generic_set.GenericSet(['trace_url3']))
hist2.diagnostics[reserved_infos.TRACE_URLS.name].guid = 'foo'
histograms = histogram_set.HistogramSet([hist, hist2])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name)
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0,))
self.assertEqual(
{
'completed': True,
'exception': None,
'details': [
{
'key': 'trace',
'value': 'trace_url1',
'url': 'trace_url1',
},
{
'key': 'trace',
'value': 'trace_url2',
'url': 'trace_url2',
},
],
},
execution.AsDict())
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoTirLabel(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name, tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithNoStory(self):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
histograms = histogram_set.HistogramSet([hist])
histograms.AddSharedDiagnostic(
reserved_infos.STORIES.name,
generic_set.GenericSet(['story']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hist.name, story='story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (0, 1, 2))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueSummary(self):
samples = []
hists = []
for i in xrange(10):
hist = histogram_module.Histogram('hist', 'count')
hist.AddSample(0)
hist.AddSample(1)
hist.AddSample(2)
hist.diagnostics[reserved_infos.STORIES.name] = (
generic_set.GenericSet(['story%d' % i]))
hists.append(hist)
samples.extend(hist.sample_values)
histograms = histogram_set.HistogramSet(hists)
histograms.AddSharedDiagnostic(
reserved_infos.STORY_TAGS.name,
generic_set.GenericSet(['group:tir_label']))
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name=hists[0].name, tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, tuple(samples))
self.assertRetrievedOutputJson()
def testReadHistogramsJsonValueWithMissingFile(self):
self._retrieve.return_value = '{"files": {}}'
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='metric', tir_label='test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueEmptyHistogramSet(self):
self.SetOutputFileContents([])
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='metric', tir_label='test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithMissingHistogram(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='does_not_exist')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueTirLabelWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart', tir_label='tir_label')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadHistogramsJsonValueStoryWithNoValues(self):
hist = histogram_module.Histogram('hist', 'count')
histograms = histogram_set.HistogramSet([hist])
self.SetOutputFileContents(histograms.AsDicts())
quest = read_value.ReadHistogramsJsonValue(
'chartjson-output.json', hist_name='chart', story='story')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
class ReadGraphJsonValueTest(_ReadValueExecutionTest):
def testReadGraphJsonValue(self):
self.SetOutputFileContents(
{'chart': {'traces': {'trace': ['126444.869721', '0.0']}}})
quest = read_value.ReadGraphJsonValue('chart', 'trace')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueSuccess(execution)
self.assertEqual(execution.result_values, (126444.869721,))
self.assertRetrievedOutputJson()
def testReadGraphJsonValueWithMissingFile(self):
self._retrieve.return_value = '{"files": {}}'
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadGraphJsonValueWithMissingChart(self):
self.SetOutputFileContents({})
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
def testReadGraphJsonValueWithMissingTrace(self):
self.SetOutputFileContents({'chart': {'traces': {}}})
quest = read_value.ReadGraphJsonValue('metric', 'test')
execution = quest.Start(None, 'server', 'output hash')
execution.Poll()
self.assertReadValueError(execution)
| true
| true
|
f7021c0fb6668b29ae4164901fe3e6cf9296fc62
| 2,637
|
py
|
Python
|
tests/components/fan/test_init.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/fan/test_init.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 24,710
|
2016-04-13T08:27:26.000Z
|
2020-03-02T12:59:13.000Z
|
tests/components/fan/test_init.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""Tests for fan platforms."""
import pytest
from homeassistant.components.fan import FanEntity
class BaseFan(FanEntity):
"""Implementation of the abstract FanEntity."""
def __init__(self):
"""Initialize the fan."""
def test_fanentity():
"""Test fan entity methods."""
fan = BaseFan()
assert fan.state == "off"
assert fan.preset_modes is None
assert fan.supported_features == 0
assert fan.percentage_step == 1
assert fan.speed_count == 100
assert fan.capability_attributes == {}
# Test set_speed not required
with pytest.raises(NotImplementedError):
fan.oscillate(True)
with pytest.raises(AttributeError):
fan.set_speed("low")
with pytest.raises(NotImplementedError):
fan.set_percentage(0)
with pytest.raises(NotImplementedError):
fan.set_preset_mode("auto")
with pytest.raises(NotImplementedError):
fan.turn_on()
with pytest.raises(NotImplementedError):
fan.turn_off()
async def test_async_fanentity(hass):
"""Test async fan entity methods."""
fan = BaseFan()
fan.hass = hass
assert fan.state == "off"
assert fan.preset_modes is None
assert fan.supported_features == 0
assert fan.percentage_step == 1
assert fan.speed_count == 100
assert fan.capability_attributes == {}
# Test set_speed not required
with pytest.raises(NotImplementedError):
await fan.async_oscillate(True)
with pytest.raises(AttributeError):
await fan.async_set_speed("low")
with pytest.raises(NotImplementedError):
await fan.async_set_percentage(0)
with pytest.raises(NotImplementedError):
await fan.async_set_preset_mode("auto")
with pytest.raises(NotImplementedError):
await fan.async_turn_on()
with pytest.raises(NotImplementedError):
await fan.async_turn_off()
with pytest.raises(NotImplementedError):
await fan.async_increase_speed()
with pytest.raises(NotImplementedError):
await fan.async_decrease_speed()
@pytest.mark.parametrize(
"attribute_name, attribute_value",
[
("current_direction", "forward"),
("oscillating", True),
("percentage", 50),
("preset_mode", "medium"),
("preset_modes", ["low", "medium", "high"]),
("speed_count", 50),
("supported_features", 1),
],
)
def test_fanentity_attributes(attribute_name, attribute_value):
"""Test fan entity attribute shorthand."""
fan = BaseFan()
setattr(fan, f"_attr_{attribute_name}", attribute_value)
assert getattr(fan, attribute_name) == attribute_value
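# A minimal concrete-fan sketch complementing the tests above. The class is
# illustrative only and is not used by the tests; it relies on the
# `_attr_percentage` shorthand exercised in test_fanentity_attributes.
class PercentageFan(BaseFan):
    """Fan that supports only percentage-based speed control."""

    _attr_percentage = 0

    def set_percentage(self, percentage: int) -> None:
        # Overriding set_percentage replaces the NotImplementedError raised
        # by the abstract FanEntity default.
        self._attr_percentage = percentage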
| 31.023529
| 63
| 0.678043
|
import pytest
from homeassistant.components.fan import FanEntity
class BaseFan(FanEntity):
    def __init__(self):
        pass
def test_fanentity():
fan = BaseFan()
assert fan.state == "off"
assert fan.preset_modes is None
assert fan.supported_features == 0
assert fan.percentage_step == 1
assert fan.speed_count == 100
assert fan.capability_attributes == {}
with pytest.raises(NotImplementedError):
fan.oscillate(True)
with pytest.raises(AttributeError):
fan.set_speed("low")
with pytest.raises(NotImplementedError):
fan.set_percentage(0)
with pytest.raises(NotImplementedError):
fan.set_preset_mode("auto")
with pytest.raises(NotImplementedError):
fan.turn_on()
with pytest.raises(NotImplementedError):
fan.turn_off()
async def test_async_fanentity(hass):
fan = BaseFan()
fan.hass = hass
assert fan.state == "off"
assert fan.preset_modes is None
assert fan.supported_features == 0
assert fan.percentage_step == 1
assert fan.speed_count == 100
assert fan.capability_attributes == {}
with pytest.raises(NotImplementedError):
await fan.async_oscillate(True)
with pytest.raises(AttributeError):
await fan.async_set_speed("low")
with pytest.raises(NotImplementedError):
await fan.async_set_percentage(0)
with pytest.raises(NotImplementedError):
await fan.async_set_preset_mode("auto")
with pytest.raises(NotImplementedError):
await fan.async_turn_on()
with pytest.raises(NotImplementedError):
await fan.async_turn_off()
with pytest.raises(NotImplementedError):
await fan.async_increase_speed()
with pytest.raises(NotImplementedError):
await fan.async_decrease_speed()
@pytest.mark.parametrize(
"attribute_name, attribute_value",
[
("current_direction", "forward"),
("oscillating", True),
("percentage", 50),
("preset_mode", "medium"),
("preset_modes", ["low", "medium", "high"]),
("speed_count", 50),
("supported_features", 1),
],
)
def test_fanentity_attributes(attribute_name, attribute_value):
fan = BaseFan()
setattr(fan, f"_attr_{attribute_name}", attribute_value)
assert getattr(fan, attribute_name) == attribute_value
| true
| true
|
f7021c9e32ae1eacec61afce8758e91751fe595e
| 89
|
py
|
Python
|
ynab-api/tests/test_ynab_api.py
|
mcbobke/python-fun
|
ecabf87b6d5983c6e2321a15d2f084bf83370b5d
|
[
"MIT"
] | null | null | null |
ynab-api/tests/test_ynab_api.py
|
mcbobke/python-fun
|
ecabf87b6d5983c6e2321a15d2f084bf83370b5d
|
[
"MIT"
] | null | null | null |
ynab-api/tests/test_ynab_api.py
|
mcbobke/python-fun
|
ecabf87b6d5983c6e2321a15d2f084bf83370b5d
|
[
"MIT"
] | null | null | null |
from ynab_api import __version__
def test_version():
assert __version__ == '0.1.0'
| 14.833333
| 33
| 0.719101
|
from ynab_api import __version__
def test_version():
assert __version__ == '0.1.0'
| true
| true
|
f7021d261117c64403f6ac1fccc17613cd3488bb
| 4,198
|
py
|
Python
|
vkbottle/__init__.py
|
mironovmeow/vkbottle
|
46917c737fb5d3ef0530defe2b48d251e52478cb
|
[
"MIT"
] | 98
|
2021-08-06T05:31:31.000Z
|
2022-03-26T03:00:08.000Z
|
vkbottle/__init__.py
|
mironovmeow/vkbottle
|
46917c737fb5d3ef0530defe2b48d251e52478cb
|
[
"MIT"
] | 68
|
2021-08-04T09:56:12.000Z
|
2022-03-31T16:23:12.000Z
|
vkbottle/__init__.py
|
mironovmeow/vkbottle
|
46917c737fb5d3ef0530defe2b48d251e52478cb
|
[
"MIT"
] | 66
|
2021-08-04T09:21:43.000Z
|
2022-03-15T14:34:56.000Z
|
from vkbottle_types import GroupTypes
from vkbottle_types.events import GroupEventType, UserEventType
from .api import (
ABCAPI,
API,
DEFAULT_REQUEST_VALIDATORS,
DEFAULT_RESPONSE_VALIDATORS,
ABCRequestRescheduler,
ABCRequestValidator,
ABCResponseValidator,
ABCTokenGenerator,
BlockingRequestRescheduler,
ConsistentTokenGenerator,
SingleTokenGenerator,
Token,
get_token_generator,
)
from .dispatch import (
ABCDispenseView,
ABCHandler,
ABCRouter,
ABCRule,
ABCStateDispenser,
ABCView,
AndRule,
BaseMiddleware,
BaseReturnManager,
BaseStateGroup,
BuiltinStateDispenser,
MiddlewareError,
NotRule,
OrRule,
Router,
StatePeer,
)
from .exception_factory import (
ABCErrorHandler,
CaptchaError,
CodeException,
ErrorHandler,
VKAPIError,
swear,
)
from .framework import (
ABCBlueprint,
ABCFramework,
Bot,
BotBlueprint,
User,
UserBlueprint,
run_multibot,
)
from .http import ABCHTTPClient, AiohttpClient, SingleAiohttpClient
from .polling import ABCPolling, BotPolling, UserPolling
from .tools import (
EMPTY_KEYBOARD,
ABCAction,
ABCStorage,
ABCValidator,
AudioUploader,
AuthError,
BaseContext,
BaseUploader,
BotTypes,
CallableValidator,
Callback,
CtxStorage,
DelayedTask,
DocMessagesUploader,
DocUploader,
DocWallUploader,
EqualsValidator,
GraffitiUploader,
IsInstanceValidator,
Keyboard,
KeyboardButtonColor,
Location,
LoopWrapper,
OpenAppEvent,
OpenLink,
OpenLinkEvent,
PhotoChatFaviconUploader,
PhotoFaviconUploader,
PhotoMarketUploader,
PhotoMessageUploader,
PhotoToAlbumUploader,
PhotoUploader,
PhotoWallUploader,
ShowSnackbarEvent,
TemplateElement,
Text,
UserAuth,
UserTypes,
VideoUploader,
VKApps,
VKPay,
VoiceMessageUploader,
keyboard_gen,
load_blueprints_from_package,
run_in_task,
run_sync,
template_gen,
vkscript,
)
event_types = GroupTypes
__all__ = (
"ABCAction",
"ABCAPI",
"ABCBlueprint",
"ABCDispenseView",
"ABCErrorHandler",
"ABCFramework",
"ABCHandler",
"ABCHTTPClient",
"ABCPolling",
"ABCRequestRescheduler",
"ABCRequestValidator",
"ABCResponseValidator",
"ABCRouter",
"ABCRule",
"ABCStateDispenser",
"ABCStorage",
"ABCTokenGenerator",
"ABCValidator",
"ABCView",
"AiohttpClient",
"AndRule",
"API",
"AudioUploader",
"AuthError",
"BaseContext",
"BaseMiddleware",
"BaseReturnManager",
"BaseStateGroup",
"BaseUploader",
"BlockingRequestRescheduler",
"Bot",
"BotBlueprint",
"BotPolling",
"BotTypes",
"BuiltinStateDispenser",
"CallableValidator",
"Callback",
"CaptchaError",
"CodeException",
"ConsistentTokenGenerator",
"CtxStorage",
"DEFAULT_REQUEST_VALIDATORS",
"DEFAULT_RESPONSE_VALIDATORS",
"DelayedTask",
"DocMessagesUploader",
"DocUploader",
"DocWallUploader",
"EMPTY_KEYBOARD",
"EqualsValidator",
"ErrorHandler",
"get_token_generator",
"GraffitiUploader",
"GroupEventType",
"GroupTypes",
"IsInstanceValidator",
"keyboard_gen",
"Keyboard",
"KeyboardButtonColor",
"load_blueprints_from_package",
"Location",
"LoopWrapper",
"MiddlewareError",
"NotRule",
"OpenAppEvent",
"OpenLink",
"OpenLinkEvent",
"OrRule",
"PhotoChatFaviconUploader",
"PhotoFaviconUploader",
"PhotoMarketUploader",
"PhotoMessageUploader",
"PhotoToAlbumUploader",
"PhotoUploader",
"PhotoWallUploader",
"Router",
"run_in_task",
"run_multibot",
"run_sync",
"ShowSnackbarEvent",
"SingleAiohttpClient",
"SingleTokenGenerator",
"StatePeer",
"swear",
"template_gen",
"TemplateElement",
"Text",
"Token",
"User",
"UserAuth",
"UserBlueprint",
"UserEventType",
"UserPolling",
"UserTypes",
"VideoUploader",
"VKAPIError",
"VKApps",
"VKPay",
"vkscript",
"VoiceMessageUploader",
)
| 19.990476
| 67
| 0.662935
|
from vkbottle_types import GroupTypes
from vkbottle_types.events import GroupEventType, UserEventType
from .api import (
ABCAPI,
API,
DEFAULT_REQUEST_VALIDATORS,
DEFAULT_RESPONSE_VALIDATORS,
ABCRequestRescheduler,
ABCRequestValidator,
ABCResponseValidator,
ABCTokenGenerator,
BlockingRequestRescheduler,
ConsistentTokenGenerator,
SingleTokenGenerator,
Token,
get_token_generator,
)
from .dispatch import (
ABCDispenseView,
ABCHandler,
ABCRouter,
ABCRule,
ABCStateDispenser,
ABCView,
AndRule,
BaseMiddleware,
BaseReturnManager,
BaseStateGroup,
BuiltinStateDispenser,
MiddlewareError,
NotRule,
OrRule,
Router,
StatePeer,
)
from .exception_factory import (
ABCErrorHandler,
CaptchaError,
CodeException,
ErrorHandler,
VKAPIError,
swear,
)
from .framework import (
ABCBlueprint,
ABCFramework,
Bot,
BotBlueprint,
User,
UserBlueprint,
run_multibot,
)
from .http import ABCHTTPClient, AiohttpClient, SingleAiohttpClient
from .polling import ABCPolling, BotPolling, UserPolling
from .tools import (
EMPTY_KEYBOARD,
ABCAction,
ABCStorage,
ABCValidator,
AudioUploader,
AuthError,
BaseContext,
BaseUploader,
BotTypes,
CallableValidator,
Callback,
CtxStorage,
DelayedTask,
DocMessagesUploader,
DocUploader,
DocWallUploader,
EqualsValidator,
GraffitiUploader,
IsInstanceValidator,
Keyboard,
KeyboardButtonColor,
Location,
LoopWrapper,
OpenAppEvent,
OpenLink,
OpenLinkEvent,
PhotoChatFaviconUploader,
PhotoFaviconUploader,
PhotoMarketUploader,
PhotoMessageUploader,
PhotoToAlbumUploader,
PhotoUploader,
PhotoWallUploader,
ShowSnackbarEvent,
TemplateElement,
Text,
UserAuth,
UserTypes,
VideoUploader,
VKApps,
VKPay,
VoiceMessageUploader,
keyboard_gen,
load_blueprints_from_package,
run_in_task,
run_sync,
template_gen,
vkscript,
)
event_types = GroupTypes
__all__ = (
"ABCAction",
"ABCAPI",
"ABCBlueprint",
"ABCDispenseView",
"ABCErrorHandler",
"ABCFramework",
"ABCHandler",
"ABCHTTPClient",
"ABCPolling",
"ABCRequestRescheduler",
"ABCRequestValidator",
"ABCResponseValidator",
"ABCRouter",
"ABCRule",
"ABCStateDispenser",
"ABCStorage",
"ABCTokenGenerator",
"ABCValidator",
"ABCView",
"AiohttpClient",
"AndRule",
"API",
"AudioUploader",
"AuthError",
"BaseContext",
"BaseMiddleware",
"BaseReturnManager",
"BaseStateGroup",
"BaseUploader",
"BlockingRequestRescheduler",
"Bot",
"BotBlueprint",
"BotPolling",
"BotTypes",
"BuiltinStateDispenser",
"CallableValidator",
"Callback",
"CaptchaError",
"CodeException",
"ConsistentTokenGenerator",
"CtxStorage",
"DEFAULT_REQUEST_VALIDATORS",
"DEFAULT_RESPONSE_VALIDATORS",
"DelayedTask",
"DocMessagesUploader",
"DocUploader",
"DocWallUploader",
"EMPTY_KEYBOARD",
"EqualsValidator",
"ErrorHandler",
"get_token_generator",
"GraffitiUploader",
"GroupEventType",
"GroupTypes",
"IsInstanceValidator",
"keyboard_gen",
"Keyboard",
"KeyboardButtonColor",
"load_blueprints_from_package",
"Location",
"LoopWrapper",
"MiddlewareError",
"NotRule",
"OpenAppEvent",
"OpenLink",
"OpenLinkEvent",
"OrRule",
"PhotoChatFaviconUploader",
"PhotoFaviconUploader",
"PhotoMarketUploader",
"PhotoMessageUploader",
"PhotoToAlbumUploader",
"PhotoUploader",
"PhotoWallUploader",
"Router",
"run_in_task",
"run_multibot",
"run_sync",
"ShowSnackbarEvent",
"SingleAiohttpClient",
"SingleTokenGenerator",
"StatePeer",
"swear",
"template_gen",
"TemplateElement",
"Text",
"Token",
"User",
"UserAuth",
"UserBlueprint",
"UserEventType",
"UserPolling",
"UserTypes",
"VideoUploader",
"VKAPIError",
"VKApps",
"VKPay",
"vkscript",
"VoiceMessageUploader",
)
| true
| true
|
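For context, a minimal bot wired from the exports above might look like the sketch below; the token is a placeholder and the handler is illustrative only, so treat it as a rough usage outline rather than the project's canonical example.
# Hypothetical minimal-usage sketch of the exports above; the token is a
# placeholder, not a real credential.
from vkbottle import Bot

bot = Bot(token="PLACEHOLDER_TOKEN")

@bot.on.message(text="ping")
async def ping_handler(message) -> str:
    # Returning a string sends it as the reply message.
    return "pong"

if __name__ == "__main__":
    bot.run_forever()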
f7021e2dd359a7348e9d9d8017c699383f32b80a
| 39
|
py
|
Python
|
Beginning.py
|
Vatsal101/AppleDetection
|
d828616355fe643980385bc4615f26659de28eaa
|
[
"MIT"
] | null | null | null |
Beginning.py
|
Vatsal101/AppleDetection
|
d828616355fe643980385bc4615f26659de28eaa
|
[
"MIT"
] | null | null | null |
Beginning.py
|
Vatsal101/AppleDetection
|
d828616355fe643980385bc4615f26659de28eaa
|
[
"MIT"
] | null | null | null |
import cv2 as cv
# NOTE: the path below is a placeholder; cv.imread needs a file path and
# returns None when the file cannot be read.
image = cv.imread('apple.jpg')
| 13
| 19
| 0.666667
|
import cv2 as cv
image = cv.imread('apple.jpg')
| true
| true
|
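Since cv.imread silently returns None on a missing or unreadable file, a slightly fuller sketch adds a read guard and displays the result; the path and window name are placeholders.
# Hedged follow-on sketch; 'apple.jpg' is a placeholder path.
import cv2 as cv

image = cv.imread('apple.jpg')
if image is None:
    raise FileNotFoundError("could not read 'apple.jpg'")
cv.imshow('apple', image)   # show the image until a key is pressed
cv.waitKey(0)
cv.destroyAllWindows()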
f7022030fa7d56ca72d4ea7834e10aac5a5e2e8f
| 7,132
|
py
|
Python
|
SQLAlchemy-0.9.9/test/dialect/mssql/test_engine.py
|
Abi1ity/uniclust2.0
|
c1034eede2425a683712b449d78fdf4d2d5ab414
|
[
"BSD-3-Clause"
] | null | null | null |
SQLAlchemy-0.9.9/test/dialect/mssql/test_engine.py
|
Abi1ity/uniclust2.0
|
c1034eede2425a683712b449d78fdf4d2d5ab414
|
[
"BSD-3-Clause"
] | null | null | null |
SQLAlchemy-0.9.9/test/dialect/mssql/test_engine.py
|
Abi1ity/uniclust2.0
|
c1034eede2425a683712b449d78fdf4d2d5ab414
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- encoding: utf-8
from sqlalchemy.testing import eq_, engines
from sqlalchemy import *
from sqlalchemy import exc
from sqlalchemy.dialects.mssql import pyodbc, pymssql
from sqlalchemy.engine import url
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing.mock import Mock
class ParseConnectTest(fixtures.TestBase):
def test_pyodbc_connect_dsn_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_old_style_dsn_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql:///?dsn=mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_dsn_non_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_dsn_extra(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@mydsn/?LANGUAGE=us_'
'english&foo=bar')
connection = dialect.create_connect_args(u)
dsn_string = connection[0][0]
assert ";LANGUAGE=us_english" in dsn_string
assert ";foo=bar" in dsn_string
def test_pyodbc_connect(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@hostspec/database')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pyodbc_connect_comma_port(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec:12345/data'
'base')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec,12345;Database=datab'
'ase;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_config_port(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?p'
'ort=12345')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password;port=12345'], {}], connection)
def test_pyodbc_extra_connect(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?L'
'ANGUAGE=us_english&foo=bar')
connection = dialect.create_connect_args(u)
eq_(connection[1], {})
eq_(connection[0][0]
in ('DRIVER={SQL Server};Server=hostspec;Database=database;'
'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
def test_pyodbc_odbc_connect(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server'
'%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase'
'%3BUID%3Dusername%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pyodbc_odbc_connect_with_dsn(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase'
'%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword'
)
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Database=database;UID=username;PWD=password'],
{}], connection)
def test_pyodbc_odbc_connect_ignores_other_values(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?od'
'bc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer'
'%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Duse'
'rname%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pymssql_port_setting(self):
dialect = pymssql.dialect()
u = \
url.make_url('mssql+pymssql://scott:tiger@somehost/test')
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost', 'password': 'tiger',
'user': 'scott', 'database': 'test'}], connection
)
u = \
url.make_url('mssql+pymssql://scott:tiger@somehost:5000/test')
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost:5000', 'password': 'tiger',
'user': 'scott', 'database': 'test'}], connection
)
def test_pymssql_disconnect(self):
dialect = pymssql.dialect()
for error in [
'Adaptive Server connection timed out',
'Net-Lib error during Connection reset by peer',
'message 20003',
'Error 10054',
'Not connected to any MS SQL server',
'Connection is closed'
]:
eq_(dialect.is_disconnect(error, None, None), True)
eq_(dialect.is_disconnect("not an error", None, None), False)
@testing.only_on(['mssql+pyodbc', 'mssql+pymssql'],
"FreeTDS specific test")
def test_bad_freetds_warning(self):
engine = engines.testing_engine()
def _bad_version(connection):
return 95, 10, 255
engine.dialect._get_server_version_info = _bad_version
assert_raises_message(exc.SAWarning,
'Unrecognized server version info',
engine.connect)
class VersionDetectionTest(fixtures.TestBase):
def test_pymssql_version(self):
dialect = pymssql.MSDialect_pymssql()
for vers in [
"Microsoft SQL Server Blah - 11.0.9216.62",
"Microsoft SQL Server (XYZ) - 11.0.9216.62 \n"
"Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation",
"Microsoft SQL Azure (RTM) - 11.0.9216.62 \n"
"Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation"
]:
conn = Mock(scalar=Mock(return_value=vers))
eq_(
dialect._get_server_version_info(conn),
(11, 0, 9216, 62)
)
| 40.522727
| 74
| 0.604459
|
from sqlalchemy.testing import eq_, engines
from sqlalchemy import *
from sqlalchemy import exc
from sqlalchemy.dialects.mssql import pyodbc, pymssql
from sqlalchemy.engine import url
from sqlalchemy.testing import fixtures
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing.mock import Mock
class ParseConnectTest(fixtures.TestBase):
def test_pyodbc_connect_dsn_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_old_style_dsn_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql:///?dsn=mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Trusted_Connection=Yes'], {}], connection)
def test_pyodbc_connect_dsn_non_trusted(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@mydsn')
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_dsn_extra(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@mydsn/?LANGUAGE=us_'
'english&foo=bar')
connection = dialect.create_connect_args(u)
dsn_string = connection[0][0]
assert ";LANGUAGE=us_english" in dsn_string
assert ";foo=bar" in dsn_string
def test_pyodbc_connect(self):
dialect = pyodbc.dialect()
u = url.make_url('mssql://username:password@hostspec/database')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pyodbc_connect_comma_port(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec:12345/data'
'base')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec,12345;Database=datab'
'ase;UID=username;PWD=password'], {}], connection)
def test_pyodbc_connect_config_port(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?p'
'ort=12345')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password;port=12345'], {}], connection)
def test_pyodbc_extra_connect(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://username:password@hostspec/database?L'
'ANGUAGE=us_english&foo=bar')
connection = dialect.create_connect_args(u)
eq_(connection[1], {})
eq_(connection[0][0]
in ('DRIVER={SQL Server};Server=hostspec;Database=database;'
'UID=username;PWD=password;foo=bar;LANGUAGE=us_english',
'DRIVER={SQL Server};Server=hostspec;Database=database;UID='
'username;PWD=password;LANGUAGE=us_english;foo=bar'), True)
def test_pyodbc_odbc_connect(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=DRIVER%3D%7BSQL+Server'
'%7D%3BServer%3Dhostspec%3BDatabase%3Ddatabase'
'%3BUID%3Dusername%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pyodbc_odbc_connect_with_dsn(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql:///?odbc_connect=dsn%3Dmydsn%3BDatabase'
'%3Ddatabase%3BUID%3Dusername%3BPWD%3Dpassword'
)
connection = dialect.create_connect_args(u)
eq_([['dsn=mydsn;Database=database;UID=username;PWD=password'],
{}], connection)
def test_pyodbc_odbc_connect_ignores_other_values(self):
dialect = pyodbc.dialect()
u = \
url.make_url('mssql://userdiff:passdiff@localhost/dbdiff?od'
'bc_connect=DRIVER%3D%7BSQL+Server%7D%3BServer'
'%3Dhostspec%3BDatabase%3Ddatabase%3BUID%3Duse'
'rname%3BPWD%3Dpassword')
connection = dialect.create_connect_args(u)
eq_([['DRIVER={SQL Server};Server=hostspec;Database=database;UI'
'D=username;PWD=password'], {}], connection)
def test_pymssql_port_setting(self):
dialect = pymssql.dialect()
u = \
url.make_url('mssql+pymssql://scott:tiger@somehost/test')
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost', 'password': 'tiger',
'user': 'scott', 'database': 'test'}], connection
)
u = \
url.make_url('mssql+pymssql://scott:tiger@somehost:5000/test')
connection = dialect.create_connect_args(u)
eq_(
[[], {'host': 'somehost:5000', 'password': 'tiger',
'user': 'scott', 'database': 'test'}], connection
)
def test_pymssql_disconnect(self):
dialect = pymssql.dialect()
for error in [
'Adaptive Server connection timed out',
'Net-Lib error during Connection reset by peer',
'message 20003',
'Error 10054',
'Not connected to any MS SQL server',
'Connection is closed'
]:
eq_(dialect.is_disconnect(error, None, None), True)
eq_(dialect.is_disconnect("not an error", None, None), False)
@testing.only_on(['mssql+pyodbc', 'mssql+pymssql'],
"FreeTDS specific test")
def test_bad_freetds_warning(self):
engine = engines.testing_engine()
def _bad_version(connection):
return 95, 10, 255
engine.dialect._get_server_version_info = _bad_version
assert_raises_message(exc.SAWarning,
'Unrecognized server version info',
engine.connect)
class VersionDetectionTest(fixtures.TestBase):
def test_pymssql_version(self):
dialect = pymssql.MSDialect_pymssql()
for vers in [
"Microsoft SQL Server Blah - 11.0.9216.62",
"Microsoft SQL Server (XYZ) - 11.0.9216.62 \n"
"Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation",
"Microsoft SQL Azure (RTM) - 11.0.9216.62 \n"
"Jul 18 2014 22:00:21 \nCopyright (c) Microsoft Corporation"
]:
conn = Mock(scalar=Mock(return_value=vers))
eq_(
dialect._get_server_version_info(conn),
(11, 0, 9216, 62)
)
| true
| true
|
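The pattern these tests exercise can be reproduced standalone: build a URL with url.make_url and ask the dialect for DBAPI connect arguments. A minimal sketch, assuming SQLAlchemy with the mssql dialects is installed (user/secret/myhost/mydb are placeholders):
# Standalone sketch of the URL -> connect-args round trip tested above.
from sqlalchemy.dialects.mssql import pyodbc
from sqlalchemy.engine import url

dialect = pyodbc.dialect()
u = url.make_url('mssql://user:secret@myhost/mydb')
args, kwargs = dialect.create_connect_args(u)
print(args[0])   # e.g. DRIVER={SQL Server};Server=myhost;Database=mydb;UID=user;PWD=secret
print(kwargs)    # {}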
f70220a64f73af78d376a845924d9ae3730c7b57
| 589
|
py
|
Python
|
Datacamp Assignments/NLP Skills Track/1. NLP Fundamentals/14_ner_spacy.py
|
Ali-Parandeh/Data_Science_Playground
|
c529e9b3692381572de259e7c93938d6611d83da
|
[
"MIT"
] | null | null | null |
Datacamp Assignments/NLP Skills Track/1. NLP Fundamentals/14_ner_spacy.py
|
Ali-Parandeh/Data_Science_Playground
|
c529e9b3692381572de259e7c93938d6611d83da
|
[
"MIT"
] | null | null | null |
Datacamp Assignments/NLP Skills Track/1. NLP Fundamentals/14_ner_spacy.py
|
Ali-Parandeh/Data_Science_Playground
|
c529e9b3692381572de259e7c93938d6611d83da
|
[
"MIT"
] | 1
|
2021-03-10T09:40:05.000Z
|
2021-03-10T09:40:05.000Z
|
# Import spacy
import spacy
# Instantiate the English model: nlp
nlp = spacy.load('en', tagger=False, parser=False, matcher=False)
# Create a new document: doc
doc = nlp(article)
# Print all of the found entities and their labels
for ent in doc.ents:
print(ent.label_, ent.text)
""" <script.py> output:
ORG Uber
ORG Uber
ORG Apple
ORG Uber
ORG Uber
PERSON Travis Kalanick
ORG Uber
PERSON Tim Cook
ORG Apple
CARDINAL Millions
ORG Uber
GPE drivers’
LOC Silicon Valley’s
ORG Yahoo
PERSON Marissa Mayer
MONEY $186m """
| 19.633333
| 68
| 0.665535
|
import spacy
nlp = spacy.load('en', tagger=False, parser=False, matcher=False)
doc = nlp(article)
for ent in doc.ents:
print(ent.label_, ent.text)
| true
| true
|
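The exercise depends on a DataCamp-provided article variable and the old spacy.load('en', ...) signature. A self-contained variant against a current model name (assuming en_core_web_sm is installed) looks like:
# Self-contained NER sketch; assumes `python -m spacy download en_core_web_sm`
# has been run so the model is available.
import spacy

nlp = spacy.load('en_core_web_sm')
doc = nlp("Uber's Travis Kalanick and Apple's Tim Cook met in Silicon Valley.")
for ent in doc.ents:
    print(ent.label_, ent.text)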
f7022105af9e830f0a6616acaa6e4add128f432c
| 1,166
|
py
|
Python
|
src/cv/movie_to_image.py
|
skiteta/processing-util
|
819637682695ed7a1cb5a5f35df56a487452252a
|
[
"MIT"
] | null | null | null |
src/cv/movie_to_image.py
|
skiteta/processing-util
|
819637682695ed7a1cb5a5f35df56a487452252a
|
[
"MIT"
] | 11
|
2021-12-15T12:09:02.000Z
|
2022-03-28T12:08:48.000Z
|
src/cv/movie_to_image.py
|
skiteta/processing-util
|
819637682695ed7a1cb5a5f35df56a487452252a
|
[
"MIT"
] | null | null | null |
import glob
import cv2
import os
def extract_frame(movie_files_dir, out_dir):
movie_files = glob.glob(movie_files_dir)
if not movie_files:
        print('no movie files found.')
        return
    for movie_file in movie_files:
        ext = movie_file.split('.')[-1]
        # the original check `not ext == 'mp4' or not ext == 'MP4'` was true
        # for every file, so nothing was ever extracted; use a membership test
        if ext not in ('mp4', 'MP4'):
            print(f"can't extract this movie file: {movie_file}")
            continue
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
        cap = cv2.VideoCapture(movie_file)
        if not cap.isOpened():
            print(f"can't extract this movie file: {movie_file}")
            continue
        digit = len(str(int(cap.get(cv2.CAP_PROP_FRAME_COUNT))))
        n = 0
        while True:
            ret, frame = cap.read()
            if not ret:
                # end of stream: break (a return here skipped the remaining
                # files and the summary print below)
                break
            # write frames into out_dir (it was created above but never used)
            out_name = f"{os.path.basename(movie_file)}_{str(n).zfill(digit)}.jpg"
            cv2.imwrite(os.path.join(out_dir, out_name), frame)
            n += 1
        cap.release()
    print(f'{len(movie_files)} movie files extracted')
if __name__ == '__main__':
movie_files_dir = 'movies/*.mp4'
out_dir = 'out/'
extract_frame(movie_files_dir, out_dir)
| 25.911111
| 77
| 0.569468
|
import glob
import cv2
import os
def extract_frame(movie_files_dir, out_dir):
movie_files = glob.glob(movie_files_dir)
if not movie_files:
        print('no movie files found.')
        return
    for movie_file in movie_files:
        ext = movie_file.split('.')[-1]
        if ext not in ('mp4', 'MP4'):
            print(f"can't extract this movie file: {movie_file}")
            continue
        if not os.path.exists(out_dir):
            os.mkdir(out_dir)
        cap = cv2.VideoCapture(movie_file)
        if not cap.isOpened():
            print(f"can't extract this movie file: {movie_file}")
            continue
        digit = len(str(int(cap.get(cv2.CAP_PROP_FRAME_COUNT))))
        n = 0
        while True:
            ret, frame = cap.read()
            if not ret:
                break
            out_name = f"{os.path.basename(movie_file)}_{str(n).zfill(digit)}.jpg"
            cv2.imwrite(os.path.join(out_dir, out_name), frame)
            n += 1
        cap.release()
    print(f'{len(movie_files)} movie files extracted')
if __name__ == '__main__':
movie_files_dir = 'movies/*.mp4'
out_dir = 'out/'
extract_frame(movie_files_dir, out_dir)
| true
| true
|
f702222d3dba2c9d49caadf65f84a04d301756c8
| 9,521
|
py
|
Python
|
ping360_sonar/ping360_sonar/sonar_interface.py
|
CentraleNantesRobotics/ping360_sonar_python
|
f461594aa0345a417f5bb711b8f4500fb4b4727d
|
[
"MIT"
] | 4
|
2019-12-13T20:52:54.000Z
|
2019-12-20T12:33:03.000Z
|
ping360_sonar/ping360_sonar/sonar_interface.py
|
CentraleNantesRobotics/ping360_sonar_python
|
f461594aa0345a417f5bb711b8f4500fb4b4727d
|
[
"MIT"
] | 9
|
2019-12-13T20:35:41.000Z
|
2020-01-09T19:27:56.000Z
|
ping360_sonar/ping360_sonar/sonar_interface.py
|
CentraleNantesRobotics/ping360_sonar_python
|
f461594aa0345a417f5bb711b8f4500fb4b4727d
|
[
"MIT"
] | 1
|
2020-01-06T08:34:46.000Z
|
2020-01-06T08:34:46.000Z
|
#!/usr/bin/env python
from ping360_sonar.sensor import Ping360
from numpy import pi, sqrt, tan, cos, sign
from brping import definitions
class SonarInterface:
samplePeriodTickDuration = 25e-9
firmwareMinTransmitDuration = 5
firmwareMaxTransmitDuration = 500
firmwareMaxSamples = 1200
firmwareMinSamplePeriod = 80
maxDurationRatio = 64e6
def __init__(self, port, baudrate, fallback_emulated):
self.angle = 0
try:
self.sonar = Ping360(port, baudrate)
if self.sonar.initialize():
return
        except Exception:
            pass
if not fallback_emulated:
raise RuntimeError('Cannot initialize sonar')
print('Using emulated sonar')
self.sonar = None
def configureAngles(self, aperture_deg, step_deg, ensure_divisor):
# to gradians
target_half_aperture = int(aperture_deg*200/360+0.5)
best_half_aperture = target_half_aperture
self.angle_step = int(round(step_deg*400/360))
# ensure angle_step is a divisor of max-min in gradians, necessary for LaserScan messages
if ensure_divisor:
# look around step, allow increased aperture
target_step = self.angle_step
# not too far from requested aperture, as close as possible to requested step (impacts turn duration)
computeCost = lambda step,half_aperture: 1000 if half_aperture%step != 0 else abs(step-target_step) + abs(half_aperture-target_half_aperture)
best_cost = computeCost(self.angle_step, target_half_aperture)
if best_cost != 0:
for step in range(1, target_step*2):
for half_aperture in range(target_half_aperture, min(target_half_aperture+10, 200)+1):
cost = computeCost(step, half_aperture)
if cost < best_cost:
best_cost = cost
self.angle_step = step
best_half_aperture = half_aperture
self.angle_min = -best_half_aperture
self.angle_max = best_half_aperture
if self.angle_max == 200:
self.angle_max -= self.angle_step
if self.angle < self.angle_min or self.angle > self.angle_max or (self.angle-self.angle_min) % self.angle_step != 0:
self.angle = 0
@staticmethod
def grad2rad(grad):
return grad*pi/200
def angleMin(self):
return self.grad2rad(self.angle_min)
def angleMax(self):
return self.grad2rad(self.angle_max)
def angleStep(self):
return self.grad2rad(self.angle_step)
def currentAngle(self):
return self.grad2rad(self.angle)
def angleCount(self):
return (self.angle_max-self.angle_min)//self.angle_step
def angleIndex(self):
if self.angle_step > 0:
return (self.angle-self.angle_min)//self.angle_step
return (self.angle-self.angle_max)//self.angle_step
def rangeFrom(self, index):
return (index+1)*self.max_range/self.samples
def configureTransducer(self, gain, frequency, speed_of_sound, max_range):
        self.gain = gain
        self.frequency = frequency
        # store the range: rangeFrom() reads self.max_range, which was
        # otherwise never assigned anywhere in the class
        self.max_range = max_range
        self.samples = int(min(self.firmwareMaxSamples,
                               2*max_range/(self.firmwareMinSamplePeriod*speed_of_sound*self.samplePeriodTickDuration)))
        self.sample_period = int((2.*max_range) /
                                 (self.samples*speed_of_sound*self.samplePeriodTickDuration))
#* Per firmware engineer:
#* 1. Starting point is TxPulse in usec = ((one-way range in metres) * 8000) / (Velocity of sound in metres
#* per second)
#* 2. Then check that TxPulse is wide enough for currently selected sample interval in usec, i.e.,
#* if TxPulse < (2.5 * sample interval) then TxPulse = (2.5 * sample interval)
#* 3. Perform limit checking
#1
one_way_duration_us = (8000.*max_range)/speed_of_sound
# 2 (transmit duration is microseconds, sample_period_ns is nanoseconds)
sample_period_ns = self.sample_period * self.samplePeriodTickDuration
self.transmit_duration = max(2.5*sample_period_ns/1000, one_way_duration_us)
# 3 ensure bounds
if self.transmit_duration < self.firmwareMinTransmitDuration:
self.transmit_duration = self.firmwareMinTransmitDuration
else:
max_duration = min(self.firmwareMaxTransmitDuration, sample_period_ns*self.maxDurationRatio)
if self.transmit_duration > max_duration:
self.transmit_duration = max_duration
self.transmit_duration = int(self.transmit_duration)
def transmitDuration(self):
# microseconds to seconds
return self.transmit_duration/1e6
def updateAngle(self):
self.angle += self.angle_step
if self.angle_min == -200:
# full scan
end_turn = self.angle + self.angle_step > self.angle_max
if self.angle > self.angle_max:
self.angle = self.angle_min
return end_turn
# sector scan, check near end of sector
if self.angle + self.angle_step >= self.angle_max or self.angle + self.angle_step <= self.angle_min:
self.angle_step *= -1
return True
return False
def read(self):
# update angle before transmit
end_turn = self.updateAngle()
if self.sonar is not None:
print(f'transmit: {self.transmit_duration}')
self.sonar.control_transducer(
0, # reserved
self.gain,
self.angle,
self.transmit_duration,
self.sample_period,
self.frequency,
self.samples,
1,
0)
self.sonar.wait_message([definitions.PING360_DEVICE_DATA, definitions.COMMON_NACK], 4.0)
self.data = bytearray(self.sonar._data)
return (len(self.data) != 0, end_turn)
# emulated sonar
from random import randint
from time import sleep
self.data = [0 for _ in range(self.samples)]
scale = 5*abs((self.angle+400) % 400 - 200)
for i in range(self.samples):
if randint(self.samples,2*self.samples) < 1.1*i + scale:
self.data[i] = randint(220, 255)
# emulate transmit duration in microseconds
#sleep(self.transmit_duration/1000000)
return (True, end_turn)
# handles an angular sector of the image
class Bound:
radius = 0
def __init__(self, x, tm, tM):
self.x = x
if type(tM) == int:
self.low = Bound.clamp(tm*x)
self.up = int(tM*sqrt(Bound.radius**2-x**2-1))
else:
self.low = Bound.clamp(x*tm)
self.up = Bound.clamp(x*tM)
if self.up**2 + x**2 > Bound.radius**2:
self.up = int(sign(self.up) * sqrt(Bound.radius**2-x**2-1))
if self.up < self.low:
self.low,self.up = self.up,self.low
    @staticmethod
    def clamp(coord):
if coord < -Bound.radius+1:
return -Bound.radius+1
elif coord > Bound.radius-1:
return Bound.radius-1
return int(coord)
class Sector:
def __init__(self):
self.dr = None
def configure(self, samples, radius):
self.dr = radius/samples
Bound.radius = radius
def init(self, angle, step):
angle_min = angle-step/2
angle_max = angle+step/2
xmin, xmax,same_side = self.xLimits(angle_min, angle_max)
tm, tM = tan(angle_min), tan(angle_max)
self.bounds = []
if same_side:
# same side
if abs(tm) > abs(tM):
tm,tM = tM,tm
for x in range(xmin, xmax+1):
self.bounds.append(Bound(x,tm,tM))
else:
f = 1 if abs(angle-pi/2) < abs(angle+pi/2) else -1
if f == -1:
tm,tM = tM,tm
for x in range(xmin, 0):
self.bounds.append(Bound(x, tM,f))
for x in range(0, xmax+1):
self.bounds.append(Bound(x, tm,f))
self.cur = -1
def xLimits(self, angle_min, angle_max):
cm = cos(angle_min)
cM = cos(angle_max)
if cM < cm:
cm,cM = cM,cm
if cm*cM > 0:
if cM < 0:
cM = 0
else:
cm = 0
return Bound.clamp(round(Bound.radius*cm)), Bound.clamp(round(Bound.radius*cM)), cm*cM >= 0
def nextPoint(self, x, y):
if self.cur == -1:
self.cur = 0
x = self.bounds[0].x
y = self.bounds[0].low
elif y < self.bounds[self.cur].up:
y += 1
else:
self.cur += 1
if self.cur == len(self.bounds):
return False, 0, 0, 0
x = self.bounds[self.cur].x
y = self.bounds[self.cur].low
return True, x, y, int(round(sqrt(x*x+y*y)/self.dr))
| 37.191406
| 153
| 0.561286
|
from ping360_sonar.sensor import Ping360
from numpy import pi, sqrt, tan, cos, sign
from brping import definitions
class SonarInterface:
samplePeriodTickDuration = 25e-9
firmwareMinTransmitDuration = 5
firmwareMaxTransmitDuration = 500
firmwareMaxSamples = 1200
firmwareMinSamplePeriod = 80
maxDurationRatio = 64e6
def __init__(self, port, baudrate, fallback_emulated):
self.angle = 0
try:
self.sonar = Ping360(port, baudrate)
if self.sonar.initialize():
return
        except Exception:
            pass
if not fallback_emulated:
raise RuntimeError('Cannot initialize sonar')
print('Using emulated sonar')
self.sonar = None
def configureAngles(self, aperture_deg, step_deg, ensure_divisor):
target_half_aperture = int(aperture_deg*200/360+0.5)
best_half_aperture = target_half_aperture
self.angle_step = int(round(step_deg*400/360))
if ensure_divisor:
target_step = self.angle_step
computeCost = lambda step,half_aperture: 1000 if half_aperture%step != 0 else abs(step-target_step) + abs(half_aperture-target_half_aperture)
best_cost = computeCost(self.angle_step, target_half_aperture)
if best_cost != 0:
for step in range(1, target_step*2):
for half_aperture in range(target_half_aperture, min(target_half_aperture+10, 200)+1):
cost = computeCost(step, half_aperture)
if cost < best_cost:
best_cost = cost
self.angle_step = step
best_half_aperture = half_aperture
self.angle_min = -best_half_aperture
self.angle_max = best_half_aperture
if self.angle_max == 200:
self.angle_max -= self.angle_step
if self.angle < self.angle_min or self.angle > self.angle_max or (self.angle-self.angle_min) % self.angle_step != 0:
self.angle = 0
@staticmethod
def grad2rad(grad):
return grad*pi/200
def angleMin(self):
return self.grad2rad(self.angle_min)
def angleMax(self):
return self.grad2rad(self.angle_max)
def angleStep(self):
return self.grad2rad(self.angle_step)
def currentAngle(self):
return self.grad2rad(self.angle)
def angleCount(self):
return (self.angle_max-self.angle_min)//self.angle_step
def angleIndex(self):
if self.angle_step > 0:
return (self.angle-self.angle_min)//self.angle_step
return (self.angle-self.angle_max)//self.angle_step
def rangeFrom(self, index):
return (index+1)*self.max_range/self.samples
def configureTransducer(self, gain, frequency, speed_of_sound, max_range):
        self.gain = gain
        self.frequency = frequency
        self.max_range = max_range
        self.samples = int(min(self.firmwareMaxSamples,
                               2*max_range/(self.firmwareMinSamplePeriod*speed_of_sound*self.samplePeriodTickDuration)))
        self.sample_period = int((2.*max_range) /
                                 (self.samples*speed_of_sound*self.samplePeriodTickDuration))
one_way_duration_us = (8000.*max_range)/speed_of_sound
sample_period_ns = self.sample_period * self.samplePeriodTickDuration
self.transmit_duration = max(2.5*sample_period_ns/1000, one_way_duration_us)
if self.transmit_duration < self.firmwareMinTransmitDuration:
self.transmit_duration = self.firmwareMinTransmitDuration
else:
max_duration = min(self.firmwareMaxTransmitDuration, sample_period_ns*self.maxDurationRatio)
if self.transmit_duration > max_duration:
self.transmit_duration = max_duration
self.transmit_duration = int(self.transmit_duration)
def transmitDuration(self):
return self.transmit_duration/1e6
def updateAngle(self):
self.angle += self.angle_step
if self.angle_min == -200:
end_turn = self.angle + self.angle_step > self.angle_max
if self.angle > self.angle_max:
self.angle = self.angle_min
return end_turn
if self.angle + self.angle_step >= self.angle_max or self.angle + self.angle_step <= self.angle_min:
self.angle_step *= -1
return True
return False
def read(self):
end_turn = self.updateAngle()
if self.sonar is not None:
print(f'transmit: {self.transmit_duration}')
self.sonar.control_transducer(
0, self.gain,
self.angle,
self.transmit_duration,
self.sample_period,
self.frequency,
self.samples,
1,
0)
self.sonar.wait_message([definitions.PING360_DEVICE_DATA, definitions.COMMON_NACK], 4.0)
self.data = bytearray(self.sonar._data)
return (len(self.data) != 0, end_turn)
from random import randint
from time import sleep
self.data = [0 for _ in range(self.samples)]
scale = 5*abs((self.angle+400) % 400 - 200)
for i in range(self.samples):
if randint(self.samples,2*self.samples) < 1.1*i + scale:
self.data[i] = randint(220, 255)
return (True, end_turn)
class Bound:
radius = 0
def __init__(self, x, tm, tM):
self.x = x
if type(tM) == int:
self.low = Bound.clamp(tm*x)
self.up = int(tM*sqrt(Bound.radius**2-x**2-1))
else:
self.low = Bound.clamp(x*tm)
self.up = Bound.clamp(x*tM)
if self.up**2 + x**2 > Bound.radius**2:
self.up = int(sign(self.up) * sqrt(Bound.radius**2-x**2-1))
if self.up < self.low:
self.low,self.up = self.up,self.low
    @staticmethod
    def clamp(coord):
if coord < -Bound.radius+1:
return -Bound.radius+1
elif coord > Bound.radius-1:
return Bound.radius-1
return int(coord)
class Sector:
def __init__(self):
self.dr = None
def configure(self, samples, radius):
self.dr = radius/samples
Bound.radius = radius
def init(self, angle, step):
angle_min = angle-step/2
angle_max = angle+step/2
xmin, xmax,same_side = self.xLimits(angle_min, angle_max)
tm, tM = tan(angle_min), tan(angle_max)
self.bounds = []
if same_side:
if abs(tm) > abs(tM):
tm,tM = tM,tm
for x in range(xmin, xmax+1):
self.bounds.append(Bound(x,tm,tM))
else:
f = 1 if abs(angle-pi/2) < abs(angle+pi/2) else -1
if f == -1:
tm,tM = tM,tm
for x in range(xmin, 0):
self.bounds.append(Bound(x, tM,f))
for x in range(0, xmax+1):
self.bounds.append(Bound(x, tm,f))
self.cur = -1
def xLimits(self, angle_min, angle_max):
cm = cos(angle_min)
cM = cos(angle_max)
if cM < cm:
cm,cM = cM,cm
if cm*cM > 0:
if cM < 0:
cM = 0
else:
cm = 0
return Bound.clamp(round(Bound.radius*cm)), Bound.clamp(round(Bound.radius*cM)), cm*cM >= 0
def nextPoint(self, x, y):
if self.cur == -1:
self.cur = 0
x = self.bounds[0].x
y = self.bounds[0].low
elif y < self.bounds[self.cur].up:
y += 1
else:
self.cur += 1
if self.cur == len(self.bounds):
return False, 0, 0, 0
x = self.bounds[self.cur].x
y = self.bounds[self.cur].low
return True, x, y, int(round(sqrt(x*x+y*y)/self.dr))
| true
| true
|
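The numbered firmware-engineer steps inside configureTransducer are easiest to follow with concrete numbers. A worked sketch with illustrative inputs (max_range = 2 m, speed_of_sound = 1500 m/s), reusing the class constants above:
# Worked numeric sketch of configureTransducer's steps; the inputs are
# illustrative, the constants are copied from SonarInterface above.
tick = 25e-9        # samplePeriodTickDuration (seconds per tick)
min_period = 80     # firmwareMinSamplePeriod (ticks)
max_samples = 1200  # firmwareMaxSamples
max_range, c = 2.0, 1500.0

samples = int(min(max_samples, 2 * max_range / (min_period * c * tick)))  # -> 1200
sample_period = int((2.0 * max_range) / (samples * c * tick))             # -> 88 ticks
one_way_us = (8000.0 * max_range) / c                                     # step 1 -> ~10.67 us
period_s = sample_period * tick
transmit_us = max(2.5 * period_s / 1000, one_way_us)                      # step 2
max_duration = min(500, period_s * 64e6)                                  # step 3 upper bound
transmit_us = int(max(5, min(transmit_us, max_duration)))                 # -> 10 us

print(samples, sample_period, transmit_us)  # 1200 88 10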
f70222526f54560a524bad79199280aa1ec5f8cb
| 1,594
|
py
|
Python
|
stko/molecular/conversion/md_analysis.py
|
stevenbennett96/stko
|
ee340af4fc549d5a2c3e9cba8360661335efe0fd
|
[
"MIT"
] | null | null | null |
stko/molecular/conversion/md_analysis.py
|
stevenbennett96/stko
|
ee340af4fc549d5a2c3e9cba8360661335efe0fd
|
[
"MIT"
] | null | null | null |
stko/molecular/conversion/md_analysis.py
|
stevenbennett96/stko
|
ee340af4fc549d5a2c3e9cba8360661335efe0fd
|
[
"MIT"
] | 2
|
2020-05-08T17:51:25.000Z
|
2020-05-11T09:03:24.000Z
|
"""
MD Analysis
===========
#. :class:`.MDAnalysis`
Class for converting a molecule to and back from an MDAnalysis object.
"""
import logging
from ...utilities import WrapperNotInstalledException
try:
import MDAnalysis as mda
except ModuleNotFoundError:
mda = None
logger = logging.getLogger(__name__)
class MDAnalysis:
"""
Converter for :class:`stk.Molecule` to and from MDAnalysis.
Examples
--------
An stk molecule can be converted into an MDAnalysis Universe.
.. code-block:: python
import numpy as np
import stk
import stko
stkmol = stk.BuildingBlock('NCCNCCN').with_centroid(
position=np.array((10, 10, 10))
)
universe = stko.MDAnalysis().get_universe(stkmol)
print('R_g:', universe.atoms.radius_of_gyration())
print('B_sphere:', universe.atoms.bsphere())
print('Universe COM:', universe.atoms.center_of_mass())
print('stk centroid:', stkmol.get_centroid())
"""
def __init__(self):
if mda is None:
raise WrapperNotInstalledException(
'MDAnalysis is not installed; see README for '
'installation.'
)
def get_universe(self, mol):
"""
Get an MDAnalysis object.
Parameters
----------
mol : :class:`stk.Molecule`
Molecule to convert.
Returns
-------
:class:`MDAnalysis.Universe`
The MDAnalysis Universe of the molecule.
"""
rdkit_mol = mol.to_rdkit_mol()
return mda.Universe(rdkit_mol)
| 21.540541
| 70
| 0.593476
|
import logging
from ...utilities import WrapperNotInstalledException
try:
import MDAnalysis as mda
except ModuleNotFoundError:
mda = None
logger = logging.getLogger(__name__)
class MDAnalysis:
def __init__(self):
if mda is None:
raise WrapperNotInstalledException(
'MDAnalysis is not installed; see README for '
'installation.'
)
def get_universe(self, mol):
rdkit_mol = mol.to_rdkit_mol()
return mda.Universe(rdkit_mol)
| true
| true
|
f70223428afca7ece72e103d8ae1e6f89b4c2b23
| 98
|
py
|
Python
|
Resions.py
|
alaminskaib/PythonPrograms
|
0112715d7700a4ebfd0da64a62f8ac20a43f7c79
|
[
"MIT"
] | 2
|
2019-11-11T17:19:10.000Z
|
2019-11-11T17:22:46.000Z
|
Resions.py
|
alaminskaib/PythonPrograms
|
0112715d7700a4ebfd0da64a62f8ac20a43f7c79
|
[
"MIT"
] | null | null | null |
Resions.py
|
alaminskaib/PythonPrograms
|
0112715d7700a4ebfd0da64a62f8ac20a43f7c79
|
[
"MIT"
] | null | null | null |
def fact(n):
if n == 1:
return 1
else:
return n*fact(n-1)
print(fact(5))
| 14
| 26
| 0.469388
|
def fact(n):
if n == 1:
return 1
else:
return n*fact(n-1)
print(fact(5))
| true
| true
|
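A quick cross-check of the recursion against the standard library; using n <= 1 as the base case also covers fact(0):
# Cross-check against math.factorial; the <= 1 base case also handles 0.
import math

def fact(n):
    return 1 if n <= 1 else n * fact(n - 1)

assert fact(5) == math.factorial(5) == 120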
f702242582d845653ca4d57a968a9b4017cef70c
| 1,332
|
py
|
Python
|
torchlars/wrapper.py
|
aknckaan/scrl
|
bff485e27d8785628e35d2cb73dce06f10065b1f
|
[
"Apache-2.0"
] | null | null | null |
torchlars/wrapper.py
|
aknckaan/scrl
|
bff485e27d8785628e35d2cb73dce06f10065b1f
|
[
"Apache-2.0"
] | null | null | null |
torchlars/wrapper.py
|
aknckaan/scrl
|
bff485e27d8785628e35d2cb73dce06f10065b1f
|
[
"Apache-2.0"
] | null | null | null |
import torch
from torch.optim import Optimizer
class OptimWrapper(Optimizer):
# Mixin class that defines convenient functions for writing Optimizer Wrappers
def __init__(self, optim):
self.optim = optim
def __getstate__(self):
return self.optim.__getstate__()
def __setstate__(self, state):
self.optim.__setstate__(state)
@property
def state(self):
return self.optim.state
@property
def param_groups(self):
return self.optim.param_groups
@param_groups.setter
def param_groups(self, value):
self.optim.param_groups = value
def state_dict(self):
return self.optim.state_dict()
def load_state_dict(self, state_dict):
self.optim.load_state_dict(state_dict)
def zero_grad(self):
self.optim.zero_grad()
def add_param_group(self, param_group):
self.optim.add_param_group(param_group)
@property
def defaults(self):
return self.optim.defaults
@defaults.setter
def defaults(self, defaults):
self.optim.defaults = defaults
@torch.no_grad()
def step(self, closure=None):
self.optim.step(closure=closure)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.optim)
| 24.666667
| 83
| 0.643393
|
import torch
from torch.optim import Optimizer
class OptimWrapper(Optimizer):
def __init__(self, optim):
self.optim = optim
def __getstate__(self):
return self.optim.__getstate__()
def __setstate__(self, state):
self.optim.__setstate__(state)
@property
def state(self):
return self.optim.state
@property
def param_groups(self):
return self.optim.param_groups
@param_groups.setter
def param_groups(self, value):
self.optim.param_groups = value
def state_dict(self):
return self.optim.state_dict()
def load_state_dict(self, state_dict):
self.optim.load_state_dict(state_dict)
def zero_grad(self):
self.optim.zero_grad()
def add_param_group(self, param_group):
self.optim.add_param_group(param_group)
@property
def defaults(self):
return self.optim.defaults
@defaults.setter
def defaults(self, defaults):
self.optim.defaults = defaults
@torch.no_grad()
def step(self, closure=None):
self.optim.step(closure=closure)
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.optim)
| true
| true
|
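A hedged usage sketch for the delegation pattern above: wrap a real optimizer and every call forwards to it (assumes torch is installed and the OptimWrapper class above is in scope).
# Hypothetical usage sketch; OptimWrapper is the mixin defined above.
import torch

model = torch.nn.Linear(4, 2)
base = torch.optim.SGD(model.parameters(), lr=0.1)
opt = OptimWrapper(base)            # all calls delegate to `base`

loss = model(torch.randn(8, 4)).pow(2).mean()
opt.zero_grad()                     # -> base.zero_grad()
loss.backward()
opt.step()                          # -> base.step()
assert opt.param_groups is base.param_groups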
f702245801d9c882c5f3432541c972a936bf876a
| 257
|
py
|
Python
|
tests/conftest.py
|
tonyseek/python-envcfg
|
bf95998681cb4215fb97810d476f1a2c5a0ed2e5
|
[
"MIT"
] | 29
|
2015-01-14T06:19:47.000Z
|
2021-04-15T18:22:26.000Z
|
tests/conftest.py
|
tonyseek/python-envcfg
|
bf95998681cb4215fb97810d476f1a2c5a0ed2e5
|
[
"MIT"
] | 2
|
2016-04-25T08:15:19.000Z
|
2016-07-07T09:04:44.000Z
|
tests/conftest.py
|
tonyseek/python-envcfg
|
bf95998681cb4215fb97810d476f1a2c5a0ed2e5
|
[
"MIT"
] | 3
|
2016-07-07T08:19:31.000Z
|
2017-10-26T08:16:23.000Z
|
import os
from pytest import fixture
@fixture(scope='function')
def environ(request):
origin = dict(os.environ)
@request.addfinalizer
def restore_environ():
os.environ.clear()
os.environ.update(origin)
return os.environ
| 16.0625
| 33
| 0.677043
|
import os
from pytest import fixture
@fixture(scope='function')
def environ(request):
origin = dict(os.environ)
@request.addfinalizer
def restore_environ():
os.environ.clear()
os.environ.update(origin)
return os.environ
| true
| true
|
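A sketch of how a test would consume the fixture above; MYAPP_DEBUG is an arbitrary example variable, and the finalizer rolls the mutation back on teardown.
# Example test using the `environ` fixture; MYAPP_DEBUG is illustrative.
def test_environ_is_restored(environ):
    environ['MYAPP_DEBUG'] = '1'
    assert environ['MYAPP_DEBUG'] == '1'
    # After the test, restore_environ() resets os.environ, so the
    # mutation is invisible to other tests.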
f70224df77f3865e09d7c348b171782f2f84676e
| 2,106
|
py
|
Python
|
src/tests/ftest/pool/destroy_rebuild.py
|
kmajzero/daos
|
286aee8fde0d77838be7c2405885a9958c085ffb
|
[
"BSD-2-Clause-Patent"
] | 2
|
2021-07-14T12:21:50.000Z
|
2021-07-14T12:21:52.000Z
|
src/tests/ftest/pool/destroy_rebuild.py
|
kmajzero/daos
|
286aee8fde0d77838be7c2405885a9958c085ffb
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
src/tests/ftest/pool/destroy_rebuild.py
|
kmajzero/daos
|
286aee8fde0d77838be7c2405885a9958c085ffb
|
[
"BSD-2-Clause-Patent"
] | 1
|
2021-11-03T05:00:42.000Z
|
2021-11-03T05:00:42.000Z
|
#!/usr/bin/python3
'''
(C) Copyright 2018-2021 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
'''
from apricot import TestWithServers
from test_utils_pool import TestPool
class DestroyRebuild(TestWithServers):
"""Test class for pool destroy tests.
Test Class Description:
This test verifies destruction of a pool that is rebuilding.
:avocado: recursive
"""
    # also remove the commented line from the yaml file for rank 0
CANCEL_FOR_TICKET = [["DAOS-4891", "rank_to_kill", "[0]"]]
def test_destroy_while_rebuilding(self):
"""Jira ID: DAOS-xxxx.
Test Description:
Create a pool across multiple servers. After excluding one of the
servers, verify that the pool can be destroyed during rebuild.
Use Cases:
Verifying that a pool can be destroyed during rebuild.
:avocado: tags=all,daily_regression,medium
:avocado: tags=pool,destroypoolrebuild
"""
# Get the test parameters
self.pool = TestPool(self.context, self.get_dmg_command())
self.pool.get_params(self)
targets = self.params.get("targets", "/run/server_config/servers/*")
ranks = self.params.get("rank_to_kill", "/run/testparams/*")
# Create a pool
self.pool.create()
# Verify the pool information before starting rebuild
checks = {
"pi_nnodes": len(self.hostlist_servers),
"pi_ntargets": len(self.hostlist_servers) * targets,
"pi_ndisabled": 0,
}
self.assertTrue(
self.pool.check_pool_info(**checks),
"Invalid pool information detected prior to rebuild")
# Start rebuild
self.server_managers[0].stop_ranks(ranks, self.d_log, force=True)
self.pool.wait_for_rebuild(True)
# Destroy the pool while rebuild is active
self.pool.destroy()
self.log.info("Test Passed")
self.get_dmg_command().system_start(",".join(ranks))
self.server_managers[0].update_expected_states(",".join(ranks), ["joined"])
| 32.90625
| 83
| 0.647198
|
from apricot import TestWithServers
from test_utils_pool import TestPool
class DestroyRebuild(TestWithServers):
CANCEL_FOR_TICKET = [["DAOS-4891", "rank_to_kill", "[0]"]]
def test_destroy_while_rebuilding(self):
self.pool = TestPool(self.context, self.get_dmg_command())
self.pool.get_params(self)
targets = self.params.get("targets", "/run/server_config/servers/*")
ranks = self.params.get("rank_to_kill", "/run/testparams/*")
self.pool.create()
checks = {
"pi_nnodes": len(self.hostlist_servers),
"pi_ntargets": len(self.hostlist_servers) * targets,
"pi_ndisabled": 0,
}
self.assertTrue(
self.pool.check_pool_info(**checks),
"Invalid pool information detected prior to rebuild")
self.server_managers[0].stop_ranks(ranks, self.d_log, force=True)
self.pool.wait_for_rebuild(True)
self.pool.destroy()
self.log.info("Test Passed")
self.get_dmg_command().system_start(",".join(ranks))
self.server_managers[0].update_expected_states(",".join(ranks), ["joined"])
| true
| true
|
f702253efe7970a3d73df3a90fcb9ef03d058f4f
| 1,692
|
py
|
Python
|
examples/protocols/asio/udp_echo_server/asio_udp_server_test.py
|
umarudy/esp-idf
|
5667124b60209fb7c04a400e9f3ae1c497ddcd88
|
[
"Apache-2.0"
] | 1
|
2021-05-24T07:39:12.000Z
|
2021-05-24T07:39:12.000Z
|
examples/protocols/asio/udp_echo_server/asio_udp_server_test.py
|
umarudy/esp-idf
|
5667124b60209fb7c04a400e9f3ae1c497ddcd88
|
[
"Apache-2.0"
] | null | null | null |
examples/protocols/asio/udp_echo_server/asio_udp_server_test.py
|
umarudy/esp-idf
|
5667124b60209fb7c04a400e9f3ae1c497ddcd88
|
[
"Apache-2.0"
] | 3
|
2021-08-02T07:11:55.000Z
|
2021-11-09T06:02:05.000Z
|
import os
import re
import socket
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_WIFI_Protocols')
def test_examples_protocol_asio_udp_server(env, extra_data):
"""
steps: |
1. join AP
2. Start server
3. Test connects to server and sends a test message
4. Test evaluates received test message from server
5. Test evaluates received test message on server stdout
"""
test_msg = b'echo message from client to server'
dut1 = env.get_dut('udp_echo_server', 'examples/protocols/asio/udp_echo_server', dut_class=ttfw_idf.ESP32DUT)
# check and log bin size
binary_file = os.path.join(dut1.app.binary_path, 'asio_udp_echo_server.bin')
bin_size = os.path.getsize(binary_file)
ttfw_idf.log_performance('asio_udp_echo_server_bin_size', '{}KB'.format(bin_size // 1024))
# 1. start test
dut1.start_app()
# 2. get the server IP address
data = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)
# 3. create tcp client and connect to server
cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
cli.settimeout(30)
cli.connect((data[0], 2222))
cli.send(test_msg)
data = cli.recv(1024)
# 4. check the message received back from the server
    if data == test_msg:
        print('PASS: Received correct message')
else:
print('Failure!')
raise ValueError('Wrong data received from asio udp server: {} (expected:{})'.format(data, test_msg))
# 5. check the client message appears also on server terminal
dut1.expect(test_msg.decode())
if __name__ == '__main__':
test_examples_protocol_asio_udp_server()
| 36
| 113
| 0.688534
|
import os
import re
import socket
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_WIFI_Protocols')
def test_examples_protocol_asio_udp_server(env, extra_data):
test_msg = b'echo message from client to server'
dut1 = env.get_dut('udp_echo_server', 'examples/protocols/asio/udp_echo_server', dut_class=ttfw_idf.ESP32DUT)
binary_file = os.path.join(dut1.app.binary_path, 'asio_udp_echo_server.bin')
bin_size = os.path.getsize(binary_file)
ttfw_idf.log_performance('asio_udp_echo_server_bin_size', '{}KB'.format(bin_size // 1024))
dut1.start_app()
data = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)
cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
cli.settimeout(30)
cli.connect((data[0], 2222))
cli.send(test_msg)
data = cli.recv(1024)
    if data == test_msg:
        print('PASS: Received correct message')
else:
print('Failure!')
raise ValueError('Wrong data received from asio udp server: {} (expected:{})'.format(data, test_msg))
dut1.expect(test_msg.decode())
if __name__ == '__main__':
test_examples_protocol_asio_udp_server()
| true
| true
|
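Stripped of the test framework, the client half of the exchange is plain socket code. A generic sketch, assuming some UDP echo server is already listening on 127.0.0.1:2222:
# Generic UDP echo round trip; assumes an echo server on 127.0.0.1:2222.
import socket

msg = b'echo message from client to server'
cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
cli.settimeout(5)
cli.connect(('127.0.0.1', 2222))
cli.send(msg)
assert cli.recv(1024) == msg
cli.close()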
f70225bfe03aae6c356907a08cda096186636654
| 89,628
|
py
|
Python
|
plugins/slicer/MONAILabel/MONAILabel.py
|
hkethi002/MONAILabel
|
800dd637bf1825a183033faf3aecf62d4f26f491
|
[
"Apache-2.0"
] | null | null | null |
plugins/slicer/MONAILabel/MONAILabel.py
|
hkethi002/MONAILabel
|
800dd637bf1825a183033faf3aecf62d4f26f491
|
[
"Apache-2.0"
] | null | null | null |
plugins/slicer/MONAILabel/MONAILabel.py
|
hkethi002/MONAILabel
|
800dd637bf1825a183033faf3aecf62d4f26f491
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import json
import logging
import os
import shutil
import tempfile
import time
import traceback
from collections import OrderedDict
from urllib.parse import quote_plus
import ctk
import qt
import SampleData
import SimpleITK as sitk
import sitkUtils
import slicer
import vtk
import vtkSegmentationCore
from MONAILabelLib import GenericAnatomyColors, MONAILabelClient
from slicer.ScriptedLoadableModule import *
from slicer.util import VTKObservationMixin
class MONAILabel(ScriptedLoadableModule):
def __init__(self, parent):
ScriptedLoadableModule.__init__(self, parent)
self.parent.title = "MONAILabel"
self.parent.categories = ["Active Learning"]
self.parent.dependencies = []
self.parent.contributors = ["NVIDIA, KCL"]
self.parent.helpText = """
Active Learning solution.
See more information in <a href="https://github.com/Project-MONAI/MONAILabel">module documentation</a>.
"""
self.parent.acknowledgementText = """
Developed by NVIDIA, KCL
"""
# Additional initialization step after application startup is complete
slicer.app.connect("startupCompleted()", self.initializeAfterStartup)
def initializeAfterStartup(self):
if not slicer.app.commandOptions().noMainWindow:
self.settingsPanel = MONAILabelSettingsPanel()
slicer.app.settingsDialog().addPanel("MONAI Label", self.settingsPanel)
class _ui_MONAILabelSettingsPanel(object):
def __init__(self, parent):
vBoxLayout = qt.QVBoxLayout(parent)
# settings
groupBox = ctk.ctkCollapsibleGroupBox()
groupBox.title = "MONAI Label Server"
groupLayout = qt.QFormLayout(groupBox)
serverUrl = qt.QLineEdit()
groupLayout.addRow("Server address:", serverUrl)
parent.registerProperty("MONAILabel/serverUrl", serverUrl, "text", str(qt.SIGNAL("textChanged(QString)")))
serverUrlHistory = qt.QLineEdit()
groupLayout.addRow("Server address history:", serverUrlHistory)
parent.registerProperty(
"MONAILabel/serverUrlHistory", serverUrlHistory, "text", str(qt.SIGNAL("textChanged(QString)"))
)
fileExtension = qt.QLineEdit()
fileExtension.setText(".nii.gz")
fileExtension.toolTip = "Default extension for uploading images/labels"
groupLayout.addRow("File Extension:", fileExtension)
parent.registerProperty(
"MONAILabel/fileExtension", fileExtension, "text", str(qt.SIGNAL("textChanged(QString)"))
)
clientId = qt.QLineEdit()
clientId.setText("user-xyz")
clientId.toolTip = "Client/User ID that will be sent to MONAI Label server for reference"
groupLayout.addRow("Client/User-ID:", clientId)
parent.registerProperty("MONAILabel/clientId", clientId, "text", str(qt.SIGNAL("textChanged(QString)")))
autoRunSegmentationCheckBox = qt.QCheckBox()
autoRunSegmentationCheckBox.checked = False
autoRunSegmentationCheckBox.toolTip = (
"Enable this option to auto run segmentation if pre-trained model exists when Next Sample is fetched"
)
groupLayout.addRow("Auto-Run Pre-Trained Model:", autoRunSegmentationCheckBox)
parent.registerProperty(
"MONAILabel/autoRunSegmentationOnNextSample",
ctk.ctkBooleanMapper(autoRunSegmentationCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
autoFetchNextSampleCheckBox = qt.QCheckBox()
autoFetchNextSampleCheckBox.checked = False
autoFetchNextSampleCheckBox.toolTip = "Enable this option to fetch Next Sample after saving the label"
groupLayout.addRow("Auto-Fetch Next Sample:", autoFetchNextSampleCheckBox)
parent.registerProperty(
"MONAILabel/autoFetchNextSample",
ctk.ctkBooleanMapper(autoFetchNextSampleCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
autoUpdateModelCheckBox = qt.QCheckBox()
autoUpdateModelCheckBox.checked = True
autoUpdateModelCheckBox.toolTip = "Enable this option to auto update model after submitting the label"
groupLayout.addRow("Auto-Update Model:", autoUpdateModelCheckBox)
parent.registerProperty(
"MONAILabel/autoUpdateModel",
ctk.ctkBooleanMapper(autoUpdateModelCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
askForUserNameCheckBox = qt.QCheckBox()
askForUserNameCheckBox.checked = False
askForUserNameCheckBox.toolTip = "Enable this option to ask for the user name every time the MONAILabel extension is loaded for the first time"
groupLayout.addRow("Ask For User Name:", askForUserNameCheckBox)
parent.registerProperty(
"MONAILabel/askForUserName",
ctk.ctkBooleanMapper(askForUserNameCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
allowOverlapCheckBox = qt.QCheckBox()
allowOverlapCheckBox.checked = False
allowOverlapCheckBox.toolTip = "Enable this option to allow overlapping segmentations"
groupLayout.addRow("Allow Overlapping Segmentations:", allowOverlapCheckBox)
parent.registerProperty(
"MONAILabel/allowOverlappingSegments",
ctk.ctkBooleanMapper(allowOverlapCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
allowOverlapCheckBox.connect("toggled(bool)", self.onUpdateAllowOverlap)
developerModeCheckBox = qt.QCheckBox()
developerModeCheckBox.checked = False
developerModeCheckBox.toolTip = "Enable this option to find options tab etc..."
groupLayout.addRow("Developer Mode:", developerModeCheckBox)
parent.registerProperty(
"MONAILabel/developerMode",
ctk.ctkBooleanMapper(developerModeCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
vBoxLayout.addWidget(groupBox)
vBoxLayout.addStretch(1)
def onUpdateAllowOverlap(self):
if slicer.util.settingsValue("MONAILabel/allowOverlappingSegments", True, converter=slicer.util.toBool):
if slicer.util.settingsValue("MONAILabel/fileExtension", None) != ".seg.nrrd":
slicer.util.warningDisplay(
"Overlapping segmentations are only availabel with the '.seg.nrrd' file extension! Consider changing MONAILabel file extension."
)
class MONAILabelSettingsPanel(ctk.ctkSettingsPanel):
def __init__(self, *args, **kwargs):
ctk.ctkSettingsPanel.__init__(self, *args, **kwargs)
self.ui = _ui_MONAILabelSettingsPanel(self)
class MONAILabelWidget(ScriptedLoadableModuleWidget, VTKObservationMixin):
def __init__(self, parent=None):
"""
Called when the user opens the module the first time and the widget is initialized.
"""
ScriptedLoadableModuleWidget.__init__(self, parent)
VTKObservationMixin.__init__(self) # needed for parameter node observation
self.logic = None
self._parameterNode = None
self._volumeNode = None
self._segmentNode = None
self._volumeNodes = []
self._updatingGUIFromParameterNode = False
self._scribblesEditorWidget = None
self.info = {}
self.models = OrderedDict()
self.trainers = OrderedDict()
self.config = OrderedDict()
self.current_sample = None
self.samples = {}
self.state = {
"SegmentationModel": "",
"DeepgrowModel": "",
"ScribblesMethod": "",
"CurrentStrategy": "",
"CurrentTrainer": "",
}
self.file_ext = ".nii.gz"
self.dgPositiveFiducialNode = None
self.dgPositiveFiducialNodeObservers = []
self.dgNegativeFiducialNode = None
self.dgNegativeFiducialNodeObservers = []
self.ignoreFiducialNodeAddEvent = False
self.progressBar = None
self.tmpdir = None
self.timer = None
self.scribblesMode = None
self.multi_label = False
def setup(self):
"""
Called when the user opens the module the first time and the widget is initialized.
"""
ScriptedLoadableModuleWidget.setup(self)
# Load widget from .ui file (created by Qt Designer).
# Additional widgets can be instantiated manually and added to self.layout.
uiWidget = slicer.util.loadUI(self.resourcePath("UI/MONAILabel.ui"))
self.layout.addWidget(uiWidget)
self.ui = slicer.util.childWidgetVariables(uiWidget)
# Set scene in MRML widgets. Make sure that in Qt designer the top-level qMRMLWidget's
# "mrmlSceneChanged(vtkMRMLScene*)" signal is connected to each MRML widget's
# "setMRMLScene(vtkMRMLScene*)" slot.
uiWidget.setMRMLScene(slicer.mrmlScene)
# These connections ensure that we update parameter node when scene is closed
self.addObserver(slicer.mrmlScene, slicer.mrmlScene.StartCloseEvent, self.onSceneStartClose)
self.addObserver(slicer.mrmlScene, slicer.mrmlScene.EndCloseEvent, self.onSceneEndClose)
self.addObserver(slicer.mrmlScene, slicer.mrmlScene.NodeAddedEvent, self.onSceneEndImport)
# Create logic class. Logic implements all computations that should be possible to run
# in batch mode, without a graphical user interface.
self.tmpdir = slicer.util.tempDirectory("slicer-monai-label")
self.logic = MONAILabelLogic(self.tmpdir)
# Set icons and tune widget properties
self.ui.serverComboBox.lineEdit().setPlaceholderText("enter server address or leave empty to use default")
self.ui.fetchServerInfoButton.setIcon(self.icon("refresh-icon.png"))
self.ui.segmentationButton.setIcon(self.icon("segment.png"))
self.ui.nextSampleButton.setIcon(self.icon("segment.png"))
self.ui.saveLabelButton.setIcon(self.icon("save.png"))
self.ui.trainingButton.setIcon(self.icon("training.png"))
self.ui.stopTrainingButton.setIcon(self.icon("stop.png"))
self.ui.uploadImageButton.setIcon(self.icon("upload.svg"))
self.ui.importLabelButton.setIcon(self.icon("download.png"))
self.ui.dgPositiveFiducialPlacementWidget.setMRMLScene(slicer.mrmlScene)
self.ui.dgPositiveFiducialPlacementWidget.placeButton().toolTip = "Select +ve points"
self.ui.dgPositiveFiducialPlacementWidget.buttonsVisible = False
self.ui.dgPositiveFiducialPlacementWidget.placeButton().show()
self.ui.dgPositiveFiducialPlacementWidget.deleteButton().show()
self.ui.dgNegativeFiducialPlacementWidget.setMRMLScene(slicer.mrmlScene)
self.ui.dgNegativeFiducialPlacementWidget.placeButton().toolTip = "Select -ve points"
self.ui.dgNegativeFiducialPlacementWidget.buttonsVisible = False
self.ui.dgNegativeFiducialPlacementWidget.placeButton().show()
self.ui.dgNegativeFiducialPlacementWidget.deleteButton().show()
self.ui.dgUpdateButton.setIcon(self.icon("segment.png"))
# Connections
self.ui.fetchServerInfoButton.connect("clicked(bool)", self.onClickFetchInfo)
self.ui.serverComboBox.connect("currentIndexChanged(int)", self.onClickFetchInfo)
self.ui.segmentationModelSelector.connect("currentIndexChanged(int)", self.updateParameterNodeFromGUI)
self.ui.segmentationButton.connect("clicked(bool)", self.onClickSegmentation)
self.ui.deepgrowModelSelector.connect("currentIndexChanged(int)", self.updateParameterNodeFromGUI)
self.ui.nextSampleButton.connect("clicked(bool)", self.onNextSampleButton)
self.ui.trainingButton.connect("clicked(bool)", self.onTraining)
self.ui.stopTrainingButton.connect("clicked(bool)", self.onStopTraining)
self.ui.saveLabelButton.connect("clicked(bool)", self.onSaveLabel)
self.ui.uploadImageButton.connect("clicked(bool)", self.onUploadImage)
self.ui.importLabelButton.connect("clicked(bool)", self.onImportLabel)
self.ui.labelComboBox.connect("currentIndexChanged(int)", self.onSelectLabel)
self.ui.dgUpdateButton.connect("clicked(bool)", self.onUpdateDeepgrow)
self.ui.dgUpdateCheckBox.setStyleSheet("padding-left: 10px;")
# Scribbles
# brush and eraser icon from: https://tablericons.com/
self.ui.scribblesMethodSelector.connect("currentIndexChanged(int)", self.updateParameterNodeFromGUI)
self.ui.paintScribblesButton.setIcon(self.icon("paint.png"))
self.ui.paintScribblesButton.setToolTip("Paint scribbles for selected scribble layer")
self.ui.eraseScribblesButton.setIcon(self.icon("eraser.png"))
self.ui.eraseScribblesButton.setToolTip("Erase scribbles for selected scribble layer")
self.ui.updateScribblesButton.setIcon(self.icon("segment.png"))
self.ui.updateScribblesButton.setToolTip(
"Update label by sending scribbles to server to apply selected post processing method"
)
self.ui.brushSizeSlider.connect("valueChanged(double)", self.updateBrushSize)
self.ui.brushSizeSlider.setToolTip("Change brush size for scribbles tool")
self.ui.brush3dCheckbox.stateChanged.connect(self.on3dBrushCheckbox)
self.ui.brush3dCheckbox.setToolTip("Use 3D brush to paint/erase in multiple slices in 3D")
self.ui.updateScribblesButton.clicked.connect(self.onUpdateScribbles)
self.ui.paintScribblesButton.clicked.connect(self.onPaintScribbles)
self.ui.eraseScribblesButton.clicked.connect(self.onEraseScribbles)
self.ui.scribblesLabelSelector.connect("currentIndexChanged(int)", self.onSelectScribblesLabel)
# creating editable combo box
self.ui.scribblesLabelSelector.addItem(self.icon("fg_green.png"), "Foreground")
self.ui.scribblesLabelSelector.addItem(self.icon("bg_red.png"), "Background")
self.ui.scribblesLabelSelector.setCurrentIndex(0)
# start with scribbles section disabled
self.ui.scribblesCollapsibleButton.setEnabled(False)
self.ui.scribblesCollapsibleButton.collapsed = True
# embedded segment editor
self.ui.embeddedSegmentEditorWidget.setMRMLScene(slicer.mrmlScene)
self.ui.embeddedSegmentEditorWidget.setSegmentationNodeSelectorVisible(False)
self.ui.embeddedSegmentEditorWidget.setMasterVolumeNodeSelectorVisible(False)
self.initializeParameterNode()
self.updateServerUrlGUIFromSettings()
# self.onClickFetchInfo()
if slicer.util.settingsValue("MONAILabel/askForUserName", False, converter=slicer.util.toBool):
text = qt.QInputDialog().getText(
self.parent,
"User Name",
"Please enter your name:",
qt.QLineEdit.Normal,
slicer.util.settingsValue("MONAILabel/clientId", None),
)
if text:
settings = qt.QSettings()
settings.setValue("MONAILabel/clientId", text)
def cleanup(self):
self.removeObservers()
shutil.rmtree(self.tmpdir, ignore_errors=True)
def enter(self):
self.initializeParameterNode()
if self._segmentNode:
self.updateGUIFromParameterNode()
def exit(self):
self.removeObserver(self._parameterNode, vtk.vtkCommand.ModifiedEvent, self.updateGUIFromParameterNode)
def onSceneStartClose(self, caller, event):
self.state = {
"SegmentationModel": self.ui.segmentationModelSelector.currentText,
"DeepgrowModel": self.ui.deepgrowModelSelector.currentText,
"ScribblesMethod": self.ui.scribblesMethodSelector.currentText,
"CurrentStrategy": self.ui.strategyBox.currentText,
"CurrentTrainer": self.ui.trainerBox.currentText,
}
self._volumeNode = None
self._segmentNode = None
self._volumeNodes.clear()
self.setParameterNode(None)
self.current_sample = None
self.samples.clear()
self.resetFiducial(
self.ui.dgPositiveFiducialPlacementWidget, self.dgPositiveFiducialNode, self.dgPositiveFiducialNodeObservers
)
self.dgPositiveFiducialNode = None
self.resetFiducial(
self.ui.dgNegativeFiducialPlacementWidget, self.dgNegativeFiducialNode, self.dgNegativeFiducialNodeObservers
)
self.dgNegativeFiducialNode = None
self.onClearScribbles()
def resetFiducial(self, fiducialWidget, fiducialNode, fiducialNodeObservers):
if fiducialWidget.placeModeEnabled:
fiducialWidget.setPlaceModeEnabled(False)
if fiducialNode:
slicer.mrmlScene.RemoveNode(fiducialNode)
self.removeFiducialNodeObservers(fiducialNode, fiducialNodeObservers)
def onSceneEndClose(self, caller, event):
if self.parent.isEntered:
self.initializeParameterNode()
def onSceneEndImport(self, caller, event):
if not self._volumeNode:
self.updateGUIFromParameterNode()
def initializeParameterNode(self):
self.setParameterNode(self.logic.getParameterNode())
# Select default input nodes if nothing is selected yet to save a few clicks for the user
if not self._parameterNode.GetNodeReference("InputVolume"):
firstVolumeNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode")
if firstVolumeNode:
self._parameterNode.SetNodeReferenceID("InputVolume", firstVolumeNode.GetID())
def setParameterNode(self, inputParameterNode):
if inputParameterNode:
self.logic.setDefaultParameters(inputParameterNode)
if self._parameterNode is not None:
self.removeObserver(self._parameterNode, vtk.vtkCommand.ModifiedEvent, self.updateGUIFromParameterNode)
self._parameterNode = inputParameterNode
if self._parameterNode is not None:
self.addObserver(self._parameterNode, vtk.vtkCommand.ModifiedEvent, self.updateGUIFromParameterNode)
# Initial GUI update
self.updateGUIFromParameterNode()
def monitorTraining(self):
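# Periodically polls the server for training status (driven by self.timer):
# while a run is active it updates the training progress bar and accuracy bar;
# once training finishes it stops the timer and refreshes the server info.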
status = self.isTrainingRunning(check_only=False)
if status and status.get("status") == "RUNNING":
info = self.logic.info()
train_stats = info.get("train_stats")
if not train_stats:
return
train_stats = next(iter(train_stats.values())) if train_stats else train_stats
current = 0 if train_stats.get("total_time") else train_stats.get("epoch", 1)
total = train_stats.get("total_epochs", 1)
percent = max(1, 100 * current / total)
if self.ui.trainingProgressBar.value != percent:
self.ui.trainingProgressBar.setValue(percent)
self.ui.trainingProgressBar.setToolTip(f"{current}/{total} epoch is completed")
dice = train_stats.get("best_metric", 0)
self.updateAccuracyBar(dice)
return
print("Training completed")
self.ui.trainingProgressBar.setValue(100)
self.timer.stop()
self.timer = None
self.ui.trainingProgressBar.setToolTip(f"Training: {status.get('status', 'DONE')}")
self.ui.trainingButton.setEnabled(True)
self.ui.stopTrainingButton.setEnabled(False)
self.fetchInfo()
def updateGUIFromParameterNode(self, caller=None, event=None):
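# Called whenever the parameter node changes. Synchronizes all GUI widgets
# (selectors, progress bars, buttons) with the current parameter node state.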
if self._parameterNode is None or self._updatingGUIFromParameterNode:
return
# Make sure GUI changes do not call updateParameterNodeFromGUI (it could cause infinite loop)
self._updatingGUIFromParameterNode = True
file_ext = slicer.util.settingsValue("MONAILabel/fileExtension", self.file_ext)
self.file_ext = file_ext if file_ext else self.file_ext
# Update node selectors and sliders
self.ui.inputSelector.clear()
for v in self._volumeNodes:
self.ui.inputSelector.addItem(v.GetName())
self.ui.inputSelector.setToolTip(self.current_sample.get("name", "") if self.current_sample else "")
if self._volumeNode:
self.ui.inputSelector.setCurrentIndex(self.ui.inputSelector.findText(self._volumeNode.GetName()))
self.ui.inputSelector.setEnabled(False) # Allow only one active scene
self.ui.uploadImageButton.setEnabled(False)
if self.info and slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode") and self._volumeNode is None:
self._volumeNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode")
self.initSample({"id": self._volumeNode.GetName(), "session": True}, autosegment=False)
self.ui.inputSelector.setEnabled(False)
self.ui.uploadImageButton.setEnabled(self.current_sample and self.current_sample.get("session"))
self.updateSelector(self.ui.segmentationModelSelector, ["segmentation"], "SegmentationModel", 0)
self.updateSelector(self.ui.deepgrowModelSelector, ["deepgrow", "deepedit"], "DeepgrowModel", 0)
self.updateSelector(self.ui.scribblesMethodSelector, ["scribbles"], "ScribblesMethod", 0)
if self.models and [k for k, v in self.models.items() if v["type"] == "segmentation"]:
self.ui.segmentationCollapsibleButton.collapsed = False
if self.models and [k for k, v in self.models.items() if v["type"] in ("deepgrow", "deepedit")]:
self.ui.deepgrowCollapsibleButton.collapsed = False
if self.models and [k for k, v in self.models.items() if v["type"] == "scribbles"]:
self.ui.scribblesCollapsibleButton.collapsed = False
self.ui.labelComboBox.clear()
if self._segmentNode:
segmentation = self._segmentNode.GetSegmentation()
totalSegments = segmentation.GetNumberOfSegments()
segmentIds = [segmentation.GetNthSegmentID(i) for i in range(totalSegments)]
for idx, segmentId in enumerate(segmentIds):
segment = segmentation.GetSegment(segmentId)
label = segment.GetName()
if label in ["foreground_scribbles", "background_scribbles"]:
continue
self.ui.labelComboBox.addItem(label)
else:
for label in self.info.get("labels", {}):
self.ui.labelComboBox.addItem(label)
currentLabel = self._parameterNode.GetParameter("CurrentLabel")
idx = self.ui.labelComboBox.findText(currentLabel) if currentLabel else 0
idx = 0 if idx < 0 < self.ui.labelComboBox.count else idx
self.ui.labelComboBox.setCurrentIndex(idx)
self.ui.appComboBox.clear()
self.ui.appComboBox.addItem(self.info.get("name", ""))
datastore_stats = self.info.get("datastore", {})
current = datastore_stats.get("completed", 0)
total = datastore_stats.get("total", 0)
self.ui.activeLearningProgressBar.setValue(current / max(total, 1) * 100)
self.ui.activeLearningProgressBar.setToolTip(f"{current}/{total} samples are labeled")
train_stats = self.info.get("train_stats", {})
train_stats = next(iter(train_stats.values())) if train_stats else train_stats
dice = train_stats.get("best_metric", 0)
self.updateAccuracyBar(dice)
self.ui.strategyBox.clear()
for strategy in self.info.get("strategies", {}):
self.ui.strategyBox.addItem(strategy)
currentStrategy = self._parameterNode.GetParameter("CurrentStrategy")
currentStrategy = currentStrategy if currentStrategy else self.state["CurrentStrategy"]
self.ui.strategyBox.setCurrentIndex(self.ui.strategyBox.findText(currentStrategy) if currentStrategy else 0)
self.ui.trainerBox.clear()
trainers = self.info.get("trainers", {})
if trainers:
self.ui.trainerBox.addItem("ALL")
for t in trainers:
self.ui.trainerBox.addItem(t)
currentTrainer = self._parameterNode.GetParameter("CurrentTrainer")
currentTrainer = currentTrainer if currentTrainer else self.state["CurrentTrainer"]
self.ui.trainerBox.setCurrentIndex(self.ui.trainerBox.findText(currentTrainer) if currentTrainer else 0)
developer_mode = slicer.util.settingsValue("MONAILabel/developerMode", True, converter=slicer.util.toBool)
self.ui.optionsCollapsibleButton.setVisible(developer_mode)
# Enable/Disable
self.ui.nextSampleButton.setEnabled(self.ui.strategyBox.count)
is_training_running = bool(self.info and self.isTrainingRunning())
self.ui.trainingButton.setEnabled(self.info and not is_training_running and current)
self.ui.stopTrainingButton.setEnabled(is_training_running)
if is_training_running and self.timer is None:
self.timer = qt.QTimer()
self.timer.setInterval(5000)
self.timer.connect("timeout()", self.monitorTraining)
self.timer.start()
self.ui.segmentationButton.setEnabled(
self.ui.segmentationModelSelector.currentText and self._volumeNode is not None
)
self.ui.saveLabelButton.setEnabled(self._segmentNode is not None)
self.ui.importLabelButton.setEnabled(self._segmentNode is not None)
# Create empty markup fiducial node for deep grow +ve and -ve
if self._segmentNode:
if not self.dgPositiveFiducialNode:
self.dgPositiveFiducialNode, self.dgPositiveFiducialNodeObservers = self.createFiducialNode(
"P", self.onDeepGrowFiducialNodeModified, [0.5, 1, 0.5]
)
self.ui.dgPositiveFiducialPlacementWidget.setCurrentNode(self.dgPositiveFiducialNode)
self.ui.dgPositiveFiducialPlacementWidget.setPlaceModeEnabled(False)
if not self.dgNegativeFiducialNode:
self.dgNegativeFiducialNode, self.dgNegativeFiducialNodeObservers = self.createFiducialNode(
"N", self.onDeepGrowFiducialNodeModified, [0.5, 0.5, 1]
)
self.ui.dgNegativeFiducialPlacementWidget.setCurrentNode(self.dgNegativeFiducialNode)
self.ui.dgNegativeFiducialPlacementWidget.setPlaceModeEnabled(False)
self.ui.scribblesCollapsibleButton.setEnabled(self.ui.scribblesMethodSelector.count)
self.ui.scribblesCollapsibleButton.collapsed = False
self.ui.dgPositiveFiducialPlacementWidget.setEnabled(self.ui.deepgrowModelSelector.currentText)
self.ui.dgNegativeFiducialPlacementWidget.setEnabled(self.ui.deepgrowModelSelector.currentText)
self.multi_label = "background" in self.info.get("labels", [])
if self.multi_label:
self.ui.dgLabelBackground.hide()
self.ui.dgNegativeFiducialPlacementWidget.hide()
self.ui.freezeUpdateCheckBox.show()
self.ui.dgLabelForeground.setText("Landmarks:")
else:
self.ui.dgNegativeFiducialPlacementWidget.show()
self.ui.freezeUpdateCheckBox.hide()
self.ui.dgLabelForeground.setText("Foreground:")
self.ui.dgUpdateCheckBox.setEnabled(self.ui.deepgrowModelSelector.currentText and self._segmentNode)
self.ui.dgUpdateButton.setEnabled(self.ui.deepgrowModelSelector.currentText and self._segmentNode)
self.ui.embeddedSegmentEditorWidget.setMRMLSegmentEditorNode(
slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLSegmentEditorNode")
)
# All the GUI updates are done
self._updatingGUIFromParameterNode = False
def updateParameterNodeFromGUI(self, caller=None, event=None):
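# Called when the user changes any GUI selection. Saves the current widget
# state into the parameter node so it is restored when the scene is reloaded.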
if self._parameterNode is None or self._updatingGUIFromParameterNode:
return
wasModified = self._parameterNode.StartModify() # Modify all properties in a single batch
segmentationModelIndex = self.ui.segmentationModelSelector.currentIndex
if segmentationModelIndex >= 0:
segmentationModel = self.ui.segmentationModelSelector.itemText(segmentationModelIndex)
self._parameterNode.SetParameter("SegmentationModel", segmentationModel)
deepgrowModelIndex = self.ui.deepgrowModelSelector.currentIndex
if deepgrowModelIndex >= 0:
deepgrowModel = self.ui.deepgrowModelSelector.itemText(deepgrowModelIndex)
self._parameterNode.SetParameter("DeepgrowModel", deepgrowModel)
scribblesMethodIndex = self.ui.scribblesMethodSelector.currentIndex
if scribblesMethodIndex >= 0:
scribblesMethod = self.ui.scribblesMethodSelector.itemText(scribblesMethodIndex)
self._parameterNode.SetParameter("ScribblesMethod", scribblesMethod)
currentLabelIndex = self.ui.labelComboBox.currentIndex
if currentLabelIndex >= 0:
currentLabel = self.ui.labelComboBox.itemText(currentLabelIndex)
self._parameterNode.SetParameter("CurrentLabel", currentLabel)
currentStrategyIndex = self.ui.strategyBox.currentIndex
if currentStrategyIndex >= 0:
currentStrategy = self.ui.strategyBox.itemText(currentStrategyIndex)
self._parameterNode.SetParameter("CurrentStrategy", currentStrategy)
currentTrainerIndex = self.ui.trainerBox.currentIndex
if currentTrainerIndex >= 0:
currentTrainer = self.ui.trainerBox.itemText(currentTrainerIndex)
self._parameterNode.SetParameter("CurrentTrainer", currentTrainer)
self._parameterNode.EndModify(wasModified)
def updateSelector(self, selector, model_types, param, defaultIndex=0):
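# Repopulate a model combo box with all server models matching the given types,
# then restore the previously selected model from the parameter node (falling
# back to defaultIndex when the stored model is no longer available).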
wasSelectorBlocked = selector.blockSignals(True)
selector.clear()
for model_name, model in self.models.items():
if model["type"] in model_types:
selector.addItem(model_name)
selector.setItemData(selector.count - 1, model["description"], qt.Qt.ToolTipRole)
model = self._parameterNode.GetParameter(param)
model = model if model else self.state.get(param, "")
modelIndex = selector.findText(model)
modelIndex = defaultIndex if modelIndex < 0 < selector.count else modelIndex
selector.setCurrentIndex(modelIndex)
try:
modelInfo = self.models[model]
selector.setToolTip(modelInfo["description"])
except KeyError:
selector.setToolTip("")
selector.blockSignals(wasSelectorBlocked)
def updateConfigTable(self):
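# Rebuild the 4-column config table (section/name/key/value) from the server
# info, using row spans to group keys by section and by model/trainer name.
# Values are rendered as combo boxes (lists/dicts), checkboxes (booleans) or text.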
table = self.ui.configTable
table.clear()
headers = ["section", "name", "key", "value"]
table.setColumnCount(len(headers))
table.setHorizontalHeaderLabels(headers)
table.setColumnWidth(0, 50)
config = copy.deepcopy(self.info)
infer = config.get("models", {})
train = config.get("trainers", {})
activelearning = config.get("strategies", {})
scoring = config.get("scoring", {})
row_count = 0
config = {"infer": infer, "train": train, "activelearning": activelearning, "scoring": scoring}
for c in config.values():
row_count += sum([len(c[k].get("config", {})) for k in c.keys()])
# print(f"Total rows: {row_count}")
table.setRowCount(row_count)
n = 0
for section in config:
if not config[section]:
continue
c_section = config[section]
l_section = sum([len(c_section[k].get("config", {})) for k in c_section.keys()])
if not l_section:
continue
# print(f"{n} => l_section = {l_section}")
if l_section:
table.setSpan(n, 0, l_section, 1)
for name in c_section:
c_name = c_section[name]
l_name = len(c_name.get("config", {}))
if not l_name:
continue
# print(f"{n} => l_name = {l_name}")
if l_name:
table.setSpan(n, 1, l_name, 1)
for key, val in c_name.get("config", {}).items():
item = qt.QTableWidgetItem(section)
item.setFlags(item.flags() & ~qt.Qt.ItemIsEditable)
table.setItem(n, 0, item)
item = qt.QTableWidgetItem(name)
table.setItem(n, 1, item)
item.setFlags(item.flags() & ~qt.Qt.ItemIsEditable)
item = qt.QTableWidgetItem(key)
table.setItem(n, 2, item)
item.setFlags(item.flags() & ~qt.Qt.ItemIsEditable)
if isinstance(val, dict) or isinstance(val, list):
combo = qt.QComboBox()
for v in val:
combo.addItem(v)
combo.setCurrentIndex(0)
table.setCellWidget(n, 3, combo)
elif isinstance(val, bool):
checkbox = qt.QCheckBox()
checkbox.setChecked(val)
table.setCellWidget(n, 3, checkbox)
else:
table.setItem(n, 3, qt.QTableWidgetItem(str(val) if val is not None else ""))
# print(f"{n} => {section} => {name} => {key} => {val}")
n = n + 1
def updateAccuracyBar(self, dice):
self.ui.accuracyProgressBar.setValue(dice * 100)
css = ["stop: 0 red"]
if dice > 0.5:
css.append(f"stop: {0.5 / dice} orange")
if dice > 0.6:
css.append(f"stop: {0.6 / dice} yellow")
if dice > 0.7:
css.append(f"stop: {0.7 / dice} lightgreen")
if dice > 0.8:
css.append(f"stop: {0.8 / dice} green")
if dice > 0.9:
css.append(f"stop: {0.9 / dice} darkgreen")
self.ui.accuracyProgressBar.setStyleSheet(
"QProgressBar {text-align: center;} "
"QProgressBar::chunk {background-color: "
"qlineargradient(x0: 0, x2: 1, " + ",".join(css) + ")}"
)
self.ui.accuracyProgressBar.setToolTip(f"Accuracy: {dice:.4f}")
def getParamsFromConfig(self, filter, filter2=None):
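# Read the (possibly user-edited) values back out of the config table and
# return them as a nested dict, cast to the types of the server-side defaults.
# Illustrative shape only - the actual keys depend on the server app:
#   getParamsFromConfig("infer", "segmentation") -> {"device": "cuda", ...}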
mapping = {"infer": "models", "train": "trainers", "activelearning": "strategies", "scoring": "scoring"}
config = {}
for row in range(self.ui.configTable.rowCount):
section = str(self.ui.configTable.item(row, 0).text())
name = str(self.ui.configTable.item(row, 1).text())
key = str(self.ui.configTable.item(row, 2).text())
value = self.ui.configTable.item(row, 3)
if value is None:
value = self.ui.configTable.cellWidget(row, 3)
value = value.checked if isinstance(value, qt.QCheckBox) else value.currentText
else:
value = str(value.text())
v = self.info.get(mapping.get(section, ""), {}).get(name, {}).get("config", {}).get(key, {})
if isinstance(v, int):
value = int(value) if value else 0
elif isinstance(v, float):
value = float(value) if value else 0.0
# print(f"{section} => {name} => {key} => {value}")
if config.get(section) is None:
config[section] = {}
if config[section].get(name) is None:
config[section][name] = {}
config[section][name][key] = value
# print(f"row: {row}, section: {section}, name: {name}, value: {value}, type: {type(v)}")
res = config.get(filter, {})
res = res.get(filter2, {}) if filter2 else res
return res
def onDeepGrowFiducialNodeModified(self, observer, eventid):
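# Fired whenever a deepgrow fiducial point is placed. If the "update on click"
# checkbox is unchecked, the point is only stored on the segment (skip_infer);
# otherwise deepgrow inference runs and the fiducial lists are re-synced from
# the segment tags.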
logging.debug("Deepgrow Point Event!!")
if self.ignoreFiducialNodeAddEvent:
return
markupsNode = observer
movingMarkupIndex = markupsNode.GetDisplayNode().GetActiveControlPoint()
logging.debug("Markup point added; point ID = {}".format(movingMarkupIndex))
current_point = self.getFiducialPointXYZ(markupsNode, movingMarkupIndex)
if not self.ui.dgUpdateCheckBox.checked:
self.onClickDeepgrow(current_point, skip_infer=True)
return
self.onClickDeepgrow(current_point)
self.ignoreFiducialNodeAddEvent = True
self.onEditFiducialPoints(self.dgPositiveFiducialNode, "MONAILabel.ForegroundPoints")
self.onEditFiducialPoints(self.dgNegativeFiducialNode, "MONAILabel.BackgroundPoints")
self.ignoreFiducialNodeAddEvent = False
def getFiducialPointsXYZ(self, fiducialNode, name):
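# Convert every fiducial point of the node from RAS coordinates to IJK voxel
# indices using the current volume's RAS-to-IJK matrix; returns [i, j, k] triples.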
v = self._volumeNode
RasToIjkMatrix = vtk.vtkMatrix4x4()
v.GetRASToIJKMatrix(RasToIjkMatrix)
point_set = []
n = fiducialNode.GetNumberOfFiducials()
for i in range(n):
coord = [0.0, 0.0, 0.0]
fiducialNode.GetNthFiducialPosition(i, coord)
world = [0, 0, 0, 0]
fiducialNode.GetNthFiducialWorldCoordinates(i, world)
p_Ras = [coord[0], coord[1], coord[2], 1.0]
p_Ijk = RasToIjkMatrix.MultiplyDoublePoint(p_Ras)
p_Ijk = [round(i) for i in p_Ijk]
logging.debug("RAS: {}; WORLD: {}; IJK: {}".format(coord, world, p_Ijk))
point_set.append(p_Ijk[0:3])
logging.info("{} => Current Fiducials-Points: {}".format(name, point_set))
return point_set
def getFiducialPointXYZ(self, fiducialNode, index):
v = self._volumeNode
RasToIjkMatrix = vtk.vtkMatrix4x4()
v.GetRASToIJKMatrix(RasToIjkMatrix)
coord = [0.0, 0.0, 0.0]
fiducialNode.GetNthFiducialPosition(index, coord)
world = [0, 0, 0, 0]
fiducialNode.GetNthFiducialWorldCoordinates(index, world)
p_Ras = [coord[0], coord[1], coord[2], 1.0]
p_Ijk = RasToIjkMatrix.MultiplyDoublePoint(p_Ras)
p_Ijk = [round(i) for i in p_Ijk]
logging.debug("RAS: {}; WORLD: {}; IJK: {}".format(coord, world, p_Ijk))
return p_Ijk[0:3]
def onEditFiducialPoints(self, fiducialNode, tagName):
if fiducialNode is None:
return
fiducialNode.RemoveAllMarkups()
segmentId, segment = self.currentSegment()
if segment and segmentId:
v = self._volumeNode
IjkToRasMatrix = vtk.vtkMatrix4x4()
v.GetIJKToRASMatrix(IjkToRasMatrix)
fPosStr = vtk.mutable("")
segment.GetTag(tagName, fPosStr)
pointset = str(fPosStr)
logging.debug("{} => {} Fiducial points are: {}".format(segmentId, segment.GetName(), pointset))
if fPosStr is not None and len(pointset) > 0:
points = json.loads(pointset)
for p in points:
p_Ijk = [p[0], p[1], p[2], 1.0]
p_Ras = IjkToRasMatrix.MultiplyDoublePoint(p_Ijk)
logging.debug("Add Fiducial: {} => {}".format(p_Ijk, p_Ras))
fiducialNode.AddFiducialFromArray(p_Ras[0:3])
def currentSegment(self):
segmentation = self._segmentNode.GetSegmentation()
segmentId = segmentation.GetSegmentIdBySegmentName(self.ui.labelComboBox.currentText)
segment = segmentation.GetSegment(segmentId)
logging.debug("Current SegmentID: {}; Segment: {}".format(segmentId, segment))
return segmentId, segment
def onSelectLabel(self, caller=None, event=None):
self.updateParameterNodeFromGUI(caller, event)
self.ignoreFiducialNodeAddEvent = True
self.onEditFiducialPoints(self.dgPositiveFiducialNode, "MONAILabel.ForegroundPoints")
self.onEditFiducialPoints(self.dgNegativeFiducialNode, "MONAILabel.BackgroundPoints")
self.ignoreFiducialNodeAddEvent = False
def icon(self, name="MONAILabel.png"):
# It should not be necessary to modify this method
iconPath = os.path.join(os.path.dirname(__file__), "Resources", "Icons", name)
if os.path.exists(iconPath):
return qt.QIcon(iconPath)
return qt.QIcon()
def updateServerSettings(self):
self.logic.setServer(self.serverUrl())
self.logic.setClientId(slicer.util.settingsValue("MONAILabel/clientId", "user-xyz"))
self.saveServerUrl()
def serverUrl(self):
serverUrl = self.ui.serverComboBox.currentText
if not serverUrl:
serverUrl = "http://127.0.0.1:8000"
return serverUrl.rstrip("/")
def saveServerUrl(self):
self.updateParameterNodeFromGUI()
# Save selected server URL
settings = qt.QSettings()
serverUrl = self.ui.serverComboBox.currentText
settings.setValue("MONAILabel/serverUrl", serverUrl)
# Save current server URL to the top of history
serverUrlHistory = settings.value("MONAILabel/serverUrlHistory")
if serverUrlHistory:
serverUrlHistory = serverUrlHistory.split(";")
else:
serverUrlHistory = []
try:
serverUrlHistory.remove(serverUrl)
except ValueError:
pass
serverUrlHistory.insert(0, serverUrl)
serverUrlHistory = serverUrlHistory[:10] # keep up to first 10 elements
settings.setValue("MONAILabel/serverUrlHistory", ";".join(serverUrlHistory))
self.updateServerUrlGUIFromSettings()
def onClickFetchInfo(self):
self.fetchInfo()
self.updateConfigTable()
def fetchInfo(self, showInfo=False):
if not self.logic:
return
start = time.time()
try:
self.updateServerSettings()
info = self.logic.info()
self.info = info
if self.info.get("config"):
slicer.util.errorDisplay(
"Please upgrade the MONAI Label server to the latest version",
)
return
except:
slicer.util.errorDisplay(
"Failed to fetch models from remote server. "
"Make sure server address is correct and <server_uri>/info/ "
"is accessible in browser",
detailedText=traceback.format_exc(),
)
return
self.models.clear()
self.config = info.get("config", {})
model_count = {}
models = info.get("models", {})
for k, v in models.items():
model_type = v.get("type", "segmentation")
model_count[model_type] = model_count.get(model_type, 0) + 1
logging.debug("{} = {}".format(k, model_type))
self.models[k] = v
self.updateGUIFromParameterNode()
msg = ""
msg += "-----------------------------------------------------\t\n"
msg += "Total Models Available: \t" + str(len(models)) + "\t\n"
msg += "-----------------------------------------------------\t\n"
for model_type in model_count.keys():
msg += model_type.capitalize() + " Models: \t" + str(model_count[model_type]) + "\t\n"
msg += "-----------------------------------------------------\t\n"
if showInfo:
qt.QMessageBox.information(slicer.util.mainWindow(), "MONAI Label", msg)
logging.info(msg)
logging.info("Time consumed by fetch info: {0:3.1f}".format(time.time() - start))
def setProgressBarLabelText(self, label):
if not self.progressBar:
self.progressBar = slicer.util.createProgressDialog(windowTitle="Wait...", maximum=100)
self.progressBar.labelText = label
def reportProgress(self, progressPercentage):
if not self.progressBar:
self.progressBar = slicer.util.createProgressDialog(windowTitle="Wait...", maximum=100)
self.progressBar.show()
self.progressBar.activateWindow()
self.progressBar.setValue(progressPercentage)
slicer.app.processEvents()
def onTraining(self):
start = time.time()
status = None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
model = self.ui.trainerBox.currentText
model = model if model and model != "ALL" else None
params = self.getParamsFromConfig("train", model)
status = self.logic.train_start(model, params)
self.ui.trainingProgressBar.setValue(1)
self.ui.trainingProgressBar.setToolTip("Training: STARTED")
time.sleep(1)
self.updateGUIFromParameterNode()
except:
slicer.util.errorDisplay(
"Failed to run training in MONAI Label Server", detailedText=traceback.format_exc()
)
finally:
qt.QApplication.restoreOverrideCursor()
if status:
msg = "ID: {}\nStatus: {}\nStart Time: {}\n".format(
status.get("id"),
status.get("status"),
status.get("start_ts"),
)
# slicer.util.infoDisplay(msg, detailedText=json.dumps(status, indent=2))
logging.info(msg)
logging.info("Time consumed by training: {0:3.1f}".format(time.time() - start))
def onStopTraining(self):
start = time.time()
status = None
if not slicer.util.confirmOkCancelDisplay(
"This will stop the current training task. Are you sure you want to continue?"
):
return
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
status = self.logic.train_stop()
except:
slicer.util.errorDisplay("Failed to stop Training Task", detailedText=traceback.format_exc())
finally:
qt.QApplication.restoreOverrideCursor()
if status:
msg = "Status: {}\nStart Time: {}\nEnd Time: {}\nResult: {}".format(
status.get("status"),
status.get("start_ts"),
status.get("end_ts"),
status.get("result", status.get("details", [])[-1]),
)
# slicer.util.infoDisplay(msg, detailedText=json.dumps(status, indent=2))
logging.info(msg)
self.updateGUIFromParameterNode()
logging.info("Time consumed by stop training: {0:3.1f}".format(time.time() - start))
def isTrainingRunning(self, check_only=True):
if not self.logic:
return False
self.updateServerSettings()
return self.logic.train_status(check_only)
def onNextSampleButton(self):
if not self.logic:
return
if self._volumeNode or len(slicer.util.getNodesByClass("vtkMRMLScalarVolumeNode")):
if not slicer.util.confirmOkCancelDisplay(
"This will close current scene. Please make sure you have saved your current work.\n"
"Are you sure to continue?"
):
return
self.onClearScribbles()
slicer.mrmlScene.Clear(0)
start = time.time()
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
strategy = self.ui.strategyBox.currentText
if not strategy:
slicer.util.errorDisplay("No Strategy Found/Selected\t")
return
sample = self.logic.next_sample(strategy, self.getParamsFromConfig("activelearning", strategy))
logging.debug(sample)
if not sample.get("id"):
slicer.util.warningDisplay(
"Unlabeled samples/images not found on the server. You can load your own image instead."
)
return
if self.samples.get(sample["id"]) is not None:
self.current_sample = self.samples[sample["id"]]
name = self.current_sample["VolumeNodeName"]
index = self.ui.inputSelector.findText(name)
self.ui.inputSelector.setCurrentIndex(index)
return
logging.info(sample)
image_id = sample["id"]
image_file = sample.get("path")
image_name = sample.get("name", image_id)
node_name = sample.get("PatientID", sample.get("name", image_id))[-20:]
checksum = sample.get("checksum")
local_exists = image_file and os.path.exists(image_file)
logging.info(f"Check if file exists/shared locally: {image_file} => {local_exists}")
if local_exists:
self._volumeNode = slicer.util.loadVolume(image_file)
self._volumeNode.SetName(node_name)
else:
download_uri = f"{self.serverUrl()}/datastore/image?image={quote_plus(image_id)}"
logging.info(download_uri)
sampleDataLogic = SampleData.SampleDataLogic()
self._volumeNode = sampleDataLogic.downloadFromURL(
nodeNames=node_name, fileNames=image_name, uris=download_uri, checksums=checksum
)[0]
self.initSample(sample)
except:
slicer.util.errorDisplay(
"Failed to fetch Sample from MONAI Label Server", detailedText=traceback.format_exc()
)
finally:
qt.QApplication.restoreOverrideCursor()
self.updateGUIFromParameterNode()
logging.info("Time consumed by next_sample: {0:3.1f}".format(time.time() - start))
def initSample(self, sample, autosegment=True):
sample["VolumeNodeName"] = self._volumeNode.GetName()
self.current_sample = sample
self.samples[sample["id"]] = sample
self._volumeNodes.append(self._volumeNode)
# Create Empty Segments for all labels for this node
self.createSegmentNode()
segmentEditorWidget = slicer.modules.segmenteditor.widgetRepresentation().self().editor
segmentEditorWidget.setSegmentationNode(self._segmentNode)
segmentEditorWidget.setMasterVolumeNode(self._volumeNode)
# check if user allows overlapping segments
if slicer.util.settingsValue("MONAILabel/allowOverlappingSegments", False, converter=slicer.util.toBool):
# set segment editor to allow overlaps
slicer.util.getNodesByClass("vtkMRMLSegmentEditorNode")[0].SetOverwriteMode(2)
if self.info.get("labels"):
self.updateSegmentationMask(None, self.info.get("labels"))
# Check if user wants to run auto-segmentation on new sample
if autosegment and slicer.util.settingsValue(
"MONAILabel/autoRunSegmentationOnNextSample", True, converter=slicer.util.toBool
):
for label in self.info.get("labels", []):
for name, model in self.models.items():
if label in model.get("labels", []):
qt.QApplication.restoreOverrideCursor()
self.ui.segmentationModelSelector.currentText = name
self.onClickSegmentation()
return
def getPermissionForImageDataUpload(self):
return slicer.util.confirmOkCancelDisplay(
"Master volume - without any additional patient information -"
" will be sent to remote data processing server: {0}.\n\n"
"Click 'OK' to proceed with the segmentation.\n"
"Click 'Cancel' to not upload any data and cancel segmentation.\n".format(self.serverUrl()),
dontShowAgainSettingsKey="MONAILabel/showImageDataSendWarning",
)
def onUploadImage(self, init_sample=True, session=False):
volumeNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode")
image_id = volumeNode.GetName()
if not self.getPermissionForImageDataUpload():
return False
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
in_file = tempfile.NamedTemporaryFile(suffix=self.file_ext, dir=self.tmpdir).name
self.reportProgress(5)
start = time.time()
slicer.util.saveNode(volumeNode, in_file)
logging.info("Saved Input Node into {0} in {1:3.1f}s".format(in_file, time.time() - start))
self.reportProgress(30)
if session:
self.current_sample["session_id"] = self.logic.create_session(in_file)["session_id"]
else:
self.logic.upload_image(in_file, image_id)
self.current_sample["session"] = False
self.reportProgress(100)
self._volumeNode = volumeNode
if init_sample:
self.initSample({"id": image_id}, autosegment=False)
qt.QApplication.restoreOverrideCursor()
self.updateGUIFromParameterNode()
return True
except:
self.reportProgress(100)
qt.QApplication.restoreOverrideCursor()
if session:
slicer.util.errorDisplay(
"Server Error:: Session creation Failed\nPlease upgrade to latest monailable version (> 0.2.0)",
detailedText=traceback.format_exc(),
)
else:
slicer.util.errorDisplay("Failed to upload volume to Server", detailedText=traceback.format_exc())
return False
def onImportLabel(self):
if not self.ui.labelPathLineEdit.currentPath or not os.path.exists(self.ui.labelPathLineEdit.currentPath):
slicer.util.warningDisplay("Label File not selected")
return
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateSegmentationMask(self.ui.labelPathLineEdit.currentPath, self.info["labels"])
qt.QApplication.restoreOverrideCursor()
except:
qt.QApplication.restoreOverrideCursor()
slicer.util.errorDisplay("Failed to import label", detailedText=traceback.format_exc())
def onSaveLabel(self):
start = time.time()
labelmapVolumeNode = None
result = None
self.onClearScribbles()
if self.current_sample.get("session"):
if not self.onUploadImage(init_sample=False):
return
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
segmentationNode = self._segmentNode
labelmapVolumeNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLLabelMapVolumeNode")
slicer.modules.segmentations.logic().ExportVisibleSegmentsToLabelmapNode(
segmentationNode, labelmapVolumeNode, self._volumeNode
)
segmentation = segmentationNode.GetSegmentation()
totalSegments = segmentation.GetNumberOfSegments()
segmentIds = [segmentation.GetNthSegmentID(i) for i in range(totalSegments)]
label_info = []
for idx, segmentId in enumerate(segmentIds):
segment = segmentation.GetSegment(segmentId)
if segment.GetName() in ["foreground_scribbles", "background_scribbles"]:
logging.info(f"Removing segment {segmentId}: {segment.GetName()}")
segmentationNode.RemoveSegment(segmentId)
continue
label_info.append({"name": segment.GetName(), "idx": idx + 1})
# label_info.append({"color": segment.GetColor()})
label_in = tempfile.NamedTemporaryFile(suffix=self.file_ext, dir=self.tmpdir).name
self.reportProgress(5)
if (
slicer.util.settingsValue("MONAILabel/allowOverlappingSegments", True, converter=slicer.util.toBool)
and slicer.util.settingsValue("MONAILabel/fileExtension", self.file_ext) == ".seg.nrrd"
):
slicer.util.saveNode(segmentationNode, label_in)
else:
slicer.util.saveNode(labelmapVolumeNode, label_in)
self.reportProgress(30)
self.updateServerSettings()
result = self.logic.save_label(self.current_sample["id"], label_in, {"label_info": label_info})
self.fetchInfo()
if slicer.util.settingsValue("MONAILabel/autoUpdateModel", True, converter=slicer.util.toBool):
try:
if self.isTrainingRunning(check_only=True):
self.logic.train_stop()
except:
logging.info("Failed to stop training; or already stopped")
self.onTraining()
except:
slicer.util.errorDisplay("Failed to save Label to MONAI Label Server", detailedText=traceback.format_exc())
finally:
qt.QApplication.restoreOverrideCursor()
self.reportProgress(100)
if labelmapVolumeNode:
slicer.mrmlScene.RemoveNode(labelmapVolumeNode)
if result:
slicer.util.infoDisplay(
"Label-Mask saved into MONAI Label Server\t\t", detailedText=json.dumps(result, indent=2)
)
if slicer.util.settingsValue("MONAILabel/autoFetchNextSample", False, converter=slicer.util.toBool):
slicer.mrmlScene.Clear(0)
self.onNextSampleButton()
logging.info("Time consumed by save label: {0:3.1f}".format(time.time() - start))
def getSessionId(self):
session_id = None
if self.current_sample.get("session", False):
session_id = self.current_sample.get("session_id")
if not session_id or not self.logic.get_session(session_id):
self.onUploadImage(init_sample=False, session=True)
session_id = self.current_sample["session_id"]
return session_id
def onClickSegmentation(self):
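# Run the selected segmentation model on the current sample via the server's
# inference endpoint and import the returned labelmap into the segmentation node.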
if not self.current_sample:
return
start = time.time()
result_file = None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
model = self.ui.segmentationModelSelector.currentText
image_file = self.current_sample["id"]
params = self.getParamsFromConfig("infer", model)
result_file, params = self.logic.infer(model, image_file, params, session_id=self.getSessionId())
print(f"Result Params for Segmentation: {params}")
labels = (
params.get("label_names") if params and params.get("label_names") else self.models[model].get("labels")
)
if labels and isinstance(labels, dict):
labels = [k for k, _ in sorted(labels.items(), key=lambda item: item[1])]
self.updateSegmentationMask(result_file, labels)
except:
slicer.util.errorDisplay(
"Failed to run inference in MONAI Label Server", detailedText=traceback.format_exc()
)
finally:
qt.QApplication.restoreOverrideCursor()
if result_file and os.path.exists(result_file):
os.unlink(result_file)
self.updateGUIFromParameterNode()
logging.info("Time consumed by segmentation: {0:3.1f}".format(time.time() - start))
def onUpdateDeepgrow(self):
self.onClickDeepgrow(None)
def onClickDeepgrow(self, current_point, skip_infer=False):
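# Collect the +ve/-ve fiducial points, persist them as tags on the current
# segment, and (unless skip_infer) send them to the server for deepgrow/deepedit
# inference. For 2D deepgrow models, only points on the clicked slice are used.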
model = self.ui.deepgrowModelSelector.currentText
if not model:
slicer.util.warningDisplay("Please select a deepgrow model")
return
_, segment = self.currentSegment()
if not segment:
slicer.util.warningDisplay("Please add the required label to run deepgrow")
return
foreground_all = self.getFiducialPointsXYZ(self.dgPositiveFiducialNode, "foreground")
background_all = self.getFiducialPointsXYZ(self.dgNegativeFiducialNode, "background")
segment.SetTag("MONAILabel.ForegroundPoints", json.dumps(foreground_all))
segment.SetTag("MONAILabel.BackgroundPoints", json.dumps(background_all))
if skip_infer:
return
# use the model's "dimension" info to determine whether this is a 3D deepgrow model
deepgrow_3d = self.models[model].get("dimension", 3) != 2
start = time.time()
label = segment.GetName()
operationDescription = "Run Deepgrow for segment: {}; model: {}; 3d {}".format(label, model, deepgrow_3d)
logging.debug(operationDescription)
if not current_point:
if not foreground_all and not deepgrow_3d:
slicer.util.warningDisplay(operationDescription + " - points not added")
return
current_point = foreground_all[-1] if foreground_all else background_all[-1] if background_all else None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
sliceIndex = None
if self.multi_label:
params = {}
segmentation = self._segmentNode.GetSegmentation()
for name in self.info.get("labels", []):
points = []
segmentId = segmentation.GetSegmentIdBySegmentName(name)
segment = segmentation.GetSegment(segmentId) if segmentId else None
if segment:
fPosStr = vtk.mutable("")
segment.GetTag("MONAILabel.ForegroundPoints", fPosStr)
pointset = str(fPosStr)
print("{} => {} Fiducial points are: {}".format(segmentId, name, pointset))
if fPosStr is not None and len(pointset) > 0:
points = json.loads(pointset)
params[name] = points
params["label"] = label
labels = None
else:
sliceIndex = current_point[2] if current_point else None
logging.debug("Slice Index: {}".format(sliceIndex))
if deepgrow_3d or sliceIndex is None:
foreground = foreground_all
background = background_all
else:
foreground = [x for x in foreground_all if x[2] == sliceIndex]
background = [x for x in background_all if x[2] == sliceIndex]
logging.debug("Foreground: {}".format(foreground))
logging.debug("Background: {}".format(background))
logging.debug("Current point: {}".format(current_point))
params = {
"label": label,
"foreground": foreground,
"background": background,
}
labels = [label]
params["label"] = label
params.update(self.getParamsFromConfig("infer", model))
print(f"Request Params for Deepgrow/Deepedit: {params}")
image_file = self.current_sample["id"]
result_file, params = self.logic.infer(model, image_file, params, session_id=self.getSessionId())
print(f"Result Params for Deepgrow/Deepedit: {params}")
if labels is None:
labels = (
params.get("label_names")
if params and params.get("label_names")
else self.models[model].get("labels")
)
if labels and isinstance(labels, dict):
labels = [k for k, _ in sorted(labels.items(), key=lambda item: item[1])]
freeze = label if self.ui.freezeUpdateCheckBox.checked else None
self.updateSegmentationMask(result_file, labels, None if deepgrow_3d else sliceIndex, freeze=freeze)
except:
logging.exception("Unknown Exception")
slicer.util.errorDisplay(operationDescription + " - unexpected error.", detailedText=traceback.format_exc())
finally:
qt.QApplication.restoreOverrideCursor()
self.updateGUIFromParameterNode()
logging.info("Time consumed by Deepgrow: {0:3.1f}".format(time.time() - start))
def createCursor(self, widget):
return slicer.util.mainWindow().cursor
def createSegmentNode(self):
if self._volumeNode is None:
return
if self._segmentNode is None:
name = "segmentation_" + self._volumeNode.GetName()
self._segmentNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLSegmentationNode")
self._segmentNode.SetReferenceImageGeometryParameterFromVolumeNode(self._volumeNode)
self._segmentNode.SetName(name)
def getLabelColor(self, name):
color = GenericAnatomyColors.get(name.lower())
return [c / 255.0 for c in color] if color else None
def updateSegmentationMask(self, in_file, labels, sliceIndex=None, freeze=None):
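# Import a labelmap result file into the segmentation node and merge each
# returned label into its existing segment via the "Logical operators" effect.
# With in_file=None, only create empty segments for the given labels. When
# sliceIndex is set (2D deepgrow), just that slice of the segment is replaced;
# when freeze is given (multi-label deepedit), only those labels are imported
# and all other label updates are discarded.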
# TODO:: Add ROI Node (for Bounding Box if provided in the result)
start = time.time()
logging.debug("Update Segmentation Mask from: {}".format(in_file))
if in_file and not os.path.exists(in_file):
return False
segmentationNode = self._segmentNode
segmentation = segmentationNode.GetSegmentation()
if in_file is None:
for label in labels:
if not segmentation.GetSegmentIdBySegmentName(label):
segmentation.AddEmptySegment(label, label, self.getLabelColor(label))
return True
labels = [l for l in labels if l != "background"]
print(f"Update Segmentation Mask using Labels: {labels}")
# segmentId, segment = self.currentSegment()
labelImage = sitk.ReadImage(in_file)
labelmapVolumeNode = sitkUtils.PushVolumeToSlicer(labelImage, None, className="vtkMRMLLabelMapVolumeNode")
existing_label_ids = {}
for label in labels:
id = segmentation.GetSegmentIdBySegmentName(label)
if id:
existing_label_ids[label] = id
freeze = [freeze] if freeze and isinstance(freeze, str) else freeze
print(f"Import only Freezed label: {freeze}")
numberOfExistingSegments = segmentation.GetNumberOfSegments()
slicer.modules.segmentations.logic().ImportLabelmapToSegmentationNode(labelmapVolumeNode, segmentationNode)
slicer.mrmlScene.RemoveNode(labelmapVolumeNode)
numberOfAddedSegments = segmentation.GetNumberOfSegments() - numberOfExistingSegments
logging.debug("Adding {} segments".format(numberOfAddedSegments))
addedSegmentIds = [
segmentation.GetNthSegmentID(numberOfExistingSegments + i) for i in range(numberOfAddedSegments)
]
for i, segmentId in enumerate(addedSegmentIds):
segment = segmentation.GetSegment(segmentId)
print("Setting new segmentation with id: {} => {}".format(segmentId, segment.GetName()))
label = labels[i] if i < len(labels) else "unknown {}".format(i)
# segment.SetName(label)
# segment.SetColor(self.getLabelColor(label))
if freeze and label not in freeze:
print(f"Discard label update for: {label}")
elif label in existing_label_ids:
segmentEditorWidget = slicer.modules.segmenteditor.widgetRepresentation().self().editor
segmentEditorWidget.setSegmentationNode(segmentationNode)
segmentEditorWidget.setMasterVolumeNode(self._volumeNode)
segmentEditorWidget.setCurrentSegmentID(existing_label_ids[label])
effect = segmentEditorWidget.effectByName("Logical operators")
labelmap = slicer.vtkOrientedImageData()
segmentationNode.GetBinaryLabelmapRepresentation(segmentId, labelmap)
if sliceIndex is not None:
selectedSegmentLabelmap = effect.selectedSegmentLabelmap()
dims = selectedSegmentLabelmap.GetDimensions()
count = 0
for x in range(dims[0]):
for y in range(dims[1]):
if selectedSegmentLabelmap.GetScalarComponentAsDouble(x, y, sliceIndex, 0):
count = count + 1
selectedSegmentLabelmap.SetScalarComponentFromDouble(x, y, sliceIndex, 0, 0)
logging.debug("Total Non Zero: {}".format(count))
# Clear the Slice
if count:
effect.modifySelectedSegmentByLabelmap(
selectedSegmentLabelmap, slicer.qSlicerSegmentEditorAbstractEffect.ModificationModeSet
)
# Union label map
effect.modifySelectedSegmentByLabelmap(
labelmap, slicer.qSlicerSegmentEditorAbstractEffect.ModificationModeAdd
)
else:
# adding bypass masking to not overwrite other layers,
# needed for preserving scribbles during updates
# help from: https://github.com/Slicer/Slicer/blob/master/Modules/Loadable/Segmentations/EditorEffects/Python/SegmentEditorLogicalEffect.py
bypassMask = True
effect.modifySelectedSegmentByLabelmap(
labelmap, slicer.qSlicerSegmentEditorAbstractEffect.ModificationModeSet, bypassMask
)
segmentationNode.RemoveSegment(segmentId)
self.showSegmentationsIn3D()
logging.info("Time consumed by updateSegmentationMask: {0:3.1f}".format(time.time() - start))
return True
def showSegmentationsIn3D(self):
# add closed surface representation
if self._segmentNode:
self._segmentNode.CreateClosedSurfaceRepresentation()
view = slicer.app.layoutManager().threeDWidget(0).threeDView()
view.resetFocalPoint()
def updateServerUrlGUIFromSettings(self):
# Save current server URL to the top of history
settings = qt.QSettings()
serverUrlHistory = settings.value("MONAILabel/serverUrlHistory")
wasBlocked = self.ui.serverComboBox.blockSignals(True)
self.ui.serverComboBox.clear()
if serverUrlHistory:
self.ui.serverComboBox.addItems(serverUrlHistory.split(";"))
self.ui.serverComboBox.setCurrentText(settings.value("MONAILabel/serverUrl"))
self.ui.serverComboBox.blockSignals(wasBlocked)
def createFiducialNode(self, name, onMarkupNodeModified, color):
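# Create a named markups fiducial node with the given display color and attach
# the modification callback; returns the node together with its observer tags.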
displayNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsDisplayNode")
displayNode.SetTextScale(0)
displayNode.SetSelectedColor(color)
fiducialNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode")
fiducialNode.SetName(name)
fiducialNode.SetAndObserveDisplayNodeID(displayNode.GetID())
fiducialNodeObservers = self.addFiducialNodeObserver(fiducialNode, onMarkupNodeModified)
return fiducialNode, fiducialNodeObservers
def removeFiducialNodeObservers(self, fiducialNode, fiducialNodeObservers):
if fiducialNode and fiducialNodeObservers:
for observer in fiducialNodeObservers:
fiducialNode.RemoveObserver(observer)
def addFiducialNodeObserver(self, fiducialNode, onMarkupNodeModified):
fiducialNodeObservers = []
if fiducialNode:
eventIds = [slicer.vtkMRMLMarkupsNode.PointPositionDefinedEvent]
for eventId in eventIds:
fiducialNodeObservers.append(fiducialNode.AddObserver(eventId, onMarkupNodeModified))
return fiducialNodeObservers
def scribblesLayersPresent(self):
scribbles_exist = False
if self._segmentNode is not None:
segmentationNode = self._segmentNode
segmentation = segmentationNode.GetSegmentation()
numSegments = segmentation.GetNumberOfSegments()
segmentIds = [segmentation.GetNthSegmentID(i) for i in range(numSegments)]
scribbles_exist = sum([int("scribbles" in sid) for sid in segmentIds]) > 0
return scribbles_exist
def onStartScribbling(self):
if not self._segmentNode:
return
logging.debug("Scribbles start event")
if (not self.scribblesLayersPresent()) and (self._scribblesEditorWidget is None):
# add background, layer index = -2 [2], color = red
self._segmentNode.GetSegmentation().AddEmptySegment(
"background_scribbles", "background_scribbles", [1.0, 0.0, 0.0]
)
# add foreground, layer index = -1 [3], color = green
self._segmentNode.GetSegmentation().AddEmptySegment(
"foreground_scribbles", "foreground_scribbles", [0.0, 1.0, 0.0]
)
# change segmentation display properties to "see through" the scribbles
# further explanation at:
# https://apidocs.slicer.org/master/classvtkMRMLSegmentationDisplayNode.html
segmentationDisplayNode = self._segmentNode.GetDisplayNode()
# background
opacity = 0.2
segmentationDisplayNode.SetSegmentOpacity2DFill("background_scribbles", opacity)
segmentationDisplayNode.SetSegmentOpacity2DOutline("background_scribbles", opacity)
# foreground
segmentationDisplayNode.SetSegmentOpacity2DFill("foreground_scribbles", opacity)
segmentationDisplayNode.SetSegmentOpacity2DOutline("foreground_scribbles", opacity)
# create segmentEditorWidget to access "Paint" and "Erase" segmentation tools
# these will be used to draw scribbles
self._scribblesEditorWidget = slicer.qMRMLSegmentEditorWidget()
self._scribblesEditorWidget.setMRMLScene(slicer.mrmlScene)
segmentEditorNode = slicer.vtkMRMLSegmentEditorNode()
# adding new scribbles would otherwise overwrite existing labels (one-hot
# behaviour), which is not desired - hence we switch to overlay mode, which
# enables drawing scribbles without changing existing labels. Further explanation at:
# https://discourse.slicer.org/t/how-can-i-set-masking-settings-on-a-segment-editor-effect-in-python/4406/7
segmentEditorNode.SetOverwriteMode(slicer.vtkMRMLSegmentEditorNode.OverwriteNone)
# add all nodes to the widget
slicer.mrmlScene.AddNode(segmentEditorNode)
self._scribblesEditorWidget.setMRMLSegmentEditorNode(segmentEditorNode)
self._scribblesEditorWidget.setSegmentationNode(self._segmentNode)
self._scribblesEditorWidget.setMasterVolumeNode(self._volumeNode)
def onUpdateScribbles(self):
logging.info("Scribbles update event")
scribblesMethod = self.ui.scribblesMethodSelector.currentText
scribbles_in = None
result_file = None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
# get scribbles + label
segmentationNode = self._segmentNode
labelmapVolumeNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLLabelMapVolumeNode")
slicer.modules.segmentations.logic().ExportVisibleSegmentsToLabelmapNode(
segmentationNode, labelmapVolumeNode, self._volumeNode
)
scribbles_in = tempfile.NamedTemporaryFile(suffix=self.file_ext, dir=self.tmpdir).name
self.reportProgress(5)
# save scribbles + label to file
slicer.util.saveNode(labelmapVolumeNode, scribbles_in)
self.reportProgress(30)
self.updateServerSettings()
self.reportProgress(60)
# try to first fetch vtkMRMLAnnotationROINode
roiNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLAnnotationROINode")
if roiNode is None:  # if vtkMRMLAnnotationROINode not present, then check for vtkMRMLMarkupsROINode
roiNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLMarkupsROINode")
# if roi node found, then try to get roi
selected_roi = self.getROIPointsXYZ(roiNode)
# send scribbles + label to server along with selected scribbles method
params = self.getParamsFromConfig("infer", scribblesMethod)
params.update({"roi": selected_roi})
image_file = self.current_sample["id"]
result_file, params = self.logic.infer(
scribblesMethod, image_file, params, scribbles_in, session_id=self.getSessionId()
)
# display result from server
self.reportProgress(90)
_, segment = self.currentSegment()
label = segment.GetName()
self.updateSegmentationMask(result_file, [label])
except:
slicer.util.errorDisplay(
"Failed to post process label on MONAI Label Server using {}".format(scribblesMethod),
detailedText=traceback.format_exc(),
)
finally:
qt.QApplication.restoreOverrideCursor()
self.reportProgress(100)
# clear all temporary files
if scribbles_in and os.path.exists(scribbles_in):
os.unlink(scribbles_in)
if result_file and os.path.exists(result_file):
os.unlink(result_file)
def getROIPointsXYZ(self, roiNode):
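# Convert the ROI bounds from RAS to IJK voxel coordinates. Returns a flat
# list [x_min, x_max, y_min, y_max, z_min, z_max] in IJK, or [] if no ROI node.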
if roiNode is None:
return []
v = self._volumeNode
RasToIjkMatrix = vtk.vtkMatrix4x4()
v.GetRASToIJKMatrix(RasToIjkMatrix)
roi_points_ras = [0.0] * 6
if roiNode.__class__.__name__ == "vtkMRMLMarkupsROINode":
# for vtkMRMLMarkupsROINode
print(roiNode.__class__.__name__)
center = [0] * 3
roiNode.GetCenter(center)
roi_points_ras = [(x - s / 2, x + s / 2) for x, s in zip(center, roiNode.GetSize())]
roi_points_ras = [item for sublist in roi_points_ras for item in sublist]
elif roiNode.__class__.__name__ == "vtkMRMLAnnotationROINode":
# for vtkMRMLAnnotationROINode (old method)
print(roiNode.__class__.__name__)
roiNode.GetBounds(roi_points_ras)
else:
# if none found then best to return empty list
return []
min_points_ras = [roi_points_ras[0], roi_points_ras[2], roi_points_ras[4], 1.0]
max_points_ras = [roi_points_ras[0 + 1], roi_points_ras[2 + 1], roi_points_ras[4 + 1], 1.0]
min_points_ijk = RasToIjkMatrix.MultiplyDoublePoint(min_points_ras)
max_points_ijk = RasToIjkMatrix.MultiplyDoublePoint(max_points_ras)
min_points_ijk = [round(i) for i in min_points_ijk]
max_points_ijk = [round(i) for i in max_points_ijk]
roi_points_ijk = [val for pair in zip(min_points_ijk[0:3], max_points_ijk[0:3]) for val in pair]
logging.debug("RAS: {}; IJK: {}".format(roi_points_ras, roi_points_ijk))
return roi_points_ijk
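    # Worked example for getROIPointsXYZ (hypothetical numbers): if min_points_ijk[0:3] is
    # [10, 20, 30] and max_points_ijk[0:3] is [50, 60, 70], the zip/flatten above returns
    # [10, 50, 20, 60, 30, 70], i.e. (x_min, x_max, y_min, y_max, z_min, z_max) in IJK space.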
def onClearScribblesSegmentNodes(self):
# more explanation on this at:
# https://discourse.slicer.org/t/how-to-clear-segmentation/7433/4
# clear "scribbles" segment before saving the label
if not self._segmentNode:
return
segmentation = self._segmentNode
num_segments = segmentation.GetSegmentation().GetNumberOfSegments()
for i in range(num_segments):
segmentId = segmentation.GetSegmentation().GetNthSegmentID(i)
if "scribbles" in segmentId:
logging.info("clearning {}".format(segmentId))
labelMapRep = slicer.vtkOrientedImageData()
segmentation.GetBinaryLabelmapRepresentation(segmentId, labelMapRep)
vtkSegmentationCore.vtkOrientedImageDataResample.FillImage(labelMapRep, 0, labelMapRep.GetExtent())
slicer.vtkSlicerSegmentationsModuleLogic.SetBinaryLabelmapToSegment(
labelMapRep, segmentation, segmentId, slicer.vtkSlicerSegmentationsModuleLogic.MODE_REPLACE
)
def onClearScribbles(self):
# reset scribbles mode
self.scribblesMode = None
# clear scribbles editor widget
if self._scribblesEditorWidget:
widget = self._scribblesEditorWidget
del widget
self._scribblesEditorWidget = None
# remove "scribbles" segments from label
self.onClearScribblesSegmentNodes()
# reset UI elements associated with scribbles
self.ui.scribblesCollapsibleButton.collapsed = True
self.ui.paintScribblesButton.setChecked(False)
self.ui.eraseScribblesButton.setChecked(False)
self.ui.scribblesLabelSelector.setCurrentIndex(0)
def checkAndInitialiseScribbles(self):
if not self._segmentNode:
return
if self._scribblesEditorWidget is None:
self.onStartScribbling()
if self.scribblesMode is None:
self.changeScribblesMode(tool="Paint", layer="foreground_scribbles")
self.updateScribToolLayerFromMode()
def updateScribToolLayerFromMode(self):
if not self._segmentNode:
return
logging.info("Scribbles mode {} ".format(self.scribblesMode))
self.checkAndInitialiseScribbles()
# update tool/layer select for scribblesEditorWidget
tool, layer = self.getToolAndLayerFromScribblesMode()
if self._scribblesEditorWidget:
self._scribblesEditorWidget.setActiveEffectByName(tool)
self._scribblesEditorWidget.setCurrentSegmentID(layer)
# update brush type from checkbox
if tool in ("Paint", "Erase"):
is3dbrush = self.ui.brush3dCheckbox.checkState()
self.on3dBrushCheckbox(state=is3dbrush)
# update brush size from slider
brushSize = self.ui.brushSizeSlider.value
self.updateBrushSize(value=brushSize)
def getToolAndLayerFromScribblesMode(self):
if self.scribblesMode is not None:
return self.scribblesMode.split("+")
else:
# default modes
return "Paint", "foreground_scribbles"
def changeScribblesMode(self, tool=None, layer=None):
ctool, clayer = self.getToolAndLayerFromScribblesMode()
        ctool = tool if tool is not None else ctool
        clayer = layer if layer is not None else clayer
self.scribblesMode = "+".join([ctool, clayer])
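        # e.g. (hypothetical) changeScribblesMode(tool="Erase") while the current mode is
        # "Paint+foreground_scribbles" yields "Erase+foreground_scribbles".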
def onPaintScribbles(self):
if not self._segmentNode:
return
if self.ui.eraseScribblesButton.checked:
self.ui.eraseScribblesButton.setChecked(False)
self.changeScribblesMode(tool="Paint" if self.ui.paintScribblesButton.checked else "None")
self.updateScribToolLayerFromMode()
def onEraseScribbles(self):
if not self._segmentNode:
return
if self.ui.paintScribblesButton.checked:
self.ui.paintScribblesButton.setChecked(False)
self.changeScribblesMode(tool="Erase" if self.ui.eraseScribblesButton.checked else "None")
self.updateScribToolLayerFromMode()
def onSelectScribblesLabel(self):
if not self._segmentNode:
return
index = self.ui.scribblesLabelSelector.currentIndex
index = 0 if index < 0 else index
selected = self.ui.scribblesLabelSelector.itemText(index)
layer = "foreground_scribbles" if selected == "Foreground" else "background_scribbles"
self.changeScribblesMode(layer=layer)
self.updateScribToolLayerFromMode()
def on3dBrushCheckbox(self, state):
logging.info("3D brush update {}".format(state))
self.checkAndInitialiseScribbles()
effect = self._scribblesEditorWidget.activeEffect()
# enable scribbles in 3d using a sphere brush
effect.setParameter("BrushSphere", state)
def updateBrushSize(self, value):
logging.info("brush size update {}".format(value))
if self.ui.paintScribblesButton.checked or self.ui.eraseScribblesButton.checked:
self.checkAndInitialiseScribbles()
effect = self._scribblesEditorWidget.activeEffect()
effect.setParameter("BrushAbsoluteDiameter", value)
class MONAILabelLogic(ScriptedLoadableModuleLogic):
def __init__(self, tmpdir=None, server_url=None, progress_callback=None, client_id=None):
ScriptedLoadableModuleLogic.__init__(self)
self.server_url = server_url
self.tmpdir = slicer.util.tempDirectory("slicer-monai-label") if tmpdir is None else tmpdir
self.client_id = client_id
self.volumeToSessions = dict()
self.progress_callback = progress_callback
def setDefaultParameters(self, parameterNode):
if not parameterNode.GetParameter("SegmentationModel"):
parameterNode.SetParameter("SegmentationModel", "")
if not parameterNode.GetParameter("DeepgrowModel"):
parameterNode.SetParameter("DeepgrowModel", "")
if not parameterNode.GetParameter("ScribblesMethod"):
parameterNode.SetParameter("ScribblesMethod", "")
def __del__(self):
shutil.rmtree(self.tmpdir, ignore_errors=True)
def setServer(self, server_url=None):
self.server_url = server_url if server_url else "http://127.0.0.1:8000"
def setClientId(self, client_id):
self.client_id = client_id if client_id else "user-xyz"
def setProgressCallback(self, progress_callback=None):
self.progress_callback = progress_callback
def reportProgress(self, progress):
if self.progress_callback:
self.progress_callback(progress)
def info(self):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).info()
def next_sample(self, strategy, params={}):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).next_sample(strategy, params)
def create_session(self, image_in):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).create_session(image_in)
def get_session(self, session_id):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).get_session(session_id)
def remove_session(self, session_id):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).remove_session(session_id)
def upload_image(self, image_in, image_id=None):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).upload_image(image_in, image_id)
def save_label(self, image_in, label_in, params):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).save_label(
image_in, label_in, params=params
)
def infer(self, model, image_in, params={}, label_in=None, file=None, session_id=None):
logging.debug("Preparing input data for segmentation")
self.reportProgress(0)
client = MONAILabelClient(self.server_url, self.tmpdir, self.client_id)
result_file, params = client.infer(model, image_in, params, label_in, file, session_id)
logging.debug(f"Image Response: {result_file}")
logging.debug(f"JSON Response: {params}")
self.reportProgress(100)
return result_file, params
def train_start(self, model=None, params={}):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).train_start(model, params)
def train_status(self, check_if_running):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).train_status(check_if_running)
def train_stop(self):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).train_stop()
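# Minimal usage sketch for MONAILabelLogic (a hedged example: assumes a MONAI Label server
# is reachable at the default address and that a strategy named "random" exists on it):
#   logic = MONAILabelLogic()
#   logic.setServer("http://127.0.0.1:8000")
#   info = logic.info()                       # app name, labels, models, strategies, ...
#   sample = logic.next_sample("random", {})  # id/path of the next unlabeled image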
class MONAILabelTest(ScriptedLoadableModuleTest):
def setUp(self):
slicer.mrmlScene.Clear()
def runTest(self):
self.setUp()
self.test_MONAILabel1()
def test_MONAILabel1(self):
self.delayDisplay("Test passed")
import copy
import json
import logging
import os
import shutil
import tempfile
import time
import traceback
from collections import OrderedDict
from urllib.parse import quote_plus
import ctk
import qt
import SampleData
import SimpleITK as sitk
import sitkUtils
import slicer
import vtk
import vtkSegmentationCore
from MONAILabelLib import GenericAnatomyColors, MONAILabelClient
from slicer.ScriptedLoadableModule import *
from slicer.util import VTKObservationMixin
class MONAILabel(ScriptedLoadableModule):
def __init__(self, parent):
ScriptedLoadableModule.__init__(self, parent)
self.parent.title = "MONAILabel"
self.parent.categories = ["Active Learning"]
self.parent.dependencies = []
self.parent.contributors = ["NVIDIA, KCL"]
self.parent.helpText = """
Active Learning solution.
See more information in <a href="https://github.com/Project-MONAI/MONAILabel">module documentation</a>.
"""
self.parent.acknowledgementText = """
Developed by NVIDIA, KCL
"""
slicer.app.connect("startupCompleted()", self.initializeAfterStartup)
def initializeAfterStartup(self):
if not slicer.app.commandOptions().noMainWindow:
self.settingsPanel = MONAILabelSettingsPanel()
slicer.app.settingsDialog().addPanel("MONAI Label", self.settingsPanel)
class _ui_MONAILabelSettingsPanel(object):
def __init__(self, parent):
vBoxLayout = qt.QVBoxLayout(parent)
groupBox = ctk.ctkCollapsibleGroupBox()
groupBox.title = "MONAI Label Server"
groupLayout = qt.QFormLayout(groupBox)
serverUrl = qt.QLineEdit()
groupLayout.addRow("Server address:", serverUrl)
parent.registerProperty("MONAILabel/serverUrl", serverUrl, "text", str(qt.SIGNAL("textChanged(QString)")))
serverUrlHistory = qt.QLineEdit()
groupLayout.addRow("Server address history:", serverUrlHistory)
parent.registerProperty(
"MONAILabel/serverUrlHistory", serverUrlHistory, "text", str(qt.SIGNAL("textChanged(QString)"))
)
fileExtension = qt.QLineEdit()
fileExtension.setText(".nii.gz")
fileExtension.toolTip = "Default extension for uploading images/labels"
groupLayout.addRow("File Extension:", fileExtension)
parent.registerProperty(
"MONAILabel/fileExtension", fileExtension, "text", str(qt.SIGNAL("textChanged(QString)"))
)
clientId = qt.QLineEdit()
clientId.setText("user-xyz")
clientId.toolTip = "Client/User ID that will be sent to MONAI Label server for reference"
groupLayout.addRow("Client/User-ID:", clientId)
parent.registerProperty("MONAILabel/clientId", clientId, "text", str(qt.SIGNAL("textChanged(QString)")))
autoRunSegmentationCheckBox = qt.QCheckBox()
autoRunSegmentationCheckBox.checked = False
autoRunSegmentationCheckBox.toolTip = (
"Enable this option to auto run segmentation if pre-trained model exists when Next Sample is fetched"
)
groupLayout.addRow("Auto-Run Pre-Trained Model:", autoRunSegmentationCheckBox)
parent.registerProperty(
"MONAILabel/autoRunSegmentationOnNextSample",
ctk.ctkBooleanMapper(autoRunSegmentationCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
autoFetchNextSampleCheckBox = qt.QCheckBox()
autoFetchNextSampleCheckBox.checked = False
autoFetchNextSampleCheckBox.toolTip = "Enable this option to fetch Next Sample after saving the label"
groupLayout.addRow("Auto-Fetch Next Sample:", autoFetchNextSampleCheckBox)
parent.registerProperty(
"MONAILabel/autoFetchNextSample",
ctk.ctkBooleanMapper(autoFetchNextSampleCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
autoUpdateModelCheckBox = qt.QCheckBox()
autoUpdateModelCheckBox.checked = True
autoUpdateModelCheckBox.toolTip = "Enable this option to auto update model after submitting the label"
groupLayout.addRow("Auto-Update Model:", autoUpdateModelCheckBox)
parent.registerProperty(
"MONAILabel/autoUpdateModel",
ctk.ctkBooleanMapper(autoUpdateModelCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
askForUserNameCheckBox = qt.QCheckBox()
askForUserNameCheckBox.checked = False
askForUserNameCheckBox.toolTip = "Enable this option to ask for the user name every time the MONAILabel extension is loaded for the first time"
groupLayout.addRow("Ask For User Name:", askForUserNameCheckBox)
parent.registerProperty(
"MONAILabel/askForUserName",
ctk.ctkBooleanMapper(askForUserNameCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
allowOverlapCheckBox = qt.QCheckBox()
allowOverlapCheckBox.checked = False
allowOverlapCheckBox.toolTip = "Enable this option to allow overlapping segmentations"
groupLayout.addRow("Allow Overlapping Segmentations:", allowOverlapCheckBox)
parent.registerProperty(
"MONAILabel/allowOverlappingSegments",
ctk.ctkBooleanMapper(allowOverlapCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
allowOverlapCheckBox.connect("toggled(bool)", self.onUpdateAllowOverlap)
developerModeCheckBox = qt.QCheckBox()
developerModeCheckBox.checked = False
        developerModeCheckBox.toolTip = "Enable this option to show the advanced Options section (config table etc.)"
groupLayout.addRow("Developer Mode:", developerModeCheckBox)
parent.registerProperty(
"MONAILabel/developerMode",
ctk.ctkBooleanMapper(developerModeCheckBox, "checked", str(qt.SIGNAL("toggled(bool)"))),
"valueAsInt",
str(qt.SIGNAL("valueAsIntChanged(int)")),
)
vBoxLayout.addWidget(groupBox)
vBoxLayout.addStretch(1)
def onUpdateAllowOverlap(self):
if slicer.util.settingsValue("MONAILabel/allowOverlappingSegments", True, converter=slicer.util.toBool):
if slicer.util.settingsValue("MONAILabel/fileExtension", None) != ".seg.nrrd":
slicer.util.warningDisplay(
"Overlapping segmentations are only availabel with the '.seg.nrrd' file extension! Consider changing MONAILabel file extension."
)
class MONAILabelSettingsPanel(ctk.ctkSettingsPanel):
def __init__(self, *args, **kwargs):
ctk.ctkSettingsPanel.__init__(self, *args, **kwargs)
self.ui = _ui_MONAILabelSettingsPanel(self)
class MONAILabelWidget(ScriptedLoadableModuleWidget, VTKObservationMixin):
def __init__(self, parent=None):
ScriptedLoadableModuleWidget.__init__(self, parent)
VTKObservationMixin.__init__(self)
self.logic = None
self._parameterNode = None
self._volumeNode = None
self._segmentNode = None
self._volumeNodes = []
self._updatingGUIFromParameterNode = False
self._scribblesEditorWidget = None
self.info = {}
self.models = OrderedDict()
self.trainers = OrderedDict()
self.config = OrderedDict()
self.current_sample = None
self.samples = {}
self.state = {
"SegmentationModel": "",
"DeepgrowModel": "",
"ScribblesMethod": "",
"CurrentStrategy": "",
"CurrentTrainer": "",
}
self.file_ext = ".nii.gz"
self.dgPositiveFiducialNode = None
self.dgPositiveFiducialNodeObservers = []
self.dgNegativeFiducialNode = None
self.dgNegativeFiducialNodeObservers = []
self.ignoreFiducialNodeAddEvent = False
self.progressBar = None
self.tmpdir = None
self.timer = None
self.scribblesMode = None
self.multi_label = False
def setup(self):
ScriptedLoadableModuleWidget.setup(self)
uiWidget = slicer.util.loadUI(self.resourcePath("UI/MONAILabel.ui"))
self.layout.addWidget(uiWidget)
self.ui = slicer.util.childWidgetVariables(uiWidget)
# "mrmlSceneChanged(vtkMRMLScene*)" signal in is connected to each MRML widget's.
uiWidget.setMRMLScene(slicer.mrmlScene)
self.addObserver(slicer.mrmlScene, slicer.mrmlScene.StartCloseEvent, self.onSceneStartClose)
self.addObserver(slicer.mrmlScene, slicer.mrmlScene.EndCloseEvent, self.onSceneEndClose)
self.addObserver(slicer.mrmlScene, slicer.mrmlScene.NodeAddedEvent, self.onSceneEndImport)
self.tmpdir = slicer.util.tempDirectory("slicer-monai-label")
self.logic = MONAILabelLogic(self.tmpdir)
self.ui.serverComboBox.lineEdit().setPlaceholderText("enter server address or leave empty to use default")
self.ui.fetchServerInfoButton.setIcon(self.icon("refresh-icon.png"))
self.ui.segmentationButton.setIcon(self.icon("segment.png"))
self.ui.nextSampleButton.setIcon(self.icon("segment.png"))
self.ui.saveLabelButton.setIcon(self.icon("save.png"))
self.ui.trainingButton.setIcon(self.icon("training.png"))
self.ui.stopTrainingButton.setIcon(self.icon("stop.png"))
self.ui.uploadImageButton.setIcon(self.icon("upload.svg"))
self.ui.importLabelButton.setIcon(self.icon("download.png"))
self.ui.dgPositiveFiducialPlacementWidget.setMRMLScene(slicer.mrmlScene)
self.ui.dgPositiveFiducialPlacementWidget.placeButton().toolTip = "Select +ve points"
self.ui.dgPositiveFiducialPlacementWidget.buttonsVisible = False
self.ui.dgPositiveFiducialPlacementWidget.placeButton().show()
self.ui.dgPositiveFiducialPlacementWidget.deleteButton().show()
self.ui.dgNegativeFiducialPlacementWidget.setMRMLScene(slicer.mrmlScene)
self.ui.dgNegativeFiducialPlacementWidget.placeButton().toolTip = "Select -ve points"
self.ui.dgNegativeFiducialPlacementWidget.buttonsVisible = False
self.ui.dgNegativeFiducialPlacementWidget.placeButton().show()
self.ui.dgNegativeFiducialPlacementWidget.deleteButton().show()
self.ui.dgUpdateButton.setIcon(self.icon("segment.png"))
self.ui.fetchServerInfoButton.connect("clicked(bool)", self.onClickFetchInfo)
self.ui.serverComboBox.connect("currentIndexChanged(int)", self.onClickFetchInfo)
self.ui.segmentationModelSelector.connect("currentIndexChanged(int)", self.updateParameterNodeFromGUI)
self.ui.segmentationButton.connect("clicked(bool)", self.onClickSegmentation)
self.ui.deepgrowModelSelector.connect("currentIndexChanged(int)", self.updateParameterNodeFromGUI)
self.ui.nextSampleButton.connect("clicked(bool)", self.onNextSampleButton)
self.ui.trainingButton.connect("clicked(bool)", self.onTraining)
self.ui.stopTrainingButton.connect("clicked(bool)", self.onStopTraining)
self.ui.saveLabelButton.connect("clicked(bool)", self.onSaveLabel)
self.ui.uploadImageButton.connect("clicked(bool)", self.onUploadImage)
self.ui.importLabelButton.connect("clicked(bool)", self.onImportLabel)
self.ui.labelComboBox.connect("currentIndexChanged(int)", self.onSelectLabel)
self.ui.dgUpdateButton.connect("clicked(bool)", self.onUpdateDeepgrow)
self.ui.dgUpdateCheckBox.setStyleSheet("padding-left: 10px;")
self.ui.scribblesMethodSelector.connect("currentIndexChanged(int)", self.updateParameterNodeFromGUI)
self.ui.paintScribblesButton.setIcon(self.icon("paint.png"))
self.ui.paintScribblesButton.setToolTip("Paint scribbles for selected scribble layer")
self.ui.eraseScribblesButton.setIcon(self.icon("eraser.png"))
self.ui.eraseScribblesButton.setToolTip("Erase scribbles for selected scribble layer")
self.ui.updateScribblesButton.setIcon(self.icon("segment.png"))
self.ui.updateScribblesButton.setToolTip(
"Update label by sending scribbles to server to apply selected post processing method"
)
self.ui.brushSizeSlider.connect("valueChanged(double)", self.updateBrushSize)
self.ui.brushSizeSlider.setToolTip("Change brush size for scribbles tool")
self.ui.brush3dCheckbox.stateChanged.connect(self.on3dBrushCheckbox)
self.ui.brush3dCheckbox.setToolTip("Use 3D brush to paint/erase in multiple slices in 3D")
self.ui.updateScribblesButton.clicked.connect(self.onUpdateScribbles)
self.ui.paintScribblesButton.clicked.connect(self.onPaintScribbles)
self.ui.eraseScribblesButton.clicked.connect(self.onEraseScribbles)
self.ui.scribblesLabelSelector.connect("currentIndexChanged(int)", self.onSelectScribblesLabel)
self.ui.scribblesLabelSelector.addItem(self.icon("fg_green.png"), "Foreground")
self.ui.scribblesLabelSelector.addItem(self.icon("bg_red.png"), "Background")
self.ui.scribblesLabelSelector.setCurrentIndex(0)
self.ui.scribblesCollapsibleButton.setEnabled(False)
self.ui.scribblesCollapsibleButton.collapsed = True
self.ui.embeddedSegmentEditorWidget.setMRMLScene(slicer.mrmlScene)
self.ui.embeddedSegmentEditorWidget.setSegmentationNodeSelectorVisible(False)
self.ui.embeddedSegmentEditorWidget.setMasterVolumeNodeSelectorVisible(False)
self.initializeParameterNode()
self.updateServerUrlGUIFromSettings()
if slicer.util.settingsValue("MONAILabel/askForUserName", False, converter=slicer.util.toBool):
text = qt.QInputDialog().getText(
self.parent,
"User Name",
"Please enter your name:",
qt.QLineEdit.Normal,
slicer.util.settingsValue("MONAILabel/clientId", None),
)
if text:
settings = qt.QSettings()
settings.setValue("MONAILabel/clientId", text)
def cleanup(self):
self.removeObservers()
shutil.rmtree(self.tmpdir, ignore_errors=True)
def enter(self):
self.initializeParameterNode()
if self._segmentNode:
self.updateGUIFromParameterNode()
def exit(self):
self.removeObserver(self._parameterNode, vtk.vtkCommand.ModifiedEvent, self.updateGUIFromParameterNode)
def onSceneStartClose(self, caller, event):
self.state = {
"SegmentationModel": self.ui.segmentationModelSelector.currentText,
"DeepgrowModel": self.ui.deepgrowModelSelector.currentText,
"ScribblesMethod": self.ui.scribblesMethodSelector.currentText,
"CurrentStrategy": self.ui.strategyBox.currentText,
"CurrentTrainer": self.ui.trainerBox.currentText,
}
self._volumeNode = None
self._segmentNode = None
self._volumeNodes.clear()
self.setParameterNode(None)
self.current_sample = None
self.samples.clear()
self.resetFiducial(
self.ui.dgPositiveFiducialPlacementWidget, self.dgPositiveFiducialNode, self.dgPositiveFiducialNodeObservers
)
self.dgPositiveFiducialNode = None
self.resetFiducial(
self.ui.dgNegativeFiducialPlacementWidget, self.dgNegativeFiducialNode, self.dgNegativeFiducialNodeObservers
)
self.dgNegativeFiducialNode = None
self.onClearScribbles()
def resetFiducial(self, fiducialWidget, fiducialNode, fiducialNodeObservers):
if fiducialWidget.placeModeEnabled:
fiducialWidget.setPlaceModeEnabled(False)
if fiducialNode:
slicer.mrmlScene.RemoveNode(fiducialNode)
self.removeFiducialNodeObservers(fiducialNode, fiducialNodeObservers)
def onSceneEndClose(self, caller, event):
if self.parent.isEntered:
self.initializeParameterNode()
def onSceneEndImport(self, caller, event):
if not self._volumeNode:
self.updateGUIFromParameterNode()
def initializeParameterNode(self):
self.setParameterNode(self.logic.getParameterNode())
if not self._parameterNode.GetNodeReference("InputVolume"):
firstVolumeNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode")
if firstVolumeNode:
self._parameterNode.SetNodeReferenceID("InputVolume", firstVolumeNode.GetID())
def setParameterNode(self, inputParameterNode):
if inputParameterNode:
self.logic.setDefaultParameters(inputParameterNode)
if self._parameterNode is not None:
self.removeObserver(self._parameterNode, vtk.vtkCommand.ModifiedEvent, self.updateGUIFromParameterNode)
self._parameterNode = inputParameterNode
if self._parameterNode is not None:
self.addObserver(self._parameterNode, vtk.vtkCommand.ModifiedEvent, self.updateGUIFromParameterNode)
self.updateGUIFromParameterNode()
def monitorTraining(self):
status = self.isTrainingRunning(check_only=False)
if status and status.get("status") == "RUNNING":
info = self.logic.info()
train_stats = info.get("train_stats")
if not train_stats:
return
train_stats = next(iter(train_stats.values())) if train_stats else train_stats
current = 0 if train_stats.get("total_time") else train_stats.get("epoch", 1)
total = train_stats.get("total_epochs", 1)
percent = max(1, 100 * current / total)
if self.ui.trainingProgressBar.value != percent:
self.ui.trainingProgressBar.setValue(percent)
self.ui.trainingProgressBar.setToolTip(f"{current}/{total} epoch is completed")
dice = train_stats.get("best_metric", 0)
self.updateAccuracyBar(dice)
return
print("Training completed")
self.ui.trainingProgressBar.setValue(100)
self.timer.stop()
self.timer = None
self.ui.trainingProgressBar.setToolTip(f"Training: {status.get('status', 'DONE')}")
self.ui.trainingButton.setEnabled(True)
self.ui.stopTrainingButton.setEnabled(False)
self.fetchInfo()
def updateGUIFromParameterNode(self, caller=None, event=None):
if self._parameterNode is None or self._updatingGUIFromParameterNode:
return
self._updatingGUIFromParameterNode = True
file_ext = slicer.util.settingsValue("MONAILabel/fileExtension", self.file_ext)
self.file_ext = file_ext if file_ext else self.file_ext
self.ui.inputSelector.clear()
for v in self._volumeNodes:
self.ui.inputSelector.addItem(v.GetName())
self.ui.inputSelector.setToolTip(self.current_sample.get("name", "") if self.current_sample else "")
if self._volumeNode:
self.ui.inputSelector.setCurrentIndex(self.ui.inputSelector.findText(self._volumeNode.GetName()))
self.ui.inputSelector.setEnabled(False)
self.ui.uploadImageButton.setEnabled(False)
if self.info and slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode") and self._volumeNode is None:
self._volumeNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode")
self.initSample({"id": self._volumeNode.GetName(), "session": True}, autosegment=False)
self.ui.inputSelector.setEnabled(False)
self.ui.uploadImageButton.setEnabled(self.current_sample and self.current_sample.get("session"))
self.updateSelector(self.ui.segmentationModelSelector, ["segmentation"], "SegmentationModel", 0)
self.updateSelector(self.ui.deepgrowModelSelector, ["deepgrow", "deepedit"], "DeepgrowModel", 0)
self.updateSelector(self.ui.scribblesMethodSelector, ["scribbles"], "ScribblesMethod", 0)
if self.models and [k for k, v in self.models.items() if v["type"] == "segmentation"]:
self.ui.segmentationCollapsibleButton.collapsed = False
if self.models and [k for k, v in self.models.items() if v["type"] in ("deepgrow", "deepedit")]:
self.ui.deepgrowCollapsibleButton.collapsed = False
if self.models and [k for k, v in self.models.items() if v["type"] == "scribbles"]:
self.ui.scribblesCollapsibleButton.collapsed = False
self.ui.labelComboBox.clear()
if self._segmentNode:
segmentation = self._segmentNode.GetSegmentation()
totalSegments = segmentation.GetNumberOfSegments()
segmentIds = [segmentation.GetNthSegmentID(i) for i in range(totalSegments)]
for idx, segmentId in enumerate(segmentIds):
segment = segmentation.GetSegment(segmentId)
label = segment.GetName()
if label in ["foreground_scribbles", "background_scribbles"]:
continue
self.ui.labelComboBox.addItem(label)
else:
for label in self.info.get("labels", {}):
self.ui.labelComboBox.addItem(label)
currentLabel = self._parameterNode.GetParameter("CurrentLabel")
idx = self.ui.labelComboBox.findText(currentLabel) if currentLabel else 0
idx = 0 if idx < 0 < self.ui.labelComboBox.count else idx
self.ui.labelComboBox.setCurrentIndex(idx)
self.ui.appComboBox.clear()
self.ui.appComboBox.addItem(self.info.get("name", ""))
datastore_stats = self.info.get("datastore", {})
current = datastore_stats.get("completed", 0)
total = datastore_stats.get("total", 0)
self.ui.activeLearningProgressBar.setValue(current / max(total, 1) * 100)
self.ui.activeLearningProgressBar.setToolTip(f"{current}/{total} samples are labeled")
train_stats = self.info.get("train_stats", {})
train_stats = next(iter(train_stats.values())) if train_stats else train_stats
dice = train_stats.get("best_metric", 0)
self.updateAccuracyBar(dice)
self.ui.strategyBox.clear()
for strategy in self.info.get("strategies", {}):
self.ui.strategyBox.addItem(strategy)
currentStrategy = self._parameterNode.GetParameter("CurrentStrategy")
currentStrategy = currentStrategy if currentStrategy else self.state["CurrentStrategy"]
self.ui.strategyBox.setCurrentIndex(self.ui.strategyBox.findText(currentStrategy) if currentStrategy else 0)
self.ui.trainerBox.clear()
trainers = self.info.get("trainers", {})
if trainers:
self.ui.trainerBox.addItem("ALL")
for t in trainers:
self.ui.trainerBox.addItem(t)
currentTrainer = self._parameterNode.GetParameter("CurrentTrainer")
currentTrainer = currentTrainer if currentTrainer else self.state["CurrentTrainer"]
self.ui.trainerBox.setCurrentIndex(self.ui.trainerBox.findText(currentTrainer) if currentTrainer else 0)
developer_mode = slicer.util.settingsValue("MONAILabel/developerMode", True, converter=slicer.util.toBool)
self.ui.optionsCollapsibleButton.setVisible(developer_mode)
self.ui.nextSampleButton.setEnabled(self.ui.strategyBox.count)
        is_training_running = bool(self.info and self.isTrainingRunning())
self.ui.trainingButton.setEnabled(self.info and not is_training_running and current)
self.ui.stopTrainingButton.setEnabled(is_training_running)
if is_training_running and self.timer is None:
self.timer = qt.QTimer()
self.timer.setInterval(5000)
self.timer.connect("timeout()", self.monitorTraining)
self.timer.start()
self.ui.segmentationButton.setEnabled(
self.ui.segmentationModelSelector.currentText and self._volumeNode is not None
)
self.ui.saveLabelButton.setEnabled(self._segmentNode is not None)
self.ui.importLabelButton.setEnabled(self._segmentNode is not None)
if self._segmentNode:
if not self.dgPositiveFiducialNode:
self.dgPositiveFiducialNode, self.dgPositiveFiducialNodeObservers = self.createFiducialNode(
"P", self.onDeepGrowFiducialNodeModified, [0.5, 1, 0.5]
)
self.ui.dgPositiveFiducialPlacementWidget.setCurrentNode(self.dgPositiveFiducialNode)
self.ui.dgPositiveFiducialPlacementWidget.setPlaceModeEnabled(False)
if not self.dgNegativeFiducialNode:
self.dgNegativeFiducialNode, self.dgNegativeFiducialNodeObservers = self.createFiducialNode(
"N", self.onDeepGrowFiducialNodeModified, [0.5, 0.5, 1]
)
self.ui.dgNegativeFiducialPlacementWidget.setCurrentNode(self.dgNegativeFiducialNode)
self.ui.dgNegativeFiducialPlacementWidget.setPlaceModeEnabled(False)
self.ui.scribblesCollapsibleButton.setEnabled(self.ui.scribblesMethodSelector.count)
self.ui.scribblesCollapsibleButton.collapsed = False
self.ui.dgPositiveFiducialPlacementWidget.setEnabled(self.ui.deepgrowModelSelector.currentText)
self.ui.dgNegativeFiducialPlacementWidget.setEnabled(self.ui.deepgrowModelSelector.currentText)
self.multi_label = "background" in self.info.get("labels", [])
if self.multi_label:
self.ui.dgLabelBackground.hide()
self.ui.dgNegativeFiducialPlacementWidget.hide()
self.ui.freezeUpdateCheckBox.show()
self.ui.dgLabelForeground.setText("Landmarks:")
else:
self.ui.dgNegativeFiducialPlacementWidget.show()
self.ui.freezeUpdateCheckBox.hide()
self.ui.dgLabelForeground.setText("Foreground:")
self.ui.dgUpdateCheckBox.setEnabled(self.ui.deepgrowModelSelector.currentText and self._segmentNode)
self.ui.dgUpdateButton.setEnabled(self.ui.deepgrowModelSelector.currentText and self._segmentNode)
self.ui.embeddedSegmentEditorWidget.setMRMLSegmentEditorNode(
slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLSegmentEditorNode")
)
self._updatingGUIFromParameterNode = False
def updateParameterNodeFromGUI(self, caller=None, event=None):
if self._parameterNode is None or self._updatingGUIFromParameterNode:
return
wasModified = self._parameterNode.StartModify()
segmentationModelIndex = self.ui.segmentationModelSelector.currentIndex
if segmentationModelIndex >= 0:
segmentationModel = self.ui.segmentationModelSelector.itemText(segmentationModelIndex)
self._parameterNode.SetParameter("SegmentationModel", segmentationModel)
deepgrowModelIndex = self.ui.deepgrowModelSelector.currentIndex
if deepgrowModelIndex >= 0:
deepgrowModel = self.ui.deepgrowModelSelector.itemText(deepgrowModelIndex)
self._parameterNode.SetParameter("DeepgrowModel", deepgrowModel)
scribblesMethodIndex = self.ui.scribblesMethodSelector.currentIndex
if scribblesMethodIndex >= 0:
scribblesMethod = self.ui.scribblesMethodSelector.itemText(scribblesMethodIndex)
self._parameterNode.SetParameter("ScribblesMethod", scribblesMethod)
currentLabelIndex = self.ui.labelComboBox.currentIndex
if currentLabelIndex >= 0:
currentLabel = self.ui.labelComboBox.itemText(currentLabelIndex)
self._parameterNode.SetParameter("CurrentLabel", currentLabel)
currentStrategyIndex = self.ui.strategyBox.currentIndex
if currentStrategyIndex >= 0:
currentStrategy = self.ui.strategyBox.itemText(currentStrategyIndex)
self._parameterNode.SetParameter("CurrentStrategy", currentStrategy)
currentTrainerIndex = self.ui.trainerBox.currentIndex
if currentTrainerIndex >= 0:
currentTrainer = self.ui.trainerBox.itemText(currentTrainerIndex)
self._parameterNode.SetParameter("CurrentTrainer", currentTrainer)
self._parameterNode.EndModify(wasModified)
def updateSelector(self, selector, model_types, param, defaultIndex=0):
wasSelectorBlocked = selector.blockSignals(True)
selector.clear()
for model_name, model in self.models.items():
if model["type"] in model_types:
selector.addItem(model_name)
selector.setItemData(selector.count - 1, model["description"], qt.Qt.ToolTipRole)
model = self._parameterNode.GetParameter(param)
model = model if model else self.state.get(param, "")
modelIndex = selector.findText(model)
modelIndex = defaultIndex if modelIndex < 0 < selector.count else modelIndex
selector.setCurrentIndex(modelIndex)
try:
modelInfo = self.models[model]
selector.setToolTip(modelInfo["description"])
except:
selector.setToolTip("")
selector.blockSignals(wasSelectorBlocked)
def updateConfigTable(self):
table = self.ui.configTable
table.clear()
headers = ["section", "name", "key", "value"]
table.setColumnCount(len(headers))
table.setHorizontalHeaderLabels(headers)
table.setColumnWidth(0, 50)
config = copy.deepcopy(self.info)
infer = config.get("models", {})
train = config.get("trainers", {})
activelearning = config.get("strategies", {})
scoring = config.get("scoring", {})
row_count = 0
config = {"infer": infer, "train": train, "activelearning": activelearning, "scoring": scoring}
for c in config.values():
row_count += sum([len(c[k].get("config", {})) for k in c.keys()])
table.setRowCount(row_count)
n = 0
for section in config:
if not config[section]:
continue
c_section = config[section]
l_section = sum([len(c_section[k].get("config", {})) for k in c_section.keys()])
            if not l_section:
                continue
            table.setSpan(n, 0, l_section, 1)
for name in c_section:
c_name = c_section[name]
l_name = len(c_name.get("config", {}))
                if not l_name:
                    continue
                table.setSpan(n, 1, l_name, 1)
for key, val in c_name.get("config", {}).items():
item = qt.QTableWidgetItem(section)
item.setFlags(item.flags() & ~qt.Qt.ItemIsEditable)
table.setItem(n, 0, item)
item = qt.QTableWidgetItem(name)
table.setItem(n, 1, item)
item.setFlags(item.flags() & ~qt.Qt.ItemIsEditable)
item = qt.QTableWidgetItem(key)
table.setItem(n, 2, item)
item.setFlags(item.flags() & ~qt.Qt.ItemIsEditable)
                    if isinstance(val, (dict, list)):
                        combo = qt.QComboBox()
                        for v in val:
                            combo.addItem(v)
                        combo.setCurrentIndex(0)
table.setCellWidget(n, 3, combo)
elif isinstance(val, bool):
checkbox = qt.QCheckBox()
checkbox.setChecked(val)
table.setCellWidget(n, 3, checkbox)
else:
table.setItem(n, 3, qt.QTableWidgetItem(str(val) if val else ""))
n = n + 1
def updateAccuracyBar(self, dice):
self.ui.accuracyProgressBar.setValue(dice * 100)
css = ["stop: 0 red"]
if dice > 0.5:
css.append(f"stop: {0.5 / dice} orange")
if dice > 0.6:
css.append(f"stop: {0.6 / dice} yellow")
if dice > 0.7:
css.append(f"stop: {0.7 / dice} lightgreen")
if dice > 0.8:
css.append(f"stop: {0.8 / dice} green")
if dice > 0.9:
css.append(f"stop: {0.9 / dice} darkgreen")
self.ui.accuracyProgressBar.setStyleSheet(
"QProgressBar {text-align: center;} "
"QProgressBar::chunk {background-color: "
"qlineargradient(x0: 0, x2: 1, " + ",".join(css) + ")}"
)
self.ui.accuracyProgressBar.setToolTip(f"Accuracy: {dice:.4f}")
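        # Example (hypothetical dice=0.85): stops land at red(0), orange(~0.59), yellow(~0.71),
        # lightgreen(~0.82) and green(~0.94), so the chunk shades from red towards green as the
        # metric clears each 0.1 threshold.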
def getParamsFromConfig(self, filter, filter2=None):
mapping = {"infer": "models", "train": "trainers", "activelearning": "strategies", "scoring": "scoring"}
config = {}
for row in range(self.ui.configTable.rowCount):
section = str(self.ui.configTable.item(row, 0).text())
name = str(self.ui.configTable.item(row, 1).text())
key = str(self.ui.configTable.item(row, 2).text())
value = self.ui.configTable.item(row, 3)
if value is None:
value = self.ui.configTable.cellWidget(row, 3)
value = value.checked if isinstance(value, qt.QCheckBox) else value.currentText
else:
value = str(value.text())
v = self.info.get(mapping.get(section, ""), {}).get(name, {}).get("config", {}).get(key, {})
if isinstance(v, int):
value = int(value) if value else 0
elif isinstance(v, float):
value = float(value) if value else 0.0
if config.get(section) is None:
config[section] = {}
if config[section].get(name) is None:
config[section][name] = {}
config[section][name][key] = value
res = config.get(filter, {})
res = res.get(filter2, {}) if filter2 else res
return res
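    # Example for getParamsFromConfig (hypothetical table contents): a row
    # ("infer", "my_model", "device", "cuda") builds {"infer": {"my_model": {"device": "cuda"}}};
    # getParamsFromConfig("infer", "my_model") then returns just {"device": "cuda"}.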
def onDeepGrowFiducialNodeModified(self, observer, eventid):
logging.debug("Deepgrow Point Event!!")
if self.ignoreFiducialNodeAddEvent:
return
markupsNode = observer
movingMarkupIndex = markupsNode.GetDisplayNode().GetActiveControlPoint()
logging.debug("Markup point added; point ID = {}".format(movingMarkupIndex))
current_point = self.getFiducialPointXYZ(markupsNode, movingMarkupIndex)
if not self.ui.dgUpdateCheckBox.checked:
self.onClickDeepgrow(current_point, skip_infer=True)
return
self.onClickDeepgrow(current_point)
self.ignoreFiducialNodeAddEvent = True
self.onEditFiducialPoints(self.dgPositiveFiducialNode, "MONAILabel.ForegroundPoints")
self.onEditFiducialPoints(self.dgNegativeFiducialNode, "MONAILabel.BackgroundPoints")
self.ignoreFiducialNodeAddEvent = False
def getFiducialPointsXYZ(self, fiducialNode, name):
v = self._volumeNode
RasToIjkMatrix = vtk.vtkMatrix4x4()
v.GetRASToIJKMatrix(RasToIjkMatrix)
point_set = []
n = fiducialNode.GetNumberOfFiducials()
for i in range(n):
coord = [0.0, 0.0, 0.0]
fiducialNode.GetNthFiducialPosition(i, coord)
world = [0, 0, 0, 0]
fiducialNode.GetNthFiducialWorldCoordinates(i, world)
p_Ras = [coord[0], coord[1], coord[2], 1.0]
p_Ijk = RasToIjkMatrix.MultiplyDoublePoint(p_Ras)
p_Ijk = [round(i) for i in p_Ijk]
logging.debug("RAS: {}; WORLD: {}; IJK: {}".format(coord, world, p_Ijk))
point_set.append(p_Ijk[0:3])
logging.info("{} => Current Fiducials-Points: {}".format(name, point_set))
return point_set
def getFiducialPointXYZ(self, fiducialNode, index):
v = self._volumeNode
RasToIjkMatrix = vtk.vtkMatrix4x4()
v.GetRASToIJKMatrix(RasToIjkMatrix)
coord = [0.0, 0.0, 0.0]
fiducialNode.GetNthFiducialPosition(index, coord)
world = [0, 0, 0, 0]
fiducialNode.GetNthFiducialWorldCoordinates(index, world)
p_Ras = [coord[0], coord[1], coord[2], 1.0]
p_Ijk = RasToIjkMatrix.MultiplyDoublePoint(p_Ras)
p_Ijk = [round(i) for i in p_Ijk]
logging.debug("RAS: {}; WORLD: {}; IJK: {}".format(coord, world, p_Ijk))
return p_Ijk[0:3]
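    # Sketch of the RAS->IJK conversion above (hypothetical values): for a 1 mm isotropic
    # volume whose IJK-to-RAS matrix is the identity, the RAS point (12.4, -3.2, 7.9) maps to
    # homogeneous (12.4, -3.2, 7.9, 1.0) and rounds to the voxel index [12, -3, 8].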
def onEditFiducialPoints(self, fiducialNode, tagName):
if fiducialNode is None:
return
fiducialNode.RemoveAllMarkups()
segmentId, segment = self.currentSegment()
if segment and segmentId:
v = self._volumeNode
IjkToRasMatrix = vtk.vtkMatrix4x4()
v.GetIJKToRASMatrix(IjkToRasMatrix)
fPosStr = vtk.mutable("")
segment.GetTag(tagName, fPosStr)
pointset = str(fPosStr)
logging.debug("{} => {} Fiducial points are: {}".format(segmentId, segment.GetName(), pointset))
if fPosStr is not None and len(pointset) > 0:
points = json.loads(pointset)
for p in points:
p_Ijk = [p[0], p[1], p[2], 1.0]
p_Ras = IjkToRasMatrix.MultiplyDoublePoint(p_Ijk)
logging.debug("Add Fiducial: {} => {}".format(p_Ijk, p_Ras))
fiducialNode.AddFiducialFromArray(p_Ras[0:3])
def currentSegment(self):
segmentation = self._segmentNode.GetSegmentation()
segmentId = segmentation.GetSegmentIdBySegmentName(self.ui.labelComboBox.currentText)
segment = segmentation.GetSegment(segmentId)
logging.debug("Current SegmentID: {}; Segment: {}".format(segmentId, segment))
return segmentId, segment
def onSelectLabel(self, caller=None, event=None):
self.updateParameterNodeFromGUI(caller, event)
self.ignoreFiducialNodeAddEvent = True
self.onEditFiducialPoints(self.dgPositiveFiducialNode, "MONAILabel.ForegroundPoints")
self.onEditFiducialPoints(self.dgNegativeFiducialNode, "MONAILabel.BackgroundPoints")
self.ignoreFiducialNodeAddEvent = False
def icon(self, name="MONAILabel.png"):
iconPath = os.path.join(os.path.dirname(__file__), "Resources", "Icons", name)
if os.path.exists(iconPath):
return qt.QIcon(iconPath)
return qt.QIcon()
def updateServerSettings(self):
self.logic.setServer(self.serverUrl())
self.logic.setClientId(slicer.util.settingsValue("MONAILabel/clientId", "user-xyz"))
self.saveServerUrl()
def serverUrl(self):
serverUrl = self.ui.serverComboBox.currentText
if not serverUrl:
serverUrl = "http://127.0.0.1:8000"
return serverUrl.rstrip("/")
def saveServerUrl(self):
self.updateParameterNodeFromGUI()
settings = qt.QSettings()
serverUrl = self.ui.serverComboBox.currentText
settings.setValue("MONAILabel/serverUrl", serverUrl)
serverUrlHistory = settings.value("MONAILabel/serverUrlHistory")
if serverUrlHistory:
serverUrlHistory = serverUrlHistory.split(";")
else:
serverUrlHistory = []
try:
serverUrlHistory.remove(serverUrl)
except ValueError:
pass
serverUrlHistory.insert(0, serverUrl)
        serverUrlHistory = serverUrlHistory[:10]
        settings.setValue("MONAILabel/serverUrlHistory", ";".join(serverUrlHistory))
self.updateServerUrlGUIFromSettings()
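        # The remove/insert/truncate steps above maintain a small most-recently-used list:
        # e.g. (hypothetical) history "b;c" plus new entry "a" becomes "a;b;c", and re-saving
        # "c" reorders it to "c;a;b" (capped at the 10 most recent entries).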
def onClickFetchInfo(self):
self.fetchInfo()
self.updateConfigTable()
def fetchInfo(self, showInfo=False):
if not self.logic:
return
start = time.time()
try:
self.updateServerSettings()
info = self.logic.info()
self.info = info
if self.info.get("config"):
slicer.util.errorDisplay(
"Please upgrade the monai server to latest version",
detailedText=traceback.format_exc(),
)
return
except:
slicer.util.errorDisplay(
"Failed to fetch models from remote server. "
"Make sure server address is correct and <server_uri>/info/ "
"is accessible in browser",
detailedText=traceback.format_exc(),
)
return
self.models.clear()
self.config = info.get("config", {})
model_count = {}
models = info.get("models", {})
for k, v in models.items():
model_type = v.get("type", "segmentation")
model_count[model_type] = model_count.get(model_type, 0) + 1
logging.debug("{} = {}".format(k, model_type))
self.models[k] = v
self.updateGUIFromParameterNode()
msg = ""
msg += "-----------------------------------------------------\t\n"
msg += "Total Models Available: \t" + str(len(models)) + "\t\n"
msg += "-----------------------------------------------------\t\n"
for model_type in model_count.keys():
msg += model_type.capitalize() + " Models: \t" + str(model_count[model_type]) + "\t\n"
msg += "-----------------------------------------------------\t\n"
if showInfo:
qt.QMessageBox.information(slicer.util.mainWindow(), "MONAI Label", msg)
logging.info(msg)
logging.info("Time consumed by fetch info: {0:3.1f}".format(time.time() - start))
def setProgressBarLabelText(self, label):
if not self.progressBar:
self.progressBar = slicer.util.createProgressDialog(windowTitle="Wait...", maximum=100)
self.progressBar.labelText = label
def reportProgress(self, progressPercentage):
if not self.progressBar:
self.progressBar = slicer.util.createProgressDialog(windowTitle="Wait...", maximum=100)
self.progressBar.show()
self.progressBar.activateWindow()
self.progressBar.setValue(progressPercentage)
slicer.app.processEvents()
def onTraining(self):
start = time.time()
status = None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
model = self.ui.trainerBox.currentText
model = model if model and model != "ALL" else None
params = self.getParamsFromConfig("train", model)
status = self.logic.train_start(model, params)
self.ui.trainingProgressBar.setValue(1)
self.ui.trainingProgressBar.setToolTip("Training: STARTED")
time.sleep(1)
self.updateGUIFromParameterNode()
except:
slicer.util.errorDisplay(
"Failed to run training in MONAI Label Server", detailedText=traceback.format_exc()
)
finally:
qt.QApplication.restoreOverrideCursor()
if status:
msg = "ID: {}\nStatus: {}\nStart Time: {}\n".format(
status.get("id"),
status.get("status"),
status.get("start_ts"),
)
logging.info(msg)
logging.info("Time consumed by training: {0:3.1f}".format(time.time() - start))
def onStopTraining(self):
start = time.time()
status = None
if not slicer.util.confirmOkCancelDisplay(
"This will kill/stop current Training task. Are you sure to continue?"
):
return
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
status = self.logic.train_stop()
except:
slicer.util.errorDisplay("Failed to stop Training Task", detailedText=traceback.format_exc())
finally:
qt.QApplication.restoreOverrideCursor()
if status:
msg = "Status: {}\nStart Time: {}\nEnd Time: {}\nResult: {}".format(
status.get("status"),
status.get("start_ts"),
status.get("end_ts"),
status.get("result", status.get("details", [])[-1]),
)
logging.info(msg)
self.updateGUIFromParameterNode()
logging.info("Time consumed by stop training: {0:3.1f}".format(time.time() - start))
def isTrainingRunning(self, check_only=True):
if not self.logic:
return False
self.updateServerSettings()
return self.logic.train_status(check_only)
def onNextSampleButton(self):
if not self.logic:
return
if self._volumeNode or len(slicer.util.getNodesByClass("vtkMRMLScalarVolumeNode")):
if not slicer.util.confirmOkCancelDisplay(
"This will close current scene. Please make sure you have saved your current work.\n"
"Are you sure to continue?"
):
return
self.onClearScribbles()
slicer.mrmlScene.Clear(0)
start = time.time()
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
strategy = self.ui.strategyBox.currentText
if not strategy:
slicer.util.errorDisplay("No Strategy Found/Selected\t")
return
sample = self.logic.next_sample(strategy, self.getParamsFromConfig("activelearning", strategy))
logging.debug(sample)
if not sample.get("id"):
slicer.util.warningDisplay(
"Unlabled Samples/Images Not Found at server. Instead you can load your own image."
)
return
if self.samples.get(sample["id"]) is not None:
self.current_sample = self.samples[sample["id"]]
name = self.current_sample["VolumeNodeName"]
index = self.ui.inputSelector.findText(name)
self.ui.inputSelector.setCurrentIndex(index)
return
logging.info(sample)
image_id = sample["id"]
image_file = sample.get("path")
image_name = sample.get("name", image_id)
node_name = sample.get("PatientID", sample.get("name", image_id))[-20:]
checksum = sample.get("checksum")
local_exists = image_file and os.path.exists(image_file)
logging.info(f"Check if file exists/shared locally: {image_file} => {local_exists}")
if local_exists:
self._volumeNode = slicer.util.loadVolume(image_file)
self._volumeNode.SetName(node_name)
else:
download_uri = f"{self.serverUrl()}/datastore/image?image={quote_plus(image_id)}"
logging.info(download_uri)
sampleDataLogic = SampleData.SampleDataLogic()
self._volumeNode = sampleDataLogic.downloadFromURL(
nodeNames=node_name, fileNames=image_name, uris=download_uri, checksums=checksum
)[0]
self.initSample(sample)
except:
slicer.util.errorDisplay(
"Failed to fetch Sample from MONAI Label Server", detailedText=traceback.format_exc()
)
finally:
qt.QApplication.restoreOverrideCursor()
self.updateGUIFromParameterNode()
logging.info("Time consumed by next_sample: {0:3.1f}".format(time.time() - start))
def initSample(self, sample, autosegment=True):
sample["VolumeNodeName"] = self._volumeNode.GetName()
self.current_sample = sample
self.samples[sample["id"]] = sample
self._volumeNodes.append(self._volumeNode)
self.createSegmentNode()
segmentEditorWidget = slicer.modules.segmenteditor.widgetRepresentation().self().editor
segmentEditorWidget.setSegmentationNode(self._segmentNode)
segmentEditorWidget.setMasterVolumeNode(self._volumeNode)
if slicer.util.settingsValue("MONAILabel/allowOverlappingSegments", False, converter=slicer.util.toBool):
slicer.util.getNodesByClass("vtkMRMLSegmentEditorNode")[0].SetOverwriteMode(2)
if self.info.get("labels"):
self.updateSegmentationMask(None, self.info.get("labels"))
if autosegment and slicer.util.settingsValue(
"MONAILabel/autoRunSegmentationOnNextSample", True, converter=slicer.util.toBool
):
for label in self.info.get("labels", []):
for name, model in self.models.items():
if label in model.get("labels", []):
qt.QApplication.restoreOverrideCursor()
self.ui.segmentationModelSelector.currentText = name
self.onClickSegmentation()
return
def getPermissionForImageDataUpload(self):
return slicer.util.confirmOkCancelDisplay(
"Master volume - without any additional patient information -"
" will be sent to remote data processing server: {0}.\n\n"
"Click 'OK' to proceed with the segmentation.\n"
"Click 'Cancel' to not upload any data and cancel segmentation.\n".format(self.serverUrl()),
dontShowAgainSettingsKey="MONAILabel/showImageDataSendWarning",
)
def onUploadImage(self, init_sample=True, session=False):
volumeNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLScalarVolumeNode")
image_id = volumeNode.GetName()
if not self.getPermissionForImageDataUpload():
return False
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
in_file = tempfile.NamedTemporaryFile(suffix=self.file_ext, dir=self.tmpdir).name
self.reportProgress(5)
start = time.time()
slicer.util.saveNode(volumeNode, in_file)
logging.info("Saved Input Node into {0} in {1:3.1f}s".format(in_file, time.time() - start))
self.reportProgress(30)
if session:
self.current_sample["session_id"] = self.logic.create_session(in_file)["session_id"]
else:
self.logic.upload_image(in_file, image_id)
self.current_sample["session"] = False
self.reportProgress(100)
self._volumeNode = volumeNode
if init_sample:
self.initSample({"id": image_id}, autosegment=False)
qt.QApplication.restoreOverrideCursor()
self.updateGUIFromParameterNode()
return True
except:
self.reportProgress(100)
qt.QApplication.restoreOverrideCursor()
if session:
slicer.util.errorDisplay(
"Server Error:: Session creation Failed\nPlease upgrade to latest monailable version (> 0.2.0)",
detailedText=traceback.format_exc(),
)
else:
slicer.util.errorDisplay("Failed to upload volume to Server", detailedText=traceback.format_exc())
return False
def onImportLabel(self):
if not self.ui.labelPathLineEdit.currentPath or not os.path.exists(self.ui.labelPathLineEdit.currentPath):
slicer.util.warningDisplay("Label File not selected")
return
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateSegmentationMask(self.ui.labelPathLineEdit.currentPath, self.info["labels"])
qt.QApplication.restoreOverrideCursor()
except:
qt.QApplication.restoreOverrideCursor()
slicer.util.errorDisplay("Failed to import label", detailedText=traceback.format_exc())
def onSaveLabel(self):
start = time.time()
labelmapVolumeNode = None
result = None
self.onClearScribbles()
if self.current_sample.get("session"):
if not self.onUploadImage(init_sample=False):
return
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
segmentationNode = self._segmentNode
labelmapVolumeNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLLabelMapVolumeNode")
slicer.modules.segmentations.logic().ExportVisibleSegmentsToLabelmapNode(
segmentationNode, labelmapVolumeNode, self._volumeNode
)
segmentation = segmentationNode.GetSegmentation()
totalSegments = segmentation.GetNumberOfSegments()
segmentIds = [segmentation.GetNthSegmentID(i) for i in range(totalSegments)]
label_info = []
for idx, segmentId in enumerate(segmentIds):
segment = segmentation.GetSegment(segmentId)
if segment.GetName() in ["foreground_scribbles", "background_scribbles"]:
logging.info(f"Removing segment {segmentId}: {segment.GetName()}")
segmentationNode.RemoveSegment(segmentId)
continue
label_info.append({"name": segment.GetName(), "idx": idx + 1})
label_in = tempfile.NamedTemporaryFile(suffix=self.file_ext, dir=self.tmpdir).name
self.reportProgress(5)
if (
slicer.util.settingsValue("MONAILabel/allowOverlappingSegments", True, converter=slicer.util.toBool)
and slicer.util.settingsValue("MONAILabel/fileExtension", self.file_ext) == ".seg.nrrd"
):
slicer.util.saveNode(segmentationNode, label_in)
else:
slicer.util.saveNode(labelmapVolumeNode, label_in)
self.reportProgress(30)
self.updateServerSettings()
result = self.logic.save_label(self.current_sample["id"], label_in, {"label_info": label_info})
self.fetchInfo()
if slicer.util.settingsValue("MONAILabel/autoUpdateModel", True, converter=slicer.util.toBool):
try:
if self.isTrainingRunning(check_only=True):
self.logic.train_stop()
except:
logging.info("Failed to stop training; or already stopped")
self.onTraining()
except:
slicer.util.errorDisplay("Failed to save Label to MONAI Label Server", detailedText=traceback.format_exc())
finally:
qt.QApplication.restoreOverrideCursor()
self.reportProgress(100)
if labelmapVolumeNode:
slicer.mrmlScene.RemoveNode(labelmapVolumeNode)
if result:
slicer.util.infoDisplay(
"Label-Mask saved into MONAI Label Server\t\t", detailedText=json.dumps(result, indent=2)
)
if slicer.util.settingsValue("MONAILabel/autoFetchNextSample", False, converter=slicer.util.toBool):
slicer.mrmlScene.Clear(0)
self.onNextSampleButton()
logging.info("Time consumed by save label: {0:3.1f}".format(time.time() - start))
def getSessionId(self):
session_id = None
if self.current_sample.get("session", False):
session_id = self.current_sample.get("session_id")
if not session_id or not self.logic.get_session(session_id):
self.onUploadImage(init_sample=False, session=True)
session_id = self.current_sample["session_id"]
return session_id
def onClickSegmentation(self):
if not self.current_sample:
return
start = time.time()
result_file = None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
self.updateServerSettings()
model = self.ui.segmentationModelSelector.currentText
image_file = self.current_sample["id"]
params = self.getParamsFromConfig("infer", model)
result_file, params = self.logic.infer(model, image_file, params, session_id=self.getSessionId())
print(f"Result Params for Segmentation: {params}")
labels = (
params.get("label_names") if params and params.get("label_names") else self.models[model].get("labels")
)
if labels and isinstance(labels, dict):
labels = [k for k, _ in sorted(labels.items(), key=lambda item: item[1])]
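            # e.g. (hypothetical) {"spleen": 2, "liver": 1} sorts by label index into
            # ["liver", "spleen"], matching the order of label values in the result labelmap.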
self.updateSegmentationMask(result_file, labels)
except:
slicer.util.errorDisplay(
"Failed to run inference in MONAI Label Server", detailedText=traceback.format_exc()
)
finally:
qt.QApplication.restoreOverrideCursor()
if result_file and os.path.exists(result_file):
os.unlink(result_file)
self.updateGUIFromParameterNode()
logging.info("Time consumed by segmentation: {0:3.1f}".format(time.time() - start))
def onUpdateDeepgrow(self):
self.onClickDeepgrow(None)
def onClickDeepgrow(self, current_point, skip_infer=False):
model = self.ui.deepgrowModelSelector.currentText
if not model:
slicer.util.warningDisplay("Please select a deepgrow model")
return
_, segment = self.currentSegment()
if not segment:
slicer.util.warningDisplay("Please add the required label to run deepgrow")
return
foreground_all = self.getFiducialPointsXYZ(self.dgPositiveFiducialNode, "foreground")
background_all = self.getFiducialPointsXYZ(self.dgNegativeFiducialNode, "background")
segment.SetTag("MONAILabel.ForegroundPoints", json.dumps(foreground_all))
segment.SetTag("MONAILabel.BackgroundPoints", json.dumps(background_all))
if skip_infer:
return
        deepgrow_3d = self.models[model].get("dimension", 3) != 2
start = time.time()
label = segment.GetName()
operationDescription = "Run Deepgrow for segment: {}; model: {}; 3d {}".format(label, model, deepgrow_3d)
logging.debug(operationDescription)
if not current_point:
if not foreground_all and not deepgrow_3d:
slicer.util.warningDisplay(operationDescription + " - points not added")
return
current_point = foreground_all[-1] if foreground_all else background_all[-1] if background_all else None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
sliceIndex = None
if self.multi_label:
params = {}
segmentation = self._segmentNode.GetSegmentation()
for name in self.info.get("labels", []):
points = []
segmentId = segmentation.GetSegmentIdBySegmentName(name)
segment = segmentation.GetSegment(segmentId) if segmentId else None
if segment:
fPosStr = vtk.mutable("")
segment.GetTag("MONAILabel.ForegroundPoints", fPosStr)
pointset = str(fPosStr)
print("{} => {} Fiducial points are: {}".format(segmentId, name, pointset))
if fPosStr is not None and len(pointset) > 0:
points = json.loads(pointset)
params[name] = points
params["label"] = label
labels = None
else:
sliceIndex = current_point[2] if current_point else None
logging.debug("Slice Index: {}".format(sliceIndex))
if deepgrow_3d or not sliceIndex:
foreground = foreground_all
background = background_all
else:
foreground = [x for x in foreground_all if x[2] == sliceIndex]
background = [x for x in background_all if x[2] == sliceIndex]
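                    # 2D deepgrow (hypothetical example): with sliceIndex 42, a click at
                    # [100, 80, 42] is kept while [100, 80, 41] on a neighbouring slice is dropped.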
logging.debug("Foreground: {}".format(foreground))
logging.debug("Background: {}".format(background))
logging.debug("Current point: {}".format(current_point))
params = {
"label": label,
"foreground": foreground,
"background": background,
}
labels = [label]
params["label"] = label
params.update(self.getParamsFromConfig("infer", model))
print(f"Request Params for Deepgrow/Deepedit: {params}")
image_file = self.current_sample["id"]
result_file, params = self.logic.infer(model, image_file, params, session_id=self.getSessionId())
print(f"Result Params for Deepgrow/Deepedit: {params}")
if labels is None:
labels = (
params.get("label_names")
if params and params.get("label_names")
else self.models[model].get("labels")
)
if labels and isinstance(labels, dict):
labels = [k for k, _ in sorted(labels.items(), key=lambda item: item[1])]
freeze = label if self.ui.freezeUpdateCheckBox.checked else None
self.updateSegmentationMask(result_file, labels, None if deepgrow_3d else sliceIndex, freeze=freeze)
        except Exception:
logging.exception("Unknown Exception")
slicer.util.errorDisplay(operationDescription + " - unexpected error.", detailedText=traceback.format_exc())
finally:
qt.QApplication.restoreOverrideCursor()
self.updateGUIFromParameterNode()
logging.info("Time consumed by Deepgrow: {0:3.1f}".format(time.time() - start))
def createCursor(self, widget):
return slicer.util.mainWindow().cursor
def createSegmentNode(self):
if self._volumeNode is None:
return
if self._segmentNode is None:
name = "segmentation_" + self._volumeNode.GetName()
self._segmentNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLSegmentationNode")
self._segmentNode.SetReferenceImageGeometryParameterFromVolumeNode(self._volumeNode)
self._segmentNode.SetName(name)
def getLabelColor(self, name):
color = GenericAnatomyColors.get(name.lower())
return [c / 255.0 for c in color] if color else None
def updateSegmentationMask(self, in_file, labels, sliceIndex=None, freeze=None):
start = time.time()
logging.debug("Update Segmentation Mask from: {}".format(in_file))
if in_file and not os.path.exists(in_file):
return False
segmentationNode = self._segmentNode
segmentation = segmentationNode.GetSegmentation()
if in_file is None:
for label in labels:
if not segmentation.GetSegmentIdBySegmentName(label):
segmentation.AddEmptySegment(label, label, self.getLabelColor(label))
return True
labels = [l for l in labels if l != "background"]
print(f"Update Segmentation Mask using Labels: {labels}")
labelImage = sitk.ReadImage(in_file)
labelmapVolumeNode = sitkUtils.PushVolumeToSlicer(labelImage, None, className="vtkMRMLLabelMapVolumeNode")
existing_label_ids = {}
        for label in labels:
            segment_id = segmentation.GetSegmentIdBySegmentName(label)
            if segment_id:
                existing_label_ids[label] = segment_id
freeze = [freeze] if freeze and isinstance(freeze, str) else freeze
print(f"Import only Freezed label: {freeze}")
numberOfExistingSegments = segmentation.GetNumberOfSegments()
slicer.modules.segmentations.logic().ImportLabelmapToSegmentationNode(labelmapVolumeNode, segmentationNode)
slicer.mrmlScene.RemoveNode(labelmapVolumeNode)
numberOfAddedSegments = segmentation.GetNumberOfSegments() - numberOfExistingSegments
logging.debug("Adding {} segments".format(numberOfAddedSegments))
addedSegmentIds = [
segmentation.GetNthSegmentID(numberOfExistingSegments + i) for i in range(numberOfAddedSegments)
]
for i, segmentId in enumerate(addedSegmentIds):
segment = segmentation.GetSegment(segmentId)
print("Setting new segmentation with id: {} => {}".format(segmentId, segment.GetName()))
label = labels[i] if i < len(labels) else "unknown {}".format(i)
if freeze and label not in freeze:
print(f"Discard label update for: {label}")
elif label in existing_label_ids:
segmentEditorWidget = slicer.modules.segmenteditor.widgetRepresentation().self().editor
segmentEditorWidget.setSegmentationNode(segmentationNode)
segmentEditorWidget.setMasterVolumeNode(self._volumeNode)
segmentEditorWidget.setCurrentSegmentID(existing_label_ids[label])
effect = segmentEditorWidget.effectByName("Logical operators")
labelmap = slicer.vtkOrientedImageData()
segmentationNode.GetBinaryLabelmapRepresentation(segmentId, labelmap)
if sliceIndex:
selectedSegmentLabelmap = effect.selectedSegmentLabelmap()
dims = selectedSegmentLabelmap.GetDimensions()
count = 0
for x in range(dims[0]):
for y in range(dims[1]):
if selectedSegmentLabelmap.GetScalarComponentAsDouble(x, y, sliceIndex, 0):
count = count + 1
selectedSegmentLabelmap.SetScalarComponentFromDouble(x, y, sliceIndex, 0, 0)
logging.debug("Total Non Zero: {}".format(count))
if count:
effect.modifySelectedSegmentByLabelmap(
selectedSegmentLabelmap, slicer.qSlicerSegmentEditorAbstractEffect.ModificationModeSet
)
effect.modifySelectedSegmentByLabelmap(
labelmap, slicer.qSlicerSegmentEditorAbstractEffect.ModificationModeAdd
)
else:
bypassMask = True
effect.modifySelectedSegmentByLabelmap(
labelmap, slicer.qSlicerSegmentEditorAbstractEffect.ModificationModeSet, bypassMask
)
segmentationNode.RemoveSegment(segmentId)
self.showSegmentationsIn3D()
logging.info("Time consumed by updateSegmentationMask: {0:3.1f}".format(time.time() - start))
return True
def showSegmentationsIn3D(self):
if self._segmentNode:
self._segmentNode.CreateClosedSurfaceRepresentation()
view = slicer.app.layoutManager().threeDWidget(0).threeDView()
view.resetFocalPoint()
def updateServerUrlGUIFromSettings(self):
settings = qt.QSettings()
serverUrlHistory = settings.value("MONAILabel/serverUrlHistory")
wasBlocked = self.ui.serverComboBox.blockSignals(True)
self.ui.serverComboBox.clear()
if serverUrlHistory:
self.ui.serverComboBox.addItems(serverUrlHistory.split(";"))
self.ui.serverComboBox.setCurrentText(settings.value("MONAILabel/serverUrl"))
self.ui.serverComboBox.blockSignals(wasBlocked)
def createFiducialNode(self, name, onMarkupNodeModified, color):
displayNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsDisplayNode")
displayNode.SetTextScale(0)
displayNode.SetSelectedColor(color)
fiducialNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLMarkupsFiducialNode")
fiducialNode.SetName(name)
fiducialNode.SetAndObserveDisplayNodeID(displayNode.GetID())
        fiducialNodeObservers = self.addFiducialNodeObserver(fiducialNode, onMarkupNodeModified)
return fiducialNode, fiducialNodeObservers
def removeFiducialNodeObservers(self, fiducialNode, fiducialNodeObservers):
if fiducialNode and fiducialNodeObservers:
for observer in fiducialNodeObservers:
fiducialNode.RemoveObserver(observer)
def addFiducialNodeObserver(self, fiducialNode, onMarkupNodeModified):
fiducialNodeObservers = []
if fiducialNode:
eventIds = [slicer.vtkMRMLMarkupsNode.PointPositionDefinedEvent]
for eventId in eventIds:
fiducialNodeObservers.append(fiducialNode.AddObserver(eventId, onMarkupNodeModified))
return fiducialNodeObservers
def scribblesLayersPresent(self):
scribbles_exist = False
if self._segmentNode is not None:
segmentationNode = self._segmentNode
segmentation = segmentationNode.GetSegmentation()
numSegments = segmentation.GetNumberOfSegments()
segmentIds = [segmentation.GetNthSegmentID(i) for i in range(numSegments)]
            scribbles_exist = any("scribbles" in sid for sid in segmentIds)
return scribbles_exist
def onStartScribbling(self):
if not self._segmentNode:
return
logging.debug("Scribbles start event")
if (not self.scribblesLayersPresent()) and (self._scribblesEditorWidget is None):
self._segmentNode.GetSegmentation().AddEmptySegment(
"background_scribbles", "background_scribbles", [1.0, 0.0, 0.0]
)
self._segmentNode.GetSegmentation().AddEmptySegment(
"foreground_scribbles", "foreground_scribbles", [0.0, 1.0, 0.0]
)
segmentationDisplayNode = self._segmentNode.GetDisplayNode()
opacity = 0.2
segmentationDisplayNode.SetSegmentOpacity2DFill("background_scribbles", opacity)
segmentationDisplayNode.SetSegmentOpacity2DOutline("background_scribbles", opacity)
segmentationDisplayNode.SetSegmentOpacity2DFill("foreground_scribbles", opacity)
segmentationDisplayNode.SetSegmentOpacity2DOutline("foreground_scribbles", opacity)
self._scribblesEditorWidget = slicer.qMRMLSegmentEditorWidget()
self._scribblesEditorWidget.setMRMLScene(slicer.mrmlScene)
segmentEditorNode = slicer.vtkMRMLSegmentEditorNode()
segmentEditorNode.SetOverwriteMode(slicer.vtkMRMLSegmentEditorNode.OverwriteNone)
slicer.mrmlScene.AddNode(segmentEditorNode)
self._scribblesEditorWidget.setMRMLSegmentEditorNode(segmentEditorNode)
self._scribblesEditorWidget.setSegmentationNode(self._segmentNode)
self._scribblesEditorWidget.setMasterVolumeNode(self._volumeNode)
def onUpdateScribbles(self):
logging.info("Scribbles update event")
scribblesMethod = self.ui.scribblesMethodSelector.currentText
scribbles_in = None
result_file = None
try:
qt.QApplication.setOverrideCursor(qt.Qt.WaitCursor)
segmentationNode = self._segmentNode
labelmapVolumeNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLLabelMapVolumeNode")
slicer.modules.segmentations.logic().ExportVisibleSegmentsToLabelmapNode(
segmentationNode, labelmapVolumeNode, self._volumeNode
)
scribbles_in = tempfile.NamedTemporaryFile(suffix=self.file_ext, dir=self.tmpdir).name
self.reportProgress(5)
slicer.util.saveNode(labelmapVolumeNode, scribbles_in)
self.reportProgress(30)
self.updateServerSettings()
self.reportProgress(60)
roiNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLAnnotationROINode")
            if roiNode is None:
                roiNode = slicer.mrmlScene.GetFirstNodeByClass("vtkMRMLMarkupsROINode")
selected_roi = self.getROIPointsXYZ(roiNode)
params = self.getParamsFromConfig("infer", scribblesMethod)
params.update({"roi": selected_roi})
image_file = self.current_sample["id"]
result_file, params = self.logic.infer(
scribblesMethod, image_file, params, scribbles_in, session_id=self.getSessionId()
)
self.reportProgress(90)
_, segment = self.currentSegment()
label = segment.GetName()
self.updateSegmentationMask(result_file, [label])
        except Exception:
slicer.util.errorDisplay(
"Failed to post process label on MONAI Label Server using {}".format(scribblesMethod),
detailedText=traceback.format_exc(),
)
finally:
qt.QApplication.restoreOverrideCursor()
self.reportProgress(100)
if scribbles_in and os.path.exists(scribbles_in):
os.unlink(scribbles_in)
if result_file and os.path.exists(result_file):
os.unlink(result_file)
def getROIPointsXYZ(self, roiNode):
        if roiNode is None:
return []
v = self._volumeNode
RasToIjkMatrix = vtk.vtkMatrix4x4()
v.GetRASToIJKMatrix(RasToIjkMatrix)
roi_points_ras = [0.0] * 6
if roiNode.__class__.__name__ == "vtkMRMLMarkupsROINode":
print(roiNode.__class__.__name__)
center = [0] * 3
roiNode.GetCenter(center)
roi_points_ras = [(x - s / 2, x + s / 2) for x, s in zip(center, roiNode.GetSize())]
roi_points_ras = [item for sublist in roi_points_ras for item in sublist]
elif roiNode.__class__.__name__ == "vtkMRMLAnnotationROINode":
print(roiNode.__class__.__name__)
roiNode.GetBounds(roi_points_ras)
else:
return []
min_points_ras = [roi_points_ras[0], roi_points_ras[2], roi_points_ras[4], 1.0]
        max_points_ras = [roi_points_ras[1], roi_points_ras[3], roi_points_ras[5], 1.0]
min_points_ijk = RasToIjkMatrix.MultiplyDoublePoint(min_points_ras)
max_points_ijk = RasToIjkMatrix.MultiplyDoublePoint(max_points_ras)
min_points_ijk = [round(i) for i in min_points_ijk]
max_points_ijk = [round(i) for i in max_points_ijk]
roi_points_ijk = [val for pair in zip(min_points_ijk[0:3], max_points_ijk[0:3]) for val in pair]
logging.debug("RAS: {}; IJK: {}".format(roi_points_ras, roi_points_ijk))
return roi_points_ijk
def onClearScribblesSegmentNodes(self):
if not self._segmentNode:
return
segmentation = self._segmentNode
num_segments = segmentation.GetSegmentation().GetNumberOfSegments()
for i in range(num_segments):
segmentId = segmentation.GetSegmentation().GetNthSegmentID(i)
if "scribbles" in segmentId:
logging.info("clearning {}".format(segmentId))
labelMapRep = slicer.vtkOrientedImageData()
segmentation.GetBinaryLabelmapRepresentation(segmentId, labelMapRep)
vtkSegmentationCore.vtkOrientedImageDataResample.FillImage(labelMapRep, 0, labelMapRep.GetExtent())
slicer.vtkSlicerSegmentationsModuleLogic.SetBinaryLabelmapToSegment(
labelMapRep, segmentation, segmentId, slicer.vtkSlicerSegmentationsModuleLogic.MODE_REPLACE
)
def onClearScribbles(self):
self.scribblesMode = None
if self._scribblesEditorWidget:
widget = self._scribblesEditorWidget
del widget
self._scribblesEditorWidget = None
self.onClearScribblesSegmentNodes()
self.ui.scribblesCollapsibleButton.collapsed = True
self.ui.paintScribblesButton.setChecked(False)
self.ui.eraseScribblesButton.setChecked(False)
self.ui.scribblesLabelSelector.setCurrentIndex(0)
def checkAndInitialiseScribbles(self):
if not self._segmentNode:
return
if self._scribblesEditorWidget is None:
self.onStartScribbling()
if self.scribblesMode is None:
self.changeScribblesMode(tool="Paint", layer="foreground_scribbles")
self.updateScribToolLayerFromMode()
def updateScribToolLayerFromMode(self):
if not self._segmentNode:
return
logging.info("Scribbles mode {} ".format(self.scribblesMode))
self.checkAndInitialiseScribbles()
tool, layer = self.getToolAndLayerFromScribblesMode()
if self._scribblesEditorWidget:
self._scribblesEditorWidget.setActiveEffectByName(tool)
self._scribblesEditorWidget.setCurrentSegmentID(layer)
if tool in ("Paint", "Erase"):
is3dbrush = self.ui.brush3dCheckbox.checkState()
self.on3dBrushCheckbox(state=is3dbrush)
brushSize = self.ui.brushSizeSlider.value
self.updateBrushSize(value=brushSize)
def getToolAndLayerFromScribblesMode(self):
if self.scribblesMode is not None:
return self.scribblesMode.split("+")
else:
return "Paint", "foreground_scribbles"
def changeScribblesMode(self, tool=None, layer=None):
ctool, clayer = self.getToolAndLayerFromScribblesMode()
        ctool = tool if tool is not None else ctool
        clayer = layer if layer is not None else clayer
self.scribblesMode = "+".join([ctool, clayer])
def onPaintScribbles(self):
if not self._segmentNode:
return
if self.ui.eraseScribblesButton.checked:
self.ui.eraseScribblesButton.setChecked(False)
self.changeScribblesMode(tool="Paint" if self.ui.paintScribblesButton.checked else "None")
self.updateScribToolLayerFromMode()
def onEraseScribbles(self):
if not self._segmentNode:
return
if self.ui.paintScribblesButton.checked:
self.ui.paintScribblesButton.setChecked(False)
self.changeScribblesMode(tool="Erase" if self.ui.eraseScribblesButton.checked else "None")
self.updateScribToolLayerFromMode()
def onSelectScribblesLabel(self):
if not self._segmentNode:
return
index = self.ui.scribblesLabelSelector.currentIndex
index = 0 if index < 0 else index
selected = self.ui.scribblesLabelSelector.itemText(index)
layer = "foreground_scribbles" if selected == "Foreground" else "background_scribbles"
self.changeScribblesMode(layer=layer)
self.updateScribToolLayerFromMode()
def on3dBrushCheckbox(self, state):
logging.info("3D brush update {}".format(state))
self.checkAndInitialiseScribbles()
effect = self._scribblesEditorWidget.activeEffect()
effect.setParameter("BrushSphere", state)
def updateBrushSize(self, value):
logging.info("brush size update {}".format(value))
if self.ui.paintScribblesButton.checked or self.ui.eraseScribblesButton.checked:
self.checkAndInitialiseScribbles()
effect = self._scribblesEditorWidget.activeEffect()
effect.setParameter("BrushAbsoluteDiameter", value)
class MONAILabelLogic(ScriptedLoadableModuleLogic):
def __init__(self, tmpdir=None, server_url=None, progress_callback=None, client_id=None):
ScriptedLoadableModuleLogic.__init__(self)
self.server_url = server_url
self.tmpdir = slicer.util.tempDirectory("slicer-monai-label") if tmpdir is None else tmpdir
self.client_id = client_id
self.volumeToSessions = dict()
self.progress_callback = progress_callback
def setDefaultParameters(self, parameterNode):
if not parameterNode.GetParameter("SegmentationModel"):
parameterNode.SetParameter("SegmentationModel", "")
if not parameterNode.GetParameter("DeepgrowModel"):
parameterNode.SetParameter("DeepgrowModel", "")
if not parameterNode.GetParameter("ScribblesMethod"):
parameterNode.SetParameter("ScribblesMethod", "")
def __del__(self):
shutil.rmtree(self.tmpdir, ignore_errors=True)
def setServer(self, server_url=None):
self.server_url = server_url if server_url else "http://127.0.0.1:8000"
def setClientId(self, client_id):
self.client_id = client_id if client_id else "user-xyz"
def setProgressCallback(self, progress_callback=None):
self.progress_callback = progress_callback
def reportProgress(self, progress):
if self.progress_callback:
self.progress_callback(progress)
def info(self):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).info()
    def next_sample(self, strategy, params=None):
        return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).next_sample(strategy, params or {})
def create_session(self, image_in):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).create_session(image_in)
def get_session(self, session_id):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).get_session(session_id)
def remove_session(self, session_id):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).remove_session(session_id)
def upload_image(self, image_in, image_id=None):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).upload_image(image_in, image_id)
def save_label(self, image_in, label_in, params):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).save_label(
image_in, label_in, params=params
)
    def infer(self, model, image_in, params=None, label_in=None, file=None, session_id=None):
        logging.debug("Preparing input data for segmentation")
        self.reportProgress(0)
        client = MONAILabelClient(self.server_url, self.tmpdir, self.client_id)
        result_file, params = client.infer(model, image_in, params or {}, label_in, file, session_id)
logging.debug(f"Image Response: {result_file}")
logging.debug(f"JSON Response: {params}")
self.reportProgress(100)
return result_file, params
    def train_start(self, model=None, params=None):
        return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).train_start(model, params or {})
def train_status(self, check_if_running):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).train_status(check_if_running)
def train_stop(self):
return MONAILabelClient(self.server_url, self.tmpdir, self.client_id).train_stop()
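# A minimal usage sketch (added for illustration; the server URL, model name and
# image id below are assumptions, not part of the original module):
#
#     logic = MONAILabelLogic(server_url="http://127.0.0.1:8000")
#     info = logic.info()  # query app/model metadata from the server
#     result_file, params = logic.infer(model="segmentation", image_in="spleen_10")
#
# Each call constructs a fresh MONAILabelClient, so the logic object itself stays
# stateless apart from the server URL, client id and temp directory.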
class MONAILabelTest(ScriptedLoadableModuleTest):
def setUp(self):
slicer.mrmlScene.Clear()
def runTest(self):
self.setUp()
self.test_MONAILabel1()
def test_MONAILabel1(self):
self.delayDisplay("Test passed")
f702268e91a55f101ad2a75a044d59fb343445d7 | 1,636 | py | Python | project_euler/017.letter_counts.py | davemungo/various | ed7c17f8b75a27fc59b0a5cad6125d64d00cd3ce | ["MIT"] | stars: 1 (2020-01-19) | issues: null | forks: 1 (2021-07-02)
'''
Problem 017
If the numbers 1 to 5 are written out in words: one, two, three, four, five,
then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
If all the numbers from 1 to 1000 (one thousand) inclusive were written out in
words, how many letters would be used?
NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and
forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20
letters. The use of "and" when writing out numbers is in compliance with
British usage.
Solution: Copyright 2017 Dave Cuthbert, MIT License
'''
ones_names = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven',
'eight', 'nine', 'ten', 'eleven', 'twelve', 'thirteen',
'fourteen', 'fifteen', 'sixteen', 'seventeen', 'eighteen',
'nineteen']
tens_names = ['zero', 'ten', 'twenty', 'thirty', 'forty', 'fifty', 'sixty',
'seventy', 'eighty', 'ninety']
def build_words(n):
    if n == 1000:
        return 'onethousand'
    elif n > 99:
        hundreds = ones_names[n // 100] + 'hundred'
        n = n % 100
        if n == 0:
            return hundreds
        return hundreds + 'and' + build_words(n)
    elif n > 19:
        tens = tens_names[n // 10]
        n = n % 10
        if n == 0:
            return tens
        return tens + ones_names[n]
    else:
        return ones_names[n]
def solve_problem():
    total_letters = 0
    for n in range(1, 1001):
        total_letters += len(build_words(n))
    return total_letters
if __name__ == "__main__":
    print(solve_problem())
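# A quick sanity check (added, not part of the original solution): the problem
# statement's own examples are reproduced by build_words().
assert len(build_words(342)) == 23  # 'threehundredandfortytwo'
assert len(build_words(115)) == 20  # 'onehundredandfifteen'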
| avg_line_length 27.266667 | max_line_length 79 | alphanum_fraction 0.582518 |
f70226d0a47419dc066e849b10f0909332f1ddd8 | 2,078 | py | Python | packages/python/plotly/plotly/validators/scattergeo/marker/gradient/__init__.py | sgn/plotly.py | 587075c9f5a57a3dd60b03b2d47d925fbbb9b9b6 | ["MIT"] | stars: 3 (2020-02-04 to 2020-11-17) | issues: 12 (2020-06-06 to 2022-03-12) | forks: 17 (2019-11-21 to 2019-11-21)
import _plotly_utils.basevalidators
class TypesrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="typesrc", parent_name="scattergeo.marker.gradient", **kwargs
):
super(TypesrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
class TypeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="type", parent_name="scattergeo.marker.gradient", **kwargs
):
super(TypeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["radial", "horizontal", "vertical", "none"]),
**kwargs
)
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="colorsrc", parent_name="scattergeo.marker.gradient", **kwargs
):
super(ColorsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="scattergeo.marker.gradient", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
**kwargs
)
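# A brief illustration (added; not part of the generated module) of how these
# validators behave, assuming the standard _plotly_utils validator API:
#
#     v = TypeValidator()
#     v.validate_coerce("radial")    # accepted, returns "radial"
#     v.validate_coerce("diagonal")  # rejected with a ValueError (not in `values`)
#
# The kwargs.pop(...) pattern above lets generated subclasses override defaults
# such as edit_type while forwarding everything else to the base validator.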
| avg_line_length 31.484848 | max_line_length 88 | alphanum_fraction 0.632339 |
f7022769f7e9d35089cc3e3827278c2a72c45ec8 | 1,655 | py | Python | src/yellowdog_client/model/aws_instance.py | yellowdog/yellowdog-sdk-python-public | da69a7d6e45c92933e34fefcaef8b5d98dcd6036 | ["Apache-2.0"] | stars: null | issues: null | forks: null
from dataclasses import dataclass, field
from datetime import datetime
from typing import Optional
from .cloud_provider import CloudProvider
from .instance import Instance
from .instance_status import InstanceStatus
@dataclass
class AwsInstance(Instance):
"""Extends Instance to add fields specific to the AWS compute sources."""
type: str = field(default="co.yellowdog.platform.model.AwsInstance", init=False)
id: Optional[str] = field(default=None, init=False)
"""The ID of this instance."""
instanceLifecycle: Optional[str] = None
"""The AWS EC2 instance lifecycle value for this instance."""
createdTime: Optional[datetime] = None
"""The date and time when this instance was first created."""
sourceId: Optional[str] = None
"""The ID of the compute source from which this instance was provisioned."""
imageId: Optional[str] = None
"""The machine image ID used for this instance."""
instanceType: Optional[str] = None
"""The machine type of this instance."""
provider: Optional[CloudProvider] = None
"""The cloud provider that supplies this instance."""
region: Optional[str] = None
"""The region where this instance is provisioned."""
status: Optional[InstanceStatus] = None
"""The status of this instance."""
subregion: Optional[str] = None
"""The subregion where this instance is provisioned."""
privateIpAddress: Optional[str] = None
"""The private IP address of this instance."""
publicIpAddress: Optional[str] = None
"""The public IP address of this instance."""
hostname: Optional[str] = None
"""The hostname of this instance."""
| avg_line_length 41.375 | max_line_length 84 | alphanum_fraction 0.708157 |
f7022774264758dda80e7f7df5daa6745e8fb6c6 | 8,127 | py | Python | zfit/models/basic.py | schmitse/zfit | d42588f1d43532a34a81f31e602d2471780690e2 | ["BSD-3-Clause"] | stars: null | issues: null | forks: null
"""Basic PDFs are provided here.
Gauss, exponential... that can be used together with Functors to build larger models.
"""
# Copyright (c) 2021 zfit
import contextlib
import numpy as np
import tensorflow as tf
import zfit.z.numpy as znp
from zfit import z
from ..core.basepdf import BasePDF
from ..core.space import ANY_LOWER, ANY_UPPER, Space
from ..util import ztyping
from ..util.exception import (AnalyticIntegralNotImplemented,
BreakingAPIChangeError)
from ..util.warnings import warn_advanced_feature
class Exponential(BasePDF):
_N_OBS = 1
def __init__(self, lam=None, obs: ztyping.ObsTypeInput = None, name: str = "Exponential", lambda_=None):
"""Exponential function exp(lambda * x).
The function is normalized over a finite range and therefore a pdf. So the PDF is precisely
defined as :math:`\\frac{ e^{\\lambda \\cdot x}}{ \\int_{lower}^{upper} e^{\\lambda \\cdot x} dx}`
Args:
lam: Accessed as parameter "lambda".
obs: The :py:class:`~zfit.Space` the pdf is defined in.
name: Name of the pdf.
            lambda_: Deprecated alias for ``lam``.
"""
if lambda_ is not None:
if lam is None:
lam = lambda_
else:
raise BreakingAPIChangeError("The 'lambda' parameter has been renamed from 'lambda_' to 'lam'.")
params = {'lambda': lam}
super().__init__(obs, name=name, params=params)
self._calc_numerics_data_shift = lambda: z.constant(0.)
if not self.space.has_limits:
warn_advanced_feature("Exponential pdf relies on a shift of the input towards 0 to keep the numerical "
f"stability high. The space {self.space} does not have limits set and no shift"
f" will occure. To set it manually, set _numerics_data_shift to the expected"
f" average values given to this function _in case you want things to be set_."
f"If this sounds unfamiliar, regard this as an error and use a normalization range.",
identifier='exp_shift')
self._set_numerics_data_shift(self.space)
def _unnormalized_pdf(self, x):
lambda_ = self.params['lambda']
x = x.unstack_x()
probs = znp.exp(lambda_ * (self._shift_x(x)))
        tf.debugging.assert_all_finite(probs, f"Exponential PDF {self} has invalid values. This is likely caused"
                                       f" by numerical problems: if the exponential is too steep, this will"
                                       f" yield NaNs or infs. Make sure that your lambda is small enough and/or"
                                       f" the initial space is in the same"
                                       f" region as your data (and norm_range, if explicitly set differently)."
                                       f" If this issue still persists, please open an issue on GitHub:"
                                       f" https://github.com/zfit/zfit")
return probs # Don't use exp! will overflow.
def _shift_x(self, x):
return x - self._calc_numerics_data_shift()
@contextlib.contextmanager
def _set_numerics_data_shift(self, limits):
if limits:
def calc_numerics_data_shift():
lower, upper = [], []
for limit in limits:
low, up = limit.rect_limits
lower.append(z.convert_to_tensor(low[:, 0]))
upper.append(z.convert_to_tensor(up[:, 0]))
lower = z.convert_to_tensor(lower)
upper = z.convert_to_tensor(upper)
lower_val = znp.min(lower, axis=0)
upper_val = znp.max(upper, axis=0)
return (upper_val + lower_val) / 2
old_value = self._calc_numerics_data_shift
self._calc_numerics_data_shift = calc_numerics_data_shift
yield
self._calc_numerics_data_shift = old_value
else:
yield
    # All hooks are needed to set the right shift when "entering" the pdf. The norm range is taken where both are
    # available. No special care needs to be taken for sampling (it samples from the correct region, the limits, and
    # uses the predictions by the `unnormalized_prob` -> that is shifted correctly).
def _single_hook_integrate(self, limits, norm_range, x):
with self._set_numerics_data_shift(norm_range):
return super()._single_hook_integrate(limits, norm_range, x=x)
def _single_hook_analytic_integrate(self, limits, norm_range):
with self._set_numerics_data_shift(limits=norm_range):
return super()._single_hook_analytic_integrate(limits, norm_range)
def _single_hook_numeric_integrate(self, limits, norm_range):
with self._set_numerics_data_shift(limits=norm_range):
return super()._single_hook_numeric_integrate(limits, norm_range)
def _single_hook_partial_integrate(self, x, limits, norm_range):
with self._set_numerics_data_shift(limits=norm_range):
return super()._single_hook_partial_integrate(x, limits, norm_range)
def _single_hook_partial_analytic_integrate(self, x, limits, norm_range):
with self._set_numerics_data_shift(limits=norm_range):
return super()._single_hook_partial_analytic_integrate(x, limits, norm_range)
def _single_hook_partial_numeric_integrate(self, x, limits, norm_range):
with self._set_numerics_data_shift(limits=norm_range):
return super()._single_hook_partial_numeric_integrate(x, limits, norm_range)
# def _single_hook_normalization(self, limits):
# with self._set_numerics_data_shift(limits=limits):
# return super()._single_hook_normalization(limits)
#
# # TODO: remove component_norm_range? But needed for integral?
# def _single_hook_unnormalized_pdf(self, x, name):
# if component_norm_range.limits_are_false:
# component_norm_range = self.space
# if component_norm_range.limits_are_set:
# with self._set_numerics_data_shift(limits=component_norm_range):
# return super()._single_hook_unnormalized_pdf(x, name)
# else:
# return super()._single_hook_unnormalized_pdf(x, name)
#
def _single_hook_pdf(self, x, norm_range):
with self._set_numerics_data_shift(limits=norm_range):
return super()._single_hook_pdf(x, norm_range)
#
def _single_hook_log_pdf(self, x, norm_range):
with self._set_numerics_data_shift(limits=norm_range):
return super()._single_hook_log_pdf(x, norm_range)
def _single_hook_sample(self, n, limits, x=None):
with self._set_numerics_data_shift(limits=limits):
return super()._single_hook_sample(n, limits, x)
def _exp_integral_from_any_to_any(limits, params, model):
lambda_ = params['lambda']
lower, upper = limits.rect_limits
# if any(np.isinf([lower, upper])):
# raise AnalyticIntegralNotImplemented
integral = _exp_integral_func_shifting(lambd=lambda_, lower=lower, upper=upper, model=model)
return integral[0]
def _exp_integral_func_shifting(lambd, lower, upper, model):
def raw_integral(x):
return z.exp(lambd * (model._shift_x(x))) / lambd # needed due to overflow in exp otherwise
lower_int = raw_integral(x=lower)
upper_int = raw_integral(x=upper)
integral = (upper_int - lower_int)
return integral
def exp_icdf(x, params, model):
lambd = params['lambda']
x = z.unstack_x(x)
x = model._shift_x(x)
return znp.log(lambd * x) / lambd
# Exponential.register_inverse_analytic_integral(exp_icdf) # TODO: register icdf for exponential
# TODO: cleanup, make cdf registrable _and_ inverse integral, but real
limits = Space(axes=0, limits=(ANY_LOWER, ANY_UPPER))
Exponential.register_analytic_integral(func=_exp_integral_from_any_to_any, limits=limits)
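# A minimal usage sketch (added for illustration; the parameter value and the
# observable range are assumptions, not taken from this module):
#
#     import zfit
#     obs = zfit.Space("x", limits=(0, 10))
#     lam = zfit.Parameter("lam", -0.5)
#     exp_pdf = zfit.pdf.Exponential(lam, obs=obs)
#
# With the analytic integral registered above, normalization uses
# (exp(lam * b) - exp(lam * a)) / lam over the norm range instead of numerical
# integration.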
| avg_line_length 43.92973 | max_line_length 119 | alphanum_fraction 0.648702 |
f70228d45f25247be7c181a695834dda94491b4d | 6,195 | py | Python | experiments/PV_noPV_7day_20k.py | DavidFellner/Malfunctions_in_LV_grid_datase | f92ca5e5ebc6e7d54c8e5c3d7d6e5c4e4c67fb5c | ["MIT"] | stars: null | issues: null | forks: null
import os
import math
'''
Metric goal is reached
'''
# System settings
dev_mode = False
grid_data_folder = os.path.join(os.getcwd(), 'raw_data_generation', 'input')
raw_data_folder = os.path.join(os.getcwd(), 'raw_data')
datasets_folder = os.path.join(os.getcwd(), 'datasets')
test_data_folder = os.path.join(os.getcwd(), 'test')
models_folder = os.path.join(os.getcwd(), 'models')
local_machine_tz = 'Europe/Berlin' # timezone; it's important for Powerfactory
#Deep learning settings
learning_config = {
"dataset": "PV_noPV_7day_20k",
"RNN model settings": [1, 2, 6, 2], # number of input features, number of output features, number of features in hidden state, number of of layers
"number of epochs": 100,
"learning rate": 1*10**-6,
"activation function": 'tanh', # relu, tanh
"mini batch size": 60,
"optimizer": 'Adam', # Adam, SGD
"k folds": 5, #choose 1 to not do crossval
"cross_validation": True,
"early stopping": False,
"LR adjustment": 'LR controlled', #None, 'warm up' , 'LR controlled'
"percentage of epochs for warm up": 10, #warm up not performed if percentage of epochs for warm up * epochs > epochs
"train test split": 0.2, #if int, used as number up testing examples; if float, used as share of data
"baseline": False,
"metrics": ['accuracy', 'precision_macro', 'recall_macro', 'f1_macro'],
"cross_val_metrics": ['fit_time', 'test_accuracy', 'test_precision_macro', 'test_recall_macro', 'test_f1_macro'],
"plot samples": True,
"classifier": "RNN" # RNN
}
#########################################################################
### only change if new dataset or raw data should be created ###
#########################################################################
# Dataset settings
raw_data_set_name = 'PV_noPV' #'malfunctions_in_LV_grid_dataset', 'PV_noPV', dummy
dataset_available = False #set to False to recreate instances from raw data
raw_data_available = True #set to False to generate raw data using the simulation; leave True if DIgSILENT PowerFactory is not available
add_data = True #raw_data_available = False has to be set for this! set add_data = True to add more data to raw data;
add_noise = False
accuracy = 0.01 #accuracy according to the accuracy class (Genauigkeitsklasse) of the smart meter (e.g. 1 = 1%)
sample_length = 7 * 96 #96 datapoints per day
smartmeter_ratedvoltage_range = [400, 415]
smartmeter_voltage_range = [363, 457]
number_of_samples = 20000
share_of_positive_samples = 0.5 #should be 0.5! only choose values whose inverse is a whole number, e.g. 0.2, 0.25, 0.5 > otherwise the number of samples is corrupted
number_of_grids = len([i for i in os.listdir(grid_data_folder) if os.path.isdir(os.path.join(grid_data_folder, i))])
float_decimal = 5 #decimals in dataset
#Powerfactory settings
user = 'FellnerD'
system_language = 0 #choose 0 for English, 1 for German according to the language of the PowerFactory installation on the system
parallel_computing = True
cores = 12 #cores to be used for parallel computing (when 64 available use 12 - 24)
reduce_result_file_size = True #save results as integers to save memory in csv
just_voltages = True #if False also P and Q results given
# Simulation settings
sim_length = 365 #simulation length in days (has to be equal to or bigger than the sample length)
if sim_length < sample_length / 96:
    print('Choose different simulation length or sample length (sim_length >= sample_length)')
if raw_data_set_name == 'PV_noPV':
    positive_samples_per_simrun = 5 #minimum number of terminals per grid that data is taken from > determines how many yearly simulations have to be run and used for dataset creation
simruns = math.ceil((number_of_samples * share_of_positive_samples) / (positive_samples_per_simrun * number_of_grids) / (sim_length * 96/sample_length))
elif raw_data_set_name == 'malfunctions_in_LV_grid_dataset':
simruns = math.ceil((number_of_samples * share_of_positive_samples) / (number_of_grids) / int(
(sim_length * 96 / sample_length) - 1))
elif raw_data_set_name == 'dummy':
    terms_per_simrun = 5 #minimum number of terminals per grid that data is taken from > determines how many yearly simulations have to be run and used for dataset creation
simruns = math.ceil((number_of_samples * share_of_positive_samples) / (terms_per_simrun * number_of_grids) / (sim_length * 96/sample_length))
else:
simruns = 10 #number of datasets produced and also used per grid (location of malfunction/PVs... is varied)
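# Worked example (added; the grid count is hypothetical): for 'PV_noPV' with
# number_of_samples = 20000, share_of_positive_samples = 0.5,
# positive_samples_per_simrun = 5, sample_length = 672 and sim_length = 365,
# each yearly run yields 365 * 96 / 672 ~= 52.14 samples per terminal, so with
# e.g. number_of_grids = 10: simruns = ceil(10000 / (5 * 10) / 52.14) = 4.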
step_size = 15 #simulation step size in minutes
percentage = 25 #percentage of busses with active PVs (PV proliferation)
control_curve_choice = 0 #for all PVs: choose control curve for 0 = cos(phi)(P), 1 = Q(P), 2 = brokenQ(P) (flat curve)
broken_control_curve_choice = 3 #for broken PV: choose control curve for 0 = cos(phi)(P), 1 = Q(P), 2 = broken Q(P) (flat curve), 3 = wrong Q(P) (inversed curve)
number_of_broken_devices = 1 #define number of devices to experience malfunctions during simulation
load_scaling = 100 #general load scaling for all loads in simulation (does not apply to setup)
generation_scaling = 100 #general generation scaling for all generation units in simulation (does not apply to setup)
whole_year = True #if True malfunction is present from start of simulation on; if False malfunction is at random point
t_start = None #default(None): times inferred from profiles in data
t_end = None
#t_start = pd.Timestamp('2017-01-01 00:00:00', tz='utc') # example for custom sim time
#t_end = pd.Timestamp('2018-01-01 00:00:00', tz='utc') - pd.Timedelta(step_size + 'T')
| avg_line_length 66.612903 | max_line_length 187 | alphanum_fraction 0.652623 |
f702297271349632584c3aeb49e40b474c6da936 | 18,867 | py | Python | bin/lesson_check.py | kekoziar/lc-data-intro-archives | 1307cdf22cf46a8e7656b642341781397b93116f | ["CC-BY-4.0"] | stars: 7 (2019-05-15 to 2020-12-14) | issues: 20 (2019-04-16 to 2022-01-19) | forks: 12 (2018-04-27 to 2022-01-05)
#!/usr/bin/env python3
"""
Check lesson files and their contents.
"""
import os
import glob
import re
from argparse import ArgumentParser
from util import (Reporter, read_markdown, load_yaml, check_unwanted_files,
require)
__version__ = '0.3'
# Where to look for source Markdown files.
SOURCE_DIRS = ['', '_episodes', '_extras']
# Where to look for source Rmd files.
SOURCE_RMD_DIRS = ['_episodes_rmd']
# Required files: each entry is ('path': YAML_required).
# FIXME: We do not yet validate whether any files have the required
# YAML headers, but should in the future.
# The '%' is replaced with the source directory path for checking.
# Episodes are handled specially, and extra files in '_extras' are also handled
# specially. This list must include all the Markdown files listed in the
# 'bin/initialize' script.
REQUIRED_FILES = {
'%/CODE_OF_CONDUCT.md': True,
'%/CONTRIBUTING.md': False,
'%/LICENSE.md': True,
'%/README.md': False,
'%/_extras/discuss.md': True,
'%/_extras/guide.md': True,
'%/index.md': True,
'%/reference.md': True,
'%/setup.md': True,
}
# Episode filename pattern.
P_EPISODE_FILENAME = re.compile(r'/_episodes/(\d\d)-[-\w]+.md$')
# Pattern to match lines ending with whitespace.
P_TRAILING_WHITESPACE = re.compile(r'\s+$')
# Pattern to match figure references in HTML.
P_FIGURE_REFS = re.compile(r'<img[^>]+src="([^"]+)"[^>]*>')
# Pattern to match internally-defined Markdown links.
P_INTERNAL_LINK_REF = re.compile(r'\[([^\]]+)\]\[([^\]]+)\]')
# Pattern to match reference links (to resolve internally-defined references).
P_INTERNAL_LINK_DEF = re.compile(r'^\[([^\]]+)\]:\s*(.+)')
# Pattern to match {% include ... %} statements
P_INTERNAL_INCLUDE_LINK = re.compile(r'^{% include ([^ ]*) %}$')
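# A quick illustration (added, not in the original) of the two reference
# patterns above:
#
#     P_INTERNAL_LINK_DEF.search('[lesson-example]: https://example.org')
#     # -> groups ('lesson-example', 'https://example.org')
#     P_INTERNAL_LINK_REF.search('see the [example][lesson-example]')
#     # -> groups ('example', 'lesson-example')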
# What kinds of blockquotes are allowed?
KNOWN_BLOCKQUOTES = {
'callout',
'challenge',
'checklist',
'discussion',
'keypoints',
'objectives',
'prereq',
'quotation',
'solution',
'testimonial'
}
# What kinds of code fragments are allowed?
KNOWN_CODEBLOCKS = {
'error',
'output',
'source',
'language-bash',
'html',
'language-make',
'language-matlab',
'language-python',
'language-r',
'language-shell',
'language-sql'
}
# What fields are required in teaching episode metadata?
TEACHING_METADATA_FIELDS = {
('title', str),
('teaching', int),
('exercises', int),
('questions', list),
('objectives', list),
('keypoints', list)
}
# What fields are required in break episode metadata?
BREAK_METADATA_FIELDS = {
('layout', str),
('title', str),
('break', int)
}
# How long are lines allowed to be?
# Please keep this in sync with .editorconfig!
MAX_LINE_LEN = 100
def main():
"""Main driver."""
args = parse_args()
args.reporter = Reporter()
check_config(args.reporter, args.source_dir)
check_source_rmd(args.reporter, args.source_dir, args.parser)
args.references = read_references(args.reporter, args.reference_path)
docs = read_all_markdown(args.source_dir, args.parser)
check_fileset(args.source_dir, args.reporter, list(docs.keys()))
check_unwanted_files(args.source_dir, args.reporter)
for filename in list(docs.keys()):
checker = create_checker(args, filename, docs[filename])
checker.check()
args.reporter.report()
if args.reporter.messages and not args.permissive:
exit(1)
def parse_args():
"""Parse command-line arguments."""
parser = ArgumentParser(description="""Check episode files in a lesson.""")
parser.add_argument('-l', '--linelen',
default=False,
action="store_true",
dest='line_lengths',
help='Check line lengths')
parser.add_argument('-p', '--parser',
default=None,
dest='parser',
help='path to Markdown parser')
parser.add_argument('-r', '--references',
default=None,
dest='reference_path',
help='path to Markdown file of external references')
parser.add_argument('-s', '--source',
default=os.curdir,
dest='source_dir',
help='source directory')
parser.add_argument('-w', '--whitespace',
default=False,
action="store_true",
dest='trailing_whitespace',
help='Check for trailing whitespace')
parser.add_argument('--permissive',
default=False,
action="store_true",
dest='permissive',
help='Do not raise an error even if issues are detected')
args, extras = parser.parse_known_args()
require(args.parser is not None,
'Path to Markdown parser not provided')
require(not extras,
'Unexpected trailing command-line arguments "{0}"'.format(extras))
return args
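# Hypothetical invocation (not part of the original file); the script name and
# parser path are illustrative, since any Markdown-AST parser accepted by
# read_markdown will do:
#   python lesson_check.py -s . -p /path/to/markdown-parser -r _includes/links.md -l -w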
def check_config(reporter, source_dir):
"""Check configuration file."""
config_file = os.path.join(source_dir, '_config.yml')
config = load_yaml(config_file)
reporter.check_field(config_file, 'configuration',
config, 'kind', 'lesson')
reporter.check_field(config_file, 'configuration',
config, 'carpentry', ('swc', 'dc', 'lc', 'cp'))
reporter.check_field(config_file, 'configuration', config, 'title')
reporter.check_field(config_file, 'configuration', config, 'email')
for defaults in [
{'values': {'root': '.', 'layout': 'page'}},
{'values': {'root': '..', 'layout': 'episode'}, 'scope': {'type': 'episodes', 'path': ''}},
{'values': {'root': '..', 'layout': 'page'}, 'scope': {'type': 'extras', 'path': ''}}
]:
        reporter.check(defaults in config.get('defaults', []),
                       'configuration',
                       'Configuration does not include defaults entry {0}',
                       defaults)
def check_source_rmd(reporter, source_dir, parser):
"""Check that Rmd episode files include `source: Rmd`"""
episode_rmd_dir = [os.path.join(source_dir, d) for d in SOURCE_RMD_DIRS]
episode_rmd_files = [os.path.join(d, '*.Rmd') for d in episode_rmd_dir]
    for pat in episode_rmd_files:
        for f in glob.glob(pat):
            data = read_markdown(parser, f)
            metadata = data.get('metadata') if data else None
            if metadata:
                reporter.check_field(f, 'episode_rmd',
                                     metadata, 'source', 'Rmd')
def read_references(reporter, ref_path):
"""Read shared file of reference links, returning dictionary of valid references
{symbolic_name : URL}
"""
    require(ref_path, 'No reference path has been provided.')
result = {}
urls_seen = set()
with open(ref_path, 'r') as reader:
for (num, line) in enumerate(reader, 1):
            if P_INTERNAL_INCLUDE_LINK.search(line):
                continue
m = P_INTERNAL_LINK_DEF.search(line)
message = '{}: {} not a valid reference: {}'
require(m, message.format(ref_path, num, line.rstrip()))
name = m.group(1)
url = m.group(2)
message = 'Empty reference at {0}:{1}'
require(name, message.format(ref_path, num))
unique_name = name not in result
unique_url = url not in urls_seen
reporter.check(unique_name,
ref_path,
'Duplicate reference name {0} at line {1}',
name, num)
reporter.check(unique_url,
ref_path,
'Duplicate definition of URL {0} at line {1}',
url, num)
result[name] = url
urls_seen.add(url)
return result
def read_all_markdown(source_dir, parser):
"""Read source files, returning
{path : {'metadata':yaml, 'metadata_len':N, 'text':text, 'lines':[(i, line, len)], 'doc':doc}}
"""
all_dirs = [os.path.join(source_dir, d) for d in SOURCE_DIRS]
all_patterns = [os.path.join(d, '*.md') for d in all_dirs]
result = {}
for pat in all_patterns:
for filename in glob.glob(pat):
data = read_markdown(parser, filename)
if data:
result[filename] = data
return result
def check_fileset(source_dir, reporter, filenames_present):
"""Are all required files present? Are extraneous files present?"""
# Check files with predictable names.
required = [p.replace('%', source_dir) for p in REQUIRED_FILES]
missing = set(required) - set(filenames_present)
for m in missing:
reporter.add(None, 'Missing required file {0}', m)
# Check episode files' names.
seen = []
for filename in filenames_present:
if '_episodes' not in filename:
continue
m = P_EPISODE_FILENAME.search(filename)
if m and m.group(1):
seen.append(m.group(1))
else:
reporter.add(
None, 'Episode {0} has badly-formatted filename', filename)
# Check for duplicate episode numbers.
reporter.check(len(seen) == len(set(seen)),
None,
'Duplicate episode numbers {0} vs {1}',
sorted(seen), sorted(set(seen)))
# Check that numbers are consecutive.
seen = sorted([int(s) for s in seen])
clean = True
for i in range(len(seen) - 1):
clean = clean and ((seen[i+1] - seen[i]) == 1)
reporter.check(clean,
None,
'Missing or non-consecutive episode numbers {0}',
seen)
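def _demo_episode_numbering():
    """Sketch (not part of the original file): the consecutiveness test used
    by check_fileset, on hypothetical episode numbers."""
    seen = sorted([1, 2, 4])
    clean = all(b - a == 1 for a, b in zip(seen, seen[1:]))
    assert not clean  # the gap between 2 and 4 would be reported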
def create_checker(args, filename, info):
"""Create appropriate checker for file."""
for (pat, cls) in CHECKERS:
if pat.search(filename):
return cls(args, filename, **info)
return NotImplemented
class CheckBase:
"""Base class for checking Markdown files."""
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
"""Cache arguments for checking."""
self.args = args
self.reporter = self.args.reporter # for convenience
self.filename = filename
self.metadata = metadata
self.metadata_len = metadata_len
self.text = text
self.lines = lines
self.doc = doc
self.layout = None
def check(self):
"""Run tests."""
self.check_metadata()
self.check_line_lengths()
self.check_trailing_whitespace()
self.check_blockquote_classes()
self.check_codeblock_classes()
self.check_defined_link_references()
def check_metadata(self):
"""Check the YAML metadata."""
self.reporter.check(self.metadata is not None,
self.filename,
'Missing metadata entirely')
if self.metadata and (self.layout is not None):
self.reporter.check_field(
self.filename, 'metadata', self.metadata, 'layout', self.layout)
def check_line_lengths(self):
"""Check the raw text of the lesson body."""
if self.args.line_lengths:
over = [i for (i, l, n) in self.lines if (
n > MAX_LINE_LEN) and (not l.startswith('!'))]
self.reporter.check(not over,
self.filename,
'Line(s) too long: {0}',
', '.join([str(i) for i in over]))
def check_trailing_whitespace(self):
"""Check for whitespace at the ends of lines."""
if self.args.trailing_whitespace:
trailing = [
i for (i, l, n) in self.lines if P_TRAILING_WHITESPACE.match(l)]
self.reporter.check(not trailing,
self.filename,
'Line(s) end with whitespace: {0}',
', '.join([str(i) for i in trailing]))
def check_blockquote_classes(self):
"""Check that all blockquotes have known classes."""
for node in self.find_all(self.doc, {'type': 'blockquote'}):
cls = self.get_val(node, 'attr', 'class')
self.reporter.check(cls in KNOWN_BLOCKQUOTES,
(self.filename, self.get_loc(node)),
'Unknown or missing blockquote type {0}',
cls)
def check_codeblock_classes(self):
"""Check that all code blocks have known classes."""
for node in self.find_all(self.doc, {'type': 'codeblock'}):
cls = self.get_val(node, 'attr', 'class')
self.reporter.check(cls in KNOWN_CODEBLOCKS,
(self.filename, self.get_loc(node)),
'Unknown or missing code block type {0}',
cls)
def check_defined_link_references(self):
"""Check that defined links resolve in the file.
Internally-defined links match the pattern [text][label].
"""
result = set()
for node in self.find_all(self.doc, {'type': 'text'}):
for match in P_INTERNAL_LINK_REF.findall(node['value']):
text = match[0]
link = match[1]
if link not in self.args.references:
result.add('"{0}"=>"{1}"'.format(text, link))
self.reporter.check(not result,
self.filename,
'Internally-defined links may be missing definitions: {0}',
', '.join(sorted(result)))
def find_all(self, node, pattern, accum=None):
"""Find all matches for a pattern."""
assert isinstance(pattern, dict), 'Patterns must be dictionaries'
if accum is None:
accum = []
if self.match(node, pattern):
accum.append(node)
for child in node.get('children', []):
self.find_all(child, pattern, accum)
return accum
def match(self, node, pattern):
"""Does this node match the given pattern?"""
for key in pattern:
if key not in node:
return False
val = pattern[key]
if isinstance(val, str):
if node[key] != val:
return False
elif isinstance(val, dict):
if not self.match(node[key], val):
return False
return True
@staticmethod
def get_val(node, *chain):
"""Get value one or more levels down."""
curr = node
for selector in chain:
curr = curr.get(selector, None)
if curr is None:
break
return curr
def get_loc(self, node):
"""Convenience method to get node's line number."""
result = self.get_val(node, 'options', 'location')
if self.metadata_len is not None:
result += self.metadata_len
return result
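def _demo_node_matching():
    """Sketch (not part of the original file): the recursive dict-pattern
    matching used by CheckBase.match, exercised on a hypothetical parsed
    node with a minimal stand-in for parsed command-line args."""
    class _Args:
        reporter = None
    checker = CheckBase(_Args(), 'demo.md', None, None, '', [], {})
    node = {'type': 'blockquote', 'attr': {'class': 'callout'}}
    assert checker.match(node, {'type': 'blockquote',
                                'attr': {'class': 'callout'}})
    assert not checker.match(node, {'type': 'codeblock'})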
class CheckNonJekyll(CheckBase):
"""Check a file that isn't translated by Jekyll."""
def check_metadata(self):
self.reporter.check(self.metadata is None,
self.filename,
'Unexpected metadata')
class CheckIndex(CheckBase):
"""Check the main index page."""
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
super().__init__(args, filename, metadata, metadata_len, text, lines, doc)
self.layout = 'lesson'
def check_metadata(self):
super().check_metadata()
self.reporter.check(self.metadata.get('root', '') == '.',
self.filename,
'Root not set to "."')
class CheckEpisode(CheckBase):
"""Check an episode page."""
def check(self):
"""Run extra tests."""
super().check()
self.check_reference_inclusion()
def check_metadata(self):
super().check_metadata()
if self.metadata:
if 'layout' in self.metadata:
if self.metadata['layout'] == 'break':
self.check_metadata_fields(BREAK_METADATA_FIELDS)
else:
self.reporter.add(self.filename,
'Unknown episode layout "{0}"',
self.metadata['layout'])
else:
self.check_metadata_fields(TEACHING_METADATA_FIELDS)
def check_metadata_fields(self, expected):
"""Check metadata fields."""
for (name, type_) in expected:
if name not in self.metadata:
self.reporter.add(self.filename,
'Missing metadata field {0}',
name)
elif not isinstance(self.metadata[name], type_):
self.reporter.add(self.filename,
'"{0}" has wrong type in metadata ({1} instead of {2})',
name, type(self.metadata[name]), type_)
def check_reference_inclusion(self):
"""Check that links file has been included."""
if not self.args.reference_path:
return
for (i, last_line, line_len) in reversed(self.lines):
if last_line:
break
require(last_line,
'No non-empty lines in {0}'.format(self.filename))
include_filename = os.path.split(self.args.reference_path)[-1]
if include_filename not in last_line:
self.reporter.add(self.filename,
'episode does not include "{0}"',
include_filename)
class CheckReference(CheckBase):
"""Check the reference page."""
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
super().__init__(args, filename, metadata, metadata_len, text, lines, doc)
self.layout = 'reference'
class CheckGeneric(CheckBase):
"""Check a generic page."""
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
super().__init__(args, filename, metadata, metadata_len, text, lines, doc)
CHECKERS = [
(re.compile(r'CONTRIBUTING\.md'), CheckNonJekyll),
(re.compile(r'README\.md'), CheckNonJekyll),
(re.compile(r'index\.md'), CheckIndex),
(re.compile(r'reference\.md'), CheckReference),
(re.compile(r'_episodes/.*\.md'), CheckEpisode),
(re.compile(r'.*\.md'), CheckGeneric)
]
if __name__ == '__main__':
main()
| 33.39292
| 103
| 0.563842
|
import os
import glob
import re
from argparse import ArgumentParser
from util import (Reporter, read_markdown, load_yaml, check_unwanted_files,
require)
__version__ = '0.3'
SOURCE_DIRS = ['', '_episodes', '_extras']
SOURCE_RMD_DIRS = ['_episodes_rmd']
REQUIRED_FILES = {
'%/CODE_OF_CONDUCT.md': True,
'%/CONTRIBUTING.md': False,
'%/LICENSE.md': True,
'%/README.md': False,
'%/_extras/discuss.md': True,
'%/_extras/guide.md': True,
'%/index.md': True,
'%/reference.md': True,
'%/setup.md': True,
}
P_EPISODE_FILENAME = re.compile(r'/_episodes/(\d\d)-[-\w]+\.md$')
P_TRAILING_WHITESPACE = re.compile(r'\s+$')
P_FIGURE_REFS = re.compile(r'<img[^>]+src="([^"]+)"[^>]*>')
# Pattern to match internally-defined Markdown links.
P_INTERNAL_LINK_REF = re.compile(r'\[([^\]]+)\]\[([^\]]+)\]')
# Pattern to match reference links (to resolve internally-defined references).
P_INTERNAL_LINK_DEF = re.compile(r'^\[([^\]]+)\]:\s*(.+)')
# Pattern to match {% include ... %} statements
P_INTERNAL_INCLUDE_LINK = re.compile(r'^{% include ([^ ]*) %}$')
# What kinds of blockquotes are allowed?
KNOWN_BLOCKQUOTES = {
'callout',
'challenge',
'checklist',
'discussion',
'keypoints',
'objectives',
'prereq',
'quotation',
'solution',
'testimonial'
}
# What kinds of code fragments are allowed?
KNOWN_CODEBLOCKS = {
'error',
'output',
'source',
'language-bash',
'html',
'language-make',
'language-matlab',
'language-python',
'language-r',
'language-shell',
'language-sql'
}
# What fields are required in teaching episode metadata?
TEACHING_METADATA_FIELDS = {
('title', str),
('teaching', int),
('exercises', int),
('questions', list),
('objectives', list),
('keypoints', list)
}
# What fields are required in break episode metadata?
BREAK_METADATA_FIELDS = {
('layout', str),
('title', str),
('break', int)
}
# How long are lines allowed to be?
# Please keep this in sync with .editorconfig!
MAX_LINE_LEN = 100
def main():
args = parse_args()
args.reporter = Reporter()
check_config(args.reporter, args.source_dir)
check_source_rmd(args.reporter, args.source_dir, args.parser)
args.references = read_references(args.reporter, args.reference_path)
docs = read_all_markdown(args.source_dir, args.parser)
check_fileset(args.source_dir, args.reporter, list(docs.keys()))
check_unwanted_files(args.source_dir, args.reporter)
for filename in list(docs.keys()):
checker = create_checker(args, filename, docs[filename])
checker.check()
args.reporter.report()
if args.reporter.messages and not args.permissive:
exit(1)
def parse_args():
parser = ArgumentParser(description="""Check episode files in a lesson.""")
parser.add_argument('-l', '--linelen',
default=False,
action="store_true",
dest='line_lengths',
help='Check line lengths')
parser.add_argument('-p', '--parser',
default=None,
dest='parser',
help='path to Markdown parser')
parser.add_argument('-r', '--references',
default=None,
dest='reference_path',
help='path to Markdown file of external references')
parser.add_argument('-s', '--source',
default=os.curdir,
dest='source_dir',
help='source directory')
parser.add_argument('-w', '--whitespace',
default=False,
action="store_true",
dest='trailing_whitespace',
help='Check for trailing whitespace')
parser.add_argument('--permissive',
default=False,
action="store_true",
dest='permissive',
help='Do not raise an error even if issues are detected')
args, extras = parser.parse_known_args()
require(args.parser is not None,
'Path to Markdown parser not provided')
require(not extras,
'Unexpected trailing command-line arguments "{0}"'.format(extras))
return args
def check_config(reporter, source_dir):
config_file = os.path.join(source_dir, '_config.yml')
config = load_yaml(config_file)
reporter.check_field(config_file, 'configuration',
config, 'kind', 'lesson')
reporter.check_field(config_file, 'configuration',
config, 'carpentry', ('swc', 'dc', 'lc', 'cp'))
reporter.check_field(config_file, 'configuration', config, 'title')
reporter.check_field(config_file, 'configuration', config, 'email')
for defaults in [
{'values': {'root': '.', 'layout': 'page'}},
{'values': {'root': '..', 'layout': 'episode'}, 'scope': {'type': 'episodes', 'path': ''}},
{'values': {'root': '..', 'layout': 'page'}, 'scope': {'type': 'extras', 'path': ''}}
]:
        reporter.check(defaults in config.get('defaults', []),
                       'configuration',
                       'Configuration does not include defaults entry {0}',
                       defaults)
def check_source_rmd(reporter, source_dir, parser):
episode_rmd_dir = [os.path.join(source_dir, d) for d in SOURCE_RMD_DIRS]
episode_rmd_files = [os.path.join(d, '*.Rmd') for d in episode_rmd_dir]
    for pat in episode_rmd_files:
        for f in glob.glob(pat):
            data = read_markdown(parser, f)
            metadata = data.get('metadata') if data else None
            if metadata:
                reporter.check_field(f, 'episode_rmd',
                                     metadata, 'source', 'Rmd')
def read_references(reporter, ref_path):
    require(ref_path, 'No reference path has been provided.')
result = {}
urls_seen = set()
with open(ref_path, 'r') as reader:
for (num, line) in enumerate(reader, 1):
            if P_INTERNAL_INCLUDE_LINK.search(line):
                continue
m = P_INTERNAL_LINK_DEF.search(line)
message = '{}: {} not a valid reference: {}'
require(m, message.format(ref_path, num, line.rstrip()))
name = m.group(1)
url = m.group(2)
message = 'Empty reference at {0}:{1}'
require(name, message.format(ref_path, num))
unique_name = name not in result
unique_url = url not in urls_seen
reporter.check(unique_name,
ref_path,
'Duplicate reference name {0} at line {1}',
name, num)
reporter.check(unique_url,
ref_path,
'Duplicate definition of URL {0} at line {1}',
url, num)
result[name] = url
urls_seen.add(url)
return result
def read_all_markdown(source_dir, parser):
all_dirs = [os.path.join(source_dir, d) for d in SOURCE_DIRS]
all_patterns = [os.path.join(d, '*.md') for d in all_dirs]
result = {}
for pat in all_patterns:
for filename in glob.glob(pat):
data = read_markdown(parser, filename)
if data:
result[filename] = data
return result
def check_fileset(source_dir, reporter, filenames_present):
# Check files with predictable names.
required = [p.replace('%', source_dir) for p in REQUIRED_FILES]
missing = set(required) - set(filenames_present)
for m in missing:
reporter.add(None, 'Missing required file {0}', m)
# Check episode files' names.
seen = []
for filename in filenames_present:
if '_episodes' not in filename:
continue
m = P_EPISODE_FILENAME.search(filename)
if m and m.group(1):
seen.append(m.group(1))
else:
reporter.add(
None, 'Episode {0} has badly-formatted filename', filename)
# Check for duplicate episode numbers.
reporter.check(len(seen) == len(set(seen)),
None,
'Duplicate episode numbers {0} vs {1}',
sorted(seen), sorted(set(seen)))
# Check that numbers are consecutive.
seen = sorted([int(s) for s in seen])
clean = True
for i in range(len(seen) - 1):
clean = clean and ((seen[i+1] - seen[i]) == 1)
reporter.check(clean,
None,
'Missing or non-consecutive episode numbers {0}',
seen)
def create_checker(args, filename, info):
for (pat, cls) in CHECKERS:
if pat.search(filename):
return cls(args, filename, **info)
return NotImplemented
class CheckBase:
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
self.args = args
self.reporter = self.args.reporter # for convenience
self.filename = filename
self.metadata = metadata
self.metadata_len = metadata_len
self.text = text
self.lines = lines
self.doc = doc
self.layout = None
def check(self):
self.check_metadata()
self.check_line_lengths()
self.check_trailing_whitespace()
self.check_blockquote_classes()
self.check_codeblock_classes()
self.check_defined_link_references()
def check_metadata(self):
self.reporter.check(self.metadata is not None,
self.filename,
'Missing metadata entirely')
if self.metadata and (self.layout is not None):
self.reporter.check_field(
self.filename, 'metadata', self.metadata, 'layout', self.layout)
def check_line_lengths(self):
if self.args.line_lengths:
over = [i for (i, l, n) in self.lines if (
n > MAX_LINE_LEN) and (not l.startswith('!'))]
self.reporter.check(not over,
self.filename,
'Line(s) too long: {0}',
', '.join([str(i) for i in over]))
def check_trailing_whitespace(self):
if self.args.trailing_whitespace:
trailing = [
i for (i, l, n) in self.lines if P_TRAILING_WHITESPACE.match(l)]
self.reporter.check(not trailing,
self.filename,
'Line(s) end with whitespace: {0}',
', '.join([str(i) for i in trailing]))
def check_blockquote_classes(self):
for node in self.find_all(self.doc, {'type': 'blockquote'}):
cls = self.get_val(node, 'attr', 'class')
self.reporter.check(cls in KNOWN_BLOCKQUOTES,
(self.filename, self.get_loc(node)),
'Unknown or missing blockquote type {0}',
cls)
def check_codeblock_classes(self):
for node in self.find_all(self.doc, {'type': 'codeblock'}):
cls = self.get_val(node, 'attr', 'class')
self.reporter.check(cls in KNOWN_CODEBLOCKS,
(self.filename, self.get_loc(node)),
'Unknown or missing code block type {0}',
cls)
def check_defined_link_references(self):
result = set()
for node in self.find_all(self.doc, {'type': 'text'}):
for match in P_INTERNAL_LINK_REF.findall(node['value']):
text = match[0]
link = match[1]
if link not in self.args.references:
result.add('"{0}"=>"{1}"'.format(text, link))
self.reporter.check(not result,
self.filename,
'Internally-defined links may be missing definitions: {0}',
', '.join(sorted(result)))
def find_all(self, node, pattern, accum=None):
assert isinstance(pattern, dict), 'Patterns must be dictionaries'
if accum is None:
accum = []
if self.match(node, pattern):
accum.append(node)
for child in node.get('children', []):
self.find_all(child, pattern, accum)
return accum
def match(self, node, pattern):
for key in pattern:
if key not in node:
return False
val = pattern[key]
if isinstance(val, str):
if node[key] != val:
return False
elif isinstance(val, dict):
if not self.match(node[key], val):
return False
return True
@staticmethod
def get_val(node, *chain):
curr = node
for selector in chain:
curr = curr.get(selector, None)
if curr is None:
break
return curr
def get_loc(self, node):
result = self.get_val(node, 'options', 'location')
if self.metadata_len is not None:
result += self.metadata_len
return result
class CheckNonJekyll(CheckBase):
def check_metadata(self):
self.reporter.check(self.metadata is None,
self.filename,
'Unexpected metadata')
class CheckIndex(CheckBase):
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
super().__init__(args, filename, metadata, metadata_len, text, lines, doc)
self.layout = 'lesson'
def check_metadata(self):
super().check_metadata()
self.reporter.check(self.metadata.get('root', '') == '.',
self.filename,
'Root not set to "."')
class CheckEpisode(CheckBase):
def check(self):
super().check()
self.check_reference_inclusion()
def check_metadata(self):
super().check_metadata()
if self.metadata:
if 'layout' in self.metadata:
if self.metadata['layout'] == 'break':
self.check_metadata_fields(BREAK_METADATA_FIELDS)
else:
self.reporter.add(self.filename,
'Unknown episode layout "{0}"',
self.metadata['layout'])
else:
self.check_metadata_fields(TEACHING_METADATA_FIELDS)
def check_metadata_fields(self, expected):
for (name, type_) in expected:
if name not in self.metadata:
self.reporter.add(self.filename,
'Missing metadata field {0}',
name)
elif not isinstance(self.metadata[name], type_):
self.reporter.add(self.filename,
'"{0}" has wrong type in metadata ({1} instead of {2})',
name, type(self.metadata[name]), type_)
def check_reference_inclusion(self):
if not self.args.reference_path:
return
for (i, last_line, line_len) in reversed(self.lines):
if last_line:
break
require(last_line,
'No non-empty lines in {0}'.format(self.filename))
include_filename = os.path.split(self.args.reference_path)[-1]
if include_filename not in last_line:
self.reporter.add(self.filename,
'episode does not include "{0}"',
include_filename)
class CheckReference(CheckBase):
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
super().__init__(args, filename, metadata, metadata_len, text, lines, doc)
self.layout = 'reference'
class CheckGeneric(CheckBase):
def __init__(self, args, filename, metadata, metadata_len, text, lines, doc):
super().__init__(args, filename, metadata, metadata_len, text, lines, doc)
CHECKERS = [
(re.compile(r'CONTRIBUTING\.md'), CheckNonJekyll),
(re.compile(r'README\.md'), CheckNonJekyll),
(re.compile(r'index\.md'), CheckIndex),
(re.compile(r'reference\.md'), CheckReference),
(re.compile(r'_episodes/.*\.md'), CheckEpisode),
(re.compile(r'.*\.md'), CheckGeneric)
]
if __name__ == '__main__':
main()
| true
| true
|
f7022974d4d86de398d7c731c85dc7c61ca334bc
| 16,594
|
py
|
Python
|
distil_labse_repo/distil_funcs.py
|
bobub/distil_labse
|
ad587d7e4e49101a22fb1459b724b25733715caa
|
[
"Apache-2.0"
] | null | null | null |
distil_labse_repo/distil_funcs.py
|
bobub/distil_labse
|
ad587d7e4e49101a22fb1459b724b25733715caa
|
[
"Apache-2.0"
] | null | null | null |
distil_labse_repo/distil_funcs.py
|
bobub/distil_labse
|
ad587d7e4e49101a22fb1459b724b25733715caa
|
[
"Apache-2.0"
] | null | null | null |
# Imports
import torch
from labml_nn.transformers.switch import SwitchTransformer, SwitchTransformerLayer, SwitchFeedForward
from labml_nn.transformers import MultiHeadAttention
from labml_nn.transformers.feed_forward import FeedForward
import numpy as np
from transformers import AutoConfig, AutoModel
import torch.nn as nn
import math
from torch.utils.data import Dataset, DataLoader
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics import mean_squared_error
from random import choice
from sklearn.decomposition import PCA
from copy import deepcopy
from transformers import BertModel, BertConfig
# Custom dataset class to store Open Subtitles data
class CustomDataset(torch.utils.data.Dataset):
'Characterizes a dataset for PyTorch'
def __init__(self, input_ids, token_type_ids, attention_masks):
'Initialization'
self.input_ids = input_ids
self.token_type_ids = token_type_ids
self.attention_masks = attention_masks
def __len__(self):
'Denotes the total number of samples'
return len(self.input_ids)
def __getitem__(self, index):
'Generates one sample of data'
input_id = self.input_ids[index]
token_type_ID = self.token_type_ids[index]
attention_mask = self.attention_masks[index]
sample = {'input_ids':input_id, 'token_type_ids':token_type_ID , 'attention_mask':attention_mask}
return sample
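def _demo_custom_dataset():
    """Sketch (not part of the original file): wrapping pre-tokenised tensors
    so a DataLoader can batch them; the tensor contents are hypothetical."""
    ids = torch.randint(0, 1000, (4, 16))
    dataset = CustomDataset(ids, torch.zeros_like(ids), torch.ones_like(ids))
    batch = next(iter(DataLoader(dataset, batch_size=2)))
    assert batch['input_ids'].shape == (2, 16)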
# weights_init and switch_init initialise the model weights as described in the Switch Transformer paper
def weights_init(tensor: torch.Tensor):
if isinstance(tensor, nn.Linear):
switch_init(tensor.weight.data)
torch.nn.init.zeros_(tensor.bias.data)
if isinstance(tensor, nn.LayerNorm):
torch.nn.init.zeros_(tensor.weight.data)
torch.nn.init.zeros_(tensor.bias.data)
def switch_init(tensor: torch.Tensor, s: float = 0.1, mean: float=0) -> torch.Tensor:
fan_in, fan_out = torch.nn.init._calculate_fan_in_and_fan_out(tensor)
std = math.sqrt(s/fan_in)
return torch.nn.init.trunc_normal_(tensor=tensor, mean=mean, std=std)
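def _demo_switch_init():
    """Sketch (not part of the original file): switch_init draws weights from
    a truncated normal with std = sqrt(s / fan_in), following the Switch
    Transformer paper. For a hypothetical 768 -> 3072 linear weight and
    s = 0.1:"""
    w = torch.empty(3072, 768)          # fan_in = 768
    switch_init(w, s=0.1)
    expected_std = (0.1 / 768) ** 0.5   # ~= 0.0114
    assert abs(w.std().item() - expected_std) < 1e-3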
class LaBSE_Switch(nn.Module):
"""
    Torch module that builds a Switch Transformer for LaBSE.
Can be used for other BERT based models too, just change the input_id
tokenization and word_embedding module.
Inputs:
config = dictionary of configuration
word_embeddings_module = torch module mapping token ids to word embeddings
Forward:
Input_ids = ids using labse tokenizer
attention_mask = binary, indicates to model which tokens should be attended to,
and which should not.
Outputs:
outputs = a dictionary containing x, counts, route_prob, n_dropped, logits, attention, values
See Switch Transformer paper to understand all except:
attention, values and logits, which are used during knowledge distillation.
"""
def __init__(self, config, word_embeddings_module):
super().__init__()
# set the switch transformer as the actual neural net
self.switch_model = SwitchTransformer(
SwitchTransformerLayer(
d_model=config['d_model'],
attn=MultiHeadAttention(config['heads'], config['d_model'], config['dropout']),
feed_forward=SwitchFeedForward(
capacity_factor=config['capacity_factor'],
drop_tokens=config['drop_tokens'],
is_scale_prob=config['is_scale_prob'],
n_experts=config['n_experts'],
expert=FeedForward(config['d_model'], config['d_ff'], config['dropout_ffn']),
d_model=config['d_model']),
dropout_prob=config['dropout']),
config['n_layers'],
d_out = int(768),
dropout_prob = config['dropout'])
# initialise weights
# self.switch_model.apply(weights_init)
# module that maps input tokens into embedding vectors
self.word_embeddings = word_embeddings_module
# get attention weights from teacher
# self.weight_init_from_teacher(teacher_model=teacher_model, int_matches=int_matches)
def weight_init_from_teacher(self, teacher_model, int_matches):
"""
        Initialises the student's attention modules with the teacher's weights; specific to LaBSE and the Switch-based student.
int_matches should be a list of tuples of [(teacher_layer, student_layer),...]
e.g. int_matches = [(5,0),(11,2)] --> give attention weights of teacher layer 5 to student layer 0
"""
# teacher_model=load_teacher(device=torch.device('cuda'))
self.switch_model.layers[int_matches[1]].attn.query.linear.weight = teacher_model.encoder.layer[int_matches[0]].attention.self.query.weight
self.switch_model.layers[int_matches[1]].attn.query.linear.bias = teacher_model.encoder.layer[int_matches[0]].attention.self.query.bias
self.switch_model.layers[int_matches[1]].attn.key.linear.weight = teacher_model.encoder.layer[int_matches[0]].attention.self.key.weight
self.switch_model.layers[int_matches[1]].attn.key.linear.bias = teacher_model.encoder.layer[int_matches[0]].attention.self.key.bias
self.switch_model.layers[int_matches[1]].attn.value.linear.weight = teacher_model.encoder.layer[int_matches[0]].attention.self.value.weight
self.switch_model.layers[int_matches[1]].attn.value.linear.bias = teacher_model.encoder.layer[int_matches[0]].attention.self.value.bias
self.switch_model.layers[int_matches[1]].attn.output.weight = teacher_model.encoder.layer[int_matches[0]].attention.output.dense.weight
self.switch_model.layers[int_matches[1]].attn.output.bias = teacher_model.encoder.layer[int_matches[0]].attention.output.dense.bias
# self.switch_model.layers[int_matches[1]].norm_ff.weight = teacher_model.encoder.layer[int_matches[0]].output.LayerNorm.weight
# self.switch_model.layers[int_matches[1]].norm_ff.bias = teacher_model.encoder.layer[int_matches[0]].output.LayerNorm.bias
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
# masks and token type ids not used, as we're just creating sentence embeddings for classification tasks
# word embeddings of shape [batch, seq_len, d_model]
input_embeddings = self.word_embeddings(input_ids)
# model input on shape [seq_len, batch, d_model] and mask
_batch,_seq_len,_n_hid = input_embeddings.shape
#print(_n_hid)
# call switch transformer
outputs = self.switch_model(torch.reshape(input_embeddings, (_seq_len, _batch, _n_hid)),
attention_mask=None)
return outputs
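# Hypothetical configuration sketch (not part of the original file) listing
# the keys LaBSE_Switch expects; the values below are illustrative only and
# assume d_model is divisible by heads.
_EXAMPLE_SWITCH_CONFIG = {
    'd_model': 384,
    'heads': 6,
    'dropout': 0.1,
    'capacity_factor': 1.25,
    'drop_tokens': False,
    'is_scale_prob': True,
    'n_experts': 8,
    'd_ff': 1536,
    'dropout_ffn': 0.1,
    'n_layers': 4,
}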
# function to blackbox load the student for distillation - can be switch or bert based
def load_student(name, student_config, device, teacher_model, int_matches, N_LAYERS):
if name!='switch':
# for pretrained bert models - setup config
student_config = BertConfig.from_pretrained(name)
student_config.num_hidden_layers = N_LAYERS
student_config.output_hidden_states = True
student_config.output_attentions = True
student_config.use_cache = True
student_config.is_decoder = True
# load model and set input embeddings
student_model = BertModel.from_pretrained(name, config=student_config)
student_model.set_input_embeddings(teacher_model.get_input_embeddings())
student_model = student_model.float()
student_model.to(device=device)
return student_model
if name=='switch':
# create compressed word embeddings from those of the teacher
word_embeddings = deepcopy(teacher_model.get_input_embeddings())
compressed_word_embeddings = word_embedding_compression(word_embeddings, student_config['d_model'])
# create student model
student_model = LaBSE_Switch(config=student_config, word_embeddings_module=compressed_word_embeddings)
# initialise weights
student_model.switch_model.apply(weights_init)
student_model.weight_init_from_teacher(teacher_model=teacher_model, int_matches=int_matches)
# convert model to float32 and move to device
student_model = student_model.float()
student_model.to(device=device)
return student_model
# loads teacher model from Huggingface
def load_teacher(device):
teacher_config = AutoConfig.from_pretrained('sentence-transformers/LaBSE')
teacher_config.output_hidden_states = True
teacher_config.output_attentions = True
teacher_config.use_cache = True
teacher_config.is_decoder = True
teacher_model = AutoModel.from_pretrained('sentence-transformers/LaBSE', config=teacher_config)
teacher_model.float() # needs to be 32 bit precision to get decent results from distillation
teacher_model.to(device=device)
return teacher_model
# Adaptor for BERT based models
def simple_adaptor(batch, model_outputs):
# values need to be reformatted from Huggingface 'past_key_values' output
values = []
for i in model_outputs['past_key_values']:
values.append(i[1])
values = torch.stack(values)
attentions = []
for j in model_outputs['attentions']:
attentions.append(inv_softmax(j))
attentions = torch.stack(attentions)
# we use pooler output as logits
return {'logits': model_outputs['pooler_output'],
'hidden': model_outputs['hidden_states'],
#'attention': model_outputs['attentions'],
'attention':attentions,
'inputs_mask': batch['attention_mask'],
'value_relation': values,
'pooler_output':model_outputs['pooler_output']}
def inv_softmax(x,C=-50):
# reverses softmax operation - used in teacher_adaptor
# C variable sets the min value of the scores, -50 works well.
result = torch.log(x)
result = torch.where(result <= float('-inf'), torch.full_like(result,C), result)
return result
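def _demo_inv_softmax():
    """Sketch (not part of the original file): inv_softmax returns scores
    whose softmax reproduces the input probabilities; they match the original
    logits only up to an additive constant, since softmax is shift-invariant."""
    probs = torch.softmax(torch.tensor([[2.0, 0.5, -1.0]]), dim=-1)
    recovered = inv_softmax(probs)
    assert torch.allclose(torch.softmax(recovered, dim=-1), probs, atol=1e-6)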
def teacher_adaptor(batch, model_outputs):
# selects relevant model and batch outputs used for distillation loss calculation
values = []
for i in model_outputs['past_key_values']:
values.append(i[1])
values = torch.stack(values)
attentions = []
for j in model_outputs['attentions']:
attentions.append(inv_softmax(j))
attentions = torch.stack(attentions)
# print(model_outputs['pooler_output'].requires_grad)
return {#'logits': model_outputs['last_hidden_state'],
'logits':model_outputs['pooler_output'],
'hidden': model_outputs['hidden_states'],
#'attention': model_outputs['attentions'],
'attention': attentions,
'inputs_mask': batch['attention_mask'],
'value_relation': values,
'pooler_output':model_outputs['pooler_output']}
# adaptor for switch model
def switch_student_adaptor(batch, model_outputs):
# selects relevant model and batch outputs and reformats them
# needs to have same shapes as teacher adaptor
    # reformat attention; use seq_len instead of shadowing the builtin `len`
    layers, seq_len, _, batch_size, heads = model_outputs['attention'].shape
    attention = model_outputs['attention'].reshape(layers, batch_size, heads, seq_len, seq_len)
    # reformat logits
    seq_len, batch_size, d_model = model_outputs['logits'].shape
    logits = model_outputs['logits'].reshape(batch_size, seq_len, d_model)
    # print(model_outputs['pooler_output'].requires_grad)
    # reformat values
    layers, seq_len, batch_size, heads, embedding_per_head = model_outputs['values'].shape
    values = model_outputs['values'].reshape(layers, batch_size, heads, seq_len, embedding_per_head)
return {#'logits': logits,
'logits':model_outputs['pooler_output'],
'counts': model_outputs['counts'],
'attention': attention,
'inputs_mask': batch['attention_mask'],
'route_prob': model_outputs['route_prob'],
'n_dropped': model_outputs['n_dropped'],
'value_relation': values}
# Predict function evaluates model every epoch to show training progress
def predict(model, teacher_model, eval_dataset, step, device, STUDENT, BATCH_SIZE, eval_metric='cosine_similarity', feedback=True):
'''
model = student_model
teacher_model = labse
    eval_dataset = dev set samples the model is evaluated on per callback
device = cuda or cpu
student = switch or !switch
eval_metric = metric to evaluate the model - mse or cosine_similarity
'''
model.eval()
student_logits = []
teacher_logits =[]
batch_counts = []
batch_n_dropped = []
batch_route_prob = []
dataloader = DataLoader(eval_dataset,batch_size=BATCH_SIZE)
print('Running callback function on {} dev set samples...'.format(len(eval_dataset)))
for batch in dataloader:
input_ids = batch['input_ids'].to(device)
attention_mask = batch['attention_mask'].to(device)
with torch.no_grad():
model_outputs = model(input_ids=input_ids, attention_mask=attention_mask)
logits_S = model_outputs['pooler_output']
logits_T = teacher_model(input_ids=input_ids, attention_mask=attention_mask)['pooler_output']
cpu_logits_S = logits_S.detach().cpu()
cpu_logits_T = logits_T.detach().cpu()
if STUDENT=='switch' and feedback==True:
counts = model_outputs['counts'].detach().cpu()
n_dropped = model_outputs['n_dropped']
route_prob = model_outputs['route_prob'].detach().cpu()
for i in range(len(cpu_logits_S)):
student_logits.append(cpu_logits_S[i].numpy())
teacher_logits.append(cpu_logits_T[i].numpy())
if STUDENT=='switch' and feedback==True:
for i in range(len(counts)):
batch_counts.append(counts[i].numpy())
batch_n_dropped.append(n_dropped[i])
batch_route_prob.append(route_prob[i].numpy())
model.train()
student_logits = np.array(student_logits)
teacher_logits = np.array(teacher_logits)
    if eval_metric == 'cosine_similarity':
        similarities = np.diag(cosine_similarity(student_logits, teacher_logits))
        score = np.mean(similarities)
        print("Average cosine similarity for these samples: ", score)
    elif eval_metric == 'mse':
        # fix: `similarities` was undefined on this branch, so the old
        # return crashed; report the MSE instead
        score = mean_squared_error(student_logits, teacher_logits)
        print("Average mean squared error for these samples: ", score)
    if STUDENT == 'switch' and feedback:
        switch_counts = np.array(batch_counts)
        switch_n_dropped = np.array(batch_n_dropped)
        switch_route_prob = np.array(batch_route_prob)
        print('SWITCH BEHAVIOUR:')
        print('Counts Shape: \n', switch_counts.shape)
        print('Counts: \n', switch_counts)
        print('N_dropped: \n', switch_n_dropped)
        print('Route Prob: \n', switch_route_prob)
    return torch.Tensor([score])
# generates random parameters for hyperparam tuning
def generate_random_params(params):
# input: params dictionary containing lists of possible values
chosen_params = {}
for param in params:
chosen_params[param] = choice(params[param])
return chosen_params
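def _demo_generate_random_params():
    """Sketch (not part of the original file): sampling one hyperparameter
    configuration from lists of candidate values (all values hypothetical)."""
    search_space = {'d_model': [256, 384, 512], 'n_experts': [4, 8]}
    chosen = generate_random_params(search_space)
    assert chosen['d_model'] in search_space['d_model']
    assert chosen['n_experts'] in search_space['n_experts']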
def word_embedding_compression(word_embedding_module, d_model):
"""
Compresses a given word_embedding_module (type torch.Embedding) into a module of d_model dimensionality.
"""
word_embedding_matrix = word_embedding_module.weight
assert word_embedding_matrix.shape[1]>=d_model, 'The desired word embedding dimensionality is greater than the teacher word embeddings. That is not compression! Make d_model smaller.'
# return the module if it's the same dimensionality
if word_embedding_matrix.shape[1]==d_model:
return word_embedding_module
# else compress
pca = PCA(n_components = d_model)
compressed_word_embedding_matrix = pca.fit_transform(word_embedding_matrix.detach().cpu().numpy())
compressed_word_embedding_matrix = torch.from_numpy(compressed_word_embedding_matrix)
word_embedding_module.weight = torch.nn.parameter.Parameter(compressed_word_embedding_matrix)
return word_embedding_module
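def _demo_word_embedding_compression():
    """Sketch (not part of the original file): PCA-compressing a small
    embedding table from 32 to 8 dimensions; all sizes are hypothetical."""
    embedding = nn.Embedding(num_embeddings=100, embedding_dim=32)
    compressed = word_embedding_compression(embedding, d_model=8)
    assert compressed.weight.shape == (100, 8)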
| 44.368984
| 187
| 0.693142
|
import torch
from labml_nn.transformers.switch import SwitchTransformer, SwitchTransformerLayer, SwitchFeedForward
from labml_nn.transformers import MultiHeadAttention
from labml_nn.transformers.feed_forward import FeedForward
import numpy as np
from transformers import AutoConfig, AutoModel
import torch.nn as nn
import math
from torch.utils.data import Dataset, DataLoader
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.metrics import mean_squared_error
from random import choice
from sklearn.decomposition import PCA
from copy import deepcopy
from transformers import BertModel, BertConfig
class CustomDataset(torch.utils.data.Dataset):
def __init__(self, input_ids, token_type_ids, attention_masks):
self.input_ids = input_ids
self.token_type_ids = token_type_ids
self.attention_masks = attention_masks
def __len__(self):
return len(self.input_ids)
def __getitem__(self, index):
input_id = self.input_ids[index]
token_type_ID = self.token_type_ids[index]
attention_mask = self.attention_masks[index]
sample = {'input_ids':input_id, 'token_type_ids':token_type_ID , 'attention_mask':attention_mask}
return sample
def weights_init(tensor: torch.Tensor):
if isinstance(tensor, nn.Linear):
switch_init(tensor.weight.data)
torch.nn.init.zeros_(tensor.bias.data)
if isinstance(tensor, nn.LayerNorm):
torch.nn.init.zeros_(tensor.weight.data)
torch.nn.init.zeros_(tensor.bias.data)
def switch_init(tensor: torch.Tensor, s: float = 0.1, mean: float=0) -> torch.Tensor:
fan_in, fan_out = torch.nn.init._calculate_fan_in_and_fan_out(tensor)
std = math.sqrt(s/fan_in)
return torch.nn.init.trunc_normal_(tensor=tensor, mean=mean, std=std)
class LaBSE_Switch(nn.Module):
def __init__(self, config, word_embeddings_module):
super().__init__()
self.switch_model = SwitchTransformer(
SwitchTransformerLayer(
d_model=config['d_model'],
attn=MultiHeadAttention(config['heads'], config['d_model'], config['dropout']),
feed_forward=SwitchFeedForward(
capacity_factor=config['capacity_factor'],
drop_tokens=config['drop_tokens'],
is_scale_prob=config['is_scale_prob'],
n_experts=config['n_experts'],
expert=FeedForward(config['d_model'], config['d_ff'], config['dropout_ffn']),
d_model=config['d_model']),
dropout_prob=config['dropout']),
config['n_layers'],
d_out = int(768),
dropout_prob = config['dropout'])
self.word_embeddings = word_embeddings_module
def weight_init_from_teacher(self, teacher_model, int_matches):
self.switch_model.layers[int_matches[1]].attn.query.linear.weight = teacher_model.encoder.layer[int_matches[0]].attention.self.query.weight
self.switch_model.layers[int_matches[1]].attn.query.linear.bias = teacher_model.encoder.layer[int_matches[0]].attention.self.query.bias
self.switch_model.layers[int_matches[1]].attn.key.linear.weight = teacher_model.encoder.layer[int_matches[0]].attention.self.key.weight
self.switch_model.layers[int_matches[1]].attn.key.linear.bias = teacher_model.encoder.layer[int_matches[0]].attention.self.key.bias
self.switch_model.layers[int_matches[1]].attn.value.linear.weight = teacher_model.encoder.layer[int_matches[0]].attention.self.value.weight
self.switch_model.layers[int_matches[1]].attn.value.linear.bias = teacher_model.encoder.layer[int_matches[0]].attention.self.value.bias
self.switch_model.layers[int_matches[1]].attn.output.weight = teacher_model.encoder.layer[int_matches[0]].attention.output.dense.weight
self.switch_model.layers[int_matches[1]].attn.output.bias = teacher_model.encoder.layer[int_matches[0]].attention.output.dense.bias
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
# word embeddings of shape [batch, seq_len, d_model]
input_embeddings = self.word_embeddings(input_ids)
# model input on shape [seq_len, batch, d_model] and mask
_batch,_seq_len,_n_hid = input_embeddings.shape
#print(_n_hid)
# call switch transformer
outputs = self.switch_model(torch.reshape(input_embeddings, (_seq_len, _batch, _n_hid)),
attention_mask=None)
return outputs
# function to blackbox load the student for distillation - can be switch or bert based
def load_student(name, student_config, device, teacher_model, int_matches, N_LAYERS):
if name!='switch':
# for pretrained bert models - setup config
student_config = BertConfig.from_pretrained(name)
student_config.num_hidden_layers = N_LAYERS
student_config.output_hidden_states = True
student_config.output_attentions = True
student_config.use_cache = True
student_config.is_decoder = True
# load model and set input embeddings
student_model = BertModel.from_pretrained(name, config=student_config)
student_model.set_input_embeddings(teacher_model.get_input_embeddings())
student_model = student_model.float()
student_model.to(device=device)
return student_model
if name=='switch':
# create compressed word embeddings from those of the teacher
word_embeddings = deepcopy(teacher_model.get_input_embeddings())
compressed_word_embeddings = word_embedding_compression(word_embeddings, student_config['d_model'])
# create student model
student_model = LaBSE_Switch(config=student_config, word_embeddings_module=compressed_word_embeddings)
# initialise weights
student_model.switch_model.apply(weights_init)
student_model.weight_init_from_teacher(teacher_model=teacher_model, int_matches=int_matches)
# convert model to float32 and move to device
student_model = student_model.float()
student_model.to(device=device)
return student_model
# loads teacher model from Huggingface
def load_teacher(device):
teacher_config = AutoConfig.from_pretrained('sentence-transformers/LaBSE')
teacher_config.output_hidden_states = True
teacher_config.output_attentions = True
teacher_config.use_cache = True
teacher_config.is_decoder = True
teacher_model = AutoModel.from_pretrained('sentence-transformers/LaBSE', config=teacher_config)
teacher_model.float() # needs to be 32 bit precision to get decent results from distillation
teacher_model.to(device=device)
return teacher_model
# Adaptor for BERT based models
def simple_adaptor(batch, model_outputs):
# values need to be reformatted from Huggingface 'past_key_values' output
values = []
for i in model_outputs['past_key_values']:
values.append(i[1])
values = torch.stack(values)
attentions = []
for j in model_outputs['attentions']:
attentions.append(inv_softmax(j))
attentions = torch.stack(attentions)
# we use pooler output as logits
return {'logits': model_outputs['pooler_output'],
'hidden': model_outputs['hidden_states'],
#'attention': model_outputs['attentions'],
'attention':attentions,
'inputs_mask': batch['attention_mask'],
'value_relation': values,
'pooler_output':model_outputs['pooler_output']}
def inv_softmax(x,C=-50):
# reverses softmax operation - used in teacher_adaptor
# C variable sets the min value of the scores, -50 works well.
result = torch.log(x)
result = torch.where(result <= float('-inf'), torch.full_like(result,C), result)
return result
def teacher_adaptor(batch, model_outputs):
# selects relevant model and batch outputs used for distillation loss calculation
values = []
for i in model_outputs['past_key_values']:
values.append(i[1])
values = torch.stack(values)
attentions = []
for j in model_outputs['attentions']:
attentions.append(inv_softmax(j))
attentions = torch.stack(attentions)
# print(model_outputs['pooler_output'].requires_grad)
return {#'logits': model_outputs['last_hidden_state'],
'logits':model_outputs['pooler_output'],
'hidden': model_outputs['hidden_states'],
#'attention': model_outputs['attentions'],
'attention': attentions,
'inputs_mask': batch['attention_mask'],
'value_relation': values,
'pooler_output':model_outputs['pooler_output']}
# adaptor for switch model
def switch_student_adaptor(batch, model_outputs):
# selects relevant model and batch outputs and reformats them
# needs to have same shapes as teacher adaptor
    # reformat attention; use seq_len instead of shadowing the builtin `len`
    layers, seq_len, _, batch_size, heads = model_outputs['attention'].shape
    attention = model_outputs['attention'].reshape(layers, batch_size, heads, seq_len, seq_len)
    # reformat logits
    seq_len, batch_size, d_model = model_outputs['logits'].shape
    logits = model_outputs['logits'].reshape(batch_size, seq_len, d_model)
    # print(model_outputs['pooler_output'].requires_grad)
    # reformat values
    layers, seq_len, batch_size, heads, embedding_per_head = model_outputs['values'].shape
    values = model_outputs['values'].reshape(layers, batch_size, heads, seq_len, embedding_per_head)
return {#'logits': logits,
'logits':model_outputs['pooler_output'],
'counts': model_outputs['counts'],
'attention': attention,
'inputs_mask': batch['attention_mask'],
'route_prob': model_outputs['route_prob'],
'n_dropped': model_outputs['n_dropped'],
'value_relation': values}
# Predict function evaluates model every epoch to show training progress
def predict(model, teacher_model, eval_dataset, step, device, STUDENT, BATCH_SIZE, eval_metric='cosine_similarity', feedback=True):
model.eval()
student_logits = []
teacher_logits =[]
batch_counts = []
batch_n_dropped = []
batch_route_prob = []
dataloader = DataLoader(eval_dataset,batch_size=BATCH_SIZE)
print('Running callback function on {} dev set samples...'.format(len(eval_dataset)))
for batch in dataloader:
input_ids = batch['input_ids'].to(device)
attention_mask = batch['attention_mask'].to(device)
with torch.no_grad():
model_outputs = model(input_ids=input_ids, attention_mask=attention_mask)
logits_S = model_outputs['pooler_output']
logits_T = teacher_model(input_ids=input_ids, attention_mask=attention_mask)['pooler_output']
cpu_logits_S = logits_S.detach().cpu()
cpu_logits_T = logits_T.detach().cpu()
if STUDENT=='switch' and feedback==True:
counts = model_outputs['counts'].detach().cpu()
n_dropped = model_outputs['n_dropped']
route_prob = model_outputs['route_prob'].detach().cpu()
for i in range(len(cpu_logits_S)):
student_logits.append(cpu_logits_S[i].numpy())
teacher_logits.append(cpu_logits_T[i].numpy())
if STUDENT=='switch' and feedback==True:
for i in range(len(counts)):
batch_counts.append(counts[i].numpy())
batch_n_dropped.append(n_dropped[i])
batch_route_prob.append(route_prob[i].numpy())
model.train()
student_logits = np.array(student_logits)
teacher_logits = np.array(teacher_logits)
    if eval_metric == 'cosine_similarity':
        similarities = np.diag(cosine_similarity(student_logits, teacher_logits))
        score = np.mean(similarities)
        print("Average cosine similarity for these samples: ", score)
    elif eval_metric == 'mse':
        score = mean_squared_error(student_logits, teacher_logits)
        print("Average mean squared error for these samples: ", score)
    if STUDENT == 'switch' and feedback:
        switch_counts = np.array(batch_counts)
        switch_n_dropped = np.array(batch_n_dropped)
        switch_route_prob = np.array(batch_route_prob)
        print('SWITCH BEHAVIOUR:')
        print('Counts Shape: \n', switch_counts.shape)
        print('Counts: \n', switch_counts)
        print('N_dropped: \n', switch_n_dropped)
        print('Route Prob: \n', switch_route_prob)
    return torch.Tensor([score])
# generates random parameters for hyperparam tuning
def generate_random_params(params):
# input: params dictionary containing lists of possible values
chosen_params = {}
for param in params:
chosen_params[param] = choice(params[param])
return chosen_params
def word_embedding_compression(word_embedding_module, d_model):
word_embedding_matrix = word_embedding_module.weight
assert word_embedding_matrix.shape[1]>=d_model, 'The desired word embedding dimensionality is greater than the teacher word embeddings. That is not compression! Make d_model smaller.'
# return the module if it's the same dimensionality
if word_embedding_matrix.shape[1]==d_model:
return word_embedding_module
pca = PCA(n_components = d_model)
compressed_word_embedding_matrix = pca.fit_transform(word_embedding_matrix.detach().cpu().numpy())
compressed_word_embedding_matrix = torch.from_numpy(compressed_word_embedding_matrix)
word_embedding_module.weight = torch.nn.parameter.Parameter(compressed_word_embedding_matrix)
return word_embedding_module
| true
| true
|
f7022a7f024c96d6c75b7c897d5507df75a22e08
| 647
|
py
|
Python
|
utilities/video_2_jpg.py
|
VEDANTGHODKE/Ikshana-Assistant-For-Specially-Visioned
|
64958296702b01e47bd2a26316b30b5e28d9b9ae
|
[
"BSD-2-Clause"
] | 2
|
2020-10-19T09:34:09.000Z
|
2020-10-19T12:09:09.000Z
|
utilities/video_2_jpg.py
|
VEDANTGHODKE/Ikshana-Assistant-For-Specially-Visioned
|
64958296702b01e47bd2a26316b30b5e28d9b9ae
|
[
"BSD-2-Clause"
] | null | null | null |
utilities/video_2_jpg.py
|
VEDANTGHODKE/Ikshana-Assistant-For-Specially-Visioned
|
64958296702b01e47bd2a26316b30b5e28d9b9ae
|
[
"BSD-2-Clause"
] | null | null | null |
import cv2
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-p2', "--path", help="path to input video")
parser.add_argument('-p1', "--spath", help="path where the images should be stored")
parser.add_argument('-n', "--num", help="number from which image label naming starts", type=int)
args = parser.parse_args()
num = args.num
cap = cv2.VideoCapture(args.path)
count = 0
path = args.spath
print(args.num, args.path, args.spath)
ret = True
while ret:
    ret, frame = cap.read()
    if not ret:
        # the final read fails with frame=None; stop before writing it
        break
    count += 1
    if count % 10 == 0:
        cv2.imwrite(path + str(num) + '.jpg', frame)
        print(path + str(num) + '.jpg')
        num += 1
cap.release()
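# Hypothetical invocation (not part of the original file); the file names are
# illustrative. Extracts every 10th frame, labelling images from 0 upwards:
#   python utilities/video_2_jpg.py --path in.mp4 --spath frames/ --num 0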
| 28.130435
| 96
| 0.658423
|
import cv2
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-p2', "--path", help="path to input video")
parser.add_argument('-p1', "--spath", help="path where the images should be stored")
parser.add_argument('-n', "--num", help="number from which image label naming starts", type=int)
args = parser.parse_args()
num = args.num
cap = cv2.VideoCapture(args.path)
count = 0
path = args.spath
print(args.num, args.path, args.spath)
ret = True
while ret:
    ret, frame = cap.read()
    if not ret:
        break
    count += 1
    if count % 10 == 0:
        cv2.imwrite(path + str(num) + '.jpg', frame)
        print(path + str(num) + '.jpg')
        num += 1
cap.release()
| true
| true
|
f7022b106191f7e769f494a9e9e6e19c38892823
| 1,472
|
py
|
Python
|
qnarre/doc/justifier.py
|
quantapix/qnarre.com
|
f51d5945c20ef8182c4aa11f1b407d064c190c70
|
[
"MIT"
] | null | null | null |
qnarre/doc/justifier.py
|
quantapix/qnarre.com
|
f51d5945c20ef8182c4aa11f1b407d064c190c70
|
[
"MIT"
] | null | null | null |
qnarre/doc/justifier.py
|
quantapix/qnarre.com
|
f51d5945c20ef8182c4aa11f1b407d064c190c70
|
[
"MIT"
] | null | null | null |
# Copyright 2019 Quantapix Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
class Justifier:
def __init__(self, **kw):
super().__init__(**kw)
self.justs = [0] * 9
self.offsets = [(0, 0, 0, 1, 1, 1, 1, 1, 1),
(0, -1, -2, 0, 0, 0, 1, 1, 1),
(0, -1, -2, 0, -1, -2, 0, 0, 0)]
def init_justs(self, justs):
for i in justs:
i = i // 3
os = self.offsets[i]
if os:
self.justs = [sum(x) for x in zip(self.justs, os)]
self.offsets[i] = None
def calc_just(self, justs):
for i in justs:
i = self.justs[i] + (i % 3)
if i == 1:
return 'justify-content-center'
elif i > 1:
return 'justify-content-end'
return 'justify-content-start'
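# Sketch (not part of the original file): one hypothetical use of Justifier.
# init_justs([3]) applies offset row 1 (3 // 3 == 1), giving
# justs = [0, -1, -2, 0, 0, 0, 1, 1, 1]; calc_just([6]) then evaluates
# justs[6] + 6 % 3 == 1, which selects the centred class.
def _demo_justifier():
    j = Justifier()
    j.init_justs([3])
    assert j.calc_just([6]) == 'justify-content-center'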
| 35.902439
| 79
| 0.536005
|
class Justifier:
def __init__(self, **kw):
super().__init__(**kw)
self.justs = [0] * 9
self.offsets = [(0, 0, 0, 1, 1, 1, 1, 1, 1),
(0, -1, -2, 0, 0, 0, 1, 1, 1),
(0, -1, -2, 0, -1, -2, 0, 0, 0)]
def init_justs(self, justs):
for i in justs:
i = i // 3
os = self.offsets[i]
if os:
self.justs = [sum(x) for x in zip(self.justs, os)]
self.offsets[i] = None
def calc_just(self, justs):
for i in justs:
i = self.justs[i] + (i % 3)
if i == 1:
return 'justify-content-center'
elif i > 1:
return 'justify-content-end'
return 'justify-content-start'
| true
| true
|
f7022b72854d8995d4771a29eac9e0ad3b7e4567
| 5,213
|
py
|
Python
|
examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_logging.py
|
StanfordVL/bullet3_ik
|
52da668d60b32bfe6eea96d3ef3b9d442b2b8926
|
[
"Zlib"
] | 1
|
2019-08-24T09:06:26.000Z
|
2019-08-24T09:06:26.000Z
|
examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_logging.py
|
StanfordVL/bullet3_ik
|
52da668d60b32bfe6eea96d3ef3b9d442b2b8926
|
[
"Zlib"
] | null | null | null |
examples/pybullet/gym/pybullet_envs/minitaur/envs/minitaur_logging.py
|
StanfordVL/bullet3_ik
|
52da668d60b32bfe6eea96d3ef3b9d442b2b8926
|
[
"Zlib"
] | null | null | null |
"""A proto buffer based logging system for minitaur experiments.
The logging system records the time since reset, base position, orientation,
angular velocity and motor information (joint angle, speed, and torque) into a
proto buffer. See minitaur_logging.proto for more details. The episode_proto is
updated per time step by the environment and saved onto disk for each episode.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
import time
import tensorflow as tf
import minitaur_logging_pb2
NUM_MOTORS = 8
def _update_base_state(base_state, values):
base_state.x = values[0]
base_state.y = values[1]
base_state.z = values[2]
def preallocate_episode_proto(episode_proto, max_num_steps):
"""Preallocate the memory for proto buffer.
Dynamically allocating memory as the protobuf expands causes unexpected delay
that is not tolerable with locomotion control.
Args:
episode_proto: The proto that holds the state/action data for the current
episode.
max_num_steps: The max number of steps that will be recorded in the proto.
The state/data over max_num_steps will not be stored in the proto.
"""
for _ in range(max_num_steps):
step_log = episode_proto.state_action.add()
step_log.info_valid = False
step_log.time.seconds = 0
step_log.time.nanos = 0
for _ in range(NUM_MOTORS):
motor_state = step_log.motor_states.add()
motor_state.angle = 0
motor_state.velocity = 0
motor_state.torque = 0
motor_state.action = 0
_update_base_state(step_log.base_position, [0, 0, 0])
_update_base_state(step_log.base_orientation, [0, 0, 0])
_update_base_state(step_log.base_angular_vel, [0, 0, 0])
def update_episode_proto(episode_proto, minitaur, action, step):
"""Update the episode proto by appending the states/action of the minitaur.
Note that the state/data over max_num_steps preallocated
(len(episode_proto.state_action)) will not be stored in the proto.
Args:
episode_proto: The proto that holds the state/action data for the current
episode.
minitaur: The minitaur instance. See envs.minitaur for details.
action: The action applied at this time step. The action is an 8-element
numpy floating-point array.
step: The current step index.
"""
max_num_steps = len(episode_proto.state_action)
if step >= max_num_steps:
tf.logging.warning(
"{}th step is not recorded in the logging since only {} steps were "
"pre-allocated.".format(step, max_num_steps))
return
step_log = episode_proto.state_action[step]
step_log.info_valid = minitaur.IsObservationValid()
time_in_seconds = minitaur.GetTimeSinceReset()
step_log.time.seconds = int(time_in_seconds)
step_log.time.nanos = int((time_in_seconds - int(time_in_seconds)) * 1e9)
motor_angles = minitaur.GetMotorAngles()
motor_velocities = minitaur.GetMotorVelocities()
motor_torques = minitaur.GetMotorTorques()
for i in range(minitaur.num_motors):
step_log.motor_states[i].angle = motor_angles[i]
step_log.motor_states[i].velocity = motor_velocities[i]
step_log.motor_states[i].torque = motor_torques[i]
step_log.motor_states[i].action = action[i]
_update_base_state(step_log.base_position, minitaur.GetBasePosition())
_update_base_state(step_log.base_orientation, minitaur.GetBaseRollPitchYaw())
_update_base_state(step_log.base_angular_vel,
minitaur.GetBaseRollPitchYawRate())
class MinitaurLogging(object):
"""A logging system that records the states/action of the minitaur."""
def __init__(self, log_path=None):
self._log_path = log_path
# TODO(jietan): Consider using recordio to write the logs.
def save_episode(self, episode_proto):
"""Save episode_proto to self._log_path.
self._log_path is the directory name. A time stamp is the file name of the
log file. For example, when self._log_path is "/tmp/logs/", the actual
log file would be "/tmp/logs/yyyy-mm-dd-hh:mm:ss".
Args:
episode_proto: The proto that holds the states/action for the current
        episode that needs to be saved to disk.
Returns:
The full log path, including the directory name and the file name.
"""
if not self._log_path or not episode_proto.state_action:
return self._log_path
if not tf.gfile.Exists(self._log_path):
tf.gfile.MakeDirs(self._log_path)
ts = time.time()
time_stamp = datetime.datetime.fromtimestamp(ts).strftime(
"%Y-%m-%d-%H:%M:%S")
log_path = os.path.join(self._log_path,
"minitaur_log_{}".format(time_stamp))
with tf.gfile.Open(log_path, "w") as f:
f.write(episode_proto.SerializeToString())
return log_path
def restore_episode(self, log_path):
"""Restore the episodic proto from the log path.
Args:
log_path: The full path of the log file.
Returns:
The minitaur episode proto.
"""
with tf.gfile.Open(log_path) as f:
content = f.read()
episode_proto = minitaur_logging_pb2.MinitaurEpisode()
episode_proto.ParseFromString(content)
return episode_proto
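# Hedged usage sketch (not part of the original module): how the helpers above
# compose into a save/restore round trip. The `minitaur` object and `actions`
# sequence are assumed to come from the surrounding environment code.
def _example_logging_round_trip(minitaur, actions, log_dir="/tmp/minitaur_logs"):
  episode_proto = minitaur_logging_pb2.MinitaurEpisode()
  preallocate_episode_proto(episode_proto, max_num_steps=len(actions))
  for step, action in enumerate(actions):
    update_episode_proto(episode_proto, minitaur, action, step)
  logger = MinitaurLogging(log_path=log_dir)
  saved_path = logger.save_episode(episode_proto)
  return logger.restore_episode(saved_path)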
| 36.454545
| 79
| 0.730865
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
import time
import tensorflow as tf
import minitaur_logging_pb2
NUM_MOTORS = 8
def _update_base_state(base_state, values):
base_state.x = values[0]
base_state.y = values[1]
base_state.z = values[2]
def preallocate_episode_proto(episode_proto, max_num_steps):
for _ in range(max_num_steps):
step_log = episode_proto.state_action.add()
step_log.info_valid = False
step_log.time.seconds = 0
step_log.time.nanos = 0
for _ in range(NUM_MOTORS):
motor_state = step_log.motor_states.add()
motor_state.angle = 0
motor_state.velocity = 0
motor_state.torque = 0
motor_state.action = 0
_update_base_state(step_log.base_position, [0, 0, 0])
_update_base_state(step_log.base_orientation, [0, 0, 0])
_update_base_state(step_log.base_angular_vel, [0, 0, 0])
def update_episode_proto(episode_proto, minitaur, action, step):
max_num_steps = len(episode_proto.state_action)
if step >= max_num_steps:
tf.logging.warning(
"{}th step is not recorded in the logging since only {} steps were "
"pre-allocated.".format(step, max_num_steps))
return
step_log = episode_proto.state_action[step]
step_log.info_valid = minitaur.IsObservationValid()
time_in_seconds = minitaur.GetTimeSinceReset()
step_log.time.seconds = int(time_in_seconds)
step_log.time.nanos = int((time_in_seconds - int(time_in_seconds)) * 1e9)
motor_angles = minitaur.GetMotorAngles()
motor_velocities = minitaur.GetMotorVelocities()
motor_torques = minitaur.GetMotorTorques()
for i in range(minitaur.num_motors):
step_log.motor_states[i].angle = motor_angles[i]
step_log.motor_states[i].velocity = motor_velocities[i]
step_log.motor_states[i].torque = motor_torques[i]
step_log.motor_states[i].action = action[i]
_update_base_state(step_log.base_position, minitaur.GetBasePosition())
_update_base_state(step_log.base_orientation, minitaur.GetBaseRollPitchYaw())
_update_base_state(step_log.base_angular_vel,
minitaur.GetBaseRollPitchYawRate())
class MinitaurLogging(object):
def __init__(self, log_path=None):
self._log_path = log_path
def save_episode(self, episode_proto):
if not self._log_path or not episode_proto.state_action:
return self._log_path
if not tf.gfile.Exists(self._log_path):
tf.gfile.MakeDirs(self._log_path)
ts = time.time()
time_stamp = datetime.datetime.fromtimestamp(ts).strftime(
"%Y-%m-%d-%H:%M:%S")
log_path = os.path.join(self._log_path,
"minitaur_log_{}".format(time_stamp))
with tf.gfile.Open(log_path, "w") as f:
f.write(episode_proto.SerializeToString())
return log_path
def restore_episode(self, log_path):
with tf.gfile.Open(log_path) as f:
content = f.read()
episode_proto = minitaur_logging_pb2.MinitaurEpisode()
episode_proto.ParseFromString(content)
return episode_proto
| true
| true
|
f7022bfcbb871ca0f803248d774267f56badc709
| 3,062
|
py
|
Python
|
tests/test_area.py
|
everypolitician/everypolitician-popolo-python
|
21670d64f78ac33a088d9adc68006c355f21aa35
|
[
"MIT"
] | 13
|
2017-02-27T16:06:47.000Z
|
2021-04-14T02:56:17.000Z
|
tests/test_area.py
|
everypolitician/everypolitician-popolo-python
|
21670d64f78ac33a088d9adc68006c355f21aa35
|
[
"MIT"
] | 17
|
2016-10-12T17:14:47.000Z
|
2017-02-10T14:24:08.000Z
|
tests/test_area.py
|
everypolitician/everypolitician-popolo-python
|
21670d64f78ac33a088d9adc68006c355f21aa35
|
[
"MIT"
] | 4
|
2016-10-20T16:20:31.000Z
|
2022-01-08T00:01:57.000Z
|
# -*- coding: utf-8 -*-
from unittest import TestCase
import six
from popolo_data.importer import Popolo
EXAMPLE_AREA = {
"id": "area/tartu_linn",
"identifiers": [
{
"identifier": "Q3032626",
"scheme": "wikidata"
}
],
"name": "Tartu linn",
"other_names": [
{
"lang": "fr",
"name": "Dixième circonscription législative d'Estonie",
"note": "multilingual"
},
{
"lang": "et",
"name": "Valimisringkond nr 10",
"note": "multilingual"
},
{
"lang": "en",
"name": "Electoral District 10 (Tartu)",
"note": "multilingual"
}
],
"type": "constituency"
}
class TestAreas(TestCase):
def test_empty_file_gives_no_areas(self):
popolo = Popolo({})
assert len(popolo.areas) == 0
def test_single_area_with_name(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
assert len(popolo.areas) == 1
area = popolo.areas[0]
assert area.name == 'Tartu linn'
def test_area_id(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.id == 'area/tartu_linn'
def test_area_type(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.type == 'constituency'
def test_area_identifiers(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.identifiers == [
{
"identifier": "Q3032626",
"scheme": "wikidata"
}
]
def test_area_other_names(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.other_names == [
{
"lang": "fr",
"name": "Dixième circonscription législative d'Estonie",
"note": "multilingual"
},
{
"lang": "et",
"name": "Valimisringkond nr 10",
"note": "multilingual"
},
{
"lang": "en",
"name": "Electoral District 10 (Tartu)",
"note": "multilingual"
}
]
def test_area_wikidata(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.wikidata == 'Q3032626'
def test_area_repr(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
if six.PY2:
assert repr(area) == b"<Area: Tartu linn>"
else:
assert repr(area) == u"<Area: Tartu linn>"
def test_area_identity_equality_and_inequality(self):
popolo_a = Popolo({"areas": [EXAMPLE_AREA]})
area_a = popolo_a.areas[0]
popolo_b = Popolo({"areas": [EXAMPLE_AREA]})
area_b = popolo_b.areas[0]
assert area_a == area_b
assert not (area_a != area_b)
| 27.097345
| 72
| 0.506532
|
from unittest import TestCase
import six
from popolo_data.importer import Popolo
EXAMPLE_AREA = {
"id": "area/tartu_linn",
"identifiers": [
{
"identifier": "Q3032626",
"scheme": "wikidata"
}
],
"name": "Tartu linn",
"other_names": [
{
"lang": "fr",
"name": "Dixième circonscription législative d'Estonie",
"note": "multilingual"
},
{
"lang": "et",
"name": "Valimisringkond nr 10",
"note": "multilingual"
},
{
"lang": "en",
"name": "Electoral District 10 (Tartu)",
"note": "multilingual"
}
],
"type": "constituency"
}
class TestAreas(TestCase):
def test_empty_file_gives_no_areas(self):
popolo = Popolo({})
assert len(popolo.areas) == 0
def test_single_area_with_name(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
assert len(popolo.areas) == 1
area = popolo.areas[0]
assert area.name == 'Tartu linn'
def test_area_id(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.id == 'area/tartu_linn'
def test_area_type(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.type == 'constituency'
def test_area_identifiers(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.identifiers == [
{
"identifier": "Q3032626",
"scheme": "wikidata"
}
]
def test_area_other_names(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.other_names == [
{
"lang": "fr",
"name": "Dixième circonscription législative d'Estonie",
"note": "multilingual"
},
{
"lang": "et",
"name": "Valimisringkond nr 10",
"note": "multilingual"
},
{
"lang": "en",
"name": "Electoral District 10 (Tartu)",
"note": "multilingual"
}
]
def test_area_wikidata(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
assert area.wikidata == 'Q3032626'
def test_area_repr(self):
popolo = Popolo({"areas": [EXAMPLE_AREA]})
area = popolo.areas[0]
if six.PY2:
assert repr(area) == b"<Area: Tartu linn>"
else:
assert repr(area) == u"<Area: Tartu linn>"
def test_area_identity_equality_and_inequality(self):
popolo_a = Popolo({"areas": [EXAMPLE_AREA]})
area_a = popolo_a.areas[0]
popolo_b = Popolo({"areas": [EXAMPLE_AREA]})
area_b = popolo_b.areas[0]
assert area_a == area_b
assert not (area_a != area_b)
| true
| true
|
f7022c4c7c673a12cef7e81ed9dc5253eeaa03e2
| 1,614
|
py
|
Python
|
openiPrototype/appUI/importProductData.py
|
OPENi-ict/ntua_demo
|
104118fbe1f54db35386ca96286317ceb64cb658
|
[
"Apache-2.0"
] | null | null | null |
openiPrototype/appUI/importProductData.py
|
OPENi-ict/ntua_demo
|
104118fbe1f54db35386ca96286317ceb64cb658
|
[
"Apache-2.0"
] | null | null | null |
openiPrototype/appUI/importProductData.py
|
OPENi-ict/ntua_demo
|
104118fbe1f54db35386ca96286317ceb64cb658
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'alvertisjo'
from django.core.serializers import json
import requests
from requests.packages.urllib3 import Timeout
from requests.packages.urllib3.exceptions import ConnectionError
class OpenProductData(object):
def getData(self):
# rowStep=100
# currentPage=0
# #####documentation: http://pod.opendatasoft.com/api/doc/#doc-datasets-search
# full_url='http://pod.opendatasoft.com/api/records/1.0/search' #http://pod.opendatasoft.com/api/records/1.0/search?dataset=pod_gtin&rows=10&start=11&facet=gpc_s_nm&facet=brand_nm&facet=owner_nm&facet=gln_nm&facet=prefix_nm
# dataset='pod_gtin'
# #print(full_url)
# try:
# response = requests.get(full_url, verify=False)
# #print response
# return response.json()
# except ConnectionError as e: # This is the correct syntax
# print "error: %s" %e
# return response.json()
# except Timeout as t: # This is the correct syntax
# print "Timeout error: %s" %t
# return json.dumps({"error":t.message})
# except:
# return json.dumps([])
pass
def readDataFromFile(self):
pass
def storeToGraph(self,data):
# POST http://localhost:7474/db/data/transaction/commit
# Accept: application/json; charset=UTF-8
# Content-Type: application/json
url= 'http://snf-561492.vm.okeanos.grnet.gr:7474/'
# {
# "statements" : [ {
# "statement" : "CREATE (n) RETURN id(n)"
# } ]
# }
pass
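# Hedged sketch (an assumption, not the project's implementation): committing a
# Cypher statement via Neo4j's transactional HTTP endpoint, following the
# commented request template in storeToGraph above. `requests` is already
# imported at the top of this module.
def _example_commit_cypher(base_url='http://snf-561492.vm.okeanos.grnet.gr:7474/'):
    payload = {"statements": [{"statement": "CREATE (n) RETURN id(n)"}]}
    headers = {"Accept": "application/json; charset=UTF-8",
               "Content-Type": "application/json"}
    return requests.post(base_url + 'db/data/transaction/commit',
                         json=payload, headers=headers)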
| 38.428571
| 231
| 0.607187
|
__author__ = 'alvertisjo'
from django.core.serializers import json
import requests
from requests.packages.urllib3 import Timeout
from requests.packages.urllib3.exceptions import ConnectionError
class OpenProductData(object):
def getData(self):
pass
def readDataFromFile(self):
pass
def storeToGraph(self,data):
url= 'http://snf-561492.vm.okeanos.grnet.gr:7474/'
pass
| true
| true
|
f7022e79616991d5da307aa7c60dcdfa477777ad
| 116
|
py
|
Python
|
pages/views.py
|
LombaAnderson/personalizar-user-django
|
b923587fd9e9cfc65224dd31f80621eb197a6098
|
[
"MIT"
] | null | null | null |
pages/views.py
|
LombaAnderson/personalizar-user-django
|
b923587fd9e9cfc65224dd31f80621eb197a6098
|
[
"MIT"
] | null | null | null |
pages/views.py
|
LombaAnderson/personalizar-user-django
|
b923587fd9e9cfc65224dd31f80621eb197a6098
|
[
"MIT"
] | null | null | null |
from django.views.generic import TemplateView
class HomePageView(TemplateView):
template_name = "home.html"
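# Hedged wiring sketch (belongs in a hypothetical urls.py, shown here only for
# context; the route path and name are assumptions):
#
#   from django.urls import path
#   from pages.views import HomePageView
#
#   urlpatterns = [
#       path("", HomePageView.as_view(), name="home"),
#   ]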
| 14.5
| 45
| 0.775862
|
from django.views.generic import TemplateView
class HomePageView(TemplateView):
template_name = "home.html"
| true
| true
|
f7022f7075bdd6537b307688382d872a3f7fd177
| 53
|
py
|
Python
|
Interfaces/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
Interfaces/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | null | null | null |
Interfaces/__init__.py
|
ahmadryan/TurbAn
|
b8866d103a2ca2f5fbad73bcd4416f19299f22b2
|
[
"BSD-2-Clause-Patent"
] | 10
|
2019-03-22T15:30:12.000Z
|
2021-02-10T02:55:50.000Z
|
from . import Simulations
from . import Spacecraft
| 17.666667
| 26
| 0.773585
|
from . import Simulations
from . import Spacecraft
| true
| true
|
f7022ff2149a27035982e6b84787ca961a0ed00e
| 2,752
|
py
|
Python
|
bin/apps/py/lib/hhslib/tests/test_hhslib.py
|
yorevs/homesetup
|
6e77fd0bffbda3a44d4daf95c974b8d2c744eb5d
|
[
"MIT"
] | 34
|
2018-08-29T11:07:41.000Z
|
2022-03-31T12:18:39.000Z
|
bin/apps/py/lib/hhslib/tests/test_hhslib.py
|
yorevs/homesetup
|
6e77fd0bffbda3a44d4daf95c974b8d2c744eb5d
|
[
"MIT"
] | 22
|
2020-01-11T01:04:04.000Z
|
2020-03-05T17:27:12.000Z
|
bin/apps/py/lib/hhslib/tests/test_hhslib.py
|
yorevs/homesetup
|
6e77fd0bffbda3a44d4daf95c974b8d2c744eb5d
|
[
"MIT"
] | 4
|
2020-02-28T03:35:32.000Z
|
2020-06-05T15:09:55.000Z
|
"""
@package: jsonutils
@script: test_JsonUtils.py
@purpose: Test Suite for JsonUtils.
@created: Aug 26, 2017
@author: <B>H</B>ugo <B>S</B>aporetti <B>J</B>unior
@mailto: yorevs@hotmail.com
@site: https://github.com/yorevs/homesetup
@license: Please refer to <https://opensource.org/licenses/MIT>
"""
import os
import unittest
from hhslib.security import *
PASSPHRASE = '12345'
SAMPLE_IN_FILE_NAME = "resources/secret.in"
SAMPLE_OUT_FILE_NAME = "resources/secret.out"
OUT_FILE = "resources/outfile.out"
OUT_FILE_GPG = "resources/outfile.out.gpg"
ORIGINAL_FILE_CONTENTS = "HomeSetup Secrets"
ENCODED_FILE_CONTENTS = "SG9tZVNldHVwIFNlY3JldHM="
class TestHhsLib(unittest.TestCase):
# Setup tests
def setUp(self):
with open(SAMPLE_IN_FILE_NAME, 'w') as f_in:
f_in.write(ORIGINAL_FILE_CONTENTS)
with open(SAMPLE_OUT_FILE_NAME, 'w') as f_in:
f_in.write(ENCODED_FILE_CONTENTS)
# Teardown tests
def tearDown(self):
if os.path.exists(OUT_FILE):
os.remove(OUT_FILE)
if os.path.exists(OUT_FILE_GPG):
os.remove(OUT_FILE_GPG)
# TEST CASES ----------
# TC1 - Test encoding a file.
def test_should_encode_file(self):
with open(SAMPLE_IN_FILE_NAME, 'r') as f_in:
contents = str(f_in.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
encode(SAMPLE_IN_FILE_NAME, OUT_FILE)
with open(OUT_FILE, 'r') as f_out:
contents = str(f_out.read().strip())
self.assertEquals(ENCODED_FILE_CONTENTS, contents)
# TC2 - Test decoding a file.
def test_should_decode_file(self):
with open(SAMPLE_OUT_FILE_NAME, 'r') as f_in:
contents = str(f_in.read().strip())
self.assertEquals(ENCODED_FILE_CONTENTS, contents)
decode(SAMPLE_OUT_FILE_NAME, OUT_FILE)
with open(OUT_FILE, 'r') as f_out:
contents = str(f_out.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
# TC3 - Test encrypting a file.
def test_should_encrypt_decrypt_file(self):
with open(SAMPLE_IN_FILE_NAME, 'r') as f_in:
contents = str(f_in.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
encrypt(SAMPLE_IN_FILE_NAME, OUT_FILE_GPG, PASSPHRASE)
decrypt(OUT_FILE_GPG, OUT_FILE, PASSPHRASE)
with open(OUT_FILE, 'r') as f_out:
contents = str(f_out.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
# Program entry point.
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestHhsLib)
unittest.TextTestRunner(verbosity=2).run(suite)
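# Hedged aside (an assumption, not asserted by the suite itself): the fixture
# pair above implies hhslib's encode/decode are plain base64. Standalone check:
#
#   import base64
#   assert base64.b64encode(b"HomeSetup Secrets") == b"SG9tZVNldHVwIFNlY3JldHM="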
| 33.560976
| 67
| 0.667515
|
import os
import unittest
from hhslib.security import *
PASSPHRASE = '12345'
SAMPLE_IN_FILE_NAME = "resources/secret.in"
SAMPLE_OUT_FILE_NAME = "resources/secret.out"
OUT_FILE = "resources/outfile.out"
OUT_FILE_GPG = "resources/outfile.out.gpg"
ORIGINAL_FILE_CONTENTS = "HomeSetup Secrets"
ENCODED_FILE_CONTENTS = "SG9tZVNldHVwIFNlY3JldHM="
class TestHhsLib(unittest.TestCase):
def setUp(self):
with open(SAMPLE_IN_FILE_NAME, 'w') as f_in:
f_in.write(ORIGINAL_FILE_CONTENTS)
with open(SAMPLE_OUT_FILE_NAME, 'w') as f_in:
f_in.write(ENCODED_FILE_CONTENTS)
def tearDown(self):
if os.path.exists(OUT_FILE):
os.remove(OUT_FILE)
if os.path.exists(OUT_FILE_GPG):
os.remove(OUT_FILE_GPG)
def test_should_encode_file(self):
with open(SAMPLE_IN_FILE_NAME, 'r') as f_in:
contents = str(f_in.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
encode(SAMPLE_IN_FILE_NAME, OUT_FILE)
with open(OUT_FILE, 'r') as f_out:
contents = str(f_out.read().strip())
self.assertEquals(ENCODED_FILE_CONTENTS, contents)
def test_should_decode_file(self):
with open(SAMPLE_OUT_FILE_NAME, 'r') as f_in:
contents = str(f_in.read().strip())
self.assertEquals(ENCODED_FILE_CONTENTS, contents)
decode(SAMPLE_OUT_FILE_NAME, OUT_FILE)
with open(OUT_FILE, 'r') as f_out:
contents = str(f_out.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
def test_should_encrypt_decrypt_file(self):
with open(SAMPLE_IN_FILE_NAME, 'r') as f_in:
contents = str(f_in.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
encrypt(SAMPLE_IN_FILE_NAME, OUT_FILE_GPG, PASSPHRASE)
decrypt(OUT_FILE_GPG, OUT_FILE, PASSPHRASE)
with open(OUT_FILE, 'r') as f_out:
contents = str(f_out.read().strip())
self.assertEquals(ORIGINAL_FILE_CONTENTS, contents)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestHhsLib)
unittest.TextTestRunner(verbosity=2).run(suite)
| true
| true
|
f702305f7ba2cf5b92dd37e75253366482adb81d
| 19,015
|
py
|
Python
|
GUI_app.py
|
Howardlinsanity/CIS-433-Group-Project-W18
|
f821957e498a8062b4d9c90e0cef369851ae236e
|
[
"BSD-3-Clause"
] | null | null | null |
GUI_app.py
|
Howardlinsanity/CIS-433-Group-Project-W18
|
f821957e498a8062b4d9c90e0cef369851ae236e
|
[
"BSD-3-Clause"
] | 2
|
2021-03-31T18:28:55.000Z
|
2021-12-13T19:45:24.000Z
|
GUI_app.py
|
Howardlinsanity/CIS-433-Group-Project-W18
|
f821957e498a8062b4d9c90e0cef369851ae236e
|
[
"BSD-3-Clause"
] | null | null | null |
import threading
import requests
# encryption
import Encrypt
import unicodedata
from ttk import Style, Button, Label, Entry, Progressbar, Checkbutton
from Tkinter import Tk, Frame, RIGHT, BOTH, RAISED
from Tkinter import TOP, X, N, LEFT
from Tkinter import END, Listbox, MULTIPLE
from Tkinter import Toplevel, DISABLED
from Tkinter import StringVar, Scrollbar
from multiprocessing import Queue
from random import choice, randint
from fbchat import log, client
from fbchat.graphql import *
# Wrapper for the client class just in case we need to modify client to make it work
class GuiClient(client.Client):
def __init__(self, email, password, user_agent=None, max_tries=5, session_cookies=None, logging_level=logging.INFO):
"""
Initializes and logs in the client
:param email: Facebook `email`, `id` or `phone number`
:param password: Facebook account password
:param user_agent: Custom user agent to use when sending requests. If `None`, user agent will be chosen from a premade list (see :any:`utils.USER_AGENTS`)
:param max_tries: Maximum number of times to try logging in
:param session_cookies: Cookies from a previous session (Will default to login if these are invalid)
:param logging_level: Configures the `logging level <https://docs.python.org/3/library/logging.html#logging-levels>`_. Defaults to `INFO`
:type max_tries: int
:type session_cookies: dict
:type logging_level: int
:raises: FBchatException on failed login
"""
self.sticky, self.pool = (None, None)
self._session = requests.session()
self.req_counter = 1
self.seq = "0"
self.payloadDefault = {}
self.client = 'mercury'
self.default_thread_id = None
self.default_thread_type = None
self.req_url = ReqUrl()
self.most_recent_message = None
self.most_recent_messages_queue = Queue()
if not user_agent:
user_agent = choice(USER_AGENTS)
self._header = {
'Content-Type': 'application/x-www-form-urlencoded',
'Referer': self.req_url.BASE,
'Origin': self.req_url.BASE,
'User-Agent': user_agent,
'Connection': 'keep-alive',
}
handler.setLevel(logging_level)
# If session cookies aren't set, not properly loaded or gives us an invalid session, then do the login
if not session_cookies or not self.setSession(session_cookies) or not self.isLoggedIn():
self.login(email, password, max_tries)
else:
self.email = email
self.password = password
def onMessage(self, author_id, message_object, thread_id, thread_type, **kwargs):
self.markAsDelivered(author_id, thread_id)
self.markAsRead(author_id)
if (message_object is not None):
self.most_recent_message = message_object
self.most_recent_messages_queue.put(message_object)
def stopListening(self):
"""Cleans up the variables from startListening"""
print("Logging off...")
self.listening = False
self.sticky, self.pool = (None, None)
def listen(self, markAlive=True):
"""
Initializes and runs the listening loop continually
:param markAlive: Whether this should ping the Facebook server each time the loop runs
:type markAlive: bool
"""
self.startListening()
self.onListening()
while self.listening and self.doOneListen(markAlive):
pass
self.stopListening()
class GUI(Frame):
"""
This is the root window
"""
def __init__(self, parent, client):
self.queue = Queue()
# I got sick of filling in the login parameters repeatedly,
# for the sake of testing I will leave it like this and clear it before finishing the gui
self.email = ""
self.password = ""
self.name = ""
self.parent = parent
self.initialized = False
self.loadWindow = None
self.remember = False
self.client = None
self.msg_list = None
self.changingConvo = False
self.loginScreen()
def centerWindow(self, notself=None):
"""
This centers the window into place
if notself is set, then it centers
the notself window
@param:
notself - TKobject
"""
        if notself is not None:  # notself is primarily for the progress bar
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = (sw - self.w / 2) / 2
y = (sh - self.h / 2) / 2
notself.geometry('%dx%d+%d+%d' % (self.w / 1.8, self.h / 1.8, x, y))
else:
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = (sw - self.w) / 2
y = (sh - self.h) / 2
self.parent.geometry('%dx%d+%d+%d' % (self.w, self.h, x, y))
def startWindow(self):
"""
This method starts/creates the window for
the UI
"""
Frame.__init__(self, self.parent, background="white")
self.style = Style()
self.style.theme_use("default")
self.pack(fill=BOTH, expand=1)
if (not self.initialized):
self.centerWindow()
else:
self.parent.geometry('%dx%d' % (self.w, self.h))
self.initialized = True
def resetWindow(self):
"""
Resets the window
"""
if (self.initialized):
self.destroy()
if (self.loadWindow is not None):
self.loadWindow.destroy()
self.startWindow()
def loginScreen(self):
"""
First screen that user will see, will require Facebook credentials to be inputted
"""
# Resetting window
self.h = 150
self.w = 350
self.resetWindow()
self.parent.title("Welcome")
# Creating frame that takes in email
emailFrame = Frame(self)
emailFrame.pack(fill=X, side=TOP)
emailLabel = Label(emailFrame, text="Email:", background="white")
emailLabel.pack(side=LEFT, padx=15, pady=10)
self.emailEntry = Entry(emailFrame, width=30)
self.emailEntry.insert(0, self.email)
self.emailEntry.pack(side=LEFT, padx=35, pady=10)
# Done with email frame
# Creating password frame
passwordFrame = Frame(self)
passwordFrame.pack(fill=X, side=TOP)
passwordLabel = Label(passwordFrame, text="Password:", background="white")
passwordLabel.pack(side=LEFT, padx=15, pady=10)
self.passwordEntry = Entry(passwordFrame, show="*", width=30)
self.passwordEntry.bind("<Return>", self.start)
self.passwordEntry.insert(0, self.password)
self.passwordEntry.pack(side=LEFT, padx=35, pady=10)
# Done with password frame
# Creating bottom buttons
frame = Frame(self, borderwidth=1)
frame.pack(fill=BOTH, expand=True)
self.pack(fill=BOTH, expand=True)
exitButton = Button(self, text="Exit", command=self.parent.destroy)
exitButton.pack(side=RIGHT, padx=5, pady=5)
self.loginButton = Button(self, text="Log In", command=self.start)
self.loginButton.pack(side=RIGHT)
# Done with bottom buttons
def start(self, opt=""):
"""
Initiates login, starts loading screen.
"""
thread1 = ThreadedTask(self.queue, self.login)
thread2 = ThreadedTask(self.queue, self.loadingScreen)
thread2.start()
thread1.start()
self.checkThread(thread1, self.chatUI)
def loadingScreen(self):
"""
This starts the loading screen
and disables all buttons
"""
for i in self.winfo_children():
if Button == type(i):
i.configure(state=DISABLED)
self.loadWindow = Toplevel(self.parent)
loadingstring = "Logging in..."
loadinglabel = Label(self.loadWindow, text=loadingstring, background="white")
progressbar = Progressbar(self.loadWindow, orient="horizontal",
length=300, mode="indeterminate")
progressbar.pack(pady=self.h / 10)
loadinglabel.pack()
self.centerWindow(self.loadWindow)
self.loadWindow.title("Wait")
progressbar.start()
def login(self):
"""
Login with the inputted credentials from the loginScreen
"""
if(self.client is not None):
if(self.client.isLoggedIn()):
self.client.logout()
self.email = self.emailEntry.get()
self.password = self.passwordEntry.get()
# This will log into Facebook with the given credentials
self.client = GuiClient(self.email, self.password)
print(self.client._fetchInfo(self.client.uid)[self.client.uid].get('first_name'))
self.thread3 = ThreadedTask(self.queue, self.listen)
self.thread3.start()
def listen(self):
"""
We start the listening loop
"""
self.client.listen()
def chatUI(self):
"""
Chat GUI page
"""
self.h = 350
self.w = 700
self.resetWindow()
self.parent.title("Messenger")
# We make the chat side of the UI
self.right_frame = Frame(self)
self.right_frame.pack(side=RIGHT, fill='y')
self.messages_frame = Frame(self.right_frame)
self.messages_frame.pack(side=TOP)
self.my_msg = StringVar() # For messages to be sent.
self.my_msg.set("")
self.msg_scrollbar = Scrollbar(self.messages_frame) # Navigate through past messages
# Following will contain the messages
self.msg_list = Listbox(self.messages_frame, height=15, width=50, yscrollcommand=self.msg_scrollbar.set)
self.msg_scrollbar.config(command=self.msg_list.yview)
self.msg_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.msg_list.pack(side=RIGHT)
self.entry_field = Entry(self.right_frame, textvariable=self.my_msg)
self.entry_field.bind("<Return>", self.send)
self.send_button = Button(self.right_frame, text="Send", command=self.send)
self.entry_field.pack(side="top", fill=X, padx=5, pady=5)
self.send_button.pack(side="top")
self.exitButton = Button(self.right_frame, text="Exit", command=self.exit)
self.exitButton.pack(side="bottom", padx=5, pady=5)
        # We make the side that contains the other users.
self.left_frame = Frame(self)
self.left_frame.pack(side=LEFT, fill='y')
self.usr_scrollbar = Scrollbar(self.left_frame)
self.usr_list = Listbox(self.left_frame, height=15, width=50, yscrollcommand=self.usr_scrollbar.set)
self.usr_scrollbar.config(command=self.usr_list.yview)
self.usr_search_bar = Entry(self.left_frame, textvariable="")
self.usr_search_button = Button(self.left_frame, text="Search", command=self.search)
self.usr_search_bar.pack(side="top", fill=X, pady=2, padx=1)
self.usr_search_button.pack(side="top", fill=X, pady=2, padx=1)
self.usr_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.usr_list.pack(side=RIGHT, fill='y')
# The user loading logic is in the search function
self.search()
self.usr_list.bind('<Double-1>', self.changeConvo)
def search(self):
fresh_users = self.client.fetchAllUsers()
self.users = []
        if (self.usr_search_bar.get() != ""):
for user in fresh_users:
if (self.usr_search_bar.get() in user.name):
self.users.append(user)
else:
self.users = fresh_users
        if (self.usr_list.size() != 0):
self.usr_list.delete(0, END)
for user in self.users:
self.usr_list.insert(END, " " + user.name)
# By default I would just take the first conversation
        # Guard against empty search results (previously raised IndexError when nothing matched)
        if self.users:
            self.currentUser = self.users[0]
self.usr_search_bar.delete(0, END)
def send(self, _=""):
"""
Send messages, will send whatever is in the message field and then clear it
"""
plaintext = self.entry_field.get()
key = randint(-60, 60)
ciphertext = Encrypt.encrypt(plaintext, key)
ciphertext = "{}Q_Q{}".format(key, ciphertext)
message = Message(text=unicode(ciphertext, "ascii"))
self.client.send(message, self.currentUser.uid)
self.entry_field.delete(0, END)
self.client.most_recent_message = message
self.msg_list.insert(0, self.name + ": " + plaintext)
self.msg_list.see(END)
def changeConvo(self, param):
"""
When you click on another user in the chat we update the page
"""
print("CHANGING CONVO")
selectionIndex = self.usr_list.curselection()
self.currentUser = self.users[selectionIndex[0]]
self.changingConvo = True
self.updateConversation()
def updateConversation(self):
"""
Clear the conversation box, reupdate with new conversation, pings facebook server if they got anything
"""
if (self.changingConvo): # we are changing the conversation/switching users
print("[updateConversation] we are changing conversation")
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, self.client._fetchInfo(message.author)[message.author][
"first_name"] + ": " + text)
# The message listbox will automatically look at the last/"most recent" message
self.msg_list.see(END)
# We no longer need to change the conversation
self.changingConvo = False
else: # same user, but checking for new messages
# Sees last message from the message list box
last_message = self.msg_list.get(END)
if (self.client is not None and self.client.isLoggedIn() and self.client.most_recent_message is not None):
msg_object = self.client.most_recent_message
msg_author = self.client.most_recent_message.author
name = ""
if (msg_author is None):
msg_author = self.name
else:
name = self.client._fetchInfo(msg_author)[msg_author]["first_name"]
text = self.decrypt_w_uc(msg_object)
new_last_message = name + ": " + text
if (last_message != new_last_message):
                # This is checking whether we're updating the current convo or refreshing it
if (name + ": " in last_message):
                    while (not self.client.most_recent_messages_queue.empty()):
message = self.client.most_recent_messages_queue.get()
text = self.decrypt_w_uc(message)
self.msg_list.insert(END, self.client._fetchInfo(message.author)[message.author][
"first_name"] + ": " + text)
self.msg_list.see(END)
else:
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, self.client._fetchInfo(message.author)[message.author][
"first_name"] + ": " + text)
self.msg_list.see(END)
self.client.most_recent_message = messages[0]
def decrypt_w_uc(self, message):
"""
Decrypt with unicode character check - will decrypt when necessary,
and then convert unicode to ascii so TCL won't freak out
Input: message -> fbchat.models.Message, Message object
Output: clean_text -> String
"""
clean_text = ""
if "Q_Q" in message.text: # to be decrypted
key, ciphertext = message.text.split("Q_Q")
clean_text = Encrypt.decrypt(ciphertext, int(key))
else:
clean_text = message.text
# now we do unicode and emoji
clean_clean_text = ""
for character in clean_text:
# if character not in emoji.UNICODE_EMOJI:
if type(character) is unicode:
clean_clean_text += unicodedata.normalize('NFKD', character).encode('ascii', 'replace')
else:
clean_clean_text += character
return clean_clean_text
def exit(self):
"""
Stops listening and ends GUI
"""
self.client.stopListening()
self.parent.destroy()
def checkThread(self, thread, function):
"""
This function checks to see if
the given thread is dead, if it
is not, it recalls a new checkThread.
After the thread is dead, it calls the
given function
@param:
thread - ThreadedTask
            function - a function
"""
if thread.is_alive():
self.parent.after(1000, lambda: self.checkThread(thread, function))
else:
function()
class ThreadedTask(threading.Thread):
"""
Used for creating a threaded task
"""
def __init__(self, queue, function):
"""
Starts the threaded task
@param:
queue - Queue object
function - a function
"""
threading.Thread.__init__(self)
self.queue = queue
self.function = function
def run(self):
"""
Runs the function
"""
self.function()
def tk_loop(root, ex):
"""
    Checks for messages every two seconds
"""
if (ex.msg_list is not None):
ex.updateConversation()
root.after(2000, tk_loop, root, ex)
def initiate_tk_loop(root, ex):
"""
I honestly don't know how to thread this other than doing this terrible piece of code
"""
root.after(2000, tk_loop, root, ex)
def removeEmoji(msg):
"""
removes non ASCII chars
:param msg:
    :return: new_msg with emoji chars removed
"""
new_msg = ""
    for ch in msg:
        if ord(ch) < 128:  # keep plain ASCII; drop emoji and other non-ASCII chars
            new_msg += ch
return new_msg
if __name__ == "__main__":
# create GUI
root = Tk()
root.resizable(width=False, height=False)
ex = GUI(root, client)
    # make calls to api to load GUI with relevant information
initiate_tk_loop(root, ex)
root.mainloop()
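# Hedged illustration of the wire format produced by GUI.send() and consumed by
# GUI.decrypt_w_uc() above ("<key>Q_Q<ciphertext>"); assumes Encrypt.decrypt
# inverts Encrypt.encrypt, and the sample values are made up:
#
#   key = 7
#   wire = "{}Q_Q{}".format(key, Encrypt.encrypt("hi there", key))
#   k, ct = wire.split("Q_Q")
#   assert Encrypt.decrypt(ct, int(k)) == "hi there"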
| 35.278293
| 162
| 0.602892
|
import threading
import requests
import Encrypt
import unicodedata
from ttk import Style, Button, Label, Entry, Progressbar, Checkbutton
from Tkinter import Tk, Frame, RIGHT, BOTH, RAISED
from Tkinter import TOP, X, N, LEFT
from Tkinter import END, Listbox, MULTIPLE
from Tkinter import Toplevel, DISABLED
from Tkinter import StringVar, Scrollbar
from multiprocessing import Queue
from random import choice, randint
from fbchat import log, client
from fbchat.graphql import *
class GuiClient(client.Client):
def __init__(self, email, password, user_agent=None, max_tries=5, session_cookies=None, logging_level=logging.INFO):
self.sticky, self.pool = (None, None)
self._session = requests.session()
self.req_counter = 1
self.seq = "0"
self.payloadDefault = {}
self.client = 'mercury'
self.default_thread_id = None
self.default_thread_type = None
self.req_url = ReqUrl()
self.most_recent_message = None
self.most_recent_messages_queue = Queue()
if not user_agent:
user_agent = choice(USER_AGENTS)
self._header = {
'Content-Type': 'application/x-www-form-urlencoded',
'Referer': self.req_url.BASE,
'Origin': self.req_url.BASE,
'User-Agent': user_agent,
'Connection': 'keep-alive',
}
handler.setLevel(logging_level)
if not session_cookies or not self.setSession(session_cookies) or not self.isLoggedIn():
self.login(email, password, max_tries)
else:
self.email = email
self.password = password
def onMessage(self, author_id, message_object, thread_id, thread_type, **kwargs):
self.markAsDelivered(author_id, thread_id)
self.markAsRead(author_id)
if (message_object is not None):
self.most_recent_message = message_object
self.most_recent_messages_queue.put(message_object)
def stopListening(self):
print("Logging off...")
self.listening = False
self.sticky, self.pool = (None, None)
def listen(self, markAlive=True):
self.startListening()
self.onListening()
while self.listening and self.doOneListen(markAlive):
pass
self.stopListening()
class GUI(Frame):
def __init__(self, parent, client):
self.queue = Queue()
# I got sick of filling in the login parameters repeatedly,
# for the sake of testing I will leave it like this and clear it before finishing the gui
self.email = ""
self.password = ""
self.name = ""
self.parent = parent
self.initialized = False
self.loadWindow = None
self.remember = False
self.client = None
self.msg_list = None
self.changingConvo = False
self.loginScreen()
def centerWindow(self, notself=None):
        if notself is not None:  # notself is primarily for the progress bar
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = (sw - self.w / 2) / 2
y = (sh - self.h / 2) / 2
notself.geometry('%dx%d+%d+%d' % (self.w / 1.8, self.h / 1.8, x, y))
else:
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = (sw - self.w) / 2
y = (sh - self.h) / 2
self.parent.geometry('%dx%d+%d+%d' % (self.w, self.h, x, y))
def startWindow(self):
Frame.__init__(self, self.parent, background="white")
self.style = Style()
self.style.theme_use("default")
self.pack(fill=BOTH, expand=1)
if (not self.initialized):
self.centerWindow()
else:
self.parent.geometry('%dx%d' % (self.w, self.h))
self.initialized = True
def resetWindow(self):
if (self.initialized):
self.destroy()
if (self.loadWindow is not None):
self.loadWindow.destroy()
self.startWindow()
def loginScreen(self):
# Resetting window
self.h = 150
self.w = 350
self.resetWindow()
self.parent.title("Welcome")
# Creating frame that takes in email
emailFrame = Frame(self)
emailFrame.pack(fill=X, side=TOP)
emailLabel = Label(emailFrame, text="Email:", background="white")
emailLabel.pack(side=LEFT, padx=15, pady=10)
self.emailEntry = Entry(emailFrame, width=30)
self.emailEntry.insert(0, self.email)
self.emailEntry.pack(side=LEFT, padx=35, pady=10)
# Done with email frame
# Creating password frame
passwordFrame = Frame(self)
passwordFrame.pack(fill=X, side=TOP)
passwordLabel = Label(passwordFrame, text="Password:", background="white")
passwordLabel.pack(side=LEFT, padx=15, pady=10)
self.passwordEntry = Entry(passwordFrame, show="*", width=30)
self.passwordEntry.bind("<Return>", self.start)
self.passwordEntry.insert(0, self.password)
self.passwordEntry.pack(side=LEFT, padx=35, pady=10)
# Done with password frame
# Creating bottom buttons
frame = Frame(self, borderwidth=1)
frame.pack(fill=BOTH, expand=True)
self.pack(fill=BOTH, expand=True)
exitButton = Button(self, text="Exit", command=self.parent.destroy)
exitButton.pack(side=RIGHT, padx=5, pady=5)
self.loginButton = Button(self, text="Log In", command=self.start)
self.loginButton.pack(side=RIGHT)
# Done with bottom buttons
def start(self, opt=""):
thread1 = ThreadedTask(self.queue, self.login)
thread2 = ThreadedTask(self.queue, self.loadingScreen)
thread2.start()
thread1.start()
self.checkThread(thread1, self.chatUI)
def loadingScreen(self):
for i in self.winfo_children():
if Button == type(i):
i.configure(state=DISABLED)
self.loadWindow = Toplevel(self.parent)
loadingstring = "Logging in..."
loadinglabel = Label(self.loadWindow, text=loadingstring, background="white")
progressbar = Progressbar(self.loadWindow, orient="horizontal",
length=300, mode="indeterminate")
progressbar.pack(pady=self.h / 10)
loadinglabel.pack()
self.centerWindow(self.loadWindow)
self.loadWindow.title("Wait")
progressbar.start()
def login(self):
if(self.client is not None):
if(self.client.isLoggedIn()):
self.client.logout()
self.email = self.emailEntry.get()
self.password = self.passwordEntry.get()
# This will log into Facebook with the given credentials
self.client = GuiClient(self.email, self.password)
print(self.client._fetchInfo(self.client.uid)[self.client.uid].get('first_name'))
self.thread3 = ThreadedTask(self.queue, self.listen)
self.thread3.start()
def listen(self):
self.client.listen()
def chatUI(self):
self.h = 350
self.w = 700
self.resetWindow()
self.parent.title("Messenger")
# We make the chat side of the UI
self.right_frame = Frame(self)
self.right_frame.pack(side=RIGHT, fill='y')
self.messages_frame = Frame(self.right_frame)
self.messages_frame.pack(side=TOP)
self.my_msg = StringVar() # For messages to be sent.
self.my_msg.set("")
self.msg_scrollbar = Scrollbar(self.messages_frame) # Navigate through past messages
# Following will contain the messages
self.msg_list = Listbox(self.messages_frame, height=15, width=50, yscrollcommand=self.msg_scrollbar.set)
self.msg_scrollbar.config(command=self.msg_list.yview)
self.msg_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.msg_list.pack(side=RIGHT)
self.entry_field = Entry(self.right_frame, textvariable=self.my_msg)
self.entry_field.bind("<Return>", self.send)
self.send_button = Button(self.right_frame, text="Send", command=self.send)
self.entry_field.pack(side="top", fill=X, padx=5, pady=5)
self.send_button.pack(side="top")
self.exitButton = Button(self.right_frame, text="Exit", command=self.exit)
self.exitButton.pack(side="bottom", padx=5, pady=5)
        # We make the side that contains the other users.
self.left_frame = Frame(self)
self.left_frame.pack(side=LEFT, fill='y')
self.usr_scrollbar = Scrollbar(self.left_frame)
self.usr_list = Listbox(self.left_frame, height=15, width=50, yscrollcommand=self.usr_scrollbar.set)
self.usr_scrollbar.config(command=self.usr_list.yview)
self.usr_search_bar = Entry(self.left_frame, textvariable="")
self.usr_search_button = Button(self.left_frame, text="Search", command=self.search)
self.usr_search_bar.pack(side="top", fill=X, pady=2, padx=1)
self.usr_search_button.pack(side="top", fill=X, pady=2, padx=1)
self.usr_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.usr_list.pack(side=RIGHT, fill='y')
# The user loading logic is in the search function
self.search()
self.usr_list.bind('<Double-1>', self.changeConvo)
def search(self):
fresh_users = self.client.fetchAllUsers()
self.users = []
        if (self.usr_search_bar.get() != ""):
for user in fresh_users:
if (self.usr_search_bar.get() in user.name):
self.users.append(user)
else:
self.users = fresh_users
        if (self.usr_list.size() != 0):
self.usr_list.delete(0, END)
for user in self.users:
self.usr_list.insert(END, " " + user.name)
# By default I would just take the first conversation
        if self.users:
            self.currentUser = self.users[0]
self.usr_search_bar.delete(0, END)
def send(self, _=""):
plaintext = self.entry_field.get()
key = randint(-60, 60)
ciphertext = Encrypt.encrypt(plaintext, key)
ciphertext = "{}Q_Q{}".format(key, ciphertext)
message = Message(text=unicode(ciphertext, "ascii"))
self.client.send(message, self.currentUser.uid)
self.entry_field.delete(0, END)
self.client.most_recent_message = message
self.msg_list.insert(0, self.name + ": " + plaintext)
self.msg_list.see(END)
def changeConvo(self, param):
print("CHANGING CONVO")
selectionIndex = self.usr_list.curselection()
self.currentUser = self.users[selectionIndex[0]]
self.changingConvo = True
self.updateConversation()
def updateConversation(self):
if (self.changingConvo): # we are changing the conversation/switching users
print("[updateConversation] we are changing conversation")
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, self.client._fetchInfo(message.author)[message.author][
"first_name"] + ": " + text)
# The message listbox will automatically look at the last/"most recent" message
self.msg_list.see(END)
# We no longer need to change the conversation
self.changingConvo = False
else: # same user, but checking for new messages
# Sees last message from the message list box
last_message = self.msg_list.get(END)
if (self.client is not None and self.client.isLoggedIn() and self.client.most_recent_message is not None):
msg_object = self.client.most_recent_message
msg_author = self.client.most_recent_message.author
name = ""
if (msg_author is None):
msg_author = self.name
else:
name = self.client._fetchInfo(msg_author)[msg_author]["first_name"]
text = self.decrypt_w_uc(msg_object)
new_last_message = name + ": " + text
if (last_message != new_last_message):
                # This is checking whether we're updating the current convo or refreshing it
if (name + ": " in last_message):
                    while (not self.client.most_recent_messages_queue.empty()):
message = self.client.most_recent_messages_queue.get()
text = self.decrypt_w_uc(message)
self.msg_list.insert(END, self.client._fetchInfo(message.author)[message.author][
"first_name"] + ": " + text)
self.msg_list.see(END)
else:
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, self.client._fetchInfo(message.author)[message.author][
"first_name"] + ": " + text)
self.msg_list.see(END)
self.client.most_recent_message = messages[0]
def decrypt_w_uc(self, message):
clean_text = ""
if "Q_Q" in message.text: # to be decrypted
key, ciphertext = message.text.split("Q_Q")
clean_text = Encrypt.decrypt(ciphertext, int(key))
else:
clean_text = message.text
# now we do unicode and emoji
clean_clean_text = ""
for character in clean_text:
# if character not in emoji.UNICODE_EMOJI:
if type(character) is unicode:
clean_clean_text += unicodedata.normalize('NFKD', character).encode('ascii', 'replace')
else:
clean_clean_text += character
return clean_clean_text
def exit(self):
self.client.stopListening()
self.parent.destroy()
def checkThread(self, thread, function):
if thread.is_alive():
self.parent.after(1000, lambda: self.checkThread(thread, function))
else:
function()
class ThreadedTask(threading.Thread):
def __init__(self, queue, function):
threading.Thread.__init__(self)
self.queue = queue
self.function = function
def run(self):
self.function()
def tk_loop(root, ex):
if (ex.msg_list is not None):
ex.updateConversation()
root.after(2000, tk_loop, root, ex)
def initiate_tk_loop(root, ex):
root.after(2000, tk_loop, root, ex)
def removeEmoji(msg):
new_msg = ""
    for ch in msg:
        if ord(ch) < 128:
            new_msg += ch
return new_msg
if __name__ == "__main__":
# create GUI
root = Tk()
root.resizable(width=False, height=False)
ex = GUI(root, client)
    # make calls to api to load GUI with relevant information
initiate_tk_loop(root, ex)
root.mainloop()
| true
| true
|
f7023321a26af3f421bcb7abd350b5d2520d5a0e
| 2,760
|
py
|
Python
|
data_collector/helper.py
|
cardwizard/vulnerable-python-ecosystem
|
582fda1f45bbe912352ed31a302d798365628713
|
[
"MIT"
] | null | null | null |
data_collector/helper.py
|
cardwizard/vulnerable-python-ecosystem
|
582fda1f45bbe912352ed31a302d798365628713
|
[
"MIT"
] | null | null | null |
data_collector/helper.py
|
cardwizard/vulnerable-python-ecosystem
|
582fda1f45bbe912352ed31a302d798365628713
|
[
"MIT"
] | null | null | null |
from pipdeptree import get_installed_distributions, build_dist_index, construct_tree
from bs4 import BeautifulSoup
from json import dump, load
from urllib.request import urlretrieve
from pathlib import Path
from unittest import mock
from pkginfo import SDist
from johnnydep.cli import JohnnyDist
import requests
import setuptools
import tarfile
def read_packages():
with open("python_packages_list.json", "r") as f:
package_info = load(f)
return package_info
def download(download_link, output_folder, package_name, version):
url = download_link
dst = Path(output_folder).joinpath("{}_{}.tar.gz".format(package_name, version))
urlretrieve(url, dst)
def get_packages(package_name):
"""
Note that package name already starts with /simple/
This downloader only focuses on .tar.gz files
:param package_name:
:return:
"""
url = "https://pypi.org{}".format(package_name)
r = requests.get(url)
soup = BeautifulSoup(r.content, features='html.parser')
tar = 0
for id, link in enumerate(soup.find_all('a', href=True)):
if ".tar.gz" in link["href"]:
download(link["href"], "downloaded_packages/tar", package_name.split("/")[-2], id)
tar += 1
return {"tar": tar}
def extract_info_from_setup():
with mock.patch.object(setuptools, 'setup') as mock_setup:
import data_collector.downloaded_packages.setup
args, kwargs = mock_setup.call_args
print(kwargs)
def unpack(package_name):
with tarfile.open(package_name, mode="r:gz") as tf:
tf.extractall()
def parse(pkg_info):
mypackage = SDist(pkg_info)
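    # NOTE: PackageInfo is neither defined nor imported in this module; it is
    # assumed to be provided elsewhere in the package.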
return PackageInfo(version=mypackage.version, author=mypackage.author_email,
license=mypackage.license,
name=mypackage.name,
maintainer=mypackage.maintainer_email, additional_details=mypackage.__dict__)
def get_dependencies(package):
url = 'https://pypi.org/pypi/{}/json'
json = requests.get(url.format(package)).json()
print(json.keys())
# return json
def get_johnny_dep(package):
dist = JohnnyDist(package, index_url=None, env=None, extra_index_url=None)
return dist.serialise(fields=["name", "requires", "required_by", "project_name", "versions_available"], format=None, recurse=True)
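# Hedged sketch (an addition, not original code): pulling declared requirements
# out of the same PyPI JSON payload that get_dependencies() fetches above;
# 'info' and 'requires_dist' are documented keys of PyPI's JSON API.
def get_requires_dist(package):
    url = 'https://pypi.org/pypi/{}/json'
    info = requests.get(url.format(package)).json().get('info', {})
    return info.get('requires_dist') or []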
if __name__ == '__main__':
# get_package_list()
# get_packages("/simple/jupyter/")
# unpack("downloaded_packages/tar/jupyter_1.tar.gz")
# extract_info_from_setup()
# print(parse("downloaded_packages/tar/jupyter_1.tar.gz").dump_details())
# print(pkg_resources.get_distribution("downloaded_packages/tar/jupyter_1.tar.gz"))
print(get_dependencies("pandas"))
# print(get_johnny_dep("ipython"))
| 29.677419
| 134
| 0.698188
|
from pipdeptree import get_installed_distributions, build_dist_index, construct_tree
from bs4 import BeautifulSoup
from json import dump, load
from urllib.request import urlretrieve
from pathlib import Path
from unittest import mock
from pkginfo import SDist
from johnnydep.cli import JohnnyDist
import requests
import setuptools
import tarfile
def read_packages():
with open("python_packages_list.json", "r") as f:
package_info = load(f)
return package_info
def download(download_link, output_folder, package_name, version):
url = download_link
dst = Path(output_folder).joinpath("{}_{}.tar.gz".format(package_name, version))
urlretrieve(url, dst)
def get_packages(package_name):
url = "https://pypi.org{}".format(package_name)
r = requests.get(url)
soup = BeautifulSoup(r.content, features='html.parser')
tar = 0
for id, link in enumerate(soup.find_all('a', href=True)):
if ".tar.gz" in link["href"]:
download(link["href"], "downloaded_packages/tar", package_name.split("/")[-2], id)
tar += 1
return {"tar": tar}
def extract_info_from_setup():
with mock.patch.object(setuptools, 'setup') as mock_setup:
import data_collector.downloaded_packages.setup
args, kwargs = mock_setup.call_args
print(kwargs)
def unpack(package_name):
with tarfile.open(package_name, mode="r:gz") as tf:
tf.extractall()
def parse(pkg_info):
mypackage = SDist(pkg_info)
return PackageInfo(version=mypackage.version, author=mypackage.author_email,
license=mypackage.license,
name=mypackage.name,
maintainer=mypackage.maintainer_email, additional_details=mypackage.__dict__)
def get_dependencies(package):
url = 'https://pypi.org/pypi/{}/json'
json = requests.get(url.format(package)).json()
print(json.keys())
def get_johnny_dep(package):
dist = JohnnyDist(package, index_url=None, env=None, extra_index_url=None)
return dist.serialise(fields=["name", "requires", "required_by", "project_name", "versions_available"], format=None, recurse=True)
if __name__ == '__main__':
print(get_dependencies("pandas"))
| true
| true
|
f7023348f5b5d56b419a34ca4af3669c787d2364
| 575
|
py
|
Python
|
simulations/NewRochelle_population/sub_average_testing.py
|
Dynamical-Systems-Laboratory/ABM-COVID-DSL
|
bf1fcbbd7ae161f2a026335fa830cd21ccdf48f0
|
[
"MIT"
] | 3
|
2020-11-04T03:58:49.000Z
|
2021-01-14T22:16:24.000Z
|
simulations/NewRochelle_population/sub_average_testing.py
|
atruszkowska/ABM-COVID-DSL
|
bf1fcbbd7ae161f2a026335fa830cd21ccdf48f0
|
[
"MIT"
] | 1
|
2021-01-12T14:59:59.000Z
|
2021-01-12T14:59:59.000Z
|
simulations/NewRochelle_population/sub_average_testing.py
|
atruszkowska/ABM-COVID-DSL
|
bf1fcbbd7ae161f2a026335fa830cd21ccdf48f0
|
[
"MIT"
] | 2
|
2020-11-03T17:18:41.000Z
|
2021-01-15T04:13:03.000Z
|
#!/usr/local/bin/python3.6
# Substitute average testing value from optimization
import sys
if len(sys.argv) != 2:
sys.stderr.write('Usage: python3.X %s sy_fraction\n' % sys.argv[0])
raise SystemExit(1)
sy_fraction = sys.argv[1]
parameters = []
with open('input_data/infection_parameters.txt','r') as fin:
parameters = fin.readlines()
for ind,param in enumerate(parameters):
if 'average fraction to get tested' in param:
parameters[ind+1] = str(sy_fraction) + '\n'
break
with open('input_data/infection_parameters.txt','w') as fout:
fout.writelines(parameters)
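# Hedged illustration of the expected input_data/infection_parameters.txt layout;
# only the marker line is known from the code, the surrounding lines are assumed:
#
#   ...
#   average fraction to get tested
#   0.45        <- this line is overwritten with sy_fraction
#   ...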
| 25
| 68
| 0.732174
|
import sys
if len(sys.argv) != 2:
sys.stderr.write('Usage: python3.X %s sy_fraction\n' % sys.argv[0])
raise SystemExit(1)
sy_fraction = sys.argv[1]
parameters = []
with open('input_data/infection_parameters.txt','r') as fin:
parameters = fin.readlines()
for ind,param in enumerate(parameters):
if 'average fraction to get tested' in param:
parameters[ind+1] = str(sy_fraction) + '\n'
break
with open('input_data/infection_parameters.txt','w') as fout:
fout.writelines(parameters)
| true
| true
|
f7023395c77fc980e89233ef8430c33af75e94f4
| 1,158
|
py
|
Python
|
functions_string.py
|
jhuboo/ROSbot
|
792c663d3da348a3c375d2aea0b879278451d7be
|
[
"MIT"
] | null | null | null |
functions_string.py
|
jhuboo/ROSbot
|
792c663d3da348a3c375d2aea0b879278451d7be
|
[
"MIT"
] | null | null | null |
functions_string.py
|
jhuboo/ROSbot
|
792c663d3da348a3c375d2aea0b879278451d7be
|
[
"MIT"
] | null | null | null |
""" Simple functions to manipulate strings """
# Replace the functions below with your implementation as described in the assignment
def is_rhyme(word1, word2, k):
"""
Returns True if the last k letters of the two words are the same (case sensitive).
Automatically returns False if either word contains less than k letters.
"""
if (k == 0): # Cannot compare if k is 0
return False
if (len(word1) < k or len(word2) < k): # Return False if either word
return False # contains less than k letters
rev_word1 = word1[::-1] # Reverse word1
rev_word2 = word2[::-1] # Reverse word2
# Compare first k chars of reversed word1 and reversed word2
# Equivalent of comparing last k chars of word 1 and word2
return rev_word1[:k] == rev_word2[:k]
# Test Cases
# print(is_rhyme("hello", "world", 7)) # False
# print(is_rhyme("hello", "llo", 3)) # True
# print(is_rhyme("ello", "ello", 0)) # False
# print(is_rhyme("elo", "ello", 3)) # False
# print(is_rhyme("ello", "ello", 4)) # True
# print(is_rhyme("ello", "ello", 5)) # False
| 36.1875
| 86
| 0.614853
|
def is_rhyme(word1, word2, k):
if (k == 0): return False
if (len(word1) < k or len(word2) < k): return False
    rev_word1 = word1[::-1]
    rev_word2 = word2[::-1]
return rev_word1[:k] == rev_word2[:k]
| true
| true
|
f70233f09d4a5fd06d0484524a760dd01a14127a
| 1,318
|
py
|
Python
|
orgmybmarks.py
|
aihy/orgmybmarks
|
2e263a5ae3167c41d0d7495d2ae99f3d9f6ee256
|
[
"MIT"
] | null | null | null |
orgmybmarks.py
|
aihy/orgmybmarks
|
2e263a5ae3167c41d0d7495d2ae99f3d9f6ee256
|
[
"MIT"
] | null | null | null |
orgmybmarks.py
|
aihy/orgmybmarks
|
2e263a5ae3167c41d0d7495d2ae99f3d9f6ee256
|
[
"MIT"
] | null | null | null |
import sys
if len(sys.argv) != 2:
print("Usage: python3 orgmybmarks.py bookmarks.html")
quit()
file = open(sys.argv[1])
# file = open("in.html")
fileout = open("out.html", "w")
hreflist = []
# 读到第一个链接
numm = 1
while True:
line = file.readline()
if not line:
break
if line.find("HREF") == -1:
continue
num = line.find("HREF")
href = line[num + 6:]
num = href.find("\"")
href = href[:num]
hreflist.append(href)
print("%d now %s" % (numm, href))
numm += 1
numbef = len(hreflist)
hreflist = list(set(hreflist)) # deduplicate
numaft = len(hreflist)
fir = '''<!DOCTYPE NETSCAPE-Bookmark-file-1>
<!-- This is an automatically generated file.
It will be read and overwritten.
DO NOT EDIT! -->
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">
<TITLE>Bookmarks</TITLE>
<H1>Bookmarks</H1>
<DL><p>
<DT><H3 ADD_DATE="1530070116" LAST_MODIFIED="1532090715" PERSONAL_TOOLBAR_FOLDER="true">书签栏</H3>
<DL><p>
'''
fileout.write(fir)
for i in range(len(hreflist)):
sec = " <DT><A HREF=\"%s\">%d</A>\n" % (hreflist[i], i)
fileout.write(sec)
end = ''' </DL><p>
</DL><p>'''
fileout.write(end)
file.close()
fileout.close()
print("finished! now you have %d bookmarks, %d duplicated bookmarks deleted!" % (numaft, numbef - numaft))
| 26.897959
| 106
| 0.616844
|
import sys
if len(sys.argv) != 2:
print("Usage: python3 orgmybmarks.py bookmarks.html")
quit()
file = open(sys.argv[1])
fileout = open("out.html", "w")
hreflist = []
numm = 1
while True:
line = file.readline()
if not line:
break
if line.find("HREF") == -1:
continue
num = line.find("HREF")
href = line[num + 6:]
num = href.find("\"")
href = href[:num]
hreflist.append(href)
print("%d now %s" % (numm, href))
numm += 1
numbef = len(hreflist)
hreflist = list(set(hreflist))  # deduplicate
numaft = len(hreflist)
fir = '''<!DOCTYPE NETSCAPE-Bookmark-file-1>
<!-- This is an automatically generated file.
It will be read and overwritten.
DO NOT EDIT! -->
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">
<TITLE>Bookmarks</TITLE>
<H1>Bookmarks</H1>
<DL><p>
<DT><H3 ADD_DATE="1530070116" LAST_MODIFIED="1532090715" PERSONAL_TOOLBAR_FOLDER="true">书签栏</H3>
<DL><p>
'''
fileout.write(fir)
for i in range(len(hreflist)):
sec = " <DT><A HREF=\"%s\">%d</A>\n" % (hreflist[i], i)
fileout.write(sec)
end = ''' </DL><p>
</DL><p>'''
fileout.write(end)
file.close()
fileout.close()
print("finished! now you have %d bookmarks, %d duplicated bookmarks deleted!" % (numaft, numbef - numaft))
| true
| true
|
f70235169294c1d9d15e2d05dddcff373f8b327f
| 84
|
py
|
Python
|
tests/test_execute.py
|
kevindiltinero/seass3
|
7b108cdf09d2600cab4dc20b069124492feb801a
|
[
"BSD-2-Clause"
] | 1
|
2018-06-10T08:28:17.000Z
|
2018-06-10T08:28:17.000Z
|
tests/test_execute.py
|
kevindiltinero/seass3
|
7b108cdf09d2600cab4dc20b069124492feb801a
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_execute.py
|
kevindiltinero/seass3
|
7b108cdf09d2600cab4dc20b069124492feb801a
|
[
"BSD-2-Clause"
] | null | null | null |
import main
def test_execute():
count = main.results
assert count == 400410
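# Note: this assumes `main` computes `results` at import time; importing
# main above is what triggers the computation asserted here.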
| 16.8
| 26
| 0.690476
|
import main
def test_execute():
count = main.results
assert count == 400410
| true
| true
|
f70235171c12de4abc9cc41be600db803f93f97c
| 23,197
|
py
|
Python
|
pyburst/mcmc/mcmc_plot.py
|
zacjohnston/pyburst
|
f7d5ae9a229704d1cbcf656afb9fb3e29fb71c0c
|
[
"MIT"
] | 4
|
2019-05-01T07:30:15.000Z
|
2021-08-04T15:04:38.000Z
|
pyburst/mcmc/mcmc_plot.py
|
zacjohnston/pyburst
|
f7d5ae9a229704d1cbcf656afb9fb3e29fb71c0c
|
[
"MIT"
] | null | null | null |
pyburst/mcmc/mcmc_plot.py
|
zacjohnston/pyburst
|
f7d5ae9a229704d1cbcf656afb9fb3e29fb71c0c
|
[
"MIT"
] | 4
|
2019-03-26T12:38:37.000Z
|
2022-03-09T05:30:18.000Z
|
import numpy as np
import os
import sys
import matplotlib.pyplot as plt
import chainconsumer
from math import ceil
# pyburst
from . import mcmc_versions
from . import mcmc_tools
from . import burstfit
from . import mcmc_params
from pyburst.observations import obs_tools
from pyburst.plotting import plot_tools
from pyburst.grids.grid_strings import get_source_path, print_warning
from pyburst.misc.pyprint import printv
GRIDS_PATH = os.environ['KEPLER_GRIDS']
def default_plt_options():
"""Initialise default plot parameters"""
params = {'mathtext.default': 'regular',
'font.family': 'serif',
'text.usetex': False}
plt.rcParams.update(params)
default_plt_options()
def save_plot(fig, prefix, save, source, version, display, chain=None, n_dimensions=None,
n_walkers=None, n_steps=None, label=None, extension='.png',
enforce_chain_info=True):
"""Handles saving/displaying of a figure passed to it
"""
if enforce_chain_info and (None in (n_dimensions, n_walkers, n_steps)):
if chain is None:
raise ValueError('Must provide chain, or specify each of '
'(n_dimensions, n_walkers, n_steps)')
else:
n_walkers, n_steps, n_dimensions = chain.shape
if save:
filename = mcmc_tools.get_mcmc_string(source=source, version=version,
n_walkers=n_walkers, n_steps=n_steps,
prefix=prefix, label=label,
extension=extension)
source_path = get_source_path(source)
filepath = os.path.join(source_path, 'plots', prefix, f'{filename}')
fig.savefig(filepath)
if display:
plt.show(block=False)
else:
plt.close(fig)
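# Usage sketch for save_plot (source/version values are hypothetical): pass
# either the chain itself, or all three of n_dimensions/n_walkers/n_steps.
#
#   save_plot(fig, prefix='contours', save=True, source='grid5', version=1,
#             display=False, chain=chain)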
def save_multiple_synth(series, source, version, n_steps, discard, n_walkers=960,
walkers=True, posteriors=True, contours=False,
display=False, mass_radius=True,
synth=True, compressed=False):
"""Save plots for multiple series in a synthetic data batch
"""
# TODO reuse max_lhood point
default_plt_options()
for ser in series:
if synth:
full_source = f'{source}_{ser}'
else:
full_source = source
chain = mcmc_tools.load_chain(full_source, n_walkers=n_walkers, n_steps=n_steps,
version=version, compressed=compressed)
if walkers:
plot_walkers(chain, source=full_source, save=True,
display=display, version=version)
if posteriors:
plot_posteriors(chain, source=full_source, save=True, discard=discard,
display=display, version=version)
if contours:
plot_contours(chain, source=full_source, save=True, discard=discard,
display=display, version=version)
if mass_radius:
plot_mass_radius(chain, source=full_source, save=True, discard=discard,
display=display, version=version)
def save_all_plots(source, version, discard, n_steps, n_walkers=1000, display=False,
save=True, cap=None, posteriors=True, contours=True,
redshift=True, mass_radius=True, verbose=True, compressed=False):
"""Saves (and/or displays) main MCMC plots
"""
chain = mcmc_tools.load_chain(source, version=version, n_steps=n_steps,
n_walkers=n_walkers, verbose=verbose,
compressed=compressed)
if posteriors:
printv('Plotting posteriors', verbose=verbose)
plot_posteriors(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
if contours:
printv('Plotting contours', verbose=verbose)
plot_contours(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
if mass_radius:
printv('Plotting mass-radius', verbose=verbose)
plot_mass_radius(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
if redshift:
printv('Plotting redshift', verbose=verbose)
plot_redshift(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
def plot_contours(chain, discard, source, version, cap=None,
display=True, save=False, truth_values=None, parameters=None,
sigmas=np.linspace(0, 2, 5), cc=None, summary=False, fontsize=14,
max_ticks=4):
"""Plots posterior contours of mcmc chain
parameters : [str]
specify which parameters to plot
"""
default_plt_options()
if cc is None:
pkeys = mcmc_versions.get_parameter(source, version, 'param_keys')
pkey_labels = plot_tools.convert_mcmc_labels(param_keys=pkeys)
cc = mcmc_tools.setup_chainconsumer(chain=chain, param_labels=pkey_labels,
discard=discard, cap=cap, sigmas=sigmas,
summary=summary, fontsize=fontsize,
max_ticks=max_ticks)
if parameters is not None:
parameters = plot_tools.convert_mcmc_labels(param_keys=parameters)
# TODO: figsize
if truth_values is not None:
fig = cc.plotter.plot(truth=truth_values, parameters=parameters)
else:
fig = cc.plotter.plot(parameters=parameters)
save_plot(fig, prefix='contours', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_posteriors(chain, discard, source, version, cap=None,
display=True, save=False, truth_values=None,
cc=None):
"""Plots posterior distributions of mcmc chain
truth_values : list|dict
Specify parameters of point (e.g. the true value) to draw on the distributions.
"""
default_plt_options()
pkeys = mcmc_versions.get_parameter(source, version, 'param_keys')
pkey_labels = plot_tools.convert_mcmc_labels(param_keys=pkeys)
if cc is None:
cc = mcmc_tools.setup_chainconsumer(chain=chain, param_labels=pkey_labels,
discard=discard, cap=cap)
height = 3 * ceil(len(pkeys) / 4)
if truth_values is not None:
fig = cc.plotter.plot_distributions(figsize=[10, height],
truth=truth_values)
else:
fig = cc.plotter.plot_distributions(figsize=[10, height])
plt.tight_layout()
save_plot(fig, prefix='posteriors', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_mass_radius(chain, discard, source, version, cap=None,
display=True, save=False, summary=False,
sigmas=np.linspace(0, 2, 5), fontsize=18, figsize='column'):
"""Plots contours of mass versus radius from a given chain
"""
default_plt_options()
mass_nw, mass_gr = mcmc_params.get_constant_masses(source, version)
mass_radius_chain = mcmc_params.get_mass_radius_chain(chain=chain, discard=discard,
source=source, version=version,
cap=cap, mass_nw=mass_nw,
mass_gr=mass_gr)
cc = mcmc_tools.setup_custom_chainconsumer(mass_radius_chain, parameters=['R', 'M'],
sigmas=sigmas, summary=summary,
fontsize=fontsize)
fig = cc.plotter.plot(figsize=figsize)
fig.subplots_adjust(left=0.16, bottom=0.15)
save_plot(fig, prefix='mass-radius', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_redshift(chain, discard, source, version, cap=None, display=True, save=False):
"""Plots posterior distribution of redshift given a chain
"""
mass_nw, mass_gr = mcmc_params.get_constant_masses(source, version)
redshift_chain = mcmc_params.get_redshift_chain(chain=chain, discard=discard,
source=source, version=version,
cap=cap, mass_nw=mass_nw,
mass_gr=mass_gr)
cc = mcmc_tools.setup_custom_chainconsumer(redshift_chain, parameters=['1+z'])
fig = cc.plotter.plot_distributions(figsize=[5, 5])
plt.tight_layout()
save_plot(fig, prefix='redshift', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_gravitational_contours(chain, discard, source, version, cap=None, display=True,
save=False, r_nw=10, sigmas=np.linspace(0, 2, 5),
summary=False, unit_labels=True, fontsize=16,
fixed_grav=False, figsize=None):
"""Plots contours of gravitational parameters
"""
cc = mcmc_tools.setup_gravitational_chainconsumer(chain=chain, discard=discard,
source=source, version=version,
cap=cap, fixed_grav=fixed_grav,
summary=summary, r_nw=r_nw,
unit_labels=unit_labels,
sigmas=sigmas, fontsize=fontsize)
if fixed_grav:
fig = cc.plotter.plot_distributions(figsize=figsize)
plt.tight_layout()
else:
fig = cc.plotter.plot()
save_plot(fig, prefix='gravitational', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_inclination(chain, discard, source, version, cap=None, display=True,
save=False, disc_model='he16_a', sigmas=np.linspace(0, 2, 5),
summary=False, unit_labels=True, figsize=(4, 4), fontsize=18):
"""Plots contours of parameters derived using disc model
"""
disc_chain = mcmc_params.get_disc_chain(chain=chain, discard=discard, cap=cap,
source=source, version=version,
disc_model=disc_model)
cc = mcmc_tools.setup_custom_chainconsumer(disc_chain, parameters=['d', 'i'],
sigmas=sigmas, summary=summary,
unit_labels=unit_labels, fontsize=fontsize)
fig = cc.plotter.plot(figsize=figsize)
fig.subplots_adjust(left=0.15, bottom=0.15)
save_plot(fig, prefix='disc', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_distance_anisotropy(chain, discard, source, version, cap=None, display=True,
save=False, sigmas=np.linspace(0, 2, 5), summary=False,
figsize=(4, 4), unit_labels=True, fontsize=18):
"""Plots contours of MCMC parameters d_b, xi_ratio
"""
d_b_chain = mcmc_params.get_param_chain(chain, param='d_b', discard=discard,
source=source, version=version, cap=cap)
xi_ratio_chain = mcmc_params.get_param_chain(chain, param='xi_ratio', discard=discard,
source=source, version=version, cap=cap)
flat_chain = np.column_stack([d_b_chain, xi_ratio_chain])
cc = mcmc_tools.setup_custom_chainconsumer(flat_chain, parameters=['d_b', 'xi_ratio'],
sigmas=sigmas, summary=summary,
unit_labels=unit_labels, fontsize=fontsize)
fig = cc.plotter.plot(figsize=figsize)
fig.subplots_adjust(left=0.2, bottom=0.2)
save_plot(fig, prefix='distance', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_xedd(chain, discard, source, version, cap=None, display=True,
save=False, cloud=True, sigmas=np.linspace(0, 2, 10), figsize=(5, 5)):
"""Plots posterior for Eddington hydrogen composition (X_Edd)
"""
default_plt_options()
xedd_chain = mcmc_params.get_xedd_chain(chain=chain, discard=discard, source=source,
version=version, cap=cap)
label = plot_tools.quantity_label('xedd')
cc = mcmc_tools.setup_custom_chainconsumer(xedd_chain, parameters=[label],
sigmas=sigmas, cloud=cloud)
fig = cc.plotter.plot(figsize=figsize)
save_plot(fig, prefix='xedd', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_walkers(chain, source, version, params=None, n_lines=30, xlim=-1,
display=True, save=False, label=''):
"""Plots walkers vs steps (i.e. "time")
Parameters
----------
source : str
version : int
chain : np.array
chain as returned by load_chain()
params : [str]
parameter(s) of which to plot walkers.
n_lines : int
        approximate number of lines/walkers to plot per parameter
xlim : int
x-axis limit to plot (n_steps), i.e. ax.set_xlim((0, xlim))
label : str
optional label to add to filename when saving
display : bool
save : bool
"""
default_plt_options()
pkeys = mcmc_versions.get_parameter(source, version, 'param_keys')
# ===== Default to splitting all params into 2 plots =====
if params is None:
half = int(len(pkeys) / 2)
for i, param_split in enumerate((pkeys[:half], pkeys[half:])):
plot_walkers(chain=chain, source=source, version=version,
params=param_split, n_lines=n_lines, xlim=xlim,
display=display, save=save, label=f'P{i + 1}')
return
n_walkers, n_steps, n_dim = chain.shape
n_params = len(params)
jump_size = round(n_walkers / n_lines)
steps = np.arange(n_steps)
walker_idxs = np.arange(0, n_walkers, jump_size)
# noinspection PyTypeChecker
fig, ax = plt.subplots(n_params, 1, sharex=True, figsize=(10, 12))
for i in range(n_params):
p_idx = pkeys.index(params[i])
for j in walker_idxs:
walker = chain[j, :, p_idx]
ax[i].plot(steps, walker, linewidth=0.5, color='black')
ax[i].set_ylabel(params[i])
if xlim == -1:
xlim = n_steps
ax[-1].set_xlabel('Step')
ax[-1].set_xlim([0, xlim])
plt.tight_layout()
if display:
plt.show(block=False)
save_plot(fig, prefix='walkers', chain=chain, save=save, source=source,
version=version, display=display,
label=label, extension='.png')
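# Usage sketch (hypothetical source/version values): with params=None the
# call above recurses, splitting all parameters across two figures P1 and P2.
#
#   chain = mcmc_tools.load_chain('grid5', version=1, n_walkers=960, n_steps=10000)
#   plot_walkers(chain, source='grid5', version=1, save=True, display=False)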
def plot_qb_mdot(chain, source, version, discard, cap=None, display=True, save=False,
figsize=(5, 5), fontsize=16, sigmas=(1, 2)):
"""Plots 2D contours of Qb versus Mdot for each epoch (from multi-epoch chain)
"""
mv = mcmc_versions.McmcVersion(source=source, version=version)
chain_flat = mcmc_tools.slice_chain(chain, discard=discard, cap=cap, flatten=True)
system_table = obs_tools.load_summary(mv.system)
epochs = list(system_table.epoch)
cc = chainconsumer.ChainConsumer()
param_labels = []
for param in ['mdot', 'qb']:
param_labels += [plot_tools.full_label(param)]
for i, epoch in enumerate(epochs):
mdot_idx = mv.param_keys.index(f'mdot{i + 1}')
qb_idx = mv.param_keys.index(f'qb{i + 1}')
param_idxs = [mdot_idx, qb_idx]
cc.add_chain(chain_flat[:, param_idxs], parameters=param_labels,
name=str(epoch))
cc.configure(kde=False, smooth=0, label_font_size=fontsize,
tick_font_size=fontsize-2, sigmas=sigmas)
fig = cc.plotter.plot(display=False, figsize=figsize)
fig.subplots_adjust(left=0.2, bottom=0.2)
save_plot(fig, prefix='qb', save=save, source=source, version=version,
display=display, chain=chain)
return fig
def plot_epoch_posteriors(master_cc, source, version, display=True, save=False,
col_wrap=None, alt_params=True, unit_labels=True,
add_text=True, fontsize=16):
"""Plot posteriors for multiiple epoch chains
parameters
----------
master_cc : ChainConsumer
Contains the multi-epoch chain, created with setup_master_chainconsumer()
source : str
version : int
display : bool (optional)
save : bool (optional)
col_wrap : int (optional)
"""
param_order = {
'grid5': ['mdot1', 'mdot2', 'mdot3', 'qb1', 'qb2', 'qb3', 'x', 'z', 'm_nw',
'm_gr', 'd_b', 'xi_ratio'],
'he2': ['mdot1', 'mdot2', 'qb1', 'qb2', 'm_gr', 'd_b', 'xi_ratio'],
}
param_keys = param_order[source]
# TODO: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# quick and dirty patch!
if alt_params:
param_keys = ['mdot1', 'mdot2', 'mdot3', 'qb1', 'qb2', 'qb3', 'x', 'z', 'g',
'M', 'd_b', 'xi_ratio']
# TODO: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
formatted_params = plot_tools.convert_mcmc_labels(param_keys, unit_labels=unit_labels)
n_epochs = len(master_cc.chains) - 1
if col_wrap is None:
col_wrap = n_epochs
height = 3 * ceil(len(param_keys) / n_epochs)
fig = master_cc.plotter.plot_distributions(parameters=formatted_params,
col_wrap=col_wrap,
figsize=[8, height],
display=False)
if add_text:
add_epoch_text(fig, fontsize=fontsize)
plt.tight_layout()
save_plot(fig, prefix='multi_posteriors', save=save, source=source, version=version,
display=display, enforce_chain_info=False)
return fig
def plot_max_lhood(source, version, n_walkers, n_steps, verbose=True, re_interp=False,
display=True, save=False):
default_plt_options()
max_params, max_lhood = mcmc_tools.get_max_lhood_params(source, version=version,
n_walkers=n_walkers,
n_steps=n_steps,
verbose=verbose,
return_lhood=True)
bfit = burstfit.BurstFit(source=source, version=version, verbose=False, re_interp=re_interp)
lhood, fig = bfit.lhood(max_params, plot=True)
if lhood != max_lhood:
print_warning(f'lhoods do not match (original={max_lhood:.2f}, current={lhood:.2f}). '
+ 'BurstFit (e.g. lhood, lnhood) or interpolator may have changed')
save_plot(fig, prefix='compare', n_dimensions=len(max_params),
n_walkers=n_walkers, n_steps=n_steps, save=save, source=source,
version=version, display=display)
def plot_bprop_sample(bp_sample, source, version, bprops=None, legend=True,
subplot_figsize=(3, 2.5), bfit=None, fontsize=14,
vlines=True):
"""Plot burst properties from large sample against observations
    bp_sample : np.array
obtained using mcmc_tools.bprop_sample()
"""
if bfit is None:
bfit = burstfit.BurstFit(source=source, version=version, verbose=False)
if bprops is None:
bprops = bfit.mcmc_version.bprops
cc = mcmc_tools.setup_bprop_chainconsumer(chain=None, n=None, discard=None,
source=source, version=version,
bp_sample=bp_sample)
bp_summary = mcmc_tools.extract_bprop_summary(cc, source=source, version=version)
n_bprops = len(bprops)
n_rows = int(np.ceil(n_bprops / 2))
n_cols = {False: 1, True: 2}.get(n_bprops > 1)
figsize = (n_cols * subplot_figsize[0], n_rows * subplot_figsize[1])
fig, ax = plt.subplots(n_rows, n_cols, sharex=False, figsize=figsize)
if n_bprops % 2 == 1 and n_bprops > 1: # blank odd-numbered subplot
ax[-1, -1].axis('off')
for i, bprop in enumerate(bprops):
subplot_row = int(np.floor(i / 2))
subplot_col = i % 2
if n_cols > 1:
axis = ax[subplot_row, subplot_col]
else:
axis = ax
u_model = np.diff(bp_summary[:, :, i], axis=0)
bfit.plot_compare(model=bp_summary[1, :, i], u_model=u_model,
bprop=bprop, fontsize=fontsize,
ax=axis, display=False, vlines=vlines,
legend=True if (i == 0 and legend) else False,
xlabel=True if (i in [n_bprops-1, ]) else False)
fig.subplots_adjust(wspace=0.4)
plt.show(block=False)
return fig
def plot_autocorrelation(chain, source, version, n_points=10, load=True, save_tau=True,
ylims=None):
"""Plots estimated integrated autocorrelation time
Note: Adapted from https://dfm.io/posts/autocorr/
"""
mv = mcmc_versions.McmcVersion(source=source, version=version)
params_fmt = plot_tools.convert_mcmc_labels(mv.param_keys)
if load:
sample_steps, autoc = mcmc_tools.load_autocorrelation(source, version=version,
n_steps=chain.shape[1])
else:
sample_steps, autoc = mcmc_tools.get_autocorrelation(chain, source=source,
version=version,
n_points=n_points,
save=save_tau)
fig, ax = plt.subplots()
for i, param in enumerate(mv.param_keys):
ax.loglog(sample_steps, autoc[i], "o-", label=rf"{params_fmt[i]}")
ax.plot(sample_steps, sample_steps / 10.0, "--k", label=r"$\tau = N/10$")
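    # The tau = N/10 reference line is a common rule of thumb: autocorrelation
    # estimates are only trusted once the chain is much longer than tau, so
    # converged curves should fall well below this line as N grows.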
if ylims is None:
xlim = ax.get_xlim()
ylims = [5, xlim[1] / 10]
ax.set_ylim(ylims)
ax.set_xlabel("N steps")
ax.set_ylabel(r"$\tau$ estimate (N)")
ax.legend(fontsize=14, ncol=2, labelspacing=0.3)
plt.show(block=False)
return fig
def add_epoch_text(fig, fontsize, epochs=(1998, 2000, 2007),
colours=('C0', 'C2', 'C3')):
"""Adds text of epoch to figure subplots
"""
for i, epoch in enumerate(epochs):
ax = fig.axes[i]
ax.text(0.95, 0.95, str(epoch), color=colours[i], fontsize=fontsize,
transform=ax.transAxes, va='top', ha='right')
| 40.839789
| 96
| 0.589516
|
import numpy as np
import os
import sys
import matplotlib.pyplot as plt
import chainconsumer
from math import ceil
from . import mcmc_versions
from . import mcmc_tools
from . import burstfit
from . import mcmc_params
from pyburst.observations import obs_tools
from pyburst.plotting import plot_tools
from pyburst.grids.grid_strings import get_source_path, print_warning
from pyburst.misc.pyprint import printv
GRIDS_PATH = os.environ['KEPLER_GRIDS']
def default_plt_options():
params = {'mathtext.default': 'regular',
'font.family': 'serif',
'text.usetex': False}
plt.rcParams.update(params)
default_plt_options()
def save_plot(fig, prefix, save, source, version, display, chain=None, n_dimensions=None,
n_walkers=None, n_steps=None, label=None, extension='.png',
enforce_chain_info=True):
if enforce_chain_info and (None in (n_dimensions, n_walkers, n_steps)):
if chain is None:
raise ValueError('Must provide chain, or specify each of '
'(n_dimensions, n_walkers, n_steps)')
else:
n_walkers, n_steps, n_dimensions = chain.shape
if save:
filename = mcmc_tools.get_mcmc_string(source=source, version=version,
n_walkers=n_walkers, n_steps=n_steps,
prefix=prefix, label=label,
extension=extension)
source_path = get_source_path(source)
filepath = os.path.join(source_path, 'plots', prefix, f'{filename}')
fig.savefig(filepath)
if display:
plt.show(block=False)
else:
plt.close(fig)
def save_multiple_synth(series, source, version, n_steps, discard, n_walkers=960,
walkers=True, posteriors=True, contours=False,
display=False, mass_radius=True,
synth=True, compressed=False):
default_plt_options()
for ser in series:
if synth:
full_source = f'{source}_{ser}'
else:
full_source = source
chain = mcmc_tools.load_chain(full_source, n_walkers=n_walkers, n_steps=n_steps,
version=version, compressed=compressed)
if walkers:
plot_walkers(chain, source=full_source, save=True,
display=display, version=version)
if posteriors:
plot_posteriors(chain, source=full_source, save=True, discard=discard,
display=display, version=version)
if contours:
plot_contours(chain, source=full_source, save=True, discard=discard,
display=display, version=version)
if mass_radius:
plot_mass_radius(chain, source=full_source, save=True, discard=discard,
display=display, version=version)
def save_all_plots(source, version, discard, n_steps, n_walkers=1000, display=False,
save=True, cap=None, posteriors=True, contours=True,
redshift=True, mass_radius=True, verbose=True, compressed=False):
chain = mcmc_tools.load_chain(source, version=version, n_steps=n_steps,
n_walkers=n_walkers, verbose=verbose,
compressed=compressed)
if posteriors:
printv('Plotting posteriors', verbose=verbose)
plot_posteriors(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
if contours:
printv('Plotting contours', verbose=verbose)
plot_contours(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
if mass_radius:
printv('Plotting mass-radius', verbose=verbose)
plot_mass_radius(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
if redshift:
printv('Plotting redshift', verbose=verbose)
plot_redshift(chain, source=source, save=save, discard=discard, cap=cap,
display=display, version=version)
def plot_contours(chain, discard, source, version, cap=None,
display=True, save=False, truth_values=None, parameters=None,
sigmas=np.linspace(0, 2, 5), cc=None, summary=False, fontsize=14,
max_ticks=4):
default_plt_options()
if cc is None:
pkeys = mcmc_versions.get_parameter(source, version, 'param_keys')
pkey_labels = plot_tools.convert_mcmc_labels(param_keys=pkeys)
cc = mcmc_tools.setup_chainconsumer(chain=chain, param_labels=pkey_labels,
discard=discard, cap=cap, sigmas=sigmas,
summary=summary, fontsize=fontsize,
max_ticks=max_ticks)
if parameters is not None:
parameters = plot_tools.convert_mcmc_labels(param_keys=parameters)
if truth_values is not None:
fig = cc.plotter.plot(truth=truth_values, parameters=parameters)
else:
fig = cc.plotter.plot(parameters=parameters)
save_plot(fig, prefix='contours', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_posteriors(chain, discard, source, version, cap=None,
display=True, save=False, truth_values=None,
cc=None):
default_plt_options()
pkeys = mcmc_versions.get_parameter(source, version, 'param_keys')
pkey_labels = plot_tools.convert_mcmc_labels(param_keys=pkeys)
if cc is None:
cc = mcmc_tools.setup_chainconsumer(chain=chain, param_labels=pkey_labels,
discard=discard, cap=cap)
height = 3 * ceil(len(pkeys) / 4)
if truth_values is not None:
fig = cc.plotter.plot_distributions(figsize=[10, height],
truth=truth_values)
else:
fig = cc.plotter.plot_distributions(figsize=[10, height])
plt.tight_layout()
save_plot(fig, prefix='posteriors', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_mass_radius(chain, discard, source, version, cap=None,
display=True, save=False, summary=False,
sigmas=np.linspace(0, 2, 5), fontsize=18, figsize='column'):
default_plt_options()
mass_nw, mass_gr = mcmc_params.get_constant_masses(source, version)
mass_radius_chain = mcmc_params.get_mass_radius_chain(chain=chain, discard=discard,
source=source, version=version,
cap=cap, mass_nw=mass_nw,
mass_gr=mass_gr)
cc = mcmc_tools.setup_custom_chainconsumer(mass_radius_chain, parameters=['R', 'M'],
sigmas=sigmas, summary=summary,
fontsize=fontsize)
fig = cc.plotter.plot(figsize=figsize)
fig.subplots_adjust(left=0.16, bottom=0.15)
save_plot(fig, prefix='mass-radius', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_redshift(chain, discard, source, version, cap=None, display=True, save=False):
mass_nw, mass_gr = mcmc_params.get_constant_masses(source, version)
redshift_chain = mcmc_params.get_redshift_chain(chain=chain, discard=discard,
source=source, version=version,
cap=cap, mass_nw=mass_nw,
mass_gr=mass_gr)
cc = mcmc_tools.setup_custom_chainconsumer(redshift_chain, parameters=['1+z'])
fig = cc.plotter.plot_distributions(figsize=[5, 5])
plt.tight_layout()
save_plot(fig, prefix='redshift', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_gravitational_contours(chain, discard, source, version, cap=None, display=True,
save=False, r_nw=10, sigmas=np.linspace(0, 2, 5),
summary=False, unit_labels=True, fontsize=16,
fixed_grav=False, figsize=None):
cc = mcmc_tools.setup_gravitational_chainconsumer(chain=chain, discard=discard,
source=source, version=version,
cap=cap, fixed_grav=fixed_grav,
summary=summary, r_nw=r_nw,
unit_labels=unit_labels,
sigmas=sigmas, fontsize=fontsize)
if fixed_grav:
fig = cc.plotter.plot_distributions(figsize=figsize)
plt.tight_layout()
else:
fig = cc.plotter.plot()
save_plot(fig, prefix='gravitational', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_inclination(chain, discard, source, version, cap=None, display=True,
save=False, disc_model='he16_a', sigmas=np.linspace(0, 2, 5),
summary=False, unit_labels=True, figsize=(4, 4), fontsize=18):
disc_chain = mcmc_params.get_disc_chain(chain=chain, discard=discard, cap=cap,
source=source, version=version,
disc_model=disc_model)
cc = mcmc_tools.setup_custom_chainconsumer(disc_chain, parameters=['d', 'i'],
sigmas=sigmas, summary=summary,
unit_labels=unit_labels, fontsize=fontsize)
fig = cc.plotter.plot(figsize=figsize)
fig.subplots_adjust(left=0.15, bottom=0.15)
save_plot(fig, prefix='disc', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_distance_anisotropy(chain, discard, source, version, cap=None, display=True,
save=False, sigmas=np.linspace(0, 2, 5), summary=False,
figsize=(4, 4), unit_labels=True, fontsize=18):
d_b_chain = mcmc_params.get_param_chain(chain, param='d_b', discard=discard,
source=source, version=version, cap=cap)
xi_ratio_chain = mcmc_params.get_param_chain(chain, param='xi_ratio', discard=discard,
source=source, version=version, cap=cap)
flat_chain = np.column_stack([d_b_chain, xi_ratio_chain])
cc = mcmc_tools.setup_custom_chainconsumer(flat_chain, parameters=['d_b', 'xi_ratio'],
sigmas=sigmas, summary=summary,
unit_labels=unit_labels, fontsize=fontsize)
fig = cc.plotter.plot(figsize=figsize)
fig.subplots_adjust(left=0.2, bottom=0.2)
save_plot(fig, prefix='distance', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_xedd(chain, discard, source, version, cap=None, display=True,
save=False, cloud=True, sigmas=np.linspace(0, 2, 10), figsize=(5, 5)):
default_plt_options()
xedd_chain = mcmc_params.get_xedd_chain(chain=chain, discard=discard, source=source,
version=version, cap=cap)
label = plot_tools.quantity_label('xedd')
cc = mcmc_tools.setup_custom_chainconsumer(xedd_chain, parameters=[label],
sigmas=sigmas, cloud=cloud)
fig = cc.plotter.plot(figsize=figsize)
save_plot(fig, prefix='xedd', chain=chain, save=save, source=source,
version=version, display=display)
return fig
def plot_walkers(chain, source, version, params=None, n_lines=30, xlim=-1,
display=True, save=False, label=''):
default_plt_options()
pkeys = mcmc_versions.get_parameter(source, version, 'param_keys')
if params is None:
half = int(len(pkeys) / 2)
for i, param_split in enumerate((pkeys[:half], pkeys[half:])):
plot_walkers(chain=chain, source=source, version=version,
params=param_split, n_lines=n_lines, xlim=xlim,
display=display, save=save, label=f'P{i + 1}')
return
n_walkers, n_steps, n_dim = chain.shape
n_params = len(params)
jump_size = round(n_walkers / n_lines)
steps = np.arange(n_steps)
walker_idxs = np.arange(0, n_walkers, jump_size)
fig, ax = plt.subplots(n_params, 1, sharex=True, figsize=(10, 12))
for i in range(n_params):
p_idx = pkeys.index(params[i])
for j in walker_idxs:
walker = chain[j, :, p_idx]
ax[i].plot(steps, walker, linewidth=0.5, color='black')
ax[i].set_ylabel(params[i])
if xlim == -1:
xlim = n_steps
ax[-1].set_xlabel('Step')
ax[-1].set_xlim([0, xlim])
plt.tight_layout()
if display:
plt.show(block=False)
save_plot(fig, prefix='walkers', chain=chain, save=save, source=source,
version=version, display=display,
label=label, extension='.png')
def plot_qb_mdot(chain, source, version, discard, cap=None, display=True, save=False,
figsize=(5, 5), fontsize=16, sigmas=(1, 2)):
mv = mcmc_versions.McmcVersion(source=source, version=version)
chain_flat = mcmc_tools.slice_chain(chain, discard=discard, cap=cap, flatten=True)
system_table = obs_tools.load_summary(mv.system)
epochs = list(system_table.epoch)
cc = chainconsumer.ChainConsumer()
param_labels = []
for param in ['mdot', 'qb']:
param_labels += [plot_tools.full_label(param)]
for i, epoch in enumerate(epochs):
mdot_idx = mv.param_keys.index(f'mdot{i + 1}')
qb_idx = mv.param_keys.index(f'qb{i + 1}')
param_idxs = [mdot_idx, qb_idx]
cc.add_chain(chain_flat[:, param_idxs], parameters=param_labels,
name=str(epoch))
cc.configure(kde=False, smooth=0, label_font_size=fontsize,
tick_font_size=fontsize-2, sigmas=sigmas)
fig = cc.plotter.plot(display=False, figsize=figsize)
fig.subplots_adjust(left=0.2, bottom=0.2)
save_plot(fig, prefix='qb', save=save, source=source, version=version,
display=display, chain=chain)
return fig
def plot_epoch_posteriors(master_cc, source, version, display=True, save=False,
col_wrap=None, alt_params=True, unit_labels=True,
add_text=True, fontsize=16):
param_order = {
'grid5': ['mdot1', 'mdot2', 'mdot3', 'qb1', 'qb2', 'qb3', 'x', 'z', 'm_nw',
'm_gr', 'd_b', 'xi_ratio'],
'he2': ['mdot1', 'mdot2', 'qb1', 'qb2', 'm_gr', 'd_b', 'xi_ratio'],
}
param_keys = param_order[source]
if alt_params:
param_keys = ['mdot1', 'mdot2', 'mdot3', 'qb1', 'qb2', 'qb3', 'x', 'z', 'g',
'M', 'd_b', 'xi_ratio']
formatted_params = plot_tools.convert_mcmc_labels(param_keys, unit_labels=unit_labels)
n_epochs = len(master_cc.chains) - 1
if col_wrap is None:
col_wrap = n_epochs
height = 3 * ceil(len(param_keys) / n_epochs)
fig = master_cc.plotter.plot_distributions(parameters=formatted_params,
col_wrap=col_wrap,
figsize=[8, height],
display=False)
if add_text:
add_epoch_text(fig, fontsize=fontsize)
plt.tight_layout()
save_plot(fig, prefix='multi_posteriors', save=save, source=source, version=version,
display=display, enforce_chain_info=False)
return fig
def plot_max_lhood(source, version, n_walkers, n_steps, verbose=True, re_interp=False,
display=True, save=False):
default_plt_options()
max_params, max_lhood = mcmc_tools.get_max_lhood_params(source, version=version,
n_walkers=n_walkers,
n_steps=n_steps,
verbose=verbose,
return_lhood=True)
bfit = burstfit.BurstFit(source=source, version=version, verbose=False, re_interp=re_interp)
lhood, fig = bfit.lhood(max_params, plot=True)
if lhood != max_lhood:
print_warning(f'lhoods do not match (original={max_lhood:.2f}, current={lhood:.2f}). '
+ 'BurstFit (e.g. lhood, lnhood) or interpolator may have changed')
save_plot(fig, prefix='compare', n_dimensions=len(max_params),
n_walkers=n_walkers, n_steps=n_steps, save=save, source=source,
version=version, display=display)
def plot_bprop_sample(bp_sample, source, version, bprops=None, legend=True,
subplot_figsize=(3, 2.5), bfit=None, fontsize=14,
vlines=True):
if bfit is None:
bfit = burstfit.BurstFit(source=source, version=version, verbose=False)
if bprops is None:
bprops = bfit.mcmc_version.bprops
cc = mcmc_tools.setup_bprop_chainconsumer(chain=None, n=None, discard=None,
source=source, version=version,
bp_sample=bp_sample)
bp_summary = mcmc_tools.extract_bprop_summary(cc, source=source, version=version)
n_bprops = len(bprops)
n_rows = int(np.ceil(n_bprops / 2))
n_cols = {False: 1, True: 2}.get(n_bprops > 1)
figsize = (n_cols * subplot_figsize[0], n_rows * subplot_figsize[1])
fig, ax = plt.subplots(n_rows, n_cols, sharex=False, figsize=figsize)
if n_bprops % 2 == 1 and n_bprops > 1: ax[-1, -1].axis('off')
for i, bprop in enumerate(bprops):
subplot_row = int(np.floor(i / 2))
subplot_col = i % 2
if n_cols > 1:
axis = ax[subplot_row, subplot_col]
else:
axis = ax
u_model = np.diff(bp_summary[:, :, i], axis=0)
bfit.plot_compare(model=bp_summary[1, :, i], u_model=u_model,
bprop=bprop, fontsize=fontsize,
ax=axis, display=False, vlines=vlines,
legend=True if (i == 0 and legend) else False,
xlabel=True if (i in [n_bprops-1, ]) else False)
fig.subplots_adjust(wspace=0.4)
plt.show(block=False)
return fig
def plot_autocorrelation(chain, source, version, n_points=10, load=True, save_tau=True,
ylims=None):
mv = mcmc_versions.McmcVersion(source=source, version=version)
params_fmt = plot_tools.convert_mcmc_labels(mv.param_keys)
if load:
sample_steps, autoc = mcmc_tools.load_autocorrelation(source, version=version,
n_steps=chain.shape[1])
else:
sample_steps, autoc = mcmc_tools.get_autocorrelation(chain, source=source,
version=version,
n_points=n_points,
save=save_tau)
fig, ax = plt.subplots()
for i, param in enumerate(mv.param_keys):
ax.loglog(sample_steps, autoc[i], "o-", label=rf"{params_fmt[i]}")
ax.plot(sample_steps, sample_steps / 10.0, "--k", label=r"$\tau = N/10$")
if ylims is None:
xlim = ax.get_xlim()
ylims = [5, xlim[1] / 10]
ax.set_ylim(ylims)
ax.set_xlabel("N steps")
ax.set_ylabel(r"$\tau$ estimate (N)")
ax.legend(fontsize=14, ncol=2, labelspacing=0.3)
plt.show(block=False)
return fig
def add_epoch_text(fig, fontsize, epochs=(1998, 2000, 2007),
colours=('C0', 'C2', 'C3')):
for i, epoch in enumerate(epochs):
ax = fig.axes[i]
ax.text(0.95, 0.95, str(epoch), color=colours[i], fontsize=fontsize,
transform=ax.transAxes, va='top', ha='right')
| true
| true
|
f70235e18bbc07ecaef1d55d58b7d01548f0b411
| 1,088
|
py
|
Python
|
tests/tests_timeout.py
|
gisce/xmlrpclib-to
|
c37db9eeff71f379d78ef97e5a206d4efede7821
|
[
"MIT"
] | 1
|
2018-04-13T15:42:52.000Z
|
2018-04-13T15:42:52.000Z
|
tests/tests_timeout.py
|
gisce/xmlrpclib-to
|
c37db9eeff71f379d78ef97e5a206d4efede7821
|
[
"MIT"
] | null | null | null |
tests/tests_timeout.py
|
gisce/xmlrpclib-to
|
c37db9eeff71f379d78ef97e5a206d4efede7821
|
[
"MIT"
] | null | null | null |
import socket
import time
from xmlrpclib_to import ServerProxy
import httpretty
import pytest
XML_RESPONSE = """<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Test</string></value>
</param>
</params>
</methodResponse>"""
def timeout(request, url, headers):
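    # Respond only after a 1s delay, longer than the 0.5s client timeout used
    # in the tests below, so each proxy call raises socket.timeout.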
time.sleep(1)
return 200, headers, XML_RESPONSE
@httpretty.activate
def test_timeout():
httpretty.register_uri(
httpretty.POST,
'http://example.com/RPC2',
content_type='text/xml',
body=timeout
)
proxy = ServerProxy('http://example.com', timeout=0.5)
with pytest.raises(socket.timeout):
proxy.test()
@httpretty.activate
def test_timeout_https():
httpretty.register_uri(
httpretty.POST,
'https://example.com/RPC2',
content_type='text/xml',
body=timeout
)
proxy = ServerProxy('https://example.com', timeout=0.5)
with pytest.raises(socket.timeout):
proxy.test()
if __name__ == "__main__":
test_timeout()
test_timeout_https()
| 19.428571
| 59
| 0.629596
|
import socket
import time
from xmlrpclib_to import ServerProxy
import httpretty
import pytest
XML_RESPONSE = """<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><string>Test</string></value>
</param>
</params>
</methodResponse>"""
def timeout(request, url, headers):
time.sleep(1)
return 200, headers, XML_RESPONSE
@httpretty.activate
def test_timeout():
httpretty.register_uri(
httpretty.POST,
'http://example.com/RPC2',
content_type='text/xml',
body=timeout
)
proxy = ServerProxy('http://example.com', timeout=0.5)
with pytest.raises(socket.timeout):
proxy.test()
@httpretty.activate
def test_timeout_https():
httpretty.register_uri(
httpretty.POST,
'https://example.com/RPC2',
content_type='text/xml',
body=timeout
)
proxy = ServerProxy('https://example.com', timeout=0.5)
with pytest.raises(socket.timeout):
proxy.test()
if __name__ == "__main__":
test_timeout()
test_timeout_https()
| true
| true
|
f702369c720d1d7f21a51cf5907306bdf414098a
| 306
|
py
|
Python
|
bookmarks/images/signals.py
|
anilsaini10/django-bookmarks
|
9bb77aa73c35322f68873de0e8e4fb6f596465bf
|
[
"MIT"
] | 5
|
2020-09-08T08:12:28.000Z
|
2020-09-09T20:50:26.000Z
|
bookmarks/images/signals.py
|
anilsaini10/django-bookmarks
|
9bb77aa73c35322f68873de0e8e4fb6f596465bf
|
[
"MIT"
] | 1
|
2020-10-01T05:46:23.000Z
|
2020-10-01T05:46:23.000Z
|
bookmarks/images/signals.py
|
anilsaini10/django-bookmarks
|
9bb77aa73c35322f68873de0e8e4fb6f596465bf
|
[
"MIT"
] | 1
|
2020-09-30T19:41:40.000Z
|
2020-09-30T19:41:40.000Z
|
from django.db.models.signals import m2m_changed
from django.dispatch import receiver
from .models import Image
@receiver(m2m_changed, sender=Image.users_like.through)
def users_like_changed(sender, instance, **kwargs):
instance.total_likes = instance.users_like.count()
instance.save()
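# Sketch of how this receiver fires (image/user are hypothetical objects):
# any change to the m2m relation recomputes the denormalised like counter.
#
#   image.users_like.add(user)     # m2m_changed fires -> total_likes updated
#   image.users_like.remove(user)  # m2m_changed fires -> total_likes updated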
| 30.6
| 56
| 0.77451
|
from django.db.models.signals import m2m_changed
from django.dispatch import receiver
from .models import Image
@receiver(m2m_changed, sender=Image.users_like.through)
def users_like_changed(sender, instance, **kwargs):
instance.total_likes = instance.users_like.count()
instance.save()
| true
| true
|
f70236de1aae5c90f06badde4d82b1d629d38b76
| 1,772
|
gyp
|
Python
|
binding.gyp
|
ivan/node-blake2
|
e0dd018a45bc4a792512b78066ea9378cb542549
|
[
"CC0-1.0"
] | 34
|
2015-05-09T23:31:13.000Z
|
2019-07-16T17:00:37.000Z
|
binding.gyp
|
ludios/node-blake2
|
82c6cb2d4a3bdc43dbc382a56c8c15306b2abfd3
|
[
"CC0-1.0"
] | 12
|
2016-07-27T09:16:56.000Z
|
2019-09-09T13:19:10.000Z
|
binding.gyp
|
ivan/node-blake2
|
e0dd018a45bc4a792512b78066ea9378cb542549
|
[
"CC0-1.0"
] | 16
|
2015-08-02T05:45:52.000Z
|
2019-08-12T14:09:26.000Z
|
{
"targets": [
{
"target_name": "binding",
"win_delay_load_hook": "true",
"conditions": [
["target_arch == 'x64' or target_arch == 'ia32'", {
"sources": [
"src/binding.cpp",
"src/BLAKE2/sse/blake2b.c",
"src/BLAKE2/sse/blake2bp.c",
"src/BLAKE2/sse/blake2s.c",
"src/BLAKE2/sse/blake2sp.c"
],
"include_dirs": [
"<!(node -e \"require('nan')\")",
"src/BLAKE2/sse"
]
}],
["target_arch == 'arm64'", {
"sources": [
"src/binding.cpp",
"src/BLAKE2/neon/blake2b-neon.c",
"src/BLAKE2/neon/blake2bp.c",
"src/BLAKE2/neon/blake2s-neon.c",
"src/BLAKE2/neon/blake2sp.c"
],
"include_dirs": [
"<!(node -e \"require('nan')\")",
"src/BLAKE2/neon"
]
}],
["target_arch != 'x64' and target_arch != 'ia32' and target_arch != 'arm64'", {
"sources": [
"src/binding.cpp",
"src/BLAKE2/ref/blake2b-ref.c",
"src/BLAKE2/ref/blake2bp-ref.c",
"src/BLAKE2/ref/blake2s-ref.c",
"src/BLAKE2/ref/blake2sp-ref.c"
],
"include_dirs": [
"<!(node -e \"require('nan')\")",
"src/BLAKE2/ref"
]
}]
],
"cflags_c": [
"-std=c99",
"-Wstrict-aliasing",
"-Wextra",
"-Wno-unused-function",
"-Wno-unused-const-variable"
],
"cflags_cc": [
"-Wstrict-aliasing",
"-Wextra",
"-Wno-unused-function",
"-Wno-unused-const-variable",
"-Wno-unused-parameter"
],
'xcode_settings': {
'OTHER_CFLAGS': [
"-Wstrict-aliasing",
"-Wextra",
"-Wno-unused-function",
"-Wno-unused-const-variable",
"-Wno-unused-parameter"
]
},
"msvs_settings": {
"VCCLCompilerTool": {
"AdditionalOptions": ["/arch:AVX"]
}
}
}
]
}
| 22.717949
| 83
| 0.520316
|
{
"targets": [
{
"target_name": "binding",
"win_delay_load_hook": "true",
"conditions": [
["target_arch == 'x64' or target_arch == 'ia32'", {
"sources": [
"src/binding.cpp",
"src/BLAKE2/sse/blake2b.c",
"src/BLAKE2/sse/blake2bp.c",
"src/BLAKE2/sse/blake2s.c",
"src/BLAKE2/sse/blake2sp.c"
],
"include_dirs": [
"<!(node -e \"require('nan')\")",
"src/BLAKE2/sse"
]
}],
["target_arch == 'arm64'", {
"sources": [
"src/binding.cpp",
"src/BLAKE2/neon/blake2b-neon.c",
"src/BLAKE2/neon/blake2bp.c",
"src/BLAKE2/neon/blake2s-neon.c",
"src/BLAKE2/neon/blake2sp.c"
],
"include_dirs": [
"<!(node -e \"require('nan')\")",
"src/BLAKE2/neon"
]
}],
["target_arch != 'x64' and target_arch != 'ia32' and target_arch != 'arm64'", {
"sources": [
"src/binding.cpp",
"src/BLAKE2/ref/blake2b-ref.c",
"src/BLAKE2/ref/blake2bp-ref.c",
"src/BLAKE2/ref/blake2s-ref.c",
"src/BLAKE2/ref/blake2sp-ref.c"
],
"include_dirs": [
"<!(node -e \"require('nan')\")",
"src/BLAKE2/ref"
]
}]
],
"cflags_c": [
"-std=c99",
"-Wstrict-aliasing",
"-Wextra",
"-Wno-unused-function",
"-Wno-unused-const-variable"
],
"cflags_cc": [
"-Wstrict-aliasing",
"-Wextra",
"-Wno-unused-function",
"-Wno-unused-const-variable",
"-Wno-unused-parameter"
],
'xcode_settings': {
'OTHER_CFLAGS': [
"-Wstrict-aliasing",
"-Wextra",
"-Wno-unused-function",
"-Wno-unused-const-variable",
"-Wno-unused-parameter"
]
},
"msvs_settings": {
"VCCLCompilerTool": {
"AdditionalOptions": ["/arch:AVX"]
}
}
}
]
}
| true
| true
|
f7023897727aa90fe9eab96a3d2bdf6ea1371009
| 10,908
|
py
|
Python
|
tests/functions/test_process_text.py
|
vishalbelsare/pyjanitor
|
9c5ff2c4ad5969ee4bc683ba82010b55b55fd2bb
|
[
"MIT"
] | 225
|
2021-04-01T13:57:39.000Z
|
2022-03-31T23:12:51.000Z
|
tests/functions/test_process_text.py
|
vishalbelsare/pyjanitor
|
9c5ff2c4ad5969ee4bc683ba82010b55b55fd2bb
|
[
"MIT"
] | 251
|
2021-03-25T10:52:31.000Z
|
2022-03-31T01:46:29.000Z
|
tests/functions/test_process_text.py
|
vishalbelsare/pyjanitor
|
9c5ff2c4ad5969ee4bc683ba82010b55b55fd2bb
|
[
"MIT"
] | 30
|
2021-03-25T20:35:43.000Z
|
2022-03-20T20:20:42.000Z
|
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal
@pytest.fixture
def process_test_df():
"Base DataFrame"
return pd.DataFrame(
{"text": ["a_b_c", "c_d_e", np.nan, "f_g_h"], "numbers": range(1, 5)}
)
@pytest.fixture
def test_returns_dataframe():
"Base DataFrame"
return pd.DataFrame(
{"text": ["a1a2", "b1", "c1"], "numbers": [1, 2, 3]},
index=["A", "B", "C"],
)
def test_column_name_type(process_test_df):
"""Raise TypeError if `column_name` type is not `str`."""
with pytest.raises(TypeError):
process_test_df.process_text(["text"])
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names_type(process_test_df):
"""Raise TypeError if `new_column_names` type is not string or list."""
with pytest.raises(TypeError):
process_test_df.process_text(
column_name="text", new_column_names={"nutext": "rar"}
)
def test_column_name_presence(process_test_df):
"""Raise ValueError if `column_name` is not in dataframe."""
with pytest.raises(ValueError):
process_test_df.process_text(
column_name="Test", string_function="lower"
)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names_presence_str(test_returns_dataframe):
"""
Raise ValueError if `new_column_names` is a str
and is in the dataframe.
"""
with pytest.raises(ValueError):
test_returns_dataframe.process_text(
column_name="text",
new_column_names="text",
string_function="extractall",
pat=r"([ab])?(\d)",
)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names_presence_list(test_returns_dataframe):
"""
Raise ValueError if `new_column_names` is a list and at least
one of the new names is in the dataframe.
"""
with pytest.raises(ValueError):
test_returns_dataframe.process_text(
column_name="text",
new_column_names=["numbers", "newtext"],
string_function="extractall",
pat=r"([ab])?(\d)",
)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_merge_frame_type(test_returns_dataframe):
"""
Raise TypeError if `merge_frame` type is not bool."""
with pytest.raises(TypeError):
test_returns_dataframe.process_text(
column_name="text",
new_column_names=["number", "newtext"],
string_function="extractall",
pat=r"([ab])?(\d)",
merge_frame="True",
)
@pytest.mark.xfail(reason="string_function must be present.")
def test_string_function_is_None(process_test_df):
"""Test that dataframe is returned if string_function is None."""
result = process_test_df.process_text(column_name="text")
assert_frame_equal(result, process_test_df)
def test_str_split(process_test_df):
"""Test wrapper for Pandas `str.split()` method."""
expected = process_test_df.assign(
text=process_test_df["text"].str.split("_")
)
result = process_test_df.process_text(
column_name="text", string_function="split", pat="_"
)
assert_frame_equal(result, expected)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names(process_test_df):
"""
Test that a new column name is created when
`new_column_name` is not None.
"""
result = process_test_df.process_text(
column_name="text",
new_column_names="new_text",
string_function="slice",
start=2,
)
expected = process_test_df.assign(
new_text=process_test_df["text"].str.slice(start=2)
)
assert_frame_equal(result, expected)
@pytest.fixture
def no_nulls_df():
return pd.DataFrame({"text": ["a", "b", "c", "d"], "numbers": range(1, 5)})
def test_str_cat(no_nulls_df):
"""Test outcome for Pandas `.str.cat()` method."""
result = no_nulls_df.process_text(
column_name="text",
string_function="cat",
others=["A", "B", "C", "D"],
)
expected = no_nulls_df.assign(
text=no_nulls_df["text"].str.cat(others=["A", "B", "C", "D"])
)
assert_frame_equal(result, expected)
def test_str_cat_result_is_a_string(no_nulls_df):
"""
Test wrapper for Pandas `.str.cat()` method
when the outcome is a string.
"""
result = no_nulls_df.process_text(
column_name="text",
string_function="cat",
)
expected = no_nulls_df.assign(text=no_nulls_df["text"].str.cat())
assert_frame_equal(result, expected)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_str_cat_result_is_a_string_and_new_column_names(no_nulls_df):
"""
Test wrapper for Pandas `.str.cat()` method when the outcome is a string,
and `new_column_names` is not None.
"""
result = no_nulls_df.process_text(
column_name="text", string_function="cat", new_column_names="combined"
)
expected = no_nulls_df.assign(combined=no_nulls_df["text"].str.cat())
assert_frame_equal(result, expected)
def test_str_get():
"""Test outcome for Pandas `.str.get()` method."""
df = pd.DataFrame(
{"text": ["aA", "bB", "cC", "dD"], "numbers": range(1, 5)}
)
expected = df.assign(text=df["text"].str.get(1))
result = df.process_text(column_name="text", string_function="get", i=-1)
assert_frame_equal(result, expected)
def test_str_lower():
"""Test string conversion to lowercase using `.str.lower()`."""
df = pd.DataFrame(
{
"codes": range(1, 7),
"names": [
"Graham Chapman",
"John Cleese",
"Terry Gilliam",
"Eric Idle",
"Terry Jones",
"Michael Palin",
],
}
)
expected = df.assign(names=df["names"].str.lower())
result = df.process_text(column_name="names", string_function="lower")
assert_frame_equal(result, expected)
def test_str_wrong(process_test_df):
"""Test that an invalid Pandas string method raises an exception."""
with pytest.raises(KeyError):
process_test_df.process_text(
column_name="text", string_function="invalid_function"
)
def test_str_wrong_parameters(process_test_df):
"""Test that invalid argument for Pandas string method raises an error."""
with pytest.raises(TypeError):
process_test_df.process_text(
column_name="text", string_function="split", pattern="_"
)
@pytest.fixture
def returns_frame_1():
return pd.DataFrame(
{
"ticker": [
"spx 5/25/2001 p500",
"spx 5/25/2001 p600",
"spx 5/25/2001 p700",
]
}
)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_return_dataframe_merge_is_None(returns_frame_1):
"""
Test that the dataframe returned when `merge_frame` is None
is the result of the text processing, and is not merged to
the original dataframe.
"""
expected_output = returns_frame_1["ticker"].str.split(" ", expand=True)
result = returns_frame_1.process_text(
column_name="ticker", string_function="split", expand=True, pat=" "
)
assert_frame_equal(result, expected_output)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_return_dataframe_merge_is_not_None(returns_frame_1):
"""
Test that the dataframe returned when `merge_frame` is not None
is a merger of the original dataframe, and the dataframe
generated from the text processing.
"""
expected_output = pd.concat(
[
returns_frame_1,
returns_frame_1["ticker"]
.str.split(" ", expand=True)
.add_prefix("new_"),
],
axis="columns",
)
result = returns_frame_1.process_text(
column_name="ticker",
new_column_names="new_",
merge_frame=True,
string_function="split",
expand=True,
pat=" ",
)
assert_frame_equal(result, expected_output)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_return_dataframe_merge_is_not_None_new_column_names_is_a_list(
returns_frame_1,
):
"""
Test that the dataframe returned when `merge_frame` is not None
is a merger of the original dataframe, and the dataframe
generated from the text processing. Also, the `new_column_names`
is a list.
"""
expected_output = pd.concat(
[
returns_frame_1,
returns_frame_1["ticker"]
.str.split(" ", expand=True)
.set_axis(["header1", "header2", "header3"], axis="columns"),
],
axis="columns",
)
result = returns_frame_1.process_text(
column_name="ticker",
new_column_names=["header1", "header2", "header3"],
merge_frame=True,
string_function="split",
expand=True,
pat=" ",
)
assert_frame_equal(result, expected_output)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_return_dataframe_new_column_names_is_a_list_len_unequal(
returns_frame_1,
):
"""
Raise error if text processing returns a dataframe,
`new_column_names` is not None, and the length of
`new_column_names` is not equal to the length of the
new dataframe's columns.
"""
with pytest.raises(ValueError):
returns_frame_1.process_text(
column_name="ticker",
new_column_names=["header1", "header2"],
merge_frame=True,
string_function="split",
expand=True,
pat=" ",
)
def test_output_extractall(test_returns_dataframe):
"""
Raise ValueError if the output is a dataframe.
"""
with pytest.raises(ValueError):
test_returns_dataframe.process_text(
column_name="text",
string_function="extractall",
pat=r"(?P<letter>[ab])?(?P<digit>\d)",
)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_output_extractall_merge_frame_is_not_None(test_returns_dataframe):
"""
Test output when `string_function` is "extractall"
and `merge_frame` is not None.
"""
expected_output = test_returns_dataframe["text"].str.extractall(
r"(?P<letter>[ab])?(?P<digit>\d)"
)
expected_output = test_returns_dataframe.join(
expected_output.reset_index("match"), how="outer"
).set_index("match", append=True)
result = test_returns_dataframe.process_text(
column_name="text",
merge_frame=True,
string_function="extractall",
pat=r"(?P<letter>[ab])?(?P<digit>\d)",
)
assert_frame_equal(result, expected_output)
| 29.010638
| 79
| 0.641272
|
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal
@pytest.fixture
def process_test_df():
return pd.DataFrame(
{"text": ["a_b_c", "c_d_e", np.nan, "f_g_h"], "numbers": range(1, 5)}
)
@pytest.fixture
def test_returns_dataframe():
return pd.DataFrame(
{"text": ["a1a2", "b1", "c1"], "numbers": [1, 2, 3]},
index=["A", "B", "C"],
)
def test_column_name_type(process_test_df):
with pytest.raises(TypeError):
process_test_df.process_text(["text"])
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names_type(process_test_df):
with pytest.raises(TypeError):
process_test_df.process_text(
column_name="text", new_column_names={"nutext": "rar"}
)
def test_column_name_presence(process_test_df):
with pytest.raises(ValueError):
process_test_df.process_text(
column_name="Test", string_function="lower"
)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names_presence_str(test_returns_dataframe):
with pytest.raises(ValueError):
test_returns_dataframe.process_text(
column_name="text",
new_column_names="text",
string_function="extractall",
pat=r"([ab])?(\d)",
)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names_presence_list(test_returns_dataframe):
with pytest.raises(ValueError):
test_returns_dataframe.process_text(
column_name="text",
new_column_names=["numbers", "newtext"],
string_function="extractall",
pat=r"([ab])?(\d)",
)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_merge_frame_type(test_returns_dataframe):
with pytest.raises(TypeError):
test_returns_dataframe.process_text(
column_name="text",
new_column_names=["number", "newtext"],
string_function="extractall",
pat=r"([ab])?(\d)",
merge_frame="True",
)
@pytest.mark.xfail(reason="string_function must be present.")
def test_string_function_is_None(process_test_df):
result = process_test_df.process_text(column_name="text")
assert_frame_equal(result, process_test_df)
def test_str_split(process_test_df):
expected = process_test_df.assign(
text=process_test_df["text"].str.split("_")
)
result = process_test_df.process_text(
column_name="text", string_function="split", pat="_"
)
assert_frame_equal(result, expected)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_new_column_names(process_test_df):
result = process_test_df.process_text(
column_name="text",
new_column_names="new_text",
string_function="slice",
start=2,
)
expected = process_test_df.assign(
new_text=process_test_df["text"].str.slice(start=2)
)
assert_frame_equal(result, expected)
@pytest.fixture
def no_nulls_df():
return pd.DataFrame({"text": ["a", "b", "c", "d"], "numbers": range(1, 5)})
def test_str_cat(no_nulls_df):
result = no_nulls_df.process_text(
column_name="text",
string_function="cat",
others=["A", "B", "C", "D"],
)
expected = no_nulls_df.assign(
text=no_nulls_df["text"].str.cat(others=["A", "B", "C", "D"])
)
assert_frame_equal(result, expected)
def test_str_cat_result_is_a_string(no_nulls_df):
result = no_nulls_df.process_text(
column_name="text",
string_function="cat",
)
expected = no_nulls_df.assign(text=no_nulls_df["text"].str.cat())
assert_frame_equal(result, expected)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_str_cat_result_is_a_string_and_new_column_names(no_nulls_df):
result = no_nulls_df.process_text(
column_name="text", string_function="cat", new_column_names="combined"
)
expected = no_nulls_df.assign(combined=no_nulls_df["text"].str.cat())
assert_frame_equal(result, expected)
def test_str_get():
df = pd.DataFrame(
{"text": ["aA", "bB", "cC", "dD"], "numbers": range(1, 5)}
)
expected = df.assign(text=df["text"].str.get(1))
result = df.process_text(column_name="text", string_function="get", i=-1)
assert_frame_equal(result, expected)
def test_str_lower():
df = pd.DataFrame(
{
"codes": range(1, 7),
"names": [
"Graham Chapman",
"John Cleese",
"Terry Gilliam",
"Eric Idle",
"Terry Jones",
"Michael Palin",
],
}
)
expected = df.assign(names=df["names"].str.lower())
result = df.process_text(column_name="names", string_function="lower")
assert_frame_equal(result, expected)
def test_str_wrong(process_test_df):
with pytest.raises(KeyError):
process_test_df.process_text(
column_name="text", string_function="invalid_function"
)
def test_str_wrong_parameters(process_test_df):
with pytest.raises(TypeError):
process_test_df.process_text(
column_name="text", string_function="split", pattern="_"
)
@pytest.fixture
def returns_frame_1():
return pd.DataFrame(
{
"ticker": [
"spx 5/25/2001 p500",
"spx 5/25/2001 p600",
"spx 5/25/2001 p700",
]
}
)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_return_dataframe_merge_is_None(returns_frame_1):
expected_output = returns_frame_1["ticker"].str.split(" ", expand=True)
result = returns_frame_1.process_text(
column_name="ticker", string_function="split", expand=True, pat=" "
)
assert_frame_equal(result, expected_output)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_return_dataframe_merge_is_not_None(returns_frame_1):
expected_output = pd.concat(
[
returns_frame_1,
returns_frame_1["ticker"]
.str.split(" ", expand=True)
.add_prefix("new_"),
],
axis="columns",
)
result = returns_frame_1.process_text(
column_name="ticker",
new_column_names="new_",
merge_frame=True,
string_function="split",
expand=True,
pat=" ",
)
assert_frame_equal(result, expected_output)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_return_dataframe_merge_is_not_None_new_column_names_is_a_list(
returns_frame_1,
):
expected_output = pd.concat(
[
returns_frame_1,
returns_frame_1["ticker"]
.str.split(" ", expand=True)
.set_axis(["header1", "header2", "header3"], axis="columns"),
],
axis="columns",
)
result = returns_frame_1.process_text(
column_name="ticker",
new_column_names=["header1", "header2", "header3"],
merge_frame=True,
string_function="split",
expand=True,
pat=" ",
)
assert_frame_equal(result, expected_output)
@pytest.mark.xfail(reason="new_column_names is deprecated.")
def test_return_dataframe_new_column_names_is_a_list_len_unequal(
returns_frame_1,
):
with pytest.raises(ValueError):
returns_frame_1.process_text(
column_name="ticker",
new_column_names=["header1", "header2"],
merge_frame=True,
string_function="split",
expand=True,
pat=" ",
)
def test_output_extractall(test_returns_dataframe):
with pytest.raises(ValueError):
test_returns_dataframe.process_text(
column_name="text",
string_function="extractall",
pat=r"(?P<letter>[ab])?(?P<digit>\d)",
)
@pytest.mark.xfail(reason="merge_frame is deprecated.")
def test_output_extractall_merge_frame_is_not_None(test_returns_dataframe):
expected_output = test_returns_dataframe["text"].str.extractall(
r"(?P<letter>[ab])?(?P<digit>\d)"
)
expected_output = test_returns_dataframe.join(
expected_output.reset_index("match"), how="outer"
).set_index("match", append=True)
result = test_returns_dataframe.process_text(
column_name="text",
merge_frame=True,
string_function="extractall",
pat=r"(?P<letter>[ab])?(?P<digit>\d)",
)
assert_frame_equal(result, expected_output)
| true
| true
|
f70239220de1326dc46870b4912397756fdb1361
| 18,427
|
py
|
Python
|
bin/Python27/Lib/site-packages/sympy/printing/tests/test_str.py
|
lefevre-fraser/openmeta-mms
|
08f3115e76498df1f8d70641d71f5c52cab4ce5f
|
[
"MIT"
] | null | null | null |
bin/Python27/Lib/site-packages/sympy/printing/tests/test_str.py
|
lefevre-fraser/openmeta-mms
|
08f3115e76498df1f8d70641d71f5c52cab4ce5f
|
[
"MIT"
] | null | null | null |
bin/Python27/Lib/site-packages/sympy/printing/tests/test_str.py
|
lefevre-fraser/openmeta-mms
|
08f3115e76498df1f8d70641d71f5c52cab4ce5f
|
[
"MIT"
] | null | null | null |
from __future__ import division
from sympy import (Abs, Catalan, cos, Derivative, E, EulerGamma, exp,
factorial, factorial2, Function, GoldenRatio, I, Integer, Integral,
Interval, Lambda, Limit, log, Matrix, nan, O, oo, pi, Rational, Float, Rel,
S, sin, SparseMatrix, sqrt, summation, Sum, Symbol, symbols, Wild,
WildFunction, zeta, zoo, Dummy, Dict, Tuple, FiniteSet)
from sympy.core import Expr
from sympy.physics.units import second, joule
from sympy.polys import Poly, RootOf, RootSum, groebner
from sympy.statistics.distributions import Normal, Sample, Uniform
from sympy.geometry import Point, Circle
from sympy.utilities.pytest import raises
from sympy.printing import sstr, sstrrepr, StrPrinter
from sympy.core.trace import Tr
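# Symbols and a dummy shared by all of the printing tests below.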
x, y, z, w = symbols('x,y,z,w')
d = Dummy('d')
def test_printmethod():
class R(Abs):
def _sympystr(self, printer):
return "foo(%s)" % printer._print(self.args[0])
assert sstr(R(x)) == "foo(x)"
class R(Abs):
def _sympystr(self, printer):
return "foo"
assert sstr(R(x)) == "foo"
def test_Abs():
assert str(Abs(x)) == "Abs(x)"
assert str(Abs(Rational(1,6))) == "1/6"
assert str(Abs(Rational(-1,6))) == "1/6"
def test_Add():
assert str(x+y) == "x + y"
assert str(x+1) == "x + 1"
assert str(x+x**2) == "x**2 + x"
assert str(5+x+y+x*y+x**2+y**2) == "x**2 + x*y + x + y**2 + y + 5"
assert str(1+x+x**2/2+x**3/3) == "x**3/3 + x**2/2 + x + 1"
assert str(2*x-7*x**2 + 2 + 3*y) == "-7*x**2 + 2*x + 3*y + 2"
assert str(x-y) == "x - y"
assert str(2-x) == "-x + 2"
assert str(x-2) == "x - 2"
assert str(x-y-z-w) == "-w + x - y - z"
assert str(x-z*y**2*z*w) == "-w*y**2*z**2 + x"
assert str(x-1*y*x*y) == "-x*y**2 + x"
assert str(sin(x).series(x, 0, 15)) == "x - x**3/6 + x**5/120 - x**7/5040 + x**9/362880 - x**11/39916800 + x**13/6227020800 + O(x**15)"
def test_Catalan():
assert str(Catalan) == "Catalan"
def test_ComplexInfinity():
assert str(zoo) == "zoo"
def test_Derivative():
assert str(Derivative(x, y)) == "Derivative(x, y)"
assert str(Derivative(x**2, x, evaluate=False)) == "Derivative(x**2, x)"
assert str(Derivative(x**2/y, x, y, evaluate=False)) == "Derivative(x**2/y, x, y)"
def test_dict():
assert str({1: 1+x}) == sstr({1: 1+x}) == "{1: x + 1}"
assert str({1: x**2, 2: y*x}) in ("{1: x**2, 2: x*y}", "{2: x*y, 1: x**2}")
assert sstr({1: x**2, 2: y*x}) == "{1: x**2, 2: x*y}"
def test_Dict():
assert str(Dict({1: 1+x})) == sstr({1: 1+x}) == "{1: x + 1}"
assert str(Dict({1: x**2, 2: y*x})) in (
"{1: x**2, 2: x*y}", "{2: x*y, 1: x**2}")
assert sstr(Dict({1: x**2, 2: y*x})) == "{1: x**2, 2: x*y}"
def test_Dummy():
assert str(d) == "_d"
assert str(d+x) == "_d + x"
def test_EulerGamma():
assert str(EulerGamma) == "EulerGamma"
def test_Exp():
assert str(E) == "E"
def test_factorial():
n = Symbol('n', integer=True)
assert str(factorial(-2)) == "0"
assert str(factorial(0)) == "1"
assert str(factorial(7)) == "5040"
assert str(factorial(n)) == "n!"
assert str(factorial(2*n)) == "(2*n)!"
assert str(factorial(factorial(n))) == '(n!)!'
assert str(factorial(factorial2(n))) == '(n!!)!'
assert str(factorial2(factorial(n))) == '(n!)!!'
assert str(factorial2(factorial2(n))) == '(n!!)!!'
def test_Function():
f = Function('f')
fx = f(x)
w = WildFunction('w')
assert str(f) == "f"
assert str(fx) == "f(x)"
assert str(w) == "w_"
def test_Geometry():
assert sstr(Point(0,0)) == 'Point(0, 0)'
assert sstr(Circle(Point(0,0), 3)) == 'Circle(Point(0, 0), 3)'
# TODO test other Geometry entities
def test_GoldenRatio():
assert str(GoldenRatio) == "GoldenRatio"
def test_ImaginaryUnit():
assert str(I) == "I"
def test_Infinity():
assert str(oo) == "oo"
assert str(oo*I) == "oo*I"
def test_Integer():
assert str(Integer(-1)) == "-1"
assert str(Integer(1)) == "1"
assert str(Integer(-3)) == "-3"
assert str(Integer(0)) == "0"
assert str(Integer(25)) == "25"
def test_Integral():
assert str(Integral(sin(x), y)) == "Integral(sin(x), y)"
assert str(Integral(sin(x), (y, 0, 1))) == "Integral(sin(x), (y, 0, 1))"
def test_Interval():
a = Symbol('a', real=True)
assert str(Interval(0, a)) == "[0, a]"
assert str(Interval(0, a, False, False)) == "[0, a]"
assert str(Interval(0, a, True, False)) == "(0, a]"
assert str(Interval(0, a, False, True)) == "[0, a)"
assert str(Interval(0, a, True, True)) == "(0, a)"
def test_Lambda():
assert str(Lambda(d, d**2)) == "Lambda(_d, _d**2)"
def test_Limit():
assert str(Limit(sin(x)/x, x, y)) == "Limit(sin(x)/x, x, y)"
assert str(Limit(1/x, x, 0)) == "Limit(1/x, x, 0)"
assert str(Limit(sin(x)/x, x, y, dir="-")) == "Limit(sin(x)/x, x, y, dir='-')"
def test_list():
assert str([x]) == sstr([x]) == "[x]"
assert str([x**2, x*y+1]) == sstr([x**2, x*y+1]) == "[x**2, x*y + 1]"
assert str([x**2, [y+x]]) == sstr([x**2, [y+x]]) == "[x**2, [x + y]]"
def test_Matrix():
M = Matrix([[x**+1, 1], [y, x+y]])
assert str(M) == sstr(M) == "[x, 1]\n[y, x + y]"
M = Matrix()
assert str(M) == sstr(M) == "[]"
M = Matrix(0, 1, lambda i, j: 0)
assert str(M) == sstr(M) == "[]"
def test_Mul():
assert str(x/y) == "x/y"
assert str(y/x) == "y/x"
assert str(x/y/z) == "x/(y*z)"
assert str((x+1)/(y+2)) == "(x + 1)/(y + 2)"
assert str(2*x/3) == '2*x/3'
assert str(-2*x/3) == '-2*x/3'
class CustomClass1(Expr):
is_commutative = True
class CustomClass2(Expr):
is_commutative = True
cc1 = CustomClass1()
cc2 = CustomClass2()
assert str(Rational(2)*cc1) == '2*CustomClass1()'
assert str(cc1*Rational(2)) == '2*CustomClass1()'
assert str(cc1*Float("1.5")) == '1.5*CustomClass1()'
assert str(cc2*Rational(2)) == '2*CustomClass2()'
assert str(cc2*Rational(2)*cc1) == '2*CustomClass1()*CustomClass2()'
assert str(cc1*Rational(2)*cc2) == '2*CustomClass1()*CustomClass2()'
def test_NaN():
assert str(nan) == "nan"
def test_NegativeInfinity():
assert str(-oo) == "-oo"
def test_Normal():
assert str(Normal(x+y, z)) == "Normal(x + y, z)"
def test_Order():
assert str(O(x)) == "O(x)"
assert str(O(x**2)) == "O(x**2)"
assert str(O(x*y)) == "O(x*y, x, y)"
def test_Permutation_Cycle():
from sympy.combinatorics import Permutation, Cycle
# general principle: economically, canonically show all moved elements
# and the size of the permutation.
for p, s in [
(Cycle(),
'Cycle()'),
(Cycle(2),
'Cycle(2)'),
(Cycle(2, 1),
'Cycle(1, 2)'),
(Cycle(1, 2)(5)(6, 7)(10),
'Cycle(1, 2)(6, 7)(10)'),
(Cycle(3, 4)(1, 2)(3, 4),
'Cycle(1, 2)(4)'),
]:
assert str(p) == s
Permutation.print_cyclic = False
for p, s in [
(Permutation([]),
'Permutation([])'),
(Permutation([], size=1),
'Permutation([0])'),
(Permutation([], size=2),
'Permutation([0, 1])'),
(Permutation([], size=10),
'Permutation([], size=10)'),
(Permutation([1, 0, 2]),
'Permutation([1, 0, 2])'),
(Permutation([1, 0, 2, 3, 4, 5]),
'Permutation([1, 0], size=6)'),
(Permutation([1, 0, 2, 3, 4, 5], size=10),
'Permutation([1, 0], size=10)'),
]:
assert str(p) == s
Permutation.print_cyclic = True
for p, s in [
(Permutation([]),
'Permutation()'),
(Permutation([], size=1),
'Permutation(0)'),
(Permutation([], size=2),
'Permutation(1)'),
(Permutation([], size=10),
'Permutation(9)'),
(Permutation([1, 0, 2]),
'Permutation(2)(0, 1)'),
(Permutation([1, 0, 2, 3, 4, 5]),
'Permutation(5)(0, 1)'),
(Permutation([1, 0, 2, 3, 4, 5], size=10),
'Permutation(9)(0, 1)'),
(Permutation([0, 1, 3, 2, 4, 5], size=10),
'Permutation(9)(2, 3)'),
]:
assert str(p) == s
def test_Pi():
assert str(pi) == "pi"
def test_Poly():
assert str(Poly(0, x)) == "Poly(0, x, domain='ZZ')"
assert str(Poly(1, x)) == "Poly(1, x, domain='ZZ')"
assert str(Poly(x, x)) == "Poly(x, x, domain='ZZ')"
assert str(Poly(2*x + 1, x)) == "Poly(2*x + 1, x, domain='ZZ')"
assert str(Poly(2*x - 1, x)) == "Poly(2*x - 1, x, domain='ZZ')"
assert str(Poly(-1, x)) == "Poly(-1, x, domain='ZZ')"
assert str(Poly(-x, x)) == "Poly(-x, x, domain='ZZ')"
assert str(Poly(-2*x + 1, x)) == "Poly(-2*x + 1, x, domain='ZZ')"
assert str(Poly(-2*x - 1, x)) == "Poly(-2*x - 1, x, domain='ZZ')"
assert str(Poly(x - 1, x)) == "Poly(x - 1, x, domain='ZZ')"
assert str(Poly(x**2 + 1 + y, x)) == "Poly(x**2 + y + 1, x, domain='ZZ[y]')"
assert str(Poly(x**2 - 1 + y, x)) == "Poly(x**2 + y - 1, x, domain='ZZ[y]')"
assert str(Poly(x**2 + I*x, x)) == "Poly(x**2 + I*x, x, domain='EX')"
assert str(Poly(x**2 - I*x, x)) == "Poly(x**2 - I*x, x, domain='EX')"
assert str(Poly(-x*y*z + x*y - 1, x, y, z)) == "Poly(-x*y*z + x*y - 1, x, y, z, domain='ZZ')"
assert str(Poly(-w*x**21*y**7*z + (1 + w)*z**3 - 2*x*z + 1, x, y, z)) == \
"Poly(-w*x**21*y**7*z - 2*x*z + (w + 1)*z**3 + 1, x, y, z, domain='ZZ[w]')"
assert str(Poly(x**2 + 1, x, modulus=2)) == "Poly(x**2 + 1, x, modulus=2)"
assert str(Poly(2*x**2 + 3*x + 4, x, modulus=17)) == "Poly(2*x**2 + 3*x + 4, x, modulus=17)"
def test_Pow():
assert str(x**-1) == "1/x"
assert str(x**-2) == "x**(-2)"
assert str(x**2) == "x**2"
assert str((x+y)**-1) == "1/(x + y)"
assert str((x+y)**-2) == "(x + y)**(-2)"
assert str((x+y)**2) == "(x + y)**2"
assert str((x+y)**(1+x)) == "(x + y)**(x + 1)"
assert str(x**Rational(1, 3)) == "x**(1/3)"
assert str(1/x**Rational(1, 3)) == "x**(-1/3)"
assert str(sqrt(sqrt(x))) == "x**(1/4)"
assert str(x**-1.0) == '1/x'
def test_sqrt():
assert str(sqrt(x)) == "sqrt(x)"
assert str(sqrt(x**2)) == "sqrt(x**2)"
assert str(1/sqrt(x)) == "1/sqrt(x)"
assert str(1/sqrt(x**2)) == "1/sqrt(x**2)"
assert str(y/sqrt(x)) == "y/sqrt(x)"
assert str(x**(1/2)) == "x**0.5"
assert str(1/x**(1/2)) == "x**(-0.5)"
def test_Rational():
n1 = Rational(1,4)
n2 = Rational(1,3)
n3 = Rational(2,4)
n4 = Rational(2,-4)
n5 = Rational(0)
n6 = Rational(1)
n7 = Rational(3)
n8 = Rational(-3)
assert str(n1*n2) == "1/12"
assert str(n1*n2) == "1/12"
assert str(n3) == "1/2"
assert str(n1*n3) == "1/8"
assert str(n1+n3) == "3/4"
assert str(n1+n2) == "7/12"
assert str(n1+n4) == "-1/4"
assert str(n4*n4) == "1/4"
assert str(n4+n2) == "-1/6"
assert str(n4+n5) == "-1/2"
assert str(n4*n5) == "0"
assert str(n3+n4) == "0"
assert str(n1**n7) == "1/64"
assert str(n2**n7) == "1/27"
assert str(n2**n8) == "27"
assert str(n7**n8) == "1/27"
assert str(Rational("-25")) == "-25"
assert str(Rational("1.25")) == "5/4"
assert str(Rational("-2.6e-2")) == "-13/500"
assert str(S("25/7")) == "25/7"
assert str(S("-123/569")) == "-123/569"
assert str(S("0.1[23]", rational=1)) == "61/495"
assert str(S("5.1[666]", rational=1)) == "31/6"
assert str(S("-5.1[666]", rational=1)) == "-31/6"
assert str(S("0.[9]", rational=1)) == "1"
assert str(S("-0.[9]", rational=1)) == "-1"
assert str(sqrt(Rational(1,4))) == "1/2"
assert str(sqrt(Rational(1,36))) == "1/6"
assert str((123**25) ** Rational(1,25)) == "123"
assert str((123**25+1)**Rational(1,25)) != "123"
assert str((123**25-1)**Rational(1,25)) != "123"
assert str((123**25-1)**Rational(1,25)) != "122"
assert str(sqrt(Rational(81,36))**3) == "27/8"
assert str(1/sqrt(Rational(81,36))**3) == "8/27"
assert str(sqrt(-4)) == str(2*I)
assert str(2**Rational(1,10**10)) == "2**(1/10000000000)"
def test_Float():
# NOTE prec is the whole number of decimal digits
assert str(Float('1.23', prec=1+2)) == '1.23'
assert str(Float('1.23456789', prec=1+8)) == '1.23456789'
assert str(Float('1.234567890123456789', prec=1+18)) == '1.234567890123456789'
assert str(pi.evalf(1+2)) == '3.14'
assert str(pi.evalf(1+14)) == '3.14159265358979'
assert str(pi.evalf(1+64)) == '3.1415926535897932384626433832795028841971693993751058209749445923'
assert str(pi.round(-1)) == '0.'
assert str((pi**400 - (pi**400).round(1)).n(2)) == '-0.e+88'
def test_Relational():
assert str(Rel(x, y, "<")) == "x < y"
assert str(Rel(x+y, y, "==")) == "x + y == y"
def test_RootOf():
assert str(RootOf(x**5 + 2*x - 1, 0)) == "RootOf(x**5 + 2*x - 1, 0)"
def test_RootSum():
f = x**5 + 2*x - 1
assert str(RootSum(f, Lambda(z, z), auto=False)) == "RootSum(x**5 + 2*x - 1)"
assert str(RootSum(f, Lambda(z, z**2), auto=False)) == "RootSum(x**5 + 2*x - 1, Lambda(_z, _z**2))"
def test_GroebnerBasis():
assert str(groebner([], x, y)) == "GroebnerBasis([], x, y, domain='ZZ', order='lex')"
F = [x**2 - 3*y - x + 1, y**2 - 2*x + y - 1]
assert str(groebner(F, order='grlex')) == \
"GroebnerBasis([x**2 - x - 3*y + 1, y**2 - 2*x + y - 1], x, y, domain='ZZ', order='grlex')"
assert str(groebner(F, order='lex')) == \
"GroebnerBasis([2*x - y**2 - y + 1, y**4 + 2*y**3 - 3*y**2 - 16*y + 7], x, y, domain='ZZ', order='lex')"
def test_Sample():
assert str(Sample([x, y, 1])) in [
"Sample([x, y, 1])",
"Sample([y, 1, x])",
"Sample([1, x, y])",
"Sample([y, x, 1])",
"Sample([x, 1, y])",
"Sample([1, y, x])",
]
def test_set():
assert sstr(set()) == 'set()'
assert sstr(frozenset()) == 'frozenset()'
    assert sstr(set([1,2,3])) == 'set([1, 2, 3])'
assert sstr(set([1,x,x**2,x**3,x**4])) == 'set([1, x, x**2, x**3, x**4])'
def test_SparseMatrix():
M = SparseMatrix([[x**+1, 1], [y, x+y]])
assert str(M) == sstr(M) == "[x, 1]\n[y, x + y]"
def test_Sum():
assert str(summation(cos(3*z), (z, x, y))) == "Sum(cos(3*z), (z, x, y))"
assert str(Sum(x*y**2, (x, -2, 2), (y, -5, 5))) == \
"Sum(x*y**2, (x, -2, 2), (y, -5, 5))"
def test_Symbol():
assert str(y) == "y"
assert str(x) == "x"
e = x
assert str(e) == "x"
def test_tuple():
assert str((x,)) == sstr((x,)) == "(x,)"
assert str((x+y, 1+x)) == sstr((x+y, 1+x)) == "(x + y, x + 1)"
assert str((x+y, (1+x, x**2))) == sstr((x+y, (1+x, x**2))) == "(x + y, (x + 1, x**2))"
def test_Uniform():
assert str(Uniform(x, y)) == "Uniform(x, y)"
assert str(Uniform(x+y, y)) == "Uniform(x + y, y)"
def test_Unit():
assert str(second) == "s"
assert str(joule) == "kg*m**2/s**2" # issue 2461
def test_wild_str():
# Check expressions containing Wild not causing infinite recursion
w = Wild('x')
assert str(w + 1) == 'x_ + 1'
assert str(exp(2**w) + 5) == 'exp(2**x_) + 5'
assert str(3*w + 1) == '3*x_ + 1'
assert str(1/w + 1) == '1 + 1/x_'
assert str(w**2 + 1) == 'x_**2 + 1'
assert str(1/(1-w)) == '1/(-x_ + 1)'
def test_zeta():
assert str(zeta(3)) == "zeta(3)"
def test_bug2():
e = x-y
a = str(e)
b = str(e)
assert a == b
def test_bug4():
e = -2*sqrt(x)-y/sqrt(x)/2
    assert str(e) not in ["(-2)*x**1/2(-1/2)*x**(-1/2)*y",
            "-2*x**1/2(-1/2)*x**(-1/2)*y", "-2*x**1/2-1/2*x**-1/2*y"]
assert str(e) == "-2*sqrt(x) - y/(2*sqrt(x))"
def test_issue922():
e = Integral(x,x) + 1
assert str(e) == 'Integral(x, x) + 1'
def test_sstrrepr():
assert sstr('abc') == 'abc'
assert sstrrepr('abc') == "'abc'"
e = ['a', 'b', 'c', x]
assert sstr(e) == "[a, b, c, x]"
assert sstrrepr(e) == "['a', 'b', 'c', x]"
def test_infinity():
assert sstr(oo*I) == "oo*I"
def test_full_prec():
assert sstr(S("0.3"), full_prec=True) == "0.300000000000000"
assert sstr(S("0.3"), full_prec="auto") == "0.300000000000000"
assert sstr(S("0.3"), full_prec=False) == "0.3"
assert sstr(S("0.3")*x, full_prec=True) in [
"0.300000000000000*x",
"x*0.300000000000000"
]
assert sstr(S("0.3")*x, full_prec="auto") in [
"0.3*x",
"x*0.3"
]
assert sstr(S("0.3")*x, full_prec=False) in [
"0.3*x",
"x*0.3"
]
def test_noncommutative():
A, B, C = symbols('A,B,C', commutative=False)
assert sstr(A*B*C**-1) == "A*B*C**(-1)"
assert sstr(C**-1*A*B) == "C**(-1)*A*B"
assert sstr(A*C**-1*B) == "A*C**(-1)*B"
assert sstr(sqrt(A)) == "sqrt(A)"
assert sstr(1/sqrt(A)) == "A**(-1/2)"
def test_empty_printer():
str_printer = StrPrinter()
assert str_printer.emptyPrinter("foo") == "foo"
assert str_printer.emptyPrinter(x*y) == "x*y"
assert str_printer.emptyPrinter(32) == "32"
def test_settings():
raises(TypeError, lambda: sstr(S(4), method="garbage"))
def test_RandomDomain():
from sympy.stats import Normal, Die, Exponential, pspace, where
X = Normal('x1', 0, 1)
assert str(where(X>0)) == "Domain: 0 < x1"
D = Die('d1', 6)
assert str(where(D>4)) == "Domain: Or(d1 == 5, d1 == 6)"
A = Exponential('a', 1)
B = Exponential('b', 1)
    assert str(pspace(Tuple(A,B)).domain) == "Domain: And(0 <= a, 0 <= b)"
def test_FiniteSet():
assert str(FiniteSet(range(1, 51))) == '{1, 2, 3, ..., 48, 49, 50}'
assert str(FiniteSet(range(1, 6))) == '{1, 2, 3, 4, 5}'
def test_PrettyPoly():
from sympy.polys.domains import QQ
F = QQ.frac_field(x, y)
R = QQ[x, y]
assert sstr(F.convert(x/(x + y))) == sstr(x/(x + y))
assert sstr(R.convert(x + y)) == sstr(x + y)
def test_categories():
from sympy.categories import (Object, Morphism, NamedMorphism,
IdentityMorphism, Category)
A = Object("A")
B = Object("B")
f = NamedMorphism(A, B, "f")
id_A = IdentityMorphism(A)
K = Category("K")
assert str(A) == 'Object("A")'
assert str(f) == 'NamedMorphism(Object("A"), Object("B"), "f")'
assert str(id_A) == 'IdentityMorphism(Object("A"))'
assert str(K) == 'Category("K")'
def test_Tr():
A, B = symbols('A B', commutative=False)
t = Tr(A*B)
assert str(t) == 'Tr(A*B)'
| 33.023297
| 139
| 0.515168
|
f702393d2ef35884c8d2fef7496ee40c1e76f58b
| 10,876
|
py
|
Python
|
configs/dsc/fast_dsc_r50_fpn_1x_coco.py
|
hding2455/DSC
|
a3f49f549fd569e3c22b6dadbee1013418b9716f
|
[
"BSD-2-Clause"
] | 13
|
2021-04-26T08:33:00.000Z
|
2022-03-30T03:29:36.000Z
|
configs/dsc/fast_dsc_r50_fpn_1x_coco.py
|
hding2455/DSC
|
a3f49f549fd569e3c22b6dadbee1013418b9716f
|
[
"BSD-2-Clause"
] | 14
|
2021-06-24T07:02:38.000Z
|
2022-03-18T21:25:47.000Z
|
configs/dsc/fast_dsc_r50_fpn_1x_coco.py
|
hding2455/DSC
|
a3f49f549fd569e3c22b6dadbee1013418b9716f
|
[
"BSD-2-Clause"
] | 3
|
2021-10-21T11:42:32.000Z
|
2022-03-26T08:30:34.000Z
|
# model settings
_base_ = [
'../_base_/datasets/coco_instance.py',
'../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]
model = dict(
type='DSC',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
norm_eval=True,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[8],
ratios=[0.5, 1.0, 2.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
roi_head=dict(
type='DSCRoIHead',
num_stages=3,
stage_loss_weights=[dict(loss_mpn=1, loss_bbox=1, loss_cls=1),
dict(loss_mpn=1, loss_bbox=0.5, loss_cls=1),
dict(loss_mpn=1, loss_bbox=0.5, loss_cls=1),
dict(loss_mask=1)],
semantic_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=14, sample_num=0),
out_channels=256,
featmap_strides=[8]),
semantic_head=dict(
type='FusedSemanticHead',
num_ins=5,
fusion_level=1,
num_convs=4,
in_channels=256,
conv_out_channels=256,
num_classes=183,
ignore_label=255,
loss_weight=0.2),
relative_roi_extractor=dict(
type='RelativeRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=0),
out_channels=256,
featmap_strides=[1.0]),
mpn_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
mpn=[
dict(
type='DSCMaskHead',
with_conv_res=False,
num_convs=4,
in_channels=256,
conv_out_channels=256,
class_agnostic=True,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)),
dict(
type='DSCMaskHead',
with_conv_res=True,
num_convs=4,
in_channels=256,
conv_out_channels=256,
class_agnostic=True,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)),
dict(
type='DSCMaskHead',
with_conv_res=True,
num_convs=4,
in_channels=256,
conv_out_channels=256,
class_agnostic=True,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))],
bbox_roi_extractor=dict(
type='SgSingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='Shared2FCDSCBBoxHead',
conv_res=1,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCDSCBBoxHead',
conv_res=1,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCDSCBBoxHead',
conv_res=1,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=80,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
],
mask_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=14, sample_num=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
mask_head=dict(
type='DSCMaskHead',
num_convs=4,
in_channels=256,
conv_out_channels=256,
num_classes=80,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=14,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='PseudoSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=False),
mask_size=14,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='PseudoSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=False),
mask_size=14,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
match_low_quality=False,
ignore_iof_thr=-1),
sampler=dict(
type='PseudoSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=False),
mask_size=28,
pos_weight=-1,
debug=False),
])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.001,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100,
mask_thr_binary=0.5))
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='SegRescale', scale_factor=1 / 8),
dict(type='DefaultFormatBundle'),
dict(
type='Collect',
keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
train=dict(
seg_prefix=data_root + 'stuffthingmaps/train2017/',
pipeline=train_pipeline),
val=dict(pipeline=test_pipeline),
test=dict(pipeline=test_pipeline))
| 33.88162
| 79
| 0.495954
|
f70239a34c06e8d819bab5fc729906e65d1d5260
| 420
|
py
|
Python
|
lists/migrations/0001_initial.py
|
migglu/todo-lists
|
1a9cbdd2b2ff5240ef40946f8e07d18537cd3a12
|
[
"MIT"
] | null | null | null |
lists/migrations/0001_initial.py
|
migglu/todo-lists
|
1a9cbdd2b2ff5240ef40946f8e07d18537cd3a12
|
[
"MIT"
] | null | null | null |
lists/migrations/0001_initial.py
|
migglu/todo-lists
|
1a9cbdd2b2ff5240ef40946f8e07d18537cd3a12
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
],
),
]
| 21
| 114
| 0.585714
|
f7023a346bd1023538bb62dbeaea32c222fa7d60
| 210
|
py
|
Python
|
setup.py
|
chrisjsewell/disk-objectstore
|
274c868ade18cb89a38690cb99a28f514aeacba1
|
[
"MIT"
] | 6
|
2020-10-22T08:46:08.000Z
|
2022-01-27T06:50:07.000Z
|
setup.py
|
chrisjsewell/disk-objectstore
|
274c868ade18cb89a38690cb99a28f514aeacba1
|
[
"MIT"
] | 89
|
2020-04-11T16:25:38.000Z
|
2020-07-20T14:52:38.000Z
|
setup.py
|
chrisjsewell/disk-objectstore
|
274c868ade18cb89a38690cb99a28f514aeacba1
|
[
"MIT"
] | 5
|
2020-07-20T17:03:33.000Z
|
2022-01-10T13:48:55.000Z
|
"""This file is needed for editable installs (`pip install -e .`).
Can be removed once the following is resolved
https://github.com/pypa/packaging-problems/issues/256
"""
from setuptools import setup
setup()
| 23.333333
| 66
| 0.752381
|
f7023b9f4124b4ce5a8066663f7adc569a5ebde6
| 1,292
|
py
|
Python
|
examples/mpi/visualization/config.py
|
FTHPC/FlipIt
|
02882de89941d83f89c84856e307ec53c99cf7c0
|
[
"NCSA"
] | 7
|
2017-02-20T21:39:33.000Z
|
2020-06-05T09:07:15.000Z
|
examples/mpi/visualization/config.py
|
FTHPC/FlipIt
|
02882de89941d83f89c84856e307ec53c99cf7c0
|
[
"NCSA"
] | 3
|
2016-05-05T13:32:10.000Z
|
2016-05-29T14:33:07.000Z
|
examples/mpi/visualization/config.py
|
FTHPC/FlipIt
|
02882de89941d83f89c84856e307ec53c99cf7c0
|
[
"NCSA"
] | 4
|
2015-08-27T11:19:53.000Z
|
2016-05-03T19:50:35.000Z
|
############### Injector Parameters ##################
#
# config - config file used by the compiler pass
# funcList - list of functions that are faulty
# prob - probability that an instruction is faulty
# byte - which byte is faulty (0-7); -1 = random
# singleInj - one injection per active rank (0 or 1)
# ptr - add code to inject into pointers (0 or 1)
# arith - add code to inject into mathematics (0 or 1)
# ctrl - add code to inject into control (0 or 1)
# stateFile - unique counter for fault site index;
# should differ based on application
#
#####################################################
config = "jacobi.config"
funcList = "\"\""
prob = 1e-5
byte = -1
singleInj = 1
ptr = 1
arith = 1
ctrl = 1
stateFile = "jacobi"
############# Library Parameters #####################
#
# FLIPIT_PATH - Path to FlipIt repo
# SHOW - libraries and path wrapped by mpicc
#
#####################################################
import os
FLIPIT_PATH = os.environ['FLIPIT_PATH']
LLVM_BUILD_PATH = os.environ['LLVM_BUILD_PATH']
SHOW = ""
########### Files to NOT inject inside ###############
notInject = [" ", " "]
############ Default Compiler #################
cc = "mpicc"
############ Verbose compiler output ##############
verbose = False
| 27.489362
| 57
| 0.537926
|
f7023be0695f3585c75bf0627f064210a96296ac
| 491
|
py
|
Python
|
python/stage_5/4673.py
|
smartx-jshan/Coding_Practice
|
bc7d485e7992031e55df62483818b721ad7d1d4f
|
[
"Apache-2.0"
] | null | null | null |
python/stage_5/4673.py
|
smartx-jshan/Coding_Practice
|
bc7d485e7992031e55df62483818b721ad7d1d4f
|
[
"Apache-2.0"
] | null | null | null |
python/stage_5/4673.py
|
smartx-jshan/Coding_Practice
|
bc7d485e7992031e55df62483818b721ad7d1d4f
|
[
"Apache-2.0"
] | null | null | null |
def num(a):
    # Return n plus the sum of its decimal digits: d(n) in the
    # self-number problem (Baekjoon 4673).
    n = int(a)
    if n < 10:
        return n + n
    elif n < 100:
        return n + n // 10 + n % 10
    elif n < 1000:
        return n + n // 100 + (n // 10) % 10 + n % 10
    else:
        return n + n // 1000 + (n // 100) % 10 + (n // 10) % 10 + n % 10
# Sieve: mark every number that is generated by some d(i); whatever is
# left unmarked below 10000 is a self number and gets printed.
count = list(range(10000))
for i in range(10000):
    temp = num(i)
    if temp < 10000:
        count[temp] = -1
for i in range(10000):
    if count[i] != -1:
        print(i)
| 19.64
| 80
| 0.503055
|
f7023be0b6c9bb5058c9f741b1eba904776dc433
| 9,753
|
py
|
Python
|
python/ccxt/virwox.py
|
wadaxofficial/ccxt
|
765aaa4cd83c1e2dc75717d49b50a55ac4dc0487
|
[
"MIT"
] | null | null | null |
python/ccxt/virwox.py
|
wadaxofficial/ccxt
|
765aaa4cd83c1e2dc75717d49b50a55ac4dc0487
|
[
"MIT"
] | null | null | null |
python/ccxt/virwox.py
|
wadaxofficial/ccxt
|
765aaa4cd83c1e2dc75717d49b50a55ac4dc0487
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
class virwox (Exchange):
def describe(self):
return self.deep_extend(super(virwox, self).describe(), {
'id': 'virwox',
'name': 'VirWoX',
'countries': ['AT', 'EU'],
'rateLimit': 1000,
'has': {
'CORS': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766894-6da9d360-5eea-11e7-90aa-41f2711b7405.jpg',
'api': {
'public': 'https://api.virwox.com/api/json.php',
'private': 'https://www.virwox.com/api/trading.php',
},
'www': 'https://www.virwox.com',
'doc': 'https://www.virwox.com/developers.php',
},
'requiredCredentials': {
'apiKey': True,
'secret': False,
'login': True,
'password': True,
},
'api': {
'public': {
'get': [
'getInstruments',
'getBestPrices',
'getMarketDepth',
'estimateMarketOrder',
'getTradedPriceVolume',
'getRawTradeData',
'getStatistics',
'getTerminalList',
'getGridList',
'getGridStatistics',
],
'post': [
'getInstruments',
'getBestPrices',
'getMarketDepth',
'estimateMarketOrder',
'getTradedPriceVolume',
'getRawTradeData',
'getStatistics',
'getTerminalList',
'getGridList',
'getGridStatistics',
],
},
'private': {
'get': [
'cancelOrder',
'getBalances',
'getCommissionDiscount',
'getOrders',
'getTransactions',
'placeOrder',
],
'post': [
'cancelOrder',
'getBalances',
'getCommissionDiscount',
'getOrders',
'getTransactions',
'placeOrder',
],
},
},
})
def fetch_markets(self, params={}):
markets = self.publicGetGetInstruments()
keys = list(markets['result'].keys())
result = []
for p in range(0, len(keys)):
market = markets['result'][keys[p]]
id = market['instrumentID']
symbol = market['symbol']
base = market['longCurrency']
quote = market['shortCurrency']
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'info': market,
})
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.privatePostGetBalances()
balances = response['result']['accountList']
result = {'info': balances}
for b in range(0, len(balances)):
balance = balances[b]
currency = balance['currency']
total = balance['balance']
account = {
'free': total,
'used': 0.0,
'total': total,
}
result[currency] = account
return self.parse_balance(result)
def fetch_market_price(self, symbol, params={}):
self.load_markets()
response = self.publicPostGetBestPrices(self.extend({
'symbols': [symbol],
}, params))
result = response['result']
return {
'bid': self.safe_float(result[0], 'bestBuyPrice'),
'ask': self.safe_float(result[0], 'bestSellPrice'),
}
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
request = {
'symbols': [symbol],
}
if limit is not None:
request['buyDepth'] = limit # 100
request['sellDepth'] = limit # 100
response = self.publicPostGetMarketDepth(self.extend(request, params))
orderbook = response['result'][0]
return self.parse_order_book(orderbook, None, 'buy', 'sell', 'price', 'volume')
def fetch_ticker(self, symbol, params={}):
self.load_markets()
end = self.milliseconds()
start = end - 86400000
response = self.publicGetGetTradedPriceVolume(self.extend({
'instrument': symbol,
'endDate': self.ymdhms(end),
'startDate': self.ymdhms(start),
'HLOC': 1,
}, params))
tickers = response['result']['priceVolumeList']
keys = list(tickers.keys())
length = len(keys)
lastKey = keys[length - 1]
ticker = tickers[lastKey]
timestamp = self.milliseconds()
close = self.safe_float(ticker, 'close')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': None,
'open': self.safe_float(ticker, 'open'),
'close': close,
'last': close,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'longVolume'),
'quoteVolume': self.safe_float(ticker, 'shortVolume'),
'info': ticker,
}
def parse_trade(self, trade, symbol=None):
sec = self.safe_integer(trade, 'time')
timestamp = sec * 1000
return {
'id': trade['tid'],
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'order': None,
'symbol': symbol,
'type': None,
'side': None,
'price': self.safe_float(trade, 'price'),
'amount': self.safe_float(trade, 'vol'),
'fee': None,
'info': trade,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetGetRawTradeData(self.extend({
'instrument': symbol,
'timespan': 3600,
}, params))
result = self.safe_value(response, 'result', {})
trades = self.safe_value(result, 'data', [])
return self.parse_trades(trades, market)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'instrument': market['symbol'],
'orderType': side.upper(),
'amount': amount,
}
if type == 'limit':
request['price'] = price
response = self.privatePostPlaceOrder(self.extend(request, params))
return {
'info': response,
'id': self.safe_string(response['result'], 'orderID'),
}
def cancel_order(self, id, symbol=None, params={}):
request = {
'orderID': id,
}
return self.privatePostCancelOrder(self.extend(request, params))
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api]
auth = {}
if api == 'private':
self.check_required_credentials()
auth['key'] = self.apiKey
auth['user'] = self.login
auth['pass'] = self.password
nonce = self.nonce()
if method == 'GET':
url += '?' + self.urlencode(self.extend({
'method': path,
'id': nonce,
}, auth, params))
else:
headers = {'Content-Type': 'application/json'}
body = self.json({
'method': path,
'params': self.extend(auth, params),
'id': nonce,
})
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response):
if code == 200:
if (body[0] == '{') or (body[0] == '['):
if 'result' in response:
result = response['result']
if 'errorCode' in result:
errorCode = result['errorCode']
if errorCode != 'OK':
raise ExchangeError(self.id + ' error returned: ' + body)
else:
raise ExchangeError(self.id + ' malformed response: no result in response: ' + body)
else:
# if not a JSON response
raise ExchangeError(self.id + ' returned a non-JSON reply: ' + body)
| 36.122222
| 126
| 0.467138
|
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
class virwox (Exchange):
def describe(self):
return self.deep_extend(super(virwox, self).describe(), {
'id': 'virwox',
'name': 'VirWoX',
'countries': ['AT', 'EU'],
'rateLimit': 1000,
'has': {
'CORS': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766894-6da9d360-5eea-11e7-90aa-41f2711b7405.jpg',
'api': {
'public': 'https://api.virwox.com/api/json.php',
'private': 'https://www.virwox.com/api/trading.php',
},
'www': 'https://www.virwox.com',
'doc': 'https://www.virwox.com/developers.php',
},
'requiredCredentials': {
'apiKey': True,
'secret': False,
'login': True,
'password': True,
},
'api': {
'public': {
'get': [
'getInstruments',
'getBestPrices',
'getMarketDepth',
'estimateMarketOrder',
'getTradedPriceVolume',
'getRawTradeData',
'getStatistics',
'getTerminalList',
'getGridList',
'getGridStatistics',
],
'post': [
'getInstruments',
'getBestPrices',
'getMarketDepth',
'estimateMarketOrder',
'getTradedPriceVolume',
'getRawTradeData',
'getStatistics',
'getTerminalList',
'getGridList',
'getGridStatistics',
],
},
'private': {
'get': [
'cancelOrder',
'getBalances',
'getCommissionDiscount',
'getOrders',
'getTransactions',
'placeOrder',
],
'post': [
'cancelOrder',
'getBalances',
'getCommissionDiscount',
'getOrders',
'getTransactions',
'placeOrder',
],
},
},
})
def fetch_markets(self, params={}):
markets = self.publicGetGetInstruments()
keys = list(markets['result'].keys())
result = []
for p in range(0, len(keys)):
market = markets['result'][keys[p]]
id = market['instrumentID']
symbol = market['symbol']
base = market['longCurrency']
quote = market['shortCurrency']
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'info': market,
})
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.privatePostGetBalances()
balances = response['result']['accountList']
result = {'info': balances}
for b in range(0, len(balances)):
balance = balances[b]
currency = balance['currency']
total = balance['balance']
account = {
'free': total,
'used': 0.0,
'total': total,
}
result[currency] = account
return self.parse_balance(result)
def fetch_market_price(self, symbol, params={}):
self.load_markets()
response = self.publicPostGetBestPrices(self.extend({
'symbols': [symbol],
}, params))
result = response['result']
return {
'bid': self.safe_float(result[0], 'bestBuyPrice'),
'ask': self.safe_float(result[0], 'bestSellPrice'),
}
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
request = {
'symbols': [symbol],
}
if limit is not None:
            request['buyDepth'] = limit
            request['sellDepth'] = limit
        response = self.publicPostGetMarketDepth(self.extend(request, params))
orderbook = response['result'][0]
return self.parse_order_book(orderbook, None, 'buy', 'sell', 'price', 'volume')
def fetch_ticker(self, symbol, params={}):
self.load_markets()
end = self.milliseconds()
start = end - 86400000
response = self.publicGetGetTradedPriceVolume(self.extend({
'instrument': symbol,
'endDate': self.ymdhms(end),
'startDate': self.ymdhms(start),
'HLOC': 1,
}, params))
tickers = response['result']['priceVolumeList']
keys = list(tickers.keys())
length = len(keys)
lastKey = keys[length - 1]
ticker = tickers[lastKey]
timestamp = self.milliseconds()
close = self.safe_float(ticker, 'close')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': None,
'open': self.safe_float(ticker, 'open'),
'close': close,
'last': close,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'longVolume'),
'quoteVolume': self.safe_float(ticker, 'shortVolume'),
'info': ticker,
}
def parse_trade(self, trade, symbol=None):
sec = self.safe_integer(trade, 'time')
timestamp = sec * 1000
return {
'id': trade['tid'],
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'order': None,
'symbol': symbol,
'type': None,
'side': None,
'price': self.safe_float(trade, 'price'),
'amount': self.safe_float(trade, 'vol'),
'fee': None,
'info': trade,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
response = self.publicGetGetRawTradeData(self.extend({
'instrument': symbol,
'timespan': 3600,
}, params))
result = self.safe_value(response, 'result', {})
trades = self.safe_value(result, 'data', [])
return self.parse_trades(trades, market)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'instrument': market['symbol'],
'orderType': side.upper(),
'amount': amount,
}
if type == 'limit':
request['price'] = price
response = self.privatePostPlaceOrder(self.extend(request, params))
return {
'info': response,
'id': self.safe_string(response['result'], 'orderID'),
}
def cancel_order(self, id, symbol=None, params={}):
request = {
'orderID': id,
}
return self.privatePostCancelOrder(self.extend(request, params))
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api]
auth = {}
if api == 'private':
self.check_required_credentials()
auth['key'] = self.apiKey
auth['user'] = self.login
auth['pass'] = self.password
nonce = self.nonce()
if method == 'GET':
url += '?' + self.urlencode(self.extend({
'method': path,
'id': nonce,
}, auth, params))
else:
headers = {'Content-Type': 'application/json'}
body = self.json({
'method': path,
'params': self.extend(auth, params),
'id': nonce,
})
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response):
if code == 200:
if (body[0] == '{') or (body[0] == '['):
if 'result' in response:
result = response['result']
if 'errorCode' in result:
errorCode = result['errorCode']
if errorCode != 'OK':
raise ExchangeError(self.id + ' error returned: ' + body)
else:
raise ExchangeError(self.id + ' malformed response: no result in response: ' + body)
else:
raise ExchangeError(self.id + ' returned a non-JSON reply: ' + body)
| true
| true
|
f7023d3f50a4bcdd656f0e33b9e318facfcd714f
| 892
|
py
|
Python
|
kubi_ecs_logger/models/fields/destination.py
|
kumina/kubi_ecs_logger
|
64d9519e0759a24253a4edc53e0c024675033d1c
|
[
"BSD-3-Clause"
] | 6
|
2019-12-15T12:47:06.000Z
|
2022-01-11T08:54:58.000Z
|
kubi_ecs_logger/models/fields/destination.py
|
kumina/kubi_ecs_logger
|
64d9519e0759a24253a4edc53e0c024675033d1c
|
[
"BSD-3-Clause"
] | null | null | null |
kubi_ecs_logger/models/fields/destination.py
|
kumina/kubi_ecs_logger
|
64d9519e0759a24253a4edc53e0c024675033d1c
|
[
"BSD-3-Clause"
] | null | null | null |
from marshmallow import fields
from .field_set import FieldSet, FieldSetSchema
class Destination(FieldSet):
def __init__(self,
address: str = None,
bytes: int = None,
domain: str = None,
ip: str = None,
mac: str = None,
packets: int = None,
port: int = None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.address = address
self.bytes = bytes
self.domain = domain
self.ip = ip
self.mac = mac
self.packets = packets
self.port = port
class DestinationSchema(FieldSetSchema):
address = fields.String()
bytes = fields.Integer()
domain = fields.String()
ip = fields.String()
mac = fields.String()
packets = fields.Integer()
port = fields.Integer()
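# Usage sketch (illustrative addition, not part of the original module):
# dumping a Destination through its marshmallow schema. Keyword construction
# is grounded in the __init__ signature above; the concrete values and the
# behavior of the FieldSet/FieldSetSchema base classes are assumptions.
if __name__ == '__main__':
    dst = Destination(address='10.0.0.1', ip='10.0.0.1', port=443)
    print(DestinationSchema().dump(dst))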
| 24.777778
| 47
| 0.533632
|
from marshmallow import fields
from .field_set import FieldSet, FieldSetSchema
class Destination(FieldSet):
def __init__(self,
address: str = None,
bytes: int = None,
domain: str = None,
ip: str = None,
mac: str = None,
packets: int = None,
port: int = None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.address = address
self.bytes = bytes
self.domain = domain
self.ip = ip
self.mac = mac
self.packets = packets
self.port = port
class DestinationSchema(FieldSetSchema):
address = fields.String()
bytes = fields.Integer()
domain = fields.String()
ip = fields.String()
mac = fields.String()
packets = fields.Integer()
port = fields.Integer()
| true
| true
|
f70240c114506f9d8a93be31e8b4a574850c87da
| 545
|
py
|
Python
|
vespawatch/migrations/0053_managementaction_result.py
|
JWCook/vespa-watch
|
7b5fe7dfd3e897d99fd91c479346c89efd890d23
|
[
"MIT"
] | 3
|
2019-01-17T09:22:13.000Z
|
2020-06-05T03:46:45.000Z
|
vespawatch/migrations/0053_managementaction_result.py
|
JWCook/vespa-watch
|
7b5fe7dfd3e897d99fd91c479346c89efd890d23
|
[
"MIT"
] | 406
|
2018-06-13T18:36:53.000Z
|
2022-03-22T16:41:50.000Z
|
vespawatch/migrations/0053_managementaction_result.py
|
JWCook/vespa-watch
|
7b5fe7dfd3e897d99fd91c479346c89efd890d23
|
[
"MIT"
] | 8
|
2019-02-12T10:05:08.000Z
|
2020-10-01T05:39:13.000Z
|
# Generated by Django 2.2.24 on 2021-08-24 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('vespawatch', '0052_auto_20210824_1131'),
]
operations = [
migrations.AddField(
model_name='managementaction',
name='result',
field=models.CharField(choices=[('ST', 'Successfully treated'), ('UT', 'Unsuccessfully treated'), ('UN', 'Untreated'), ('UK', 'UK')], default='UK', max_length=3, verbose_name='Result'),
),
]
| 28.684211
| 197
| 0.614679
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('vespawatch', '0052_auto_20210824_1131'),
]
operations = [
migrations.AddField(
model_name='managementaction',
name='result',
field=models.CharField(choices=[('ST', 'Successfully treated'), ('UT', 'Unsuccessfully treated'), ('UN', 'Untreated'), ('UK', 'UK')], default='UK', max_length=3, verbose_name='Result'),
),
]
| true
| true
|
f70240d38e81353f512252eb5825d0ca09406cdb
| 52,563
|
py
|
Python
|
py_insightvm_sdk/models/resources_vulnerability_validation_resource.py
|
greenpau/py_insightvm_sdk
|
bd881f26e14cb9f0f9c47927469ec992de9de8e6
|
[
"Apache-2.0"
] | 2
|
2019-03-15T16:05:54.000Z
|
2020-07-19T18:37:50.000Z
|
py_insightvm_sdk/models/resources_vulnerability_validation_resource.py
|
greenpau/py_insightvm_sdk
|
bd881f26e14cb9f0f9c47927469ec992de9de8e6
|
[
"Apache-2.0"
] | 1
|
2021-03-26T04:46:12.000Z
|
2021-03-26T04:51:23.000Z
|
py_insightvm_sdk/models/resources_vulnerability_validation_resource.py
|
greenpau/py_insightvm_sdk
|
bd881f26e14cb9f0f9c47927469ec992de9de8e6
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
InsightVM API
# Overview This guide documents the InsightVM Application Programming Interface (API) Version 3. This API supports the Representation State Transfer (REST) design pattern. Unless noted otherwise this API accepts and produces the `application/json` media type. This API uses Hypermedia as the Engine of Application State (HATEOAS) and is hypermedia friendly. All API connections must be made to the security console using HTTPS. ## Versioning Versioning is specified in the URL and the base path of this API is: `https://<host>:<port>/api/3/`. ## Specification An <a target=\"_blank\" href=\"https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md\">OpenAPI v2</a> specification (also known as Swagger 2) of this API is available. Tools such as <a target=\"_blank\" href=\"https://github.com/swagger-api/swagger-codegen\">swagger-codegen</a> can be used to generate an API client in the language of your choosing using this specification document. <p class=\"openapi\">Download the specification: <a class=\"openapi-button\" target=\"_blank\" download=\"\" href=\"/api/3/json\"> Download </a></p> ## Authentication Authorization to the API uses HTTP Basic Authorization (see <a target=\"_blank\" href=\"https://www.ietf.org/rfc/rfc2617.txt\">RFC 2617</a> for more information). Requests must supply authorization credentials in the `Authorization` header using a Base64 encoded hash of `\"username:password\"`. <!-- ReDoc-Inject: <security-definitions> --> ### 2FA This API supports two-factor authentication (2FA) by supplying an authentication token in addition to the Basic Authorization. The token is specified using the `Token` request header. To leverage two-factor authentication, this must be enabled on the console and be configured for the account accessing the API. ## Resources ### Naming Resource names represent nouns and identify the entity being manipulated or accessed. All collection resources are pluralized to indicate to the client they are interacting with a collection of multiple resources of the same type. Singular resource names are used when there exists only one resource available to interact with. The following naming conventions are used by this API: | Type | Case | | --------------------------------------------- | ------------------------ | | Resource names | `lower_snake_case` | | Header, body, and query parameters parameters | `camelCase` | | JSON fields and property names | `camelCase` | #### Collections A collection resource is a parent resource for instance resources, but can itself be retrieved and operated on independently. Collection resources use a pluralized resource name. The resource path for collection resources follow the convention: ``` /api/3/{resource_name} ``` #### Instances An instance resource is a \"leaf\" level resource that may be retrieved, optionally nested within a collection resource. Instance resources are usually retrievable with opaque identifiers. The resource path for instance resources follows the convention: ``` /api/3/{resource_name}/{instance_id}... ``` ## Verbs The following HTTP operations are supported throughout this API. The general usage of the operation and both its failure and success status codes are outlined below. | Verb | Usage | Success | Failure | | --------- | ------------------------------------------------------------------------------------- | ----------- | -------------------------------------------------------------- | | `GET` | Used to retrieve a resource by identifier, or a collection of resources by type. 
| `200` | `400`, `401`, `402`, `404`, `405`, `408`, `410`, `415`, `500` | | `POST` | Creates a resource with an application-specified identifier. | `201` | `400`, `401`, `404`, `405`, `408`, `413`, `415`, `500` | | `POST` | Performs a request to queue an asynchronous job. | `202` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Creates a resource with a client-specified identifier. | `200` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `PUT` | Performs a full update of a resource with a specified identifier. | `201` | `400`, `401`, `403`, `405`, `408`, `410`, `413`, `415`, `500` | | `DELETE` | Deletes a resource by identifier or an entire collection of resources. | `204` | `400`, `401`, `405`, `408`, `410`, `413`, `415`, `500` | | `OPTIONS` | Requests what operations are available on a resource. | `200` | `401`, `404`, `405`, `408`, `500` | ### Common Operations #### OPTIONS All resources respond to the `OPTIONS` request, which allows discoverability of available operations that are supported. The `OPTIONS` response returns the acceptable HTTP operations on that resource within the `Allow` header. The response is always a `200 OK` status. ### Collection Resources Collection resources can support the `GET`, `POST`, `PUT`, and `DELETE` operations. #### GET The `GET` operation invoked on a collection resource indicates a request to retrieve all, or some, of the entities contained within the collection. This also includes the optional capability to filter or search resources during the request. The response from a collection listing is a paginated document. See [hypermedia links](#section/Overview/Paging) for more information. #### POST The `POST` is a non-idempotent operation that allows for the creation of a new resource when the resource identifier is not provided by the system during the creation operation (i.e. the Security Console generates the identifier). The content of the `POST` request is sent in the request body. The response to a successful `POST` request should be a `201 CREATED` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. The `POST` to a collection resource can also be used to interact with asynchronous resources. In this situation, instead of a `201 CREATED` response, the `202 ACCEPTED` response indicates that processing of the request is not fully complete but has been accepted for future processing. This request will respond similarly with a `Location` header with link to the job-oriented asynchronous resource that was created and/or queued. #### PUT The `PUT` is an idempotent operation that either performs a create with user-supplied identity, or a full replace or update of a resource by a known identifier. The response to a `PUT` operation to create an entity is a `201 Created` with a valid `Location` header field set to the URI that can be used to access to the newly created resource. `PUT` on a collection resource replaces all values in the collection. The typical response to a `PUT` operation that updates an entity is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. #### DELETE The `DELETE` is an idempotent operation that physically deletes a resource, or removes an association between resources. The typical response to a `DELETE` operation is hypermedia links, which may link to related resources caused by the side-effects of the changes performed. 
### Instance Resources Instance resources can support the `GET`, `PUT`, `POST`, `PATCH` and `DELETE` operations. #### GET Retrieves the details of a specific resource by its identifier. The details retrieved can be controlled through property selection and property views. The content of the resource is returned within the body of the response in the acceptable media type. #### PUT Allows for and idempotent \"full update\" (complete replacement) on a specific resource. If the resource does not exist, it will be created; if it does exist, it is completely overwritten. Any omitted properties in the request are assumed to be undefined/null. For \"partial updates\" use `POST` or `PATCH` instead. The content of the `PUT` request is sent in the request body. The identifier of the resource is specified within the URL (not the request body). The response to a successful `PUT` request is a `201 CREATED` to represent the created status, with a valid `Location` header field set to the URI that can be used to access to the newly created (or fully replaced) resource. #### POST Performs a non-idempotent creation of a new resource. The `POST` of an instance resource most commonly occurs with the use of nested resources (e.g. searching on a parent collection resource). The response to a `POST` of an instance resource is typically a `200 OK` if the resource is non-persistent, and a `201 CREATED` if there is a resource created/persisted as a result of the operation. This varies by endpoint. #### PATCH The `PATCH` operation is used to perform a partial update of a resource. `PATCH` is a non-idempotent operation that enforces an atomic mutation of a resource. Only the properties specified in the request are to be overwritten on the resource it is applied to. If a property is missing, it is assumed to not have changed. #### DELETE Permanently removes the individual resource from the system. If the resource is an association between resources, only the association is removed, not the resources themselves. A successful deletion of the resource should return `204 NO CONTENT` with no response body. This operation is not fully idempotent, as follow-up requests to delete a non-existent resource should return a `404 NOT FOUND`. ## Requests Unless otherwise indicated, the default request body media type is `application/json`. ### Headers Commonly used request headers include: | Header | Example | Purpose | | ------------------ | --------------------------------------------- | ---------------------------------------------------------------------------------------------- | | `Accept` | `application/json` | Defines what acceptable content types are allowed by the client. For all types, use `*/*`. | | `Accept-Encoding` | `deflate, gzip` | Allows for the encoding to be specified (such as gzip). | | `Accept-Language` | `en-US` | Indicates to the server the client's locale (defaults `en-US`). | | `Authorization ` | `Basic Base64(\"username:password\")` | Basic authentication | | `Token ` | `123456` | Two-factor authentication token (if enabled) | ### Dates & Times Dates and/or times are specified as strings in the ISO 8601 format(s). 
The following formats are supported as input: | Value | Format | Notes | | --------------------------- | ------------------------------------------------------ | ----------------------------------------------------- | | Date | YYYY-MM-DD | Defaults to 12 am UTC (if used for a date & time | | Date & time only | YYYY-MM-DD'T'hh:mm:ss[.nnn] | Defaults to UTC | | Date & time in UTC | YYYY-MM-DD'T'hh:mm:ss[.nnn]Z | | | Date & time w/ offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm | | | Date & time w/ zone-offset | YYYY-MM-DD'T'hh:mm:ss[.nnn][+|-]hh:mm[<zone-id>] | | ### Timezones Timezones are specified in the regional zone format, such as `\"America/Los_Angeles\"`, `\"Asia/Tokyo\"`, or `\"GMT\"`. ### Paging Pagination is supported on certain collection resources using a combination of two query parameters, `page` and `size`. As these are control parameters, they are prefixed with the underscore character. The page parameter dictates the zero-based index of the page to retrieve, and the `size` indicates the size of the page. For example, `/resources?page=2&size=10` will return page 3, with 10 records per page, giving results 21-30. The maximum page size for a request is 500. ### Sorting Sorting is supported on paginated resources with the `sort` query parameter(s). The sort query parameter(s) supports identifying a single or multi-property sort with a single or multi-direction output. The format of the parameter is: ``` sort=property[,ASC|DESC]... ``` Therefore, the request `/resources?sort=name,title,DESC` would return the results sorted by the name and title descending, in that order. The sort directions are either ascending `ASC` or descending `DESC`. With single-order sorting, all properties are sorted in the same direction. To sort the results with varying orders by property, multiple sort parameters are passed. For example, the request `/resources?sort=name,ASC&sort=title,DESC` would sort by name ascending and title descending, in that order. ## Responses The following response statuses may be returned by this API. | Status | Meaning | Usage | | ------ | ------------------------ |------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `200` | OK | The operation performed without error according to the specification of the request, and no more specific 2xx code is suitable. | | `201` | Created | A create request has been fulfilled and a resource has been created. The resource is available as the URI specified in the response, including the `Location` header. | | `202` | Accepted | An asynchronous task has been accepted, but not guaranteed, to be processed in the future. | | `400` | Bad Request | The request was invalid or cannot be otherwise served. The request is not likely to succeed in the future without modifications. | | `401` | Unauthorized | The user is unauthorized to perform the operation requested, or does not maintain permissions to perform the operation on the resource specified. | | `403` | Forbidden | The resource exists to which the user has access, but the operating requested is not permitted. | | `404` | Not Found | The resource specified could not be located, does not exist, or an unauthenticated client does not have permissions to a resource. | | `405` | Method Not Allowed | The operations may not be performed on the specific resource. Allowed operations are returned and may be performed on the resource. 
| | `408` | Request Timeout | The client has failed to complete a request in a timely manner and the request has been discarded. | | `413` | Request Entity Too Large | The request being provided is too large for the server to accept processing. | | `415` | Unsupported Media Type | The media type is not supported for the requested resource. | | `500` | Internal Server Error | An internal and unexpected error has occurred on the server at no fault of the client. | ### Security The response statuses 401, 403 and 404 need special consideration for security purposes. As necessary, error statuses and messages may be obscured to strengthen security and prevent information exposure. The following is a guideline for privileged resource response statuses: | Use Case | Access | Resource | Permission | Status | | ------------------------------------------------------------------ | ------------------ |------------------- | ------------ | ------------ | | Unauthenticated access to an unauthenticated resource. | Unauthenticated | Unauthenticated | Yes | `20x` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Authenticated | No | `401` | | Unauthenticated access to an authenticated resource. | Unauthenticated | Non-existent | No | `401` | | Authenticated access to a unauthenticated resource. | Authenticated | Unauthenticated | Yes | `20x` | | Authenticated access to an authenticated, unprivileged resource. | Authenticated | Authenticated | No | `404` | | Authenticated access to an authenticated, privileged resource. | Authenticated | Authenticated | Yes | `20x` | | Authenticated access to an authenticated, non-existent resource | Authenticated | Non-existent | Yes | `404` | ### Headers Commonly used response headers include: | Header | Example | Purpose | | -------------------------- | --------------------------------- | --------------------------------------------------------------- | | `Allow` | `OPTIONS, GET` | Defines the allowable HTTP operations on a resource. | | `Cache-Control` | `no-store, must-revalidate` | Disables caching of resources (as they are all dynamic). | | `Content-Encoding` | `gzip` | The encoding of the response body (if any). | | `Location` | | Refers to the URI of the resource created by a request. | | `Transfer-Encoding` | `chunked` | Specified the encoding used to transform response. | | `Retry-After` | 5000 | Indicates the time to wait before retrying a request. | | `X-Content-Type-Options` | `nosniff` | Disables MIME type sniffing. | | `X-XSS-Protection` | `1; mode=block` | Enables XSS filter protection. | | `X-Frame-Options` | `SAMEORIGIN` | Prevents rendering in a frame from a different origin. | | `X-UA-Compatible` | `IE=edge,chrome=1` | Specifies the browser mode to render in. | ### Format When `application/json` is returned in the response body it is always pretty-printed (indented, human readable output). Additionally, gzip compression/encoding is supported on all responses. #### Dates & Times Dates or times are returned as strings in the ISO 8601 'extended' format. When a date and time is returned (instant) the value is converted to UTC. For example: | Value | Format | Example | | --------------- | ------------------------------ | --------------------- | | Date | `YYYY-MM-DD` | 2017-12-03 | | Date & Time | `YYYY-MM-DD'T'hh:mm:ss[.nnn]Z` | 2017-12-03T10:15:30Z | #### Content In some resources a Content data type is used. This allows for multiple formats of representation to be returned within resource, specifically `\"html\"` and `\"text\"`. 
The `\"text\"` property returns a flattened representation suitable for output in textual displays. The `\"html\"` property returns an HTML fragment suitable for display within an HTML element. Note, the HTML returned is not a valid stand-alone HTML document. #### Paging The response to a paginated request follows the format: ```json { resources\": [ ... ], \"page\": { \"number\" : ..., \"size\" : ..., \"totalResources\" : ..., \"totalPages\" : ... }, \"links\": [ \"first\" : { \"href\" : \"...\" }, \"prev\" : { \"href\" : \"...\" }, \"self\" : { \"href\" : \"...\" }, \"next\" : { \"href\" : \"...\" }, \"last\" : { \"href\" : \"...\" } ] } ``` The `resources` property is an array of the resources being retrieved from the endpoint, each which should contain at minimum a \"self\" relation hypermedia link. The `page` property outlines the details of the current page and total possible pages. The object for the page includes the following properties: - number - The page number (zero-based) of the page returned. - size - The size of the pages, which is less than or equal to the maximum page size. - totalResources - The total amount of resources available across all pages. - totalPages - The total amount of pages. The last property of the paged response is the `links` array, which contains all available hypermedia links. For paginated responses, the \"self\", \"next\", \"previous\", \"first\", and \"last\" links are returned. The \"self\" link must always be returned and should contain a link to allow the client to replicate the original request against the collection resource in an identical manner to that in which it was invoked. The \"next\" and \"previous\" links are present if either or both there exists a previous or next page, respectively. The \"next\" and \"previous\" links have hrefs that allow \"natural movement\" to the next page, that is all parameters required to move the next page are provided in the link. The \"first\" and \"last\" links provide references to the first and last pages respectively. Requests outside the boundaries of the pageable will result in a `404 NOT FOUND`. Paginated requests do not provide a \"stateful cursor\" to the client, nor does it need to provide a read consistent view. Records in adjacent pages may change while pagination is being traversed, and the total number of pages and resources may change between requests within the same filtered/queries resource collection. #### Property Views The \"depth\" of the response of a resource can be configured using a \"view\". All endpoints supports two views that can tune the extent of the information returned in the resource. The supported views are `summary` and `details` (the default). View are specified using a query parameter, in this format: ```bash /<resource>?view={viewName} ``` #### Error Any error responses can provide a response body with a message to the client indicating more information (if applicable) to aid debugging of the error. All 40x and 50x responses will return an error response in the body. The format of the response is as follows: ```json { \"status\": <statusCode>, \"message\": <message>, \"links\" : [ { \"rel\" : \"...\", \"href\" : \"...\" } ] } ``` The `status` property is the same as the HTTP status returned in the response, to ease client parsing. The message property is a localized message in the request client's locale (if applicable) that articulates the nature of the error. The last property is the `links` property. 
This may contain additional [hypermedia links](#section/Overview/Authentication) to troubleshoot. #### Search Criteria <a section=\"section/Responses/SearchCriteria\"></a> Multiple resources make use of search criteria to match assets. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The operator is a type and property-specific operating performed on the filtered property. The valid values for fields and operators are outlined in the table below. Every filter also defines one or more values that are supplied to the operator. The valid values vary by operator and are outlined below. ##### Fields The following table outlines the search criteria fields and the available operators: | Field | Operators | | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------ | | `alternate-address-type` | `in` | | `container-image` | `is` ` is-not` ` starts-with` ` ends-with` ` contains` ` does-not-contain` ` is-like` ` not-like` | | `container-status` | `is` ` is-not` | | `containers` | `are` | | `criticality-tag` | `is` ` is-not` ` is-greater-than` ` is-less-than` ` is-applied` ` is-not-applied` | | `custom-tag` | `is` ` is-not` ` starts-with` ` ends-with` ` contains` ` does-not-contain` ` is-applied` ` is-not-applied` | | `cve` | `is` ` is-not` ` contains` ` does-not-contain` | | `cvss-access-complexity` | `is` ` is-not` | | `cvss-authentication-required` | `is` ` is-not` | | `cvss-access-vector` | `is` ` is-not` | | `cvss-availability-impact` | `is` ` is-not` | | `cvss-confidentiality-impact` | `is` ` is-not` | | `cvss-integrity-impact` | `is` ` is-not` | | `cvss-v3-confidentiality-impact` | `is` ` is-not` | | `cvss-v3-integrity-impact` | `is` ` is-not` | | `cvss-v3-availability-impact` | `is` ` is-not` | | `cvss-v3-attack-vector` | `is` ` is-not` | | `cvss-v3-attack-complexity` | `is` ` is-not` | | `cvss-v3-user-interaction` | `is` ` is-not` | | `cvss-v3-privileges-required` | `is` ` is-not` | | `host-name` | `is` ` is-not` ` starts-with` ` ends-with` ` contains` ` does-not-contain` ` is-empty` ` is-not-empty` ` is-like` ` not-like` | | `host-type` | `in` ` not-in` | | `ip-address` | `is` ` is-not` ` in-range` ` not-in-range` ` is-like` ` not-like` | | `ip-address-type` | `in` ` not-in` | | `last-scan-date` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `location-tag` | `is` ` is-not` ` starts-with` ` ends-with` ` contains` ` does-not-contain` ` is-applied` ` is-not-applied` | | `mobile-device-last-sync-time` | `is-within-the-last` ` is-earlier-than` | | `open-ports` | `is` ` is-not` ` in-range` | | `operating-system` | `contains` ` does-not-contain` ` is-empty` ` is-not-empty` | | `owner-tag` | `is` ` is-not` ` starts-with` ` ends-with` ` contains` ` does-not-contain` ` is-applied` ` is-not-applied` | | `pci-compliance` | `is` | | `risk-score` | `is` ` is-not` ` in-range` ` greater-than` ` less-than` | | `service-name` | `contains` ` does-not-contain` | | `site-id` | `in` ` not-in` | | `software` | `contains` ` does-not-contain` | | `vAsset-cluster` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | 
`vAsset-datacenter` | `is` ` is-not` | | `vAsset-host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `vAsset-power-state` | `in` ` not-in` | | `vAsset-resource-pool-path` | `contains` ` does-not-contain` | | `vulnerability-assessed` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `vulnerability-category` | `is` ` is-not` ` starts-with` ` ends-with` ` contains` ` does-not-contain` | | `vulnerability-cvss-v3-score` | `is` ` is-not` | | `vulnerability-cvss-score` | `is` ` is-not` ` in-range` ` is-greater-than` ` is-less-than` | | `vulnerability-exposures` | `includes` ` does-not-include` | | `vulnerability-title` | `contains` ` does-not-contain` ` is` ` is-not` ` starts-with` ` ends-with` | | `vulnerability-validated-status` | `are` | ##### Enumerated Properties The following fields have enumerated values: | Field | Acceptable Values | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------- | | `alternate-address-type` | 0=IPv4, 1=IPv6 | | `containers` | 0=present, 1=not present | | `container-status` | `created` `running` `paused` `restarting` `exited` `dead` `unknown` | | `cvss-access-complexity` | <ul><li><code>L</code> = Low</li><li><code>M</code> = Medium</li><li><code>H</code> = High</li></ul> | | `cvss-integrity-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-confidentiality-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-availability-impact` | <ul><li><code>N</code> = None</li><li><code>P</code> = Partial</li><li><code>C</code> = Complete</li></ul> | | `cvss-access-vector` | <ul><li><code>L</code> = Local</li><li><code>A</code> = Adjacent</li><li><code>N</code> = Network</li></ul> | | `cvss-authentication-required` | <ul><li><code>N</code> = None</li><li><code>S</code> = Single</li><li><code>M</code> = Multiple</li></ul> | | `cvss-v3-confidentiality-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-integrity-impact` | <ul><li><code>L</code> = Local</li><li><code>L</code> = Low</li><li><code>N</code> = None</li><li><code>H</code> = High</li></ul> | | `cvss-v3-availability-impact` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-attack-vector` | <ul><li><code>N</code> = Network</li><li><code>A</code> = Adjacent</li><li><code>L</code> = Local</li><li><code>P</code> = Physical</li></ul> | | `cvss-v3-attack-complexity` | <ul><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `cvss-v3-user-interaction` | <ul><li><code>N</code> = None</li><li><code>R</code> = Required</li></ul> | | `cvss-v3-privileges-required` | <ul><li><code>N</code> = None</li><li><code>L</code> = Low</li><li><code>H</code> = High</li></ul> | | `host-type` | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | `ip-address-type` | 0=IPv4, 1=IPv6 | | `pci-compliance` | 0=fail, 1=pass | | `vulnerability-validated-status` | 0=present, 1=not present | ##### Operator Properties <a section=\"section/Responses/SearchCriteria/OperatorProperties\"></a> The following table outlines which properties are required for each operator and the appropriate data type(s): | Operator | `value` | `lower` | `upper` | | 
----------------------|-----------------------|-----------------------|-----------------------| | `are` | `string` | | | | `contains` | `string` | | | | `does-not-contain` | `string` | | | | `ends with` | `string` | | | | `in` | `Array[ string ]` | | | | `in-range` | | `numeric` | `numeric` | | `includes` | `Array[ string ]` | | | | `is` | `string` | | | | `is-applied` | | | | | `is-between` | | `numeric` | `numeric` | | `is-earlier-than` | `numeric` | | | | `is-empty` | | | | | `is-greater-than` | `numeric` | | | | `is-on-or-after` | `string` (yyyy-MM-dd) | | | | `is-on-or-before` | `string` (yyyy-MM-dd) | | | | `is-not` | `string` | | | | `is-not-applied` | | | | | `is-not-empty` | | | | | `is-within-the-last` | `numeric` | | | | `less-than` | `string` | | | | `like` | `string` | | | | `not-contains` | `string` | | | | `not-in` | `Array[ string ]` | | | | `not-in-range` | | `numeric` | `numeric` | | `not-like` | `string` | | | | `starts-with` | `string` | | | #### Discovery Connection Search Criteria <a section=\"section/Responses/DiscoverySearchCriteria\"></a> Dynamic sites make use of search criteria to match assets from a discovery connection. Search criteria is an array of search filters. Each search filter has a generic format of: ```json { \"field\": \"<field-name>\", \"operator\": \"<operator>\", [\"value\": \"<value>\",] [\"lower\": \"<value>\",] [\"upper\": \"<value>\"] } ``` Every filter defines two required properties `field` and `operator`. The field is the name of an asset property that is being filtered on. The list of supported fields vary depending on the type of discovery connection configured for the dynamic site (e.g vSphere, ActiveSync, etc.). The operator is a type and property-specific operating performed on the filtered property. The valid values for fields outlined in the tables below and are grouped by the type of connection. Every filter also defines one or more values that are supplied to the operator. See <a href=\"#section/Responses/SearchCriteria/OperatorProperties\">Search Criteria Operator Properties</a> for more information on the valid values for each operator. ##### Fields (ActiveSync) This section documents search criteria information for ActiveSync discovery connections. The discovery connections must be one of the following types: `\"activesync-ldap\"`, `\"activesync-office365\"`, or `\"activesync-powershell\"`. The following table outlines the search criteria fields and the available operators for ActiveSync connections: | Field | Operators | | --------------------------------- | ------------------------------------------------------------- | | `last-sync-time` | `is-within-the-last` ` is-earlier-than` | | `operating-system` | `contains` ` does-not-contain` | | `user` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (AWS) This section documents search criteria information for AWS discovery connections. The discovery connections must be the type `\"aws\"`. 
The following table outlines the search criteria fields and the available operators for AWS connections: | Field | Operators | | ----------------------- | ------------------------------------------------------------- | | `availability-zone` | `contains` ` does-not-contain` | | `guest-os-family` | `contains` ` does-not-contain` | | `instance-id` | `contains` ` does-not-contain` | | `instance-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `instance-state` | `in` ` not-in` | | `instance-type` | `in` ` not-in` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `region` | `in` ` not-in` | | `vpc-id` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (DHCP) This section documents search criteria information for DHCP discovery connections. The discovery connections must be the type `\"dhcp\"`. The following table outlines the search criteria fields and the available operators for DHCP connections: | Field | Operators | | --------------- | ------------------------------------------------------------- | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `mac-address` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Fields (Sonar) This section documents search criteria information for Sonar discovery connections. The discovery connections must be the type `\"sonar\"`. The following table outlines the search criteria fields and the available operators for Sonar connections: | Field | Operators | | ------------------- | -------------------- | | `search-domain` | `contains` ` is` | | `ip-address` | `in-range` ` is` | | `sonar-scan-date` | `is-within-the-last` | ##### Fields (vSphere) This section documents search criteria information for vSphere discovery connections. The discovery connections must be the type `\"vsphere\"`. The following table outlines the search criteria fields and the available operators for vSphere connections: | Field | Operators | | -------------------- | ------------------------------------------------------------------------------------------ | | `cluster` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `data-center` | `is` ` is-not` | | `discovered-time` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `guest-os-family` | `contains` ` does-not-contain` | | `host-name` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | | `ip-address` | `in-range` ` not-in-range` ` is` ` is-not` | | `power-state` | `in` ` not-in` | | `resource-pool-path` | `contains` ` does-not-contain` | | `last-time-seen` | `is-on-or-before` ` is-on-or-after` ` is-between` ` is-earlier-than` ` is-within-the-last` | | `vm` | `is` ` is-not` ` contains` ` does-not-contain` ` starts-with` | ##### Enumerated Properties (vSphere) The following fields have enumerated values: | Field | Acceptable Values | | ------------- | ------------------------------------ | | `power-state` | `poweredOn` `poweredOff` `suspended` | ## HATEOAS This API follows Hypermedia as the Engine of Application State (HATEOAS) principals and is therefore hypermedia friendly. Hyperlinks are returned in the `links` property of any given resource and contain a fully-qualified hyperlink to the corresponding resource. 
The format of the hypermedia link adheres to both the <a target=\"_blank\" href=\"http://jsonapi.org\">{json:api} v1</a> <a target=\"_blank\" href=\"http://jsonapi.org/format/#document-links\">\"Link Object\"</a> and <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html\">JSON Hyper-Schema</a> <a target=\"_blank\" href=\"http://json-schema.org/latest/json-schema-hypermedia.html#rfc.section.5.2\">\"Link Description Object\"</a> formats. For example: ```json \"links\": [{ \"rel\": \"<relation>\", \"href\": \"<href>\" ... }] ``` Where appropriate link objects may also contain additional properties than the `rel` and `href` properties, such as `id`, `type`, etc. See the [Root](#tag/Root) resources for the entry points into API discovery. # noqa: E501
OpenAPI spec version: 3
Contact: support@rapid7.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from py_insightvm_sdk.models.link import Link # noqa: F401,E501
from py_insightvm_sdk.models.vulnerability_validation_resource import VulnerabilityValidationResource # noqa: F401,E501
class ResourcesVulnerabilityValidationResource(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'links': 'list[Link]',
'resources': 'list[VulnerabilityValidationResource]'
}
attribute_map = {
'links': 'links',
'resources': 'resources'
}
def __init__(self, links=None, resources=None): # noqa: E501
"""ResourcesVulnerabilityValidationResource - a model defined in Swagger""" # noqa: E501
self._links = None
self._resources = None
self.discriminator = None
if links is not None:
self.links = links
if resources is not None:
self.resources = resources
@property
def links(self):
"""Gets the links of this ResourcesVulnerabilityValidationResource. # noqa: E501
Hypermedia links to corresponding or related resources. # noqa: E501
:return: The links of this ResourcesVulnerabilityValidationResource. # noqa: E501
:rtype: list[Link]
"""
return self._links
@links.setter
def links(self, links):
"""Sets the links of this ResourcesVulnerabilityValidationResource.
Hypermedia links to corresponding or related resources. # noqa: E501
:param links: The links of this ResourcesVulnerabilityValidationResource. # noqa: E501
:type: list[Link]
"""
self._links = links
@property
def resources(self):
"""Gets the resources of this ResourcesVulnerabilityValidationResource. # noqa: E501
The resources returned. # noqa: E501
:return: The resources of this ResourcesVulnerabilityValidationResource. # noqa: E501
:rtype: list[VulnerabilityValidationResource]
"""
return self._resources
@resources.setter
def resources(self, resources):
"""Sets the resources of this ResourcesVulnerabilityValidationResource.
The resources returned. # noqa: E501
:param resources: The resources of this ResourcesVulnerabilityValidationResource. # noqa: E501
:type: list[VulnerabilityValidationResource]
"""
self._resources = resources
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResourcesVulnerabilityValidationResource, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResourcesVulnerabilityValidationResource):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
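# Usage sketch (illustrative, not part of the generated file): constructing
# the model and serializing it back to a plain dict via to_dict(). The empty
# lists are made-up example values.
if __name__ == '__main__':
    m = ResourcesVulnerabilityValidationResource(links=[], resources=[])
    print(m.to_dict())  # {'links': [], 'resources': []}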
| 352.771812
| 48,045
| 0.497403
|
import pprint
import re
import six
from py_insightvm_sdk.models.link import Link
from py_insightvm_sdk.models.vulnerability_validation_resource import VulnerabilityValidationResource
class ResourcesVulnerabilityValidationResource(object):
swagger_types = {
'links': 'list[Link]',
'resources': 'list[VulnerabilityValidationResource]'
}
attribute_map = {
'links': 'links',
'resources': 'resources'
}
def __init__(self, links=None, resources=None):
self._links = None
self._resources = None
self.discriminator = None
if links is not None:
self.links = links
if resources is not None:
self.resources = resources
@property
def links(self):
return self._links
@links.setter
def links(self, links):
self._links = links
@property
def resources(self):
return self._resources
@resources.setter
def resources(self, resources):
self._resources = resources
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResourcesVulnerabilityValidationResource, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, ResourcesVulnerabilityValidationResource):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
f702411b146ade9a780b38d3042522f84e5f82cb
| 325
|
py
|
Python
|
pythonprog/code/12/12.2/logcall.py
|
davidyshuang/python3
|
5200638f6c4450998d97a9ac24bcba0872786fb2
|
[
"Unlicense"
] | null | null | null |
pythonprog/code/12/12.2/logcall.py
|
davidyshuang/python3
|
5200638f6c4450998d97a9ac24bcba0872786fb2
|
[
"Unlicense"
] | null | null | null |
pythonprog/code/12/12.2/logcall.py
|
davidyshuang/python3
|
5200638f6c4450998d97a9ac24bcba0872786fb2
|
[
"Unlicense"
] | null | null | null |
# logcall.py
from functools import wraps
def logged(func):
# Idea: Give me a function, I'll put logging
# around it
print('Adding logging to', func.__name__)
@wraps(func)
def wrapper(*args, **kwargs):
print('You called', func.__name__)
return func(*args, **kwargs)
return wrapper
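# Hedged usage sketch (not part of the original file); `add` is a made-up
# example used only to illustrate the decorator.
@logged
def add(x, y):
    return x + y
assert add(2, 3) == 5  # prints 'You called add'; 'Adding logging to add'
                       # was printed once, when the decorator ran.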
| 19.117647
| 48
| 0.633846
|
from functools import wraps
def logged(func):
# around it
print('Adding logging to', func.__name__)
@wraps(func)
def wrapper(*args, **kwargs):
print('You called', func.__name__)
return func(*args, **kwargs)
return wrapper
| true
| true
|
f7024122a0575e48b00ab24490e91da345677693
| 21,119
|
py
|
Python
|
sympy/core/tests/test_relational.py
|
toolforger/sympy
|
adc391f00e1e35d27925efb0314ec8abde9d255d
|
[
"BSD-3-Clause"
] | 1
|
2019-06-27T13:40:28.000Z
|
2019-06-27T13:40:28.000Z
|
sympy/core/tests/test_relational.py
|
toolforger/sympy
|
adc391f00e1e35d27925efb0314ec8abde9d255d
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/core/tests/test_relational.py
|
toolforger/sympy
|
adc391f00e1e35d27925efb0314ec8abde9d255d
|
[
"BSD-3-Clause"
] | null | null | null |
from sympy.utilities.pytest import XFAIL, raises
from sympy import (S, Symbol, symbols, nan, oo, I, pi, Float, And, Or, Not,
Implies, Xor, zoo, sqrt, Rational, simplify, Function)
from sympy.core.compatibility import range
from sympy.core.relational import (Relational, Equality, Unequality,
GreaterThan, LessThan, StrictGreaterThan,
StrictLessThan, Rel, Eq, Lt, Le,
Gt, Ge, Ne)
from sympy.sets.sets import Interval, FiniteSet
x, y, z, t = symbols('x,y,z,t')
def test_rel_ne():
assert Relational(x, y, '!=') == Ne(x, y)
def test_rel_subs():
e = Relational(x, y, '==')
e = e.subs(x, z)
assert isinstance(e, Equality)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>=')
e = e.subs(x, z)
assert isinstance(e, GreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<=')
e = e.subs(x, z)
assert isinstance(e, LessThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>')
e = e.subs(x, z)
assert isinstance(e, StrictGreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<')
e = e.subs(x, z)
assert isinstance(e, StrictLessThan)
assert e.lhs == z
assert e.rhs == y
e = Eq(x, 0)
assert e.subs(x, 0) is S.true
assert e.subs(x, 1) is S.false
def test_wrappers():
e = x + x**2
res = Relational(y, e, '==')
assert Rel(y, x + x**2, '==') == res
assert Eq(y, x + x**2) == res
res = Relational(y, e, '<')
assert Lt(y, x + x**2) == res
res = Relational(y, e, '<=')
assert Le(y, x + x**2) == res
res = Relational(y, e, '>')
assert Gt(y, x + x**2) == res
res = Relational(y, e, '>=')
assert Ge(y, x + x**2) == res
res = Relational(y, e, '!=')
assert Ne(y, x + x**2) == res
def test_Eq():
assert Eq(x**2) == Eq(x**2, 0)
assert Eq(x**2) != Eq(x**2, 1)
assert Eq(x, x) # issue 5719
# issue 6116
p = Symbol('p', positive=True)
assert Eq(p, 0) is S.false
def test_rel_Infinity():
# NOTE: All of these are actually handled by sympy.core.Number, and do
# not create Relational objects.
assert (oo > oo) is S.false
assert (oo > -oo) is S.true
assert (oo > 1) is S.true
assert (oo < oo) is S.false
assert (oo < -oo) is S.false
assert (oo < 1) is S.false
assert (oo >= oo) is S.true
assert (oo >= -oo) is S.true
assert (oo >= 1) is S.true
assert (oo <= oo) is S.true
assert (oo <= -oo) is S.false
assert (oo <= 1) is S.false
assert (-oo > oo) is S.false
assert (-oo > -oo) is S.false
assert (-oo > 1) is S.false
assert (-oo < oo) is S.true
assert (-oo < -oo) is S.false
assert (-oo < 1) is S.true
assert (-oo >= oo) is S.false
assert (-oo >= -oo) is S.true
assert (-oo >= 1) is S.false
assert (-oo <= oo) is S.true
assert (-oo <= -oo) is S.true
assert (-oo <= 1) is S.true
def test_bool():
assert Eq(0, 0) is S.true
assert Eq(1, 0) is S.false
assert Ne(0, 0) is S.false
assert Ne(1, 0) is S.true
assert Lt(0, 1) is S.true
assert Lt(1, 0) is S.false
assert Le(0, 1) is S.true
assert Le(1, 0) is S.false
assert Le(0, 0) is S.true
assert Gt(1, 0) is S.true
assert Gt(0, 1) is S.false
assert Ge(1, 0) is S.true
assert Ge(0, 1) is S.false
assert Ge(1, 1) is S.true
assert Eq(I, 2) is S.false
assert Ne(I, 2) is S.true
raises(TypeError, lambda: Gt(I, 2))
raises(TypeError, lambda: Ge(I, 2))
raises(TypeError, lambda: Lt(I, 2))
raises(TypeError, lambda: Le(I, 2))
a = Float('.000000000000000000001', '')
b = Float('.0000000000000000000001', '')
assert Eq(pi + a, pi + b) is S.false
def test_rich_cmp():
assert (x < y) == Lt(x, y)
assert (x <= y) == Le(x, y)
assert (x > y) == Gt(x, y)
assert (x >= y) == Ge(x, y)
def test_doit():
from sympy import Symbol
p = Symbol('p', positive=True)
n = Symbol('n', negative=True)
np = Symbol('np', nonpositive=True)
nn = Symbol('nn', nonnegative=True)
assert Gt(p, 0).doit() is S.true
assert Gt(p, 1).doit() == Gt(p, 1)
assert Ge(p, 0).doit() is S.true
assert Le(p, 0).doit() is S.false
assert Lt(n, 0).doit() is S.true
assert Le(np, 0).doit() is S.true
assert Gt(nn, 0).doit() == Gt(nn, 0)
assert Lt(nn, 0).doit() is S.false
assert Eq(x, 0).doit() == Eq(x, 0)
def test_new_relational():
x = Symbol('x')
assert Eq(x) == Relational(x, 0) # None ==> Equality
assert Eq(x) == Relational(x, 0, '==')
assert Eq(x) == Relational(x, 0, 'eq')
assert Eq(x) == Equality(x, 0)
assert Eq(x, -1) == Relational(x, -1) # None ==> Equality
assert Eq(x, -1) == Relational(x, -1, '==')
assert Eq(x, -1) == Relational(x, -1, 'eq')
assert Eq(x, -1) == Equality(x, -1)
assert Eq(x) != Relational(x, 1) # None ==> Equality
assert Eq(x) != Relational(x, 1, '==')
assert Eq(x) != Relational(x, 1, 'eq')
assert Eq(x) != Equality(x, 1)
assert Eq(x, -1) != Relational(x, 1) # None ==> Equality
assert Eq(x, -1) != Relational(x, 1, '==')
assert Eq(x, -1) != Relational(x, 1, 'eq')
assert Eq(x, -1) != Equality(x, 1)
assert Ne(x, 0) == Relational(x, 0, '!=')
assert Ne(x, 0) == Relational(x, 0, '<>')
assert Ne(x, 0) == Relational(x, 0, 'ne')
assert Ne(x, 0) == Unequality(x, 0)
assert Ne(x, 0) != Relational(x, 1, '!=')
assert Ne(x, 0) != Relational(x, 1, '<>')
assert Ne(x, 0) != Relational(x, 1, 'ne')
assert Ne(x, 0) != Unequality(x, 1)
assert Ge(x, 0) == Relational(x, 0, '>=')
assert Ge(x, 0) == Relational(x, 0, 'ge')
assert Ge(x, 0) == GreaterThan(x, 0)
assert Ge(x, 1) != Relational(x, 0, '>=')
assert Ge(x, 1) != Relational(x, 0, 'ge')
assert Ge(x, 1) != GreaterThan(x, 0)
assert (x >= 1) == Relational(x, 1, '>=')
assert (x >= 1) == Relational(x, 1, 'ge')
assert (x >= 1) == GreaterThan(x, 1)
assert (x >= 0) != Relational(x, 1, '>=')
assert (x >= 0) != Relational(x, 1, 'ge')
assert (x >= 0) != GreaterThan(x, 1)
assert Le(x, 0) == Relational(x, 0, '<=')
assert Le(x, 0) == Relational(x, 0, 'le')
assert Le(x, 0) == LessThan(x, 0)
assert Le(x, 1) != Relational(x, 0, '<=')
assert Le(x, 1) != Relational(x, 0, 'le')
assert Le(x, 1) != LessThan(x, 0)
assert (x <= 1) == Relational(x, 1, '<=')
assert (x <= 1) == Relational(x, 1, 'le')
assert (x <= 1) == LessThan(x, 1)
assert (x <= 0) != Relational(x, 1, '<=')
assert (x <= 0) != Relational(x, 1, 'le')
assert (x <= 0) != LessThan(x, 1)
assert Gt(x, 0) == Relational(x, 0, '>')
assert Gt(x, 0) == Relational(x, 0, 'gt')
assert Gt(x, 0) == StrictGreaterThan(x, 0)
assert Gt(x, 1) != Relational(x, 0, '>')
assert Gt(x, 1) != Relational(x, 0, 'gt')
assert Gt(x, 1) != StrictGreaterThan(x, 0)
assert (x > 1) == Relational(x, 1, '>')
assert (x > 1) == Relational(x, 1, 'gt')
assert (x > 1) == StrictGreaterThan(x, 1)
assert (x > 0) != Relational(x, 1, '>')
assert (x > 0) != Relational(x, 1, 'gt')
assert (x > 0) != StrictGreaterThan(x, 1)
assert Lt(x, 0) == Relational(x, 0, '<')
assert Lt(x, 0) == Relational(x, 0, 'lt')
assert Lt(x, 0) == StrictLessThan(x, 0)
assert Lt(x, 1) != Relational(x, 0, '<')
assert Lt(x, 1) != Relational(x, 0, 'lt')
assert Lt(x, 1) != StrictLessThan(x, 0)
assert (x < 1) == Relational(x, 1, '<')
assert (x < 1) == Relational(x, 1, 'lt')
assert (x < 1) == StrictLessThan(x, 1)
assert (x < 0) != Relational(x, 1, '<')
assert (x < 0) != Relational(x, 1, 'lt')
assert (x < 0) != StrictLessThan(x, 1)
# finally, some fuzz testing
from random import randint
from sympy.core.compatibility import unichr
for i in range(100):
while 1:
strtype, length = (unichr, 65535) if randint(0, 1) else (chr, 255)
relation_type = strtype(randint(0, length))
if randint(0, 1):
relation_type += strtype(randint(0, length))
if relation_type not in ('==', 'eq', '!=', '<>', 'ne', '>=', 'ge',
'<=', 'le', '>', 'gt', '<', 'lt'):
break
raises(ValueError, lambda: Relational(x, 1, relation_type))
def test_relational_bool_output():
# https://github.com/sympy/sympy/issues/5931
raises(TypeError, lambda: bool(x > 3))
raises(TypeError, lambda: bool(x >= 3))
raises(TypeError, lambda: bool(x < 3))
raises(TypeError, lambda: bool(x <= 3))
raises(TypeError, lambda: bool(Eq(x, 3)))
raises(TypeError, lambda: bool(Ne(x, 3)))
def test_relational_logic_symbols():
# See issue 6204
assert (x < y) & (z < t) == And(x < y, z < t)
assert (x < y) | (z < t) == Or(x < y, z < t)
assert ~(x < y) == Not(x < y)
assert (x < y) >> (z < t) == Implies(x < y, z < t)
assert (x < y) << (z < t) == Implies(z < t, x < y)
assert (x < y) ^ (z < t) == Xor(x < y, z < t)
assert isinstance((x < y) & (z < t), And)
assert isinstance((x < y) | (z < t), Or)
assert isinstance(~(x < y), GreaterThan)
assert isinstance((x < y) >> (z < t), Implies)
assert isinstance((x < y) << (z < t), Implies)
assert isinstance((x < y) ^ (z < t), (Or, Xor))
def test_univariate_relational_as_set():
assert (x > 0).as_set() == Interval(0, oo, True, True)
assert (x >= 0).as_set() == Interval(0, oo)
assert (x < 0).as_set() == Interval(-oo, 0, True, True)
assert (x <= 0).as_set() == Interval(-oo, 0)
assert Eq(x, 0).as_set() == FiniteSet(0)
assert Ne(x, 0).as_set() == Interval(-oo, 0, True, True) + \
Interval(0, oo, True, True)
assert (x**2 >= 4).as_set() == Interval(-oo, -2) + Interval(2, oo)
@XFAIL
def test_multivariate_relational_as_set():
assert (x*y >= 0).as_set() == Interval(0, oo)*Interval(0, oo) + \
Interval(-oo, 0)*Interval(-oo, 0)
def test_Not():
assert Not(Equality(x, y)) == Unequality(x, y)
assert Not(Unequality(x, y)) == Equality(x, y)
assert Not(StrictGreaterThan(x, y)) == LessThan(x, y)
assert Not(StrictLessThan(x, y)) == GreaterThan(x, y)
assert Not(GreaterThan(x, y)) == StrictLessThan(x, y)
assert Not(LessThan(x, y)) == StrictGreaterThan(x, y)
def test_evaluate():
assert str(Eq(x, x, evaluate=False)) == 'Eq(x, x)'
assert Eq(x, x, evaluate=False).doit() == S.true
assert str(Ne(x, x, evaluate=False)) == 'Ne(x, x)'
assert Ne(x, x, evaluate=False).doit() == S.false
assert str(Ge(x, x, evaluate=False)) == 'x >= x'
assert str(Le(x, x, evaluate=False)) == 'x <= x'
assert str(Gt(x, x, evaluate=False)) == 'x > x'
assert str(Lt(x, x, evaluate=False)) == 'x < x'
def assert_all_ineq_raise_TypeError(a, b):
raises(TypeError, lambda: a > b)
raises(TypeError, lambda: a >= b)
raises(TypeError, lambda: a < b)
raises(TypeError, lambda: a <= b)
raises(TypeError, lambda: b > a)
raises(TypeError, lambda: b >= a)
raises(TypeError, lambda: b < a)
raises(TypeError, lambda: b <= a)
def assert_all_ineq_give_class_Inequality(a, b):
"""All inequality operations on `a` and `b` result in class Inequality."""
from sympy.core.relational import _Inequality as Inequality
assert isinstance(a > b, Inequality)
assert isinstance(a >= b, Inequality)
assert isinstance(a < b, Inequality)
assert isinstance(a <= b, Inequality)
assert isinstance(b > a, Inequality)
assert isinstance(b >= a, Inequality)
assert isinstance(b < a, Inequality)
assert isinstance(b <= a, Inequality)
def test_imaginary_compare_raises_TypeError():
# See issue #5724
assert_all_ineq_raise_TypeError(I, x)
def test_complex_compare_not_real():
# two cases which are not real
y = Symbol('y', imaginary=True)
z = Symbol('z', complex=True, real=False)
for w in (y, z):
assert_all_ineq_raise_TypeError(2, w)
# some cases which should remain un-evaluated
t = Symbol('t')
x = Symbol('x', real=True)
z = Symbol('z', complex=True)
for w in (x, z, t):
assert_all_ineq_give_class_Inequality(2, w)
def test_imaginary_and_inf_compare_raises_TypeError():
# See pull request #7835
y = Symbol('y', imaginary=True)
assert_all_ineq_raise_TypeError(oo, y)
assert_all_ineq_raise_TypeError(-oo, y)
def test_complex_pure_imag_not_ordered():
raises(TypeError, lambda: 2*I < 3*I)
# more generally
x = Symbol('x', real=True, nonzero=True)
y = Symbol('y', imaginary=True)
z = Symbol('z', complex=True)
assert_all_ineq_raise_TypeError(I, y)
t = I*x # an imaginary number, should raise errors
assert_all_ineq_raise_TypeError(2, t)
t = -I*y # a real number, so no errors
assert_all_ineq_give_class_Inequality(2, t)
t = I*z # unknown, should be unevaluated
assert_all_ineq_give_class_Inequality(2, t)
def test_x_minus_y_not_same_as_x_lt_y():
"""
A consequence of pull request #7792 is that `x - y < 0` and `x < y`
are not synonymous.
"""
x = I + 2
y = I + 3
raises(TypeError, lambda: x < y)
assert x - y < 0
ineq = Lt(x, y, evaluate=False)
raises(TypeError, lambda: ineq.doit())
assert ineq.lhs - ineq.rhs < 0
t = Symbol('t', imaginary=True)
x = 2 + t
y = 3 + t
ineq = Lt(x, y, evaluate=False)
raises(TypeError, lambda: ineq.doit())
assert ineq.lhs - ineq.rhs < 0
# this one should give error either way
x = I + 2
y = 2*I + 3
raises(TypeError, lambda: x < y)
raises(TypeError, lambda: x - y < 0)
def test_nan_equality_exceptions():
# See issue #7774
import random
assert Equality(nan, nan) is S.false
assert Unequality(nan, nan) is S.true
# See issue #7773
A = (x, S(0), S(1)/3, pi, oo, -oo)
assert Equality(nan, random.choice(A)) is S.false
assert Equality(random.choice(A), nan) is S.false
assert Unequality(nan, random.choice(A)) is S.true
assert Unequality(random.choice(A), nan) is S.true
def test_nan_inequality_raise_errors():
# See discussion in pull request #7776. We test inequalities with
# a set including examples of various classes.
for q in (x, S(0), S(10), S(1)/3, pi, S(1.3), oo, -oo, nan):
assert_all_ineq_raise_TypeError(q, nan)
def test_nan_complex_inequalities():
    # Comparisons of NaN with non-real raise errors; we're not too
    # fussy whether it's the NaN error or the complex error.
for r in (I, zoo, Symbol('z', imaginary=True)):
assert_all_ineq_raise_TypeError(r, nan)
def test_complex_infinity_inequalities():
raises(TypeError, lambda: zoo > 0)
raises(TypeError, lambda: zoo >= 0)
raises(TypeError, lambda: zoo < 0)
raises(TypeError, lambda: zoo <= 0)
def test_inequalities_symbol_name_same():
"""Using the operator and functional forms should give same results."""
# We test all combinations from a set
# FIXME: could replace with random selection after test passes
A = (x, y, S(0), S(1)/3, pi, oo, -oo)
for a in A:
for b in A:
assert Gt(a, b) == (a > b)
assert Lt(a, b) == (a < b)
assert Ge(a, b) == (a >= b)
assert Le(a, b) == (a <= b)
for b in (y, S(0), S(1)/3, pi, oo, -oo):
assert Gt(x, b, evaluate=False) == (x > b)
assert Lt(x, b, evaluate=False) == (x < b)
assert Ge(x, b, evaluate=False) == (x >= b)
assert Le(x, b, evaluate=False) == (x <= b)
for b in (y, S(0), S(1)/3, pi, oo, -oo):
assert Gt(b, x, evaluate=False) == (b > x)
assert Lt(b, x, evaluate=False) == (b < x)
assert Ge(b, x, evaluate=False) == (b >= x)
assert Le(b, x, evaluate=False) == (b <= x)
def test_inequalities_symbol_name_same_complex():
"""Using the operator and functional forms should give same results.
With complex non-real numbers, both should raise errors.
"""
# FIXME: could replace with random selection after test passes
for a in (x, S(0), S(1)/3, pi, oo):
raises(TypeError, lambda: Gt(a, I))
raises(TypeError, lambda: a > I)
raises(TypeError, lambda: Lt(a, I))
raises(TypeError, lambda: a < I)
raises(TypeError, lambda: Ge(a, I))
raises(TypeError, lambda: a >= I)
raises(TypeError, lambda: Le(a, I))
raises(TypeError, lambda: a <= I)
def test_inequalities_cant_sympify_other():
# see issue 7833
from operator import gt, lt, ge, le
bar = "foo"
for a in (x, S(0), S(1)/3, pi, I, zoo, oo, -oo, nan):
for op in (lt, gt, le, ge):
raises(TypeError, lambda: op(a, bar))
def test_ineq_avoid_wild_symbol_flip():
# see issue #7951, we try to avoid this internally, e.g., by using
# __lt__ instead of "<".
from sympy.core.symbol import Wild
p = symbols('p', cls=Wild)
# x > p might flip, but Gt should not:
assert Gt(x, p) == Gt(x, p, evaluate=False)
# Previously failed as 'p > x':
e = Lt(x, y).subs({y: p})
assert e == Lt(x, p, evaluate=False)
# Previously failed as 'p <= x':
e = Ge(x, p).doit()
assert e == Ge(x, p, evaluate=False)
def test_issue_8245():
a = S("6506833320952669167898688709329/5070602400912917605986812821504")
q = a.n(10)
assert (a == q) is True
assert (a != q) is False
assert (a > q) == False
assert (a < q) == False
assert (a >= q) == True
assert (a <= q) == True
a = sqrt(2)
r = Rational(str(a.n(30)))
assert (r == a) is False
assert (r != a) is True
assert (r > a) == True
assert (r < a) == False
assert (r >= a) == True
assert (r <= a) == False
a = sqrt(2)
r = Rational(str(a.n(29)))
assert (r == a) is False
assert (r != a) is True
assert (r > a) == False
assert (r < a) == True
assert (r >= a) == False
assert (r <= a) == True
def test_issue_8449():
p = Symbol('p', nonnegative=True)
assert Lt(-oo, p)
assert Ge(-oo, p) is S.false
assert Gt(oo, -p)
assert Le(oo, -p) is S.false
def test_simplify():
assert simplify(x*(y + 1) - x*y - x + 1 < x) == (x > 1)
assert simplify(S(1) < -x) == (x < -1)
def test_equals():
w, x, y, z = symbols('w:z')
f = Function('f')
assert Eq(x, 1).equals(Eq(x*(y + 1) - x*y - x + 1, x))
assert Eq(x, y).equals(x < y, True) == False
assert Eq(x, f(1)).equals(Eq(x, f(2)), True) == f(1) - f(2)
assert Eq(f(1), y).equals(Eq(f(2), y), True) == f(1) - f(2)
assert Eq(x, f(1)).equals(Eq(f(2), x), True) == f(1) - f(2)
assert Eq(f(1), x).equals(Eq(x, f(2)), True) == f(1) - f(2)
assert Eq(w, x).equals(Eq(y, z), True) == False
assert Eq(f(1), f(2)).equals(Eq(f(3), f(4)), True) == f(1) - f(3)
assert (x < y).equals(y > x, True) == True
assert (x < y).equals(y >= x, True) == False
assert (x < y).equals(z < y, True) == False
assert (x < y).equals(x < z, True) == False
assert (x < f(1)).equals(x < f(2), True) == f(1) - f(2)
assert (f(1) < x).equals(f(2) < x, True) == f(1) - f(2)
def test_reversed():
assert (x < y).reversed == (y > x)
assert (x <= y).reversed == (y >= x)
assert Eq(x, y, evaluate=False).reversed == Eq(y, x, evaluate=False)
assert Ne(x, y, evaluate=False).reversed == Ne(y, x, evaluate=False)
assert (x >= y).reversed == (y <= x)
assert (x > y).reversed == (y < x)
def test_canonical():
one = S(1)
def unchanged(v):
c = v.canonical
return v.is_Relational and c.is_Relational and v == c
def isreversed(v):
return v.canonical == v.reversed
assert unchanged(x < one)
assert unchanged(x <= one)
assert isreversed(Eq(one, x, evaluate=False))
assert unchanged(Eq(x, one, evaluate=False))
assert isreversed(Ne(one, x, evaluate=False))
assert unchanged(Ne(x, one, evaluate=False))
assert unchanged(x >= one)
assert unchanged(x > one)
assert unchanged(x < y)
assert unchanged(x <= y)
assert isreversed(Eq(y, x, evaluate=False))
assert unchanged(Eq(x, y, evaluate=False))
assert isreversed(Ne(y, x, evaluate=False))
assert unchanged(Ne(x, y, evaluate=False))
assert isreversed(x >= y)
assert isreversed(x > y)
assert (-x < 1).canonical == (x > -1)
assert isreversed(-x > y)
@XFAIL
def test_issue_8444():
x = symbols('x', real=True)
assert (x <= oo) == (x >= -oo) == True
x = symbols('x')
assert x >= floor(x)
assert (x < floor(x)) == False
assert Gt(x, floor(x)) == Gt(x, floor(x), evaluate=False)
assert Ge(x, floor(x)) == Ge(x, floor(x), evaluate=False)
assert x <= ceiling(x)
assert (x > ceiling(x)) == False
assert Lt(x, ceiling(x)) == Lt(x, ceiling(x), evaluate=False)
assert Le(x, ceiling(x)) == Le(x, ceiling(x), evaluate=False)
i = symbols('i', integer=True)
assert (i > floor(i)) == False
assert (i < ceiling(i)) == False
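# Hedged note (not part of the original file): this suite is normally run
# through sympy's own test entry point, e.g.
#   python -c "import sympy; sympy.test('sympy/core/tests/test_relational.py')"
# or with pytest pointed at the same path.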
| 32.341501
| 78
| 0.564373
|
from sympy.utilities.pytest import XFAIL, raises
from sympy import (S, Symbol, symbols, nan, oo, I, pi, Float, And, Or, Not,
Implies, Xor, zoo, sqrt, Rational, simplify, Function)
from sympy.core.compatibility import range
from sympy.core.relational import (Relational, Equality, Unequality,
GreaterThan, LessThan, StrictGreaterThan,
StrictLessThan, Rel, Eq, Lt, Le,
Gt, Ge, Ne)
from sympy.sets.sets import Interval, FiniteSet
x, y, z, t = symbols('x,y,z,t')
def test_rel_ne():
assert Relational(x, y, '!=') == Ne(x, y)
def test_rel_subs():
e = Relational(x, y, '==')
e = e.subs(x, z)
assert isinstance(e, Equality)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>=')
e = e.subs(x, z)
assert isinstance(e, GreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<=')
e = e.subs(x, z)
assert isinstance(e, LessThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '>')
e = e.subs(x, z)
assert isinstance(e, StrictGreaterThan)
assert e.lhs == z
assert e.rhs == y
e = Relational(x, y, '<')
e = e.subs(x, z)
assert isinstance(e, StrictLessThan)
assert e.lhs == z
assert e.rhs == y
e = Eq(x, 0)
assert e.subs(x, 0) is S.true
assert e.subs(x, 1) is S.false
def test_wrappers():
e = x + x**2
res = Relational(y, e, '==')
assert Rel(y, x + x**2, '==') == res
assert Eq(y, x + x**2) == res
res = Relational(y, e, '<')
assert Lt(y, x + x**2) == res
res = Relational(y, e, '<=')
assert Le(y, x + x**2) == res
res = Relational(y, e, '>')
assert Gt(y, x + x**2) == res
res = Relational(y, e, '>=')
assert Ge(y, x + x**2) == res
res = Relational(y, e, '!=')
assert Ne(y, x + x**2) == res
def test_Eq():
assert Eq(x**2) == Eq(x**2, 0)
assert Eq(x**2) != Eq(x**2, 1)
assert Eq(x, x)
p = Symbol('p', positive=True)
assert Eq(p, 0) is S.false
def test_rel_Infinity():
assert (oo > oo) is S.false
assert (oo > -oo) is S.true
assert (oo > 1) is S.true
assert (oo < oo) is S.false
assert (oo < -oo) is S.false
assert (oo < 1) is S.false
assert (oo >= oo) is S.true
assert (oo >= -oo) is S.true
assert (oo >= 1) is S.true
assert (oo <= oo) is S.true
assert (oo <= -oo) is S.false
assert (oo <= 1) is S.false
assert (-oo > oo) is S.false
assert (-oo > -oo) is S.false
assert (-oo > 1) is S.false
assert (-oo < oo) is S.true
assert (-oo < -oo) is S.false
assert (-oo < 1) is S.true
assert (-oo >= oo) is S.false
assert (-oo >= -oo) is S.true
assert (-oo >= 1) is S.false
assert (-oo <= oo) is S.true
assert (-oo <= -oo) is S.true
assert (-oo <= 1) is S.true
def test_bool():
assert Eq(0, 0) is S.true
assert Eq(1, 0) is S.false
assert Ne(0, 0) is S.false
assert Ne(1, 0) is S.true
assert Lt(0, 1) is S.true
assert Lt(1, 0) is S.false
assert Le(0, 1) is S.true
assert Le(1, 0) is S.false
assert Le(0, 0) is S.true
assert Gt(1, 0) is S.true
assert Gt(0, 1) is S.false
assert Ge(1, 0) is S.true
assert Ge(0, 1) is S.false
assert Ge(1, 1) is S.true
assert Eq(I, 2) is S.false
assert Ne(I, 2) is S.true
raises(TypeError, lambda: Gt(I, 2))
raises(TypeError, lambda: Ge(I, 2))
raises(TypeError, lambda: Lt(I, 2))
raises(TypeError, lambda: Le(I, 2))
a = Float('.000000000000000000001', '')
b = Float('.0000000000000000000001', '')
assert Eq(pi + a, pi + b) is S.false
def test_rich_cmp():
assert (x < y) == Lt(x, y)
assert (x <= y) == Le(x, y)
assert (x > y) == Gt(x, y)
assert (x >= y) == Ge(x, y)
def test_doit():
from sympy import Symbol
p = Symbol('p', positive=True)
n = Symbol('n', negative=True)
np = Symbol('np', nonpositive=True)
nn = Symbol('nn', nonnegative=True)
assert Gt(p, 0).doit() is S.true
assert Gt(p, 1).doit() == Gt(p, 1)
assert Ge(p, 0).doit() is S.true
assert Le(p, 0).doit() is S.false
assert Lt(n, 0).doit() is S.true
assert Le(np, 0).doit() is S.true
assert Gt(nn, 0).doit() == Gt(nn, 0)
assert Lt(nn, 0).doit() is S.false
assert Eq(x, 0).doit() == Eq(x, 0)
def test_new_relational():
x = Symbol('x')
    assert Eq(x) == Relational(x, 0)
    assert Eq(x) == Relational(x, 0, '==')
assert Eq(x) == Relational(x, 0, 'eq')
assert Eq(x) == Equality(x, 0)
    assert Eq(x, -1) == Relational(x, -1)
    assert Eq(x, -1) == Relational(x, -1, '==')
assert Eq(x, -1) == Relational(x, -1, 'eq')
assert Eq(x, -1) == Equality(x, -1)
    assert Eq(x) != Relational(x, 1)
    assert Eq(x) != Relational(x, 1, '==')
assert Eq(x) != Relational(x, 1, 'eq')
assert Eq(x) != Equality(x, 1)
    assert Eq(x, -1) != Relational(x, 1)
    assert Eq(x, -1) != Relational(x, 1, '==')
assert Eq(x, -1) != Relational(x, 1, 'eq')
assert Eq(x, -1) != Equality(x, 1)
assert Ne(x, 0) == Relational(x, 0, '!=')
assert Ne(x, 0) == Relational(x, 0, '<>')
assert Ne(x, 0) == Relational(x, 0, 'ne')
assert Ne(x, 0) == Unequality(x, 0)
assert Ne(x, 0) != Relational(x, 1, '!=')
assert Ne(x, 0) != Relational(x, 1, '<>')
assert Ne(x, 0) != Relational(x, 1, 'ne')
assert Ne(x, 0) != Unequality(x, 1)
assert Ge(x, 0) == Relational(x, 0, '>=')
assert Ge(x, 0) == Relational(x, 0, 'ge')
assert Ge(x, 0) == GreaterThan(x, 0)
assert Ge(x, 1) != Relational(x, 0, '>=')
assert Ge(x, 1) != Relational(x, 0, 'ge')
assert Ge(x, 1) != GreaterThan(x, 0)
assert (x >= 1) == Relational(x, 1, '>=')
assert (x >= 1) == Relational(x, 1, 'ge')
assert (x >= 1) == GreaterThan(x, 1)
assert (x >= 0) != Relational(x, 1, '>=')
assert (x >= 0) != Relational(x, 1, 'ge')
assert (x >= 0) != GreaterThan(x, 1)
assert Le(x, 0) == Relational(x, 0, '<=')
assert Le(x, 0) == Relational(x, 0, 'le')
assert Le(x, 0) == LessThan(x, 0)
assert Le(x, 1) != Relational(x, 0, '<=')
assert Le(x, 1) != Relational(x, 0, 'le')
assert Le(x, 1) != LessThan(x, 0)
assert (x <= 1) == Relational(x, 1, '<=')
assert (x <= 1) == Relational(x, 1, 'le')
assert (x <= 1) == LessThan(x, 1)
assert (x <= 0) != Relational(x, 1, '<=')
assert (x <= 0) != Relational(x, 1, 'le')
assert (x <= 0) != LessThan(x, 1)
assert Gt(x, 0) == Relational(x, 0, '>')
assert Gt(x, 0) == Relational(x, 0, 'gt')
assert Gt(x, 0) == StrictGreaterThan(x, 0)
assert Gt(x, 1) != Relational(x, 0, '>')
assert Gt(x, 1) != Relational(x, 0, 'gt')
assert Gt(x, 1) != StrictGreaterThan(x, 0)
assert (x > 1) == Relational(x, 1, '>')
assert (x > 1) == Relational(x, 1, 'gt')
assert (x > 1) == StrictGreaterThan(x, 1)
assert (x > 0) != Relational(x, 1, '>')
assert (x > 0) != Relational(x, 1, 'gt')
assert (x > 0) != StrictGreaterThan(x, 1)
assert Lt(x, 0) == Relational(x, 0, '<')
assert Lt(x, 0) == Relational(x, 0, 'lt')
assert Lt(x, 0) == StrictLessThan(x, 0)
assert Lt(x, 1) != Relational(x, 0, '<')
assert Lt(x, 1) != Relational(x, 0, 'lt')
assert Lt(x, 1) != StrictLessThan(x, 0)
assert (x < 1) == Relational(x, 1, '<')
assert (x < 1) == Relational(x, 1, 'lt')
assert (x < 1) == StrictLessThan(x, 1)
assert (x < 0) != Relational(x, 1, '<')
assert (x < 0) != Relational(x, 1, 'lt')
assert (x < 0) != StrictLessThan(x, 1)
from random import randint
from sympy.core.compatibility import unichr
for i in range(100):
while 1:
strtype, length = (unichr, 65535) if randint(0, 1) else (chr, 255)
relation_type = strtype(randint(0, length))
if randint(0, 1):
relation_type += strtype(randint(0, length))
if relation_type not in ('==', 'eq', '!=', '<>', 'ne', '>=', 'ge',
'<=', 'le', '>', 'gt', '<', 'lt'):
break
raises(ValueError, lambda: Relational(x, 1, relation_type))
def test_relational_bool_output():
raises(TypeError, lambda: bool(x > 3))
raises(TypeError, lambda: bool(x >= 3))
raises(TypeError, lambda: bool(x < 3))
raises(TypeError, lambda: bool(x <= 3))
raises(TypeError, lambda: bool(Eq(x, 3)))
raises(TypeError, lambda: bool(Ne(x, 3)))
def test_relational_logic_symbols():
assert (x < y) & (z < t) == And(x < y, z < t)
assert (x < y) | (z < t) == Or(x < y, z < t)
assert ~(x < y) == Not(x < y)
assert (x < y) >> (z < t) == Implies(x < y, z < t)
assert (x < y) << (z < t) == Implies(z < t, x < y)
assert (x < y) ^ (z < t) == Xor(x < y, z < t)
assert isinstance((x < y) & (z < t), And)
assert isinstance((x < y) | (z < t), Or)
assert isinstance(~(x < y), GreaterThan)
assert isinstance((x < y) >> (z < t), Implies)
assert isinstance((x < y) << (z < t), Implies)
assert isinstance((x < y) ^ (z < t), (Or, Xor))
def test_univariate_relational_as_set():
assert (x > 0).as_set() == Interval(0, oo, True, True)
assert (x >= 0).as_set() == Interval(0, oo)
assert (x < 0).as_set() == Interval(-oo, 0, True, True)
assert (x <= 0).as_set() == Interval(-oo, 0)
assert Eq(x, 0).as_set() == FiniteSet(0)
assert Ne(x, 0).as_set() == Interval(-oo, 0, True, True) + \
Interval(0, oo, True, True)
assert (x**2 >= 4).as_set() == Interval(-oo, -2) + Interval(2, oo)
@XFAIL
def test_multivariate_relational_as_set():
assert (x*y >= 0).as_set() == Interval(0, oo)*Interval(0, oo) + \
Interval(-oo, 0)*Interval(-oo, 0)
def test_Not():
assert Not(Equality(x, y)) == Unequality(x, y)
assert Not(Unequality(x, y)) == Equality(x, y)
assert Not(StrictGreaterThan(x, y)) == LessThan(x, y)
assert Not(StrictLessThan(x, y)) == GreaterThan(x, y)
assert Not(GreaterThan(x, y)) == StrictLessThan(x, y)
assert Not(LessThan(x, y)) == StrictGreaterThan(x, y)
def test_evaluate():
assert str(Eq(x, x, evaluate=False)) == 'Eq(x, x)'
assert Eq(x, x, evaluate=False).doit() == S.true
assert str(Ne(x, x, evaluate=False)) == 'Ne(x, x)'
assert Ne(x, x, evaluate=False).doit() == S.false
assert str(Ge(x, x, evaluate=False)) == 'x >= x'
assert str(Le(x, x, evaluate=False)) == 'x <= x'
assert str(Gt(x, x, evaluate=False)) == 'x > x'
assert str(Lt(x, x, evaluate=False)) == 'x < x'
def assert_all_ineq_raise_TypeError(a, b):
raises(TypeError, lambda: a > b)
raises(TypeError, lambda: a >= b)
raises(TypeError, lambda: a < b)
raises(TypeError, lambda: a <= b)
raises(TypeError, lambda: b > a)
raises(TypeError, lambda: b >= a)
raises(TypeError, lambda: b < a)
raises(TypeError, lambda: b <= a)
def assert_all_ineq_give_class_Inequality(a, b):
from sympy.core.relational import _Inequality as Inequality
assert isinstance(a > b, Inequality)
assert isinstance(a >= b, Inequality)
assert isinstance(a < b, Inequality)
assert isinstance(a <= b, Inequality)
assert isinstance(b > a, Inequality)
assert isinstance(b >= a, Inequality)
assert isinstance(b < a, Inequality)
assert isinstance(b <= a, Inequality)
def test_imaginary_compare_raises_TypeError():
assert_all_ineq_raise_TypeError(I, x)
def test_complex_compare_not_real():
y = Symbol('y', imaginary=True)
z = Symbol('z', complex=True, real=False)
for w in (y, z):
assert_all_ineq_raise_TypeError(2, w)
t = Symbol('t')
x = Symbol('x', real=True)
z = Symbol('z', complex=True)
for w in (x, z, t):
assert_all_ineq_give_class_Inequality(2, w)
def test_imaginary_and_inf_compare_raises_TypeError():
y = Symbol('y', imaginary=True)
assert_all_ineq_raise_TypeError(oo, y)
assert_all_ineq_raise_TypeError(-oo, y)
def test_complex_pure_imag_not_ordered():
raises(TypeError, lambda: 2*I < 3*I)
x = Symbol('x', real=True, nonzero=True)
y = Symbol('y', imaginary=True)
z = Symbol('z', complex=True)
assert_all_ineq_raise_TypeError(I, y)
    t = I*x
    assert_all_ineq_raise_TypeError(2, t)
    t = -I*y
    assert_all_ineq_give_class_Inequality(2, t)
    t = I*z
    assert_all_ineq_give_class_Inequality(2, t)
def test_x_minus_y_not_same_as_x_lt_y():
x = I + 2
y = I + 3
raises(TypeError, lambda: x < y)
assert x - y < 0
ineq = Lt(x, y, evaluate=False)
raises(TypeError, lambda: ineq.doit())
assert ineq.lhs - ineq.rhs < 0
t = Symbol('t', imaginary=True)
x = 2 + t
y = 3 + t
ineq = Lt(x, y, evaluate=False)
raises(TypeError, lambda: ineq.doit())
assert ineq.lhs - ineq.rhs < 0
x = I + 2
y = 2*I + 3
raises(TypeError, lambda: x < y)
raises(TypeError, lambda: x - y < 0)
def test_nan_equality_exceptions():
import random
assert Equality(nan, nan) is S.false
assert Unequality(nan, nan) is S.true
A = (x, S(0), S(1)/3, pi, oo, -oo)
assert Equality(nan, random.choice(A)) is S.false
assert Equality(random.choice(A), nan) is S.false
assert Unequality(nan, random.choice(A)) is S.true
assert Unequality(random.choice(A), nan) is S.true
def test_nan_inequality_raise_errors():
for q in (x, S(0), S(10), S(1)/3, pi, S(1.3), oo, -oo, nan):
assert_all_ineq_raise_TypeError(q, nan)
def test_nan_complex_inequalities():
    # fussy whether it's the NaN error or the complex error.
for r in (I, zoo, Symbol('z', imaginary=True)):
assert_all_ineq_raise_TypeError(r, nan)
def test_complex_infinity_inequalities():
raises(TypeError, lambda: zoo > 0)
raises(TypeError, lambda: zoo >= 0)
raises(TypeError, lambda: zoo < 0)
raises(TypeError, lambda: zoo <= 0)
def test_inequalities_symbol_name_same():
# We test all combinations from a set
# FIXME: could replace with random selection after test passes
A = (x, y, S(0), S(1)/3, pi, oo, -oo)
for a in A:
for b in A:
assert Gt(a, b) == (a > b)
assert Lt(a, b) == (a < b)
assert Ge(a, b) == (a >= b)
assert Le(a, b) == (a <= b)
for b in (y, S(0), S(1)/3, pi, oo, -oo):
assert Gt(x, b, evaluate=False) == (x > b)
assert Lt(x, b, evaluate=False) == (x < b)
assert Ge(x, b, evaluate=False) == (x >= b)
assert Le(x, b, evaluate=False) == (x <= b)
for b in (y, S(0), S(1)/3, pi, oo, -oo):
assert Gt(b, x, evaluate=False) == (b > x)
assert Lt(b, x, evaluate=False) == (b < x)
assert Ge(b, x, evaluate=False) == (b >= x)
assert Le(b, x, evaluate=False) == (b <= x)
def test_inequalities_symbol_name_same_complex():
# FIXME: could replace with random selection after test passes
for a in (x, S(0), S(1)/3, pi, oo):
raises(TypeError, lambda: Gt(a, I))
raises(TypeError, lambda: a > I)
raises(TypeError, lambda: Lt(a, I))
raises(TypeError, lambda: a < I)
raises(TypeError, lambda: Ge(a, I))
raises(TypeError, lambda: a >= I)
raises(TypeError, lambda: Le(a, I))
raises(TypeError, lambda: a <= I)
def test_inequalities_cant_sympify_other():
# see issue 7833
from operator import gt, lt, ge, le
bar = "foo"
for a in (x, S(0), S(1)/3, pi, I, zoo, oo, -oo, nan):
for op in (lt, gt, le, ge):
raises(TypeError, lambda: op(a, bar))
def test_ineq_avoid_wild_symbol_flip():
# see issue #7951, we try to avoid this internally, e.g., by using
# __lt__ instead of "<".
from sympy.core.symbol import Wild
p = symbols('p', cls=Wild)
# x > p might flip, but Gt should not:
assert Gt(x, p) == Gt(x, p, evaluate=False)
# Previously failed as 'p > x':
e = Lt(x, y).subs({y: p})
assert e == Lt(x, p, evaluate=False)
# Previously failed as 'p <= x':
e = Ge(x, p).doit()
assert e == Ge(x, p, evaluate=False)
def test_issue_8245():
a = S("6506833320952669167898688709329/5070602400912917605986812821504")
q = a.n(10)
assert (a == q) is True
assert (a != q) is False
assert (a > q) == False
assert (a < q) == False
assert (a >= q) == True
assert (a <= q) == True
a = sqrt(2)
r = Rational(str(a.n(30)))
assert (r == a) is False
assert (r != a) is True
assert (r > a) == True
assert (r < a) == False
assert (r >= a) == True
assert (r <= a) == False
a = sqrt(2)
r = Rational(str(a.n(29)))
assert (r == a) is False
assert (r != a) is True
assert (r > a) == False
assert (r < a) == True
assert (r >= a) == False
assert (r <= a) == True
def test_issue_8449():
p = Symbol('p', nonnegative=True)
assert Lt(-oo, p)
assert Ge(-oo, p) is S.false
assert Gt(oo, -p)
assert Le(oo, -p) is S.false
def test_simplify():
assert simplify(x*(y + 1) - x*y - x + 1 < x) == (x > 1)
assert simplify(S(1) < -x) == (x < -1)
def test_equals():
w, x, y, z = symbols('w:z')
f = Function('f')
assert Eq(x, 1).equals(Eq(x*(y + 1) - x*y - x + 1, x))
assert Eq(x, y).equals(x < y, True) == False
assert Eq(x, f(1)).equals(Eq(x, f(2)), True) == f(1) - f(2)
assert Eq(f(1), y).equals(Eq(f(2), y), True) == f(1) - f(2)
assert Eq(x, f(1)).equals(Eq(f(2), x), True) == f(1) - f(2)
assert Eq(f(1), x).equals(Eq(x, f(2)), True) == f(1) - f(2)
assert Eq(w, x).equals(Eq(y, z), True) == False
assert Eq(f(1), f(2)).equals(Eq(f(3), f(4)), True) == f(1) - f(3)
assert (x < y).equals(y > x, True) == True
assert (x < y).equals(y >= x, True) == False
assert (x < y).equals(z < y, True) == False
assert (x < y).equals(x < z, True) == False
assert (x < f(1)).equals(x < f(2), True) == f(1) - f(2)
assert (f(1) < x).equals(f(2) < x, True) == f(1) - f(2)
def test_reversed():
assert (x < y).reversed == (y > x)
assert (x <= y).reversed == (y >= x)
assert Eq(x, y, evaluate=False).reversed == Eq(y, x, evaluate=False)
assert Ne(x, y, evaluate=False).reversed == Ne(y, x, evaluate=False)
assert (x >= y).reversed == (y <= x)
assert (x > y).reversed == (y < x)
def test_canonical():
one = S(1)
def unchanged(v):
c = v.canonical
return v.is_Relational and c.is_Relational and v == c
def isreversed(v):
return v.canonical == v.reversed
assert unchanged(x < one)
assert unchanged(x <= one)
assert isreversed(Eq(one, x, evaluate=False))
assert unchanged(Eq(x, one, evaluate=False))
assert isreversed(Ne(one, x, evaluate=False))
assert unchanged(Ne(x, one, evaluate=False))
assert unchanged(x >= one)
assert unchanged(x > one)
assert unchanged(x < y)
assert unchanged(x <= y)
assert isreversed(Eq(y, x, evaluate=False))
assert unchanged(Eq(x, y, evaluate=False))
assert isreversed(Ne(y, x, evaluate=False))
assert unchanged(Ne(x, y, evaluate=False))
assert isreversed(x >= y)
assert isreversed(x > y)
assert (-x < 1).canonical == (x > -1)
assert isreversed(-x > y)
@XFAIL
def test_issue_8444():
x = symbols('x', real=True)
assert (x <= oo) == (x >= -oo) == True
x = symbols('x')
assert x >= floor(x)
assert (x < floor(x)) == False
assert Gt(x, floor(x)) == Gt(x, floor(x), evaluate=False)
assert Ge(x, floor(x)) == Ge(x, floor(x), evaluate=False)
assert x <= ceiling(x)
assert (x > ceiling(x)) == False
assert Lt(x, ceiling(x)) == Lt(x, ceiling(x), evaluate=False)
assert Le(x, ceiling(x)) == Le(x, ceiling(x), evaluate=False)
i = symbols('i', integer=True)
assert (i > floor(i)) == False
assert (i < ceiling(i)) == False
| true
| true
|
f702418be4646ce2cca2d4d5e436f668865ba2a0
| 579
|
py
|
Python
|
chapter4/main.py
|
takapat/training-kit-3
|
3140b62a9d618a52d9c957927883255b60e27a0c
|
[
"Apache-2.0"
] | null | null | null |
chapter4/main.py
|
takapat/training-kit-3
|
3140b62a9d618a52d9c957927883255b60e27a0c
|
[
"Apache-2.0"
] | 1
|
2018-10-18T05:52:40.000Z
|
2018-10-18T10:38:58.000Z
|
chapter4/main.py
|
takapat/training-kit-3
|
3140b62a9d618a52d9c957927883255b60e27a0c
|
[
"Apache-2.0"
] | 7
|
2018-10-19T05:33:26.000Z
|
2020-06-02T22:10:40.000Z
|
from flask import Flask, Response
from camera import Camera
import cv2
app = Flask(__name__)
camera = Camera().start()
def gen(camera):
while True:
frame = camera.read()
_, jpeg = cv2.imencode('.jpg', frame)
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + jpeg.tobytes() + b'\r\n')
@app.route('/stream')
def stream():
return Response(gen(camera),
mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=False, use_reloader=False)
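# Hedged usage note (not part of the original file): with the app running,
# the MJPEG stream is served on Flask's default port 5000, e.g.
#   curl -N http://localhost:5000/stream | head -c 1000
# Each multipart part carries a single JPEG frame delimited by "--frame".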
| 23.16
| 78
| 0.607945
|
from flask import Flask, Response
from camera import Camera
import cv2
app = Flask(__name__)
camera = Camera().start()
def gen(camera):
while True:
frame = camera.read()
_, jpeg = cv2.imencode('.jpg', frame)
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + jpeg.tobytes() + b'\r\n')
@app.route('/stream')
def stream():
return Response(gen(camera),
mimetype='multipart/x-mixed-replace; boundary=frame')
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=False, use_reloader=False)
| true
| true
|
f702419a1d1820b52591b3a643e27f680f9b94ef
| 485
|
py
|
Python
|
PFlib/setup.py
|
Rogue05/Praca-Magisterska
|
863a0522348eddd093b4de05d12c788ef5f2f520
|
[
"MIT"
] | null | null | null |
PFlib/setup.py
|
Rogue05/Praca-Magisterska
|
863a0522348eddd093b4de05d12c788ef5f2f520
|
[
"MIT"
] | null | null | null |
PFlib/setup.py
|
Rogue05/Praca-Magisterska
|
863a0522348eddd093b4de05d12c788ef5f2f520
|
[
"MIT"
] | null | null | null |
from glob import glob
from setuptools import setup
from pybind11.setup_helpers import Pybind11Extension
ext_modules = [
Pybind11Extension(
"PFlib",
# sorted(glob("*.cpp")), # Sort source files for reproducibility
["particle_filter.cpp",],
swig_opts=['-ggdb',],
include_dirs=['include',]
),
]
setup(
name="PFlib",
# extra_compile_args=['-O0','-Wall','-g'],
# extra_compile_args=['-Iinclude'],
ext_modules=ext_modules,
)
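# Hedged build sketch (not part of the original file): with pybind11 and a
# C++ toolchain available, the extension is typically built in place via
#   python setup.py build_ext --inplace
# (or `pip install .`), after which `import PFlib` loads the compiled module.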
| 24.25
| 73
| 0.63299
|
from glob import glob
from setuptools import setup
from pybind11.setup_helpers import Pybind11Extension
ext_modules = [
Pybind11Extension(
"PFlib",
["particle_filter.cpp",],
swig_opts=['-ggdb',],
include_dirs=['include',]
),
]
setup(
name="PFlib",
ext_modules=ext_modules,
)
| true
| true
|
f702425a5ae41f02c61077b11e3cd43acf7df328
| 3,517
|
py
|
Python
|
comic_classify/comic_detect.py
|
docileninja/Calvin-and-Hobbes-Viewer
|
74ff2e090e040517e2b445432a5ea6c0f87df49b
|
[
"MIT"
] | 2
|
2017-01-10T03:22:19.000Z
|
2017-02-14T02:57:21.000Z
|
comic_classify/comic_detect.py
|
sciencemanx/Calvin-and-Hobbes-Viewer
|
74ff2e090e040517e2b445432a5ea6c0f87df49b
|
[
"MIT"
] | null | null | null |
comic_classify/comic_detect.py
|
sciencemanx/Calvin-and-Hobbes-Viewer
|
74ff2e090e040517e2b445432a5ea6c0f87df49b
|
[
"MIT"
] | 3
|
2018-12-28T04:57:54.000Z
|
2020-06-24T04:11:48.000Z
|
from sklearn import svm, cluster
from PIL import Image, ImageDraw
import os
import sys
import random
def load_images(dirname):
images = []
for image_name in os.listdir(dirname):
if image_name.startswith('.'):
continue
image = Image.open(dirname + '/' + image_name).convert('1')
x, y = image.size
image = image.resize((x, 280), Image.ANTIALIAS)
data = [0 if pixel == 0 else 1 for pixel in image.getdata()]
images.append(data)
return images
min_len = 10000000
def normalize(X):
    # Truncate every feature vector to the shortest length seen across all
    # calls so the SVM always receives consistently sized inputs.
    global min_len
    min_len = min(min_len, min(len(x) for x in X))
    return [x[:min_len] for x in X]
def crossvalidate(edges, nonedges):
random.shuffle(edges)
random.shuffle(nonedges)
train_edge_len, train_nonedge_len = len(edges) * 7 // 10, len(nonedges) * 7 // 10
cross_edge_len, cross_nonedge_len = len(edges) - train_edge_len, len(nonedges) - train_nonedge_len
X_train = normalize(nonedges[:train_nonedge_len] +
edges[:train_edge_len])
y_train = [0] * train_nonedge_len + [1] * train_edge_len
X_cross = normalize(nonedges[train_nonedge_len:] +
edges[train_edge_len:])
y_cross = [0] * cross_nonedge_len + [1] * cross_edge_len
clf = svm.SVC(gamma=.001, C=100.)
clf.fit(X_train, y_train)
print("prediction: {}".format(list(clf.predict(X_cross))))
print("actuallity: {}".format(y_cross))
print(clf.score(X_cross, y_cross))
def get_column(img, i):
w, h = img.size
column = []
for j in range(h):
column.append(0 if img.getpixel((i, j)) == 0 else 1)
return column
def search_picture(clf, image_name):
image = Image.open(image_name).convert('1')
x, y = image.size
image = image.resize((x, 280), Image.ANTIALIAS)
w, h = image.size
columns = [get_column(image, i) for i in range(25)]
datas = []
for i in range(25, w):
columns = columns[1:] + [get_column(image, i)]
data = [columns[i][j] for j in range(len(columns[0])) for i in range(len(columns))]
datas.append(data)
datas = normalize(datas)
matches = [[i] for i, m in enumerate(clf.predict(datas)) if m == 1]
if len(matches) == 0:
return [], matches
clst = cluster.DBSCAN(eps=20, min_samples=1)
clst.fit(matches)
trimmed = [idx for idx in clst.components_ if idx > w // 6 and idx < w * 5 // 6]
clst = cluster.KMeans(3, init='k-means++')
clst.fit(trimmed)
seps = list(sorted([int(v[0]) + 25//2 for v in clst.cluster_centers_]))
final_seps = []
for start, end in zip(seps, seps[1:]):
if (end - start) > w // 6:
final_seps.append(start)
final_seps.append(seps[-1])
return final_seps, matches
def train(edges, nonedges):
clf = svm.SVC(gamma=.001, C=100.)
X = normalize(nonedges + edges)
y = [0] * len(nonedges) + [1] * len(edges)
clf.fit(X, y)
return clf
def main(edge_dir, non_edge_dir):
edges = load_images(edge_dir)
nonedges = load_images(non_edge_dir)
crossvalidate(edges, nonedges)
clf = train(edges, nonedges)
for comic in os.listdir('test'):
print(comic)
panels, matches = search_picture(clf, 'test/' + comic)
print("\tpanels: {}".format(panels))
image = Image.open('test/' + comic).convert('RGBA')
draw = ImageDraw.Draw(image)
w, h = image.size
for match in matches:
match = match[0]
draw.line((match, 0) + (match, h), fill=(0,0,255,0))
for sep in panels:
draw.line((sep, 0) + (sep, h), fill=(255,0,0), width=3)
image.show()
return clf
if __name__ == '__main__':
if len(sys.argv) != 3:
print('Usage: {} <edges-dir> <non-edges-dir>'.format(sys.argv[0]))
sys.exit(1)
edge_dir = sys.argv[1]
non_edge_dir = sys.argv[2]
main(edge_dir, non_edge_dir)
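# Hedged usage sketch (not part of the original file): the invocation assumed
# by the __main__ block, with hypothetical directory names:
#   python comic_detect.py edges/ non_edges/
# Training images live in edges/ and non_edges/; the comics to segment are
# read from a hard-coded test/ directory.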
| 28.827869
| 99
| 0.67387
|
from sklearn import svm, cluster
from PIL import Image, ImageDraw
import os
import sys
import random
def load_images(dirname):
images = []
for image_name in os.listdir(dirname):
if image_name.startswith('.'):
continue
image = Image.open(dirname + '/' + image_name).convert('1')
x, y = image.size
image = image.resize((x, 280), Image.ANTIALIAS)
data = [0 if pixel == 0 else 1 for pixel in image.getdata()]
images.append(data)
return images
min_len = 10000000
def normalize(X):
global min_len
min_len = min(min_len, min(len(x) for x in X))
return [x[:min_len] for x in X]
def crossvalidate(edges, nonedges):
random.shuffle(edges)
random.shuffle(nonedges)
train_edge_len, train_nonedge_len = len(edges) * 7 // 10, len(nonedges) * 7 // 10
cross_edge_len, cross_nonedge_len = len(edges) - train_edge_len, len(nonedges) - train_nonedge_len
X_train = normalize(nonedges[:train_nonedge_len] +
edges[:train_edge_len])
y_train = [0] * train_nonedge_len + [1] * train_edge_len
X_cross = normalize(nonedges[train_nonedge_len:] +
edges[train_edge_len:])
y_cross = [0] * cross_nonedge_len + [1] * cross_edge_len
clf = svm.SVC(gamma=.001, C=100.)
clf.fit(X_train, y_train)
print("prediction: {}".format(list(clf.predict(X_cross))))
print("actuallity: {}".format(y_cross))
print(clf.score(X_cross, y_cross))
def get_column(img, i):
w, h = img.size
column = []
for j in range(h):
column.append(0 if img.getpixel((i, j)) == 0 else 1)
return column
def search_picture(clf, image_name):
image = Image.open(image_name).convert('1')
x, y = image.size
image = image.resize((x, 280), Image.ANTIALIAS)
w, h = image.size
columns = [get_column(image, i) for i in range(25)]
datas = []
for i in range(25, w):
columns = columns[1:] + [get_column(image, i)]
data = [columns[i][j] for j in range(len(columns[0])) for i in range(len(columns))]
datas.append(data)
datas = normalize(datas)
matches = [[i] for i, m in enumerate(clf.predict(datas)) if m == 1]
if len(matches) == 0:
return [], matches
clst = cluster.DBSCAN(eps=20, min_samples=1)
clst.fit(matches)
trimmed = [idx for idx in clst.components_ if idx > w // 6 and idx < w * 5 // 6]
clst = cluster.KMeans(3, init='k-means++')
clst.fit(trimmed)
seps = list(sorted([int(v[0]) + 25//2 for v in clst.cluster_centers_]))
final_seps = []
for start, end in zip(seps, seps[1:]):
if (end - start) > w // 6:
final_seps.append(start)
final_seps.append(seps[-1])
return final_seps, matches
def train(edges, nonedges):
clf = svm.SVC(gamma=.001, C=100.)
X = normalize(nonedges + edges)
y = [0] * len(nonedges) + [1] * len(edges)
clf.fit(X, y)
return clf
def main(edge_dir, non_edge_dir):
edges = load_images(edge_dir)
nonedges = load_images(non_edge_dir)
crossvalidate(edges, nonedges)
clf = train(edges, nonedges)
for comic in os.listdir('test'):
print(comic)
panels, matches = search_picture(clf, 'test/' + comic)
print("\tpanels: {}".format(panels))
image = Image.open('test/' + comic).convert('RGBA')
draw = ImageDraw.Draw(image)
w, h = image.size
for match in matches:
match = match[0]
draw.line((match, 0) + (match, h), fill=(0,0,255,0))
for sep in panels:
draw.line((sep, 0) + (sep, h), fill=(255,0,0), width=3)
image.show()
return clf
if __name__ == '__main__':
if len(sys.argv) != 3:
print('Usage: {} <edges-dir> <non-edges-dir>'.format(sys.argv[0]))
sys.exit(1)
edge_dir = sys.argv[1]
non_edge_dir = sys.argv[2]
main(edge_dir, non_edge_dir)
| true
| true
|
f70242cd924626b37f0454ea32519e3c989f44e0
| 9,876
|
py
|
Python
|
python/handwritten_baseline/pipeline/data/loader/ecb_reader_utils.py
|
UKPLab/cdcr-beyond-corpus-tailored
|
52bf98692c7464f25628baea24addd1a988f9a1f
|
[
"Apache-2.0"
] | 10
|
2020-11-28T05:01:04.000Z
|
2021-12-21T19:34:00.000Z
|
python/handwritten_baseline/pipeline/data/loader/ecb_reader_utils.py
|
UKPLab/cdcr-beyond-corpus-tailored
|
52bf98692c7464f25628baea24addd1a988f9a1f
|
[
"Apache-2.0"
] | 1
|
2022-03-12T07:20:39.000Z
|
2022-03-16T05:11:38.000Z
|
python/handwritten_baseline/pipeline/data/loader/ecb_reader_utils.py
|
UKPLab/cdcr-beyond-corpus-tailored
|
52bf98692c7464f25628baea24addd1a988f9a1f
|
[
"Apache-2.0"
] | 1
|
2021-12-21T19:34:08.000Z
|
2021-12-21T19:34:08.000Z
|
import logging
import os
import re
import xml.etree.ElementTree as ET
from pathlib import Path
from typing import Any, Tuple, Optional
import pandas as pd
from python import TOPIC_ID, SUBTOPIC, DOCUMENT_NUMBER, DOCUMENT_ID, SENTENCE_IDX, TOKEN_IDX, TOKEN_IDX_TO, \
TOKEN_IDX_FROM, TOKEN, MENTION_ID, EVENT, MENTION_TYPE, DESCRIPTION, MENTION_TYPES_ACTION
logger = logging.getLogger()
def read_xml(xml_path) -> Tuple[Any, Any, Any, Any, Any]:
tree = ET.parse(xml_path)
# 1: read document info
root = tree.getroot()
assert root.tag == "Document"
doc_filename = root.attrib["doc_name"]
doc_id = root.attrib["doc_id"]
m = re.match(r"(?P<topic_id>\d+)_(?P<document_number>\d+)(?P<subtopic>\w+)\.xml", doc_filename)
topic_id = m.group("topic_id")
subtopic = m.group("subtopic")
document_number = int(m.group("document_number"))
documents_index = pd.MultiIndex.from_tuples([(topic_id, subtopic, doc_id)],
names=[TOPIC_ID, SUBTOPIC, DOCUMENT_ID])
documents = pd.DataFrame({DOCUMENT_ID: pd.Series(doc_id, index=documents_index),
DOCUMENT_NUMBER: pd.Series(document_number, index=documents_index)})
# 2: read document content
contents_rows = []
contents_index = []
for token_elmt in root.iter("token"):
# index content
sentence_idx = int(token_elmt.attrib["sentence"])
token_idx = int(token_elmt.attrib["number"])
contents_index.append((doc_id, sentence_idx, token_idx))
# content
token = token_elmt.text
contents_rows.append({TOKEN: token})
contents_index = pd.MultiIndex.from_tuples(contents_index, names=[DOCUMENT_ID, SENTENCE_IDX, TOKEN_IDX])
contents = pd.DataFrame(contents_rows, index=contents_index)
# 3: read markables / mentions and entity/event descriptions
mentions_rows = []
mentions_index = []
entities_events = []
for markable in root.find("Markables").getchildren():
# Don't know what this is, skip it
if markable.tag == "UNKNOWN_INSTANCE_TAG":
continue
mention_id = int(markable.attrib["m_id"])
# there are markables without spans, these are descriptions of entities / events which we want to keep
if "TAG_DESCRIPTOR" in markable.attrib.keys():
if "instance_id" in markable.attrib.keys():
entities_events.append({
EVENT: markable.attrib["instance_id"],
DESCRIPTION: markable.attrib["TAG_DESCRIPTOR"]
})
continue
token_ids = [int(anchor.attrib["t_id"]) for anchor in markable.iter("token_anchor")]
token_ids_from, token_ids_to = min(token_ids), max(token_ids)
# the token_ids are cumulative token indexes, remove their cumulative nature
token_indexes = contents.index.get_level_values(TOKEN_IDX).values
token_idx_from = token_indexes[
token_ids_from - 1] # -1 because token_ids start at 1, so we need to access index 0 in the dataframe to find t_id 1
token_idx_to = token_indexes[
token_ids_to - 1] + 1 # additionally +1 here because we want mention spans represented as intervals [from, to[
sentence_idx = contents.index.get_level_values(SENTENCE_IDX).values[token_ids_from - 1]
# resolve non-contiguous mentions
is_non_contiguous_mention = len(token_ids) < token_idx_from - token_idx_to
if is_non_contiguous_mention:
logger.info("Converted non-contiguous mention to contiguous mention.")
mentions_index.append((doc_id, mention_id))
mentions_rows.append({SENTENCE_IDX: sentence_idx,
TOKEN_IDX_FROM: token_idx_from,
TOKEN_IDX_TO: token_idx_to,
MENTION_TYPE: markable.tag})
mentions_index = pd.MultiIndex.from_tuples(mentions_index, names=[DOCUMENT_ID, MENTION_ID])
mentions = pd.DataFrame(mentions_rows, index=mentions_index)
entities_events = pd.DataFrame(entities_events).set_index(EVENT)
# 4. read relations (clusters)
clusters_rows = []
for relation in root.find("Relations").getchildren():
tags_of_interest = ["CROSS_DOC_COREF", "INTRA_DOC_COREF"]
if not relation.tag in tags_of_interest:
logger.info("Unexpected tag " + relation.tag)
raise NotImplementedError
# There are relations with tags INTRA_DOC_COREF and CROSS_DOC_COREF. The cross-doc ones have a "note" attribute.
if "note" in relation.attrib:
# this is the case for CROSS_DOC_COREF tags
relation_id = relation.attrib["note"]
else:
# this is the case for INTRA_DOC_COREF tags
relation_id = doc_id + "_" + relation.attrib["r_id"]
for mention in relation.iter("source"):
mention_id = int(mention.attrib["m_id"])
clusters_rows.append({EVENT: relation_id, DOCUMENT_ID: doc_id, MENTION_ID: mention_id})
clusters = pd.DataFrame(clusters_rows)
# 5. create relations for singletons
# In ECB plus, there are ACTION_OCCURRENCE markables which are not assigned to a relation. These are singletons. We
# add one entry for each singleton to `clusters` to ensure consistency. Note that the opposite also exists:
# singleton mentions which are marked as participating in a cross-doc coref relation, but there is no second
# mention for this relation.
if clusters.empty:
singletons = mentions.index.to_frame().reset_index(drop=True)
else:
# This can most likely be done in a nicer way using some index difference...
outer = pd.merge(mentions, clusters, left_index=True, right_on=[DOCUMENT_ID, MENTION_ID], how="outer")
singletons = outer.loc[outer[EVENT].isna(), [DOCUMENT_ID, MENTION_ID]]
singletons[EVENT] = "SINGLETON_" + singletons.astype(str).apply("_".join, axis=1)
clusters = clusters.append(singletons, sort=False).reset_index(drop=True)
return documents, contents, mentions, clusters, entities_events
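# Hedged per-file sketch (not part of the original module); the path is
# hypothetical, but the doc_name inside the XML must match the
# <topic>_<number><subtopic>.xml pattern expected by the regex above:
#   docs, contents, mentions, clusters, descriptions = read_xml("ECB+/1/1_1ecb.xml")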
def read_split_data(root: Path, sentence_filter_csv: Optional[Path]):
documents = []
contents = []
mentions = []
clusters = []
entities_events = []
# enumerate files
for root, dirs, files in os.walk(str(root.absolute())):
for file in files:
path = os.path.abspath(os.path.join(root, file))
f_documents, f_contents, f_mentions, f_clusters, f_entities_events = read_xml(path)
documents.append(f_documents)
contents.append(f_contents)
mentions.append(f_mentions)
clusters.append(f_clusters)
entities_events.append(f_entities_events)
documents = pd.concat(documents).sort_index()
contents = pd.concat(contents).sort_index()
mentions = pd.concat(mentions).sort_index()
clusters = pd.concat(clusters, sort=False)
entities_events = pd.concat(entities_events).sort_index()
# assert that every mention participates only in one cluster -> meaning we can just add an 'EVENT' column to each mention
assert clusters.duplicated(subset=[DOCUMENT_ID, MENTION_ID]).value_counts().get(True, 0) == 0
clusters = clusters.set_index([DOCUMENT_ID, MENTION_ID])
mentions = pd.merge(mentions, clusters, left_index=True, right_index=True).sort_index()
# read file which tells us from which sentences we should keep event mentions
if sentence_filter_csv is not None:
sent_filter = pd.read_csv(sentence_filter_csv)
doc_number_and_subtopic = sent_filter["File"].str.split("ecb", expand=True)
doc_number_and_subtopic.columns = [DOCUMENT_NUMBER, SUBTOPIC]
doc_number_and_subtopic[DOCUMENT_NUMBER] = doc_number_and_subtopic[DOCUMENT_NUMBER].astype(int)
doc_number_and_subtopic[SUBTOPIC].replace({"plus": "ecbplus", "": "ecb"}, inplace=True)
sent_filter = pd.concat([sent_filter.drop(columns="File"), doc_number_and_subtopic], axis=1)
sent_filter.rename(columns={"Topic": TOPIC_ID, "Sentence Number": SENTENCE_IDX}, inplace=True)
sent_filter[TOPIC_ID] = sent_filter[TOPIC_ID].astype(str)
sent_filter = sent_filter[[TOPIC_ID, SUBTOPIC, DOCUMENT_NUMBER, SENTENCE_IDX]]
# the sentence filter file applies to all splits, remove those topics that we don't have in the split we're loading
topics_in_split = documents.index.get_level_values(TOPIC_ID).unique()
sent_filter = sent_filter.loc[sent_filter[TOPIC_ID].isin(topics_in_split)].copy()
# obtain doc-id from topic+subtopic+document number
documents_with_doc_number_in_index = documents.set_index(DOCUMENT_NUMBER, append=True).reset_index(level=DOCUMENT_ID, drop=True).sort_index()
sent_filter[DOCUMENT_ID] = sent_filter[[TOPIC_ID, SUBTOPIC, DOCUMENT_NUMBER]].apply(lambda row: documents_with_doc_number_in_index[DOCUMENT_ID].loc[tuple(row.values)], axis=1)
all_mentions_to_keep = []
for doc_id, df in mentions.groupby(DOCUMENT_ID):
sentences_to_keep = sent_filter.loc[sent_filter[DOCUMENT_ID] == doc_id]
# we only remove action phrases and leave the other mentions in place, so that we can potentially mask them for
# analysis, see python.handwritten_baseline.pipeline.data.processing.masking.MentionMaskingStage
is_official_evaluation_sentence = df[SENTENCE_IDX].isin(sentences_to_keep[SENTENCE_IDX])
is_action_mention = df[MENTION_TYPE].isin(MENTION_TYPES_ACTION)
mentions_to_keep = df.loc[is_official_evaluation_sentence | (~is_action_mention)]
all_mentions_to_keep.append(mentions_to_keep)
mentions = pd.concat(all_mentions_to_keep).sort_index()
return documents, contents, mentions, entities_events
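# A hedged usage sketch (paths and the _demo_* name are placeholders, not part
# of this module): load one split, optionally restricted to the officially
# annotated sentences, and inspect the filtered mentions.
def _demo_read_split():
    docs, contents, mentions, entities_events = read_split_data(
        root=Path("data/ecbplus/train"),                 # hypothetical split directory
        sentence_filter_csv=Path("data/sentences.csv"))  # hypothetical filter file
    return mentions.head()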
| 50.13198
| 183
| 0.691272
|
import logging
import os
import re
import xml.etree.ElementTree as ET
from pathlib import Path
from typing import Any, Tuple, Optional
import pandas as pd
from python import TOPIC_ID, SUBTOPIC, DOCUMENT_NUMBER, DOCUMENT_ID, SENTENCE_IDX, TOKEN_IDX, TOKEN_IDX_TO, \
TOKEN_IDX_FROM, TOKEN, MENTION_ID, EVENT, MENTION_TYPE, DESCRIPTION, MENTION_TYPES_ACTION
logger = logging.getLogger()
def read_xml(xml_path) -> Tuple[Any, Any, Any, Any, Any]:
tree = ET.parse(xml_path)
root = tree.getroot()
assert root.tag == "Document"
doc_filename = root.attrib["doc_name"]
doc_id = root.attrib["doc_id"]
m = re.match(r"(?P<topic_id>\d+)_(?P<document_number>\d+)(?P<subtopic>\w+)\.xml", doc_filename)
topic_id = m.group("topic_id")
subtopic = m.group("subtopic")
document_number = int(m.group("document_number"))
documents_index = pd.MultiIndex.from_tuples([(topic_id, subtopic, doc_id)],
names=[TOPIC_ID, SUBTOPIC, DOCUMENT_ID])
documents = pd.DataFrame({DOCUMENT_ID: pd.Series(doc_id, index=documents_index),
DOCUMENT_NUMBER: pd.Series(document_number, index=documents_index)})
contents_rows = []
contents_index = []
for token_elmt in root.iter("token"):
sentence_idx = int(token_elmt.attrib["sentence"])
token_idx = int(token_elmt.attrib["number"])
contents_index.append((doc_id, sentence_idx, token_idx))
token = token_elmt.text
contents_rows.append({TOKEN: token})
contents_index = pd.MultiIndex.from_tuples(contents_index, names=[DOCUMENT_ID, SENTENCE_IDX, TOKEN_IDX])
contents = pd.DataFrame(contents_rows, index=contents_index)
mentions_rows = []
mentions_index = []
entities_events = []
for markable in root.find("Markables").getchildren():
if markable.tag == "UNKNOWN_INSTANCE_TAG":
continue
mention_id = int(markable.attrib["m_id"])
# there are markables without spans, these are descriptions of entities / events which we want to keep
if "TAG_DESCRIPTOR" in markable.attrib.keys():
if "instance_id" in markable.attrib.keys():
entities_events.append({
EVENT: markable.attrib["instance_id"],
DESCRIPTION: markable.attrib["TAG_DESCRIPTOR"]
})
continue
token_ids = [int(anchor.attrib["t_id"]) for anchor in markable.iter("token_anchor")]
token_ids_from, token_ids_to = min(token_ids), max(token_ids)
# the token_ids are cumulative token indexes, remove their cumulative nature
token_indexes = contents.index.get_level_values(TOKEN_IDX).values
token_idx_from = token_indexes[
token_ids_from - 1] # -1 because token_ids start at 1, so we need to access index 0 in the dataframe to find t_id 1
token_idx_to = token_indexes[
token_ids_to - 1] + 1 # additionally +1 here because we want mention spans represented as intervals [from, to[
sentence_idx = contents.index.get_level_values(SENTENCE_IDX).values[token_ids_from - 1]
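        # worked example: if sentence 0 holds tokens 0..9 and sentence 1 holds
        # tokens 0..5, then t_id 11 is the 11th token overall, i.e. row 10 of
        # `contents`, which maps back to (sentence_idx=1, token_idx=0).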
# resolve non-contiguous mentions
        # a contiguous span [from, to[ covers exactly (to - from) tokens
        is_non_contiguous_mention = len(token_ids) < token_idx_to - token_idx_from
if is_non_contiguous_mention:
logger.info("Converted non-contiguous mention to contiguous mention.")
mentions_index.append((doc_id, mention_id))
mentions_rows.append({SENTENCE_IDX: sentence_idx,
TOKEN_IDX_FROM: token_idx_from,
TOKEN_IDX_TO: token_idx_to,
MENTION_TYPE: markable.tag})
mentions_index = pd.MultiIndex.from_tuples(mentions_index, names=[DOCUMENT_ID, MENTION_ID])
mentions = pd.DataFrame(mentions_rows, index=mentions_index)
entities_events = pd.DataFrame(entities_events).set_index(EVENT)
# 4. read relations (clusters)
clusters_rows = []
for relation in root.find("Relations").getchildren():
tags_of_interest = ["CROSS_DOC_COREF", "INTRA_DOC_COREF"]
        if relation.tag not in tags_of_interest:
logger.info("Unexpected tag " + relation.tag)
raise NotImplementedError
# There are relations with tags INTRA_DOC_COREF and CROSS_DOC_COREF. The cross-doc ones have a "note" attribute.
if "note" in relation.attrib:
# this is the case for CROSS_DOC_COREF tags
relation_id = relation.attrib["note"]
else:
# this is the case for INTRA_DOC_COREF tags
relation_id = doc_id + "_" + relation.attrib["r_id"]
for mention in relation.iter("source"):
mention_id = int(mention.attrib["m_id"])
clusters_rows.append({EVENT: relation_id, DOCUMENT_ID: doc_id, MENTION_ID: mention_id})
clusters = pd.DataFrame(clusters_rows)
# 5. create relations for singletons
# In ECB plus, there are ACTION_OCCURRENCE markables which are not assigned to a relation. These are singletons. We
# add one entry for each singleton to `clusters` to ensure consistency. Note that the opposite also exists:
# singleton mentions which are marked as participating in a cross-doc coref relation, but there is no second
# mention for this relation.
if clusters.empty:
singletons = mentions.index.to_frame().reset_index(drop=True)
else:
# This can most likely be done in a nicer way using some index difference...
outer = pd.merge(mentions, clusters, left_index=True, right_on=[DOCUMENT_ID, MENTION_ID], how="outer")
singletons = outer.loc[outer[EVENT].isna(), [DOCUMENT_ID, MENTION_ID]]
singletons[EVENT] = "SINGLETON_" + singletons.astype(str).apply("_".join, axis=1)
    clusters = pd.concat([clusters, singletons], sort=False).reset_index(drop=True)
return documents, contents, mentions, clusters, entities_events
def read_split_data(root: Path, sentence_filter_csv: Optional[Path]):
documents = []
contents = []
mentions = []
clusters = []
entities_events = []
# enumerate files
for root, dirs, files in os.walk(str(root.absolute())):
for file in files:
path = os.path.abspath(os.path.join(root, file))
f_documents, f_contents, f_mentions, f_clusters, f_entities_events = read_xml(path)
documents.append(f_documents)
contents.append(f_contents)
mentions.append(f_mentions)
clusters.append(f_clusters)
entities_events.append(f_entities_events)
documents = pd.concat(documents).sort_index()
contents = pd.concat(contents).sort_index()
mentions = pd.concat(mentions).sort_index()
clusters = pd.concat(clusters, sort=False)
entities_events = pd.concat(entities_events).sort_index()
# assert that every mention participates only in one cluster -> meaning we can just add an 'EVENT' column to each mention
assert clusters.duplicated(subset=[DOCUMENT_ID, MENTION_ID]).value_counts().get(True, 0) == 0
clusters = clusters.set_index([DOCUMENT_ID, MENTION_ID])
mentions = pd.merge(mentions, clusters, left_index=True, right_index=True).sort_index()
# read file which tells us from which sentences we should keep event mentions
if sentence_filter_csv is not None:
sent_filter = pd.read_csv(sentence_filter_csv)
doc_number_and_subtopic = sent_filter["File"].str.split("ecb", expand=True)
doc_number_and_subtopic.columns = [DOCUMENT_NUMBER, SUBTOPIC]
doc_number_and_subtopic[DOCUMENT_NUMBER] = doc_number_and_subtopic[DOCUMENT_NUMBER].astype(int)
doc_number_and_subtopic[SUBTOPIC].replace({"plus": "ecbplus", "": "ecb"}, inplace=True)
sent_filter = pd.concat([sent_filter.drop(columns="File"), doc_number_and_subtopic], axis=1)
sent_filter.rename(columns={"Topic": TOPIC_ID, "Sentence Number": SENTENCE_IDX}, inplace=True)
sent_filter[TOPIC_ID] = sent_filter[TOPIC_ID].astype(str)
sent_filter = sent_filter[[TOPIC_ID, SUBTOPIC, DOCUMENT_NUMBER, SENTENCE_IDX]]
# the sentence filter file applies to all splits, remove those topics that we don't have in the split we're loading
topics_in_split = documents.index.get_level_values(TOPIC_ID).unique()
sent_filter = sent_filter.loc[sent_filter[TOPIC_ID].isin(topics_in_split)].copy()
# obtain doc-id from topic+subtopic+document number
documents_with_doc_number_in_index = documents.set_index(DOCUMENT_NUMBER, append=True).reset_index(level=DOCUMENT_ID, drop=True).sort_index()
sent_filter[DOCUMENT_ID] = sent_filter[[TOPIC_ID, SUBTOPIC, DOCUMENT_NUMBER]].apply(lambda row: documents_with_doc_number_in_index[DOCUMENT_ID].loc[tuple(row.values)], axis=1)
all_mentions_to_keep = []
for doc_id, df in mentions.groupby(DOCUMENT_ID):
sentences_to_keep = sent_filter.loc[sent_filter[DOCUMENT_ID] == doc_id]
# we only remove action phrases and leave the other mentions in place, so that we can potentially mask them for
# analysis, see python.handwritten_baseline.pipeline.data.processing.masking.MentionMaskingStage
is_official_evaluation_sentence = df[SENTENCE_IDX].isin(sentences_to_keep[SENTENCE_IDX])
is_action_mention = df[MENTION_TYPE].isin(MENTION_TYPES_ACTION)
mentions_to_keep = df.loc[is_official_evaluation_sentence | (~is_action_mention)]
all_mentions_to_keep.append(mentions_to_keep)
mentions = pd.concat(all_mentions_to_keep).sort_index()
return documents, contents, mentions, entities_events
| true
| true
|
f70245611ab460b5a18c19a0fe4fb4223b15e2ca
| 3,343
|
py
|
Python
|
simplesocial/simplesocial/settings.py
|
Thesirloc/Stargazer-club-django
|
90d2d95f7bb282756582734c591b37b9473b992e
|
[
"MIT"
] | 1
|
2022-02-06T06:26:23.000Z
|
2022-02-06T06:26:23.000Z
|
simplesocial/simplesocial/settings.py
|
Thesirloc/Stargazer-club-django
|
90d2d95f7bb282756582734c591b37b9473b992e
|
[
"MIT"
] | null | null | null |
simplesocial/simplesocial/settings.py
|
Thesirloc/Stargazer-club-django
|
90d2d95f7bb282756582734c591b37b9473b992e
|
[
"MIT"
] | 2
|
2018-12-30T12:59:11.000Z
|
2019-01-23T12:50:35.000Z
|
"""
Django settings for simplesocial project.
Generated by 'django-admin startproject' using Django 2.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR = os.path.join(BASE_DIR, 'templates')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '%2^#bfjd61932k^l^fz4ztt+e*9ahjda(7w3xaa0d+^@a&=-6*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap4',
'accounts',
'groups',
'posts',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'simplesocial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATE_DIR,],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'simplesocial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static'),]
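# e.g. with the staticfiles app enabled, a file at static/css/site.css is
# served at /static/css/site.css in development and referenced in templates
# via {% static 'css/site.css' %}.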
LOGIN_REDIRECT_URL = 'test'
LOGOUT_REDIRECT_URL = 'thanks'
| 25.914729
| 91
| 0.69668
|
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR = os.path.join(BASE_DIR, 'templates')
SECRET_KEY = '%2^#bfjd61932k^l^fz4ztt+e*9ahjda(7w3xaa0d+^@a&=-6*'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap4',
'accounts',
'groups',
'posts',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'simplesocial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATE_DIR,],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'simplesocial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static'),]
LOGIN_REDIRECT_URL = 'test'
LOGOUT_REDIRECT_URL = 'thanks'
| true
| true
|
f7024605869dd7788905637cfccaa41707efb6c3
| 256
|
py
|
Python
|
data/scripts/reverse.py
|
levindu/OpenCC
|
345ea91303e5b3d9332dc51ea73370dac83e4c6b
|
[
"Apache-2.0"
] | 43
|
2018-09-17T00:45:35.000Z
|
2021-11-14T23:56:45.000Z
|
data/scripts/reverse.py
|
levindu/OpenCC
|
345ea91303e5b3d9332dc51ea73370dac83e4c6b
|
[
"Apache-2.0"
] | 7
|
2019-11-26T10:48:14.000Z
|
2021-06-13T04:49:58.000Z
|
data/scripts/reverse.py
|
levindu/OpenCC
|
345ea91303e5b3d9332dc51ea73370dac83e4c6b
|
[
"Apache-2.0"
] | 6
|
2018-09-17T02:09:59.000Z
|
2020-08-15T13:57:44.000Z
|
#!/usr/bin/env python
#coding: utf-8
import sys
from common import reverse_items
if len(sys.argv) != 3:
print("Reverse key and value of all pairs")
print(("Usage: ", sys.argv[0], "[input] [output]"))
exit(1)
reverse_items(sys.argv[1], sys.argv[2])
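# The real reverse_items lives in common.py and is not shown here; a minimal
# sketch of the idea, assuming OpenCC's tab-separated "key<TAB>values" line
# format (the _sketch name is hypothetical):
def _reverse_items_sketch(input_path, output_path):
    with open(input_path) as fin, open(output_path, "w") as fout:
        for line in fin:
            key, _, value = line.rstrip("\n").partition("\t")
            fout.write(value + "\t" + key + "\n")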
| 21.333333
| 53
| 0.671875
|
import sys
from common import reverse_items
if len(sys.argv) != 3:
print("Reverse key and value of all pairs")
print(("Usage: ", sys.argv[0], "[input] [output]"))
exit(1)
reverse_items(sys.argv[1], sys.argv[2])
| true
| true
|
f70246589eef38aa124a119a1dbb44c4c2cf9dfc
| 6,422
|
py
|
Python
|
docker/Dockerfile.py
|
danieldanciu/toil
|
5089ccd0ec9dc48e3b3947deabd67fbb8e667d39
|
[
"Apache-2.0"
] | null | null | null |
docker/Dockerfile.py
|
danieldanciu/toil
|
5089ccd0ec9dc48e3b3947deabd67fbb8e667d39
|
[
"Apache-2.0"
] | null | null | null |
docker/Dockerfile.py
|
danieldanciu/toil
|
5089ccd0ec9dc48e3b3947deabd67fbb8e667d39
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import textwrap
applianceSelf = os.environ['TOIL_APPLIANCE_SELF']
sdistName = os.environ['_TOIL_SDIST_NAME']
dependencies = ' '.join(['libffi-dev', # For client side encryption for extras with PyNACL
'python3.6',
'python3.6-dev',
'python-dev', # For installing Python packages with native code
'python-pip', # Bootstrap pip, but needs upgrading, see below
'python3-pip',
'libcurl4-openssl-dev',
'libssl-dev',
'wget',
'curl',
'openssh-server',
'mesos=1.0.1-2.0.94.ubuntu1604',
"nodejs", # CWL support for javascript expressions
'rsync',
'screen',
'build-essential', # We need a build environment to build Singularity 3.
'uuid-dev',
'libgpgme11-dev',
'libseccomp-dev',
'pkg-config',
'squashfs-tools',
'cryptsetup',
'git'])
def heredoc(s):
s = textwrap.dedent(s).format(**globals())
return s[1:] if s.startswith('\n') else s
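# e.g. heredoc('''
#     Version: {applianceSelf}
#     ''') dedents the block, substitutes module-level names such as
# applianceSelf via str.format(**globals()), and drops the leading newline
# left by the opening triple quote.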
motd = heredoc('''
This is the Toil appliance. You can run your Toil script directly on the appliance.
Run toil <workflow>.py --help to see all options for running your workflow.
For more information see http://toil.readthedocs.io/en/latest/
Copyright (C) 2015-2018 Regents of the University of California
Version: {applianceSelf}
''')
# Prepare motd to be echoed in the Dockerfile using a RUN statement that uses bash's print
motd = ''.join(l + '\\n\\\n' for l in motd.splitlines())
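# e.g. a motd line "Version: 1.0" becomes the characters 'Version: 1.0\n\'
# followed by a real newline, so the printf '...' emitted into the Dockerfile
# spans several source lines yet prints real newlines when the shell shows
# /etc/motd.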
print(heredoc('''
FROM ubuntu:16.04
RUN apt-get -y update --fix-missing && apt-get -y upgrade && apt-get -y install apt-transport-https ca-certificates software-properties-common && apt-get clean && rm -rf /var/lib/apt/lists/*
RUN echo "deb http://repos.mesosphere.io/ubuntu/ xenial main" \
> /etc/apt/sources.list.d/mesosphere.list \
&& apt-key adv --keyserver keyserver.ubuntu.com --recv E56151BF \
&& echo "deb http://deb.nodesource.com/node_6.x xenial main" \
> /etc/apt/sources.list.d/nodesource.list \
&& apt-key adv --keyserver keyserver.ubuntu.com --recv 68576280
RUN add-apt-repository -y ppa:deadsnakes/ppa
RUN apt-get -y update --fix-missing && \
DEBIAN_FRONTEND=noninteractive apt-get -y upgrade && \
DEBIAN_FRONTEND=noninteractive apt-get -y install {dependencies} && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN wget https://dl.google.com/go/go1.13.3.linux-amd64.tar.gz && \
tar xvf go1.13.3.linux-amd64.tar.gz && \
mv go/bin/* /usr/bin/ && \
mv go /usr/local/
RUN mkdir -p $(go env GOPATH)/src/github.com/sylabs && \
cd $(go env GOPATH)/src/github.com/sylabs && \
git clone https://github.com/sylabs/singularity.git && \
cd singularity && \
git checkout v3.4.2 && \
./mconfig && \
cd ./builddir && \
make -j4 && \
make install
RUN mkdir /root/.ssh && \
chmod 700 /root/.ssh
ADD waitForKey.sh /usr/bin/waitForKey.sh
ADD customDockerInit.sh /usr/bin/customDockerInit.sh
RUN chmod 777 /usr/bin/waitForKey.sh && chmod 777 /usr/bin/customDockerInit.sh
# The stock pip is too old and can't install from sdist with extras
RUN pip install --upgrade pip==9.0.1
# Default setuptools is too old
RUN pip install --upgrade setuptools==36.5.0
# Include virtualenv, as it is still the recommended way to deploy pipelines
RUN pip install --upgrade virtualenv==15.0.3
# Install s3am (--never-download prevents silent upgrades to pip, wheel and setuptools)
RUN virtualenv --never-download /home/s3am \
&& /home/s3am/bin/pip install s3am==2.0 \
&& ln -s /home/s3am/bin/s3am /usr/local/bin/
# Install statically linked version of docker client
RUN curl https://download.docker.com/linux/static/stable/x86_64/docker-18.06.1-ce.tgz \
| tar -xvzf - --transform='s,[^/]*/,,g' -C /usr/local/bin/ \
&& chmod u+x /usr/local/bin/docker
# Fix for Mesos interface dependency missing on ubuntu
RUN pip install protobuf==3.0.0
# Fix for https://issues.apache.org/jira/browse/MESOS-3793
ENV MESOS_LAUNCHER=posix
# Fix for `screen` (https://github.com/BD2KGenomics/toil/pull/1386#issuecomment-267424561)
ENV TERM linux
# Run bash instead of sh inside of screen
ENV SHELL /bin/bash
RUN echo "defshell -bash" > ~/.screenrc
# An appliance may need to start more appliances, e.g. when the leader appliance launches the
# worker appliance on a worker node. To support this, we embed a self-reference into the image:
ENV TOIL_APPLIANCE_SELF {applianceSelf}
RUN mkdir /var/lib/toil
ENV TOIL_WORKDIR /var/lib/toil
# This component changes most frequently and keeping it last maximizes Docker cache hits.
COPY {sdistName} .
RUN pip install {sdistName}[all]
RUN rm {sdistName}
# We intentionally inherit the default ENTRYPOINT and CMD from the base image, to the effect
# that the running appliance just gives you a shell. To start the Mesos master or slave
# daemons, the user # should override the entrypoint via --entrypoint.
RUN echo '[ ! -z "$TERM" -a -r /etc/motd ] && cat /etc/motd' >> /etc/bash.bashrc \
&& printf '{motd}' > /etc/motd
'''))
| 39.158537
| 194
| 0.618187
|
from __future__ import print_function
import os
import textwrap
applianceSelf = os.environ['TOIL_APPLIANCE_SELF']
sdistName = os.environ['_TOIL_SDIST_NAME']
dependencies = ' '.join(['libffi-dev', 'python3.6',
'python3.6-dev',
'python-dev', 'python-pip', 'python3-pip',
'libcurl4-openssl-dev',
'libssl-dev',
'wget',
'curl',
'openssh-server',
'mesos=1.0.1-2.0.94.ubuntu1604',
"nodejs", 'rsync',
'screen',
'build-essential', 'uuid-dev',
'libgpgme11-dev',
'libseccomp-dev',
'pkg-config',
'squashfs-tools',
'cryptsetup',
'git'])
def heredoc(s):
s = textwrap.dedent(s).format(**globals())
return s[1:] if s.startswith('\n') else s
motd = heredoc('''
This is the Toil appliance. You can run your Toil script directly on the appliance.
Run toil <workflow>.py --help to see all options for running your workflow.
For more information see http://toil.readthedocs.io/en/latest/
Copyright (C) 2015-2018 Regents of the University of California
Version: {applianceSelf}
''')
motd = ''.join(l + '\\n\\\n' for l in motd.splitlines())
print(heredoc('''
FROM ubuntu:16.04
RUN apt-get -y update --fix-missing && apt-get -y upgrade && apt-get -y install apt-transport-https ca-certificates software-properties-common && apt-get clean && rm -rf /var/lib/apt/lists/*
RUN echo "deb http://repos.mesosphere.io/ubuntu/ xenial main" \
> /etc/apt/sources.list.d/mesosphere.list \
&& apt-key adv --keyserver keyserver.ubuntu.com --recv E56151BF \
&& echo "deb http://deb.nodesource.com/node_6.x xenial main" \
> /etc/apt/sources.list.d/nodesource.list \
&& apt-key adv --keyserver keyserver.ubuntu.com --recv 68576280
RUN add-apt-repository -y ppa:deadsnakes/ppa
RUN apt-get -y update --fix-missing && \
DEBIAN_FRONTEND=noninteractive apt-get -y upgrade && \
DEBIAN_FRONTEND=noninteractive apt-get -y install {dependencies} && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
RUN wget https://dl.google.com/go/go1.13.3.linux-amd64.tar.gz && \
tar xvf go1.13.3.linux-amd64.tar.gz && \
mv go/bin/* /usr/bin/ && \
mv go /usr/local/
RUN mkdir -p $(go env GOPATH)/src/github.com/sylabs && \
cd $(go env GOPATH)/src/github.com/sylabs && \
git clone https://github.com/sylabs/singularity.git && \
cd singularity && \
git checkout v3.4.2 && \
./mconfig && \
cd ./builddir && \
make -j4 && \
make install
RUN mkdir /root/.ssh && \
chmod 700 /root/.ssh
ADD waitForKey.sh /usr/bin/waitForKey.sh
ADD customDockerInit.sh /usr/bin/customDockerInit.sh
RUN chmod 777 /usr/bin/waitForKey.sh && chmod 777 /usr/bin/customDockerInit.sh
# The stock pip is too old and can't install from sdist with extras
RUN pip install --upgrade pip==9.0.1
# Default setuptools is too old
RUN pip install --upgrade setuptools==36.5.0
# Include virtualenv, as it is still the recommended way to deploy pipelines
RUN pip install --upgrade virtualenv==15.0.3
# Install s3am (--never-download prevents silent upgrades to pip, wheel and setuptools)
RUN virtualenv --never-download /home/s3am \
&& /home/s3am/bin/pip install s3am==2.0 \
&& ln -s /home/s3am/bin/s3am /usr/local/bin/
# Install statically linked version of docker client
RUN curl https://download.docker.com/linux/static/stable/x86_64/docker-18.06.1-ce.tgz \
| tar -xvzf - --transform='s,[^/]*/,,g' -C /usr/local/bin/ \
&& chmod u+x /usr/local/bin/docker
# Fix for Mesos interface dependency missing on ubuntu
RUN pip install protobuf==3.0.0
# Fix for https://issues.apache.org/jira/browse/MESOS-3793
ENV MESOS_LAUNCHER=posix
# Fix for `screen` (https://github.com/BD2KGenomics/toil/pull/1386#issuecomment-267424561)
ENV TERM linux
# Run bash instead of sh inside of screen
ENV SHELL /bin/bash
RUN echo "defshell -bash" > ~/.screenrc
# An appliance may need to start more appliances, e.g. when the leader appliance launches the
# worker appliance on a worker node. To support this, we embed a self-reference into the image:
ENV TOIL_APPLIANCE_SELF {applianceSelf}
RUN mkdir /var/lib/toil
ENV TOIL_WORKDIR /var/lib/toil
# This component changes most frequently and keeping it last maximizes Docker cache hits.
COPY {sdistName} .
RUN pip install {sdistName}[all]
RUN rm {sdistName}
# We intentionally inherit the default ENTRYPOINT and CMD from the base image, to the effect
# that the running appliance just gives you a shell. To start the Mesos master or slave
# daemons, the user # should override the entrypoint via --entrypoint.
RUN echo '[ ! -z "$TERM" -a -r /etc/motd ] && cat /etc/motd' >> /etc/bash.bashrc \
&& printf '{motd}' > /etc/motd
'''))
| true
| true
|
f7024664218bf4abb7bd1e6581a318ef1357b07c
| 2,021
|
py
|
Python
|
tools/ci/tc/tests/test_decision.py
|
BasixKOR/wpt
|
aa27d567c10dcdb2aea6884d5155dfaaa177a800
|
[
"BSD-3-Clause"
] | null | null | null |
tools/ci/tc/tests/test_decision.py
|
BasixKOR/wpt
|
aa27d567c10dcdb2aea6884d5155dfaaa177a800
|
[
"BSD-3-Clause"
] | 59
|
2022-01-19T21:35:57.000Z
|
2022-03-30T21:35:27.000Z
|
tools/ci/tc/tests/test_decision.py
|
BasixKOR/wpt
|
aa27d567c10dcdb2aea6884d5155dfaaa177a800
|
[
"BSD-3-Clause"
] | null | null | null |
# mypy: allow-untyped-defs
from unittest import mock
import pytest
from tools.ci.tc import decision
@pytest.mark.parametrize("run_jobs,tasks,expected", [
([], {"task-no-schedule-if": {}}, ["task-no-schedule-if"]),
([], {"task-schedule-if-no-run-job": {"schedule-if": {}}}, []),
(["job"],
{"job-present": {"schedule-if": {"run-job": ["other-job", "job"]}}},
["job-present"]),
(["job"], {"job-missing": {"schedule-if": {"run-job": ["other-job"]}}}, []),
(["all"], {"job-all": {"schedule-if": {"run-job": ["other-job"]}}}, ["job-all"]),
(["job"],
{"job-1": {"schedule-if": {"run-job": ["job"]}},
"job-2": {"schedule-if": {"run-job": ["other-job"]}}},
["job-1"]),
])
def test_filter_schedule_if(run_jobs, tasks, expected):
with mock.patch("tools.ci.tc.decision.get_run_jobs",
return_value=run_jobs) as get_run_jobs:
assert (decision.filter_schedule_if({}, tasks) ==
{name: tasks[name] for name in expected})
        assert get_run_jobs.call_count in (0, 1)
@pytest.mark.parametrize("msg,expected", [
("Some initial line\n\ntc-jobs:foo,bar", {"foo", "bar"}),
("Some initial line\n\ntc-jobs:foo, bar", {"foo", "bar"}),
("tc-jobs:foo, bar \nbaz", {"foo", "bar"}),
("tc-jobs:all", {"all"}),
("", set()),
("tc-jobs:foo\ntc-jobs:bar", {"foo"})])
@pytest.mark.parametrize("event", [
{"commits": [{"message": "<message>"}]},
{"pull_request": {"body": "<message>"}}
])
def test_extra_jobs_pr(msg, expected, event):
def sub(obj):
"""Copy obj, except if it's a string with the value <message>
replace it with the value of the msg argument"""
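        # e.g. sub({"pull_request": {"body": "<message>"}}) returns
        # {"pull_request": {"body": msg}} for the current msg value.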
if isinstance(obj, dict):
return {key: sub(value) for (key, value) in obj.items()}
elif isinstance(obj, list):
return [sub(value) for value in obj]
elif obj == "<message>":
return msg
return obj
event = sub(event)
assert decision.get_extra_jobs(event) == expected
| 35.45614
| 85
| 0.562098
|
from unittest import mock
import pytest
from tools.ci.tc import decision
@pytest.mark.parametrize("run_jobs,tasks,expected", [
([], {"task-no-schedule-if": {}}, ["task-no-schedule-if"]),
([], {"task-schedule-if-no-run-job": {"schedule-if": {}}}, []),
(["job"],
{"job-present": {"schedule-if": {"run-job": ["other-job", "job"]}}},
["job-present"]),
(["job"], {"job-missing": {"schedule-if": {"run-job": ["other-job"]}}}, []),
(["all"], {"job-all": {"schedule-if": {"run-job": ["other-job"]}}}, ["job-all"]),
(["job"],
{"job-1": {"schedule-if": {"run-job": ["job"]}},
"job-2": {"schedule-if": {"run-job": ["other-job"]}}},
["job-1"]),
])
def test_filter_schedule_if(run_jobs, tasks, expected):
with mock.patch("tools.ci.tc.decision.get_run_jobs",
return_value=run_jobs) as get_run_jobs:
assert (decision.filter_schedule_if({}, tasks) ==
{name: tasks[name] for name in expected})
        assert get_run_jobs.call_count in (0, 1)
@pytest.mark.parametrize("msg,expected", [
("Some initial line\n\ntc-jobs:foo,bar", {"foo", "bar"}),
("Some initial line\n\ntc-jobs:foo, bar", {"foo", "bar"}),
("tc-jobs:foo, bar \nbaz", {"foo", "bar"}),
("tc-jobs:all", {"all"}),
("", set()),
("tc-jobs:foo\ntc-jobs:bar", {"foo"})])
@pytest.mark.parametrize("event", [
{"commits": [{"message": "<message>"}]},
{"pull_request": {"body": "<message>"}}
])
def test_extra_jobs_pr(msg, expected, event):
def sub(obj):
if isinstance(obj, dict):
return {key: sub(value) for (key, value) in obj.items()}
elif isinstance(obj, list):
return [sub(value) for value in obj]
elif obj == "<message>":
return msg
return obj
event = sub(event)
assert decision.get_extra_jobs(event) == expected
| true
| true
|
f70247e814ef6527174be10cc686a320fe4b2b46
| 1,540
|
py
|
Python
|
ParameterFiles/params_en1_i.py
|
dunkenj/eazy-pype
|
9cb8ac765d659ace36c00293a5809fc4a066e1ec
|
[
"MIT"
] | 1
|
2019-07-25T10:55:18.000Z
|
2019-07-25T10:55:18.000Z
|
ParameterFiles/params_en1_i.py
|
dunkenj/eazy-pype
|
9cb8ac765d659ace36c00293a5809fc4a066e1ec
|
[
"MIT"
] | null | null | null |
ParameterFiles/params_en1_i.py
|
dunkenj/eazy-pype
|
9cb8ac765d659ace36c00293a5809fc4a066e1ec
|
[
"MIT"
] | 1
|
2018-12-18T16:31:41.000Z
|
2018-12-18T16:31:41.000Z
|
"""
Main inputs:
(Change for all fields)
"""
eazypath = '/data2/ken/photoz/eazy-photoz/src/eazy '
working_folder = '/data2/ken/EN1_pani'
photometry_catalog = 'en1_phot_with_zspec.fits'
photometry_format = 'fits'
filter_file = 'EN1_filters.res'
translate_file = 'EN1.translate'
zspec_col = 'z_spec'
flux_col = 'flux'
fluxerr_col ='fluxerr'
do_zp = False
do_zp_tests = False
do_subcats = False
do_full = False
do_stellar = False
do_hb = True
do_merge = True
"""
Training parameters
"""
Ncrossval = 1
test_fraction = 0.2
process_outliers = True
correct_extinction = True
"""
Fitting Parameters
(Change only when needed)
"""
# Templates: Any combination of 'eazy', 'swire', 'atlas'
templates = ['eazy', 'atlas', 'cosmos']#, 'swire']#, 'cosmos', 'atlas'] #,'cosmos', 'atlas']
fitting_mode = ['a', '1', '1']
defaults = ['defaults/zphot.eazy',
'defaults/zphot.atlas',
'defaults/zphot.cosmos']
#'defaults/zphot.eazy',
#'defaults/zphot.atlas',
#'defaults/zphot.swire']
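# Note: templates, fitting_mode, defaults, additional_errors,
# template_error_norm and lambda_fit_max appear to be parallel lists with one
# entry per template set, so they should be kept the same length.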
stellar_params = 'defaults/zphot.pickles'
additional_errors = [0.0, 0.0, 0.0]
template_error_norm = [1., 1., 1.]
template_error_file = ''
lambda_fit_max = [5., 30., 30.]
"""
Combination Parameters
"""
include_prior = True
fbad_prior = 'mag' # 'flat', 'vol' or 'mag'
#prior_parameter_path = 'en1_i_prior_coeff.npz'
prior_fname = 'pani_mag'
prior_colname = 'pani_mag'
alpha_colname = 'pani_mag'
"""
System Parameters
(Specific system only - fixed after installation)
"""
block_size = 1e4
ncpus = 10
| 18.333333
| 92
| 0.677273
|
eazypath = '/data2/ken/photoz/eazy-photoz/src/eazy '
working_folder = '/data2/ken/EN1_pani'
photometry_catalog = 'en1_phot_with_zspec.fits'
photometry_format = 'fits'
filter_file = 'EN1_filters.res'
translate_file = 'EN1.translate'
zspec_col = 'z_spec'
flux_col = 'flux'
fluxerr_col ='fluxerr'
do_zp = False
do_zp_tests = False
do_subcats = False
do_full = False
do_stellar = False
do_hb = True
do_merge = True
Ncrossval = 1
test_fraction = 0.2
process_outliers = True
correct_extinction = True
templates = ['eazy', 'atlas', 'cosmos']
fitting_mode = ['a', '1', '1']
defaults = ['defaults/zphot.eazy',
'defaults/zphot.atlas',
'defaults/zphot.cosmos']
stellar_params = 'defaults/zphot.pickles'
additional_errors = [0.0, 0.0, 0.0]
template_error_norm = [1., 1., 1.]
template_error_file = ''
lambda_fit_max = [5., 30., 30.]
include_prior = True
fbad_prior = 'mag'
prior_fname = 'pani_mag'
prior_colname = 'pani_mag'
alpha_colname = 'pani_mag'
block_size = 1e4
ncpus = 10
| true
| true
|
f702480ed21c1be212847c37c1516de78ef6c252
| 3,071
|
py
|
Python
|
bfstpw/views.py
|
dkess/bfstpw
|
c1f4ca4126f33e16e756c109c3b0b34f68bbb351
|
[
"MIT"
] | null | null | null |
bfstpw/views.py
|
dkess/bfstpw
|
c1f4ca4126f33e16e756c109c3b0b34f68bbb351
|
[
"MIT"
] | null | null | null |
bfstpw/views.py
|
dkess/bfstpw
|
c1f4ca4126f33e16e756c109c3b0b34f68bbb351
|
[
"MIT"
] | null | null | null |
from bfstpw.models import ForumThread, ForumPost
from datetime import datetime
from django.conf import settings
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db.models import Count
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.template import Context
import django.utils.timezone
from markdown import markdown
def threadlist(request):
c = Context({"threadlist" :
[{"id":t.id
,"name":t.thread_title
,"poster":t.getOriginalPost().poster
,"replycount":t.postcount
,"lastpage":(t.postcount / getattr(settings, 'BFSTPW_MAX_POSTS_PER_PAGE', 20))+1
,"date":t.mostrecent
,"lastposter":t.getLatestPost().poster
} for t in
ForumThread.objects.sortByLastPost().annotate(postcount=Count('forumpost'))]})
return render(request, 'bfstpw/threadlist.html', c)
def thread(request, thread_id, message=''):
current_thread = get_object_or_404(ForumThread, id=thread_id)
max_posts_per_page = getattr(settings, 'BFSTPW_MAX_POSTS_PER_PAGE', 20)
paginator = Paginator(current_thread.forumpost_set.order_by('date_posted'), max_posts_per_page)
c = Context(
{"threadlist" :
[{"id":t.id
,"name":t.thread_title
} for t in ForumThread.objects.sortByLastPost()],
"thread" : current_thread,
"posts" : paginator.page(request.GET.get('page',1)),
"pages" : paginator.page_range,
"message" : message
})
return render(request, 'bfstpw/thread.html', c)
def post(request, thread_id):
t = get_object_or_404(ForumThread, id=thread_id)
posts = t.forumpost_set
"""
# TODO: don't let users post too quickly
session = request.session
current_time = time()
if (session.get('lastposttime',0) + 10) < current_time:
message_html = markdown(request.POST['message'], safe_mode='escape')
posts.create(poster=request.POST['name'],message_body=message_html,date_posted=datetime.now())
msg = ''
session['lastposttime'] = current_time
else:
msg = "Error: you must wait 10 seconds before posting"
"""
message_html = markdown(request.POST['message'], safe_mode='escape')
posts.create(poster=request.POST['name'], message_body=message_html,
date_posted=django.utils.timezone.now())
pagenum = (posts.count() / getattr(settings, 'BFSTPW_MAX_POSTS_PER_PAGE', 20))+1
return HttpResponseRedirect(reverse('bfstpw-thread', args=(t.id,))+'?page=%d' % pagenum)
def newthreadmake(request):
t = ForumThread(thread_title=request.POST['threadname'])
t.save()
message_html = markdown(request.POST['message'], safe_mode='escape')
t.forumpost_set.create(poster=request.POST['name'],
message_body=message_html,
date_posted=django.utils.timezone.now())
return HttpResponseRedirect(reverse('bfstpw-thread', args=(t.id,)))
| 39.883117
| 102
| 0.674048
|
from bfstpw.models import ForumThread, ForumPost
from datetime import datetime
from django.conf import settings
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db.models import Count
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.template import Context
import django.utils.timezone
from markdown import markdown
def threadlist(request):
c = Context({"threadlist" :
[{"id":t.id
,"name":t.thread_title
,"poster":t.getOriginalPost().poster
,"replycount":t.postcount
,"lastpage":(t.postcount / getattr(settings, 'BFSTPW_MAX_POSTS_PER_PAGE', 20))+1
,"date":t.mostrecent
,"lastposter":t.getLatestPost().poster
} for t in
ForumThread.objects.sortByLastPost().annotate(postcount=Count('forumpost'))]})
return render(request, 'bfstpw/threadlist.html', c)
def thread(request, thread_id, message=''):
current_thread = get_object_or_404(ForumThread, id=thread_id)
max_posts_per_page = getattr(settings, 'BFSTPW_MAX_POSTS_PER_PAGE', 20)
paginator = Paginator(current_thread.forumpost_set.order_by('date_posted'), max_posts_per_page)
c = Context(
{"threadlist" :
[{"id":t.id
,"name":t.thread_title
} for t in ForumThread.objects.sortByLastPost()],
"thread" : current_thread,
"posts" : paginator.page(request.GET.get('page',1)),
"pages" : paginator.page_range,
"message" : message
})
return render(request, 'bfstpw/thread.html', c)
def post(request, thread_id):
t = get_object_or_404(ForumThread, id=thread_id)
posts = t.forumpost_set
message_html = markdown(request.POST['message'], safe_mode='escape')
posts.create(poster=request.POST['name'], message_body=message_html,
date_posted=django.utils.timezone.now())
pagenum = (posts.count() / getattr(settings, 'BFSTPW_MAX_POSTS_PER_PAGE', 20))+1
return HttpResponseRedirect(reverse('bfstpw-thread', args=(t.id,))+'?page=%d' % pagenum)
def newthreadmake(request):
t = ForumThread(thread_title=request.POST['threadname'])
t.save()
message_html = markdown(request.POST['message'], safe_mode='escape')
t.forumpost_set.create(poster=request.POST['name'],
message_body=message_html,
date_posted=django.utils.timezone.now())
return HttpResponseRedirect(reverse('bfstpw-thread', args=(t.id,)))
| true
| true
|
f7024c75af5406622e35a63a1f78be3767533815
| 7,631
|
py
|
Python
|
Cream/lib/models/builders/build_childnet.py
|
wkcn/AutoML
|
106cb0fbc19fa116c4ef5caf232acaaea85822c8
|
[
"MIT"
] | 307
|
2020-10-29T13:17:02.000Z
|
2022-03-30T09:55:49.000Z
|
Cream/lib/models/builders/build_childnet.py
|
QPC-database/AutoML
|
75d86a5e3366a6c9af6c2001ee1ba4fc41c5d6cb
|
[
"MIT"
] | 42
|
2020-10-30T07:09:48.000Z
|
2022-03-29T13:54:56.000Z
|
Cream/lib/models/builders/build_childnet.py
|
QPC-database/AutoML
|
75d86a5e3366a6c9af6c2001ee1ba4fc41c5d6cb
|
[
"MIT"
] | 64
|
2020-10-30T10:08:48.000Z
|
2022-03-30T06:51:01.000Z
|
from lib.utils.util import *
from timm.models.efficientnet_blocks import *
# ChildNet Builder definition.
class ChildNetBuilder:
def __init__(
self,
channel_multiplier=1.0,
channel_divisor=8,
channel_min=None,
output_stride=32,
pad_type='',
act_layer=None,
se_kwargs=None,
norm_layer=nn.BatchNorm2d,
norm_kwargs=None,
drop_path_rate=0.,
feature_location='',
verbose=False,
logger=None):
self.channel_multiplier = channel_multiplier
self.channel_divisor = channel_divisor
self.channel_min = channel_min
self.output_stride = output_stride
self.pad_type = pad_type
self.act_layer = act_layer
self.se_kwargs = se_kwargs
self.norm_layer = norm_layer
self.norm_kwargs = norm_kwargs
self.drop_path_rate = drop_path_rate
self.feature_location = feature_location
assert feature_location in ('pre_pwl', 'post_exp', '')
self.verbose = verbose
self.in_chs = None
self.features = OrderedDict()
self.logger = logger
def _round_channels(self, chs):
return round_channels(
chs,
self.channel_multiplier,
self.channel_divisor,
self.channel_min)
def _make_block(self, ba, block_idx, block_count):
drop_path_rate = self.drop_path_rate * block_idx / block_count
bt = ba.pop('block_type')
ba['in_chs'] = self.in_chs
ba['out_chs'] = self._round_channels(ba['out_chs'])
if 'fake_in_chs' in ba and ba['fake_in_chs']:
ba['fake_in_chs'] = self._round_channels(ba['fake_in_chs'])
ba['norm_layer'] = self.norm_layer
ba['norm_kwargs'] = self.norm_kwargs
ba['pad_type'] = self.pad_type
# block act fn overrides the model default
ba['act_layer'] = ba['act_layer'] if ba['act_layer'] is not None else self.act_layer
assert ba['act_layer'] is not None
if bt == 'ir':
ba['drop_path_rate'] = drop_path_rate
ba['se_kwargs'] = self.se_kwargs
if self.verbose:
self.logger.info(
' InvertedResidual {}, Args: {}'.format(
block_idx, str(ba)))
block = InvertedResidual(**ba)
elif bt == 'ds' or bt == 'dsa':
ba['drop_path_rate'] = drop_path_rate
ba['se_kwargs'] = self.se_kwargs
if self.verbose:
self.logger.info(
' DepthwiseSeparable {}, Args: {}'.format(
block_idx, str(ba)))
block = DepthwiseSeparableConv(**ba)
elif bt == 'cn':
if self.verbose:
self.logger.info(
' ConvBnAct {}, Args: {}'.format(
block_idx, str(ba)))
block = ConvBnAct(**ba)
else:
            assert False, 'Unknown block type (%s) while building model.' % bt
self.in_chs = ba['out_chs'] # update in_chs for arg of next block
return block
def __call__(self, in_chs, model_block_args):
""" Build the blocks
Args:
in_chs: Number of input-channels passed to first block
model_block_args: A list of lists, outer list defines stages, inner
list contains strings defining block configuration(s)
Return:
List of block stacks (each stack wrapped in nn.Sequential)
"""
if self.verbose:
self.logger.info(
'Building model trunk with %d stages...' %
len(model_block_args))
self.in_chs = in_chs
total_block_count = sum([len(x) for x in model_block_args])
total_block_idx = 0
current_stride = 2
current_dilation = 1
feature_idx = 0
stages = []
# outer list of block_args defines the stacks ('stages' by some
# conventions)
for stage_idx, stage_block_args in enumerate(model_block_args):
last_stack = stage_idx == (len(model_block_args) - 1)
if self.verbose:
self.logger.info('Stack: {}'.format(stage_idx))
assert isinstance(stage_block_args, list)
blocks = []
# each stack (stage) contains a list of block arguments
for block_idx, block_args in enumerate(stage_block_args):
last_block = block_idx == (len(stage_block_args) - 1)
extract_features = '' # No features extracted
if self.verbose:
self.logger.info(' Block: {}'.format(block_idx))
# Sort out stride, dilation, and feature extraction details
assert block_args['stride'] in (1, 2)
if block_idx >= 1:
# only the first block in any stack can have a stride > 1
block_args['stride'] = 1
do_extract = False
if self.feature_location == 'pre_pwl':
if last_block:
next_stage_idx = stage_idx + 1
if next_stage_idx >= len(model_block_args):
do_extract = True
else:
do_extract = model_block_args[next_stage_idx][0]['stride'] > 1
elif self.feature_location == 'post_exp':
if block_args['stride'] > 1 or (last_stack and last_block):
do_extract = True
if do_extract:
extract_features = self.feature_location
next_dilation = current_dilation
if block_args['stride'] > 1:
next_output_stride = current_stride * block_args['stride']
if next_output_stride > self.output_stride:
next_dilation = current_dilation * block_args['stride']
block_args['stride'] = 1
if self.verbose:
self.logger.info(
' Converting stride to dilation to maintain output_stride=={}'.format(
self.output_stride))
else:
current_stride = next_output_stride
block_args['dilation'] = current_dilation
if next_dilation != current_dilation:
current_dilation = next_dilation
# create the block
block = self._make_block(
block_args, total_block_idx, total_block_count)
blocks.append(block)
# stash feature module name and channel info for model feature
# extraction
if extract_features:
feature_module = block.feature_module(extract_features)
if feature_module:
feature_module = 'blocks.{}.{}.'.format(
stage_idx, block_idx) + feature_module
feature_channels = block.feature_channels(extract_features)
self.features[feature_idx] = dict(
name=feature_module,
num_chs=feature_channels
)
feature_idx += 1
# incr global block idx (across all stacks)
total_block_idx += 1
stages.append(nn.Sequential(*blocks))
return stages
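# A hedged usage sketch (the decode step and variable names are illustrative;
# block-args decoding typically comes from timm-style helpers, not shown here):
#   builder = ChildNetBuilder(act_layer=nn.ReLU, logger=logging.getLogger(__name__))
#   stages = builder(in_chs=16, model_block_args=decoded_arch)
#   trunk = nn.Sequential(*stages)  # one nn.Sequential per stage, stacked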
| 41.928571
| 103
| 0.538462
|
from lib.utils.util import *
from timm.models.efficientnet_blocks import *
class ChildNetBuilder:
def __init__(
self,
channel_multiplier=1.0,
channel_divisor=8,
channel_min=None,
output_stride=32,
pad_type='',
act_layer=None,
se_kwargs=None,
norm_layer=nn.BatchNorm2d,
norm_kwargs=None,
drop_path_rate=0.,
feature_location='',
verbose=False,
logger=None):
self.channel_multiplier = channel_multiplier
self.channel_divisor = channel_divisor
self.channel_min = channel_min
self.output_stride = output_stride
self.pad_type = pad_type
self.act_layer = act_layer
self.se_kwargs = se_kwargs
self.norm_layer = norm_layer
self.norm_kwargs = norm_kwargs
self.drop_path_rate = drop_path_rate
self.feature_location = feature_location
assert feature_location in ('pre_pwl', 'post_exp', '')
self.verbose = verbose
self.in_chs = None
self.features = OrderedDict()
self.logger = logger
def _round_channels(self, chs):
return round_channels(
chs,
self.channel_multiplier,
self.channel_divisor,
self.channel_min)
def _make_block(self, ba, block_idx, block_count):
drop_path_rate = self.drop_path_rate * block_idx / block_count
bt = ba.pop('block_type')
ba['in_chs'] = self.in_chs
ba['out_chs'] = self._round_channels(ba['out_chs'])
if 'fake_in_chs' in ba and ba['fake_in_chs']:
ba['fake_in_chs'] = self._round_channels(ba['fake_in_chs'])
ba['norm_layer'] = self.norm_layer
ba['norm_kwargs'] = self.norm_kwargs
ba['pad_type'] = self.pad_type
ba['act_layer'] = ba['act_layer'] if ba['act_layer'] is not None else self.act_layer
assert ba['act_layer'] is not None
if bt == 'ir':
ba['drop_path_rate'] = drop_path_rate
ba['se_kwargs'] = self.se_kwargs
if self.verbose:
self.logger.info(
' InvertedResidual {}, Args: {}'.format(
block_idx, str(ba)))
block = InvertedResidual(**ba)
elif bt == 'ds' or bt == 'dsa':
ba['drop_path_rate'] = drop_path_rate
ba['se_kwargs'] = self.se_kwargs
if self.verbose:
self.logger.info(
' DepthwiseSeparable {}, Args: {}'.format(
block_idx, str(ba)))
block = DepthwiseSeparableConv(**ba)
elif bt == 'cn':
if self.verbose:
self.logger.info(
' ConvBnAct {}, Args: {}'.format(
block_idx, str(ba)))
block = ConvBnAct(**ba)
else:
            assert False, 'Unknown block type (%s) while building model.' % bt
self.in_chs = ba['out_chs']
return block
def __call__(self, in_chs, model_block_args):
if self.verbose:
self.logger.info(
'Building model trunk with %d stages...' %
len(model_block_args))
self.in_chs = in_chs
total_block_count = sum([len(x) for x in model_block_args])
total_block_idx = 0
current_stride = 2
current_dilation = 1
feature_idx = 0
stages = []
for stage_idx, stage_block_args in enumerate(model_block_args):
last_stack = stage_idx == (len(model_block_args) - 1)
if self.verbose:
self.logger.info('Stack: {}'.format(stage_idx))
assert isinstance(stage_block_args, list)
blocks = []
for block_idx, block_args in enumerate(stage_block_args):
last_block = block_idx == (len(stage_block_args) - 1)
                extract_features = ''
                if self.verbose:
self.logger.info(' Block: {}'.format(block_idx))
assert block_args['stride'] in (1, 2)
if block_idx >= 1:
block_args['stride'] = 1
do_extract = False
if self.feature_location == 'pre_pwl':
if last_block:
next_stage_idx = stage_idx + 1
if next_stage_idx >= len(model_block_args):
do_extract = True
else:
do_extract = model_block_args[next_stage_idx][0]['stride'] > 1
elif self.feature_location == 'post_exp':
if block_args['stride'] > 1 or (last_stack and last_block):
do_extract = True
if do_extract:
extract_features = self.feature_location
next_dilation = current_dilation
if block_args['stride'] > 1:
next_output_stride = current_stride * block_args['stride']
if next_output_stride > self.output_stride:
next_dilation = current_dilation * block_args['stride']
block_args['stride'] = 1
if self.verbose:
self.logger.info(
' Converting stride to dilation to maintain output_stride=={}'.format(
self.output_stride))
else:
current_stride = next_output_stride
block_args['dilation'] = current_dilation
if next_dilation != current_dilation:
current_dilation = next_dilation
block = self._make_block(
block_args, total_block_idx, total_block_count)
blocks.append(block)
if extract_features:
feature_module = block.feature_module(extract_features)
if feature_module:
feature_module = 'blocks.{}.{}.'.format(
stage_idx, block_idx) + feature_module
feature_channels = block.feature_channels(extract_features)
self.features[feature_idx] = dict(
name=feature_module,
num_chs=feature_channels
)
feature_idx += 1
total_block_idx += 1
stages.append(nn.Sequential(*blocks))
return stages
| true
| true
|
f7024d3b025f432ada7fbb9c7ac3ca1994b5c05e
| 8,915
|
py
|
Python
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/payment_sips/models/payment.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | 1
|
2019-12-19T01:53:13.000Z
|
2019-12-19T01:53:13.000Z
|
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/payment_sips/models/payment.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/payment_sips/models/payment.py
|
gtfarng/Odoo_migrade
|
9cc28fae4c379e407645248a29d22139925eafe7
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Copyright 2015 Eezee-It
import json
import logging
from hashlib import sha256
import urlparse
from odoo import models, fields, api
from odoo.tools.float_utils import float_compare
from odoo.tools.translate import _
from odoo.addons.payment.models.payment_acquirer import ValidationError
from odoo.addons.payment_sips.controllers.main import SipsController
_logger = logging.getLogger(__name__)
CURRENCY_CODES = {
'EUR': '978',
'USD': '840',
'CHF': '756',
'GBP': '826',
'CAD': '124',
'JPY': '392',
'MXN': '484',
'TRY': '949',
'AUD': '036',
'NZD': '554',
'NOK': '578',
'BRL': '986',
'ARS': '032',
'KHR': '116',
'TWD': '901',
}
class AcquirerSips(models.Model):
_inherit = 'payment.acquirer'
provider = fields.Selection(selection_add=[('sips', 'Sips')])
sips_merchant_id = fields.Char('SIPS API User Password', required_if_provider='sips', groups='base.group_user')
sips_secret = fields.Char('SIPS Secret', size=64, required_if_provider='sips', groups='base.group_user')
def _get_sips_urls(self, environment):
""" Worldline SIPS URLS """
url = {
'prod': 'https://payment-webinit.sips-atos.com/paymentInit',
'test': 'https://payment-webinit.simu.sips-atos.com/paymentInit', }
return {'sips_form_url': url.get(environment, url['test']), }
def _sips_generate_shasign(self, values):
""" Generate the shasign for incoming or outgoing communications.
:param dict values: transaction values
:return string: shasign
"""
if self.provider != 'sips':
raise ValidationError(_('Incorrect payment acquirer provider'))
data = values['Data']
        # Test key provided by Worldline
key = u'002001000000001_KEY1'
if self.environment == 'prod':
key = getattr(self, 'sips_secret')
shasign = sha256(data + key)
return shasign.hexdigest()
@api.multi
def sips_form_generate_values(self, values):
self.ensure_one()
base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
currency = self.env['res.currency'].sudo().browse(values['currency_id'])
currency_code = CURRENCY_CODES.get(currency.name, False)
if not currency_code:
            raise ValidationError(_('Currency not supported by Worldline'))
amount = int(values['amount'] * 100)
if self.environment == 'prod':
# For production environment, key version 2 is required
merchant_id = getattr(self, 'sips_merchant_id')
key_version = '2'
else:
            # Test key provided by Atos Worldline works only with version 1
merchant_id = '002001000000001'
key_version = '1'
sips_tx_values = dict(values)
sips_tx_values.update({
'Data': u'amount=%s|' % amount +
u'currencyCode=%s|' % currency_code +
u'merchantId=%s|' % merchant_id +
u'normalReturnUrl=%s|' % urlparse.urljoin(base_url, SipsController._return_url) +
u'automaticResponseUrl=%s|' % urlparse.urljoin(base_url, SipsController._return_url) +
u'transactionReference=%s|' % values['reference'] +
u'statementReference=%s|' % values['reference'] +
u'keyVersion=%s' % key_version,
'InterfaceVersion': 'HP_2.3',
})
return_context = {}
if sips_tx_values.get('return_url'):
return_context[u'return_url'] = u'%s' % sips_tx_values.pop('return_url')
return_context[u'reference'] = u'%s' % sips_tx_values['reference']
sips_tx_values['Data'] += u'|returnContext=%s' % (json.dumps(return_context))
shasign = self._sips_generate_shasign(sips_tx_values)
sips_tx_values['Seal'] = shasign
return sips_tx_values
@api.multi
def sips_get_form_action_url(self):
self.ensure_one()
return self._get_sips_urls(self.environment)['sips_form_url']
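    # Hedged example of the generated form values (illustrative numbers only):
    #   Data = u'amount=1000|currencyCode=978|merchantId=002001000000001|...|keyVersion=1'
    #   Seal = sha256(Data + key).hexdigest()
    # The SIPS gateway recomputes the same digest to authenticate the post.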
class TxSips(models.Model):
_inherit = 'payment.transaction'
_sips_valid_tx_status = ['00']
_sips_wait_tx_status = ['90', '99']
_sips_refused_tx_status = ['05', '14', '34', '54', '75', '97']
_sips_error_tx_status = ['03', '12', '24', '25', '30', '40', '51', '63', '94']
_sips_pending_tx_status = ['60']
_sips_cancel_tx_status = ['17']
# --------------------------------------------------
# FORM RELATED METHODS
# --------------------------------------------------
def _sips_data_to_object(self, data):
res = {}
for element in data.split('|'):
element_split = element.split('=')
res[element_split[0]] = element_split[1]
return res
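    # e.g. _sips_data_to_object(u'responseCode=00|transactionReference=SO042')
    #      returns {'responseCode': '00', 'transactionReference': 'SO042'}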
@api.model
def _sips_form_get_tx_from_data(self, data):
""" Given a data dict coming from sips, verify it and find the related
transaction record. """
data = self._sips_data_to_object(data.get('Data'))
reference = data.get('transactionReference')
if not reference:
custom = json.loads(data.pop('returnContext', False) or '{}')
reference = custom.get('reference')
payment_tx = self.search([('reference', '=', reference)])
if not payment_tx or len(payment_tx) > 1:
error_msg = _('Sips: received data for reference %s') % reference
if not payment_tx:
error_msg += _('; no order found')
else:
                error_msg += _('; multiple orders found')
_logger.error(error_msg)
raise ValidationError(error_msg)
return payment_tx
@api.multi
def _sips_form_get_invalid_parameters(self, data):
invalid_parameters = []
data = self._sips_data_to_object(data.get('Data'))
# TODO: txn_id: should be false at draft, set afterwards, and verified with txn details
if self.acquirer_reference and data.get('transactionReference') != self.acquirer_reference:
invalid_parameters.append(('transactionReference', data.get('transactionReference'), self.acquirer_reference))
# check what is bought
if float_compare(float(data.get('amount', '0.0')) / 100, self.amount, 2) != 0:
invalid_parameters.append(('amount', data.get('amount'), '%.2f' % self.amount))
if self.partner_reference and data.get('customerId') != self.partner_reference:
invalid_parameters.append(('customerId', data.get('customerId'), self.partner_reference))
return invalid_parameters
@api.multi
def _sips_form_validate(self, data):
data = self._sips_data_to_object(data.get('Data'))
status = data.get('responseCode')
data = {
'acquirer_reference': data.get('transactionReference'),
'partner_reference': data.get('customerId'),
'date_validate': data.get('transactionDateTime',
fields.Datetime.now())
}
res = False
if status in self._sips_valid_tx_status:
msg = 'Payment for tx ref: %s, got response [%s], set as done.' % \
(self.reference, status)
_logger.info(msg)
data.update(state='done', state_message=msg)
res = True
elif status in self._sips_error_tx_status:
msg = 'Payment for tx ref: %s, got response [%s], set as ' \
'error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_wait_tx_status:
msg = 'Received wait status for payment ref: %s, got response ' \
'[%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_refused_tx_status:
msg = 'Received refused status for payment ref: %s, got response' \
' [%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_pending_tx_status:
msg = 'Payment ref: %s, got response [%s] set as pending.' \
% (self.reference, status)
data.update(state='pending', state_message=msg)
elif status in self._sips_cancel_tx_status:
msg = 'Received notification for payment ref: %s, got response ' \
'[%s], set as cancel.' % (self.reference, status)
data.update(state='cancel', state_message=msg)
else:
msg = 'Received unrecognized status for payment ref: %s, got ' \
'response [%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
_logger.info(msg)
self.write(data)
return res
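    # Editor's note: a minimal, hypothetical sketch (not part of the
    # original module) of how the status dispatch above could be made
    # table-driven instead of chained elif branches:
    #
    #     STATE_BY_STATUS = [
    #         (self._sips_valid_tx_status,   'done'),
    #         (self._sips_error_tx_status,   'error'),
    #         (self._sips_wait_tx_status,    'error'),
    #         (self._sips_refused_tx_status, 'error'),
    #         (self._sips_pending_tx_status, 'pending'),
    #         (self._sips_cancel_tx_status,  'cancel'),
    #     ]
    #
    # with 'error' kept as the fallback for unrecognized response codes.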
| 39.799107
| 122
| 0.598654
|
import json
import logging
from hashlib import sha256
import urlparse
from odoo import models, fields, api
from odoo.tools.float_utils import float_compare
from odoo.tools.translate import _
from odoo.addons.payment.models.payment_acquirer import ValidationError
from odoo.addons.payment_sips.controllers.main import SipsController
_logger = logging.getLogger(__name__)
CURRENCY_CODES = {
'EUR': '978',
'USD': '840',
'CHF': '756',
'GBP': '826',
'CAD': '124',
'JPY': '392',
'MXN': '484',
'TRY': '949',
'AUD': '036',
'NZD': '554',
'NOK': '578',
'BRL': '986',
'ARS': '032',
'KHR': '116',
'TWD': '901',
}
class AcquirerSips(models.Model):
_inherit = 'payment.acquirer'
provider = fields.Selection(selection_add=[('sips', 'Sips')])
    sips_merchant_id = fields.Char('SIPS Merchant ID', required_if_provider='sips', groups='base.group_user')
sips_secret = fields.Char('SIPS Secret', size=64, required_if_provider='sips', groups='base.group_user')
def _get_sips_urls(self, environment):
url = {
'prod': 'https://payment-webinit.sips-atos.com/paymentInit',
'test': 'https://payment-webinit.simu.sips-atos.com/paymentInit', }
return {'sips_form_url': url.get(environment, url['test']), }
def _sips_generate_shasign(self, values):
if self.provider != 'sips':
raise ValidationError(_('Incorrect payment acquirer provider'))
data = values['Data']
key = u'002001000000001_KEY1'
if self.environment == 'prod':
key = getattr(self, 'sips_secret')
shasign = sha256(data + key)
return shasign.hexdigest()
@api.multi
def sips_form_generate_values(self, values):
self.ensure_one()
base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
currency = self.env['res.currency'].sudo().browse(values['currency_id'])
currency_code = CURRENCY_CODES.get(currency.name, False)
if not currency_code:
            raise ValidationError(_('Currency not supported by Worldline'))
amount = int(values['amount'] * 100)
if self.environment == 'prod':
merchant_id = getattr(self, 'sips_merchant_id')
key_version = '2'
else:
merchant_id = '002001000000001'
key_version = '1'
sips_tx_values = dict(values)
sips_tx_values.update({
'Data': u'amount=%s|' % amount +
u'currencyCode=%s|' % currency_code +
u'merchantId=%s|' % merchant_id +
u'normalReturnUrl=%s|' % urlparse.urljoin(base_url, SipsController._return_url) +
u'automaticResponseUrl=%s|' % urlparse.urljoin(base_url, SipsController._return_url) +
u'transactionReference=%s|' % values['reference'] +
u'statementReference=%s|' % values['reference'] +
u'keyVersion=%s' % key_version,
'InterfaceVersion': 'HP_2.3',
})
return_context = {}
if sips_tx_values.get('return_url'):
return_context[u'return_url'] = u'%s' % sips_tx_values.pop('return_url')
return_context[u'reference'] = u'%s' % sips_tx_values['reference']
sips_tx_values['Data'] += u'|returnContext=%s' % (json.dumps(return_context))
shasign = self._sips_generate_shasign(sips_tx_values)
sips_tx_values['Seal'] = shasign
return sips_tx_values
@api.multi
def sips_get_form_action_url(self):
self.ensure_one()
return self._get_sips_urls(self.environment)['sips_form_url']
class TxSips(models.Model):
_inherit = 'payment.transaction'
_sips_valid_tx_status = ['00']
_sips_wait_tx_status = ['90', '99']
_sips_refused_tx_status = ['05', '14', '34', '54', '75', '97']
_sips_error_tx_status = ['03', '12', '24', '25', '30', '40', '51', '63', '94']
_sips_pending_tx_status = ['60']
_sips_cancel_tx_status = ['17']
def _sips_data_to_object(self, data):
res = {}
for element in data.split('|'):
element_split = element.split('=')
res[element_split[0]] = element_split[1]
return res
@api.model
def _sips_form_get_tx_from_data(self, data):
data = self._sips_data_to_object(data.get('Data'))
reference = data.get('transactionReference')
if not reference:
custom = json.loads(data.pop('returnContext', False) or '{}')
reference = custom.get('reference')
payment_tx = self.search([('reference', '=', reference)])
if not payment_tx or len(payment_tx) > 1:
error_msg = _('Sips: received data for reference %s') % reference
if not payment_tx:
error_msg += _('; no order found')
else:
                error_msg += _('; multiple orders found')
_logger.error(error_msg)
raise ValidationError(error_msg)
return payment_tx
@api.multi
def _sips_form_get_invalid_parameters(self, data):
invalid_parameters = []
data = self._sips_data_to_object(data.get('Data'))
if self.acquirer_reference and data.get('transactionReference') != self.acquirer_reference:
invalid_parameters.append(('transactionReference', data.get('transactionReference'), self.acquirer_reference))
if float_compare(float(data.get('amount', '0.0')) / 100, self.amount, 2) != 0:
invalid_parameters.append(('amount', data.get('amount'), '%.2f' % self.amount))
if self.partner_reference and data.get('customerId') != self.partner_reference:
invalid_parameters.append(('customerId', data.get('customerId'), self.partner_reference))
return invalid_parameters
@api.multi
def _sips_form_validate(self, data):
data = self._sips_data_to_object(data.get('Data'))
status = data.get('responseCode')
data = {
'acquirer_reference': data.get('transactionReference'),
'partner_reference': data.get('customerId'),
'date_validate': data.get('transactionDateTime',
fields.Datetime.now())
}
res = False
if status in self._sips_valid_tx_status:
msg = 'Payment for tx ref: %s, got response [%s], set as done.' % \
(self.reference, status)
_logger.info(msg)
data.update(state='done', state_message=msg)
res = True
elif status in self._sips_error_tx_status:
msg = 'Payment for tx ref: %s, got response [%s], set as ' \
'error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_wait_tx_status:
msg = 'Received wait status for payment ref: %s, got response ' \
'[%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_refused_tx_status:
msg = 'Received refused status for payment ref: %s, got response' \
' [%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
elif status in self._sips_pending_tx_status:
msg = 'Payment ref: %s, got response [%s] set as pending.' \
% (self.reference, status)
data.update(state='pending', state_message=msg)
elif status in self._sips_cancel_tx_status:
msg = 'Received notification for payment ref: %s, got response ' \
'[%s], set as cancel.' % (self.reference, status)
data.update(state='cancel', state_message=msg)
else:
msg = 'Received unrecognized status for payment ref: %s, got ' \
'response [%s], set as error.' % (self.reference, status)
data.update(state='error', state_message=msg)
_logger.info(msg)
self.write(data)
return res
| true
| true
|
f7024e3194e8a4fab4ea64262a0690eef773048b
| 10,727
|
py
|
Python
|
tensorflow_probability/python/distributions/generalized_pareto_test.py
|
nbro/probability
|
07a6378155f0ed720b5aaccf5387e3f9a432bd10
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/distributions/generalized_pareto_test.py
|
nbro/probability
|
07a6378155f0ed720b5aaccf5387e3f9a432bd10
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_probability/python/distributions/generalized_pareto_test.py
|
nbro/probability
|
07a6378155f0ed720b5aaccf5387e3f9a432bd10
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Generalized Pareto distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
# Dependency imports
import hypothesis as hp
import hypothesis.strategies as hps
import numpy as np
from scipy import stats as sp_stats
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import hypothesis_testlib as tfp_hps
from tensorflow_probability.python.internal import test_util
tfd = tfp.distributions
# Pylint doesn't understand hps.composite.
# pylint: disable=no-value-for-parameter
@hps.composite
def generalized_paretos(draw, batch_shape=None):
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes())
constraints = dict(
loc=tfp_hps.identity_fn,
scale=tfp_hps.softplus_plus_eps(),
concentration=lambda x: tf.math.tanh(x) * 0.24) # <.25==safe for variance
params = draw(
tfp_hps.broadcasting_params(
batch_shape,
params_event_ndims=dict(loc=0, scale=0, concentration=0),
constraint_fn_for=constraints.get))
dist = tfd.GeneralizedPareto(validate_args=draw(hps.booleans()), **params)
if dist.batch_shape != batch_shape:
raise AssertionError('batch_shape mismatch: expect {} but got {}'.format(
batch_shape, dist))
return dist
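# Editor's note (illustrative, not part of the original file): being a
# hypothesis strategy, the composite above can also be sampled directly,
# e.g. `generalized_paretos(batch_shape=[2]).example()` yields a
# GeneralizedPareto with batch shape [2] and safely constrained parameters.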
@test_util.test_all_tf_execution_regimes
class GeneralizedParetoTest(test_util.TestCase):
@hp.given(generalized_paretos())
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testShape(self, dist):
# batch_shape == dist.batch_shape asserted in generalized_paretos()
self.assertEqual(dist.batch_shape, self.evaluate(dist.batch_shape_tensor()))
self.assertEqual(tf.TensorShape([]), dist.event_shape)
self.assertAllEqual([], self.evaluate(dist.event_shape_tensor()))
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testLogPDF(self, dist):
xs = self.evaluate(dist.sample())
logp = dist.log_prob(xs)
self.assertEqual(dist.batch_shape, logp.shape)
p = dist.prob(xs)
self.assertEqual(dist.batch_shape, p.shape)
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
expected_logp = sp_stats.genpareto(conc, loc=loc, scale=scale).logpdf(xs)
actual_logp = self.evaluate(logp)
self.assertAllClose(expected_logp, actual_logp, rtol=1e-5)
self.assertAllClose(np.exp(expected_logp), self.evaluate(p), rtol=1e-5)
def testLogPDFBoundary(self):
# When loc = concentration = 0, we have an exponential distribution. Check
# that at 0 we have finite log prob.
scale = np.array([0.1, 0.5, 1., 2., 5., 10.], dtype=np.float32)
dist = tfd.GeneralizedPareto(loc=0, scale=scale, concentration=0)
log_pdf = dist.log_prob(0.)
self.assertAllClose(-np.log(scale), self.evaluate(log_pdf), rtol=1e-5)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testCDF(self, dist):
xs = self.evaluate(dist.sample())
cdf = dist.cdf(xs)
self.assertEqual(dist.batch_shape, cdf.shape)
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
expected_cdf = sp_stats.genpareto(conc, loc=loc, scale=scale).cdf(xs)
self.assertAllClose(expected_cdf, self.evaluate(cdf), rtol=5e-5)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testMean(self, dist):
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
self.assertEqual(dist.batch_shape, dist.mean().shape)
if np.abs(conc) < 1e-5 and conc != 0:
return # scipy does badly at small nonzero concentrations.
expected = sp_stats.genpareto(conc, loc=loc, scale=scale).mean()
actual = self.evaluate(dist.mean())
self.assertAllClose(expected, actual, rtol=5e-4)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testVariance(self, dist):
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
self.assertEqual(dist.batch_shape, dist.variance().shape)
expected = sp_stats.genpareto(conc, loc=loc, scale=scale).var()
if np.abs(conc) < 1e-4 and conc != 0:
return # scipy does badly at small nonzero concentrations.
if expected <= 0:
return # scipy sometimes returns nonsense zero or negative variances.
actual = self.evaluate(dist.variance())
print('var', loc, scale, conc, expected, actual, file=sys.stderr)
self.assertAllClose(expected, actual, rtol=.01)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testEntropy(self, dist):
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
self.assertEqual(dist.batch_shape, dist.entropy().shape)
expected = sp_stats.genpareto.entropy(conc, loc=loc, scale=scale)
actual = self.evaluate(dist.entropy())
self.assertAllClose(expected, actual)
def testSample(self):
loc = np.float32(-7.5)
scale = np.float32(3.5)
conc = np.float32(0.07)
n = 100000
dist = tfd.GeneralizedPareto(loc=loc, scale=scale, concentration=conc)
samples = dist.sample(n, seed=test_util.test_seed())
sample_values = self.evaluate(samples)
self.assertEqual((n,), samples.shape)
self.assertEqual((n,), sample_values.shape)
self.assertTrue(self._kstest(loc, scale, conc, sample_values))
self.assertAllClose(
sp_stats.genpareto.mean(conc, loc=loc, scale=scale),
sample_values.mean(),
rtol=.005)
self.assertAllClose(
sp_stats.genpareto.var(conc, loc=loc, scale=scale),
sample_values.var(),
rtol=.01)
def testFullyReparameterized(self):
loc = tf.constant(4.0)
scale = tf.constant(3.0)
conc = tf.constant(2.0)
_, grads = tfp.math.value_and_gradient(
lambda *args: tfd.GeneralizedPareto(*args).sample(100),
[loc, scale, conc])
self.assertLen(grads, 3)
self.assertAllNotNone(grads)
def testSampleKolmogorovSmirnovMultiDimensional(self):
loc = np.linspace(-10, 10, 3).reshape(3, 1, 1)
scale = np.linspace(1e-6, 7, 5).reshape(5, 1)
conc = np.linspace(-1.3, 1.3, 7)
dist = tfd.GeneralizedPareto(loc=loc, scale=scale, concentration=conc)
n = 10000
samples = dist.sample(n, seed=test_util.test_seed())
sample_values = self.evaluate(samples)
self.assertEqual((n, 3, 5, 7), samples.shape)
self.assertEqual((n, 3, 5, 7), sample_values.shape)
fails = 0
trials = 0
for li, l in enumerate(loc.reshape(-1)):
for si, s in enumerate(scale.reshape(-1)):
for ci, c in enumerate(conc.reshape(-1)):
samps = sample_values[:, li, si, ci]
trials += 1
fails += 0 if self._kstest(l, s, c, samps) else 1
self.assertLess(fails, trials * 0.01)
def _kstest(self, loc, scale, conc, samples):
# Uses the Kolmogorov-Smirnov test for goodness of fit.
ks, _ = sp_stats.kstest(samples,
sp_stats.genpareto(conc, loc=loc, scale=scale).cdf)
# Return True when the test passes.
return ks < 0.02
def testPdfOfSampleMultiDims(self):
dist = tfd.GeneralizedPareto(
loc=0, scale=[[2.], [3.]], concentration=[-.37, .11])
num = 50000
samples = dist.sample(num, seed=test_util.test_seed())
pdfs = dist.prob(samples)
sample_vals, pdf_vals = self.evaluate([samples, pdfs])
self.assertEqual((num, 2, 2), samples.shape)
self.assertEqual((num, 2, 2), pdfs.shape)
self._assertIntegral(sample_vals[:, 0, 0], pdf_vals[:, 0, 0], err=0.02)
self._assertIntegral(sample_vals[:, 0, 1], pdf_vals[:, 0, 1], err=0.02)
self._assertIntegral(sample_vals[:, 1, 0], pdf_vals[:, 1, 0], err=0.02)
self._assertIntegral(sample_vals[:, 1, 1], pdf_vals[:, 1, 1], err=0.02)
  def _assertIntegral(self, sample_vals, pdf_vals, err=1e-3):
    # Trapezoidal-rule integration of the empirical pdf over the sorted
    # samples; the accumulated probability mass should be close to 1.
    s_p = zip(sample_vals, pdf_vals)
    prev = (0, 0)
    total = 0
    for k in sorted(s_p, key=lambda x: x[0]):
      pair_pdf = (k[1] + prev[1]) / 2
      total += (k[0] - prev[0]) * pair_pdf
      prev = k
    self.assertNear(1., total, err=err)
def testNonPositiveInitializationParamsRaises(self):
scale = tf.constant(0.0, name='scale')
with self.assertRaisesOpError('Argument `scale` must be positive.'):
dist = tfd.GeneralizedPareto(
loc=0, scale=scale, concentration=1, validate_args=True)
self.evaluate(dist.mean())
def testGradientThroughConcentration(self):
concentration = tf.Variable(3.)
d = tfd.GeneralizedPareto(loc=0, scale=1, concentration=concentration)
with tf.GradientTape() as tape:
loss = -d.log_prob([1., 2., 4.])
grad = tape.gradient(loss, d.trainable_variables)
self.assertLen(grad, 1)
self.assertAllNotNone(grad)
def testAssertsPositiveScale(self):
scale = tf.Variable([1., 2., -3.])
self.evaluate(scale.initializer)
with self.assertRaisesOpError('Argument `scale` must be positive.'):
d = tfd.GeneralizedPareto(
loc=0, scale=scale, concentration=1, validate_args=True)
self.evaluate(d.sample())
def testAssertsPositiveScaleAfterMutation(self):
scale = tf.Variable([1., 2., 3.])
self.evaluate(scale.initializer)
d = tfd.GeneralizedPareto(
loc=0, scale=scale, concentration=0.25, validate_args=True)
self.evaluate(d.mean())
with self.assertRaisesOpError('Argument `scale` must be positive.'):
with tf.control_dependencies([scale.assign([1., 2., -3.])]):
self.evaluate(d.sample())
def testGradientThroughLocScale(self):
loc = tf.Variable(1.)
scale = tf.Variable(2.5)
d = tfd.GeneralizedPareto(loc=loc, scale=scale, concentration=.15)
with tf.GradientTape() as tape:
loss = -d.log_prob([1., 2., 4.])
grads = tape.gradient(loss, d.trainable_variables)
self.assertLen(grads, 2)
self.assertAllNotNone(grads)
if __name__ == '__main__':
tf.test.main()
| 39.149635
| 80
| 0.690873
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import hypothesis as hp
import hypothesis.strategies as hps
import numpy as np
from scipy import stats as sp_stats
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import hypothesis_testlib as tfp_hps
from tensorflow_probability.python.internal import test_util
tfd = tfp.distributions
# pylint: disable=no-value-for-parameter
@hps.composite
def generalized_paretos(draw, batch_shape=None):
if batch_shape is None:
batch_shape = draw(tfp_hps.shapes())
constraints = dict(
loc=tfp_hps.identity_fn,
scale=tfp_hps.softplus_plus_eps(),
concentration=lambda x: tf.math.tanh(x) * 0.24) # <.25==safe for variance
params = draw(
tfp_hps.broadcasting_params(
batch_shape,
params_event_ndims=dict(loc=0, scale=0, concentration=0),
constraint_fn_for=constraints.get))
dist = tfd.GeneralizedPareto(validate_args=draw(hps.booleans()), **params)
if dist.batch_shape != batch_shape:
raise AssertionError('batch_shape mismatch: expect {} but got {}'.format(
batch_shape, dist))
return dist
@test_util.test_all_tf_execution_regimes
class GeneralizedParetoTest(test_util.TestCase):
@hp.given(generalized_paretos())
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testShape(self, dist):
# batch_shape == dist.batch_shape asserted in generalized_paretos()
self.assertEqual(dist.batch_shape, self.evaluate(dist.batch_shape_tensor()))
self.assertEqual(tf.TensorShape([]), dist.event_shape)
self.assertAllEqual([], self.evaluate(dist.event_shape_tensor()))
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testLogPDF(self, dist):
xs = self.evaluate(dist.sample())
logp = dist.log_prob(xs)
self.assertEqual(dist.batch_shape, logp.shape)
p = dist.prob(xs)
self.assertEqual(dist.batch_shape, p.shape)
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
expected_logp = sp_stats.genpareto(conc, loc=loc, scale=scale).logpdf(xs)
actual_logp = self.evaluate(logp)
self.assertAllClose(expected_logp, actual_logp, rtol=1e-5)
self.assertAllClose(np.exp(expected_logp), self.evaluate(p), rtol=1e-5)
def testLogPDFBoundary(self):
# When loc = concentration = 0, we have an exponential distribution. Check
# that at 0 we have finite log prob.
scale = np.array([0.1, 0.5, 1., 2., 5., 10.], dtype=np.float32)
dist = tfd.GeneralizedPareto(loc=0, scale=scale, concentration=0)
log_pdf = dist.log_prob(0.)
self.assertAllClose(-np.log(scale), self.evaluate(log_pdf), rtol=1e-5)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testCDF(self, dist):
xs = self.evaluate(dist.sample())
cdf = dist.cdf(xs)
self.assertEqual(dist.batch_shape, cdf.shape)
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
expected_cdf = sp_stats.genpareto(conc, loc=loc, scale=scale).cdf(xs)
self.assertAllClose(expected_cdf, self.evaluate(cdf), rtol=5e-5)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testMean(self, dist):
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
self.assertEqual(dist.batch_shape, dist.mean().shape)
if np.abs(conc) < 1e-5 and conc != 0:
return # scipy does badly at small nonzero concentrations.
expected = sp_stats.genpareto(conc, loc=loc, scale=scale).mean()
actual = self.evaluate(dist.mean())
self.assertAllClose(expected, actual, rtol=5e-4)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testVariance(self, dist):
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
self.assertEqual(dist.batch_shape, dist.variance().shape)
expected = sp_stats.genpareto(conc, loc=loc, scale=scale).var()
if np.abs(conc) < 1e-4 and conc != 0:
return # scipy does badly at small nonzero concentrations.
if expected <= 0:
return # scipy sometimes returns nonsense zero or negative variances.
actual = self.evaluate(dist.variance())
print('var', loc, scale, conc, expected, actual, file=sys.stderr)
self.assertAllClose(expected, actual, rtol=.01)
@hp.given(generalized_paretos(batch_shape=[]))
@tfp_hps.tfp_hp_settings(default_max_examples=5)
def testEntropy(self, dist):
loc, scale, conc = self.evaluate([dist.loc, dist.scale, dist.concentration])
self.assertEqual(dist.batch_shape, dist.entropy().shape)
expected = sp_stats.genpareto.entropy(conc, loc=loc, scale=scale)
actual = self.evaluate(dist.entropy())
self.assertAllClose(expected, actual)
def testSample(self):
loc = np.float32(-7.5)
scale = np.float32(3.5)
conc = np.float32(0.07)
n = 100000
dist = tfd.GeneralizedPareto(loc=loc, scale=scale, concentration=conc)
samples = dist.sample(n, seed=test_util.test_seed())
sample_values = self.evaluate(samples)
self.assertEqual((n,), samples.shape)
self.assertEqual((n,), sample_values.shape)
self.assertTrue(self._kstest(loc, scale, conc, sample_values))
self.assertAllClose(
sp_stats.genpareto.mean(conc, loc=loc, scale=scale),
sample_values.mean(),
rtol=.005)
self.assertAllClose(
sp_stats.genpareto.var(conc, loc=loc, scale=scale),
sample_values.var(),
rtol=.01)
def testFullyReparameterized(self):
loc = tf.constant(4.0)
scale = tf.constant(3.0)
conc = tf.constant(2.0)
_, grads = tfp.math.value_and_gradient(
lambda *args: tfd.GeneralizedPareto(*args).sample(100),
[loc, scale, conc])
self.assertLen(grads, 3)
self.assertAllNotNone(grads)
def testSampleKolmogorovSmirnovMultiDimensional(self):
loc = np.linspace(-10, 10, 3).reshape(3, 1, 1)
scale = np.linspace(1e-6, 7, 5).reshape(5, 1)
conc = np.linspace(-1.3, 1.3, 7)
dist = tfd.GeneralizedPareto(loc=loc, scale=scale, concentration=conc)
n = 10000
samples = dist.sample(n, seed=test_util.test_seed())
sample_values = self.evaluate(samples)
self.assertEqual((n, 3, 5, 7), samples.shape)
self.assertEqual((n, 3, 5, 7), sample_values.shape)
fails = 0
trials = 0
for li, l in enumerate(loc.reshape(-1)):
for si, s in enumerate(scale.reshape(-1)):
for ci, c in enumerate(conc.reshape(-1)):
samps = sample_values[:, li, si, ci]
trials += 1
fails += 0 if self._kstest(l, s, c, samps) else 1
self.assertLess(fails, trials * 0.01)
def _kstest(self, loc, scale, conc, samples):
# Uses the Kolmogorov-Smirnov test for goodness of fit.
ks, _ = sp_stats.kstest(samples,
sp_stats.genpareto(conc, loc=loc, scale=scale).cdf)
# Return True when the test passes.
return ks < 0.02
def testPdfOfSampleMultiDims(self):
dist = tfd.GeneralizedPareto(
loc=0, scale=[[2.], [3.]], concentration=[-.37, .11])
num = 50000
samples = dist.sample(num, seed=test_util.test_seed())
pdfs = dist.prob(samples)
sample_vals, pdf_vals = self.evaluate([samples, pdfs])
self.assertEqual((num, 2, 2), samples.shape)
self.assertEqual((num, 2, 2), pdfs.shape)
self._assertIntegral(sample_vals[:, 0, 0], pdf_vals[:, 0, 0], err=0.02)
self._assertIntegral(sample_vals[:, 0, 1], pdf_vals[:, 0, 1], err=0.02)
self._assertIntegral(sample_vals[:, 1, 0], pdf_vals[:, 1, 0], err=0.02)
self._assertIntegral(sample_vals[:, 1, 1], pdf_vals[:, 1, 1], err=0.02)
def _assertIntegral(self, sample_vals, pdf_vals, err=1e-3):
s_p = zip(sample_vals, pdf_vals)
prev = (0, 0)
total = 0
for k in sorted(s_p, key=lambda x: x[0]):
pair_pdf = (k[1] + prev[1]) / 2
total += (k[0] - prev[0]) * pair_pdf
prev = k
self.assertNear(1., total, err=err)
def testNonPositiveInitializationParamsRaises(self):
scale = tf.constant(0.0, name='scale')
with self.assertRaisesOpError('Argument `scale` must be positive.'):
dist = tfd.GeneralizedPareto(
loc=0, scale=scale, concentration=1, validate_args=True)
self.evaluate(dist.mean())
def testGradientThroughConcentration(self):
concentration = tf.Variable(3.)
d = tfd.GeneralizedPareto(loc=0, scale=1, concentration=concentration)
with tf.GradientTape() as tape:
loss = -d.log_prob([1., 2., 4.])
grad = tape.gradient(loss, d.trainable_variables)
self.assertLen(grad, 1)
self.assertAllNotNone(grad)
def testAssertsPositiveScale(self):
scale = tf.Variable([1., 2., -3.])
self.evaluate(scale.initializer)
with self.assertRaisesOpError('Argument `scale` must be positive.'):
d = tfd.GeneralizedPareto(
loc=0, scale=scale, concentration=1, validate_args=True)
self.evaluate(d.sample())
def testAssertsPositiveScaleAfterMutation(self):
scale = tf.Variable([1., 2., 3.])
self.evaluate(scale.initializer)
d = tfd.GeneralizedPareto(
loc=0, scale=scale, concentration=0.25, validate_args=True)
self.evaluate(d.mean())
with self.assertRaisesOpError('Argument `scale` must be positive.'):
with tf.control_dependencies([scale.assign([1., 2., -3.])]):
self.evaluate(d.sample())
def testGradientThroughLocScale(self):
loc = tf.Variable(1.)
scale = tf.Variable(2.5)
d = tfd.GeneralizedPareto(loc=loc, scale=scale, concentration=.15)
with tf.GradientTape() as tape:
loss = -d.log_prob([1., 2., 4.])
grads = tape.gradient(loss, d.trainable_variables)
self.assertLen(grads, 2)
self.assertAllNotNone(grads)
if __name__ == '__main__':
tf.test.main()
| true
| true
|
f7024e4804728c23068aca067b592692e83ed126
| 551
|
py
|
Python
|
src/py/fun/vect3dotfun.py
|
butiran/butiran.github.io
|
bf99f55819a140190e5bda8f9675109ef607eb9d
|
[
"MIT"
] | null | null | null |
src/py/fun/vect3dotfun.py
|
butiran/butiran.github.io
|
bf99f55819a140190e5bda8f9675109ef607eb9d
|
[
"MIT"
] | 2
|
2020-08-08T13:57:20.000Z
|
2020-08-08T14:18:05.000Z
|
src/py/fun/vect3dotfun.py
|
butiran/butiran.github.io
|
bf99f55819a140190e5bda8f9675109ef607eb9d
|
[
"MIT"
] | 1
|
2020-08-08T13:54:23.000Z
|
2020-08-08T13:54:23.000Z
|
#
# vect3dotfun.py
# Dot product of two 3-d vectors using function in Python 3.7
#
# Sparisoma Viridi | https://github.com/dudung
#
# 20210110
# 2001 Start creating this example.
# 2002 Tested it; ok.
#
# Define dot function with two arguments
def dot(a, b):
p = a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
return p
# Define two vectors using arrays
r1 = [1, 2, 3]
r2 = [2, 2, 9]
# Calculate dot product of two vectors
p = dot(r1, r2)
# Display result
print("r1 = ", r1, sep="");
print("r2 = ", r2, sep="");
print("p = r1 \xb7 r2 = ", p, sep="");
| 19.678571
| 61
| 0.607985
|
def dot(a, b):
p = a[0] * b[0] + a[1] * b[1] + a[2] * b[2]
return p
r1 = [1, 2, 3]
r2 = [2, 2, 9]
p = dot(r1, r2)
print("r1 = ", r1, sep="");
print("r2 = ", r2, sep="");
print("p = r1 \xb7 r2 = ", p, sep="");
| true
| true
|
f7024e6e79ada62ffe1b48f5784bcfb79e415eb6
| 555
|
py
|
Python
|
lambda_handlers/types.py
|
renovate-tests/lambda-handlers
|
0b14013f19b597524a8d50f7ea8813ee726c584c
|
[
"Apache-2.0"
] | null | null | null |
lambda_handlers/types.py
|
renovate-tests/lambda-handlers
|
0b14013f19b597524a8d50f7ea8813ee726c584c
|
[
"Apache-2.0"
] | null | null | null |
lambda_handlers/types.py
|
renovate-tests/lambda-handlers
|
0b14013f19b597524a8d50f7ea8813ee726c584c
|
[
"Apache-2.0"
] | null | null | null |
from typing import Any, Dict, Union, Optional
from dataclasses import asdict, dataclass
Headers = Optional[Dict[str, Union[str, bool, int]]]
@dataclass
class APIGatewayProxyResult:
"""
Key names are expected and given by AWS APIGateway specifications and must not be changed
"""
statusCode: int
body: Union[str, Dict[str, Any]]
headers: Headers = None
multiValueHeaders: Headers = None
isBase64Encoded: Optional[bool] = None
def asdict(self):
return {k: v for k, v in asdict(self).items() if v is not None}
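# Editor's sketch of the expected behaviour (hypothetical usage, not part
# of the original module):
#
#     >>> APIGatewayProxyResult(statusCode=200, body='ok').asdict()
#     {'statusCode': 200, 'body': 'ok'}
#
# None-valued optional fields are dropped so only the keys API Gateway
# needs are serialized.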
| 27.75
| 93
| 0.695495
|
from typing import Any, Dict, Union, Optional
from dataclasses import asdict, dataclass
Headers = Optional[Dict[str, Union[str, bool, int]]]
@dataclass
class APIGatewayProxyResult:
statusCode: int
body: Union[str, Dict[str, Any]]
headers: Headers = None
multiValueHeaders: Headers = None
isBase64Encoded: Optional[bool] = None
def asdict(self):
return {k: v for k, v in asdict(self).items() if v is not None}
| true
| true
|
f7024f1af12ba2367ded56c8c7aef9e10ce2d174
| 206
|
py
|
Python
|
app/model/response.py
|
Djaler/VkGraph
|
473ad3703c16b7842b3834e062aecaa8057b00cc
|
[
"MIT"
] | 7
|
2017-03-22T13:41:52.000Z
|
2021-01-16T14:13:44.000Z
|
app/model/response.py
|
Djaler/VkGraph
|
473ad3703c16b7842b3834e062aecaa8057b00cc
|
[
"MIT"
] | 7
|
2019-10-30T18:44:47.000Z
|
2020-08-18T20:19:41.000Z
|
app/model/response.py
|
Djaler/VkGraph
|
473ad3703c16b7842b3834e062aecaa8057b00cc
|
[
"MIT"
] | 5
|
2017-06-16T10:34:59.000Z
|
2021-12-02T20:37:07.000Z
|
class Status:
OK = "OK"
ERROR = "ERROR"
class Response(dict):
def __init__(self, status, data):
super().__init__()
self["status"] = status
self["data"] = data
| 17.166667
| 37
| 0.524272
|
class Status:
OK = "OK"
ERROR = "ERROR"
class Response(dict):
def __init__(self, status, data):
super().__init__()
self["status"] = status
self["data"] = data
| true
| true
|
f7024fc117459b9452cffefdc8a339808e80919d
| 903
|
py
|
Python
|
py/worldfiles.py
|
Andreto/ga-utsikt
|
0efb5b8fe3589ed7ccd39a8a665c2674ecd3ea7e
|
[
"MIT"
] | null | null | null |
py/worldfiles.py
|
Andreto/ga-utsikt
|
0efb5b8fe3589ed7ccd39a8a665c2674ecd3ea7e
|
[
"MIT"
] | null | null | null |
py/worldfiles.py
|
Andreto/ga-utsikt
|
0efb5b8fe3589ed7ccd39a8a665c2674ecd3ea7e
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2022 Andreas Törnkvist | MIT License
import math
class worldfile:
def __init__(self, filename):
        # Read the six world file lines; the context manager closes the file.
        with open(filename) as wFile:
            w = wFile.readlines()
        w = [line.rstrip() for line in w]
self.A = float(w[0])
self.D = float(w[1])
self.B = float(w[2])
self.E = float(w[3])
self.C = float(w[4])
self.F = float(w[5])
Xv = math.atan(self.D/self.A)
Yv = math.atan(self.B/self.E)
self.Xx = (math.cos(Xv) ** 2) / self.A
self.Xy = (math.cos(Xv) * math.sin(Xv)) / self.A
self.Yy = (math.cos(Yv) ** 2) / self.E
self.Yx = (math.cos(Yv) * math.sin(Yv)) / self.E
def coordToPx(self, lon, lat):
Dx = lon - self.C
Dy = lat - self.F
Px = (Dx * self.Xx) + (Dy * self.Yx)
Py = (Dx * self.Xy) + (Dy * self.Yy)
return(Px, Py)
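# Editor's note (illustrative, not part of the original file): the six
# world-file lines are read in the standard ESRI order A (x pixel size),
# D (y rotation), B (x rotation), E (y pixel size), C and F (coordinates
# of the upper-left reference point). Hypothetical usage:
#
#     wf = worldfile('map.pgw')   # file name is illustrative only
#     px, py = wf.coordToPx(18.07, 59.33)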
| 25.083333
| 56
| 0.487265
|
import math
class worldfile:
def __init__(self, filename):
        with open(filename) as wFile:
            w = wFile.readlines()
        w = [line.rstrip() for line in w]
self.A = float(w[0])
self.D = float(w[1])
self.B = float(w[2])
self.E = float(w[3])
self.C = float(w[4])
self.F = float(w[5])
Xv = math.atan(self.D/self.A)
Yv = math.atan(self.B/self.E)
self.Xx = (math.cos(Xv) ** 2) / self.A
self.Xy = (math.cos(Xv) * math.sin(Xv)) / self.A
self.Yy = (math.cos(Yv) ** 2) / self.E
self.Yx = (math.cos(Yv) * math.sin(Yv)) / self.E
def coordToPx(self, lon, lat):
Dx = lon - self.C
Dy = lat - self.F
Px = (Dx * self.Xx) + (Dy * self.Yx)
Py = (Dx * self.Xy) + (Dy * self.Yy)
return(Px, Py)
| true
| true
|
f7024fe456a0913f82cedffaff816db8b405cfa5
| 22,648
|
py
|
Python
|
python/tvm/relay/_parser.py
|
ptrendx/tvm
|
f07fe80aaf110086b651f2850506c803a5688ddb
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/relay/_parser.py
|
ptrendx/tvm
|
f07fe80aaf110086b651f2850506c803a5688ddb
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/relay/_parser.py
|
ptrendx/tvm
|
f07fe80aaf110086b651f2850506c803a5688ddb
|
[
"Apache-2.0"
] | 1
|
2019-12-03T13:53:44.000Z
|
2019-12-03T13:53:44.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-argument
"""A parser for Relay's text format."""
from __future__ import absolute_import
import sys
from ast import literal_eval
from collections import deque
import tvm
from . import module
from .base import Span, SourceName
from . import expr
from . import ty
from . import op
PYTHON_VERSION = sys.version_info.major
try:
from .grammar.py3.RelayVisitor import RelayVisitor
from .grammar.py3.RelayParser import RelayParser
from .grammar.py3.RelayLexer import RelayLexer
except ImportError:
raise Exception("Couldn't find ANTLR parser. Try building with USE_ANTLR=ON.")
try:
from antlr4 import InputStream, CommonTokenStream
from antlr4.error.ErrorListener import ErrorListener
except ImportError:
raise Exception("Couldn't find ANTLR runtime." +
"Try running `pip{version} install antlr4-python{version}-runtime`."
.format(version=PYTHON_VERSION))
sys.setrecursionlimit(10000)
class ParseError(Exception):
"""Exception type for parse errors."""
def __init__(self, message):
# type: (str) -> None
super(ParseError, self).__init__()
self.message = message
def __repr__(self):
return "ParseError({})".format(self.message)
def __str__(self):
return repr(self)
class OpWrapper:
"""Overload the __call__ for op."""
pass
class ExprOp(OpWrapper):
"""Call an expr. The default, but does not handle attrs well."""
def __init__(self, operator):
self.operator = operator
def __call__(self, args, attrs, type_args):
try:
return expr.Call(self.operator, args, attrs, type_args)
except Exception:
raise Exception("Operator {} is not registered. It's attributes are {}"
.format(self.operator, attrs))
class FuncOp(OpWrapper):
"""Convert the attrs, call the python function with the attrs passed in as keyword arguments.
Tvm should provide this in the future, as this is pretty similar to what op.get is providing.
"""
def __init__(self, operator):
self.operator = operator
def convert(self, v):
if isinstance(v, tuple):
return tuple([self.convert(x) for x in v])
if isinstance(v, expr.Constant):
return v.data.asnumpy().item()
if isinstance(v, str):
return v
raise Exception(v)
def __call__(self, args, attrs, type_args):
if attrs is None:
attrs = {}
x = self.operator(*args, **{k: self.convert(v) for k, v in attrs.items()})
if isinstance(x, expr.TupleWrapper):
x = x.astuple()
return x
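# Editor's note (hypothetical illustration, not part of the original file):
# FuncOp adapts a Python-level op to the parser's generic call path, e.g.
#
#     FuncOp(op.nn.softmax)([x], {"axis": expr.const(1)}, [])
#
# first unwraps the Constant attr to a Python scalar (axis=1) and then
# calls op.nn.softmax(x, axis=1).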
BINARY_OPS = {
RelayParser.MUL: op.multiply,
RelayParser.DIV: op.divide,
RelayParser.ADD: op.add,
RelayParser.SUB: op.subtract,
RelayParser.LT: op.less,
RelayParser.GT: op.greater,
RelayParser.LE: op.less_equal,
RelayParser.GE: op.greater_equal,
RelayParser.EQ: op.equal,
RelayParser.NE: op.not_equal,
}
FUNC_OPS = {
"nn.conv2d": op.nn.conv2d,
"nn.batch_norm": op.nn.batch_norm,
"nn.dense": op.nn.dense,
"nn.bias_add": op.nn.bias_add,
"nn.max_pool2d": op.nn.max_pool2d,
"nn.global_max_pool2d": op.nn.global_max_pool2d,
"nn.avg_pool2d": op.nn.avg_pool2d,
"nn.global_avg_pool2d": op.nn.global_avg_pool2d,
"nn.softmax": op.nn.softmax,
"reshape": op.reshape,
"nn.conv2d_transpose": op.nn.conv2d_transpose,
"concatenate": op.concatenate,
"nn.dropout": op.nn.dropout_raw,
"zeros": op.zeros,
"split": op.split,
"cast": op.cast
}
TYPE_PREFIXES = [
"int",
"uint",
"float",
"bool",
]
T = ty.TypeVar("T")
# Scope = Deque[Tuple[str, T]]
# Scopes = Deque[Scope[T]]
def lookup(scopes, name):
# type: (Scopes[T], str) -> Optional[T]
"""Look up `name` in `scopes`."""
for scope in scopes:
for key, val in scope:
if key == name:
return val
return None
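# Editor's note (illustrative example, not part of the original file):
#
#     >>> lookup(deque([deque([("x", 1)]), deque([("x", 0)])]), "x")
#     1
#
# i.e. scopes are searched innermost-first, so shadowed bindings resolve
# to the most recent definition.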
def spanify(f):
"""A decorator which attaches span information
to the value returned by calling `f`.
Intended for use with the below AST visiting
methods. The idea is that after we do the work
of constructing the AST we attach Span information.
"""
def _wrapper(*args, **kwargs):
# Assumes 0th arg is self and gets source_name from object.
sn = args[0].source_name
# Assumes 1st arg is an ANTLR parser context.
ctx = args[1]
ast = f(*args, **kwargs)
line, col = ctx.getSourceInterval()
sp = Span(sn, line, col)
if isinstance(ast, tvm.relay.expr.TupleWrapper):
ast = ast.astuple()
ast.set_span(sp)
return ast
return _wrapper
# TODO(@jmp): Use https://stackoverflow.com/q/13889941
# to figure out how to get ANTLR4 to be more unhappy about syntax errors
class ParseTreeToRelayIR(RelayVisitor):
"""Parse Relay text format into Relay IR."""
def __init__(self, source_name):
# type: (str) -> None
self.source_name = source_name
self.module = module.Module({}) # type: module.Module
# Adding an empty scope allows naked lets without pain.
self.var_scopes = deque([deque()]) # type: Scopes[expr.Var]
self.global_var_scope = deque() # type: Scope[expr.GlobalVar]
self.type_param_scopes = deque([deque()]) # type: Scopes[ty.TypeVar]
self.graph_expr = [] # type: List[expr.Expr]
super(ParseTreeToRelayIR, self).__init__()
def enter_var_scope(self):
# type: () -> None
"""Enter a new Var scope so it can be popped off later."""
self.var_scopes.appendleft(deque())
def exit_var_scope(self):
# type: () -> Scope[expr.Var]
"""Pop off the current Var scope and return it."""
return self.var_scopes.popleft()
def mk_var(self, name, type_):
# type: (str, ty.Type) -> expr.Var
"""Create a new Var and add it to the Var scope."""
var = expr.Var(name, type_)
self.var_scopes[0].appendleft((name, var))
return var
def mk_global_var(self, name):
# type: (str) -> expr.GlobalVar
"""Create a new GlobalVar and add it to the GlobalVar scope."""
var = expr.GlobalVar(name)
self.global_var_scope.append((name, var))
return var
def enter_type_param_scope(self):
# type: () -> None
"""Enter a new TypeVar scope so it can be popped off later."""
self.type_param_scopes.appendleft(deque())
def exit_type_param_scope(self):
# type: () -> Scope[ty.TypeVar]
"""Pop off the current TypeVar scope and return it."""
return self.type_param_scopes.popleft()
def mk_typ(self, name, kind):
# (str, ty.Kind) -> ty.TypeVar
"""Create a new TypeVar and add it to the TypeVar scope."""
typ = ty.TypeVar(name, kind)
self.type_param_scopes[0].appendleft((name, typ))
return typ
def visitProjection(self, ctx):
return expr.TupleGetItem(self.visit(ctx.expr()), self.visit(ctx.NAT()))
def visitTerminal(self, node):
# type: (TerminalNode) -> Union[expr.Expr, int, float]
"""Visit lexer tokens that aren't ignored or visited by other functions."""
node_type = node.getSymbol().type
node_text = node.getText()
name = node_text[1:]
# variables
if node_type == RelayLexer.GLOBAL_VAR:
return lookup(deque([self.global_var_scope]), node_text[1:])
if node_type == RelayLexer.LOCAL_VAR:
# Remove the leading '%' and lookup the name.
var = lookup(self.var_scopes, name)
if var is None:
raise ParseError("Couldn't resolve `{}`.".format(name))
return var
if node_type == RelayLexer.GRAPH_VAR:
try:
return self.graph_expr[int(name)]
except IndexError:
raise ParseError("Couldn't resolve `{}`".format(name))
# data types
if node_type == RelayLexer.NAT:
return int(node_text)
if node_type == RelayLexer.FLOAT:
return float(node_text[:-1])
if node_type == RelayLexer.BOOL_LIT:
if node_text == "True":
return True
if node_text == "False":
return False
raise ParseError("Unrecognized BOOL_LIT: `{}`".format(node_text))
if node_type == RelayLexer.QUOTED_STRING:
return literal_eval(node_text)
raise ParseError("todo: `{}`".format(node_text))
def visit_list(self, ctx_list):
# type: (List[ParserRuleContext]) -> List[Any]
""""Visit a list of contexts."""
assert isinstance(ctx_list, list)
return [self.visit(ctx) for ctx in ctx_list]
def getType_(self, ctx):
# type: (Optional[RelayParser.Type_Context]) -> Optional[ty.Type]
"""Return a (possibly None) Relay type."""
if ctx is None:
return None
return self.visit(ctx)
def visitProg(self, ctx):
self.meta = None
if ctx.METADATA():
header, data = str(ctx.METADATA()).split('\n', 1)
assert header == "METADATA:"
self.meta = tvm.load_json(data)
# type: (RelayParser.ProgContext) -> Union[expr.Expr, module.Module]
if ctx.defn():
self.visit_list(ctx.defn())
return self.module
if ctx.expr():
return self.visit(ctx.expr())
return self.module
# Exprs
def visitOpIdent(self, ctx):
# type: (RelayParser.OpIdentContext) -> op.Op
op_name = ctx.CNAME().getText()
if op_name in FUNC_OPS:
return FuncOp(FUNC_OPS[op_name])
return ExprOp(op.get(op_name))
# pass through
def visitParen(self, ctx):
# type: (RelayParser.ParenContext) -> expr.Expr
return self.visit(ctx.expr())
# pass through
def visitBody(self, ctx):
# type: (RelayParser.BodyContext) -> expr.Expr
return self.visit(ctx.expr())
def visitScalarFloat(self, ctx):
# type: (RelayParser.ScalarFloatContext) -> expr.Constant
return expr.const(self.visit(ctx.FLOAT()))
def visitScalarInt(self, ctx):
# type: (RelayParser.ScalarIntContext) -> expr.Constant
return expr.const(self.visit(ctx.NAT()))
def visitScalarBool(self, ctx):
# type: (RelayParser.ScalarBoolContext) -> expr.Constant
return expr.const(self.visit(ctx.BOOL_LIT()))
def visitNeg(self, ctx):
# type: (RelayParser.NegContext) -> Union[expr.Constant, expr.Call]
val = self.visit(ctx.expr())
if isinstance(val, expr.Constant) and val.data.asnumpy().ndim == 0:
# fold Neg in for scalars
return expr.const(-val.data.asnumpy().item())
return op.negative(val)
def visitTuple(self, ctx):
# type: (RelayParser.TupleContext) -> expr.Tuple
tup = self.visit_list(ctx.expr())
return expr.Tuple(tup)
def visitLet(self, ctx):
# type: (RelayParser.SeqContext) -> expr.Let
"""Desugar various sequence constructs to Relay Let nodes."""
if ctx.var() is None:
# anonymous identity
ident = "_"
type_ = None
var = self.mk_var(ident, type_)
else:
var = self.visitVar(ctx.var())
self.enter_var_scope()
value = self.visit(ctx.expr(0))
self.exit_var_scope()
body = self.visit(ctx.expr(1))
return expr.Let(var, value, body)
def visitBinOp(self, ctx):
# type: (RelayParser.BinOpContext) -> expr.Call
"""Desugar binary operators."""
arg0, arg1 = self.visit_list(ctx.expr())
relay_op = BINARY_OPS.get(ctx.op.type)
if relay_op is None:
raise ParseError("Unimplemented binary op.")
return relay_op(arg0, arg1)
@spanify
def visitVar(self, ctx):
# type: (RelayParser.VarContext) -> expr.Var
"""Visit a single variable."""
ident = ctx.LOCAL_VAR()
if ident is None:
raise ParseError("Only local ids may be used in vars.")
type_ = self.getType_(ctx.type_())
return self.mk_var(ident.getText()[1:], type_)
def visitVarList(self, ctx):
# type: (RelayParser.VarListContext) -> List[expr.Var]
return self.visit_list(ctx.var())
# TODO: support a larger class of values than just Relay exprs
def visitAttr(self, ctx):
# type: (RelayParser.AttrContext) -> Tuple[str, expr.Expr]
return (ctx.CNAME().getText(), self.visit(ctx.expr()))
def visitArgNoAttr(self, ctx):
return (self.visit_list(ctx.varList().var()), None)
def visitAttrSeq(self, ctx):
# type: (RelayParser.AttrListContext) -> Dict[str, expr.Expr]
return dict(self.visit_list(ctx.attr()))
def visitArgWithAttr(self, ctx):
return (self.visit_list(ctx.var()), self.visitAttrSeq(ctx.attrSeq()))
def visitArgList(self,
ctx # type: RelayParser.ArgListContext
):
# type: (...) -> Tuple[Optional[List[expr.Var]], Optional[Dict[str, expr.Expr]]]
var_list = self.visit(ctx.varList()) if ctx.varList() else None
attr_list = self.visit(ctx.attrList()) if ctx.attrList() else None
return (var_list, attr_list)
def visitMeta(self, ctx):
type_key = str(ctx.CNAME())
index = int(self.visit(ctx.NAT()))
return self.meta[type_key][index]
def mk_func(self, ctx):
# type: (Union[RelayParser.FuncContext, RelayParser.DefnContext]) -> expr.Function
"""Construct a function from either a Func or Defn."""
# Enter var scope early to put params in scope.
self.enter_var_scope()
# Capture type params in params.
self.enter_type_param_scope()
type_params = ctx.typeParamList()
if type_params is not None:
type_params = type_params.ident()
assert type_params
for ty_param in type_params:
name = ty_param.getText()
self.mk_typ(name, ty.Kind.Type)
var_list, attr_list = self.visit(ctx.argList())
if var_list is None:
var_list = []
ret_type = self.getType_(ctx.type_())
body = self.visit(ctx.body())
# NB(@jroesch): you must stay in the type parameter scope until
# after you exit the body, you can reference the type parameters
# of your parent scopes.
type_params = list(self.exit_type_param_scope())
if type_params:
_, type_params = zip(*type_params)
self.exit_var_scope()
attrs = tvm.make.node("DictAttrs", **attr_list) if attr_list is not None else None
return expr.Function(var_list, body, ret_type, type_params, attrs)
@spanify
def visitFunc(self, ctx):
# type: (RelayParser.FuncContext) -> expr.Function
return self.mk_func(ctx)
# TODO: how to set spans for definitions?
# @spanify
def visitDefn(self, ctx):
# type: (RelayParser.DefnContext) -> None
ident = ctx.ident().GLOBAL_VAR()
if ident is None:
raise ParseError("Only global ids may be used in `def`s.")
ident_name = ident.getText()[1:]
ident = self.mk_global_var(ident_name)
self.module[ident] = self.mk_func(ctx)
def visitCallNoAttr(self, ctx):
return (self.visit_list(ctx.exprList().expr()), None)
def visitCallWithAttr(self, ctx):
return (self.visit_list(ctx.expr()), self.visit(ctx.attrSeq()))
def call(self, func, args, attrs, type_args):
if isinstance(func, OpWrapper):
return func(args, attrs, type_args)
return expr.Call(func, args, attrs, type_args)
@spanify
def visitCall(self, ctx):
# type: (RelayParser.CallContext) -> expr.Call
func = self.visit(ctx.expr())
args, attrs = self.visit(ctx.callList())
return self.call(func, args, attrs, [])
@spanify
def visitIfElse(self, ctx):
# type: (RelayParser.IfElseContext) -> expr.If
"""Construct a Relay If node. Creates a new scope for each branch."""
cond = self.visit(ctx.expr())
self.enter_var_scope()
true_branch = self.visit(ctx.body(0))
self.exit_var_scope()
self.enter_var_scope()
false_branch = self.visit(ctx.body(1))
self.exit_var_scope()
return expr.If(cond, true_branch, false_branch)
@spanify
def visitGraph(self, ctx):
# type: (RelayParser.GraphContext) -> expr.Expr
"""Visit a graph variable assignment."""
graph_nid = int(ctx.GRAPH_VAR().getText()[1:])
self.enter_var_scope()
value = self.visit(ctx.expr(0))
self.exit_var_scope()
if graph_nid != len(self.graph_expr):
            raise ParseError(
                "Expected new graph variable to be `%{}`, ".format(len(self.graph_expr)) +
                "but got `%{}`".format(graph_nid))
self.graph_expr.append(value)
kont = self.visit(ctx.expr(1))
return kont
# Types
# pylint: disable=unused-argument
def visitIncompleteType(self, ctx):
# type (RelayParser.IncompleteTypeContext) -> None:
return None
def visitTypeIdent(self, ctx):
# type: (RelayParser.TypeIdentContext) -> Union[ty.TensorType, str]
        """Handle type identifier."""
type_ident = ctx.CNAME().getText()
# Look through all type prefixes for a match
for type_prefix in TYPE_PREFIXES:
if type_ident.startswith(type_prefix):
return ty.scalar_type(type_ident)
type_param = lookup(self.type_param_scopes, type_ident)
if type_param is not None:
return type_param
raise ParseError("Unknown builtin type: {}".format(type_ident))
# def visitCallType(self, ctx):
# # type: (RelayParser.CallTypeContext) -> Union[expr.Expr, ty.TensorType]
# ident_type = ctx.identType().CNAME().getText()
# args = self.visit_list(ctx.type_())
# if not args:
# raise ParseError("Type-level functions must have arguments!")
# func_type = TYPE_FUNCS.get(ident_type)(args)
# if func_type is None:
# raise ParseError("Unknown type-level function: `{}`".format(ident_type))
# else:
# return func_type
def visitParensShape(self, ctx):
# type: (RelayParser.ParensShapeContext) -> int
return self.visit(ctx.shape())
def visitShapeList(self, ctx):
# type: (RelayParser.ShapeListContext) -> List[int]
return self.visit_list(ctx.shape())
def visitTensor(self, ctx):
return tuple(self.visit_list(ctx.expr()))
def visitTensorType(self, ctx):
# type: (RelayParser.TensorTypeContext) -> ty.TensorType
"""Create a simple tensor type. No generics."""
shape = self.visit(ctx.shapeList())
dtype = self.visit(ctx.type_())
if not isinstance(dtype, ty.TensorType):
raise ParseError("Expected dtype to be a Relay base type.")
dtype = dtype.dtype
return ty.TensorType(shape, dtype)
def visitTupleType(self, ctx):
# type: (RelayParser.TupleTypeContext) -> ty.TupleType
return ty.TupleType(self.visit_list(ctx.type_()))
def visitFuncType(self, ctx):
# type: (RelayParser.FuncTypeContext) -> ty.FuncType
types = self.visit_list(ctx.type_())
arg_types = types[:-1]
ret_type = types[-1]
return ty.FuncType(arg_types, ret_type, [], None)
def make_parser(data):
# type: (str) -> RelayParser
"""Construct a RelayParser a given data stream."""
input_stream = InputStream(data)
lexer = RelayLexer(input_stream)
lexer.addErrorListener(StrictErrorListener(data))
token_stream = CommonTokenStream(lexer)
p = RelayParser(token_stream)
p.addErrorListener(StrictErrorListener(data))
return p
__source_name_counter__ = 0
class StrictErrorListener(ErrorListener):
"""This ErrorListener fail eagerly on all error, and report the program."""
def __init__(self, text):
self.text = text
def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
raise Exception("Syntax Error in:\n" + self.text)
def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
raise Exception("Ambiguity Error in:\n" + self.text)
def reportAttemptingFullContext(self,
recognizer,
dfa,
startIndex,
stopIndex,
conflictingAlts,
configs):
raise Exception("Attempting Full Context in:\n" + self.text)
def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
raise Exception("Context Sensitivity in:\n" + self.text)
def fromtext(data, source_name=None):
# type: (str, str) -> Union[expr.Expr, module.Module]
"""Parse a Relay program."""
if data == "":
raise ParseError("Cannot parse the empty string.")
global __source_name_counter__
if source_name is None:
source_name = "source_file{0}".format(__source_name_counter__)
if isinstance(source_name, str):
source_name = SourceName(source_name)
tree = make_parser(data).prog()
return ParseTreeToRelayIR(source_name).visit(tree)
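# Editor's note (hypothetical usage; the accepted syntax is fixed by the
# ANTLR grammar files imported above):
#
#     prog = fromtext("fn (%x: float32) { %x }")
#
# A bare expression parses to a Relay expression, while a program made of
# `def`s parses to a Module.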
| 33.40413
| 100
| 0.616478
|
from __future__ import absolute_import
import sys
from ast import literal_eval
from collections import deque
import tvm
from . import module
from .base import Span, SourceName
from . import expr
from . import ty
from . import op
PYTHON_VERSION = sys.version_info.major
try:
from .grammar.py3.RelayVisitor import RelayVisitor
from .grammar.py3.RelayParser import RelayParser
from .grammar.py3.RelayLexer import RelayLexer
except ImportError:
raise Exception("Couldn't find ANTLR parser. Try building with USE_ANTLR=ON.")
try:
from antlr4 import InputStream, CommonTokenStream
from antlr4.error.ErrorListener import ErrorListener
except ImportError:
raise Exception("Couldn't find ANTLR runtime." +
"Try running `pip{version} install antlr4-python{version}-runtime`."
.format(version=PYTHON_VERSION))
sys.setrecursionlimit(10000)
class ParseError(Exception):
def __init__(self, message):
super(ParseError, self).__init__()
self.message = message
def __repr__(self):
return "ParseError({})".format(self.message)
def __str__(self):
return repr(self)
class OpWrapper:
pass
class ExprOp(OpWrapper):
def __init__(self, operator):
self.operator = operator
def __call__(self, args, attrs, type_args):
try:
return expr.Call(self.operator, args, attrs, type_args)
except Exception:
raise Exception("Operator {} is not registered. It's attributes are {}"
.format(self.operator, attrs))
class FuncOp(OpWrapper):
def __init__(self, operator):
self.operator = operator
def convert(self, v):
if isinstance(v, tuple):
return tuple([self.convert(x) for x in v])
if isinstance(v, expr.Constant):
return v.data.asnumpy().item()
if isinstance(v, str):
return v
raise Exception(v)
def __call__(self, args, attrs, type_args):
if attrs is None:
attrs = {}
x = self.operator(*args, **{k: self.convert(v) for k, v in attrs.items()})
if isinstance(x, expr.TupleWrapper):
x = x.astuple()
return x
BINARY_OPS = {
RelayParser.MUL: op.multiply,
RelayParser.DIV: op.divide,
RelayParser.ADD: op.add,
RelayParser.SUB: op.subtract,
RelayParser.LT: op.less,
RelayParser.GT: op.greater,
RelayParser.LE: op.less_equal,
RelayParser.GE: op.greater_equal,
RelayParser.EQ: op.equal,
RelayParser.NE: op.not_equal,
}
FUNC_OPS = {
"nn.conv2d": op.nn.conv2d,
"nn.batch_norm": op.nn.batch_norm,
"nn.dense": op.nn.dense,
"nn.bias_add": op.nn.bias_add,
"nn.max_pool2d": op.nn.max_pool2d,
"nn.global_max_pool2d": op.nn.global_max_pool2d,
"nn.avg_pool2d": op.nn.avg_pool2d,
"nn.global_avg_pool2d": op.nn.global_avg_pool2d,
"nn.softmax": op.nn.softmax,
"reshape": op.reshape,
"nn.conv2d_transpose": op.nn.conv2d_transpose,
"concatenate": op.concatenate,
"nn.dropout": op.nn.dropout_raw,
"zeros": op.zeros,
"split": op.split,
"cast": op.cast
}
TYPE_PREFIXES = [
"int",
"uint",
"float",
"bool",
]
T = ty.TypeVar("T")
# Scope = Deque[Tuple[str, T]]
# Scopes = Deque[Scope[T]]
def lookup(scopes, name):
# type: (Scopes[T], str) -> Optional[T]
for scope in scopes:
for key, val in scope:
if key == name:
return val
return None
def spanify(f):
def _wrapper(*args, **kwargs):
# Assumes 0th arg is self and gets source_name from object.
sn = args[0].source_name
# Assumes 1st arg is an ANTLR parser context.
ctx = args[1]
ast = f(*args, **kwargs)
line, col = ctx.getSourceInterval()
sp = Span(sn, line, col)
if isinstance(ast, tvm.relay.expr.TupleWrapper):
ast = ast.astuple()
ast.set_span(sp)
return ast
return _wrapper
# TODO(@jmp): Use https://stackoverflow.com/q/13889941
# to figure out how to get ANTLR4 to be more unhappy about syntax errors
class ParseTreeToRelayIR(RelayVisitor):
def __init__(self, source_name):
# type: (str) -> None
self.source_name = source_name
self.module = module.Module({}) # type: module.Module
# Adding an empty scope allows naked lets without pain.
self.var_scopes = deque([deque()]) # type: Scopes[expr.Var]
self.global_var_scope = deque() # type: Scope[expr.GlobalVar]
self.type_param_scopes = deque([deque()]) # type: Scopes[ty.TypeVar]
self.graph_expr = [] # type: List[expr.Expr]
super(ParseTreeToRelayIR, self).__init__()
def enter_var_scope(self):
# type: () -> None
self.var_scopes.appendleft(deque())
def exit_var_scope(self):
# type: () -> Scope[expr.Var]
return self.var_scopes.popleft()
def mk_var(self, name, type_):
# type: (str, ty.Type) -> expr.Var
var = expr.Var(name, type_)
self.var_scopes[0].appendleft((name, var))
return var
def mk_global_var(self, name):
# type: (str) -> expr.GlobalVar
var = expr.GlobalVar(name)
self.global_var_scope.append((name, var))
return var
def enter_type_param_scope(self):
# type: () -> None
self.type_param_scopes.appendleft(deque())
def exit_type_param_scope(self):
# type: () -> Scope[ty.TypeVar]
return self.type_param_scopes.popleft()
def mk_typ(self, name, kind):
# (str, ty.Kind) -> ty.TypeVar
typ = ty.TypeVar(name, kind)
self.type_param_scopes[0].appendleft((name, typ))
return typ
def visitProjection(self, ctx):
return expr.TupleGetItem(self.visit(ctx.expr()), self.visit(ctx.NAT()))
def visitTerminal(self, node):
# type: (TerminalNode) -> Union[expr.Expr, int, float]
node_type = node.getSymbol().type
node_text = node.getText()
name = node_text[1:]
# variables
if node_type == RelayLexer.GLOBAL_VAR:
return lookup(deque([self.global_var_scope]), node_text[1:])
if node_type == RelayLexer.LOCAL_VAR:
# Remove the leading '%' and lookup the name.
var = lookup(self.var_scopes, name)
if var is None:
raise ParseError("Couldn't resolve `{}`.".format(name))
return var
if node_type == RelayLexer.GRAPH_VAR:
try:
return self.graph_expr[int(name)]
except IndexError:
raise ParseError("Couldn't resolve `{}`".format(name))
# data types
if node_type == RelayLexer.NAT:
return int(node_text)
if node_type == RelayLexer.FLOAT:
return float(node_text[:-1])
if node_type == RelayLexer.BOOL_LIT:
if node_text == "True":
return True
if node_text == "False":
return False
raise ParseError("Unrecognized BOOL_LIT: `{}`".format(node_text))
if node_type == RelayLexer.QUOTED_STRING:
return literal_eval(node_text)
raise ParseError("todo: `{}`".format(node_text))
def visit_list(self, ctx_list):
# type: (List[ParserRuleContext]) -> List[Any]
assert isinstance(ctx_list, list)
return [self.visit(ctx) for ctx in ctx_list]
def getType_(self, ctx):
# type: (Optional[RelayParser.Type_Context]) -> Optional[ty.Type]
if ctx is None:
return None
return self.visit(ctx)
    def visitProg(self, ctx):
        # type: (RelayParser.ProgContext) -> Union[expr.Expr, module.Module]
        self.meta = None
        if ctx.METADATA():
            header, data = str(ctx.METADATA()).split('\n', 1)
            assert header == "METADATA:"
            self.meta = tvm.load_json(data)
if ctx.defn():
self.visit_list(ctx.defn())
return self.module
if ctx.expr():
return self.visit(ctx.expr())
return self.module
# Exprs
def visitOpIdent(self, ctx):
# type: (RelayParser.OpIdentContext) -> op.Op
op_name = ctx.CNAME().getText()
if op_name in FUNC_OPS:
return FuncOp(FUNC_OPS[op_name])
return ExprOp(op.get(op_name))
# pass through
def visitParen(self, ctx):
# type: (RelayParser.ParenContext) -> expr.Expr
return self.visit(ctx.expr())
# pass through
def visitBody(self, ctx):
# type: (RelayParser.BodyContext) -> expr.Expr
return self.visit(ctx.expr())
def visitScalarFloat(self, ctx):
# type: (RelayParser.ScalarFloatContext) -> expr.Constant
return expr.const(self.visit(ctx.FLOAT()))
def visitScalarInt(self, ctx):
# type: (RelayParser.ScalarIntContext) -> expr.Constant
return expr.const(self.visit(ctx.NAT()))
def visitScalarBool(self, ctx):
# type: (RelayParser.ScalarBoolContext) -> expr.Constant
return expr.const(self.visit(ctx.BOOL_LIT()))
def visitNeg(self, ctx):
# type: (RelayParser.NegContext) -> Union[expr.Constant, expr.Call]
val = self.visit(ctx.expr())
if isinstance(val, expr.Constant) and val.data.asnumpy().ndim == 0:
# fold Neg in for scalars
return expr.const(-val.data.asnumpy().item())
return op.negative(val)
def visitTuple(self, ctx):
# type: (RelayParser.TupleContext) -> expr.Tuple
tup = self.visit_list(ctx.expr())
return expr.Tuple(tup)
def visitLet(self, ctx):
# type: (RelayParser.SeqContext) -> expr.Let
if ctx.var() is None:
# anonymous identity
ident = "_"
type_ = None
var = self.mk_var(ident, type_)
else:
var = self.visitVar(ctx.var())
self.enter_var_scope()
value = self.visit(ctx.expr(0))
self.exit_var_scope()
body = self.visit(ctx.expr(1))
return expr.Let(var, value, body)
def visitBinOp(self, ctx):
# type: (RelayParser.BinOpContext) -> expr.Call
arg0, arg1 = self.visit_list(ctx.expr())
relay_op = BINARY_OPS.get(ctx.op.type)
if relay_op is None:
raise ParseError("Unimplemented binary op.")
return relay_op(arg0, arg1)
@spanify
def visitVar(self, ctx):
# type: (RelayParser.VarContext) -> expr.Var
ident = ctx.LOCAL_VAR()
if ident is None:
raise ParseError("Only local ids may be used in vars.")
type_ = self.getType_(ctx.type_())
return self.mk_var(ident.getText()[1:], type_)
def visitVarList(self, ctx):
# type: (RelayParser.VarListContext) -> List[expr.Var]
return self.visit_list(ctx.var())
# TODO: support a larger class of values than just Relay exprs
def visitAttr(self, ctx):
# type: (RelayParser.AttrContext) -> Tuple[str, expr.Expr]
return (ctx.CNAME().getText(), self.visit(ctx.expr()))
def visitArgNoAttr(self, ctx):
return (self.visit_list(ctx.varList().var()), None)
def visitAttrSeq(self, ctx):
# type: (RelayParser.AttrListContext) -> Dict[str, expr.Expr]
return dict(self.visit_list(ctx.attr()))
def visitArgWithAttr(self, ctx):
return (self.visit_list(ctx.var()), self.visitAttrSeq(ctx.attrSeq()))
def visitArgList(self,
ctx # type: RelayParser.ArgListContext
):
# type: (...) -> Tuple[Optional[List[expr.Var]], Optional[Dict[str, expr.Expr]]]
var_list = self.visit(ctx.varList()) if ctx.varList() else None
attr_list = self.visit(ctx.attrList()) if ctx.attrList() else None
return (var_list, attr_list)
def visitMeta(self, ctx):
type_key = str(ctx.CNAME())
index = int(self.visit(ctx.NAT()))
return self.meta[type_key][index]
def mk_func(self, ctx):
# type: (Union[RelayParser.FuncContext, RelayParser.DefnContext]) -> expr.Function
# Enter var scope early to put params in scope.
self.enter_var_scope()
# Capture type params in params.
self.enter_type_param_scope()
type_params = ctx.typeParamList()
if type_params is not None:
type_params = type_params.ident()
assert type_params
for ty_param in type_params:
name = ty_param.getText()
self.mk_typ(name, ty.Kind.Type)
var_list, attr_list = self.visit(ctx.argList())
if var_list is None:
var_list = []
ret_type = self.getType_(ctx.type_())
body = self.visit(ctx.body())
        # NB(@jroesch): you must stay in the type parameter scope until
        # after you exit the body, because the body can reference the type
        # parameters of the parent scopes.
type_params = list(self.exit_type_param_scope())
if type_params:
_, type_params = zip(*type_params)
self.exit_var_scope()
attrs = tvm.make.node("DictAttrs", **attr_list) if attr_list is not None else None
return expr.Function(var_list, body, ret_type, type_params, attrs)
@spanify
def visitFunc(self, ctx):
# type: (RelayParser.FuncContext) -> expr.Function
return self.mk_func(ctx)
# TODO: how to set spans for definitions?
# @spanify
def visitDefn(self, ctx):
# type: (RelayParser.DefnContext) -> None
ident = ctx.ident().GLOBAL_VAR()
if ident is None:
raise ParseError("Only global ids may be used in `def`s.")
ident_name = ident.getText()[1:]
ident = self.mk_global_var(ident_name)
self.module[ident] = self.mk_func(ctx)
def visitCallNoAttr(self, ctx):
return (self.visit_list(ctx.exprList().expr()), None)
def visitCallWithAttr(self, ctx):
return (self.visit_list(ctx.expr()), self.visit(ctx.attrSeq()))
def call(self, func, args, attrs, type_args):
if isinstance(func, OpWrapper):
return func(args, attrs, type_args)
return expr.Call(func, args, attrs, type_args)
@spanify
def visitCall(self, ctx):
# type: (RelayParser.CallContext) -> expr.Call
func = self.visit(ctx.expr())
args, attrs = self.visit(ctx.callList())
return self.call(func, args, attrs, [])
@spanify
def visitIfElse(self, ctx):
# type: (RelayParser.IfElseContext) -> expr.If
cond = self.visit(ctx.expr())
self.enter_var_scope()
true_branch = self.visit(ctx.body(0))
self.exit_var_scope()
self.enter_var_scope()
false_branch = self.visit(ctx.body(1))
self.exit_var_scope()
return expr.If(cond, true_branch, false_branch)
@spanify
def visitGraph(self, ctx):
# type: (RelayParser.GraphContext) -> expr.Expr
graph_nid = int(ctx.GRAPH_VAR().getText()[1:])
self.enter_var_scope()
value = self.visit(ctx.expr(0))
self.exit_var_scope()
if graph_nid != len(self.graph_expr):
raise ParseError(
"Expected new graph variable to be `%{}`,".format(len(self.graph_expr)) + \
"but got `%{}`".format(graph_nid))
self.graph_expr.append(value)
kont = self.visit(ctx.expr(1))
return kont
# Types
# pylint: disable=unused-argument
def visitIncompleteType(self, ctx):
        # type: (RelayParser.IncompleteTypeContext) -> None
return None
def visitTypeIdent(self, ctx):
# type: (RelayParser.TypeIdentContext) -> Union[ty.TensorType, str]
type_ident = ctx.CNAME().getText()
# Look through all type prefixes for a match
for type_prefix in TYPE_PREFIXES:
if type_ident.startswith(type_prefix):
return ty.scalar_type(type_ident)
type_param = lookup(self.type_param_scopes, type_ident)
if type_param is not None:
return type_param
raise ParseError("Unknown builtin type: {}".format(type_ident))
# def visitCallType(self, ctx):
# # type: (RelayParser.CallTypeContext) -> Union[expr.Expr, ty.TensorType]
# ident_type = ctx.identType().CNAME().getText()
# args = self.visit_list(ctx.type_())
# if not args:
# raise ParseError("Type-level functions must have arguments!")
# func_type = TYPE_FUNCS.get(ident_type)(args)
# if func_type is None:
# raise ParseError("Unknown type-level function: `{}`".format(ident_type))
# else:
# return func_type
def visitParensShape(self, ctx):
# type: (RelayParser.ParensShapeContext) -> int
return self.visit(ctx.shape())
def visitShapeList(self, ctx):
# type: (RelayParser.ShapeListContext) -> List[int]
return self.visit_list(ctx.shape())
def visitTensor(self, ctx):
return tuple(self.visit_list(ctx.expr()))
def visitTensorType(self, ctx):
# type: (RelayParser.TensorTypeContext) -> ty.TensorType
shape = self.visit(ctx.shapeList())
dtype = self.visit(ctx.type_())
if not isinstance(dtype, ty.TensorType):
raise ParseError("Expected dtype to be a Relay base type.")
dtype = dtype.dtype
return ty.TensorType(shape, dtype)
def visitTupleType(self, ctx):
# type: (RelayParser.TupleTypeContext) -> ty.TupleType
return ty.TupleType(self.visit_list(ctx.type_()))
def visitFuncType(self, ctx):
# type: (RelayParser.FuncTypeContext) -> ty.FuncType
types = self.visit_list(ctx.type_())
arg_types = types[:-1]
ret_type = types[-1]
return ty.FuncType(arg_types, ret_type, [], None)
def make_parser(data):
# type: (str) -> RelayParser
input_stream = InputStream(data)
lexer = RelayLexer(input_stream)
lexer.addErrorListener(StrictErrorListener(data))
token_stream = CommonTokenStream(lexer)
p = RelayParser(token_stream)
p.addErrorListener(StrictErrorListener(data))
return p
__source_name_counter__ = 0
class StrictErrorListener(ErrorListener):
def __init__(self, text):
self.text = text
def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
raise Exception("Syntax Error in:\n" + self.text)
def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs):
raise Exception("Ambiguity Error in:\n" + self.text)
def reportAttemptingFullContext(self,
recognizer,
dfa,
startIndex,
stopIndex,
conflictingAlts,
configs):
raise Exception("Attempting Full Context in:\n" + self.text)
def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs):
raise Exception("Context Sensitivity in:\n" + self.text)
def fromtext(data, source_name=None):
# type: (str, str) -> Union[expr.Expr, module.Module]
if data == "":
raise ParseError("Cannot parse the empty string.")
global __source_name_counter__
if source_name is None:
source_name = "source_file{0}".format(__source_name_counter__)
if isinstance(source_name, str):
source_name = SourceName(source_name)
tree = make_parser(data).prog()
return ParseTreeToRelayIR(source_name).visit(tree)
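# Illustrative usage (added for this excerpt; the program texts below are
# assumptions, since the exact surface syntax depends on the text-format
# version this parser targets):
# >>> e = fromtext("fn (%x) { %x }")        # an expression parses to an AST
# >>> m = fromtext("def @id(%x) { %x }")    # definitions populate a Module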
| true
| true
|
f702504392500d06ef6b36b82fbef26e508af87f
| 2,310
|
py
|
Python
|
twistit/_errorhandling.py
|
pydron/twistit
|
23ac43b830083fd32c400fcdfa5300e302769c74
|
[
"MIT"
] | 1
|
2016-04-08T00:35:54.000Z
|
2016-04-08T00:35:54.000Z
|
twistit/_errorhandling.py
|
pydron/twistit
|
23ac43b830083fd32c400fcdfa5300e302769c74
|
[
"MIT"
] | null | null | null |
twistit/_errorhandling.py
|
pydron/twistit
|
23ac43b830083fd32c400fcdfa5300e302769c74
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2015 Stefan C. Mueller
import functools
from twisted.internet import defer
def on_error_close(logger):
"""
Decorator for callback methods that implement `IProtocol`.
Any uncaught exception is logged and the connection is closed
forcefully.
Usage::
import logger
logger = logging.getLogger(__name__)
class MyProtocol(Protocol):
@on_error_close(logger.error)
def connectionMade():
...
The argument passed to `on_error_close` will be invoked with a
string message.
The motivation behind this decorator is as follows:
    Due to bugs it sometimes happens that exceptions are thrown out of
    callback methods in protocols. Twisted ignores them; at best they
    are logged. This is always a bug, as errors should be handled in the
    callback and not allowed to propagate up the call stack. As such, the
    behaviour after this has occurred is typically ill-defined and
    unpredictable.
A well made protocol implementation can handle unexpected connection
losses as they may occur at any time in a real world environment.
By closing the connection, there is a certain chance
that we enter a code path that can recover, or at least gracefully
cleanup.
    In my experience, this often means that unit tests fail with a more
    useful error message. Without it, a unit test (or even the final
    application) sometimes just blocks forever with no information about
    what is going wrong.
"""
def make_wrapper(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
d = defer.maybeDeferred(func, self, *args, **kwargs)
def on_error(err):
logger("Unhandled failure in %r:%s" % (func, err. getTraceback()))
if hasattr(self, "transport"):
if hasattr(self.transport, "abortConnection"):
self.transport.abortConnection()
elif hasattr(self.transport, "loseConnection"):
self.transport.loseConnection()
d.addErrback(on_error)
return wrapper
return make_wrapper
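# Illustrative sketch (the protocol, logger, and transform names below are
# hypothetical, not part of this module):
# class EchoProtocol(Protocol):
#     @on_error_close(logger.error)
#     def dataReceived(self, data):
#         # any uncaught error here is logged and the connection is aborted
#         self.transport.write(transform(data))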
| 35.538462
| 82
| 0.636364
|
import functools
from twisted.internet import defer
def on_error_close(logger):
def make_wrapper(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
d = defer.maybeDeferred(func, self, *args, **kwargs)
def on_error(err):
logger("Unhandled failure in %r:%s" % (func, err. getTraceback()))
if hasattr(self, "transport"):
if hasattr(self.transport, "abortConnection"):
self.transport.abortConnection()
elif hasattr(self.transport, "loseConnection"):
self.transport.loseConnection()
d.addErrback(on_error)
return wrapper
return make_wrapper
| true
| true
|
f7025167168843760aa99b53b10d6a7a0fc912e1
| 2,035
|
py
|
Python
|
.eggs/boto-2.48.0-py2.7.egg/boto/sdb/db/key.py
|
MQQ/git-bigstore
|
95f1e37fcda7fdce80502593cec31a44c604cf8a
|
[
"Apache-2.0"
] | null | null | null |
.eggs/boto-2.48.0-py2.7.egg/boto/sdb/db/key.py
|
MQQ/git-bigstore
|
95f1e37fcda7fdce80502593cec31a44c604cf8a
|
[
"Apache-2.0"
] | null | null | null |
.eggs/boto-2.48.0-py2.7.egg/boto/sdb/db/key.py
|
MQQ/git-bigstore
|
95f1e37fcda7fdce80502593cec31a44c604cf8a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class Key(object):
@classmethod
def from_path(cls, *args, **kwds):
raise NotImplementedError("Paths are not currently supported")
def __init__(self, encoded=None, obj=None):
self.name = None
if obj:
self.id = obj.id
self.kind = obj.kind()
else:
self.id = None
self.kind = None
def app(self):
raise NotImplementedError("Applications are not currently supported")
def kind(self):
return self.kind
def id(self):
return self.id
def name(self):
raise NotImplementedError("Key Names are not currently supported")
def id_or_name(self):
return self.id
def has_id_or_name(self):
return self.id is not None
def parent(self):
raise NotImplementedError("Key parents are not currently supported")
def __str__(self):
return self.id_or_name()
| 33.916667
| 77
| 0.696314
|
class Key(object):
@classmethod
def from_path(cls, *args, **kwds):
raise NotImplementedError("Paths are not currently supported")
def __init__(self, encoded=None, obj=None):
self.name = None
if obj:
self.id = obj.id
self.kind = obj.kind()
else:
self.id = None
self.kind = None
def app(self):
raise NotImplementedError("Applications are not currently supported")
def kind(self):
return self.kind
def id(self):
return self.id
def name(self):
raise NotImplementedError("Key Names are not currently supported")
def id_or_name(self):
return self.id
def has_id_or_name(self):
return self.id is not None
def parent(self):
raise NotImplementedError("Key parents are not currently supported")
def __str__(self):
return self.id_or_name()
| true
| true
|
f7025258811b22755058146106a8a59727a8d6a1
| 14,181
|
py
|
Python
|
lib/geomet/wkt.py
|
davasqueza/eriskco_conector_CloudSQL
|
99304b5eed06e9bba3646535a82d7fc98b0838b7
|
[
"Apache-2.0"
] | null | null | null |
lib/geomet/wkt.py
|
davasqueza/eriskco_conector_CloudSQL
|
99304b5eed06e9bba3646535a82d7fc98b0838b7
|
[
"Apache-2.0"
] | null | null | null |
lib/geomet/wkt.py
|
davasqueza/eriskco_conector_CloudSQL
|
99304b5eed06e9bba3646535a82d7fc98b0838b7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tokenize
try:
import StringIO
except ImportError:
import io
StringIO = io
INVALID_WKT_FMT = 'Invalid WKT: `%s`'
def dump(obj, dest_file):
"""
Dump GeoJSON-like `dict` to WKT and write it to the `dest_file`.
:param dict obj:
        A GeoJSON-like dictionary. It must contain at least the keys 'type'
        and 'coordinates'.
:param dest_file:
Open and writable file-like object.
"""
dest_file.write(dumps(obj))
def load(source_file):
"""
Load a GeoJSON `dict` object from a ``source_file`` containing WKT.
:param source_file:
Open and readable file-like object.
:returns:
A GeoJSON `dict` representing the geometry read from the file.
"""
return loads(source_file.read())
def dumps(obj, decimals=16):
"""
Dump a GeoJSON-like `dict` to a WKT string.
"""
geom_type = obj['type']
exporter = _dumps_registry.get(geom_type)
if exporter is None:
_unsupported_geom_type(geom_type)
fmt = '%%.%df' % decimals
return exporter(obj, fmt)
def loads(string):
"""
Construct a GeoJSON `dict` from WKT (`string`).
"""
sio = StringIO.StringIO(string)
# NOTE: This is not the intended purpose of `tokenize`, but it works.
tokens = (x[1] for x in tokenize.generate_tokens(sio.readline))
tokens = _tokenize_wkt(tokens)
geom_type = next(tokens)
importer = _loads_registry.get(geom_type)
if importer is None:
_unsupported_geom_type(geom_type)
return importer(tokens, string)
def _tokenize_wkt(tokens):
"""
Since the tokenizer treats "-" and numeric strings as separate values,
combine them and yield them as a single token. This utility encapsulates
    parsing of negative numeric values from WKT and can be used generically
    in all parsers.
"""
negative = False
for t in tokens:
if t == '-':
negative = True
continue
else:
if negative:
yield '-%s' % t
else:
yield t
negative = False
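# Illustrative example (added for this excerpt): the '-' token is fused with
# the numeric token that follows it.
# >>> list(_tokenize_wkt(iter(['-', '1.5', '2.0'])))
# ['-1.5', '2.0']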
def _unsupported_geom_type(geom_type):
raise ValueError("Unsupported geometry type '%s'" % geom_type)
def _dump_point(obj, fmt):
"""
Dump a GeoJSON-like Point object to WKT.
:param dict obj:
A GeoJSON-like `dict` representing a Point.
:param str fmt:
Format string which indicates the number of digits to display after the
decimal point when formatting coordinates.
:returns:
WKT representation of the input GeoJSON Point ``obj``.
"""
coords = obj['coordinates']
pt = 'POINT (%s)' % ' '.join(fmt % c for c in coords)
return pt
def _dump_linestring(obj, fmt):
"""
Dump a GeoJSON-like LineString object to WKT.
Input parameters and return value are the LINESTRING equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
ls = 'LINESTRING (%s)'
ls %= ', '.join(' '.join(fmt % c for c in pt) for pt in coords)
return ls
def _dump_polygon(obj, fmt):
"""
Dump a GeoJSON-like Polygon object to WKT.
Input parameters and return value are the POLYGON equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
poly = 'POLYGON (%s)'
rings = (', '.join(' '.join(fmt % c for c in pt) for pt in ring)
for ring in coords)
rings = ('(%s)' % r for r in rings)
poly %= ', '.join(rings)
return poly
def _dump_multipoint(obj, fmt):
"""
Dump a GeoJSON-like MultiPoint object to WKT.
Input parameters and return value are the MULTIPOINT equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
mp = 'MULTIPOINT (%s)'
points = (' '.join(fmt % c for c in pt) for pt in coords)
# Add parens around each point.
points = ('(%s)' % pt for pt in points)
mp %= ', '.join(points)
return mp
def _dump_multilinestring(obj, fmt):
"""
Dump a GeoJSON-like MultiLineString object to WKT.
Input parameters and return value are the MULTILINESTRING equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
mlls = 'MULTILINESTRING (%s)'
linestrs = ('(%s)' % ', '.join(' '.join(fmt % c for c in pt)
for pt in linestr) for linestr in coords)
mlls %= ', '.join(ls for ls in linestrs)
return mlls
def _dump_multipolygon(obj, fmt):
"""
Dump a GeoJSON-like MultiPolygon object to WKT.
Input parameters and return value are the MULTIPOLYGON equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
mp = 'MULTIPOLYGON (%s)'
polys = (
# join the polygons in the multipolygon
', '.join(
# join the rings in a polygon,
# and wrap in parens
'(%s)' % ', '.join(
# join the points in a ring,
# and wrap in parens
'(%s)' % ', '.join(
# join coordinate values of a vertex
' '.join(fmt % c for c in pt)
for pt in ring)
for ring in poly)
for poly in coords)
)
mp %= polys
return mp
def _dump_geometrycollection(obj, fmt):
"""
Dump a GeoJSON-like GeometryCollection object to WKT.
Input parameters and return value are the GEOMETRYCOLLECTION equivalent to
:func:`_dump_point`.
The WKT conversions for each geometry in the collection are delegated to
their respective functions.
"""
gc = 'GEOMETRYCOLLECTION (%s)'
geoms = obj['geometries']
geoms_wkt = []
for geom in geoms:
geom_type = geom['type']
geoms_wkt.append(_dumps_registry.get(geom_type)(geom, fmt))
gc %= ','.join(geoms_wkt)
return gc
def _load_point(tokens, string):
"""
:param tokens:
        A generator of string tokens for the input WKT, beginning just after
        the geometry type, which is consumed before we get here. For
example, if :func:`loads` is called with the input 'POINT(0.0 1.0)',
``tokens`` would generate the following values:
.. code-block:: python
['(', '0.0', '1.0', ')']
:param str string:
The original WKT string.
:returns:
A GeoJSON `dict` Point representation of the WKT ``string``.
"""
if not next(tokens) == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
try:
for t in tokens:
if t == ')':
break
else:
coords.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='Point', coordinates=coords)
def _load_linestring(tokens, string):
"""
    Has inputs and a return value similar to :func:`_load_point`, except it
    handles LINESTRING geometry.
:returns:
A GeoJSON `dict` LineString representation of the WKT ``string``.
"""
if not next(tokens) == '(':
raise ValueError(INVALID_WKT_FMT % string)
# a list of lists
# each member list represents a point
coords = []
try:
pt = []
for t in tokens:
if t == ')':
coords.append(pt)
break
elif t == ',':
# it's the end of the point
coords.append(pt)
pt = []
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='LineString', coordinates=coords)
def _load_polygon(tokens, string):
"""
    Has inputs and a return value similar to :func:`_load_point`, except it
    handles POLYGON geometry.
:returns:
A GeoJSON `dict` Polygon representation of the WKT ``string``.
"""
open_parens = next(tokens), next(tokens)
if not open_parens == ('(', '('):
raise ValueError(INVALID_WKT_FMT % string)
# coords contains a list of rings
# each ring contains a list of points
# each point is a list of 2-4 values
coords = []
ring = []
on_ring = True
try:
pt = []
for t in tokens:
if t == ')' and on_ring:
# The ring is finished
ring.append(pt)
coords.append(ring)
on_ring = False
elif t == ')' and not on_ring:
# it's the end of the polygon
break
elif t == '(':
# it's a new ring
ring = []
pt = []
on_ring = True
elif t == ',' and on_ring:
# it's the end of a point
ring.append(pt)
pt = []
elif t == ',' and not on_ring:
# there's another ring.
# do nothing
pass
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='Polygon', coordinates=coords)
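# Illustrative round trip (added for this excerpt):
# >>> loads('POLYGON ((0 0, 1 0, 1 1, 0 0))')['coordinates']
# [[[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 0.0]]]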
def _load_multipoint(tokens, string):
"""
    Has inputs and a return value similar to :func:`_load_point`, except it
    handles MULTIPOINT geometry.
:returns:
A GeoJSON `dict` MultiPoint representation of the WKT ``string``.
"""
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
pt = []
paren_depth = 1
try:
for t in tokens:
if t == '(':
paren_depth += 1
elif t == ')':
paren_depth -= 1
if paren_depth == 0:
break
elif t == '':
pass
elif t == ',':
# the point is done
coords.append(pt)
pt = []
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
# Given the way we're parsing, we'll probably have to deal with the last
# point after the loop
if len(pt) > 0:
coords.append(pt)
return dict(type='MultiPoint', coordinates=coords)
def _load_multipolygon(tokens, string):
"""
    Has inputs and a return value similar to :func:`_load_point`, except it
    handles MULTIPOLYGON geometry.
:returns:
A GeoJSON `dict` MultiPolygon representation of the WKT ``string``.
"""
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
polygons = []
while True:
try:
poly = _load_polygon(tokens, string)
polygons.append(poly['coordinates'])
t = next(tokens)
if t == ')':
# we're done; no more polygons.
break
except StopIteration:
# If we reach this, the WKT is not valid.
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='MultiPolygon', coordinates=polygons)
def _load_multilinestring(tokens, string):
"""
    Has inputs and a return value similar to :func:`_load_point`, except it
    handles MULTILINESTRING geometry.
:returns:
A GeoJSON `dict` MultiLineString representation of the WKT ``string``.
"""
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
linestrs = []
while True:
try:
linestr = _load_linestring(tokens, string)
linestrs.append(linestr['coordinates'])
t = next(tokens)
if t == ')':
# we're done; no more linestrings.
break
except StopIteration:
# If we reach this, the WKT is not valid.
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='MultiLineString', coordinates=linestrs)
def _load_geometrycollection(tokens, string):
"""
    Has inputs and a return value similar to :func:`_load_point`, except it
    handles GEOMETRYCOLLECTIONs.
Delegates parsing to the parsers for the individual geometry types.
:returns:
A GeoJSON `dict` GeometryCollection representation of the WKT
``string``.
"""
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
geoms = []
result = dict(type='GeometryCollection', geometries=geoms)
while True:
try:
t = next(tokens)
if t == ')':
break
elif t == ',':
# another geometry still
continue
else:
geom_type = t
load_func = _loads_registry.get(geom_type)
geom = load_func(tokens, string)
geoms.append(geom)
except StopIteration:
raise ValueError(INVALID_WKT_FMT % string)
return result
_dumps_registry = {
'Point': _dump_point,
'LineString': _dump_linestring,
'Polygon': _dump_polygon,
'MultiPoint': _dump_multipoint,
'MultiLineString': _dump_multilinestring,
'MultiPolygon': _dump_multipolygon,
'GeometryCollection': _dump_geometrycollection,
}
_loads_registry = {
'POINT': _load_point,
'LINESTRING': _load_linestring,
'POLYGON': _load_polygon,
'MULTIPOINT': _load_multipoint,
'MULTILINESTRING': _load_multilinestring,
'MULTIPOLYGON': _load_multipolygon,
'GEOMETRYCOLLECTION': _load_geometrycollection,
}
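# Illustrative usage (added for this excerpt, not part of the original
# module):
# >>> dumps({'type': 'Point', 'coordinates': [0.0, 1.0]}, decimals=1)
# 'POINT (0.0 1.0)'
# >>> loads('POINT (0.0 1.0)')
# {'type': 'Point', 'coordinates': [0.0, 1.0]}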
| 28.192843
| 79
| 0.586348
|
import tokenize
try:
import StringIO
except ImportError:
import io
StringIO = io
INVALID_WKT_FMT = 'Invalid WKT: `%s`'
def dump(obj, dest_file):
dest_file.write(dumps(obj))
def load(source_file):
return loads(source_file.read())
def dumps(obj, decimals=16):
geom_type = obj['type']
exporter = _dumps_registry.get(geom_type)
if exporter is None:
_unsupported_geom_type(geom_type)
fmt = '%%.%df' % decimals
return exporter(obj, fmt)
def loads(string):
sio = StringIO.StringIO(string)
tokens = (x[1] for x in tokenize.generate_tokens(sio.readline))
tokens = _tokenize_wkt(tokens)
geom_type = next(tokens)
importer = _loads_registry.get(geom_type)
if importer is None:
_unsupported_geom_type(geom_type)
return importer(tokens, string)
def _tokenize_wkt(tokens):
negative = False
for t in tokens:
if t == '-':
negative = True
continue
else:
if negative:
yield '-%s' % t
else:
yield t
negative = False
def _unsupported_geom_type(geom_type):
raise ValueError("Unsupported geometry type '%s'" % geom_type)
def _dump_point(obj, fmt):
coords = obj['coordinates']
pt = 'POINT (%s)' % ' '.join(fmt % c for c in coords)
return pt
def _dump_linestring(obj, fmt):
coords = obj['coordinates']
ls = 'LINESTRING (%s)'
ls %= ', '.join(' '.join(fmt % c for c in pt) for pt in coords)
return ls
def _dump_polygon(obj, fmt):
coords = obj['coordinates']
poly = 'POLYGON (%s)'
rings = (', '.join(' '.join(fmt % c for c in pt) for pt in ring)
for ring in coords)
rings = ('(%s)' % r for r in rings)
poly %= ', '.join(rings)
return poly
def _dump_multipoint(obj, fmt):
coords = obj['coordinates']
mp = 'MULTIPOINT (%s)'
points = (' '.join(fmt % c for c in pt) for pt in coords)
points = ('(%s)' % pt for pt in points)
mp %= ', '.join(points)
return mp
def _dump_multilinestring(obj, fmt):
coords = obj['coordinates']
mlls = 'MULTILINESTRING (%s)'
linestrs = ('(%s)' % ', '.join(' '.join(fmt % c for c in pt)
for pt in linestr) for linestr in coords)
mlls %= ', '.join(ls for ls in linestrs)
return mlls
def _dump_multipolygon(obj, fmt):
coords = obj['coordinates']
mp = 'MULTIPOLYGON (%s)'
polys = (
', '.join(
'(%s)' % ', '.join(
'(%s)' % ', '.join(
' '.join(fmt % c for c in pt)
for pt in ring)
for ring in poly)
for poly in coords)
)
mp %= polys
return mp
def _dump_geometrycollection(obj, fmt):
gc = 'GEOMETRYCOLLECTION (%s)'
geoms = obj['geometries']
geoms_wkt = []
for geom in geoms:
geom_type = geom['type']
geoms_wkt.append(_dumps_registry.get(geom_type)(geom, fmt))
gc %= ','.join(geoms_wkt)
return gc
def _load_point(tokens, string):
if not next(tokens) == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
try:
for t in tokens:
if t == ')':
break
else:
coords.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='Point', coordinates=coords)
def _load_linestring(tokens, string):
if not next(tokens) == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
try:
pt = []
for t in tokens:
if t == ')':
coords.append(pt)
break
elif t == ',':
coords.append(pt)
pt = []
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='LineString', coordinates=coords)
def _load_polygon(tokens, string):
open_parens = next(tokens), next(tokens)
if not open_parens == ('(', '('):
raise ValueError(INVALID_WKT_FMT % string)
# coords contains a list of rings
# each ring contains a list of points
# each point is a list of 2-4 values
coords = []
ring = []
on_ring = True
try:
pt = []
for t in tokens:
if t == ')' and on_ring:
# The ring is finished
ring.append(pt)
coords.append(ring)
on_ring = False
elif t == ')' and not on_ring:
# it's the end of the polygon
break
elif t == '(':
ring = []
pt = []
on_ring = True
elif t == ',' and on_ring:
# it's the end of a point
ring.append(pt)
pt = []
elif t == ',' and not on_ring:
# do nothing
pass
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='Polygon', coordinates=coords)
def _load_multipoint(tokens, string):
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
pt = []
paren_depth = 1
try:
for t in tokens:
if t == '(':
paren_depth += 1
elif t == ')':
paren_depth -= 1
if paren_depth == 0:
break
elif t == '':
pass
elif t == ',':
# the point is done
coords.append(pt)
pt = []
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
# Given the way we're parsing, we'll probably have to deal with the last
# point after the loop
if len(pt) > 0:
coords.append(pt)
return dict(type='MultiPoint', coordinates=coords)
def _load_multipolygon(tokens, string):
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
polygons = []
while True:
try:
poly = _load_polygon(tokens, string)
polygons.append(poly['coordinates'])
t = next(tokens)
if t == ')':
# we're done; no more polygons.
break
except StopIteration:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='MultiPolygon', coordinates=polygons)
def _load_multilinestring(tokens, string):
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
linestrs = []
while True:
try:
linestr = _load_linestring(tokens, string)
linestrs.append(linestr['coordinates'])
t = next(tokens)
if t == ')':
break
except StopIteration:
# If we reach this, the WKT is not valid.
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='MultiLineString', coordinates=linestrs)
def _load_geometrycollection(tokens, string):
open_paren = next(tokens)
if not open_paren == '(':
raise ValueError(INVALID_WKT_FMT % string)
geoms = []
result = dict(type='GeometryCollection', geometries=geoms)
while True:
try:
t = next(tokens)
if t == ')':
break
elif t == ',':
# another geometry still
continue
else:
geom_type = t
load_func = _loads_registry.get(geom_type)
geom = load_func(tokens, string)
geoms.append(geom)
except StopIteration:
raise ValueError(INVALID_WKT_FMT % string)
return result
_dumps_registry = {
'Point': _dump_point,
'LineString': _dump_linestring,
'Polygon': _dump_polygon,
'MultiPoint': _dump_multipoint,
'MultiLineString': _dump_multilinestring,
'MultiPolygon': _dump_multipolygon,
'GeometryCollection': _dump_geometrycollection,
}
_loads_registry = {
'POINT': _load_point,
'LINESTRING': _load_linestring,
'POLYGON': _load_polygon,
'MULTIPOINT': _load_multipoint,
'MULTILINESTRING': _load_multilinestring,
'MULTIPOLYGON': _load_multipolygon,
'GEOMETRYCOLLECTION': _load_geometrycollection,
}
| true
| true
|
f7025441717dc4c50f233a3d069a8ca08670a3c6
| 16,281
|
py
|
Python
|
gw_app/nas_managers/mod_mikrotik.py
|
Fa67/djing
|
6e84640d7294be9f2f17f73e796713e93e43bb68
|
[
"Unlicense"
] | 1
|
2020-12-05T01:55:56.000Z
|
2020-12-05T01:55:56.000Z
|
gw_app/nas_managers/mod_mikrotik.py
|
Fa67/djing
|
6e84640d7294be9f2f17f73e796713e93e43bb68
|
[
"Unlicense"
] | null | null | null |
gw_app/nas_managers/mod_mikrotik.py
|
Fa67/djing
|
6e84640d7294be9f2f17f73e796713e93e43bb68
|
[
"Unlicense"
] | 2
|
2020-03-07T12:00:17.000Z
|
2020-12-05T01:56:01.000Z
|
import binascii
import re
import socket
from abc import ABCMeta
from hashlib import md5
from ipaddress import ip_network, _BaseNetwork
from typing import Iterable, Optional, Tuple, Generator, Dict, Iterator
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from djing.lib.decorators import LazyInitMetaclass
from gw_app.nas_managers import core
from gw_app.nas_managers import structs as i_structs
DEBUG = getattr(settings, 'DEBUG', False)
LIST_USERS_ALLOWED = 'DjingUsersAllowed'
LIST_DEVICES_ALLOWED = 'DjingDevicesAllowed'
class ApiRos(object):
"""Routeros api"""
__sk = None
is_login = False
def __init__(self, ip: str, port: int):
if self.__sk is None:
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sk.connect((ip, port or 8728))
self.__sk = sk
def login(self, username, pwd):
if self.is_login:
return
chal = None
for repl, attrs in self.talk_iter(("/login",)):
chal = binascii.unhexlify(attrs['=ret'])
md = md5()
md.update(b'\x00')
md.update(bytes(pwd, 'utf-8'))
md.update(chal)
for _ in self.talk_iter(("/login", "=name=" + username,
"=response=00" + binascii.hexlify(
md.digest()).decode('utf-8'))):
pass
self.is_login = True
def talk_iter(self, words: Iterable):
if self.write_sentence(words) == 0:
return
while 1:
i = self.read_sentence()
if len(i) == 0:
continue
reply = i[0]
attrs = {}
for w in i[1:]:
j = w.find('=', 1)
if j == -1:
attrs[w] = ''
else:
attrs[w[:j]] = w[j + 1:]
yield (reply, attrs)
if reply == '!done':
return
def write_sentence(self, words: Iterable):
ret = 0
for w in words:
self.write_word(w)
ret += 1
self.write_word('')
return ret
def read_sentence(self):
r = []
while 1:
w = self.read_word()
if w == '':
return r
r.append(w)
def write_word(self, w):
if DEBUG:
print("<<< " + w)
b = bytes(w, "utf-8")
self.write_len(len(b))
self.write_bytes(b)
def read_word(self):
ret = self.read_bytes(self.read_len()).decode('utf-8')
if DEBUG:
print(">>> " + ret)
return ret
def write_len(self, l):
if l < 0x80:
self.write_bytes(bytes((l,)))
elif l < 0x4000:
l |= 0x8000
self.write_bytes(bytes(((l >> 8) & 0xff, l & 0xff)))
elif l < 0x200000:
l |= 0xC00000
self.write_bytes(
bytes(((l >> 16) & 0xff, (l >> 8) & 0xff, l & 0xff)))
elif l < 0x10000000:
l |= 0xE0000000
self.write_bytes(bytes(((l >> 24) & 0xff, (l >> 16) & 0xff,
(l >> 8) & 0xff, l & 0xff)))
else:
self.write_bytes(bytes((0xf0, (l >> 24) & 0xff, (l >> 16) & 0xff,
(l >> 8) & 0xff, l & 0xff)))
def read_len(self):
c = self.read_bytes(1)[0]
if (c & 0x80) == 0x00:
pass
elif (c & 0xC0) == 0x80:
c &= ~0xC0
c <<= 8
c += self.read_bytes(1)[0]
elif (c & 0xE0) == 0xC0:
c &= ~0xE0
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
elif (c & 0xF0) == 0xE0:
c &= ~0xF0
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
elif (c & 0xF8) == 0xF0:
c = self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
return c
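    # Worked example (added for this excerpt, not in the original file):
    # write_len(137) emits b'\x80\x89' because 0x80 <= 137 < 0x4000 and
    # 137 | 0x8000 == 0x8089; read_len() then sees 0x80, matches the
    # (c & 0xC0) == 0x80 branch, masks off the marker bits and shifts in
    # 0x89, recovering 137.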
def write_bytes(self, s):
n = 0
while n < len(s):
r = self.__sk.send(s[n:])
if r == 0:
raise core.NasFailedResult("connection closed by remote end")
n += r
def read_bytes(self, length):
ret = b''
while len(ret) < length:
s = self.__sk.recv(length - len(ret))
if len(s) == 0:
raise core.NasFailedResult("connection closed by remote end")
ret += s
return ret
def __del__(self):
if self.__sk is not None:
self.__sk.close()
class MikrotikTransmitter(core.BaseTransmitter, ApiRos,
metaclass=type('_ABC_Lazy_mcs',
(ABCMeta, LazyInitMetaclass), {})):
description = _('Mikrotik NAS')
def __init__(self, login: str, password: str, ip: str, port: int,
enabled: bool, *args, **kwargs):
if not enabled:
raise core.NasFailedResult(_('Gateway disabled'))
try:
core.BaseTransmitter.__init__(
self, login=login,
password=password,
ip=ip, port=port,
*args, **kwargs
)
ApiRos.__init__(self, ip, port)
self.login(username=login, pwd=password)
except ConnectionRefusedError:
raise core.NasNetworkError('Connection to %s is Refused' % ip)
def _exec_cmd(self, cmd: Iterable) -> Dict:
if not isinstance(cmd, (list, tuple)):
raise TypeError
r = dict()
for k, v in self.talk_iter(cmd):
if k == '!done':
break
elif k == '!trap':
raise core.NasFailedResult(v.get('=message'))
r[k] = v or None
return r
def _exec_cmd_iter(self, cmd: Iterable) -> Generator:
if not isinstance(cmd, (list, tuple)):
raise TypeError
for k, v in self.talk_iter(cmd):
if k == '!done':
break
elif k == '!trap':
raise core.NasFailedResult(v.get('=message'))
if v:
yield v
@staticmethod
def _build_shape_obj(info: Dict) -> i_structs.SubnetQueue:
        # Convert a Mikrotik speed suffix into Mbit/s
def parse_speed(text_speed):
text_speed_digit = float(text_speed[:-1] or 0.0)
text_append = text_speed[-1:]
if text_append == 'M':
res = text_speed_digit
elif text_append == 'k':
res = text_speed_digit / 1000
# elif text_append == 'G':
# res = text_speed_digit * 0x400
else:
res = float(re.sub(r'[a-zA-Z]', '', text_speed)) / 1000 ** 2
return res
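        # e.g. parse_speed('10M') -> 10.0, parse_speed('512k') -> 0.512
        # (illustrative values, added for this excerpt)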
speed_out, speed_in = info['=max-limit'].split('/')
speed_in = parse_speed(speed_in)
speed_out = parse_speed(speed_out)
try:
target = info.get('=target')
if target is None:
target = info.get('=target-addresses')
name = info.get('=name')
disabled = info.get('=disabled', False)
if disabled is not None:
disabled = True if disabled == 'true' else False
if target and name:
# target may be '192.168.0.3/32,192.168.0.2/32'
net = target.split(',')[0]
if not net:
return
a = i_structs.SubnetQueue(
name=name,
network=net,
max_limit=(speed_in, speed_out),
is_access=not disabled,
queue_id=info.get('=.id')
)
return a
except ValueError as e:
print('ValueError:', e)
#################################################
# QUEUES
#################################################
# Find queue by name
def find_queue(self, name: str) -> Optional[i_structs.SubnetQueue]:
r = self._exec_cmd(('/queue/simple/print', '?name=%s' % name))
if r:
return self._build_shape_obj(r.get('!re'))
def add_queue(self, queue: i_structs.SubnetQueue) -> None:
if not isinstance(queue, i_structs.SubnetQueue):
raise TypeError('queue must be instance of SubnetQueue')
return self._exec_cmd((
'/queue/simple/add',
'=name=%s' % queue.name,
            # FIXME: on some Mikrotik versions this key is =target-addresses,
            # on others =target
'=target=%s' % queue.network,
'=max-limit=%.3fM/%.3fM' % queue.max_limit,
'=queue=Djing_pcq_up/Djing_pcq_down',
'=burst-time=1/5',
#'=total-queue=Djing_pcq_down'
))
def remove_queue(self, queue: i_structs.SubnetQueue) -> None:
if not isinstance(queue, i_structs.SubnetQueue):
raise TypeError
if not queue.queue_id:
queue = self.find_queue(queue.name)
if queue is not None:
if queue.queue_id:
self._exec_cmd((
'/queue/simple/remove',
'=.id=%s' % queue.queue_id
))
def remove_queue_range(self, q_ids: Iterable[str]):
ids = ','.join(q_ids)
if len(ids) > 1:
self._exec_cmd(('/queue/simple/remove', '=numbers=%s' % ids))
def update_queue(self, queue: i_structs.SubnetQueue):
if not isinstance(queue, i_structs.SubnetQueue):
raise TypeError
queue_gw = self.find_queue(queue.name)
if queue_gw is None:
return self.add_queue(queue)
else:
cmd = [
'/queue/simple/set',
'=name=%s' % queue.name,
'=max-limit=%.3fM/%.3fM' % queue.max_limit,
                # FIXME: on some RouterOS firmware versions this is
                # =target-addresses, on others =target
'=target=%s' % queue.network,
'=queue=Djing_pcq_up/Djing_pcq_down',
'=burst-time=1/1'
]
if queue.queue_id:
cmd.insert(1, '=.id=%s' % queue.queue_id)
r = self._exec_cmd(cmd)
return r
def read_queue_iter(self) -> Generator:
for dat in self._exec_cmd_iter(('/queue/simple/print', '=detail')):
sobj = self._build_shape_obj(dat)
if sobj is not None:
yield sobj
#################################################
# Ip->firewall->address list
#################################################
def add_ip(self, list_name: str, net):
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
commands = (
'/ip/firewall/address-list/add',
'=list=%s' % list_name,
'=address=%s' % net
)
return self._exec_cmd(commands)
def remove_ip(self, mk_id):
return self._exec_cmd((
'/ip/firewall/address-list/remove',
'=.id=%s' % mk_id
))
def remove_ip_range(self, ip_firewall_ids: Iterable[str]):
return self._exec_cmd((
'/ip/firewall/address-list/remove',
'=numbers=%s' % ','.join(ip_firewall_ids)
))
def find_ip(self, net, list_name: str):
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
if net.prefixlen == net.max_prefixlen:
ip = net.network_address
else:
ip = net.with_prefixlen
r = self._exec_cmd((
'/ip/firewall/address-list/print', 'where',
'?list=%s' % list_name,
'?address=%s' % ip
))
return r.get('!re')
def read_nets_iter(self, list_name: str) -> Generator:
nets = self._exec_cmd_iter((
'/ip/firewall/address-list/print', 'where',
'?list=%s' % list_name,
'?dynamic=no'
))
for dat in nets:
n = ip_network(dat.get('=address'))
n.queue_id = dat.get('=.id')
yield n
def update_ip(self, net):
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
res_net_gw = self.find_ip(net, LIST_USERS_ALLOWED)
if not res_net_gw:
self.add_ip(LIST_USERS_ALLOWED, net)
#################################################
# BaseTransmitter implementation
#################################################
def add_user_range(self, queue_list: i_structs.VectorQueue):
for q in queue_list:
self.add_user(q)
def remove_user_range(self, queues: i_structs.VectorQueue):
if not isinstance(queues, (tuple, list, set)):
            raise ValueError('queues is iterated twice; a generator will not work')
queue_ids = (q.queue_id for q in queues if q)
self.remove_queue_range(queue_ids)
for q in queues:
if isinstance(q, i_structs.SubnetQueue):
ip_list_entity = self.find_ip(q.network, LIST_USERS_ALLOWED)
if ip_list_entity:
self.remove_ip(ip_list_entity.get('=.id'))
def add_user(self, queue: i_structs.SubnetQueue, *args):
try:
self.add_queue(queue)
except core.NasFailedResult as e:
print('Error:', e)
net = queue.network
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
try:
self.add_ip(LIST_USERS_ALLOWED, net)
except core.NasFailedResult as e:
print('Error:', e)
def remove_user(self, queue: i_structs.SubnetQueue):
self.remove_queue(queue)
r = self.find_ip(queue.network, LIST_USERS_ALLOWED)
if r:
ip_id = r.get('=.id')
self.remove_ip(ip_id)
def update_user(self, queue: i_structs.SubnetQueue, *args):
if queue.is_access:
self.update_queue(queue)
self.update_ip(queue.network)
else:
self.remove_queue(queue)
res_ips = self.find_ip(queue.network, LIST_USERS_ALLOWED)
if res_ips:
self.remove_ip(res_ips.get('=.id'))
def ping(self, host, count=10) -> Optional[Tuple[int, int]]:
r = self._exec_cmd((
'/ip/arp/print',
'?address=%s' % host
))
if r == {}:
return
interface = r['!re'].get('=interface')
r = self._exec_cmd((
'/ping', '=address=%s' % host, '=arp-ping=yes', '=interval=100ms',
'=count=%d' % count,
'=interface=%s' % interface
))
res = r.get('!re')
if res is not None:
received, sent = int(res.get('=received')), int(res.get('=sent'))
return received, sent
def read_users(self) -> i_structs.VectorQueue:
return self.read_queue_iter()
def sync_nas(self, users_from_db: Iterator):
queues_from_db = (
ab.build_agent_struct() for ab in users_from_db
if ab is not None and ab.is_access()
)
queues_from_db = set(filter(lambda x: x is not None, queues_from_db))
queues_from_gw = self.read_queue_iter()
user_q_for_add, user_q_for_del = core.diff_set(queues_from_db,
set(queues_from_gw))
self.remove_queue_range(
(q.queue_id for q in user_q_for_del)
)
for q in user_q_for_add:
self.add_queue(q)
del user_q_for_add, user_q_for_del
# sync ip addrs list
db_nets = set(net.network for net in queues_from_db)
gw_nets = set(self.read_nets_iter(LIST_USERS_ALLOWED))
nets_add, nets_del = core.diff_set(db_nets, gw_nets)
self.remove_ip_range(
(q.queue_id for q in nets_del)
)
for q in nets_add:
self.add_ip(LIST_USERS_ALLOWED, q)
| 33.989562
| 78
| 0.504146
|
import binascii
import re
import socket
from abc import ABCMeta
from hashlib import md5
from ipaddress import ip_network, _BaseNetwork
from typing import Iterable, Optional, Tuple, Generator, Dict, Iterator
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from djing.lib.decorators import LazyInitMetaclass
from gw_app.nas_managers import core
from gw_app.nas_managers import structs as i_structs
DEBUG = getattr(settings, 'DEBUG', False)
LIST_USERS_ALLOWED = 'DjingUsersAllowed'
LIST_DEVICES_ALLOWED = 'DjingDevicesAllowed'
class ApiRos(object):
__sk = None
is_login = False
def __init__(self, ip: str, port: int):
if self.__sk is None:
sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sk.connect((ip, port or 8728))
self.__sk = sk
def login(self, username, pwd):
if self.is_login:
return
chal = None
for repl, attrs in self.talk_iter(("/login",)):
chal = binascii.unhexlify(attrs['=ret'])
md = md5()
md.update(b'\x00')
md.update(bytes(pwd, 'utf-8'))
md.update(chal)
for _ in self.talk_iter(("/login", "=name=" + username,
"=response=00" + binascii.hexlify(
md.digest()).decode('utf-8'))):
pass
self.is_login = True
def talk_iter(self, words: Iterable):
if self.write_sentence(words) == 0:
return
while 1:
i = self.read_sentence()
if len(i) == 0:
continue
reply = i[0]
attrs = {}
for w in i[1:]:
j = w.find('=', 1)
if j == -1:
attrs[w] = ''
else:
attrs[w[:j]] = w[j + 1:]
yield (reply, attrs)
if reply == '!done':
return
def write_sentence(self, words: Iterable):
ret = 0
for w in words:
self.write_word(w)
ret += 1
self.write_word('')
return ret
def read_sentence(self):
r = []
while 1:
w = self.read_word()
if w == '':
return r
r.append(w)
def write_word(self, w):
if DEBUG:
print("<<< " + w)
b = bytes(w, "utf-8")
self.write_len(len(b))
self.write_bytes(b)
def read_word(self):
ret = self.read_bytes(self.read_len()).decode('utf-8')
if DEBUG:
print(">>> " + ret)
return ret
def write_len(self, l):
if l < 0x80:
self.write_bytes(bytes((l,)))
elif l < 0x4000:
l |= 0x8000
self.write_bytes(bytes(((l >> 8) & 0xff, l & 0xff)))
elif l < 0x200000:
l |= 0xC00000
self.write_bytes(
bytes(((l >> 16) & 0xff, (l >> 8) & 0xff, l & 0xff)))
elif l < 0x10000000:
l |= 0xE0000000
self.write_bytes(bytes(((l >> 24) & 0xff, (l >> 16) & 0xff,
(l >> 8) & 0xff, l & 0xff)))
else:
self.write_bytes(bytes((0xf0, (l >> 24) & 0xff, (l >> 16) & 0xff,
(l >> 8) & 0xff, l & 0xff)))
def read_len(self):
c = self.read_bytes(1)[0]
if (c & 0x80) == 0x00:
pass
elif (c & 0xC0) == 0x80:
c &= ~0xC0
c <<= 8
c += self.read_bytes(1)[0]
elif (c & 0xE0) == 0xC0:
c &= ~0xE0
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
elif (c & 0xF0) == 0xE0:
c &= ~0xF0
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
elif (c & 0xF8) == 0xF0:
c = self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
c <<= 8
c += self.read_bytes(1)[0]
return c
def write_bytes(self, s):
n = 0
while n < len(s):
r = self.__sk.send(s[n:])
if r == 0:
raise core.NasFailedResult("connection closed by remote end")
n += r
def read_bytes(self, length):
ret = b''
while len(ret) < length:
s = self.__sk.recv(length - len(ret))
if len(s) == 0:
raise core.NasFailedResult("connection closed by remote end")
ret += s
return ret
def __del__(self):
if self.__sk is not None:
self.__sk.close()
class MikrotikTransmitter(core.BaseTransmitter, ApiRos,
metaclass=type('_ABC_Lazy_mcs',
(ABCMeta, LazyInitMetaclass), {})):
description = _('Mikrotik NAS')
def __init__(self, login: str, password: str, ip: str, port: int,
enabled: bool, *args, **kwargs):
if not enabled:
raise core.NasFailedResult(_('Gateway disabled'))
try:
core.BaseTransmitter.__init__(
self, login=login,
password=password,
ip=ip, port=port,
*args, **kwargs
)
ApiRos.__init__(self, ip, port)
self.login(username=login, pwd=password)
except ConnectionRefusedError:
raise core.NasNetworkError('Connection to %s is Refused' % ip)
def _exec_cmd(self, cmd: Iterable) -> Dict:
if not isinstance(cmd, (list, tuple)):
raise TypeError
r = dict()
for k, v in self.talk_iter(cmd):
if k == '!done':
break
elif k == '!trap':
raise core.NasFailedResult(v.get('=message'))
r[k] = v or None
return r
def _exec_cmd_iter(self, cmd: Iterable) -> Generator:
if not isinstance(cmd, (list, tuple)):
raise TypeError
for k, v in self.talk_iter(cmd):
if k == '!done':
break
elif k == '!trap':
raise core.NasFailedResult(v.get('=message'))
if v:
yield v
@staticmethod
def _build_shape_obj(info: Dict) -> i_structs.SubnetQueue:
def parse_speed(text_speed):
text_speed_digit = float(text_speed[:-1] or 0.0)
text_append = text_speed[-1:]
if text_append == 'M':
res = text_speed_digit
elif text_append == 'k':
res = text_speed_digit / 1000
else:
res = float(re.sub(r'[a-zA-Z]', '', text_speed)) / 1000 ** 2
return res
speed_out, speed_in = info['=max-limit'].split('/')
speed_in = parse_speed(speed_in)
speed_out = parse_speed(speed_out)
try:
target = info.get('=target')
if target is None:
target = info.get('=target-addresses')
name = info.get('=name')
disabled = info.get('=disabled', False)
if disabled is not None:
disabled = True if disabled == 'true' else False
if target and name:
net = target.split(',')[0]
if not net:
return
a = i_structs.SubnetQueue(
name=name,
network=net,
max_limit=(speed_in, speed_out),
is_access=not disabled,
queue_id=info.get('=.id')
)
return a
except ValueError as e:
print('ValueError:', e)
def find_queue(self, name: str) -> Optional[i_structs.SubnetQueue]:
r = self._exec_cmd(('/queue/simple/print', '?name=%s' % name))
if r:
return self._build_shape_obj(r.get('!re'))
def add_queue(self, queue: i_structs.SubnetQueue) -> None:
if not isinstance(queue, i_structs.SubnetQueue):
raise TypeError('queue must be instance of SubnetQueue')
return self._exec_cmd((
'/queue/simple/add',
'=name=%s' % queue.name,
'=target=%s' % queue.network,
'=max-limit=%.3fM/%.3fM' % queue.max_limit,
'=queue=Djing_pcq_up/Djing_pcq_down',
'=burst-time=1/5',
))
def remove_queue(self, queue: i_structs.SubnetQueue) -> None:
if not isinstance(queue, i_structs.SubnetQueue):
raise TypeError
if not queue.queue_id:
queue = self.find_queue(queue.name)
if queue is not None:
if queue.queue_id:
self._exec_cmd((
'/queue/simple/remove',
'=.id=%s' % queue.queue_id
))
def remove_queue_range(self, q_ids: Iterable[str]):
ids = ','.join(q_ids)
if len(ids) > 1:
self._exec_cmd(('/queue/simple/remove', '=numbers=%s' % ids))
def update_queue(self, queue: i_structs.SubnetQueue):
if not isinstance(queue, i_structs.SubnetQueue):
raise TypeError
queue_gw = self.find_queue(queue.name)
if queue_gw is None:
return self.add_queue(queue)
else:
cmd = [
'/queue/simple/set',
'=name=%s' % queue.name,
'=max-limit=%.3fM/%.3fM' % queue.max_limit,
'=target=%s' % queue.network,
'=queue=Djing_pcq_up/Djing_pcq_down',
'=burst-time=1/1'
]
if queue.queue_id:
cmd.insert(1, '=.id=%s' % queue.queue_id)
r = self._exec_cmd(cmd)
return r
def read_queue_iter(self) -> Generator:
for dat in self._exec_cmd_iter(('/queue/simple/print', '=detail')):
sobj = self._build_shape_obj(dat)
if sobj is not None:
yield sobj
def add_ip(self, list_name: str, net):
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
commands = (
'/ip/firewall/address-list/add',
'=list=%s' % list_name,
'=address=%s' % net
)
return self._exec_cmd(commands)
def remove_ip(self, mk_id):
return self._exec_cmd((
'/ip/firewall/address-list/remove',
'=.id=%s' % mk_id
))
def remove_ip_range(self, ip_firewall_ids: Iterable[str]):
return self._exec_cmd((
'/ip/firewall/address-list/remove',
'=numbers=%s' % ','.join(ip_firewall_ids)
))
def find_ip(self, net, list_name: str):
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
if net.prefixlen == net.max_prefixlen:
ip = net.network_address
else:
ip = net.with_prefixlen
r = self._exec_cmd((
'/ip/firewall/address-list/print', 'where',
'?list=%s' % list_name,
'?address=%s' % ip
))
return r.get('!re')
def read_nets_iter(self, list_name: str) -> Generator:
nets = self._exec_cmd_iter((
'/ip/firewall/address-list/print', 'where',
'?list=%s' % list_name,
'?dynamic=no'
))
for dat in nets:
n = ip_network(dat.get('=address'))
n.queue_id = dat.get('=.id')
yield n
def update_ip(self, net):
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
res_net_gw = self.find_ip(net, LIST_USERS_ALLOWED)
if not res_net_gw:
self.add_ip(LIST_USERS_ALLOWED, net)
def add_user_range(self, queue_list: i_structs.VectorQueue):
for q in queue_list:
self.add_user(q)
def remove_user_range(self, queues: i_structs.VectorQueue):
if not isinstance(queues, (tuple, list, set)):
            raise ValueError('queues is iterated twice; a generator will not work')
queue_ids = (q.queue_id for q in queues if q)
self.remove_queue_range(queue_ids)
for q in queues:
if isinstance(q, i_structs.SubnetQueue):
ip_list_entity = self.find_ip(q.network, LIST_USERS_ALLOWED)
if ip_list_entity:
self.remove_ip(ip_list_entity.get('=.id'))
def add_user(self, queue: i_structs.SubnetQueue, *args):
try:
self.add_queue(queue)
except core.NasFailedResult as e:
print('Error:', e)
net = queue.network
if not issubclass(net.__class__, _BaseNetwork):
raise TypeError
try:
self.add_ip(LIST_USERS_ALLOWED, net)
except core.NasFailedResult as e:
print('Error:', e)
def remove_user(self, queue: i_structs.SubnetQueue):
self.remove_queue(queue)
r = self.find_ip(queue.network, LIST_USERS_ALLOWED)
if r:
ip_id = r.get('=.id')
self.remove_ip(ip_id)
def update_user(self, queue: i_structs.SubnetQueue, *args):
if queue.is_access:
self.update_queue(queue)
self.update_ip(queue.network)
else:
self.remove_queue(queue)
res_ips = self.find_ip(queue.network, LIST_USERS_ALLOWED)
if res_ips:
self.remove_ip(res_ips.get('=.id'))
def ping(self, host, count=10) -> Optional[Tuple[int, int]]:
r = self._exec_cmd((
'/ip/arp/print',
'?address=%s' % host
))
if r == {}:
return
interface = r['!re'].get('=interface')
r = self._exec_cmd((
'/ping', '=address=%s' % host, '=arp-ping=yes', '=interval=100ms',
'=count=%d' % count,
'=interface=%s' % interface
))
res = r.get('!re')
if res is not None:
received, sent = int(res.get('=received')), int(res.get('=sent'))
return received, sent
def read_users(self) -> i_structs.VectorQueue:
return self.read_queue_iter()
def sync_nas(self, users_from_db: Iterator):
queues_from_db = (
ab.build_agent_struct() for ab in users_from_db
if ab is not None and ab.is_access()
)
queues_from_db = set(filter(lambda x: x is not None, queues_from_db))
queues_from_gw = self.read_queue_iter()
user_q_for_add, user_q_for_del = core.diff_set(queues_from_db,
set(queues_from_gw))
self.remove_queue_range(
(q.queue_id for q in user_q_for_del)
)
for q in user_q_for_add:
self.add_queue(q)
del user_q_for_add, user_q_for_del
db_nets = set(net.network for net in queues_from_db)
gw_nets = set(self.read_nets_iter(LIST_USERS_ALLOWED))
nets_add, nets_del = core.diff_set(db_nets, gw_nets)
self.remove_ip_range(
(q.queue_id for q in nets_del)
)
for q in nets_add:
self.add_ip(LIST_USERS_ALLOWED, q)
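# --- Illustrative sketch (editor's addition, not part of the original module).
# sync_nas() above hinges on a two-way set diff between the database view and
# the gateway view. The helper below is a hypothetical stand-in for
# core.diff_set, assuming it returns (items_to_add, items_to_delete); the real
# helper may differ. The diff only works if SubnetQueue defines __eq__ and
# __hash__ consistently for objects coming from both sources.
def diff_set(wanted: set, current: set):
    to_add = wanted - current   # in the database but missing on the gateway
    to_del = current - wanted   # on the gateway but gone from the database
    return to_add, to_del

# Usage with plain strings standing in for queue objects:
db_queues = {"q-10.0.0.1", "q-10.0.0.2"}
gw_queues = {"q-10.0.0.2", "q-10.0.0.9"}
add, delete = diff_set(db_queues, gw_queues)
assert add == {"q-10.0.0.1"} and delete == {"q-10.0.0.9"}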
| true
| true
|
f702572d7b68352d734ef7b7cf7598a239ad2912
| 4,653
|
py
|
Python
|
fabfile/deploy/__init__.py
|
cjlee112/socraticqs2
|
2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820
|
[
"Apache-2.0"
] | 8
|
2015-06-02T15:34:44.000Z
|
2019-03-21T12:27:30.000Z
|
fabfile/deploy/__init__.py
|
cjlee112/socraticqs2
|
2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820
|
[
"Apache-2.0"
] | 761
|
2015-01-07T05:13:08.000Z
|
2022-02-10T10:23:37.000Z
|
fabfile/deploy/__init__.py
|
cjlee112/socraticqs2
|
2e7dd9d2ec687f68ca8ca341cf5f1b3b8809c820
|
[
"Apache-2.0"
] | 12
|
2015-01-28T20:09:36.000Z
|
2018-03-20T13:32:11.000Z
|
# coding: utf-8
"""
Fabric tasks for deploying the project to servers (production, staging, development)
"""
import os
import sys
from contextlib import contextmanager
from fabric.contrib import django
from fabric.api import local, run, lcd, cd
from fabric.tasks import Task
from fab_settings import env
sys.path.append(os.path.dirname(__file__) + '/../../mysite/')
django.settings_module('mysite.settings')
STAGING_BRANCH = 'master'
BASE_PATH = os.path.dirname(__file__)
STAGING_HOST = 'staging.courselets.org'
def debug(*args, **kwargs):
output = ""
for x in args:
print(x)
output += str(x)
return output
@contextmanager
def debug_cd(path):
print("run on path:{0}".format(path))
yield
class Deploying(Task):
"""
Deploy project on Production
"""
func = local
func_cd = lcd
code_branch = STAGING_BRANCH
@property
def project_path(self):
return os.path.join(BASE_PATH, 'socraticqs2')
@property
def local_settings_path(self):
return os.path.join(self.project_path, '../settings')
def __virtualenv(self):
with self.func_cd(os.path.join(self.project_path, '../')):
self.func('source {}/bin/activate'.format(env.venv_name))
def update_requirements(self):
with self.func_cd(self.project_path):
self.func("sudo pip install -r requirements.txt")
def _get_settings(self, branch='master'):
with self.func_cd(self.local_settings_path):
self.func('git pull origin {0}'.format(branch))
self.func('cp production_conf.py ../socraticqs2/mysite/mysite/settings/production_conf.py')
def __restart_service(self):
self.func('sudo supervisorctl restart gunicorn')
self.func('sudo supervisorctl restart celery')
self.func('sudo service nginx restart')
@property
def __is_new_branch(self):
if self.func == run:
return self.code_branch in self.func('git branch')
else:
return self.code_branch in self.func('git branch', capture=True)
def __update(self):
if self.__is_new_branch:
self.func('git checkout {0} --force'.format(self.code_branch))
self.func('git pull origin {0} --force'.format(self.code_branch))
else:
self.func('git fetch origin')
self.func('git checkout -b {0} origin/{0}'.format(self.code_branch))
self._get_settings()
self.func('find . -name "*.pyc" -print -delete')
self.__virtualenv()
self.update_requirements()
with self.func_cd("mysite"):
self.func('python manage.py collectstatic --noinput')
self.func('python manage.py syncdb --noinput')
self.func('python manage.py fsm_deploy --noinput')
self.__restart_service()
def run(self, running='local', branch='master', suffix=None):
self.code_branch = branch
if running == 'local':
self.func = local
self.func_cd = lcd
self.__update()
elif running == 'remote':
self.func = run
self.func_cd = cd
env.hosts = [STAGING_HOST, ]
global BASE_PATH
BASE_PATH = env.project_root
with self.func_cd(self.project_path):
self.__update()
elif running == 'debug':
print("DEBUG:\n")
self.func = debug
self.func_cd = debug_cd
self.__update()
class Staging(Deploying):
"""Deploy on Staging"""
def _get_settings(self, branch='master'):
"""On dev/staging we don't use production settings"""
with self.func_cd(self.local_settings_path):
self.func('git pull origin {0} --force'.format(branch))
self.func('cp local_conf.py ../dev/socraticqs2/mysite/mysite/settings/local_conf.py')
class Development(Staging):
"""Deploy on Development server
Args:
running - deploy code local or in server(local/run)
branch - git branch name
Example:
fab deploy.dev:running='local', branch='dev'
"""
@property
def project_path(self):
if self.func == local:
return os.path.join(BASE_PATH, '../../../../dev')
else:
return os.path.join(BASE_PATH, 'dev/socraticqs2')
@property
def local_settings_path(self):
if self.func == local:
return os.path.join(self.project_path, '../settings')
else:
return os.path.join(self.project_path, '../../settings')
code_branch = 'dev'
prod = Deploying()
staging = Staging()
dev = Development()
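# --- Illustrative sketch (editor's addition, not part of the original fabfile).
# Deploying.run() swaps Fabric's local/lcd pair for run/cd at runtime, which is
# a small strategy pattern: pick the executor once, then keep the deploy steps
# identical. A framework-free sketch of the same idea (all names hypothetical):
def run_local(cmd):
    print("local$ %s" % cmd)

def run_remote(cmd):
    print("remote$ %s" % cmd)

class MiniDeploy:
    def __init__(self, running="local"):
        self.func = run_local if running == "local" else run_remote

    def update(self, branch="master"):
        self.func("git fetch origin")
        self.func("git checkout {0} --force".format(branch))
        self.func("pip install -r requirements.txt")

MiniDeploy("remote").update(branch="dev")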
| 28.722222
| 103
| 0.617451
|
import os
import sys
from contextlib import contextmanager
from fabric.contrib import django
from fabric.api import local, run, lcd, cd
from fabric.tasks import Task
from fab_settings import env
sys.path.append(os.path.dirname(__file__) + '/../../mysite/')
django.settings_module('mysite.settings')
STAGING_BRANCH = 'master'
BASE_PATH = os.path.dirname(__file__)
STAGING_HOST = 'staging.courselets.org'
def debug(*args, **kwargs):
output = ""
for x in args:
print(x)
output += str(x)
return output
@contextmanager
def debug_cd(path):
print("run on path:{0}".format(path))
yield
class Deploying(Task):
func = local
func_cd = lcd
code_branch = STAGING_BRANCH
@property
def project_path(self):
return os.path.join(BASE_PATH, 'socraticqs2')
@property
def local_settings_path(self):
return os.path.join(self.project_path, '../settings')
def __virtualenv(self):
with self.func_cd(os.path.join(self.project_path, '../')):
self.func('source {}/bin/activate'.format(env.venv_name))
def update_requirements(self):
with self.func_cd(self.project_path):
self.func("sudo pip install -r requirements.txt")
def _get_settings(self, branch='master'):
with self.func_cd(self.local_settings_path):
self.func('git pull origin {0}'.format(branch))
self.func('cp production_conf.py ../socraticqs2/mysite/mysite/settings/production_conf.py')
def __restart_service(self):
self.func('sudo supervisorctl restart gunicorn')
self.func('sudo supervisorctl restart celery')
self.func('sudo service nginx restart')
@property
def __is_new_branch(self):
if self.func == run:
return self.code_branch in self.func('git branch')
else:
return self.code_branch in self.func('git branch', capture=True)
def __update(self):
if self.__is_new_branch:
self.func('git checkout {0} --force'.format(self.code_branch))
self.func('git pull origin {0} --force'.format(self.code_branch))
else:
self.func('git fetch origin')
self.func('git checkout -b {0} origin/{0}'.format(self.code_branch))
self._get_settings()
self.func('find . -name "*.pyc" -print -delete')
self.__virtualenv()
self.update_requirements()
with self.func_cd("mysite"):
self.func('python manage.py collectstatic --noinput')
self.func('python manage.py syncdb --noinput')
self.func('python manage.py fsm_deploy --noinput')
self.__restart_service()
def run(self, running='local', branch='master', suffix=None):
self.code_branch = branch
if running == 'local':
self.func = local
self.func_cd = lcd
self.__update()
elif running == 'remote':
self.func = run
self.func_cd = cd
env.hosts = [STAGING_HOST, ]
global BASE_PATH
BASE_PATH = env.project_root
with self.func_cd(self.project_path):
self.__update()
elif running == 'debug':
print("DEBUG:\n")
self.func = debug
self.func_cd = debug_cd
self.__update()
class Staging(Deploying):
def _get_settings(self, branch='master'):
with self.func_cd(self.local_settings_path):
self.func('git pull origin {0} --force'.format(branch))
self.func('cp local_conf.py ../dev/socraticqs2/mysite/mysite/settings/local_conf.py')
class Development(Staging):
@property
def project_path(self):
if self.func == local:
return os.path.join(BASE_PATH, '../../../../dev')
else:
return os.path.join(BASE_PATH, 'dev/socraticqs2')
@property
def local_settings_path(self):
if self.func == local:
return os.path.join(self.project_path, '../settings')
else:
return os.path.join(self.project_path, '../../settings')
code_branch = 'dev'
prod = Deploying()
staging = Staging()
dev = Development()
| true
| true
|
f702586dfe5335199a783f1e4b598a01710c8e47
| 1,134
|
py
|
Python
|
isi_sdk_8_1_0/test/test_license_license_tier_entitlements_exceeded_alert.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_1_0/test/test_license_license_tier_entitlements_exceeded_alert.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_1_0/test/test_license_license_tier_entitlements_exceeded_alert.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 5
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_8_1_0
from isi_sdk_8_1_0.models.license_license_tier_entitlements_exceeded_alert import LicenseLicenseTierEntitlementsExceededAlert # noqa: E501
from isi_sdk_8_1_0.rest import ApiException
class TestLicenseLicenseTierEntitlementsExceededAlert(unittest.TestCase):
"""LicenseLicenseTierEntitlementsExceededAlert unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testLicenseLicenseTierEntitlementsExceededAlert(self):
"""Test LicenseLicenseTierEntitlementsExceededAlert"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_8_1_0.models.license_license_tier_entitlements_exceeded_alert.LicenseLicenseTierEntitlementsExceededAlert() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 27.658537
| 147
| 0.765432
|
from __future__ import absolute_import
import unittest
import isi_sdk_8_1_0
from isi_sdk_8_1_0.models.license_license_tier_entitlements_exceeded_alert import LicenseLicenseTierEntitlementsExceededAlert
from isi_sdk_8_1_0.rest import ApiException
class TestLicenseLicenseTierEntitlementsExceededAlert(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testLicenseLicenseTierEntitlementsExceededAlert(self):
pass
if __name__ == '__main__':
unittest.main()
| true
| true
|
f70258bb2c2a7d4122360a14a2ed82fa7d31690f
| 1,316
|
py
|
Python
|
tests/unit-tests/test_confluence_metadata.py
|
tonybaloney/sphinx-confluencebuilder
|
fd71fb5d5038b80fa3623dd673c198959dfbab03
|
[
"BSD-2-Clause"
] | 1
|
2019-03-09T22:02:52.000Z
|
2019-03-09T22:02:52.000Z
|
tests/unit-tests/test_confluence_metadata.py
|
tonybaloney/sphinx-confluencebuilder
|
fd71fb5d5038b80fa3623dd673c198959dfbab03
|
[
"BSD-2-Clause"
] | 17
|
2019-03-09T21:34:20.000Z
|
2019-03-15T08:42:43.000Z
|
tests/unit-tests/test_confluence_metadata.py
|
tonybaloney/sphinx-confluencebuilder
|
fd71fb5d5038b80fa3623dd673c198959dfbab03
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
:copyright: Copyright 2020-2022 Sphinx Confluence Builder Contributors (AUTHORS)
:license: BSD-2-Clause (LICENSE)
"""
from tests.lib.testcase import ConfluenceTestCase
from tests.lib.testcase import setup_builder
import os
class TestConfluenceMetadata(ConfluenceTestCase):
@classmethod
def setUpClass(cls):
super(TestConfluenceMetadata, cls).setUpClass()
cls.dataset = os.path.join(cls.datasets, 'metadata')
def test_confluence_metadata_directive_expected(self):
with self.prepare(self.dataset) as app:
app.build()
builder_metadata = app.builder.metadata
self.assertTrue(builder_metadata)
self.assertTrue('index' in builder_metadata)
doc_labels = builder_metadata['index']
self.assertTrue(doc_labels)
self.assertTrue('labels' in doc_labels)
labels = doc_labels['labels']
self.assertEqual(len(labels), 2)
self.assertTrue('tag-a' in labels)
self.assertTrue('tag-c' in labels)
@setup_builder('html')
def test_html_confluence_metadata_directive_ignore(self):
with self.prepare(self.dataset, relax=True) as app:
# build attempt should not throw an exception/error
app.build()
| 32.097561
| 80
| 0.668693
|
from tests.lib.testcase import ConfluenceTestCase
from tests.lib.testcase import setup_builder
import os
class TestConfluenceMetadata(ConfluenceTestCase):
@classmethod
def setUpClass(cls):
super(TestConfluenceMetadata, cls).setUpClass()
cls.dataset = os.path.join(cls.datasets, 'metadata')
def test_confluence_metadata_directive_expected(self):
with self.prepare(self.dataset) as app:
app.build()
builder_metadata = app.builder.metadata
self.assertTrue(builder_metadata)
self.assertTrue('index' in builder_metadata)
doc_labels = builder_metadata['index']
self.assertTrue(doc_labels)
self.assertTrue('labels' in doc_labels)
labels = doc_labels['labels']
self.assertEqual(len(labels), 2)
self.assertTrue('tag-a' in labels)
self.assertTrue('tag-c' in labels)
@setup_builder('html')
def test_html_confluence_metadata_directive_ignore(self):
with self.prepare(self.dataset, relax=True) as app:
app.build()
| true
| true
|
f70258c7ffe9e101969941678261484178a2e372
| 2,270
|
py
|
Python
|
testes/prototipo/gamePythonWS/server/server.py
|
wykke/duck-bomb
|
e1f8dd3352fd49b48b0d7f99fe72c7c2c65fb59a
|
[
"MIT"
] | 2
|
2020-03-09T17:45:09.000Z
|
2020-03-09T19:16:51.000Z
|
testes/prototipo/gamePythonWS/server/server.py
|
wykke/duck-bomb
|
e1f8dd3352fd49b48b0d7f99fe72c7c2c65fb59a
|
[
"MIT"
] | 7
|
2020-04-07T13:46:23.000Z
|
2020-07-06T23:41:43.000Z
|
testes/prototipo/gamePythonWS/server/server.py
|
wykke/duck-bomb
|
e1f8dd3352fd49b48b0d7f99fe72c7c2c65fb59a
|
[
"MIT"
] | null | null | null |
import asyncio
import websockets
import time
import threading
players = 0
class Player:
def __init__(self, id, x = 0, y = 0, speed = 5):
self.id = id
self.x = x
self.y = y
self.dirX = 0
self.dirY = 0
self.speed = speed
print("Player criado com sucesso!")
def setX(self, x):
self.x = x
def setY(self, y):
self.y = y
def getX(self):
return self.x
def getY(self):
return self.y
async def hello(websocket, path):
global players
jogador = Player(players, 500, 500)
    async def moveUP():
        while 1:
            jogador.setY(jogador.getY()-jogador.speed)
            # send() is a coroutine and must be awaited, as in moveR() below
            await websocket.send("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            print("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            await asyncio.sleep(1)  # time.sleep() would block the event loop
    async def moveR():
        while 1:
            jogador.setX(jogador.getX()+jogador.speed)
            await websocket.send("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            print("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            await asyncio.sleep(1)
def threadEvoque():
global players
loop = asyncio.new_event_loop()
task = loop.create_task(moveUP())
loop.run_until_complete(task)
players += 1
print(players)
def threadEvoque2():
global players
loop = asyncio.new_event_loop()
task2 = loop.create_task(moveR())
loop.run_until_complete(task2)
players += 1
print(players)
while 1:
msg = await websocket.recv()
print(msg)
if(msg == "start"):
players +=1
await websocket.send("spawn:"+str(players)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
print("spawn:"+str(players)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
start_server = websockets.serve(hello, "0.0.0.0", 8888)
print("Iniciando server...")
asyncio.get_event_loop().run_until_complete(start_server)
print("Sever em funcionamento!")
asyncio.get_event_loop().run_forever()
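# --- Illustrative sketch (editor's addition, not part of the original server).
# Minimal client for the server above, assuming it is reachable on
# ws://localhost:8888 and follows the "start" -> "spawn:<id>:<x>:<y>" protocol.
import asyncio
import websockets

async def play():
    async with websockets.connect("ws://localhost:8888") as ws:
        await ws.send("start")       # ask the server to spawn a player
        reply = await ws.recv()      # e.g. "spawn:1:500:500"
        print("server said:", reply)

asyncio.get_event_loop().run_until_complete(play())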
| 26.091954
| 106
| 0.54978
|
import asyncio
import websockets
import time
import threading
players = 0
class Player:
def __init__(self, id, x = 0, y = 0, speed = 5):
self.id = id
self.x = x
self.y = y
self.dirX = 0
self.dirY = 0
self.speed = speed
print("Player criado com sucesso!")
def setX(self, x):
self.x = x
def setY(self, y):
self.y = y
def getX(self):
return self.x
def getY(self):
return self.y
async def hello(websocket, path):
global players
jogador = Player(players, 500, 500)
    async def moveUP():
        while 1:
            jogador.setY(jogador.getY()-jogador.speed)
            await websocket.send("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            print("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            await asyncio.sleep(1)
    async def moveR():
        while 1:
            jogador.setX(jogador.getX()+jogador.speed)
            await websocket.send("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            print("move:"+str(jogador.id)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
            await asyncio.sleep(1)
def threadEvoque():
global players
loop = asyncio.new_event_loop()
task = loop.create_task(moveUP())
loop.run_until_complete(task)
players += 1
print(players)
def threadEvoque2():
global players
loop = asyncio.new_event_loop()
task2 = loop.create_task(moveR())
loop.run_until_complete(task2)
players += 1
print(players)
while 1:
msg = await websocket.recv()
print(msg)
if(msg == "start"):
players +=1
await websocket.send("spawn:"+str(players)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
print("spawn:"+str(players)+":"+ str(jogador.getX())+":"+str(jogador.getY()))
start_server = websockets.serve(hello, "0.0.0.0", 8888)
print("Iniciando server...")
asyncio.get_event_loop().run_until_complete(start_server)
print("Sever em funcionamento!")
asyncio.get_event_loop().run_forever()
| true
| true
|
f70258fcc3cd5eb7d5734b4a7f21593d187343a6
| 55
|
py
|
Python
|
eats/tests/behave/features/app/__init__.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | null | null | null |
eats/tests/behave/features/app/__init__.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | 5
|
2021-03-18T21:34:44.000Z
|
2022-03-11T23:35:23.000Z
|
eats/tests/behave/features/app/__init__.py
|
Etiqa/eats
|
8c8e2da93d0014f6fbb208185712c5526dba1174
|
[
"BSD-2-Clause"
] | null | null | null |
__author__ = 'pulphix'
from app import TestApplication
| 18.333333
| 31
| 0.818182
|
__author__ = 'pulphix'
from app import TestApplication
| true
| true
|
f70259449ecc1b15c11b417e7c81cc8cc9ba3771
| 422
|
py
|
Python
|
easy_thumbnails_admin/admin.py
|
Apkawa/easy-thumbnails-admin
|
9d7a38f215cdac53a663b00f1d4ff3a3c2a54eb4
|
[
"MIT"
] | null | null | null |
easy_thumbnails_admin/admin.py
|
Apkawa/easy-thumbnails-admin
|
9d7a38f215cdac53a663b00f1d4ff3a3c2a54eb4
|
[
"MIT"
] | null | null | null |
easy_thumbnails_admin/admin.py
|
Apkawa/easy-thumbnails-admin
|
9d7a38f215cdac53a663b00f1d4ff3a3c2a54eb4
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import unicode_literals
from django.contrib import admin
from .models import ThumbnailOption
from django.contrib.admin.widgets import AdminFileWidget
@admin.register(ThumbnailOption)
class ThumbnailOptionAdmin(admin.ModelAdmin):
fields = ['source', 'alias', 'options']
class ThumbnailOptionMixin(admin.ModelAdmin):
class Media:
pass
def media(self):
pass
| 20.095238
| 56
| 0.751185
|
from __future__ import unicode_literals
from django.contrib import admin
from .models import ThumbnailOption
from django.contrib.admin.widgets import AdminFileWidget
@admin.register(ThumbnailOption)
class ThumbnailOptionAdmin(admin.ModelAdmin):
fields = ['source', 'alias', 'options']
class ThumbnailOptionMixin(admin.ModelAdmin):
class Media:
pass
def media(self):
pass
| true
| true
|
f7025a2139b9ed3d7c6a408c7c308ae18dcece8f
| 1,177
|
py
|
Python
|
tests/signaltest.py
|
zibous/ha-miscale2
|
39486415208ea1c54620c0ed91d68b4167a93c4e
|
[
"MIT"
] | 25
|
2020-12-14T05:21:08.000Z
|
2022-03-28T21:08:06.000Z
|
tests/signaltest.py
|
zibous/ha-miscale2
|
39486415208ea1c54620c0ed91d68b4167a93c4e
|
[
"MIT"
] | null | null | null |
tests/signaltest.py
|
zibous/ha-miscale2
|
39486415208ea1c54620c0ed91d68b4167a93c4e
|
[
"MIT"
] | 2
|
2021-04-18T12:29:23.000Z
|
2021-07-27T15:09:09.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
sys.path.append("..")
try:
import os
import signal
import time
from conf import *
from lib import logger
from lib import mqtt
except Exception as e:
print(f"Import error: {str(e)} line {sys.exc_info()[-1].tb_lineno}, check requirements.txt")
sys.exit(1)
log = logger.Log("SignalTestcase", MI2_SHORTNAME, 10)
print(__name__, MI2_SHORTNAME, 20)
# store default handler of signal.SIGINT
# default_handler = signal.getsignal(signal.SIGINT)
def publish(state:str="Online"):
mqtt_client = mqtt.client()
if mqtt_client and mqtt_client.ready:
mqtt_client.publish_simple("tele/apptest/LWT", state, True)
def handler(signum, frame):
publish('Offline')
log.debug("Ende Application")
exit(0)
def main():
while True:
try:
            time.sleep(30)
        except Exception as e:
            log.error(f"Error while running the script: {str(e)}, line {sys.exc_info()[-1].tb_lineno}")
if __name__ == "__main__":
log.debug("Start Application")
signal.signal(signal.SIGINT, handler)
publish('Online')
main()
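# --- Illustrative sketch (editor's addition, not part of the original test).
# The same SIGINT -> publish-Offline -> exit pattern, stripped of the project
# dependencies so it runs standalone; prints stand in for the MQTT publishes.
import signal
import sys
import time

def on_sigint(signum, frame):
    print("publishing Offline, shutting down")
    sys.exit(0)

signal.signal(signal.SIGINT, on_sigint)
print("publishing Online, running (Ctrl-C to stop)")
while True:
    time.sleep(30)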
| 23.078431
| 104
| 0.646559
|
import sys
sys.path.append("..")
try:
import os
import signal
import time
from conf import *
from lib import logger
from lib import mqtt
except Exception as e:
print(f"Import error: {str(e)} line {sys.exc_info()[-1].tb_lineno}, check requirements.txt")
sys.exit(1)
log = logger.Log("SignalTestcase", MI2_SHORTNAME, 10)
print(__name__, MI2_SHORTNAME, 20)
def publish(state:str="Online"):
mqtt_client = mqtt.client()
if mqtt_client and mqtt_client.ready:
mqtt_client.publish_simple("tele/apptest/LWT", state, True)
def handler(signum, frame):
publish('Offline')
log.debug("Ende Application")
exit(0)
def main():
while True:
try:
            time.sleep(30)
        except Exception as e:
            log.error(f"Error while running the script: {str(e)}, line {sys.exc_info()[-1].tb_lineno}")
if __name__ == "__main__":
log.debug("Start Application")
signal.signal(signal.SIGINT, handler)
publish('Online')
main()
| true
| true
|
f7025a95efb3508f4614d10c34abe2268a9570db
| 1,043
|
py
|
Python
|
database/sec.py
|
chung-ejy/longshot
|
c9ff1a6586546b496ed6ba2f0b43ff9cd68fa15d
|
[
"MIT"
] | null | null | null |
database/sec.py
|
chung-ejy/longshot
|
c9ff1a6586546b496ed6ba2f0b43ff9cd68fa15d
|
[
"MIT"
] | null | null | null |
database/sec.py
|
chung-ejy/longshot
|
c9ff1a6586546b496ed6ba2f0b43ff9cd68fa15d
|
[
"MIT"
] | 1
|
2021-08-05T02:12:51.000Z
|
2021-08-05T02:12:51.000Z
|
from database.adatabase import ADatabase
import pandas as pd
class SEC(ADatabase):
def __init__(self):
super().__init__("sec")
def retrieve_num_data(self,adsh):
try:
db = self.client[self.name]
table = db["nums"]
data = table.find({"adsh":adsh},{"_id":0},show_record_id=False)
return pd.DataFrame(list(data))
except Exception as e:
print(str(e))
def retrieve_filing_data(self,cik):
try:
db = self.client[self.name]
table = db["filings"]
data = table.find({"cik":cik},{"_id":0},show_record_id=False)
return pd.DataFrame(list(data))
except Exception as e:
print(str(e))
def retrieve_adshs(self):
try:
db = self.client[self.name]
table = db["filings"]
data = table.find({},{"_id":0,"adsh":1},show_record_id=False)
return pd.DataFrame(list(data))
except Exception as e:
print(str(e))
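# --- Illustrative sketch (editor's addition, not part of the original module).
# Hedged usage of the class above; it assumes ADatabase wires self.client to a
# live MongoDB instance (check the base class for connection handling).
sec = SEC()
adshs = sec.retrieve_adshs()              # DataFrame with a single "adsh" column
if adshs is not None and not adshs.empty:
    first = adshs["adsh"].iloc[0]
    nums = sec.retrieve_num_data(first)   # numeric facts for that filing
    print(nums.head())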
| 31.606061
| 75
| 0.5465
|
from database.adatabase import ADatabase
import pandas as pd
class SEC(ADatabase):
def __init__(self):
super().__init__("sec")
def retrieve_num_data(self,adsh):
try:
db = self.client[self.name]
table = db["nums"]
data = table.find({"adsh":adsh},{"_id":0},show_record_id=False)
return pd.DataFrame(list(data))
except Exception as e:
print(str(e))
def retrieve_filing_data(self,cik):
try:
db = self.client[self.name]
table = db["filings"]
data = table.find({"cik":cik},{"_id":0},show_record_id=False)
return pd.DataFrame(list(data))
except Exception as e:
print(str(e))
def retrieve_adshs(self):
try:
db = self.client[self.name]
table = db["filings"]
data = table.find({},{"_id":0,"adsh":1},show_record_id=False)
return pd.DataFrame(list(data))
except Exception as e:
print(str(e))
| true
| true
|
f7025aa46119097ebc8650ad1c02f9485349b539
| 605
|
py
|
Python
|
supermario/supermario 1105(2)/start_state.py
|
Kimmiryeong/2DGP_GameProject
|
ad3fb197aab27227fc92fd404b2c310f8d0827ca
|
[
"MIT"
] | null | null | null |
supermario/supermario 1105(2)/start_state.py
|
Kimmiryeong/2DGP_GameProject
|
ad3fb197aab27227fc92fd404b2c310f8d0827ca
|
[
"MIT"
] | null | null | null |
supermario/supermario 1105(2)/start_state.py
|
Kimmiryeong/2DGP_GameProject
|
ad3fb197aab27227fc92fd404b2c310f8d0827ca
|
[
"MIT"
] | null | null | null |
import game_framework
from pico2d import *
import title_state
name = "StartState"
image = None
logo_time = 0.0
def enter():
global image
image = load_image('kpu_credit.png')
def exit():
global image
    del image
def update():
global logo_time
if (logo_time > 1.0):
logo_time = 0.8
game_framework.change_state(title_state)
delay(0.01)
logo_time += 0.05
def draw():
global image
clear_canvas()
image.draw(400,300)
update_canvas()
def handle_events():
events = get_events()
pass
def pause(): pass
def resume(): pass
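# --- Illustrative sketch (editor's addition, not part of the original state).
# start_state follows the module-as-state convention: a state exposes
# enter/exit/update/draw/handle_events, and game_framework.change_state swaps
# between states. A toy version of that contract (hypothetical, not the real
# game_framework):
class MiniFramework:
    def __init__(self, start):
        self.state = start
        self.state.enter()

    def change_state(self, new_state):
        self.state.exit()      # free the old state's resources
        self.state = new_state
        self.state.enter()     # load the new state's resources

class StubState:
    def __init__(self, name): self.name = name
    def enter(self): print("enter", self.name)
    def exit(self): print("exit", self.name)

fw = MiniFramework(StubState("start"))
fw.change_state(StubState("title"))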
| 11.634615
| 48
| 0.634711
|
import game_framework
from pico2d import *
import title_state
name = "StartState"
image = None
logo_time = 0.0
def enter():
global image
image = load_image('kpu_credit.png')
def exit():
global image
    del image
def update():
global logo_time
if (logo_time > 1.0):
logo_time = 0.8
game_framework.change_state(title_state)
delay(0.01)
logo_time += 0.05
def draw():
global image
clear_canvas()
image.draw(400,300)
update_canvas()
def handle_events():
events = get_events()
pass
def pause(): pass
def resume(): pass
| true
| true
|
f7025b188f8b1f8584c33840874bfd12437bb343
| 489
|
py
|
Python
|
Apuntes/Preprocess/Missing Data.py
|
Aaronga19/MachineLearning-A-Z
|
e8e27f0a31ac3e3c05d4029e6e5e14ac8a911153
|
[
"MIT"
] | null | null | null |
Apuntes/Preprocess/Missing Data.py
|
Aaronga19/MachineLearning-A-Z
|
e8e27f0a31ac3e3c05d4029e6e5e14ac8a911153
|
[
"MIT"
] | null | null | null |
Apuntes/Preprocess/Missing Data.py
|
Aaronga19/MachineLearning-A-Z
|
e8e27f0a31ac3e3c05d4029e6e5e14ac8a911153
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 13 20:30:46 2020
@author: Aaronga
"""
# Missing data
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv("Data.csv")
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, 3].values
# Handling the NaNs
from sklearn.preprocessing import Imputer
imputer = Imputer(missing_values="NaN", strategy="mean", axis = 0)
imputer = imputer.fit(X[:, 1:3])
X[:, 1:3]= imputer.transform(X[:,1:3])
print(X)
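# --- Illustrative sketch (editor's addition, not part of the original script).
# sklearn.preprocessing.Imputer was removed in scikit-learn 0.22; the modern
# equivalent of the block above is SimpleImputer (no axis argument, it always
# imputes column-wise):
import numpy as np
from sklearn.impute import SimpleImputer

X_demo = np.array([[1.0, 2.0], [np.nan, 3.0], [7.0, np.nan]])
imputer = SimpleImputer(missing_values=np.nan, strategy="mean")
print(imputer.fit_transform(X_demo))   # NaNs replaced by the column means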
| 22.227273
| 66
| 0.687117
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv("Data.csv")
X = dataset.iloc[:, :-1].values
y = dataset.iloc[:, 3].values
from sklearn.preprocessing import Imputer
imputer = Imputer(missing_values="NaN", strategy="mean", axis = 0)
imputer = imputer.fit(X[:, 1:3])
X[:, 1:3]= imputer.transform(X[:,1:3])
print(X)
| true
| true
|
f7025b360586c29b40347acb0683ce9ba02e33c8
| 5,930
|
py
|
Python
|
test/sql/test_case_statement.py
|
Thhhza/sqlalchemy
|
f2b267043e17b2b769dc2a5b8139f6be2a3d4e84
|
[
"MIT"
] | 1
|
2018-04-02T18:41:52.000Z
|
2018-04-02T18:41:52.000Z
|
test/sql/test_case_statement.py
|
Thhhza/sqlalchemy
|
f2b267043e17b2b769dc2a5b8139f6be2a3d4e84
|
[
"MIT"
] | null | null | null |
test/sql/test_case_statement.py
|
Thhhza/sqlalchemy
|
f2b267043e17b2b769dc2a5b8139f6be2a3d4e84
|
[
"MIT"
] | 3
|
2017-09-26T13:59:24.000Z
|
2020-12-04T17:51:54.000Z
|
from sqlalchemy.testing import assert_raises, eq_
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import (
testing, exc, case, select, literal_column, text, and_, Integer, cast,
String, Column, Table, MetaData)
from sqlalchemy.sql import table, column
info_table = None
class CaseTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def setup_class(cls):
metadata = MetaData(testing.db)
global info_table
info_table = Table(
'infos', metadata,
Column('pk', Integer, primary_key=True),
Column('info', String(30)))
info_table.create()
info_table.insert().execute(
{'pk': 1, 'info': 'pk_1_data'},
{'pk': 2, 'info': 'pk_2_data'},
{'pk': 3, 'info': 'pk_3_data'},
{'pk': 4, 'info': 'pk_4_data'},
{'pk': 5, 'info': 'pk_5_data'},
{'pk': 6, 'info': 'pk_6_data'})
@classmethod
def teardown_class(cls):
info_table.drop()
@testing.fails_on('firebird', 'FIXME: unknown')
@testing.requires.subqueries
def test_case(self):
inner = select(
[
case(
[
[info_table.c.pk < 3, 'lessthan3'],
[
and_(info_table.c.pk >= 3, info_table.c.pk < 7),
'gt3']]).label('x'),
info_table.c.pk, info_table.c.info], from_obj=[info_table])
inner_result = inner.execute().fetchall()
# Outputs:
# lessthan3 1 pk_1_data
# lessthan3 2 pk_2_data
# gt3 3 pk_3_data
# gt3 4 pk_4_data
# gt3 5 pk_5_data
# gt3 6 pk_6_data
assert inner_result == [
('lessthan3', 1, 'pk_1_data'),
('lessthan3', 2, 'pk_2_data'),
('gt3', 3, 'pk_3_data'),
('gt3', 4, 'pk_4_data'),
('gt3', 5, 'pk_5_data'),
('gt3', 6, 'pk_6_data')
]
outer = select([inner.alias('q_inner')])
outer_result = outer.execute().fetchall()
assert outer_result == [
('lessthan3', 1, 'pk_1_data'),
('lessthan3', 2, 'pk_2_data'),
('gt3', 3, 'pk_3_data'),
('gt3', 4, 'pk_4_data'),
('gt3', 5, 'pk_5_data'),
('gt3', 6, 'pk_6_data')
]
w_else = select(
[
case(
[
[info_table.c.pk < 3, cast(3, Integer)],
[
and_(
info_table.c.pk >= 3, info_table.c.pk < 6),
6]],
else_=0).label('x'),
info_table.c.pk, info_table.c.info],
from_obj=[info_table])
else_result = w_else.execute().fetchall()
assert else_result == [
(3, 1, 'pk_1_data'),
(3, 2, 'pk_2_data'),
(6, 3, 'pk_3_data'),
(6, 4, 'pk_4_data'),
(6, 5, 'pk_5_data'),
(0, 6, 'pk_6_data')
]
def test_literal_interpretation(self):
t = table('test', column('col1'))
assert_raises(exc.ArgumentError, case, [("x", "y")])
self.assert_compile(
case([("x", "y")], value=t.c.col1),
"CASE test.col1 WHEN :param_1 THEN :param_2 END")
self.assert_compile(
case([(t.c.col1 == 7, "y")], else_="z"),
"CASE WHEN (test.col1 = :col1_1) THEN :param_1 ELSE :param_2 END")
def test_text_doesnt_explode(self):
for s in [
select(
[
case(
[
(
info_table.c.info == 'pk_4_data',
text("'yes'"))],
else_=text("'no'"))
]).order_by(info_table.c.info),
select(
[
case(
[
(
info_table.c.info == 'pk_4_data',
literal_column("'yes'"))],
else_=literal_column("'no'")
)]
).order_by(info_table.c.info),
]:
if testing.against("firebird"):
eq_(s.execute().fetchall(), [
('no ', ), ('no ', ), ('no ', ), ('yes', ),
('no ', ), ('no ', ),
])
else:
eq_(s.execute().fetchall(), [
('no', ), ('no', ), ('no', ), ('yes', ),
('no', ), ('no', ),
])
@testing.fails_on('firebird', 'FIXME: unknown')
def testcase_with_dict(self):
query = select(
[
case(
{
info_table.c.pk < 3: 'lessthan3',
info_table.c.pk >= 3: 'gt3',
}, else_='other'),
info_table.c.pk, info_table.c.info
],
from_obj=[info_table])
assert query.execute().fetchall() == [
('lessthan3', 1, 'pk_1_data'),
('lessthan3', 2, 'pk_2_data'),
('gt3', 3, 'pk_3_data'),
('gt3', 4, 'pk_4_data'),
('gt3', 5, 'pk_5_data'),
('gt3', 6, 'pk_6_data')
]
simple_query = select(
[
case(
{1: 'one', 2: 'two', },
value=info_table.c.pk, else_='other'),
info_table.c.pk
],
whereclause=info_table.c.pk < 4,
from_obj=[info_table])
assert simple_query.execute().fetchall() == [
('one', 1),
('two', 2),
('other', 3),
]
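# --- Illustrative sketch (editor's addition, not part of the original tests).
# The list-based case([...], else_=...) signature used throughout this file is
# the pre-2.0 SQLAlchemy form; compiling an expression shows the SQL it emits:
from sqlalchemy import case, column

pk = column("pk")
expr = case([(pk < 3, "lessthan3")], else_="gt3")
print(expr)   # roughly: CASE WHEN (pk < :pk_1) THEN :param_1 ELSE :param_2 END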
| 31.375661
| 78
| 0.417707
|
from sqlalchemy.testing import assert_raises, eq_
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import (
testing, exc, case, select, literal_column, text, and_, Integer, cast,
String, Column, Table, MetaData)
from sqlalchemy.sql import table, column
info_table = None
class CaseTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def setup_class(cls):
metadata = MetaData(testing.db)
global info_table
info_table = Table(
'infos', metadata,
Column('pk', Integer, primary_key=True),
Column('info', String(30)))
info_table.create()
info_table.insert().execute(
{'pk': 1, 'info': 'pk_1_data'},
{'pk': 2, 'info': 'pk_2_data'},
{'pk': 3, 'info': 'pk_3_data'},
{'pk': 4, 'info': 'pk_4_data'},
{'pk': 5, 'info': 'pk_5_data'},
{'pk': 6, 'info': 'pk_6_data'})
@classmethod
def teardown_class(cls):
info_table.drop()
@testing.fails_on('firebird', 'FIXME: unknown')
@testing.requires.subqueries
def test_case(self):
inner = select(
[
case(
[
[info_table.c.pk < 3, 'lessthan3'],
[
and_(info_table.c.pk >= 3, info_table.c.pk < 7),
'gt3']]).label('x'),
info_table.c.pk, info_table.c.info], from_obj=[info_table])
inner_result = inner.execute().fetchall()
assert inner_result == [
('lessthan3', 1, 'pk_1_data'),
('lessthan3', 2, 'pk_2_data'),
('gt3', 3, 'pk_3_data'),
('gt3', 4, 'pk_4_data'),
('gt3', 5, 'pk_5_data'),
('gt3', 6, 'pk_6_data')
]
outer = select([inner.alias('q_inner')])
outer_result = outer.execute().fetchall()
assert outer_result == [
('lessthan3', 1, 'pk_1_data'),
('lessthan3', 2, 'pk_2_data'),
('gt3', 3, 'pk_3_data'),
('gt3', 4, 'pk_4_data'),
('gt3', 5, 'pk_5_data'),
('gt3', 6, 'pk_6_data')
]
w_else = select(
[
case(
[
[info_table.c.pk < 3, cast(3, Integer)],
[
and_(
info_table.c.pk >= 3, info_table.c.pk < 6),
6]],
else_=0).label('x'),
info_table.c.pk, info_table.c.info],
from_obj=[info_table])
else_result = w_else.execute().fetchall()
assert else_result == [
(3, 1, 'pk_1_data'),
(3, 2, 'pk_2_data'),
(6, 3, 'pk_3_data'),
(6, 4, 'pk_4_data'),
(6, 5, 'pk_5_data'),
(0, 6, 'pk_6_data')
]
def test_literal_interpretation(self):
t = table('test', column('col1'))
assert_raises(exc.ArgumentError, case, [("x", "y")])
self.assert_compile(
case([("x", "y")], value=t.c.col1),
"CASE test.col1 WHEN :param_1 THEN :param_2 END")
self.assert_compile(
case([(t.c.col1 == 7, "y")], else_="z"),
"CASE WHEN (test.col1 = :col1_1) THEN :param_1 ELSE :param_2 END")
def test_text_doesnt_explode(self):
for s in [
select(
[
case(
[
(
info_table.c.info == 'pk_4_data',
text("'yes'"))],
else_=text("'no'"))
]).order_by(info_table.c.info),
select(
[
case(
[
(
info_table.c.info == 'pk_4_data',
literal_column("'yes'"))],
else_=literal_column("'no'")
)]
).order_by(info_table.c.info),
]:
if testing.against("firebird"):
eq_(s.execute().fetchall(), [
('no ', ), ('no ', ), ('no ', ), ('yes', ),
('no ', ), ('no ', ),
])
else:
eq_(s.execute().fetchall(), [
('no', ), ('no', ), ('no', ), ('yes', ),
('no', ), ('no', ),
])
@testing.fails_on('firebird', 'FIXME: unknown')
def testcase_with_dict(self):
query = select(
[
case(
{
info_table.c.pk < 3: 'lessthan3',
info_table.c.pk >= 3: 'gt3',
}, else_='other'),
info_table.c.pk, info_table.c.info
],
from_obj=[info_table])
assert query.execute().fetchall() == [
('lessthan3', 1, 'pk_1_data'),
('lessthan3', 2, 'pk_2_data'),
('gt3', 3, 'pk_3_data'),
('gt3', 4, 'pk_4_data'),
('gt3', 5, 'pk_5_data'),
('gt3', 6, 'pk_6_data')
]
simple_query = select(
[
case(
{1: 'one', 2: 'two', },
value=info_table.c.pk, else_='other'),
info_table.c.pk
],
whereclause=info_table.c.pk < 4,
from_obj=[info_table])
assert simple_query.execute().fetchall() == [
('one', 1),
('two', 2),
('other', 3),
]
| true
| true
|
f7025b8b75bfdaa2c9f531221732fb0a9450c662
| 6,087
|
py
|
Python
|
sdks/python/apache_beam/runners/interactive/user_pipeline_tracker_test.py
|
NarimanAB/beam
|
6cedbac5bb42304f4af88634edd276b0b78e4e4e
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 5,279
|
2016-12-29T04:00:44.000Z
|
2022-03-31T22:56:45.000Z
|
sdks/python/apache_beam/runners/interactive/user_pipeline_tracker_test.py
|
NarimanAB/beam
|
6cedbac5bb42304f4af88634edd276b0b78e4e4e
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 14,149
|
2016-12-28T00:43:50.000Z
|
2022-03-31T23:50:22.000Z
|
sdks/python/apache_beam/runners/interactive/user_pipeline_tracker_test.py
|
NarimanAB/beam
|
6cedbac5bb42304f4af88634edd276b0b78e4e4e
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3,763
|
2016-12-29T04:06:10.000Z
|
2022-03-31T22:25:49.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
import apache_beam as beam
from apache_beam.runners.interactive.user_pipeline_tracker import UserPipelineTracker
class UserPipelineTrackerTest(unittest.TestCase):
def test_getting_unknown_pid_returns_none(self):
ut = UserPipelineTracker()
p = beam.Pipeline()
self.assertIsNone(ut.get_pipeline(str(id(p))))
def test_getting_unknown_pipeline_returns_none(self):
ut = UserPipelineTracker()
p = beam.Pipeline()
self.assertIsNone(ut.get_user_pipeline(p))
def test_no_parent_returns_none(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
orphan = beam.Pipeline()
ut.add_derived_pipeline(user, derived)
self.assertIsNone(ut.get_user_pipeline(orphan))
def test_get_user_pipeline_is_same(self):
ut = UserPipelineTracker()
p = beam.Pipeline()
ut.add_user_pipeline(p)
self.assertIs(ut.get_user_pipeline(p), p)
def test_can_add_derived(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
ut.add_derived_pipeline(user, derived)
self.assertIs(ut.get_user_pipeline(derived), user)
def test_can_add_multiple_derived(self):
"""Tests that there can be many user pipelines with many derived
pipelines.
"""
ut = UserPipelineTracker()
# Add the first set of user and derived pipelines.
user1 = beam.Pipeline()
derived11 = beam.Pipeline()
derived12 = beam.Pipeline()
ut.add_derived_pipeline(user1, derived11)
ut.add_derived_pipeline(user1, derived12)
# Add the second set of user and derived pipelines.
user2 = beam.Pipeline()
derived21 = beam.Pipeline()
derived22 = beam.Pipeline()
ut.add_derived_pipeline(user2, derived21)
ut.add_derived_pipeline(user2, derived22)
# Assert that the user pipelines are correct.
self.assertIs(ut.get_user_pipeline(derived11), user1)
self.assertIs(ut.get_user_pipeline(derived12), user1)
self.assertIs(ut.get_user_pipeline(derived21), user2)
self.assertIs(ut.get_user_pipeline(derived22), user2)
def test_cannot_have_multiple_parents(self):
ut = UserPipelineTracker()
user1 = beam.Pipeline()
user2 = beam.Pipeline()
derived = beam.Pipeline()
ut.add_derived_pipeline(user1, derived)
with self.assertRaises(AssertionError):
ut.add_derived_pipeline(user2, derived)
self.assertIs(ut.get_user_pipeline(derived), user1)
def test_adding_derived_with_derived_gets_user_pipeline(self):
"""Tests that one can correctly add a derived pipeline from a derived
pipeline and still get the correct user pipeline.
"""
ut = UserPipelineTracker()
user = beam.Pipeline()
derived1 = beam.Pipeline()
derived2 = beam.Pipeline()
    # Add the first derived pipeline to the user pipeline.
ut.add_derived_pipeline(user, derived1)
# Add the second derived pipeline to the first derived pipeline. This should
# get the user pipeline of the first and add the second to it.
ut.add_derived_pipeline(derived1, derived2)
# Asserts that both derived pipelines are under the same user pipeline.
self.assertIs(ut.get_user_pipeline(derived1), user)
self.assertIs(ut.get_user_pipeline(derived2), user)
def test_can_get_pipeline_from_id(self):
"""Tests the pid -> pipeline memoization."""
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
ut.add_user_pipeline(user)
ut.add_derived_pipeline(user, derived)
self.assertIs(ut.get_pipeline(str(id(user))), user)
self.assertIs(ut.get_pipeline(str(id(derived))), derived)
def test_clear(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
ut.add_derived_pipeline(user, derived)
self.assertIs(ut.get_user_pipeline(derived), user)
ut.clear()
self.assertIsNone(ut.get_user_pipeline(user))
self.assertIsNone(ut.get_user_pipeline(derived))
def test_can_iterate(self):
ut = UserPipelineTracker()
user1 = beam.Pipeline()
derived11 = beam.Pipeline()
derived12 = beam.Pipeline()
ut.add_derived_pipeline(user1, derived11)
ut.add_derived_pipeline(user1, derived12)
user2 = beam.Pipeline()
derived21 = beam.Pipeline()
derived22 = beam.Pipeline()
ut.add_derived_pipeline(user2, derived21)
ut.add_derived_pipeline(user2, derived22)
user_pipelines = set(p for p in ut)
self.assertSetEqual(set([user1, user2]), user_pipelines)
def test_can_evict_user_pipeline(self):
ut = UserPipelineTracker()
user1 = beam.Pipeline()
derived11 = beam.Pipeline()
derived12 = beam.Pipeline()
ut.add_derived_pipeline(user1, derived11)
ut.add_derived_pipeline(user1, derived12)
user2 = beam.Pipeline()
derived21 = beam.Pipeline()
derived22 = beam.Pipeline()
ut.add_derived_pipeline(user2, derived21)
ut.add_derived_pipeline(user2, derived22)
ut.evict(user1)
self.assertIsNone(ut.get_user_pipeline(user1))
self.assertIsNone(ut.get_user_pipeline(derived11))
self.assertIsNone(ut.get_user_pipeline(derived12))
self.assertIs(user2, ut.get_user_pipeline(derived21))
self.assertIs(user2, ut.get_user_pipeline(derived22))
if __name__ == '__main__':
unittest.main()
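# --- Illustrative sketch (editor's addition, not part of the original tests).
# The tests above pin down the tracker's contract; a toy implementation that
# satisfies the behaviors exercised here (user/derived registration, id lookup,
# single-parent rule) might look like this. clear() and evict() are omitted for
# brevity, and this is not the real Beam class.
class ToyTracker:
    def __init__(self):
        self._parent = {}   # pipeline -> its user (root) pipeline
        self._by_id = {}    # str(id(pipeline)) -> pipeline

    def add_user_pipeline(self, p):
        self._parent.setdefault(p, p)
        self._by_id[str(id(p))] = p

    def add_derived_pipeline(self, maybe_user, derived):
        root = self._parent.get(maybe_user, maybe_user)
        assert self._parent.get(derived, root) is root  # no second parent
        self.add_user_pipeline(root)
        self._parent[derived] = root
        self._by_id[str(id(derived))] = derived

    def get_user_pipeline(self, p):
        return self._parent.get(p)

    def get_pipeline(self, pid):
        return self._by_id.get(pid)

class StubPipeline:  # stand-in for beam.Pipeline
    pass

t = ToyTracker()
u, d = StubPipeline(), StubPipeline()
t.add_derived_pipeline(u, d)
assert t.get_user_pipeline(d) is u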
| 29.264423
| 85
| 0.730738
|
import unittest
import apache_beam as beam
from apache_beam.runners.interactive.user_pipeline_tracker import UserPipelineTracker
class UserPipelineTrackerTest(unittest.TestCase):
def test_getting_unknown_pid_returns_none(self):
ut = UserPipelineTracker()
p = beam.Pipeline()
self.assertIsNone(ut.get_pipeline(str(id(p))))
def test_getting_unknown_pipeline_returns_none(self):
ut = UserPipelineTracker()
p = beam.Pipeline()
self.assertIsNone(ut.get_user_pipeline(p))
def test_no_parent_returns_none(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
orphan = beam.Pipeline()
ut.add_derived_pipeline(user, derived)
self.assertIsNone(ut.get_user_pipeline(orphan))
def test_get_user_pipeline_is_same(self):
ut = UserPipelineTracker()
p = beam.Pipeline()
ut.add_user_pipeline(p)
self.assertIs(ut.get_user_pipeline(p), p)
def test_can_add_derived(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
ut.add_derived_pipeline(user, derived)
self.assertIs(ut.get_user_pipeline(derived), user)
def test_can_add_multiple_derived(self):
ut = UserPipelineTracker()
user1 = beam.Pipeline()
derived11 = beam.Pipeline()
derived12 = beam.Pipeline()
ut.add_derived_pipeline(user1, derived11)
ut.add_derived_pipeline(user1, derived12)
user2 = beam.Pipeline()
derived21 = beam.Pipeline()
derived22 = beam.Pipeline()
ut.add_derived_pipeline(user2, derived21)
ut.add_derived_pipeline(user2, derived22)
self.assertIs(ut.get_user_pipeline(derived11), user1)
self.assertIs(ut.get_user_pipeline(derived12), user1)
self.assertIs(ut.get_user_pipeline(derived21), user2)
self.assertIs(ut.get_user_pipeline(derived22), user2)
def test_cannot_have_multiple_parents(self):
ut = UserPipelineTracker()
user1 = beam.Pipeline()
user2 = beam.Pipeline()
derived = beam.Pipeline()
ut.add_derived_pipeline(user1, derived)
with self.assertRaises(AssertionError):
ut.add_derived_pipeline(user2, derived)
self.assertIs(ut.get_user_pipeline(derived), user1)
def test_adding_derived_with_derived_gets_user_pipeline(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived1 = beam.Pipeline()
derived2 = beam.Pipeline()
ut.add_derived_pipeline(user, derived1)
ut.add_derived_pipeline(derived1, derived2)
self.assertIs(ut.get_user_pipeline(derived1), user)
self.assertIs(ut.get_user_pipeline(derived2), user)
def test_can_get_pipeline_from_id(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
ut.add_user_pipeline(user)
ut.add_derived_pipeline(user, derived)
self.assertIs(ut.get_pipeline(str(id(user))), user)
self.assertIs(ut.get_pipeline(str(id(derived))), derived)
def test_clear(self):
ut = UserPipelineTracker()
user = beam.Pipeline()
derived = beam.Pipeline()
ut.add_derived_pipeline(user, derived)
self.assertIs(ut.get_user_pipeline(derived), user)
ut.clear()
self.assertIsNone(ut.get_user_pipeline(user))
self.assertIsNone(ut.get_user_pipeline(derived))
def test_can_iterate(self):
ut = UserPipelineTracker()
user1 = beam.Pipeline()
derived11 = beam.Pipeline()
derived12 = beam.Pipeline()
ut.add_derived_pipeline(user1, derived11)
ut.add_derived_pipeline(user1, derived12)
user2 = beam.Pipeline()
derived21 = beam.Pipeline()
derived22 = beam.Pipeline()
ut.add_derived_pipeline(user2, derived21)
ut.add_derived_pipeline(user2, derived22)
user_pipelines = set(p for p in ut)
self.assertSetEqual(set([user1, user2]), user_pipelines)
def test_can_evict_user_pipeline(self):
ut = UserPipelineTracker()
user1 = beam.Pipeline()
derived11 = beam.Pipeline()
derived12 = beam.Pipeline()
ut.add_derived_pipeline(user1, derived11)
ut.add_derived_pipeline(user1, derived12)
user2 = beam.Pipeline()
derived21 = beam.Pipeline()
derived22 = beam.Pipeline()
ut.add_derived_pipeline(user2, derived21)
ut.add_derived_pipeline(user2, derived22)
ut.evict(user1)
self.assertIsNone(ut.get_user_pipeline(user1))
self.assertIsNone(ut.get_user_pipeline(derived11))
self.assertIsNone(ut.get_user_pipeline(derived12))
self.assertIs(user2, ut.get_user_pipeline(derived21))
self.assertIs(user2, ut.get_user_pipeline(derived22))
if __name__ == '__main__':
unittest.main()
| true
| true
|
f7025c78c757691bec4e0501ffe984904b95dd4a
| 619
|
py
|
Python
|
Python/PYHTON/Variaveis/DSA - Variaveis.py
|
ccpn1988/Python
|
94c84aa6f3ef9d05d64c3d87212dfd67694f4544
|
[
"MIT"
] | null | null | null |
Python/PYHTON/Variaveis/DSA - Variaveis.py
|
ccpn1988/Python
|
94c84aa6f3ef9d05d64c3d87212dfd67694f4544
|
[
"MIT"
] | null | null | null |
Python/PYHTON/Variaveis/DSA - Variaveis.py
|
ccpn1988/Python
|
94c84aa6f3ef9d05d64c3d87212dfd67694f4544
|
[
"MIT"
] | null | null | null |
# VARIABLES
# ASSIGNING A VALUE TO A VARIABLE
var_teste = 1
print(var_teste)
print(type(var_teste))
# MULTIPLE ASSIGNMENT
pessoa1, pessoa2, pessoa3 = 'Jose', 'Joao', 'Maria'
print(pessoa1)
print(pessoa2)
print(pessoa3)
# CHAINED ASSIGNMENT
pessoa1 = pessoa2 = pessoa3 = 'Jose'
print(pessoa1)
print(pessoa2)
print(pessoa3)
# OPERATIONS WITH VARIABLES
idade = 32
idade1 = 28
print('SUM', idade + idade1)
print('SUBTRACTION', idade - idade1)
print('MULTIPLICATION', idade * idade1)
print('DIVISION', idade / idade1)
print('POWER', idade ** idade1)
print('INTEGER DIVISION', idade // idade1)
print('REMAINDER', idade % idade1)
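# --- Illustrative addition (editor's note, not part of the original lesson).
# Floor division and remainder together are exactly what divmod returns:
a, b = 32, 28
print('DIVMOD', divmod(a, b))    # (1, 4)
print(a // b, a % b)             # 1 4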
| 19.967742
| 50
| 0.74475
|
var_teste = 1
print(var_teste)
print(type(var_teste))
pessoa1, pessoa2, pessoa3 = 'Jose', 'Joao','Maria'
print(pessoa1)
print(pessoa2)
print(pessoa3)
pessoa1=pessoa2=pessoa3 = 'Jose'
print(pessoa1)
print(pessoa2)
print(pessoa3)
idade = 32
idade1 = 28
print('SUM', idade + idade1)
print('SUBTRACTION', idade - idade1)
print('MULTIPLICATION', idade * idade1)
print('DIVISION', idade / idade1)
print('POWER', idade ** idade1)
print('INTEGER DIVISION', idade // idade1)
print('REMAINDER', idade % idade1)
| true
| true
|
f7025d49566233267aee5f1e01b03afd40276ac2
| 8,674
|
py
|
Python
|
rrd/view/screen.py
|
schemacs/falcon-dashboard
|
4d2b387a3e9f12994a54e5d2bcf8acd723710fd6
|
[
"Apache-2.0"
] | 1
|
2016-06-04T13:23:43.000Z
|
2016-06-04T13:23:43.000Z
|
rrd/view/screen.py
|
smokeman/dashboard
|
35dbee7bc06d294a32ecba18a62395e320dcfb8a
|
[
"Apache-2.0"
] | null | null | null |
rrd/view/screen.py
|
smokeman/dashboard
|
35dbee7bc06d294a32ecba18a62395e320dcfb8a
|
[
"Apache-2.0"
] | null | null | null |
#-*- coding:utf-8 -*-
import json
import copy
import requests
from flask import render_template, abort, request, url_for, redirect, g
import time
import datetime
from rrd import app
from rrd.model.screen import DashboardScreen
from rrd.model.graph import DashboardGraph
from rrd import consts
from rrd.utils.graph_urls import generate_graph_urls
from rrd import config
@app.route("/screen", methods=["GET", "POST"])
def dash_screens():
top_screens = DashboardScreen.gets(pid='0')
top_screens = sorted(top_screens, key=lambda x:x.name)
return render_template("screen/index.html", **locals())
@app.route("/screen/<int:sid>/delete")
def dash_screen_delete(sid):
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no such screen")
DashboardScreen.remove(sid)
return redirect("/screen")
@app.route("/screen/<int:sid>/edit", methods=["GET", "POST"])
def dash_screen_edit(sid):
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no such screen")
if request.method == "POST":
screen_name = request.form.get("screen_name")
screen.update(name=screen_name)
return redirect("/screen/%s" %screen.id)
else:
return render_template("screen/edit.html", **locals())
@app.route("/screen/<int:sid>/clone", methods=["GET", "POST"])
def dash_screen_clone(sid):
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no such screen")
if request.method == "POST":
screen_name = request.form.get("screen_name")
with_graph = request.form.get("with_graph")
new_s = DashboardScreen.add(screen.pid, screen_name)
if not new_s:
abort(404, "创建screen失败了")
if with_graph:
old_graphs = DashboardGraph.gets_by_screen_id(sid)
for o in old_graphs:
DashboardGraph.add(o.title, o.hosts, o.counters, new_s.id,
o.timespan, o.graph_type, o.method, o.position)
return redirect("/screen/%s" %new_s.id)
else:
return render_template("screen/clone.html", **locals())
@app.route("/graph/<int:gid>/delete")
def dash_graph_delete(gid):
graph = DashboardGraph.get(gid)
if not graph:
abort(404, "no such graph")
DashboardGraph.remove(gid)
return redirect("/screen/" + graph.screen_id)
@app.route("/screen/<int:sid>")
def dash_screen(sid):
start = request.args.get("start")
end = request.args.get("end")
top_screens = DashboardScreen.gets(pid=0)
top_screens = sorted(top_screens, key=lambda x:x.name)
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no screen")
if str(screen.pid) == '0':
sub_screens = DashboardScreen.gets(pid=sid)
sub_screens = sorted(sub_screens, key=lambda x:x.name)
return render_template("screen/top_screen.html", **locals())
pscreen = DashboardScreen.get(screen.pid)
sub_screens = DashboardScreen.gets(pid=screen.pid)
sub_screens = sorted(sub_screens, key=lambda x:x.name)
graphs = DashboardGraph.gets_by_screen_id(screen.id)
all_graphs = []
for graph in graphs:
all_graphs.extend(generate_graph_urls(graph, start, end) or [])
all_graphs = sorted(all_graphs, key=lambda x:x.position)
return render_template("screen/screen.html", **locals())
@app.route("/screen/embed/<int:sid>")
def dash_screen_embed(sid):
start = request.args.get("start")
end = request.args.get("end")
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no screen")
if screen.pid == '0':
abort(404, "top screen")
graphs = DashboardGraph.gets_by_screen_id(screen.id)
all_graphs = []
for graph in graphs:
all_graphs.extend(generate_graph_urls(graph, start, end) or [])
all_graphs = sorted(all_graphs, key=lambda x:x.position)
return render_template("screen/screen_embed.html", **locals())
@app.route("/screen/add", methods=["GET", "POST"])
def dash_screen_add():
if request.method == "POST":
name = request.form.get("screen_name")
pid = request.form.get("pid", '0')
screen = DashboardScreen.add(pid, name)
return redirect("/screen/%s" % screen.id)
else:
pid = request.args.get("pid", '0')
screen = DashboardScreen.get(pid)
return render_template("screen/add.html", **locals())
@app.route("/screen/<int:sid>/graph", methods=["GET", "POST"])
def dash_graph_add(sid):
all_screens = DashboardScreen.gets()
top_screens = [x for x in all_screens if x.pid == '0']
children = []
for t in top_screens:
children.append([x for x in all_screens if x.pid == t.id])
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no screen")
pscreen = DashboardScreen.get(screen.pid)
if request.method == "POST":
title = request.form.get("title")
hosts = request.form.get("hosts", "").strip()
hosts = hosts and hosts.split("\n") or []
hosts = [x.strip() for x in hosts]
counters = request.form.get("counters", "").strip()
counters = counters and counters.split("\n") or []
counters = [x.strip() for x in counters]
timespan = request.form.get("timespan", 3600)
graph_type = request.form.get("graph_type", 'h')
method = request.form.get("method", '').upper()
position = request.form.get("position", 0)
graph = DashboardGraph.add(title, hosts, counters, sid,
timespan, graph_type, method, position)
return redirect("/screen/%s" % sid)
else:
gid = request.args.get("gid")
graph = gid and DashboardGraph.get(gid)
return render_template("screen/graph_add.html", config=config, **locals())
@app.route("/graph/<int:gid>/edit", methods=["GET", "POST"])
def dash_graph_edit(gid):
error = ""
graph = DashboardGraph.get(gid)
if not graph:
abort(404, "no graph")
all_screens = DashboardScreen.gets()
top_screens = [x for x in all_screens if x.pid == '0']
children = []
for t in top_screens:
children.append([x for x in all_screens if x.pid == t.id])
screen = DashboardScreen.get(graph.screen_id)
if not screen:
abort(404, "no screen")
pscreen = DashboardScreen.get(screen.pid)
if request.method == "POST":
ajax = request.form.get("ajax", "")
screen_id = request.form.get("screen_id")
title = request.form.get("title", "").strip()
hosts = request.form.get("hosts", "").strip()
hosts = hosts and hosts.split("\n") or []
hosts = [x.strip() for x in hosts]
counters = request.form.get("counters", "").strip()
counters = counters and counters.split("\n") or []
counters = [x.strip() for x in counters]
timespan = request.form.get("timespan", 3600)
graph_type = request.form.get("graph_type", 'h')
method = request.form.get("method", '').upper()
position = request.form.get("position", 0)
graph = graph.update(title, hosts, counters, screen_id,
timespan, graph_type, method, position)
error = u"修改成功了"
if not ajax:
return render_template("screen/graph_edit.html", config=config, **locals())
else:
return "ok"
else:
ajax = request.args.get("ajax", "")
return render_template("screen/graph_edit.html", **locals())
@app.route("/graph/multi_edit", methods=["GET", "POST"])
def dash_graph_multi_edit():
ret = {
"ok": False,
"msg": "",
"data": [],
}
if request.method == "POST":
d = request.data
try:
jdata = json.loads(d)
except ValueError:
jdata = None
if not jdata:
return json.dumps({
"ok": False,
"msg": "no_data_post",
})
rows = []
for x in jdata:
rows.append({"id": x["id"], "hosts": x["endpoints"], "counters": x["counters"]})
DashboardGraph.update_multi(rows)
return json.dumps({
"ok": True,
"msg": "",
})
elif request.method == "GET":
sid = request.args.get("sid")
if not sid or not DashboardScreen.get(sid):
ret["msg"] = "no_screen"
return json.dumps(ret)
ret["ok"] = True
graphs = DashboardGraph.gets_by_screen_id(sid)
ret['data'] = [{"id": x.id, "title": x.title, "endpoints":x.hosts, "counters":x.counters} for x in graphs]
return json.dumps(ret)
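# --- Illustrative sketch (editor's addition, not part of the original view).
# dash_graph_multi_edit reads a raw JSON array from the request body; a hedged
# client-side call (host/port are assumptions) looks like:
import json
import requests

payload = [
    {"id": 1, "endpoints": ["host-a", "host-b"], "counters": ["cpu.idle"]},
]
r = requests.post("http://127.0.0.1:8080/graph/multi_edit",
                  data=json.dumps(payload))
print(r.json())   # roughly {"ok": true, "msg": ""} on success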
| 32.00738
| 114
| 0.61056
|
import json
import copy
import requests
from flask import render_template, abort, request, url_for, redirect, g
import time
import datetime
from rrd import app
from rrd.model.screen import DashboardScreen
from rrd.model.graph import DashboardGraph
from rrd import consts
from rrd.utils.graph_urls import generate_graph_urls
from rrd import config
@app.route("/screen", methods=["GET", "POST"])
def dash_screens():
top_screens = DashboardScreen.gets(pid='0')
top_screens = sorted(top_screens, key=lambda x:x.name)
return render_template("screen/index.html", **locals())
@app.route("/screen/<int:sid>/delete")
def dash_screen_delete(sid):
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no such screen")
DashboardScreen.remove(sid)
return redirect("/screen")
@app.route("/screen/<int:sid>/edit", methods=["GET", "POST"])
def dash_screen_edit(sid):
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no such screen")
if request.method == "POST":
screen_name = request.form.get("screen_name")
screen.update(name=screen_name)
return redirect("/screen/%s" %screen.id)
else:
return render_template("screen/edit.html", **locals())
@app.route("/screen/<int:sid>/clone", methods=["GET", "POST"])
def dash_screen_clone(sid):
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no such screen")
if request.method == "POST":
screen_name = request.form.get("screen_name")
with_graph = request.form.get("with_graph")
new_s = DashboardScreen.add(screen.pid, screen_name)
if not new_s:
abort(404, "创建screen失败了")
if with_graph:
old_graphs = DashboardGraph.gets_by_screen_id(sid)
for o in old_graphs:
DashboardGraph.add(o.title, o.hosts, o.counters, new_s.id,
o.timespan, o.graph_type, o.method, o.position)
return redirect("/screen/%s" %new_s.id)
else:
return render_template("screen/clone.html", **locals())
@app.route("/graph/<int:gid>/delete")
def dash_graph_delete(gid):
graph = DashboardGraph.get(gid)
if not graph:
abort(404, "no such graph")
DashboardGraph.remove(gid)
return redirect("/screen/" + graph.screen_id)
@app.route("/screen/<int:sid>")
def dash_screen(sid):
start = request.args.get("start")
end = request.args.get("end")
    top_screens = DashboardScreen.gets(pid='0')
top_screens = sorted(top_screens, key=lambda x:x.name)
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no screen")
if str(screen.pid) == '0':
sub_screens = DashboardScreen.gets(pid=sid)
sub_screens = sorted(sub_screens, key=lambda x:x.name)
return render_template("screen/top_screen.html", **locals())
pscreen = DashboardScreen.get(screen.pid)
sub_screens = DashboardScreen.gets(pid=screen.pid)
sub_screens = sorted(sub_screens, key=lambda x:x.name)
graphs = DashboardGraph.gets_by_screen_id(screen.id)
all_graphs = []
for graph in graphs:
all_graphs.extend(generate_graph_urls(graph, start, end) or [])
all_graphs = sorted(all_graphs, key=lambda x:x.position)
return render_template("screen/screen.html", **locals())
@app.route("/screen/embed/<int:sid>")
def dash_screen_embed(sid):
start = request.args.get("start")
end = request.args.get("end")
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no screen")
if screen.pid == '0':
abort(404, "top screen")
graphs = DashboardGraph.gets_by_screen_id(screen.id)
all_graphs = []
for graph in graphs:
all_graphs.extend(generate_graph_urls(graph, start, end) or [])
all_graphs = sorted(all_graphs, key=lambda x:x.position)
return render_template("screen/screen_embed.html", **locals())
@app.route("/screen/add", methods=["GET", "POST"])
def dash_screen_add():
if request.method == "POST":
name = request.form.get("screen_name")
pid = request.form.get("pid", '0')
screen = DashboardScreen.add(pid, name)
return redirect("/screen/%s" % screen.id)
else:
pid = request.args.get("pid", '0')
screen = DashboardScreen.get(pid)
return render_template("screen/add.html", **locals())
@app.route("/screen/<int:sid>/graph", methods=["GET", "POST"])
def dash_graph_add(sid):
all_screens = DashboardScreen.gets()
top_screens = [x for x in all_screens if x.pid == '0']
children = []
for t in top_screens:
children.append([x for x in all_screens if x.pid == t.id])
screen = DashboardScreen.get(sid)
if not screen:
abort(404, "no screen")
pscreen = DashboardScreen.get(screen.pid)
if request.method == "POST":
title = request.form.get("title")
hosts = request.form.get("hosts", "").strip()
hosts = hosts and hosts.split("\n") or []
hosts = [x.strip() for x in hosts]
counters = request.form.get("counters", "").strip()
counters = counters and counters.split("\n") or []
counters = [x.strip() for x in counters]
timespan = request.form.get("timespan", 3600)
graph_type = request.form.get("graph_type", 'h')
method = request.form.get("method", '').upper()
position = request.form.get("position", 0)
graph = DashboardGraph.add(title, hosts, counters, sid,
timespan, graph_type, method, position)
return redirect("/screen/%s" % sid)
else:
gid = request.args.get("gid")
graph = gid and DashboardGraph.get(gid)
return render_template("screen/graph_add.html", config=config, **locals())
@app.route("/graph/<int:gid>/edit", methods=["GET", "POST"])
def dash_graph_edit(gid):
error = ""
graph = DashboardGraph.get(gid)
if not graph:
abort(404, "no graph")
all_screens = DashboardScreen.gets()
top_screens = [x for x in all_screens if x.pid == '0']
children = []
for t in top_screens:
children.append([x for x in all_screens if x.pid == t.id])
screen = DashboardScreen.get(graph.screen_id)
if not screen:
abort(404, "no screen")
pscreen = DashboardScreen.get(screen.pid)
if request.method == "POST":
ajax = request.form.get("ajax", "")
screen_id = request.form.get("screen_id")
title = request.form.get("title", "").strip()
hosts = request.form.get("hosts", "").strip()
hosts = hosts and hosts.split("\n") or []
hosts = [x.strip() for x in hosts]
counters = request.form.get("counters", "").strip()
counters = counters and counters.split("\n") or []
counters = [x.strip() for x in counters]
timespan = request.form.get("timespan", 3600)
graph_type = request.form.get("graph_type", 'h')
method = request.form.get("method", '').upper()
position = request.form.get("position", 0)
graph = graph.update(title, hosts, counters, screen_id,
timespan, graph_type, method, position)
        error = u"Update succeeded"
if not ajax:
return render_template("screen/graph_edit.html", config=config, **locals())
else:
return "ok"
else:
ajax = request.args.get("ajax", "")
return render_template("screen/graph_edit.html", **locals())
@app.route("/graph/multi_edit", methods=["GET", "POST"])
def dash_graph_multi_edit():
ret = {
"ok": False,
"msg": "",
"data": [],
}
if request.method == "POST":
d = request.data
try:
jdata = json.loads(d)
except ValueError:
jdata = None
if not jdata:
return json.dumps({
"ok": False,
"msg": "no_data_post",
})
rows = []
for x in jdata:
rows.append({"id": x["id"], "hosts": x["endpoints"], "counters": x["counters"]})
DashboardGraph.update_multi(rows)
return json.dumps({
"ok": True,
"msg": "",
})
elif request.method == "GET":
sid = request.args.get("sid")
if not sid or not DashboardScreen.get(sid):
ret["msg"] = "no_screen"
return json.dumps(ret)
ret["ok"] = True
graphs = DashboardGraph.gets_by_screen_id(sid)
ret['data'] = [{"id": x.id, "title": x.title, "endpoints":x.hosts, "counters":x.counters} for x in graphs]
return json.dumps(ret)
| true
| true
|
f7025d79335469b2af7d433c0a5238418c81123d
| 2,973
|
py
|
Python
|
ogbg-code/model/asap.py
|
anonyma2020/dagnn
|
3191b2cc4f923d523ece3962c96a0e3dd54f1a0b
|
[
"MIT"
] | 38
|
2021-01-24T13:33:31.000Z
|
2022-03-29T02:24:32.000Z
|
ogbg-code/model/asap.py
|
anonyma2020/dagnn
|
3191b2cc4f923d523ece3962c96a0e3dd54f1a0b
|
[
"MIT"
] | 3
|
2021-03-10T08:00:58.000Z
|
2021-11-02T08:29:02.000Z
|
ogbg-code/model/asap.py
|
anonyma2020/dagnn
|
3191b2cc4f923d523ece3962c96a0e3dd54f1a0b
|
[
"MIT"
] | 9
|
2021-02-20T14:39:34.000Z
|
2022-03-17T01:51:03.000Z
|
import torch
import torch.nn.functional as F
from torch.nn import Linear
from torch_geometric.nn import (ASAPooling,
GraphConv, global_mean_pool,
JumpingKnowledge)
class ASAP(torch.nn.Module):
def __init__(self, num_vocab, max_seq_len, node_encoder, emb_dim, num_layers, hidden, ratio=0.8, dropout=0, num_class=0):
super(ASAP, self).__init__()
self.num_class = num_class
self.max_seq_len = max_seq_len
self.node_encoder = node_encoder
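        # node_encoder is assumed to map raw node features (plus node depth) to emb_dim vectors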
self.conv1 = GraphConv(emb_dim, hidden, aggr='mean')
self.convs = torch.nn.ModuleList()
self.pools = torch.nn.ModuleList()
self.convs.extend([
GraphConv(hidden, hidden, aggr='mean')
for i in range(num_layers - 1)
])
self.pools.extend([
ASAPooling(hidden, ratio, dropout=dropout)
for i in range((num_layers) // 2)
])
self.jump = JumpingKnowledge(mode='cat')
self.lin1 = Linear(num_layers * hidden, hidden)
# self.lin2 = Linear(hidden, dataset.num_classes)
if self.num_class > 0: # classification
self.graph_pred_linear = torch.nn.Linear(hidden, self.num_class)
else:
self.graph_pred_linear_list = torch.nn.ModuleList()
for i in range(max_seq_len):
self.graph_pred_linear_list.append(torch.nn.Linear(hidden, num_vocab))
def reset_parameters(self):
self.conv1.reset_parameters()
for conv in self.convs:
conv.reset_parameters()
for pool in self.pools:
pool.reset_parameters()
self.lin1.reset_parameters()
        # self.lin2 was removed in __init__, so only lin1 needs resetting here
def forward(self, data):
x, edge_index, node_depth, batch = data.x, data.edge_index, data.node_depth, data.batch
x = self.node_encoder(x, node_depth.view(-1, ))
edge_weight = None
x = F.relu(self.conv1(x, edge_index))
xs = [global_mean_pool(x, batch)]
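        # alternate message passing and pooling: take a mean-pool readout after
        # every conv (jumping knowledge), and coarsen the graph with ASAPooling
        # after every second conv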
for i, conv in enumerate(self.convs):
x = conv(x=x, edge_index=edge_index, edge_weight=edge_weight)
x = F.relu(x)
xs += [global_mean_pool(x, batch)]
if i % 2 == 0 and i < len(self.convs) - 1:
pool = self.pools[i // 2]
x, edge_index, edge_weight, batch, _ = pool(
x=x, edge_index=edge_index, edge_weight=edge_weight,
batch=batch)
x = self.jump(xs)
x = F.relu(self.lin1(x))
x = F.dropout(x, p=0.5, training=self.training)
# x = self.lin2(x)
# return F.log_softmax(x, dim=-1)
if self.num_class > 0:
return self.graph_pred_linear(x)
pred_list = []
for i in range(self.max_seq_len):
pred_list.append(self.graph_pred_linear_list[i](x))
return pred_list
def __repr__(self):
return self.__class__.__name__
| 36.256098
| 125
| 0.590649
|
import torch
import torch.nn.functional as F
from torch.nn import Linear
from torch_geometric.nn import (ASAPooling,
GraphConv, global_mean_pool,
JumpingKnowledge)
class ASAP(torch.nn.Module):
def __init__(self, num_vocab, max_seq_len, node_encoder, emb_dim, num_layers, hidden, ratio=0.8, dropout=0, num_class=0):
super(ASAP, self).__init__()
self.num_class = num_class
self.max_seq_len = max_seq_len
self.node_encoder = node_encoder
self.conv1 = GraphConv(emb_dim, hidden, aggr='mean')
self.convs = torch.nn.ModuleList()
self.pools = torch.nn.ModuleList()
self.convs.extend([
GraphConv(hidden, hidden, aggr='mean')
for i in range(num_layers - 1)
])
self.pools.extend([
ASAPooling(hidden, ratio, dropout=dropout)
for i in range((num_layers) // 2)
])
self.jump = JumpingKnowledge(mode='cat')
self.lin1 = Linear(num_layers * hidden, hidden)
        if self.num_class > 0:
            self.graph_pred_linear = torch.nn.Linear(hidden, self.num_class)
else:
self.graph_pred_linear_list = torch.nn.ModuleList()
for i in range(max_seq_len):
self.graph_pred_linear_list.append(torch.nn.Linear(hidden, num_vocab))
def reset_parameters(self):
self.conv1.reset_parameters()
for conv in self.convs:
conv.reset_parameters()
for pool in self.pools:
pool.reset_parameters()
self.lin1.reset_parameters()
def forward(self, data):
x, edge_index, node_depth, batch = data.x, data.edge_index, data.node_depth, data.batch
x = self.node_encoder(x, node_depth.view(-1, ))
edge_weight = None
x = F.relu(self.conv1(x, edge_index))
xs = [global_mean_pool(x, batch)]
for i, conv in enumerate(self.convs):
x = conv(x=x, edge_index=edge_index, edge_weight=edge_weight)
x = F.relu(x)
xs += [global_mean_pool(x, batch)]
if i % 2 == 0 and i < len(self.convs) - 1:
pool = self.pools[i // 2]
x, edge_index, edge_weight, batch, _ = pool(
x=x, edge_index=edge_index, edge_weight=edge_weight,
batch=batch)
x = self.jump(xs)
x = F.relu(self.lin1(x))
x = F.dropout(x, p=0.5, training=self.training)
if self.num_class > 0:
return self.graph_pred_linear(x)
pred_list = []
for i in range(self.max_seq_len):
pred_list.append(self.graph_pred_linear_list[i](x))
return pred_list
def __repr__(self):
return self.__class__.__name__
| true
| true
|
f7025e482326addd111e4b18afa939b10069bdad
| 1,745
|
py
|
Python
|
catkin_ws/src/dagu_car/src/wheels_cmd_switch_node.py
|
OSLL/Dickietown-Software
|
fcbca82cae9a8076c30c6a21c93b729bb8c01eca
|
[
"CC-BY-2.0"
] | null | null | null |
catkin_ws/src/dagu_car/src/wheels_cmd_switch_node.py
|
OSLL/Dickietown-Software
|
fcbca82cae9a8076c30c6a21c93b729bb8c01eca
|
[
"CC-BY-2.0"
] | null | null | null |
catkin_ws/src/dagu_car/src/wheels_cmd_switch_node.py
|
OSLL/Dickietown-Software
|
fcbca82cae9a8076c30c6a21c93b729bb8c01eca
|
[
"CC-BY-2.0"
] | null | null | null |
#!/usr/bin/env python
import rospy
from duckietown_msgs.msg import WheelsCmdStamped, FSMState
class WheelsCmdSwitchNode(object):
def __init__(self):
self.node_name = rospy.get_name()
rospy.loginfo("[%s] Initializing " %(self.node_name))
# Read parameters
self.mappings = rospy.get_param("~mappings")
source_topic_dict = rospy.get_param("~source_topics")
self.current_src_name = None
# Construct publisher
self.pub_cmd = rospy.Publisher("~wheels_cmd",WheelsCmdStamped,queue_size=1)
# Construct subscribers
self.sub_fsm_state = rospy.Subscriber(rospy.get_param("~mode_topic"),FSMState,self.cbFSMState)
self.sub_dict = dict()
for src_name, topic_name in source_topic_dict.items():
self.sub_dict[src_name] = rospy.Subscriber(topic_name,WheelsCmdStamped,self.cbWheelsCmd,callback_args=src_name)
def cbFSMState(self,fsm_state_msg):
self.current_src_name = self.mappings.get(fsm_state_msg.state)
if self.current_src_name is None:
rospy.logwarn("[%s] FSMState %s not handled. No msg pass through the switch." %(self.node_name,fsm_state_msg.state))
def cbWheelsCmd(self,msg,src_name):
if src_name == self.current_src_name:
self.pub_cmd.publish(msg)
def on_shutdown(self):
rospy.loginfo("[%s] Shutting down." %(self.node_name))
if __name__ == '__main__':
# Initialize the node with rospy
rospy.init_node('wheels_cmd_switch_node', anonymous=False)
# Create the DaguCar object
node = WheelsCmdSwitchNode()
# Setup proper shutdown behavior
rospy.on_shutdown(node.on_shutdown)
# Keep it spinning to keep the node alive
rospy.spin()
| 39.659091
| 128
| 0.694556
|
import rospy
from duckietown_msgs.msg import WheelsCmdStamped, FSMState
class WheelsCmdSwitchNode(object):
def __init__(self):
self.node_name = rospy.get_name()
rospy.loginfo("[%s] Initializing " %(self.node_name))
self.mappings = rospy.get_param("~mappings")
source_topic_dict = rospy.get_param("~source_topics")
self.current_src_name = None
self.pub_cmd = rospy.Publisher("~wheels_cmd",WheelsCmdStamped,queue_size=1)
self.sub_fsm_state = rospy.Subscriber(rospy.get_param("~mode_topic"),FSMState,self.cbFSMState)
self.sub_dict = dict()
for src_name, topic_name in source_topic_dict.items():
self.sub_dict[src_name] = rospy.Subscriber(topic_name,WheelsCmdStamped,self.cbWheelsCmd,callback_args=src_name)
def cbFSMState(self,fsm_state_msg):
self.current_src_name = self.mappings.get(fsm_state_msg.state)
if self.current_src_name is None:
rospy.logwarn("[%s] FSMState %s not handled. No msg pass through the switch." %(self.node_name,fsm_state_msg.state))
def cbWheelsCmd(self,msg,src_name):
if src_name == self.current_src_name:
self.pub_cmd.publish(msg)
def on_shutdown(self):
rospy.loginfo("[%s] Shutting down." %(self.node_name))
if __name__ == '__main__':
rospy.init_node('wheels_cmd_switch_node', anonymous=False)
node = WheelsCmdSwitchNode()
rospy.on_shutdown(node.on_shutdown)
rospy.spin()
| true
| true
|
f70260b8d341e2802090c4507199d8c9a2de5f03
| 2,788
|
py
|
Python
|
alipay/aop/api/domain/AlipayTradeSettleReceivablesQueryModel.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 1
|
2022-03-07T06:11:10.000Z
|
2022-03-07T06:11:10.000Z
|
alipay/aop/api/domain/AlipayTradeSettleReceivablesQueryModel.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | null | null | null |
alipay/aop/api/domain/AlipayTradeSettleReceivablesQueryModel.py
|
snowxmas/alipay-sdk-python-all
|
96870ced60facd96c5bce18d19371720cbda3317
|
[
"Apache-2.0"
] | 1
|
2021-10-05T03:01:09.000Z
|
2021-10-05T03:01:09.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.SettleEntity import SettleEntity
class AlipayTradeSettleReceivablesQueryModel(object):
def __init__(self):
self._biz_product = None
self._extend_params = None
self._merchant_info = None
self._out_request_no = None
@property
def biz_product(self):
return self._biz_product
@biz_product.setter
def biz_product(self, value):
self._biz_product = value
@property
def extend_params(self):
return self._extend_params
@extend_params.setter
def extend_params(self, value):
self._extend_params = value
@property
def merchant_info(self):
return self._merchant_info
@merchant_info.setter
def merchant_info(self, value):
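        # accept either a SettleEntity instance or a plain dict and normalize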
if isinstance(value, SettleEntity):
self._merchant_info = value
else:
self._merchant_info = SettleEntity.from_alipay_dict(value)
@property
def out_request_no(self):
return self._out_request_no
@out_request_no.setter
def out_request_no(self, value):
self._out_request_no = value
def to_alipay_dict(self):
params = dict()
if self.biz_product:
if hasattr(self.biz_product, 'to_alipay_dict'):
params['biz_product'] = self.biz_product.to_alipay_dict()
else:
params['biz_product'] = self.biz_product
if self.extend_params:
if hasattr(self.extend_params, 'to_alipay_dict'):
params['extend_params'] = self.extend_params.to_alipay_dict()
else:
params['extend_params'] = self.extend_params
if self.merchant_info:
if hasattr(self.merchant_info, 'to_alipay_dict'):
params['merchant_info'] = self.merchant_info.to_alipay_dict()
else:
params['merchant_info'] = self.merchant_info
if self.out_request_no:
if hasattr(self.out_request_no, 'to_alipay_dict'):
params['out_request_no'] = self.out_request_no.to_alipay_dict()
else:
params['out_request_no'] = self.out_request_no
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayTradeSettleReceivablesQueryModel()
if 'biz_product' in d:
o.biz_product = d['biz_product']
if 'extend_params' in d:
o.extend_params = d['extend_params']
if 'merchant_info' in d:
o.merchant_info = d['merchant_info']
if 'out_request_no' in d:
o.out_request_no = d['out_request_no']
return o
| 30.977778
| 79
| 0.625897
|
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.SettleEntity import SettleEntity
class AlipayTradeSettleReceivablesQueryModel(object):
def __init__(self):
self._biz_product = None
self._extend_params = None
self._merchant_info = None
self._out_request_no = None
@property
def biz_product(self):
return self._biz_product
@biz_product.setter
def biz_product(self, value):
self._biz_product = value
@property
def extend_params(self):
return self._extend_params
@extend_params.setter
def extend_params(self, value):
self._extend_params = value
@property
def merchant_info(self):
return self._merchant_info
@merchant_info.setter
def merchant_info(self, value):
if isinstance(value, SettleEntity):
self._merchant_info = value
else:
self._merchant_info = SettleEntity.from_alipay_dict(value)
@property
def out_request_no(self):
return self._out_request_no
@out_request_no.setter
def out_request_no(self, value):
self._out_request_no = value
def to_alipay_dict(self):
params = dict()
if self.biz_product:
if hasattr(self.biz_product, 'to_alipay_dict'):
params['biz_product'] = self.biz_product.to_alipay_dict()
else:
params['biz_product'] = self.biz_product
if self.extend_params:
if hasattr(self.extend_params, 'to_alipay_dict'):
params['extend_params'] = self.extend_params.to_alipay_dict()
else:
params['extend_params'] = self.extend_params
if self.merchant_info:
if hasattr(self.merchant_info, 'to_alipay_dict'):
params['merchant_info'] = self.merchant_info.to_alipay_dict()
else:
params['merchant_info'] = self.merchant_info
if self.out_request_no:
if hasattr(self.out_request_no, 'to_alipay_dict'):
params['out_request_no'] = self.out_request_no.to_alipay_dict()
else:
params['out_request_no'] = self.out_request_no
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayTradeSettleReceivablesQueryModel()
if 'biz_product' in d:
o.biz_product = d['biz_product']
if 'extend_params' in d:
o.extend_params = d['extend_params']
if 'merchant_info' in d:
o.merchant_info = d['merchant_info']
if 'out_request_no' in d:
o.out_request_no = d['out_request_no']
return o
| true
| true
|
f70261b694fca9802548a912f945ff89a5035eca
| 2,782
|
py
|
Python
|
src/state.py
|
xeust/Method-Draw
|
a8b264e37fdc3a3a8b629178d62ecb6c8008760b
|
[
"MIT"
] | 7
|
2021-07-25T23:38:31.000Z
|
2021-12-04T23:11:57.000Z
|
src/state.py
|
xeust/Method-Draw
|
a8b264e37fdc3a3a8b629178d62ecb6c8008760b
|
[
"MIT"
] | null | null | null |
src/state.py
|
xeust/Method-Draw
|
a8b264e37fdc3a3a8b629178d62ecb6c8008760b
|
[
"MIT"
] | 5
|
2021-08-11T12:02:10.000Z
|
2022-02-24T04:41:09.000Z
|
import os
from deta import Deta
from datetime import date, datetime
from fastapi import HTTPException
import urllib
import base64
deta = Deta()
base = deta.Base("drawings")
drive = deta.Drive("drawings")
def get_all(db, query):
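    # fetch() yields results in buffered pages; flatten them into one list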
blob_gen = db.fetch(query)
blobs = []
for stored_blob in blob_gen:
for blob in stored_blob:
blobs.append(blob)
return blobs
# list all drawings
def get_drawings():
try:
return get_all(base, {})
except:
return None
# save existing drawing
def save(name, file):
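    # drawing names are stored under their urlsafe-base64 encoding so
    # arbitrary names are safe to use as Base keys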
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
try:
if (b):
base.put({"key": encoded_name, "name": name, "public": b["public"], "lastModified": datetime.utcnow().timestamp()})
return drive.put(name, file)
base.put({"key":encoded_name, "name": name, "public": False, "lastModified": datetime.utcnow().timestamp()})
return drive.put(name, file)
except:
return None
# save
def save_as(name, file, overwrite):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
record = {"key": encoded_name, "name":name, "public": False, 'lastModified': datetime.utcnow().timestamp()}
if (overwrite or not b): # Overwrite allowed or Record Does not Exist
base.put(record)
drive.put(name, file)
return record
else: # Overwrite False and Record Exists
return None
def get_drawing(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
d = drive.get(name)
if (b and d):
return d.read()
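    # metadata and file are out of sync; remove the stale half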
base.delete(encoded_name)
drive.delete(name)
return None
def get_metadata(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
if (b):
return b
return None
def delete_drawing(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
try:
base.delete(encoded_name)
drive.delete(name)
return name
except:
return None
def modify_public(name, public):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
try:
b = base.get(encoded_name)
if (b):
b["public"] = public
return base.put(b)
return None
except:
return None
def get_public_drawing(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
try:
b = base.get(encoded_name)
if (b and b["public"]):
return drive.get(name)
return None
except:
return None
| 27.82
| 127
| 0.62509
|
import os
from deta import Deta
from datetime import date, datetime
from fastapi import HTTPException
import urllib
import base64
deta = Deta()
base = deta.Base("drawings")
drive = deta.Drive("drawings")
def get_all(db, query):
blob_gen = db.fetch(query)
blobs = []
for stored_blob in blob_gen:
for blob in stored_blob:
blobs.append(blob)
return blobs
def get_drawings():
try:
return get_all(base, {})
except:
return None
def save(name, file):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
try:
if (b):
base.put({"key": encoded_name, "name": name, "public": b["public"], "lastModified": datetime.utcnow().timestamp()})
return drive.put(name, file)
base.put({"key":encoded_name, "name": name, "public": False, "lastModified": datetime.utcnow().timestamp()})
return drive.put(name, file)
except:
return None
def save_as(name, file, overwrite):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
record = {"key": encoded_name, "name":name, "public": False, 'lastModified': datetime.utcnow().timestamp()}
    if (overwrite or not b):
        base.put(record)
        drive.put(name, file)
        return record
    else:
        return None
def get_drawing(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
d = drive.get(name)
if (b and d):
return d.read()
base.delete(encoded_name)
drive.delete(name)
return None
def get_metadata(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
b = base.get(encoded_name)
if (b):
return b
return None
def delete_drawing(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
try:
base.delete(encoded_name)
drive.delete(name)
return name
except:
return None
def modify_public(name, public):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
try:
b = base.get(encoded_name)
if (b):
b["public"] = public
return base.put(b)
return None
except:
return None
def get_public_drawing(name):
encoded_name = str(base64.urlsafe_b64encode(name.encode("utf-8")), 'utf-8')
try:
b = base.get(encoded_name)
if (b and b["public"]):
return drive.get(name)
return None
except:
return None
| true
| true
|
f70262ad5923fa3e4e49cdaa62ec5bb20a7a390f
| 6,835
|
py
|
Python
|
qiskit/providers/aer/library/save_amplitudes.py
|
ArunRamachandran25/qiskit-aer
|
0ece80f11d61193f74b639b3efb82253fedeaef1
|
[
"Apache-2.0"
] | 1
|
2021-02-25T16:56:19.000Z
|
2021-02-25T16:56:19.000Z
|
qiskit/providers/aer/library/save_amplitudes.py
|
ArunRamachandran25/qiskit-aer
|
0ece80f11d61193f74b639b3efb82253fedeaef1
|
[
"Apache-2.0"
] | null | null | null |
qiskit/providers/aer/library/save_amplitudes.py
|
ArunRamachandran25/qiskit-aer
|
0ece80f11d61193f74b639b3efb82253fedeaef1
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Simulator instruction to save statevector amplitudes and amplitudes squared.
"""
from qiskit.circuit import QuantumCircuit
from qiskit.extensions.exceptions import ExtensionError
from .save_data import SaveSingleData, SaveAverageData, default_qubits
class SaveAmplitudes(SaveSingleData):
"""Save complex statevector amplitudes."""
def __init__(self,
key,
num_qubits,
params,
pershot=False,
conditional=False):
"""Instruction to save complex statevector amplitudes.
Args:
key (str): the key for retrieving saved data from results.
num_qubits (int): the number of qubits for the snapshot type.
            params (list): list of basis states to save amplitudes for.
            pershot (bool): if True save a list of amplitude vectors for each
                            shot of the simulation rather than a single
amplitude vector [Default: False].
conditional (bool): if True save the amplitudes vector conditional
on the current classical register values
[Default: False].
Raises:
ExtensionError: if params is invalid for the specified number of qubits.
"""
params = _format_amplitude_params(params, num_qubits)
super().__init__("save_amplitudes",
key,
num_qubits,
pershot=pershot,
conditional=conditional,
params=params)
class SaveAmplitudesSquared(SaveAverageData):
"""Save squared statevector amplitudes (probabilities)."""
def __init__(self,
key,
num_qubits,
params,
unnormalized=False,
pershot=False,
conditional=False):
"""Instruction to save squared statevector amplitudes (probabilities).
Args:
key (str): the key for retrieving saved data from results.
num_qubits (int): the number of qubits for the snapshot type.
            params (list): list of basis states to save probabilities for.
            unnormalized (bool): if True save the unnormalized accumulated
probabilities over all shots [Default: False].
pershot (bool): if True save a list of probability vectors for each
                            shot of the simulation rather than a single
                            probability vector [Default: False].
conditional (bool): if True save the probability vector conditional
on the current classical register values
[Default: False].
Raises:
ExtensionError: if params is invalid for the specified number of qubits.
"""
params = _format_amplitude_params(params, num_qubits)
super().__init__("save_amplitudes_sq",
key,
num_qubits,
unnormalized=unnormalized,
pershot=pershot,
conditional=conditional,
params=params)
def save_amplitudes(self, key, params, pershot=False, conditional=False):
"""Save complex statevector amplitudes.
Args:
key (str): the key for retrieving saved data from results.
params (List[int] or List[str]): the basis states to return amplitudes for.
        pershot (bool): if True save a list of amplitude vectors for each
                        shot of the simulation rather than a single
amplitude vector [Default: False].
conditional (bool): if True save the amplitudes vector conditional
on the current classical register values
[Default: False].
Returns:
QuantumCircuit: with attached instruction.
Raises:
ExtensionError: if params is invalid for the specified number of qubits.
"""
qubits = default_qubits(self)
instr = SaveAmplitudes(key, len(qubits), params,
pershot=pershot, conditional=conditional)
return self.append(instr, qubits)
def save_amplitudes_squared(self, key, params,
unnormalized=False,
pershot=False,
conditional=False):
"""Save squared statevector amplitudes (probabilities).
Args:
key (str): the key for retrieving saved data from results.
params (List[int] or List[str]): the basis states to return amplitudes for.
        unnormalized (bool): if True save the unnormalized accumulated
probabilities over all shots [Default: False].
pershot (bool): if True save a list of probability vectors for each
                        shot of the simulation rather than a single
                        probability vector [Default: False].
conditional (bool): if True save the probability vector conditional
on the current classical register values
[Default: False].
Returns:
QuantumCircuit: with attached instruction.
Raises:
ExtensionError: if params is invalid for the specified number of qubits.
"""
qubits = default_qubits(self)
instr = SaveAmplitudesSquared(key, len(qubits), params,
unnormalized=unnormalized,
pershot=pershot,
conditional=conditional)
return self.append(instr, qubits)
def _format_amplitude_params(params, num_qubits=None):
"""Format amplitude params as a interger list."""
if isinstance(params[0], str):
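        # string labels may be hex ("0x1f") or bit-strings ("0110")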
if params[0].find('0x') == 0:
params = [int(i, 16) for i in params]
else:
params = [int(i, 2) for i in params]
if num_qubits and max(params) >= 2 ** num_qubits:
raise ExtensionError(
"Param values contain a state larger than the number of qubits")
return params
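# Attach the helper functions to QuantumCircuit so they can be called as circuit methods.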
QuantumCircuit.save_amplitudes = save_amplitudes
QuantumCircuit.save_amplitudes_squared = save_amplitudes_squared
| 41.932515
| 84
| 0.593416
|
from qiskit.circuit import QuantumCircuit
from qiskit.extensions.exceptions import ExtensionError
from .save_data import SaveSingleData, SaveAverageData, default_qubits
class SaveAmplitudes(SaveSingleData):
def __init__(self,
key,
num_qubits,
params,
pershot=False,
conditional=False):
params = _format_amplitude_params(params, num_qubits)
super().__init__("save_amplitudes",
key,
num_qubits,
pershot=pershot,
conditional=conditional,
params=params)
class SaveAmplitudesSquared(SaveAverageData):
def __init__(self,
key,
num_qubits,
params,
unnormalized=False,
pershot=False,
conditional=False):
params = _format_amplitude_params(params, num_qubits)
super().__init__("save_amplitudes_sq",
key,
num_qubits,
unnormalized=unnormalized,
pershot=pershot,
conditional=conditional,
params=params)
def save_amplitudes(self, key, params, pershot=False, conditional=False):
qubits = default_qubits(self)
instr = SaveAmplitudes(key, len(qubits), params,
pershot=pershot, conditional=conditional)
return self.append(instr, qubits)
def save_amplitudes_squared(self, key, params,
unnormalized=False,
pershot=False,
conditional=False):
qubits = default_qubits(self)
instr = SaveAmplitudesSquared(key, len(qubits), params,
unnormalized=unnormalized,
pershot=pershot,
conditional=conditional)
return self.append(instr, qubits)
def _format_amplitude_params(params, num_qubits=None):
if isinstance(params[0], str):
if params[0].find('0x') == 0:
params = [int(i, 16) for i in params]
else:
params = [int(i, 2) for i in params]
if num_qubits and max(params) >= 2 ** num_qubits:
raise ExtensionError(
"Param values contain a state larger than the number of qubits")
return params
QuantumCircuit.save_amplitudes = save_amplitudes
QuantumCircuit.save_amplitudes_squared = save_amplitudes_squared
| true
| true
|