repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
tschaume/ccsgp_get_started
|
ccsgp_get_started/examples/gp_lcltpt.py
|
gp_lcltpt
|
python
|
def gp_lcltpt():
inDir, outDir = getWorkDirs()
nSets = len(default_colors)
make_plot(
data = [
np.array([ [0,i,0,0,0], [1,i,0,0,0] ])
for i in xrange(nSets)
],
properties = [
'with linespoints lw 4 lc %s lt %d pt %d' % (col, i, i)
for i, col in enumerate(default_colors)
],
titles = [''] * nSets, yr = [-1, 51],
name = os.path.join(outDir, 'gp_lcltpt'),
ylabel = 'linecolor / linetype / pointtype', xlabel = '',
)
return 'done'
|
example plot to display linecolors, linetypes and pointtypes
.. image:: pics/gp_lcltpt.png
:width: 450 px
|
train
|
https://github.com/tschaume/ccsgp_get_started/blob/e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2/ccsgp_get_started/examples/gp_lcltpt.py#L7-L28
|
[
"def getWorkDirs():\n \"\"\"get input/output dirs (same input/output layout as for package)\"\"\"\n # get caller module\n caller_fullurl = inspect.stack()[1][1]\n caller_relurl = os.path.relpath(caller_fullurl)\n caller_modurl = os.path.splitext(caller_relurl)[0]\n # split caller_url & append 'Dir' to package name\n dirs = caller_modurl.split('/')\n dirs[0] = 'data' # TODO de-hardcode\n # get, check and create outdir\n outDir = os.path.join(*(['output'] + dirs[1:]))\n if not os.path.exists(outDir): os.makedirs(outDir)\n # get and check indir\n dirs.append('input')\n inDir = os.path.join(*dirs)\n if not os.path.exists(inDir):\n logging.critical('create input dir %s to continue!' % inDir)\n sys.exit(1)\n return inDir, outDir\n"
] |
import os
import numpy as np
from ..ccsgp.ccsgp import make_plot
from .utils import getWorkDirs
from ..ccsgp.config import default_colors
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
args = parser.parse_args()
print gp_lcltpt()
|
tschaume/ccsgp_get_started
|
ccsgp_get_started/examples/gp_datdir.py
|
gp_datdir
|
python
|
def gp_datdir(initial, topN):
# prepare input/output directories
inDir, outDir = getWorkDirs()
initial = initial.capitalize()
inDir = os.path.join(inDir, initial)
if not os.path.exists(inDir): # catch missing initial
return "initial %s doesn't exist" % initial
# prepare data
data = OrderedDict()
for file in os.listdir(inDir):
country = os.path.splitext(file)[0]
file_url = os.path.join(inDir, file)
data[country] = np.loadtxt(open(file_url, 'rb')) # load data
# set y-axis unit to 1M
data[country][:, 1] /= 1e6
if data[country].shape[1] > 2: data[country][:, 3:] /= 1e6
logging.debug(data) # shown if --log flag given on command line
# sort countries according to mean population (highest -> lowest)
sorted_data = OrderedDict(sorted(
data.items(), key = lambda t: np.mean(t[1][:,1]), reverse = True
))
# "pop" (select) N most populated countries
top_data = OrderedDict(
sorted_data.popitem(last = False) for i in xrange(topN)
if sorted_data
)
# generate plot using ccsgp.make_plot
nSets = len(top_data)
make_plot(
data = top_data.values(),
properties = [ getOpts(i) for i in xrange(nSets) ],
titles = top_data.keys(), # use data keys as legend titles
name = os.path.join(outDir, initial),
key = [ 'at graph 1., 1.2', 'maxrows 2' ],
ylabel = 'total population ({/Symbol \664} 10^{6})',
xlabel = 'year', rmargin = 0.99, tmargin = 0.85, size='8.5in,8in'
)
return 'done'
|
example for plotting from a text file via numpy.loadtxt
1. prepare input/output directories
2. load the data into an OrderedDict() [adjust axes units]
3. sort countries from highest to lowest population
4. select the <topN> most populated countries
5. call ccsgp.make_plot with data from 4
Below is an output image for country initial T and the 4 most populated
countries for this initial (click to enlarge). Also see::
$ python -m ccsgp_get_started.examples.gp_datdir -h
for help on the command line options.
.. image:: pics/T.png
:width: 450 px
.. image:: pics/U.png
:width: 450 px
:param initial: country initial
:type initial: str
:param topN: number of most populated countries to plot
:type topN: int
:ivar inDir: input directory according to package structure and initial
:ivar outDir: output directory according to package structure
:ivar data: OrderedDict with datasets to plot as separate keys
:ivar file: data input file for specific country, format: [x y] OR [x y dx dy]
:ivar country: country, filename stem of input file
:ivar file_url: absolute url to input file
:ivar nSets: number of datasets
|
train
|
https://github.com/tschaume/ccsgp_get_started/blob/e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2/ccsgp_get_started/examples/gp_datdir.py#L8-L78
|
[
"def getWorkDirs():\n \"\"\"get input/output dirs (same input/output layout as for package)\"\"\"\n # get caller module\n caller_fullurl = inspect.stack()[1][1]\n caller_relurl = os.path.relpath(caller_fullurl)\n caller_modurl = os.path.splitext(caller_relurl)[0]\n # split caller_url & append 'Dir' to package name\n dirs = caller_modurl.split('/')\n dirs[0] = 'data' # TODO de-hardcode\n # get, check and create outdir\n outDir = os.path.join(*(['output'] + dirs[1:]))\n if not os.path.exists(outDir): os.makedirs(outDir)\n # get and check indir\n dirs.append('input')\n inDir = os.path.join(*dirs)\n if not os.path.exists(inDir):\n logging.critical('create input dir %s to continue!' % inDir)\n sys.exit(1)\n return inDir, outDir\n"
] |
import logging, argparse, os, sys
import numpy as np
from collections import OrderedDict
from ..ccsgp.ccsgp import make_plot
from .utils import getWorkDirs
from ..ccsgp.utils import getOpts
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("initial", help="country initial = input subdir with txt files")
parser.add_argument("topN", help="number of most populated countries to plot")
parser.add_argument("--log", help="show log output", action="store_true")
args = parser.parse_args()
loglevel = 'DEBUG' if args.log else 'WARNING'
logging.basicConfig(
format='%(message)s', level=getattr(logging, loglevel)
)
print gp_datdir(args.initial, int(args.topN))
|
tschaume/ccsgp_get_started
|
ccsgp_get_started/examples/gp_rdiff.py
|
gp_rdiff
|
python
|
def gp_rdiff(version, nomed, noxerr, diffRel, divdNdy):
inDir, outDir = getWorkDirs()
inDir = os.path.join(inDir, version)
data, cocktail, medium, rhofo, vacrho = \
OrderedDict(), OrderedDict(), OrderedDict(), OrderedDict(), OrderedDict()
#scale = { # QM14 (19 GeV skip later, factor here only informational)
# '19.6': 1.0340571932983775, '200': 1.0, '39': 0.7776679085382481,
# '27': 0.6412140408244136, '62.4': 0.9174700031778402
#}
scale = {
'19.6': 1.3410566491548412, '200': 1.1051002240771077,
'39': 1.2719203877292842, '27': 1.350873678084769,
'62.4': 1.2664666321635087
}
yunit = 1.0e-3 if not diffRel else 1.
for infile in os.listdir(inDir):
if infile == "cocktail_contribs": continue
energy = re.compile('\d+').search(infile).group()
data_type = re.sub('%s\.dat' % energy, '', infile)
energy = getEnergy4Key(energy)
file_url = os.path.join(inDir, infile)
data_import = np.loadtxt(open(file_url, 'rb'))
if data_type != 'data' and (
(version == 'QM14' and energy != '19.6') or version == 'LatestPatrickJieYi'
):
data_import[:,(1,3,4)] /= scale[energy]
if version == 'LatestPatrickJieYi':
if data_type == 'data':
data_import = data_import[(data_import[:,0] > 0.14) & (data_import[:,0] < 1.0)]
else:
data_import = data_import[data_import[:,0] < 1.0]
if data_type == 'data': data[energy] = data_import
elif data_type == 'cocktail': cocktail[energy] = data_import
elif data_type == 'rho' or data_type == 'vacRho' or data_type == 'medium':
if noxerr and not diffRel: data_import[:,2:] = 0.
data_import[:,1] /= yunit
if data_type == 'rho':
mask = data_import[:,1] > 0.1
rhofo[energy] = data_import if diffRel else data_import[mask]
elif data_type == 'vacRho':
mask = (data_import[:,0] > 0.35) & (data_import[:,1] > 0.01)
vacrho[energy] = data_import if diffRel else data_import[mask]
elif not nomed and data_type == 'medium':
medium[energy] = data_import
nSetsData = len(data)
shift = { '19.6': '1e0', '27': '1e1', '39': '1e2', '62.4': '1e3', '200': '1e4'
} if not diffRel else {
'19.6': '1', '27': '8', '39': '50', '62.4': '200', '200': '900'
}
dataOrdered = OrderedDict()
for energy in sorted(data, key=float, reverse=True):
# data & bin edges
# getUArray propagates stat/syst errors separately internally but
# errors need to be doubled to retrieve correct errors
uData = getUArray(data[energy])
eData = getEdges(data[energy])
uCocktail = getUArray(cocktail[energy])
eCocktail = getEdges(cocktail[energy])
loop = [eData]
if energy in medium and diffRel:
uMedium = getUArray(medium[energy])
eMedium = getEdges(medium[energy])
loop.append(eMedium)
if energy in rhofo and diffRel:
uRho = getUArray(rhofo[energy])
eRho = getEdges(rhofo[energy])
loop.append(eRho)
if energy in vacrho and diffRel:
uVacRho = getUArray(vacrho[energy])
eVacRho = getEdges(vacrho[energy])
loop.append(eVacRho)
# loop data/medium bins
for l, eArr in enumerate(loop):
for i, (e0, e1) in enumzipEdges(eArr):
logging.debug('%s/%d> %g - %g:' % (energy, l, e0, e1))
# get cocktail sum in data bin range
# value+/-0.5*tot.uncert.
uCocktailSum = getCocktailSum(e0, e1, eCocktail, uCocktail)
if uCocktailSum == 0.: continue
# calc. difference and divide by data binwidth again
# + set data point
if l == 0:
uDiff = uData[i] # value+/-0.5*tot.uncert.
if diffRel:
uDiff /= uCocktailSum # value+/-0.5*tot.uncert.
else:
uDiff -= uCocktailSum
uDiff /= data[energy][i,2] * 2 * yunit
dp = [
data[energy][i,0], uDiff.nominal_value,
data[energy][i,2] if not noxerr else 0.,
getErrorComponent(uDiff, 'stat'),
getErrorComponent(uDiff, 'syst')
]
key = ' '.join([energy, 'GeV'])
if noxerr:
if diffRel:
key += ' {/Symbol \264} %s' % shift[energy]
else:
expon = shift[energy].split('e')[1]
key += ' {/Symbol \264} 10^{%s}' % expon
elif l == 1:
# only done if diffRel
uDiff = uMedium[i]
uDiff /= uCocktailSum
dp = [
medium[energy][i,0], uDiff.nominal_value+1,
medium[energy][i,2] if not noxerr else 0.,
0., 0. # both errors included in data points
]
key = ' '.join([energy, 'GeV (Med.)'])
elif l == 2:
# only done if diffRel
uDiff = uRho[i]
uDiff /= uCocktailSum
dp = [
rhofo[energy][i,0], uDiff.nominal_value+1.,
rhofo[energy][i,2] if not noxerr else 0.,
0., 0. # both errors included in data points
]
key = ' '.join([energy, 'GeV (RhoFO.)'])
elif l == 3:
# only done if diffRel
uDiff = uVacRho[i]
uDiff /= uCocktailSum
dp = [
vacrho[energy][i,0], uDiff.nominal_value+1.,
vacrho[energy][i,2] if not noxerr else 0.,
0., 0. # both errors included in data points
]
key = ' '.join([energy, 'GeV (VacRho.)'])
# build list of data points
if diffRel or l == 0:
if dp[0] > 0.7425 and dp[0] < 0.825: continue # mask out omega region
if dp[0] > 0.97 and dp[0] < 1.0495: continue # mask out phi region
if key in dataOrdered:
dataOrdered[key] = np.vstack([dataOrdered[key], dp])
else:
dataOrdered[key] = np.array([ dp ])
if not diffRel:
if energy in medium:
dataOrdered[' '.join([energy, 'GeV (Med.)'])] = medium[energy]
if energy in rhofo:
dataOrdered[' '.join([energy, 'GeV (RhoFO.)'])] = rhofo[energy]
if energy in vacrho:
dataOrdered[' '.join([energy, 'GeV (VacRho.)'])] = vacrho[energy]
# make plot
nSets = len(dataOrdered)
nCats = 4
nSetsPlot = nSets/nCats if nSets > nSetsData else nSets
props = [
'lt 1 lw 4 ps 1.5 lc %s pt 18' % default_colors[i]
for i in reversed(range(nSetsPlot))
]
titles = dataOrdered.keys()
if nSets > nSetsData:
props = zip_flat(props, *[
[
'with lines lt %d lw 4 lc %s' % (j+1, default_colors[i])
for i in reversed(range(nSetsPlot))
]
for j in xrange(nCats-1)
])
titles = zip_flat(dataOrdered.keys()[::nCats], *[ [''] * nSetsPlot for j in xrange(nCats-1) ])
global labels
labels = {
'{BES: STAR Preliminary}' if version == 'QM12Latest200' or \
version == 'QM14' or version == 'LatestPatrickJieYi'
else 'STAR Preliminary': [
0.4 if diffRel else 0.2,0.09 if not diffRel and noxerr else 0.75,False
],
'{200 GeV: PRL 113 022301' if version == 'QM12Latest200' \
or version == 'QM14' or version == 'LatestPatrickJieYi'
else '': [0.4 if diffRel else 0.2,0.04 if not diffRel and noxerr else 0.7,False],
}
yr = [.6,2.5e3] if diffRel else [0.05,1.5e5]
if noxerr:
for k,d in dataOrdered.iteritems():
energy = getEnergy4Key(re.compile('\d+').search(k).group())
d[:,(1,3,4)] *= float(shift[energy])
gpcalls = [
'object 1 rectangle back fc rgb "grey" from 0.75,%f to 0.825,%f' % \
(1.7 if diffRel else 0.5, yr[1]),
'object 2 rectangle back fc rgb "grey" from 0.96,%f to 1.0495,%f' % \
(1.7 if diffRel else 0.5, yr[1]),
'object 3 rectangle back fc rgb "#C6E2FF" from 0.4,%f to 0.75,%f' % \
(1.7 if diffRel else 0.5, yr[1]),
'boxwidth 0.01 absolute',
]
hline = 1. if diffRel else .5
lines = dict(
(('x=%g' % (hline*float(shift[energy]))), 'lc rgb "black" lw 4 lt 4')
for energy in shift
)
pseudo_point = np.array([[-1,1,0,0,0]])
make_plot(
data = dataOrdered.values() + [
pseudo_point, pseudo_point, pseudo_point, pseudo_point
],
properties = props + [
'with lines lt %d lw 4 lc rgb "black"' % (lt+1)
for lt in xrange(nCats)
],
titles = titles + [
'HMBT + QGP', 'BW/FO-{/Symbol \162}', '{/Symbol \162}/{/Symbol \167} VacSF+FB+FO',
'baseline', #'%g%s' % (hline, ' {/Symbol \264} 10^{-3}' if not diffRel else '')
],
name = os.path.join(outDir, 'diff%s%s%s%s' % (
'Rel' if diffRel else 'Abs', version,
'NoMed' if nomed else '', 'NoXErr' if noxerr else ''
)),
xlabel = 'dielectron invariant mass, M_{ee} (GeV/c^{2})',
ylabel = 'Enhancement Ratio' if diffRel else 'Excess Yield / dM_{ee} ({/Symbol \264} 10^{-3} (GeV/c^2)^{-1})',
#labels = labels,
xr = [0.18,0.97], yr = yr, ylog = True,
key = ['at graph 0.96,1.17', 'maxrows 3', 'width -4', 'nobox', 'samplen 0.9'],
lines = lines if noxerr else {},
gpcalls = gpcalls,
lmargin = 0.17, bmargin = 0.1, tmargin = 0.86, rmargin = 0.98,
size = '9in,11in', arrow_offset = 0.9, #arrow_length = 0.4,
)
if nomed or noxerr or version == 'QM12': return 'done'
# integrated enhancement factor
if diffRel:
enhance = {}
data_enhance, medium_enhance, rhofo_enhance, vacrho_enhance = None, None, None, None
for energy in sorted(data, key=float):
for systLMR in [False, True]:
suffix = str(energy)
uEnhanceData = getMassRangesSums(
data[energy], onlyLMR = True,
systLMR = systLMR, suffix = suffix
)
uEnhanceCocktail = getMassRangesSums(
cocktail[energy], onlyLMR = True,
systLMR = systLMR, suffix = suffix
)
if energy in medium:
uEnhanceMed = getMassRangesSums(
medium[energy], onlyLMR = True,
systLMR = systLMR, suffix = suffix
)
if energy in rhofo:
uEnhanceRhoFO = getMassRangesSums(
rhofo[energy], onlyLMR = True,
systLMR = systLMR, suffix = suffix
)
if energy in vacrho:
uEnhanceVacRho = getMassRangesSums(
vacrho[energy], onlyLMR = True,
systLMR = systLMR, suffix = suffix
)
if not systLMR: # uEnhance's are ufloats
uEnhanceData /= uEnhanceCocktail
dp = [
float(energy), uEnhanceData.nominal_value, 0,
getErrorComponent(uEnhanceData, 'stat'),
getErrorComponent(uEnhanceData, 'syst')
]
if data_enhance is None: data_enhance = [ dp ]
else: data_enhance.append(dp)
if energy in medium:
uEnhanceMed /= uEnhanceCocktail
dpM = [ float(energy), uEnhanceMed.nominal_value+1., 0, 0, 0 ]
if medium_enhance is None: medium_enhance = [ dpM ]
else: medium_enhance.append(dpM)
if energy in rhofo:
uEnhanceRhoFO /= uEnhanceCocktail
dpM = [ float(energy), uEnhanceRhoFO.nominal_value+1., 0, 0, 0 ]
if rhofo_enhance is None: rhofo_enhance = [ dpM ]
else: rhofo_enhance.append(dpM)
if energy in vacrho:
uEnhanceVacRho /= uEnhanceCocktail
dpM = [ float(energy), uEnhanceVacRho.nominal_value+1., 0, 0, 0 ]
if vacrho_enhance is None: vacrho_enhance = [ dpM ]
else: vacrho_enhance.append(dpM)
else: # uEnhance's are dicts of ufloats
for k in uEnhanceData:
uEnhanceData[k] /= uEnhanceCocktail[k]
dp = [
float(energy), uEnhanceData[k].nominal_value, 0,
getErrorComponent(uEnhanceData[k], 'stat'),
getErrorComponent(uEnhanceData[k], 'syst')
]
rngstr = k.split('_')[-1]
data_key = 'data_' + rngstr
if data_key not in enhance: enhance[data_key] = [ dp ]
else: enhance[data_key].append(dp)
if k in uEnhanceMed:
uEnhanceMed[k] /= uEnhanceCocktail[k]
dpM = [ float(energy), uEnhanceMed[k].nominal_value ]
med_key = 'model_' + rngstr
if med_key not in enhance: enhance[med_key] = [ dpM ]
else: enhance[med_key].append(dpM)
if k in uEnhanceRhoFO:
uEnhanceRhoFO[k] /= uEnhanceCocktail[k]
dpM = [ float(energy), uEnhanceRhoFO[k].nominal_value+1. ]
rhofo_key = 'rhofo_' + rngstr
if rhofo_key not in enhance: enhance[rhofo_key] = [ dpM ]
else: enhance[rhofo_key].append(dpM)
if k in uEnhanceVacRho:
uEnhanceVacRho[k] /= uEnhanceCocktail[k]
dpM = [ float(energy), uEnhanceVacRho[k].nominal_value+1. ]
vacrho_key = 'vacrho_' + rngstr
if vacrho_key not in enhance: enhance[vacrho_key] = [ dpM ]
else: enhance[vacrho_key].append(dpM)
xfacs = os.path.join(outDir, 'xfacs%s.dat' % version)
if os.path.exists(xfacs): os.remove(xfacs)
fSystLMR = open(xfacs, 'ab')
for k in sorted(enhance.keys()):
np.savetxt(fSystLMR, enhance[k], fmt = '%g', header = k, comments = '\n\n')
fSystLMR.close()
yr_upp = 4 if version == 'QM12Latest200' or version == 'QM14' else 7
if version == 'LatestPatrickJieYi': yr_upp = 5.5
#labels.update({
# '{LMR: %.2f < M_{ee} < %.2f GeV/c^{2}}' % (eRanges[1], eRanges[2]): [0.4,0.15,False]
#})
make_plot(
data = [
pseudo_point, pseudo_point, pseudo_point,
np.array([[17.3,2.73,0,0.25,1.47]]),
np.array([[200,4.7,0,0.4,1.5]]),
np.array(enhance['data_0.15-0.75']),
np.array(enhance['data_0.4-0.75']),
np.array(medium_enhance),
np.array(rhofo_enhance), np.array(vacrho_enhance)
],
properties = [
'lt 1 lw 4 ps 2 lc rgb "white" pt 19',
'lt 1 lw 4 ps 2 lc rgb "white" pt 20',
'lt 1 lw 4 ps 2 lc rgb "white" pt 18',
'lt 1 lw 4 ps 2 lc %s pt 19' % default_colors[1],
'lt 1 lw 4 ps 2 lc %s pt 20' % default_colors[3],
'lt 1 lw 4 ps 2 lc %s pt 18' % default_colors[4],
'lt 1 lw 4 ps 2 lc %s pt 18' % default_colors[0],
'with lines lt 2 lw 4 lc %s' % default_colors[-1],
'with lines lt 3 lw 4 lc %s' % default_colors[-1],
'with lines lt 4 lw 4 lc %s' % default_colors[-1],
],
titles = [
'CERES Pb+Au', 'PHENIX Au+Au', 'STAR Au+Au',
'', '', '', '',
'HMBT + QGP', 'BW/FO-{/Symbol \162}', '{/Symbol \162}/{/Symbol \167} VacSF+FB',
],
name = os.path.join(outDir, 'enhance%s' % version),
xlabel = '{/Symbol \326}s_{NN} (GeV)',
ylabel = 'LMR Enhancement Factor',
xlog = True, key = [ 'at graph 0.9,0.98', 'nobox', 'maxrows 4' ],
size = '10in,8in', bmargin = 0.13, tmargin = 0.92, rmargin = 0.99,
yr = [1.,yr_upp], xr = [14,220], gpcalls = [
'format x "%g"',
'xtics (10, 20,"" 30, 40,"" 50, 60,"" 70,"" 80,"" 90, 100, 200)',
'boxwidth 0.025 absolute',
'label 50 "{/=18 0.2 < M_{ee} < 0.6 GeV/c^{2}}" at 15.5,3 tc %s rotate center' % default_colors[1],
'label 51 "{/=18 0.15 < M_{ee} < 0.75 GeV/c^{2}}" at 180,4.2 tc %s rotate center' % default_colors[3],
'label 52 "{/=18 0.4 < M_{ee} < 0.75 GeV/c^{2}}" at 75,3.1 tc %s rotate by -20' % default_colors[0],
'label 53 "{/=18 0.15 < M_{ee} < 0.75 GeV/c^{2}}" at 50,1.2 tc %s' % default_colors[4]
], #labels = labels
)
return 'done'
# integrated excess yield in mass ranges
excess = {}
for k, v in dataOrdered.iteritems():
suffix = ''
energy = getEnergy4Key(re.compile('\d+').search(k).group())
if fnmatch(k, '*Med.*'):
suffix = '_Med'
if version != 'LatestPatrickJieYi' and energy == '27': continue # TODO
if fnmatch(k, '*RhoFO.*'): suffix = '_RhoFO'
if fnmatch(k, '*VacRho.*'): suffix = '_VacRho'
exc = getMassRangesSums(np.array(v), onlyLMR = True)
if divdNdy: exc /= dNdyPi0[energy] * 1e-2
dp = [
float(energy), exc.nominal_value, 0,
getErrorComponent(exc, 'stat'), getErrorComponent(exc, 'syst')
]
if suffix == '_Med' and not diffRel and not divdNdy:
print dp
key = 'LMR' + suffix
if key not in excess: excess[key] = [ dp ]
else: excess[key].append(dp)
logging.debug(excess)
avdata = np.array(excess['LMR'])
avg = np.average(avdata[:,1], weights = avdata[:,3])
graph_data = [
np.array([
[ 7.7, avg, 0, 0, avdata[-1][-1]],
[ 19.6, avg, 0, 0, avdata[-1][-1]]
]),
np.array([
[ 19.6, avg, 0, 0, 0], [ 200., avg, 0, 0, 0]
]),
np.array([
[ 7.7, 2*avg, 0, 0, 0], [ 19.6, avg, 0, 0, 0],
]),
np.array(excess['LMR']),
]
props = [
'with filledcurves pt 0 lc %s lw 4 lt 2' % default_colors[8],
'with lines lc %s lw 4 lt 2' % default_colors[8],
'with lines lc %s lw 8 lt 2' % default_colors[1],
'lt 1 lw 4 ps 2 lc %s pt 18' % default_colors[0],
]
tits = [
'BES-I extrapolation', '', 'model expectation', 'STAR Au+Au',
]
if version != 'QM14':
graph_data += [
np.array(excess['LMR_Med']),
np.array(excess['LMR_VacRho']),
np.array(excess['LMR_RhoFO']),
]
props += [
'with lines lt 2 lw 4 lc %s' % default_colors[-1],
'with lines lt 3 lw 4 lc %s' % default_colors[-1],
'with lines lt 4 lw 4 lc %s' % default_colors[-1],
]
tits += [
'HMBT + QGP', '{/Symbol \162}/{/Symbol \167} VacSF+FB', 'BW/FO-{/Symbol \162}',
]
yr_upp = 4.5 if version == 'QM12Latest200' or version == 'QM14' else 7
if version == 'LatestPatrickJieYi': yr_upp = 2 if divdNdy else 2.
labels = {} if version != 'QM14' else labels
if divdNdy:
labels.update(dict((str(v), [float(k)*0.9,yr_upp*1.05,True]) for k,v in dNdyPi0.items()))
labels.update({ 'dN/dy|_{/Symbol \\160}': [100,yr_upp*1.05,True]})
gpcalls = [
'format x "%g"',
'xtics (7,10,20,"" 30, 40,"" 50, 60,"" 70,"" 80,"" 90, 100, 200)',
'boxwidth 0.025 absolute',
]
if version == 'QM14':
labels.update({
'{LMR: %.2f < M_{ee} < %.2f GeV/c^{2}}' % (eRanges[1], eRanges[2]): [0.4,0.15,False],
})
else:
gpcalls.append('label 52 "{/=18 0.4 < M_{ee} < 0.75 GeV/c^{2}}" at 60,0.4 tc %s' % default_colors[0])
make_plot(
data = graph_data, properties = props, titles = tits,
name = os.path.join(outDir, 'excess%s%s' % (version,'DivdNdy' if divdNdy else '')),
xlabel = '{/Symbol \326}s_{NN} (GeV)',
ylabel = 'LMR Excess Yield %s({/Symbol \264} 10^{-%d})' % (
'/ dN/dy|_{/Symbol \\160} ' if divdNdy else '', 5 if divdNdy else 3
),
xlog = True, xr = [7,220], size = '10in,8in',
key = ['at graph 1.05,0.98', 'width -3', 'nobox', 'maxrows 3'],
bmargin = 0.13, tmargin = 0.92, rmargin = 0.99,
yr = [0,yr_upp], gpcalls = gpcalls, labels = labels,
)
return 'done'
|
example for ratio or difference plots using QM12 data (see gp_panel)
- uses uncertainties package for easier error propagation and rebinning
- stat. error for medium = 0!
- stat. error for cocktail ~ 0!
- statistical error bar on data stays the same for diff
- TODO: implement ratio!
- TODO: adjust statistical error on data for ratio!
- TODO: adjust name and ylabel for ratio
.. image:: pics/diffAbsQM12.png
:width: 450 px
:param version: plot version
:type version: str
:param nomed: don't plot medium
:type nomed: bool
:param noxerr: don't plot x-errors
:type noxerr: bool
|
train
|
https://github.com/tschaume/ccsgp_get_started/blob/e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2/ccsgp_get_started/examples/gp_rdiff.py#L17-L491
|
[
"def getWorkDirs():\n \"\"\"get input/output dirs (same input/output layout as for package)\"\"\"\n # get caller module\n caller_fullurl = inspect.stack()[1][1]\n caller_relurl = os.path.relpath(caller_fullurl)\n caller_modurl = os.path.splitext(caller_relurl)[0]\n # split caller_url & append 'Dir' to package name\n dirs = caller_modurl.split('/')\n dirs[0] = 'data' # TODO de-hardcode\n # get, check and create outdir\n outDir = os.path.join(*(['output'] + dirs[1:]))\n if not os.path.exists(outDir): os.makedirs(outDir)\n # get and check indir\n dirs.append('input')\n inDir = os.path.join(*dirs)\n if not os.path.exists(inDir):\n logging.critical('create input dir %s to continue!' % inDir)\n sys.exit(1)\n return inDir, outDir\n",
"def getEnergy4Key(energy):\n if energy == '19': return '19.6'\n if energy == '62': return '62.4'\n return energy\n",
"def getMassRangesSums(\n indata, suffix = \"\", customRanges = None,\n onlyLMR = False, systLMR = False, singleRange = False\n):\n eRangesSyst = [ eRanges if customRanges is None else customRanges ]\n if systLMR:\n step_size, nsteps, rangeOffsetsLMR = 0.05, 6, [0.15, 0.5]\n eEdgesSyst = [ [ # all lower & upper edges for LMR syst. study\n Decimal(str(rangeOffsetsLMR[j]+i*step_size))\n for i in xrange(nsteps)\n ] for j in xrange(2) ]\n # all combos of lower and upper LMR edges\n eRangesSyst = [ [ le, ue ] for ue in eEdgesSyst[1] for le in eEdgesSyst[0] ]\n onlyLMR = False # flag meaningless in this case\n uInData = getUArray(indata)\n eInData = getEdges(indata)\n uSums = {}\n for erngs in eRangesSyst:\n for i, (e0, e1) in enumzipEdges(erngs):\n if onlyLMR and i != 1: continue\n uSum = getCocktailSum(e0, e1, eInData, uInData)\n if (not systLMR) and (onlyLMR or singleRange): return uSum\n logging.debug('%g - %g: %r' % (e0, e1, uSum))\n key = mass_titles[1 if systLMR else i] + suffix\n if systLMR: key += '_%s-%s' % (e0,e1)\n uSums[key] = uSum\n return uSums\n",
"def getUArray(npArr):\n \"\"\"uncertainty array multiplied by binwidth (col2 = dx)\"\"\"\n ufloats = []\n for dp in npArr:\n u = ufloat(dp[1], abs(dp[3]), 'stat')\n v = ufloat(dp[1], abs(dp[4]), 'syst')\n r = (u+v)/2.*dp[2]*2.\n ufloats.append(r)\n # NOTE: center value ok, but both error contribs half!\n # see getErrorComponent()\n return np.array(ufloats)\n",
"def getEdges(npArr):\n \"\"\"get np array of bin edges\"\"\"\n edges = np.concatenate(([0], npArr[:,0] + npArr[:,2]))\n return np.array([Decimal(str(i)) for i in edges])\n",
"def getCocktailSum(e0, e1, eCocktail, uCocktail):\n \"\"\"get the cocktail sum for a given data bin range\"\"\"\n # get mask and according indices\n mask = (eCocktail >= e0) & (eCocktail <= e1)\n # data bin range wider than single cocktail bin\n if np.any(mask):\n idx = getMaskIndices(mask)\n # determine coinciding flags\n eCl, eCu = eCocktail[idx[0]], eCocktail[idx[1]]\n not_coinc_low, not_coinc_upp = (eCl != e0), (eCu != e1)\n # get cocktail sum in data bin (always w/o last bin)\n uCocktailSum = fsum(uCocktail[mask[:-1]][:-1])\n logging.debug(' sum: {}'.format(uCocktailSum))\n # get correction for non-coinciding edges\n if not_coinc_low:\n eCl_bw = eCl - eCocktail[idx[0]-1]\n corr_low = (eCl - e0) / eCl_bw\n abs_corr_low = float(corr_low) * uCocktail[idx[0]-1]\n uCocktailSum += abs_corr_low\n logging.debug((' low: %g == %g -> %g (%g) -> %g -> {} -> {}' % (\n e0, eCl, eCl - e0, eCl_bw, corr_low\n )).format(abs_corr_low, uCocktailSum))\n if not_coinc_upp:\n if idx[1]+1 < len(eCocktail):\n eCu_bw = eCocktail[idx[1]+1] - eCu\n corr_upp = (e1 - eCu) / eCu_bw\n abs_corr_upp = float(corr_upp) * uCocktail[idx[1]]\n else:# catch last index (quick fix!)\n abs_corr_upp = eCu_bw = corr_upp = 0\n uCocktailSum += abs_corr_upp\n logging.debug((' upp: %g == %g -> %g (%g) -> %g -> {} -> {}' % (\n e1, eCu, e1 - eCu, eCu_bw, corr_upp\n )).format(abs_corr_upp, uCocktailSum))\n else:\n mask = (eCocktail >= e0)\n idx = getMaskIndices(mask) # only use first index\n # catch if already at last index\n if idx[0] == idx[1] and idx[0] == len(eCocktail)-1:\n corr = (e1 - e0) / (eCocktail[idx[0]] - eCocktail[idx[0]-1])\n uCocktailSum = float(corr) * uCocktail[idx[0]-1]\n else: # default case\n corr = (e1 - e0) / (eCocktail[idx[0]+1] - eCocktail[idx[0]])\n uCocktailSum = float(corr) * uCocktail[idx[0]]\n logging.debug(' sum: {}'.format(uCocktailSum))\n return uCocktailSum\n",
"def enumzipEdges(eArr):\n \"\"\"zip and enumerate edges into pairs of lower and upper limits\"\"\"\n return enumerate(zip(eArr[:-1], eArr[1:]))\n",
"def getErrorComponent(result, tag):\n \"\"\"get total error contribution for component with specific tag\"\"\"\n return math.sqrt(sum(\n (error*2)**2\n for (var, error) in result.error_components().items()\n if var.tag == tag\n ))\n"
] |
import logging, argparse, os, sys, re
import numpy as np
from fnmatch import fnmatch
from collections import OrderedDict
from .utils import getWorkDirs, eRanges, getEnergy4Key
from .utils import getUArray, getEdges, getCocktailSum, enumzipEdges, getMassRangesSums
from .utils import getErrorComponent
from ..ccsgp.ccsgp import make_plot
from ..ccsgp.utils import getOpts, zip_flat
from ..ccsgp.config import default_colors
from uncertainties import ufloat
labels = None
dNdyPi0 = { '19.6': 52.8, '27': 57.6, '39': 60.8, '62.4': 77.2, '200': 105 }
def gp_rdiff_merged(version, divdNdy):
# merge plots for excess yields and enhancement factors if output files exist
inDir, outDir = getWorkDirs() # inDir not used
enhance_datdir = os.path.join(outDir, 'enhance%s' % version)
excess_datdir = os.path.join(outDir, 'excess%s%s' % (version,'DivdNdy' if divdNdy else ''))
print enhance_datdir, excess_datdir
weird_key = 'LMR Excess Yield %s({/Symbol \264} 10^{-%d} (GeV/c^2)^{-1}))' % (
'/ dN/dy|_{/Symbol \\160} ' if divdNdy else '', 5 if divdNdy else 3
)
if os.path.exists(enhance_datdir) and os.path.exists(excess_datdir):
excess_data = np.loadtxt(
open(os.path.join(
excess_datdir,
'LMR_Excess_Yield_%s_Symbol_10_%d_.dat' % (
'dN_dy___Symbol_160' if divdNdy else '', 5 if divdNdy else 3
)
), 'rb')
)
avdata = np.array(excess_data)
avg = np.average(avdata[:,1], weights = avdata[:,4])
data = OrderedDict()
data['BES-II Extrapolation'] = np.array([
[ 7.7, avg, 0, 0, excess_data[-1][-1]],
[ 9.1, avg, 0, 0, excess_data[-1][-1]],
[ 11.5, avg, 0, 0, excess_data[-1][-1]],
[ 14.6, avg, 0, 0, excess_data[-1][-1]],
[ 19.6, avg, 0, 0, excess_data[-1][-1]]
])
data['Model Expectation at BES-II'] = np.array([
[ 7.7, 2*avg, 0, 0, 0], [ 19.6, avg, 0, 0, 0],
])
data[weird_key] = excess_data
#data['LMR Enhancement Factor'] = np.loadtxt(
# open(os.path.join(enhance_datdir, 'LMR_Enhancement_Factor.dat'), 'rb')
#)
data['Model for Excess'] = np.loadtxt(
open(os.path.join(excess_datdir, 'Model.dat'), 'rb')
)
#data['Model for Enhancement'] = np.loadtxt(
# open(os.path.join(enhance_datdir, 'Model.dat'), 'rb')
#)
#xshift = 1.05
#data['LMR Enhancement Factor'][:,0] *= xshift
#data['Model for Enhancement'][:,0] *= xshift
if divdNdy:
labels.update(dict((str(v), [float(k)*0.9,5.2,True]) for k,v in dNdyPi0.items()))
labels.update({ 'dN/dy|_{/Symbol \\160}': [0.73,1.043,False]})
make_plot(
data = data.values(),
properties = [
'with filledcurves pt 0 lc %s lw 4 lt 2' % default_colors[8]
] + [
'with lines lc %s lw 10 lt 2' % default_colors[3]
] + [
'lt 1 lw 4 ps 1.5 lc %s pt %d' % (default_colors[i], 18+i) for i in xrange(1) #2
] + [
'with lines lt %d lw 4 lc %s' % (i+2, default_colors[i]) for i in xrange(1) #2
],
titles = data.keys(),
name = os.path.join(outDir, 'enhanceexcess%s' % version),
xlabel = '{/Symbol \326}s_{NN} (GeV)', ylabel = '',
lmargin = 0.02, rmargin = 0.99, xlog = True,
xr = [7,220], key = ['width -10'],#, 'font ",18"', 'spacing 0.9'],
yr = [0.,5 if version == 'QM12Latest200' or version == 'QM14' else 7],
#labels = labels,
gpcalls = [
'format x "%g"',
'xtics (7,10,20,"" 30, 40,"" 50, 60,"" 70,"" 80,"" 90, 100, 200)',
],
)
return 'done'
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("version", help="version = subdir name of input dir")
parser.add_argument("--nomed", help="don't plot medium", action="store_true")
parser.add_argument("--noxerr", help="no dx errors", action="store_true")
parser.add_argument("--diffRel", help="plot relative difference (ratio)", action="store_true")
parser.add_argument("--divdNdy", help="divide excess plot by dNdy_pi0", action="store_true")
parser.add_argument("--log", help="show log output", action="store_true")
args = parser.parse_args()
loglevel = 'DEBUG' if args.log else 'WARNING'
logging.basicConfig(
format='%(message)s', level=getattr(logging, loglevel)
)
print gp_rdiff(args.version, args.nomed, args.noxerr, args.diffRel, args.divdNdy)
#print gp_rdiff_merged(args.version,args.divdNdy)
|
bwesterb/mirte
|
src/__init__.py
|
get_a_manager
|
python
|
def get_a_manager(threadPool_settings=None):
global __singleton_manager
if __singleton_manager is None:
def _thread_entry():
if prctl:
prctl.set_name('mirte manager')
m.run()
l.info('manager.run() returned')
l = logging.getLogger('mirte.get_a_manager')
l.info("Creating new instance")
m = Manager(logging.getLogger('mirte'))
if threadPool_settings:
m.update_instance('threadPool', threadPool_settings)
threading.Thread(target=_thread_entry).start()
m.running_event.wait()
__singleton_manager = m
return __singleton_manager
|
On first call, creates and returns a @mirte.core.Manager. On
subsequent calls, returns the previously created instance.
If it is the first call, it will initialize the threadPool
with @threadPool_settings.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/__init__.py#L17-L38
| null |
import logging
import threading
from mirte.core import Manager
__names__ = ['get_a_manager']
try:
import prctl
except ImportError:
prctl = None
__singleton_manager = None
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/main.py
|
parse_cmdLine_instructions
|
python
|
def parse_cmdLine_instructions(args):
instructions = dict()
rargs = list()
for arg in args:
if arg[:2] == '--':
tmp = arg[2:]
bits = tmp.split('=', 1)
if len(bits) == 1:
bits.append('')
instructions[bits[0]] = bits[1]
else:
rargs.append(arg)
return instructions, rargs
|
Parses command-line arguments. These are
instruction to the manager to create instances and
put settings.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/main.py#L14-L29
| null |
import logging
import os.path
import sys
import six
from mirte.core import Manager
from mirte.mirteFile import load_mirteFile
from sarah.order import sort_by_successors
import sarah.coloredLogging
def execute_cmdLine_instructions(instructions, m, l):
""" Applies the instructions given via
<instructions> on the manager <m> """
opt_lut = dict()
inst_lut = dict()
for k, v in six.iteritems(instructions):
bits = k.split('-', 1)
if len(bits) == 1:
if v not in m.modules:
raise KeyError("No such module: %s" % v)
inst_lut[bits[0]] = v
else:
if not bits[0] in opt_lut:
opt_lut[bits[0]] = list()
opt_lut[bits[0]].append((bits[1], v))
inst_list = sort_by_successors(
six.viewkeys(inst_lut),
lambda inst: [v for (k, v) in opt_lut.get(inst, ())
if k in m.modules[inst_lut[inst]].deps]
)
for k in reversed(tuple(inst_list)):
if k in m.insts:
raise NotImplementedError(
"Overwriting instancens not yet supported")
settings = dict()
if k in opt_lut:
for k2, v2 in opt_lut[k]:
settings[k2] = v2
m.create_instance(k, inst_lut[k], settings)
for k in opt_lut:
if k in inst_lut:
continue
for k2, v2 in opt_lut[k]:
if k not in m.insts:
raise ValueError("No such instance %s" % k)
m.change_setting(k, k2, v2)
class MirteFormatter(logging.Formatter):
def __init__(self):
pass
def format(self, record):
record.message = record.getMessage()
if 'sid' in record.__dict__:
record.name += '.' + str(record.sid)
ret = ("%(relativeCreated)d %(levelname)" +
"-8s%(name)s:%(message)s") % record.__dict__
if record.exc_info:
ret += self.formatException(record.exc_info)
return ret
def main():
""" Entry-point """
sarah.coloredLogging.basicConfig(level=logging.DEBUG,
formatter=MirteFormatter())
l = logging.getLogger('mirte')
instructions, args = parse_cmdLine_instructions(sys.argv[1:])
m = Manager(l)
load_mirteFile(args[0] if args else 'default', m, logger=l)
execute_cmdLine_instructions(instructions, m, l)
m.run()
if __name__ == '__main__':
if os.path.abspath('.') in sys.path:
sys.path.remove(os.path.abspath('.'))
main()
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/main.py
|
execute_cmdLine_instructions
|
python
|
def execute_cmdLine_instructions(instructions, m, l):
opt_lut = dict()
inst_lut = dict()
for k, v in six.iteritems(instructions):
bits = k.split('-', 1)
if len(bits) == 1:
if v not in m.modules:
raise KeyError("No such module: %s" % v)
inst_lut[bits[0]] = v
else:
if not bits[0] in opt_lut:
opt_lut[bits[0]] = list()
opt_lut[bits[0]].append((bits[1], v))
inst_list = sort_by_successors(
six.viewkeys(inst_lut),
lambda inst: [v for (k, v) in opt_lut.get(inst, ())
if k in m.modules[inst_lut[inst]].deps]
)
for k in reversed(tuple(inst_list)):
if k in m.insts:
raise NotImplementedError(
"Overwriting instancens not yet supported")
settings = dict()
if k in opt_lut:
for k2, v2 in opt_lut[k]:
settings[k2] = v2
m.create_instance(k, inst_lut[k], settings)
for k in opt_lut:
if k in inst_lut:
continue
for k2, v2 in opt_lut[k]:
if k not in m.insts:
raise ValueError("No such instance %s" % k)
m.change_setting(k, k2, v2)
|
Applies the instructions given via
<instructions> on the manager <m>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/main.py#L32-L67
| null |
import logging
import os.path
import sys
import six
from mirte.core import Manager
from mirte.mirteFile import load_mirteFile
from sarah.order import sort_by_successors
import sarah.coloredLogging
def parse_cmdLine_instructions(args):
""" Parses command-line arguments. These are
instruction to the manager to create instances and
put settings. """
instructions = dict()
rargs = list()
for arg in args:
if arg[:2] == '--':
tmp = arg[2:]
bits = tmp.split('=', 1)
if len(bits) == 1:
bits.append('')
instructions[bits[0]] = bits[1]
else:
rargs.append(arg)
return instructions, rargs
class MirteFormatter(logging.Formatter):
def __init__(self):
pass
def format(self, record):
record.message = record.getMessage()
if 'sid' in record.__dict__:
record.name += '.' + str(record.sid)
ret = ("%(relativeCreated)d %(levelname)" +
"-8s%(name)s:%(message)s") % record.__dict__
if record.exc_info:
ret += self.formatException(record.exc_info)
return ret
def main():
""" Entry-point """
sarah.coloredLogging.basicConfig(level=logging.DEBUG,
formatter=MirteFormatter())
l = logging.getLogger('mirte')
instructions, args = parse_cmdLine_instructions(sys.argv[1:])
m = Manager(l)
load_mirteFile(args[0] if args else 'default', m, logger=l)
execute_cmdLine_instructions(instructions, m, l)
m.run()
if __name__ == '__main__':
if os.path.abspath('.') in sys.path:
sys.path.remove(os.path.abspath('.'))
main()
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/main.py
|
main
|
python
|
def main():
sarah.coloredLogging.basicConfig(level=logging.DEBUG,
formatter=MirteFormatter())
l = logging.getLogger('mirte')
instructions, args = parse_cmdLine_instructions(sys.argv[1:])
m = Manager(l)
load_mirteFile(args[0] if args else 'default', m, logger=l)
execute_cmdLine_instructions(instructions, m, l)
m.run()
|
Entry-point
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/main.py#L86-L95
|
[
"def parse_cmdLine_instructions(args):\n \"\"\" Parses command-line arguments. These are\n instruction to the manager to create instances and\n put settings. \"\"\"\n instructions = dict()\n rargs = list()\n for arg in args:\n if arg[:2] == '--':\n tmp = arg[2:]\n bits = tmp.split('=', 1)\n if len(bits) == 1:\n bits.append('')\n instructions[bits[0]] = bits[1]\n else:\n rargs.append(arg)\n return instructions, rargs\n",
"def execute_cmdLine_instructions(instructions, m, l):\n \"\"\" Applies the instructions given via\n <instructions> on the manager <m> \"\"\"\n opt_lut = dict()\n inst_lut = dict()\n for k, v in six.iteritems(instructions):\n bits = k.split('-', 1)\n if len(bits) == 1:\n if v not in m.modules:\n raise KeyError(\"No such module: %s\" % v)\n inst_lut[bits[0]] = v\n else:\n if not bits[0] in opt_lut:\n opt_lut[bits[0]] = list()\n opt_lut[bits[0]].append((bits[1], v))\n inst_list = sort_by_successors(\n six.viewkeys(inst_lut),\n lambda inst: [v for (k, v) in opt_lut.get(inst, ())\n if k in m.modules[inst_lut[inst]].deps]\n )\n for k in reversed(tuple(inst_list)):\n if k in m.insts:\n raise NotImplementedError(\n \"Overwriting instancens not yet supported\")\n settings = dict()\n if k in opt_lut:\n for k2, v2 in opt_lut[k]:\n settings[k2] = v2\n m.create_instance(k, inst_lut[k], settings)\n for k in opt_lut:\n if k in inst_lut:\n continue\n for k2, v2 in opt_lut[k]:\n if k not in m.insts:\n raise ValueError(\"No such instance %s\" % k)\n m.change_setting(k, k2, v2)\n"
] |
import logging
import os.path
import sys
import six
from mirte.core import Manager
from mirte.mirteFile import load_mirteFile
from sarah.order import sort_by_successors
import sarah.coloredLogging
def parse_cmdLine_instructions(args):
""" Parses command-line arguments. These are
instruction to the manager to create instances and
put settings. """
instructions = dict()
rargs = list()
for arg in args:
if arg[:2] == '--':
tmp = arg[2:]
bits = tmp.split('=', 1)
if len(bits) == 1:
bits.append('')
instructions[bits[0]] = bits[1]
else:
rargs.append(arg)
return instructions, rargs
def execute_cmdLine_instructions(instructions, m, l):
""" Applies the instructions given via
<instructions> on the manager <m> """
opt_lut = dict()
inst_lut = dict()
for k, v in six.iteritems(instructions):
bits = k.split('-', 1)
if len(bits) == 1:
if v not in m.modules:
raise KeyError("No such module: %s" % v)
inst_lut[bits[0]] = v
else:
if not bits[0] in opt_lut:
opt_lut[bits[0]] = list()
opt_lut[bits[0]].append((bits[1], v))
inst_list = sort_by_successors(
six.viewkeys(inst_lut),
lambda inst: [v for (k, v) in opt_lut.get(inst, ())
if k in m.modules[inst_lut[inst]].deps]
)
for k in reversed(tuple(inst_list)):
if k in m.insts:
raise NotImplementedError(
"Overwriting instancens not yet supported")
settings = dict()
if k in opt_lut:
for k2, v2 in opt_lut[k]:
settings[k2] = v2
m.create_instance(k, inst_lut[k], settings)
for k in opt_lut:
if k in inst_lut:
continue
for k2, v2 in opt_lut[k]:
if k not in m.insts:
raise ValueError("No such instance %s" % k)
m.change_setting(k, k2, v2)
class MirteFormatter(logging.Formatter):
def __init__(self):
pass
def format(self, record):
record.message = record.getMessage()
if 'sid' in record.__dict__:
record.name += '.' + str(record.sid)
ret = ("%(relativeCreated)d %(levelname)" +
"-8s%(name)s:%(message)s") % record.__dict__
if record.exc_info:
ret += self.formatException(record.exc_info)
return ret
if __name__ == '__main__':
if os.path.abspath('.') in sys.path:
sys.path.remove(os.path.abspath('.'))
main()
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
depsOf_of_mirteFile_instance_definition
|
python
|
def depsOf_of_mirteFile_instance_definition(man, insts):
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
|
Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L31-L38
| null |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_module_definition(defs):
""" Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile """
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
def module_definition_from_mirteFile_dict(man, d):
""" Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>. """
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
def load_mirteFile(path, m, logger=None):
""" Loads the mirte-file at <path> into the manager <m>. """
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
def _load_mirteFile(d, m):
""" Loads the dictionary from the mirteFile into <m> """
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
def find_mirteFile(name, extra_path=None):
""" Resolves <name> to a path. Uses <extra_path> """
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
def walk_mirteFiles(name, logger=None):
""" Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath> """
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
def _parse_mirteFile(path, logger=None):
""" Open and parses the mirteFile at <path>. """
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
depsOf_of_mirteFile_module_definition
|
python
|
def depsOf_of_mirteFile_module_definition(defs):
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
|
Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L41-L49
| null |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_instance_definition(man, insts):
""" Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile """
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
def module_definition_from_mirteFile_dict(man, d):
""" Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>. """
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
def load_mirteFile(path, m, logger=None):
""" Loads the mirte-file at <path> into the manager <m>. """
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
def _load_mirteFile(d, m):
""" Loads the dictionary from the mirteFile into <m> """
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
def find_mirteFile(name, extra_path=None):
""" Resolves <name> to a path. Uses <extra_path> """
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
def walk_mirteFiles(name, logger=None):
""" Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath> """
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
def _parse_mirteFile(path, logger=None):
""" Open and parses the mirteFile at <path>. """
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
module_definition_from_mirteFile_dict
|
python
|
def module_definition_from_mirteFile_dict(man, d):
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
|
Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L52-L92
| null |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_instance_definition(man, insts):
""" Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile """
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
def depsOf_of_mirteFile_module_definition(defs):
""" Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile """
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
def load_mirteFile(path, m, logger=None):
""" Loads the mirte-file at <path> into the manager <m>. """
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
def _load_mirteFile(d, m):
""" Loads the dictionary from the mirteFile into <m> """
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
def find_mirteFile(name, extra_path=None):
""" Resolves <name> to a path. Uses <extra_path> """
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
def walk_mirteFiles(name, logger=None):
""" Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath> """
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
def _parse_mirteFile(path, logger=None):
""" Open and parses the mirteFile at <path>. """
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
load_mirteFile
|
python
|
def load_mirteFile(path, m, logger=None):
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
|
Loads the mirte-file at <path> into the manager <m>.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L95-L109
|
[
"def walk_mirteFiles(name, logger=None):\n \"\"\" Yields (cpath, d) for all dependencies of and including the\n mirte-file <name>, where <d> are the dictionaries from\n the mirte-file at <cpath> \"\"\"\n stack = [(name, find_mirteFile(name, (os.getcwd(),)))]\n loadStack = []\n had = dict()\n while stack:\n name, path = stack.pop()\n if path in had:\n d = had[path]\n else:\n d = _parse_mirteFile(path, logger)\n had[path] = d\n loadStack.append((name, path, d))\n if 'includes' not in d:\n continue\n for include in d['includes']:\n stack.append(\n (include,\n find_mirteFile(include, (os.path.dirname(path),))))\n had = set()\n for name, path, d in reversed(loadStack):\n if path in had:\n continue\n had.add(path)\n yield name, path, d\n",
"def _load_mirteFile(d, m):\n \"\"\" Loads the dictionary from the mirteFile into <m> \"\"\"\n defs = d['definitions'] if 'definitions' in d else {}\n insts = d['instances'] if 'instances' in d else {}\n # Filter out existing instances\n insts_to_skip = []\n for k in insts:\n if k in m.insts:\n m.update_instance(k, dict(insts[k]))\n insts_to_skip.append(k)\n for k in insts_to_skip:\n del(insts[k])\n # Sort module definitions by dependency\n it = sort_by_successors(\n six.viewkeys(defs),\n dual_cover(\n six.viewkeys(defs),\n restricted_cover(\n six.viewkeys(defs),\n depsOf_of_mirteFile_module_definition(defs)\n )\n )\n )\n # Add module definitions\n for k in it:\n m.add_module_definition(\n k,\n module_definition_from_mirteFile_dict(m, defs[k])\n )\n # Sort instance declarations by dependency\n it = sort_by_successors(\n six.viewkeys(insts),\n dual_cover(\n six.viewkeys(insts),\n restricted_cover(\n six.viewkeys(insts),\n depsOf_of_mirteFile_instance_definition(m, insts)\n )\n )\n )\n # Create instances\n for k in it:\n settings = dict(insts[k])\n del(settings['module'])\n m.create_instance(k, insts[k]['module'], settings)\n"
] |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_instance_definition(man, insts):
""" Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile """
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
def depsOf_of_mirteFile_module_definition(defs):
""" Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile """
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
def module_definition_from_mirteFile_dict(man, d):
""" Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>. """
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
def _load_mirteFile(d, m):
""" Loads the dictionary from the mirteFile into <m> """
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
def find_mirteFile(name, extra_path=None):
""" Resolves <name> to a path. Uses <extra_path> """
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
def walk_mirteFiles(name, logger=None):
""" Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath> """
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
def _parse_mirteFile(path, logger=None):
""" Open and parses the mirteFile at <path>. """
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
_load_mirteFile
|
python
|
def _load_mirteFile(d, m):
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
|
Loads the dictionary from the mirteFile into <m>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L112-L156
|
[
"def depsOf_of_mirteFile_instance_definition(man, insts):\n \"\"\" Returns a function that returns the dependencies of\n an instance definition by its name, where insts is a\n dictionary of instance definitions from a mirteFile \"\"\"\n return lambda x: [a[1] for a in six.iteritems(insts[x])\n if a[0] in [dn for dn, d in (\n six.iteritems(man.modules[insts[x]['module']].deps)\n if 'module' in insts[x] else [])]]\n",
"def depsOf_of_mirteFile_module_definition(defs):\n \"\"\" Returns a function that returns the dependencies of a module\n definition by its name, where defs is a dictionary of module\n definitions from a mirteFile \"\"\"\n return lambda x: (list(filter(lambda z: z is not None and z in defs,\n map(lambda y: y[1].get('type'),\n six.iteritems(defs[x]['settings'])\n if 'settings' in defs[x] else [])))) + \\\n (list(defs[x]['inherits']) if 'inherits' in defs[x] else [])\n",
"def module_definition_from_mirteFile_dict(man, d):\n \"\"\" Creates a ModuleDefinition instance from the dictionary <d> from\n a mirte-file for the Manager instance <man>. \"\"\"\n m = ModuleDefinition()\n if 'inherits' not in d:\n d['inherits'] = list()\n if 'settings' not in d:\n d['settings'] = dict()\n if 'implementedBy' in d:\n m.implementedBy = d['implementedBy']\n m.inherits = set(d['inherits'])\n for p in d['inherits']:\n if p not in man.modules:\n raise ValueError(\"No such module %s\" % p)\n m.deps.update(man.modules[p].deps)\n m.vsettings.update(man.modules[p].vsettings)\n m.inherits.update(man.modules[p].inherits)\n m.run = m.run or man.modules[p].run\n if 'run' in d:\n m.run = d['run']\n if len(m.inherits) == 0:\n m.inherits = set(['module'])\n for k, v in six.iteritems(d['settings']):\n if 'type' not in v:\n if k not in m.vsettings:\n raise ValueError(\"No such existing vsetting %s\" % k)\n if 'default' in v:\n m.vsettings[k] = copy.copy(m.vsettings[k])\n m.vsettings[k].default = v['default']\n continue\n if v['type'] in man.modules:\n m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))\n elif v['type'] in man.valueTypes:\n m.vsettings[k] = VSettingDefinition(\n v['type'],\n (man.valueTypes[v['type']](v['default'])\n if 'default' in v else None)\n )\n else:\n raise ValueError(\"No such module or valuetype %s\" % v)\n return m\n"
] |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_instance_definition(man, insts):
""" Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile """
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
def depsOf_of_mirteFile_module_definition(defs):
""" Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile """
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
def module_definition_from_mirteFile_dict(man, d):
""" Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>. """
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
def load_mirteFile(path, m, logger=None):
""" Loads the mirte-file at <path> into the manager <m>. """
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
def find_mirteFile(name, extra_path=None):
""" Resolves <name> to a path. Uses <extra_path> """
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
def walk_mirteFiles(name, logger=None):
""" Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath> """
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
def _parse_mirteFile(path, logger=None):
""" Open and parses the mirteFile at <path>. """
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
find_mirteFile
|
python
|
def find_mirteFile(name, extra_path=None):
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
|
Resolves <name> to a path. Uses <extra_path>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L159-L170
| null |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_instance_definition(man, insts):
""" Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile """
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
def depsOf_of_mirteFile_module_definition(defs):
""" Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile """
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
def module_definition_from_mirteFile_dict(man, d):
""" Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>. """
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
def load_mirteFile(path, m, logger=None):
""" Loads the mirte-file at <path> into the manager <m>. """
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
def _load_mirteFile(d, m):
""" Loads the dictionary from the mirteFile into <m> """
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
def walk_mirteFiles(name, logger=None):
""" Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath> """
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
def _parse_mirteFile(path, logger=None):
""" Open and parses the mirteFile at <path>. """
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
walk_mirteFiles
|
python
|
def walk_mirteFiles(name, logger=None):
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
|
Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L173-L199
|
[
"def find_mirteFile(name, extra_path=None):\n \"\"\" Resolves <name> to a path. Uses <extra_path> \"\"\"\n extra_path = () if extra_path is None else extra_path\n for bp in chain(extra_path, sys.path):\n pb = os.path.join(bp, name)\n p = pb + FILE_SUFFIX\n if os.path.exists(p):\n return os.path.abspath(p)\n p = os.path.join(pb, DEFAULT_FILE)\n if os.path.exists(p):\n return os.path.abspath(p)\n raise ValueError(\"Couldn't find mirteFile %s\" % name)\n",
"def _parse_mirteFile(path, logger=None):\n \"\"\" Open and parses the mirteFile at <path>. \"\"\"\n l = logging.getLogger('_parse_mirteFile') if logger is None else logger\n cache_path = os.path.join(os.path.dirname(path),\n CACHE_FILENAME_TEMPLATE % os.path.basename(path))\n if (os.path.exists(cache_path) and\n os.path.getmtime(cache_path) >= os.path.getmtime(path)):\n with open(cache_path) as f:\n return msgpack.unpack(f)\n with open(path) as f:\n ret = yaml.load(f)\n try:\n with open(cache_path, 'w') as f:\n msgpack.pack(ret, f)\n except IOError as e:\n if e.errno == errno.EACCES:\n l.warn('Not allowed to write %s', path)\n else:\n raise\n return ret\n"
] |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_instance_definition(man, insts):
""" Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile """
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
def depsOf_of_mirteFile_module_definition(defs):
""" Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile """
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
def module_definition_from_mirteFile_dict(man, d):
""" Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>. """
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
def load_mirteFile(path, m, logger=None):
""" Loads the mirte-file at <path> into the manager <m>. """
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
def _load_mirteFile(d, m):
""" Loads the dictionary from the mirteFile into <m> """
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
def find_mirteFile(name, extra_path=None):
""" Resolves <name> to a path. Uses <extra_path> """
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
def _parse_mirteFile(path, logger=None):
""" Open and parses the mirteFile at <path>. """
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/mirteFile.py
|
_parse_mirteFile
|
python
|
def _parse_mirteFile(path, logger=None):
l = logging.getLogger('_parse_mirteFile') if logger is None else logger
cache_path = os.path.join(os.path.dirname(path),
CACHE_FILENAME_TEMPLATE % os.path.basename(path))
if (os.path.exists(cache_path) and
os.path.getmtime(cache_path) >= os.path.getmtime(path)):
with open(cache_path) as f:
return msgpack.unpack(f)
with open(path) as f:
ret = yaml.load(f)
try:
with open(cache_path, 'w') as f:
msgpack.pack(ret, f)
except IOError as e:
if e.errno == errno.EACCES:
l.warn('Not allowed to write %s', path)
else:
raise
return ret
|
Open and parses the mirteFile at <path>.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/mirteFile.py#L202-L221
| null |
from sarah.lazy import lazy
import os
import sys
import copy
import errno
import os.path
import msgpack
import logging
from itertools import chain
import six
from sarah.order import sort_by_successors, dual_cover, restricted_cover
from mirte.core import ModuleDefinition, DepDefinition, VSettingDefinition
@lazy
def yaml():
import yaml
return yaml
CACHE_FILENAME_TEMPLATE = '.%s.msgpack'
FILE_SUFFIX = '.mirte'
DEFAULT_FILE = 'default.mirte'
def depsOf_of_mirteFile_instance_definition(man, insts):
""" Returns a function that returns the dependencies of
an instance definition by its name, where insts is a
dictionary of instance definitions from a mirteFile """
return lambda x: [a[1] for a in six.iteritems(insts[x])
if a[0] in [dn for dn, d in (
six.iteritems(man.modules[insts[x]['module']].deps)
if 'module' in insts[x] else [])]]
def depsOf_of_mirteFile_module_definition(defs):
""" Returns a function that returns the dependencies of a module
definition by its name, where defs is a dictionary of module
definitions from a mirteFile """
return lambda x: (list(filter(lambda z: z is not None and z in defs,
map(lambda y: y[1].get('type'),
six.iteritems(defs[x]['settings'])
if 'settings' in defs[x] else [])))) + \
(list(defs[x]['inherits']) if 'inherits' in defs[x] else [])
def module_definition_from_mirteFile_dict(man, d):
""" Creates a ModuleDefinition instance from the dictionary <d> from
a mirte-file for the Manager instance <man>. """
m = ModuleDefinition()
if 'inherits' not in d:
d['inherits'] = list()
if 'settings' not in d:
d['settings'] = dict()
if 'implementedBy' in d:
m.implementedBy = d['implementedBy']
m.inherits = set(d['inherits'])
for p in d['inherits']:
if p not in man.modules:
raise ValueError("No such module %s" % p)
m.deps.update(man.modules[p].deps)
m.vsettings.update(man.modules[p].vsettings)
m.inherits.update(man.modules[p].inherits)
m.run = m.run or man.modules[p].run
if 'run' in d:
m.run = d['run']
if len(m.inherits) == 0:
m.inherits = set(['module'])
for k, v in six.iteritems(d['settings']):
if 'type' not in v:
if k not in m.vsettings:
raise ValueError("No such existing vsetting %s" % k)
if 'default' in v:
m.vsettings[k] = copy.copy(m.vsettings[k])
m.vsettings[k].default = v['default']
continue
if v['type'] in man.modules:
m.deps[k] = DepDefinition(v['type'], v.get('allownull', False))
elif v['type'] in man.valueTypes:
m.vsettings[k] = VSettingDefinition(
v['type'],
(man.valueTypes[v['type']](v['default'])
if 'default' in v else None)
)
else:
raise ValueError("No such module or valuetype %s" % v)
return m
def load_mirteFile(path, m, logger=None):
""" Loads the mirte-file at <path> into the manager <m>. """
l = logging.getLogger('load_mirteFile') if logger is None else logger
had = set()
for name, path, d in walk_mirteFiles(path, logger):
if os.path.realpath(path) in m.loaded_mirteFiles:
continue
identifier = name
if name in had:
identifier = path
else:
had.add(name)
l.info('loading %s' % identifier)
m.loaded_mirteFiles.add(os.path.realpath(path))
_load_mirteFile(d, m)
def _load_mirteFile(d, m):
""" Loads the dictionary from the mirteFile into <m> """
defs = d['definitions'] if 'definitions' in d else {}
insts = d['instances'] if 'instances' in d else {}
# Filter out existing instances
insts_to_skip = []
for k in insts:
if k in m.insts:
m.update_instance(k, dict(insts[k]))
insts_to_skip.append(k)
for k in insts_to_skip:
del(insts[k])
# Sort module definitions by dependency
it = sort_by_successors(
six.viewkeys(defs),
dual_cover(
six.viewkeys(defs),
restricted_cover(
six.viewkeys(defs),
depsOf_of_mirteFile_module_definition(defs)
)
)
)
# Add module definitions
for k in it:
m.add_module_definition(
k,
module_definition_from_mirteFile_dict(m, defs[k])
)
# Sort instance declarations by dependency
it = sort_by_successors(
six.viewkeys(insts),
dual_cover(
six.viewkeys(insts),
restricted_cover(
six.viewkeys(insts),
depsOf_of_mirteFile_instance_definition(m, insts)
)
)
)
# Create instances
for k in it:
settings = dict(insts[k])
del(settings['module'])
m.create_instance(k, insts[k]['module'], settings)
def find_mirteFile(name, extra_path=None):
""" Resolves <name> to a path. Uses <extra_path> """
extra_path = () if extra_path is None else extra_path
for bp in chain(extra_path, sys.path):
pb = os.path.join(bp, name)
p = pb + FILE_SUFFIX
if os.path.exists(p):
return os.path.abspath(p)
p = os.path.join(pb, DEFAULT_FILE)
if os.path.exists(p):
return os.path.abspath(p)
raise ValueError("Couldn't find mirteFile %s" % name)
def walk_mirteFiles(name, logger=None):
""" Yields (cpath, d) for all dependencies of and including the
mirte-file <name>, where <d> are the dictionaries from
the mirte-file at <cpath> """
stack = [(name, find_mirteFile(name, (os.getcwd(),)))]
loadStack = []
had = dict()
while stack:
name, path = stack.pop()
if path in had:
d = had[path]
else:
d = _parse_mirteFile(path, logger)
had[path] = d
loadStack.append((name, path, d))
if 'includes' not in d:
continue
for include in d['includes']:
stack.append(
(include,
find_mirteFile(include, (os.path.dirname(path),))))
had = set()
for name, path, d in reversed(loadStack):
if path in had:
continue
had.add(path)
yield name, path, d
# vim: et:sta:bs=2:sw=4:
|
bwesterb/mirte
|
src/core.py
|
Manager._get_all
|
python
|
def _get_all(self, _type):
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
if not self.insts_implementing.get(_type, None):
raise ValueError("No instance implementing %s" % _type)
return self.insts_implementing[_type]
|
Gets all instances implementing type <_type>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/core.py#L69-L75
| null |
class Manager(Module):
def __init__(self, logger=None):
if logger is None:
logger = logging.getLogger(object.__repr__(self))
super(Manager, self).__init__({}, logger)
self.running = False
self.running_event = threading.Event()
self.modules = dict()
self.to_stop = list() # objects to stop
self.daemons = list() # and to join
self.valueTypes = {'str': str,
'float': float,
'bool': bool,
'int': int}
self.insts = dict()
# module -> concrete modules implementing module
self.modules_implementing = dict()
self.insts_implementing = dict()
self.add_module_definition('module', ModuleDefinition())
self.add_module_definition('manager', ModuleDefinition())
self.add_module_definition('threadPool', ModuleDefinition(
vsettings={'minFree': VSettingDefinition('int', 4),
'maxFree': VSettingDefinition('int', 8),
'min': VSettingDefinition('int', 8)},
implementedBy='mirte.threadPool.ThreadPool'))
self.register_instance('manager', 'manager', self, {}, {})
self.create_instance('threadPool', 'threadPool', {})
self.sleep_event = KeyboardInterruptableEvent()
# set of paths of the mirteFiles that already have been loaded
self.loaded_mirteFiles = set([])
def get_a(self, _type):
""" Gets an instance implementing type <_type> """
return self.insts[self._get_a(_type)].object
def _get_a(self, _type):
""" Gets an instance implementing type <_type> """
tmp = self._get_all(_type)
ret = pick(tmp)
if len(tmp) != 1:
self.l.warn(("get_a: %s all implement %s; " +
"picking %s") % (tmp, _type, ret))
return ret
def got_a(self, _type):
""" Returns whether there is an instance implementing <_type>
"""
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
return (_type in self.insts_implementing and
self.insts_implementing[_type])
class GoCa_Plan(object):
""" A partial plan for a get_or_create_a call """
def __init__(self, man, targets, insts=None,
insts_implementing=None):
self.man = man
self.targets = targets
self.insts = dict() if insts is None else insts
self.insts_implementing = (dict() if insts_implementing
is None else insts_implementing)
def free_instance_name_like(self, name):
if (name not in self.insts and
name not in self.man.insts):
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if (shot not in self.insts and
name not in self.man.insts):
return shot
suffix += 1
def got_a(self, _type):
if self.man.got_a(_type):
return True
return (_type in self.insts_implementing and
self.insts_implementing[_type])
def get_all(self, _type):
ret = list()
if self.man.got_a(_type):
ret.extend(self.man._get_all(_type))
if _type in self.insts_implementing:
ret.extend(self.insts_implementing[_type])
return ret
def get_a(self, _type):
return pick(self.get_all(_type))
@property
def finished(self):
return not self.targets
def plan_a(self, mod):
name = self.free_instance_name_like(mod)
self.insts[name] = (name, mod, {})
md = self.man.modules[mod]
for mod2 in chain(md.inherits, (mod,)):
if mod2 not in self.insts_implementing:
self.insts_implementing[mod2] = list()
self.insts_implementing[mod2].append(name)
for depName, dep in six.iteritems(md.deps):
if dep.type in self.targets:
self.targets[dep.type].append(
(name, depName))
else:
self.targets[dep.type] = [
(name, depName)]
return name
def branches(self):
choices = dict()
for target in self.targets:
if self.got_a(target):
choices[target] = [(True, name) for
name in self.get_all(target)]
continue
choices[target] = [(False, name) for name
in self.man.modules_implementing[
target]]
choices_t = list(six.iteritems(choices))
for choice in product(*[list(range(len(v)))
for k, v in choices_t]):
plan2 = Manager.GoCa_Plan(self.man, dict(),
dict(self.insts),
dict(self.insts_implementing))
tmp = [(choices_t[n][0], choices_t[n][1][m])
for n, m in enumerate(choice)]
for target, inst_or_mod in tmp:
if inst_or_mod[0]:
name = inst_or_mod[1]
else:
name = plan2.plan_a(inst_or_mod[1])
for _in, depName in self.targets[target]:
plan2.insts[_in][2][depName] = name
yield plan2
def execute(self):
insts = frozenset(six.iterkeys(self.insts))
inst_list = tuple(sort_by_successors(
insts,
lambda inst: [self.insts[inst][2][k] for k
in self.man.modules[self.insts[inst][1]].deps
if self.insts[inst][2][k] in insts]
))
for name in reversed(inst_list):
self.man.create_instance(*self.insts[name])
def get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
return self.insts[self._get_or_create_a(_type)].object
def _get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
self.l.debug("get_or_create_a: %s" % _type)
stack = [Manager.GoCa_Plan(self, {_type: ()})]
while stack:
p = stack.pop()
if p.finished:
p.execute()
return p.get_a(_type)
for c in p.branches():
stack.append(c)
raise NotImplementedError
def free_instance_name_like(self, name):
if name not in self.insts:
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if shot not in self.insts:
return shot
suffix += 1
def add_module_definition(self, name, definition):
if name in self.modules:
raise ValueError("Duplicate module name")
self.modules[name] = definition
if definition.implementedBy is not None:
for t in chain(definition.inherits, (name,)):
if t not in self.modules_implementing:
self.insts_implementing[t] = set()
self.modules_implementing[t] = set()
self.modules_implementing[t].add(name)
def update_instance(self, name, settings):
""" Updates settings of instance <name> with the
dictionary <settings>. """
if name not in self.insts:
raise ValueError("There's no instance named %s" % name)
if 'module' in settings:
raise ValueError(("Can't change module of existing instan"
+ "ce %s") % name)
self.l.info('update instance %-15s' % (name))
for k, v in six.iteritems(settings):
self.change_setting(name, k, v)
def create_instance(self, name, moduleName, settings):
""" Creates an instance of <moduleName> at <name> with
<settings>. """
if name in self.insts:
raise ValueError("There's already an instance named %s" %
name)
if moduleName not in self.modules:
raise ValueError("There's no module %s" % moduleName)
md = self.modules[moduleName]
deps = dict()
for k, v in six.iteritems(md.deps):
if k not in settings:
settings[k] = self._get_or_create_a(v.type)
if settings[k] is None:
if not v.allow_null:
raise ValueError("`null' not allowed for %s" % k)
elif settings[k] not in self.insts:
raise ValueError("No such instance %s" % settings[k])
else:
settings[k] = self.insts[settings[k]].object
deps[k] = settings[k]
for k, v in six.iteritems(md.vsettings):
if k not in settings:
settings[k] = v.default
if v.default is None:
self.l.warn('%s:%s not set' % (name, k))
self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
cl = get_by_path(md.implementedBy)
il = logging.getLogger(name)
obj = cl(settings, il)
self.register_instance(name, moduleName, obj, settings, deps)
return obj
def register_instance(self, name, moduleName, obj, settings, deps):
md = self.modules[moduleName]
self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)
for mn in chain(md.inherits, (moduleName,)):
if mn not in self.insts_implementing:
self.insts_implementing[mn] = set()
self.insts_implementing[mn].add(name)
if md.run:
self.to_stop.append(name)
self.daemons.append(name)
if self.running:
self._run_instance(name)
elif hasattr(obj, 'stop'):
self.to_stop.append(name)
def _run_instance(self, name):
ii = self.insts[name]
self.insts['threadPool'].object.execute_named(
self._daemon_entry, "mirte run %s" % name, ii)
def _daemon_entry(self, ii):
try:
ii.object.run()
except Exception:
self.l.exception(("Module %s exited " +
"abnormally") % ii.name)
return
self.l.info("Module %s exited normally" % ii.name)
def run(self):
assert not self.running
self.running = True
self.running_event.set()
tp = self.insts['threadPool'].object
tp.start()
# Note that self.daemons is already dependency ordered for us
for name in self.daemons:
self._run_instance(name)
while self.running:
try:
self.sleep_event.wait()
except KeyboardInterrupt:
self.l.warn("Keyboard interrupt")
self.stop()
self.l.info("Woke up")
self.l.info("Stopping modules")
for name in reversed(self.to_stop):
ii = self.insts[name]
self.l.info(" %s" % ii.name)
ii.object.stop()
self.l.info("Joining threadPool")
tp.join()
def change_setting(self, instance_name, key, raw_value):
""" Change the settings <key> to <raw_value> of an instance
named <instance_name>. <raw_value> should be a string and
is properly converted. """
ii = self.insts[instance_name]
mo = self.modules[ii.module]
if key in mo.deps:
if raw_value not in self.insts:
raise ValueError("No such instance %s" % raw_value)
vii = self.insts[raw_value]
vmo = self.modules[vii.module]
if not (mo.deps[key].type in vmo.inherits or
mo.deps[key].type == vii.module):
raise ValueError("%s isn't a %s" % (
raw_value, mo.deps[key].type))
value = vii.object
elif key in mo.vsettings:
value = self.valueTypes[mo.vsettings[key].type](
raw_value)
else:
raise ValueError("No such settings %s" % key)
self.l.info("Changing %s.%s to %s" % (instance_name,
key,
raw_value))
ii.settings[key] = value
ii.object.change_setting(key, value)
def stop(self):
if not self.running:
return
self.running_event.clear()
self.running = False
self.sleep_event.set()
|
bwesterb/mirte
|
src/core.py
|
Manager._get_a
|
python
|
def _get_a(self, _type):
tmp = self._get_all(_type)
ret = pick(tmp)
if len(tmp) != 1:
self.l.warn(("get_a: %s all implement %s; " +
"picking %s") % (tmp, _type, ret))
return ret
|
Gets an instance implementing type <_type>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/core.py#L81-L88
|
[
"def _get_all(self, _type):\n \"\"\" Gets all instances implementing type <_type> \"\"\"\n if _type not in self.modules:\n raise ValueError(\"No such module, %s\" % _type)\n if not self.insts_implementing.get(_type, None):\n raise ValueError(\"No instance implementing %s\" % _type)\n return self.insts_implementing[_type]\n"
] |
class Manager(Module):
def __init__(self, logger=None):
if logger is None:
logger = logging.getLogger(object.__repr__(self))
super(Manager, self).__init__({}, logger)
self.running = False
self.running_event = threading.Event()
self.modules = dict()
self.to_stop = list() # objects to stop
self.daemons = list() # and to join
self.valueTypes = {'str': str,
'float': float,
'bool': bool,
'int': int}
self.insts = dict()
# module -> concrete modules implementing module
self.modules_implementing = dict()
self.insts_implementing = dict()
self.add_module_definition('module', ModuleDefinition())
self.add_module_definition('manager', ModuleDefinition())
self.add_module_definition('threadPool', ModuleDefinition(
vsettings={'minFree': VSettingDefinition('int', 4),
'maxFree': VSettingDefinition('int', 8),
'min': VSettingDefinition('int', 8)},
implementedBy='mirte.threadPool.ThreadPool'))
self.register_instance('manager', 'manager', self, {}, {})
self.create_instance('threadPool', 'threadPool', {})
self.sleep_event = KeyboardInterruptableEvent()
# set of paths of the mirteFiles that already have been loaded
self.loaded_mirteFiles = set([])
def _get_all(self, _type):
""" Gets all instances implementing type <_type> """
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
if not self.insts_implementing.get(_type, None):
raise ValueError("No instance implementing %s" % _type)
return self.insts_implementing[_type]
def get_a(self, _type):
""" Gets an instance implementing type <_type> """
return self.insts[self._get_a(_type)].object
def got_a(self, _type):
""" Returns whether there is an instance implementing <_type>
"""
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
return (_type in self.insts_implementing and
self.insts_implementing[_type])
class GoCa_Plan(object):
""" A partial plan for a get_or_create_a call """
def __init__(self, man, targets, insts=None,
insts_implementing=None):
self.man = man
self.targets = targets
self.insts = dict() if insts is None else insts
self.insts_implementing = (dict() if insts_implementing
is None else insts_implementing)
def free_instance_name_like(self, name):
if (name not in self.insts and
name not in self.man.insts):
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if (shot not in self.insts and
name not in self.man.insts):
return shot
suffix += 1
def got_a(self, _type):
if self.man.got_a(_type):
return True
return (_type in self.insts_implementing and
self.insts_implementing[_type])
def get_all(self, _type):
ret = list()
if self.man.got_a(_type):
ret.extend(self.man._get_all(_type))
if _type in self.insts_implementing:
ret.extend(self.insts_implementing[_type])
return ret
def get_a(self, _type):
return pick(self.get_all(_type))
@property
def finished(self):
return not self.targets
def plan_a(self, mod):
name = self.free_instance_name_like(mod)
self.insts[name] = (name, mod, {})
md = self.man.modules[mod]
for mod2 in chain(md.inherits, (mod,)):
if mod2 not in self.insts_implementing:
self.insts_implementing[mod2] = list()
self.insts_implementing[mod2].append(name)
for depName, dep in six.iteritems(md.deps):
if dep.type in self.targets:
self.targets[dep.type].append(
(name, depName))
else:
self.targets[dep.type] = [
(name, depName)]
return name
def branches(self):
choices = dict()
for target in self.targets:
if self.got_a(target):
choices[target] = [(True, name) for
name in self.get_all(target)]
continue
choices[target] = [(False, name) for name
in self.man.modules_implementing[
target]]
choices_t = list(six.iteritems(choices))
for choice in product(*[list(range(len(v)))
for k, v in choices_t]):
plan2 = Manager.GoCa_Plan(self.man, dict(),
dict(self.insts),
dict(self.insts_implementing))
tmp = [(choices_t[n][0], choices_t[n][1][m])
for n, m in enumerate(choice)]
for target, inst_or_mod in tmp:
if inst_or_mod[0]:
name = inst_or_mod[1]
else:
name = plan2.plan_a(inst_or_mod[1])
for _in, depName in self.targets[target]:
plan2.insts[_in][2][depName] = name
yield plan2
def execute(self):
insts = frozenset(six.iterkeys(self.insts))
inst_list = tuple(sort_by_successors(
insts,
lambda inst: [self.insts[inst][2][k] for k
in self.man.modules[self.insts[inst][1]].deps
if self.insts[inst][2][k] in insts]
))
for name in reversed(inst_list):
self.man.create_instance(*self.insts[name])
def get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
return self.insts[self._get_or_create_a(_type)].object
def _get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
self.l.debug("get_or_create_a: %s" % _type)
stack = [Manager.GoCa_Plan(self, {_type: ()})]
while stack:
p = stack.pop()
if p.finished:
p.execute()
return p.get_a(_type)
for c in p.branches():
stack.append(c)
raise NotImplementedError
def free_instance_name_like(self, name):
if name not in self.insts:
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if shot not in self.insts:
return shot
suffix += 1
def add_module_definition(self, name, definition):
if name in self.modules:
raise ValueError("Duplicate module name")
self.modules[name] = definition
if definition.implementedBy is not None:
for t in chain(definition.inherits, (name,)):
if t not in self.modules_implementing:
self.insts_implementing[t] = set()
self.modules_implementing[t] = set()
self.modules_implementing[t].add(name)
def update_instance(self, name, settings):
""" Updates settings of instance <name> with the
dictionary <settings>. """
if name not in self.insts:
raise ValueError("There's no instance named %s" % name)
if 'module' in settings:
raise ValueError(("Can't change module of existing instan"
+ "ce %s") % name)
self.l.info('update instance %-15s' % (name))
for k, v in six.iteritems(settings):
self.change_setting(name, k, v)
def create_instance(self, name, moduleName, settings):
""" Creates an instance of <moduleName> at <name> with
<settings>. """
if name in self.insts:
raise ValueError("There's already an instance named %s" %
name)
if moduleName not in self.modules:
raise ValueError("There's no module %s" % moduleName)
md = self.modules[moduleName]
deps = dict()
for k, v in six.iteritems(md.deps):
if k not in settings:
settings[k] = self._get_or_create_a(v.type)
if settings[k] is None:
if not v.allow_null:
raise ValueError("`null' not allowed for %s" % k)
elif settings[k] not in self.insts:
raise ValueError("No such instance %s" % settings[k])
else:
settings[k] = self.insts[settings[k]].object
deps[k] = settings[k]
for k, v in six.iteritems(md.vsettings):
if k not in settings:
settings[k] = v.default
if v.default is None:
self.l.warn('%s:%s not set' % (name, k))
self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
cl = get_by_path(md.implementedBy)
il = logging.getLogger(name)
obj = cl(settings, il)
self.register_instance(name, moduleName, obj, settings, deps)
return obj
def register_instance(self, name, moduleName, obj, settings, deps):
md = self.modules[moduleName]
self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)
for mn in chain(md.inherits, (moduleName,)):
if mn not in self.insts_implementing:
self.insts_implementing[mn] = set()
self.insts_implementing[mn].add(name)
if md.run:
self.to_stop.append(name)
self.daemons.append(name)
if self.running:
self._run_instance(name)
elif hasattr(obj, 'stop'):
self.to_stop.append(name)
def _run_instance(self, name):
ii = self.insts[name]
self.insts['threadPool'].object.execute_named(
self._daemon_entry, "mirte run %s" % name, ii)
def _daemon_entry(self, ii):
try:
ii.object.run()
except Exception:
self.l.exception(("Module %s exited " +
"abnormally") % ii.name)
return
self.l.info("Module %s exited normally" % ii.name)
def run(self):
assert not self.running
self.running = True
self.running_event.set()
tp = self.insts['threadPool'].object
tp.start()
# Note that self.daemons is already dependency ordered for us
for name in self.daemons:
self._run_instance(name)
while self.running:
try:
self.sleep_event.wait()
except KeyboardInterrupt:
self.l.warn("Keyboard interrupt")
self.stop()
self.l.info("Woke up")
self.l.info("Stopping modules")
for name in reversed(self.to_stop):
ii = self.insts[name]
self.l.info(" %s" % ii.name)
ii.object.stop()
self.l.info("Joining threadPool")
tp.join()
def change_setting(self, instance_name, key, raw_value):
""" Change the settings <key> to <raw_value> of an instance
named <instance_name>. <raw_value> should be a string and
is properly converted. """
ii = self.insts[instance_name]
mo = self.modules[ii.module]
if key in mo.deps:
if raw_value not in self.insts:
raise ValueError("No such instance %s" % raw_value)
vii = self.insts[raw_value]
vmo = self.modules[vii.module]
if not (mo.deps[key].type in vmo.inherits or
mo.deps[key].type == vii.module):
raise ValueError("%s isn't a %s" % (
raw_value, mo.deps[key].type))
value = vii.object
elif key in mo.vsettings:
value = self.valueTypes[mo.vsettings[key].type](
raw_value)
else:
raise ValueError("No such settings %s" % key)
self.l.info("Changing %s.%s to %s" % (instance_name,
key,
raw_value))
ii.settings[key] = value
ii.object.change_setting(key, value)
def stop(self):
if not self.running:
return
self.running_event.clear()
self.running = False
self.sleep_event.set()
|
bwesterb/mirte
|
src/core.py
|
Manager.got_a
|
python
|
def got_a(self, _type):
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
return (_type in self.insts_implementing and
self.insts_implementing[_type])
|
Returns whether there is an instance implementing <_type>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/core.py#L90-L96
| null |
class Manager(Module):
def __init__(self, logger=None):
if logger is None:
logger = logging.getLogger(object.__repr__(self))
super(Manager, self).__init__({}, logger)
self.running = False
self.running_event = threading.Event()
self.modules = dict()
self.to_stop = list() # objects to stop
self.daemons = list() # and to join
self.valueTypes = {'str': str,
'float': float,
'bool': bool,
'int': int}
self.insts = dict()
# module -> concrete modules implementing module
self.modules_implementing = dict()
self.insts_implementing = dict()
self.add_module_definition('module', ModuleDefinition())
self.add_module_definition('manager', ModuleDefinition())
self.add_module_definition('threadPool', ModuleDefinition(
vsettings={'minFree': VSettingDefinition('int', 4),
'maxFree': VSettingDefinition('int', 8),
'min': VSettingDefinition('int', 8)},
implementedBy='mirte.threadPool.ThreadPool'))
self.register_instance('manager', 'manager', self, {}, {})
self.create_instance('threadPool', 'threadPool', {})
self.sleep_event = KeyboardInterruptableEvent()
# set of paths of the mirteFiles that already have been loaded
self.loaded_mirteFiles = set([])
def _get_all(self, _type):
""" Gets all instances implementing type <_type> """
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
if not self.insts_implementing.get(_type, None):
raise ValueError("No instance implementing %s" % _type)
return self.insts_implementing[_type]
def get_a(self, _type):
""" Gets an instance implementing type <_type> """
return self.insts[self._get_a(_type)].object
def _get_a(self, _type):
""" Gets an instance implementing type <_type> """
tmp = self._get_all(_type)
ret = pick(tmp)
if len(tmp) != 1:
self.l.warn(("get_a: %s all implement %s; " +
"picking %s") % (tmp, _type, ret))
return ret
class GoCa_Plan(object):
""" A partial plan for a get_or_create_a call """
def __init__(self, man, targets, insts=None,
insts_implementing=None):
self.man = man
self.targets = targets
self.insts = dict() if insts is None else insts
self.insts_implementing = (dict() if insts_implementing
is None else insts_implementing)
def free_instance_name_like(self, name):
if (name not in self.insts and
name not in self.man.insts):
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if (shot not in self.insts and
name not in self.man.insts):
return shot
suffix += 1
def got_a(self, _type):
if self.man.got_a(_type):
return True
return (_type in self.insts_implementing and
self.insts_implementing[_type])
def get_all(self, _type):
ret = list()
if self.man.got_a(_type):
ret.extend(self.man._get_all(_type))
if _type in self.insts_implementing:
ret.extend(self.insts_implementing[_type])
return ret
def get_a(self, _type):
return pick(self.get_all(_type))
@property
def finished(self):
return not self.targets
def plan_a(self, mod):
name = self.free_instance_name_like(mod)
self.insts[name] = (name, mod, {})
md = self.man.modules[mod]
for mod2 in chain(md.inherits, (mod,)):
if mod2 not in self.insts_implementing:
self.insts_implementing[mod2] = list()
self.insts_implementing[mod2].append(name)
for depName, dep in six.iteritems(md.deps):
if dep.type in self.targets:
self.targets[dep.type].append(
(name, depName))
else:
self.targets[dep.type] = [
(name, depName)]
return name
def branches(self):
choices = dict()
for target in self.targets:
if self.got_a(target):
choices[target] = [(True, name) for
name in self.get_all(target)]
continue
choices[target] = [(False, name) for name
in self.man.modules_implementing[
target]]
choices_t = list(six.iteritems(choices))
for choice in product(*[list(range(len(v)))
for k, v in choices_t]):
plan2 = Manager.GoCa_Plan(self.man, dict(),
dict(self.insts),
dict(self.insts_implementing))
tmp = [(choices_t[n][0], choices_t[n][1][m])
for n, m in enumerate(choice)]
for target, inst_or_mod in tmp:
if inst_or_mod[0]:
name = inst_or_mod[1]
else:
name = plan2.plan_a(inst_or_mod[1])
for _in, depName in self.targets[target]:
plan2.insts[_in][2][depName] = name
yield plan2
def execute(self):
insts = frozenset(six.iterkeys(self.insts))
inst_list = tuple(sort_by_successors(
insts,
lambda inst: [self.insts[inst][2][k] for k
in self.man.modules[self.insts[inst][1]].deps
if self.insts[inst][2][k] in insts]
))
for name in reversed(inst_list):
self.man.create_instance(*self.insts[name])
def get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
return self.insts[self._get_or_create_a(_type)].object
def _get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
self.l.debug("get_or_create_a: %s" % _type)
stack = [Manager.GoCa_Plan(self, {_type: ()})]
while stack:
p = stack.pop()
if p.finished:
p.execute()
return p.get_a(_type)
for c in p.branches():
stack.append(c)
raise NotImplementedError
def free_instance_name_like(self, name):
if name not in self.insts:
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if shot not in self.insts:
return shot
suffix += 1
def add_module_definition(self, name, definition):
if name in self.modules:
raise ValueError("Duplicate module name")
self.modules[name] = definition
if definition.implementedBy is not None:
for t in chain(definition.inherits, (name,)):
if t not in self.modules_implementing:
self.insts_implementing[t] = set()
self.modules_implementing[t] = set()
self.modules_implementing[t].add(name)
def update_instance(self, name, settings):
""" Updates settings of instance <name> with the
dictionary <settings>. """
if name not in self.insts:
raise ValueError("There's no instance named %s" % name)
if 'module' in settings:
raise ValueError(("Can't change module of existing instan"
+ "ce %s") % name)
self.l.info('update instance %-15s' % (name))
for k, v in six.iteritems(settings):
self.change_setting(name, k, v)
def create_instance(self, name, moduleName, settings):
""" Creates an instance of <moduleName> at <name> with
<settings>. """
if name in self.insts:
raise ValueError("There's already an instance named %s" %
name)
if moduleName not in self.modules:
raise ValueError("There's no module %s" % moduleName)
md = self.modules[moduleName]
deps = dict()
for k, v in six.iteritems(md.deps):
if k not in settings:
settings[k] = self._get_or_create_a(v.type)
if settings[k] is None:
if not v.allow_null:
raise ValueError("`null' not allowed for %s" % k)
elif settings[k] not in self.insts:
raise ValueError("No such instance %s" % settings[k])
else:
settings[k] = self.insts[settings[k]].object
deps[k] = settings[k]
for k, v in six.iteritems(md.vsettings):
if k not in settings:
settings[k] = v.default
if v.default is None:
self.l.warn('%s:%s not set' % (name, k))
self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
cl = get_by_path(md.implementedBy)
il = logging.getLogger(name)
obj = cl(settings, il)
self.register_instance(name, moduleName, obj, settings, deps)
return obj
def register_instance(self, name, moduleName, obj, settings, deps):
md = self.modules[moduleName]
self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)
for mn in chain(md.inherits, (moduleName,)):
if mn not in self.insts_implementing:
self.insts_implementing[mn] = set()
self.insts_implementing[mn].add(name)
if md.run:
self.to_stop.append(name)
self.daemons.append(name)
if self.running:
self._run_instance(name)
elif hasattr(obj, 'stop'):
self.to_stop.append(name)
def _run_instance(self, name):
ii = self.insts[name]
self.insts['threadPool'].object.execute_named(
self._daemon_entry, "mirte run %s" % name, ii)
def _daemon_entry(self, ii):
try:
ii.object.run()
except Exception:
self.l.exception(("Module %s exited " +
"abnormally") % ii.name)
return
self.l.info("Module %s exited normally" % ii.name)
def run(self):
assert not self.running
self.running = True
self.running_event.set()
tp = self.insts['threadPool'].object
tp.start()
# Note that self.daemons is already dependency ordered for us
for name in self.daemons:
self._run_instance(name)
while self.running:
try:
self.sleep_event.wait()
except KeyboardInterrupt:
self.l.warn("Keyboard interrupt")
self.stop()
self.l.info("Woke up")
self.l.info("Stopping modules")
for name in reversed(self.to_stop):
ii = self.insts[name]
self.l.info(" %s" % ii.name)
ii.object.stop()
self.l.info("Joining threadPool")
tp.join()
def change_setting(self, instance_name, key, raw_value):
""" Change the settings <key> to <raw_value> of an instance
named <instance_name>. <raw_value> should be a string and
is properly converted. """
ii = self.insts[instance_name]
mo = self.modules[ii.module]
if key in mo.deps:
if raw_value not in self.insts:
raise ValueError("No such instance %s" % raw_value)
vii = self.insts[raw_value]
vmo = self.modules[vii.module]
if not (mo.deps[key].type in vmo.inherits or
mo.deps[key].type == vii.module):
raise ValueError("%s isn't a %s" % (
raw_value, mo.deps[key].type))
value = vii.object
elif key in mo.vsettings:
value = self.valueTypes[mo.vsettings[key].type](
raw_value)
else:
raise ValueError("No such settings %s" % key)
self.l.info("Changing %s.%s to %s" % (instance_name,
key,
raw_value))
ii.settings[key] = value
ii.object.change_setting(key, value)
def stop(self):
if not self.running:
return
self.running_event.clear()
self.running = False
self.sleep_event.set()
|
bwesterb/mirte
|
src/core.py
|
Manager._get_or_create_a
|
python
|
def _get_or_create_a(self, _type):
self.l.debug("get_or_create_a: %s" % _type)
stack = [Manager.GoCa_Plan(self, {_type: ()})]
while stack:
p = stack.pop()
if p.finished:
p.execute()
return p.get_a(_type)
for c in p.branches():
stack.append(c)
raise NotImplementedError
|
Gets or creates an instance of type <_type>
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/core.py#L201-L212
| null |
class Manager(Module):
def __init__(self, logger=None):
if logger is None:
logger = logging.getLogger(object.__repr__(self))
super(Manager, self).__init__({}, logger)
self.running = False
self.running_event = threading.Event()
self.modules = dict()
self.to_stop = list() # objects to stop
self.daemons = list() # and to join
self.valueTypes = {'str': str,
'float': float,
'bool': bool,
'int': int}
self.insts = dict()
# module -> concrete modules implementing module
self.modules_implementing = dict()
self.insts_implementing = dict()
self.add_module_definition('module', ModuleDefinition())
self.add_module_definition('manager', ModuleDefinition())
self.add_module_definition('threadPool', ModuleDefinition(
vsettings={'minFree': VSettingDefinition('int', 4),
'maxFree': VSettingDefinition('int', 8),
'min': VSettingDefinition('int', 8)},
implementedBy='mirte.threadPool.ThreadPool'))
self.register_instance('manager', 'manager', self, {}, {})
self.create_instance('threadPool', 'threadPool', {})
self.sleep_event = KeyboardInterruptableEvent()
# set of paths of the mirteFiles that already have been loaded
self.loaded_mirteFiles = set([])
def _get_all(self, _type):
""" Gets all instances implementing type <_type> """
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
if not self.insts_implementing.get(_type, None):
raise ValueError("No instance implementing %s" % _type)
return self.insts_implementing[_type]
def get_a(self, _type):
""" Gets an instance implementing type <_type> """
return self.insts[self._get_a(_type)].object
def _get_a(self, _type):
""" Gets an instance implementing type <_type> """
tmp = self._get_all(_type)
ret = pick(tmp)
if len(tmp) != 1:
self.l.warn(("get_a: %s all implement %s; " +
"picking %s") % (tmp, _type, ret))
return ret
def got_a(self, _type):
""" Returns whether there is an instance implementing <_type>
"""
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
return (_type in self.insts_implementing and
self.insts_implementing[_type])
class GoCa_Plan(object):
""" A partial plan for a get_or_create_a call """
def __init__(self, man, targets, insts=None,
insts_implementing=None):
self.man = man
self.targets = targets
self.insts = dict() if insts is None else insts
self.insts_implementing = (dict() if insts_implementing
is None else insts_implementing)
def free_instance_name_like(self, name):
if (name not in self.insts and
name not in self.man.insts):
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if (shot not in self.insts and
name not in self.man.insts):
return shot
suffix += 1
def got_a(self, _type):
if self.man.got_a(_type):
return True
return (_type in self.insts_implementing and
self.insts_implementing[_type])
def get_all(self, _type):
ret = list()
if self.man.got_a(_type):
ret.extend(self.man._get_all(_type))
if _type in self.insts_implementing:
ret.extend(self.insts_implementing[_type])
return ret
def get_a(self, _type):
return pick(self.get_all(_type))
@property
def finished(self):
return not self.targets
def plan_a(self, mod):
name = self.free_instance_name_like(mod)
self.insts[name] = (name, mod, {})
md = self.man.modules[mod]
for mod2 in chain(md.inherits, (mod,)):
if mod2 not in self.insts_implementing:
self.insts_implementing[mod2] = list()
self.insts_implementing[mod2].append(name)
for depName, dep in six.iteritems(md.deps):
if dep.type in self.targets:
self.targets[dep.type].append(
(name, depName))
else:
self.targets[dep.type] = [
(name, depName)]
return name
def branches(self):
choices = dict()
for target in self.targets:
if self.got_a(target):
choices[target] = [(True, name) for
name in self.get_all(target)]
continue
choices[target] = [(False, name) for name
in self.man.modules_implementing[
target]]
choices_t = list(six.iteritems(choices))
for choice in product(*[list(range(len(v)))
for k, v in choices_t]):
plan2 = Manager.GoCa_Plan(self.man, dict(),
dict(self.insts),
dict(self.insts_implementing))
tmp = [(choices_t[n][0], choices_t[n][1][m])
for n, m in enumerate(choice)]
for target, inst_or_mod in tmp:
if inst_or_mod[0]:
name = inst_or_mod[1]
else:
name = plan2.plan_a(inst_or_mod[1])
for _in, depName in self.targets[target]:
plan2.insts[_in][2][depName] = name
yield plan2
def execute(self):
insts = frozenset(six.iterkeys(self.insts))
inst_list = tuple(sort_by_successors(
insts,
lambda inst: [self.insts[inst][2][k] for k
in self.man.modules[self.insts[inst][1]].deps
if self.insts[inst][2][k] in insts]
))
for name in reversed(inst_list):
self.man.create_instance(*self.insts[name])
def get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
return self.insts[self._get_or_create_a(_type)].object
def free_instance_name_like(self, name):
if name not in self.insts:
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if shot not in self.insts:
return shot
suffix += 1
def add_module_definition(self, name, definition):
if name in self.modules:
raise ValueError("Duplicate module name")
self.modules[name] = definition
if definition.implementedBy is not None:
for t in chain(definition.inherits, (name,)):
if t not in self.modules_implementing:
self.insts_implementing[t] = set()
self.modules_implementing[t] = set()
self.modules_implementing[t].add(name)
def update_instance(self, name, settings):
""" Updates settings of instance <name> with the
dictionary <settings>. """
if name not in self.insts:
raise ValueError("There's no instance named %s" % name)
if 'module' in settings:
raise ValueError(("Can't change module of existing instan"
+ "ce %s") % name)
self.l.info('update instance %-15s' % (name))
for k, v in six.iteritems(settings):
self.change_setting(name, k, v)
def create_instance(self, name, moduleName, settings):
""" Creates an instance of <moduleName> at <name> with
<settings>. """
if name in self.insts:
raise ValueError("There's already an instance named %s" %
name)
if moduleName not in self.modules:
raise ValueError("There's no module %s" % moduleName)
md = self.modules[moduleName]
deps = dict()
for k, v in six.iteritems(md.deps):
if k not in settings:
settings[k] = self._get_or_create_a(v.type)
if settings[k] is None:
if not v.allow_null:
raise ValueError("`null' not allowed for %s" % k)
elif settings[k] not in self.insts:
raise ValueError("No such instance %s" % settings[k])
else:
settings[k] = self.insts[settings[k]].object
deps[k] = settings[k]
for k, v in six.iteritems(md.vsettings):
if k not in settings:
settings[k] = v.default
if v.default is None:
self.l.warn('%s:%s not set' % (name, k))
self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
cl = get_by_path(md.implementedBy)
il = logging.getLogger(name)
obj = cl(settings, il)
self.register_instance(name, moduleName, obj, settings, deps)
return obj
def register_instance(self, name, moduleName, obj, settings, deps):
md = self.modules[moduleName]
self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)
for mn in chain(md.inherits, (moduleName,)):
if mn not in self.insts_implementing:
self.insts_implementing[mn] = set()
self.insts_implementing[mn].add(name)
if md.run:
self.to_stop.append(name)
self.daemons.append(name)
if self.running:
self._run_instance(name)
elif hasattr(obj, 'stop'):
self.to_stop.append(name)
def _run_instance(self, name):
ii = self.insts[name]
self.insts['threadPool'].object.execute_named(
self._daemon_entry, "mirte run %s" % name, ii)
def _daemon_entry(self, ii):
try:
ii.object.run()
except Exception:
self.l.exception(("Module %s exited " +
"abnormally") % ii.name)
return
self.l.info("Module %s exited normally" % ii.name)
def run(self):
assert not self.running
self.running = True
self.running_event.set()
tp = self.insts['threadPool'].object
tp.start()
# Note that self.daemons is already dependency ordered for us
for name in self.daemons:
self._run_instance(name)
while self.running:
try:
self.sleep_event.wait()
except KeyboardInterrupt:
self.l.warn("Keyboard interrupt")
self.stop()
self.l.info("Woke up")
self.l.info("Stopping modules")
for name in reversed(self.to_stop):
ii = self.insts[name]
self.l.info(" %s" % ii.name)
ii.object.stop()
self.l.info("Joining threadPool")
tp.join()
def change_setting(self, instance_name, key, raw_value):
""" Change the settings <key> to <raw_value> of an instance
named <instance_name>. <raw_value> should be a string and
is properly converted. """
ii = self.insts[instance_name]
mo = self.modules[ii.module]
if key in mo.deps:
if raw_value not in self.insts:
raise ValueError("No such instance %s" % raw_value)
vii = self.insts[raw_value]
vmo = self.modules[vii.module]
if not (mo.deps[key].type in vmo.inherits or
mo.deps[key].type == vii.module):
raise ValueError("%s isn't a %s" % (
raw_value, mo.deps[key].type))
value = vii.object
elif key in mo.vsettings:
value = self.valueTypes[mo.vsettings[key].type](
raw_value)
else:
raise ValueError("No such settings %s" % key)
self.l.info("Changing %s.%s to %s" % (instance_name,
key,
raw_value))
ii.settings[key] = value
ii.object.change_setting(key, value)
def stop(self):
if not self.running:
return
self.running_event.clear()
self.running = False
self.sleep_event.set()
|
bwesterb/mirte
|
src/core.py
|
Manager.update_instance
|
python
|
def update_instance(self, name, settings):
if name not in self.insts:
raise ValueError("There's no instance named %s" % name)
if 'module' in settings:
raise ValueError(("Can't change module of existing instan"
+ "ce %s") % name)
self.l.info('update instance %-15s' % (name))
for k, v in six.iteritems(settings):
self.change_setting(name, k, v)
|
Updates settings of instance <name> with the
dictionary <settings>.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/core.py#L235-L245
|
[
"def change_setting(self, instance_name, key, raw_value):\n \"\"\" Change the settings <key> to <raw_value> of an instance\n named <instance_name>. <raw_value> should be a string and\n is properly converted. \"\"\"\n ii = self.insts[instance_name]\n mo = self.modules[ii.module]\n if key in mo.deps:\n if raw_value not in self.insts:\n raise ValueError(\"No such instance %s\" % raw_value)\n vii = self.insts[raw_value]\n vmo = self.modules[vii.module]\n if not (mo.deps[key].type in vmo.inherits or\n mo.deps[key].type == vii.module):\n raise ValueError(\"%s isn't a %s\" % (\n raw_value, mo.deps[key].type))\n value = vii.object\n elif key in mo.vsettings:\n value = self.valueTypes[mo.vsettings[key].type](\n raw_value)\n else:\n raise ValueError(\"No such settings %s\" % key)\n self.l.info(\"Changing %s.%s to %s\" % (instance_name,\n key,\n raw_value))\n ii.settings[key] = value\n ii.object.change_setting(key, value)\n"
] |
class Manager(Module):
def __init__(self, logger=None):
if logger is None:
logger = logging.getLogger(object.__repr__(self))
super(Manager, self).__init__({}, logger)
self.running = False
self.running_event = threading.Event()
self.modules = dict()
self.to_stop = list() # objects to stop
self.daemons = list() # and to join
self.valueTypes = {'str': str,
'float': float,
'bool': bool,
'int': int}
self.insts = dict()
# module -> concrete modules implementing module
self.modules_implementing = dict()
self.insts_implementing = dict()
self.add_module_definition('module', ModuleDefinition())
self.add_module_definition('manager', ModuleDefinition())
self.add_module_definition('threadPool', ModuleDefinition(
vsettings={'minFree': VSettingDefinition('int', 4),
'maxFree': VSettingDefinition('int', 8),
'min': VSettingDefinition('int', 8)},
implementedBy='mirte.threadPool.ThreadPool'))
self.register_instance('manager', 'manager', self, {}, {})
self.create_instance('threadPool', 'threadPool', {})
self.sleep_event = KeyboardInterruptableEvent()
# set of paths of the mirteFiles that already have been loaded
self.loaded_mirteFiles = set([])
def _get_all(self, _type):
""" Gets all instances implementing type <_type> """
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
if not self.insts_implementing.get(_type, None):
raise ValueError("No instance implementing %s" % _type)
return self.insts_implementing[_type]
def get_a(self, _type):
""" Gets an instance implementing type <_type> """
return self.insts[self._get_a(_type)].object
def _get_a(self, _type):
""" Gets an instance implementing type <_type> """
tmp = self._get_all(_type)
ret = pick(tmp)
if len(tmp) != 1:
self.l.warn(("get_a: %s all implement %s; " +
"picking %s") % (tmp, _type, ret))
return ret
def got_a(self, _type):
""" Returns whether there is an instance implementing <_type>
"""
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
return (_type in self.insts_implementing and
self.insts_implementing[_type])
class GoCa_Plan(object):
""" A partial plan for a get_or_create_a call """
def __init__(self, man, targets, insts=None,
insts_implementing=None):
self.man = man
self.targets = targets
self.insts = dict() if insts is None else insts
self.insts_implementing = (dict() if insts_implementing
is None else insts_implementing)
def free_instance_name_like(self, name):
if (name not in self.insts and
name not in self.man.insts):
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if (shot not in self.insts and
name not in self.man.insts):
return shot
suffix += 1
def got_a(self, _type):
if self.man.got_a(_type):
return True
return (_type in self.insts_implementing and
self.insts_implementing[_type])
def get_all(self, _type):
ret = list()
if self.man.got_a(_type):
ret.extend(self.man._get_all(_type))
if _type in self.insts_implementing:
ret.extend(self.insts_implementing[_type])
return ret
def get_a(self, _type):
return pick(self.get_all(_type))
@property
def finished(self):
return not self.targets
def plan_a(self, mod):
name = self.free_instance_name_like(mod)
self.insts[name] = (name, mod, {})
md = self.man.modules[mod]
for mod2 in chain(md.inherits, (mod,)):
if mod2 not in self.insts_implementing:
self.insts_implementing[mod2] = list()
self.insts_implementing[mod2].append(name)
for depName, dep in six.iteritems(md.deps):
if dep.type in self.targets:
self.targets[dep.type].append(
(name, depName))
else:
self.targets[dep.type] = [
(name, depName)]
return name
def branches(self):
choices = dict()
for target in self.targets:
if self.got_a(target):
choices[target] = [(True, name) for
name in self.get_all(target)]
continue
choices[target] = [(False, name) for name
in self.man.modules_implementing[
target]]
choices_t = list(six.iteritems(choices))
for choice in product(*[list(range(len(v)))
for k, v in choices_t]):
plan2 = Manager.GoCa_Plan(self.man, dict(),
dict(self.insts),
dict(self.insts_implementing))
tmp = [(choices_t[n][0], choices_t[n][1][m])
for n, m in enumerate(choice)]
for target, inst_or_mod in tmp:
if inst_or_mod[0]:
name = inst_or_mod[1]
else:
name = plan2.plan_a(inst_or_mod[1])
for _in, depName in self.targets[target]:
plan2.insts[_in][2][depName] = name
yield plan2
def execute(self):
insts = frozenset(six.iterkeys(self.insts))
inst_list = tuple(sort_by_successors(
insts,
lambda inst: [self.insts[inst][2][k] for k
in self.man.modules[self.insts[inst][1]].deps
if self.insts[inst][2][k] in insts]
))
for name in reversed(inst_list):
self.man.create_instance(*self.insts[name])
def get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
return self.insts[self._get_or_create_a(_type)].object
def _get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
self.l.debug("get_or_create_a: %s" % _type)
stack = [Manager.GoCa_Plan(self, {_type: ()})]
while stack:
p = stack.pop()
if p.finished:
p.execute()
return p.get_a(_type)
for c in p.branches():
stack.append(c)
raise NotImplementedError
def free_instance_name_like(self, name):
if name not in self.insts:
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if shot not in self.insts:
return shot
suffix += 1
def add_module_definition(self, name, definition):
if name in self.modules:
raise ValueError("Duplicate module name")
self.modules[name] = definition
if definition.implementedBy is not None:
for t in chain(definition.inherits, (name,)):
if t not in self.modules_implementing:
self.insts_implementing[t] = set()
self.modules_implementing[t] = set()
self.modules_implementing[t].add(name)
def create_instance(self, name, moduleName, settings):
""" Creates an instance of <moduleName> at <name> with
<settings>. """
if name in self.insts:
raise ValueError("There's already an instance named %s" %
name)
if moduleName not in self.modules:
raise ValueError("There's no module %s" % moduleName)
md = self.modules[moduleName]
deps = dict()
for k, v in six.iteritems(md.deps):
if k not in settings:
settings[k] = self._get_or_create_a(v.type)
if settings[k] is None:
if not v.allow_null:
raise ValueError("`null' not allowed for %s" % k)
elif settings[k] not in self.insts:
raise ValueError("No such instance %s" % settings[k])
else:
settings[k] = self.insts[settings[k]].object
deps[k] = settings[k]
for k, v in six.iteritems(md.vsettings):
if k not in settings:
settings[k] = v.default
if v.default is None:
self.l.warn('%s:%s not set' % (name, k))
self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
cl = get_by_path(md.implementedBy)
il = logging.getLogger(name)
obj = cl(settings, il)
self.register_instance(name, moduleName, obj, settings, deps)
return obj
def register_instance(self, name, moduleName, obj, settings, deps):
md = self.modules[moduleName]
self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)
for mn in chain(md.inherits, (moduleName,)):
if mn not in self.insts_implementing:
self.insts_implementing[mn] = set()
self.insts_implementing[mn].add(name)
if md.run:
self.to_stop.append(name)
self.daemons.append(name)
if self.running:
self._run_instance(name)
elif hasattr(obj, 'stop'):
self.to_stop.append(name)
def _run_instance(self, name):
ii = self.insts[name]
self.insts['threadPool'].object.execute_named(
self._daemon_entry, "mirte run %s" % name, ii)
def _daemon_entry(self, ii):
try:
ii.object.run()
except Exception:
self.l.exception(("Module %s exited " +
"abnormally") % ii.name)
return
self.l.info("Module %s exited normally" % ii.name)
def run(self):
assert not self.running
self.running = True
self.running_event.set()
tp = self.insts['threadPool'].object
tp.start()
# Note that self.daemons is already dependency ordered for us
for name in self.daemons:
self._run_instance(name)
while self.running:
try:
self.sleep_event.wait()
except KeyboardInterrupt:
self.l.warn("Keyboard interrupt")
self.stop()
self.l.info("Woke up")
self.l.info("Stopping modules")
for name in reversed(self.to_stop):
ii = self.insts[name]
self.l.info(" %s" % ii.name)
ii.object.stop()
self.l.info("Joining threadPool")
tp.join()
def change_setting(self, instance_name, key, raw_value):
""" Change the settings <key> to <raw_value> of an instance
named <instance_name>. <raw_value> should be a string and
is properly converted. """
ii = self.insts[instance_name]
mo = self.modules[ii.module]
if key in mo.deps:
if raw_value not in self.insts:
raise ValueError("No such instance %s" % raw_value)
vii = self.insts[raw_value]
vmo = self.modules[vii.module]
if not (mo.deps[key].type in vmo.inherits or
mo.deps[key].type == vii.module):
raise ValueError("%s isn't a %s" % (
raw_value, mo.deps[key].type))
value = vii.object
elif key in mo.vsettings:
value = self.valueTypes[mo.vsettings[key].type](
raw_value)
else:
raise ValueError("No such settings %s" % key)
self.l.info("Changing %s.%s to %s" % (instance_name,
key,
raw_value))
ii.settings[key] = value
ii.object.change_setting(key, value)
def stop(self):
if not self.running:
return
self.running_event.clear()
self.running = False
self.sleep_event.set()
|
bwesterb/mirte
|
src/core.py
|
Manager.create_instance
|
python
|
def create_instance(self, name, moduleName, settings):
if name in self.insts:
raise ValueError("There's already an instance named %s" %
name)
if moduleName not in self.modules:
raise ValueError("There's no module %s" % moduleName)
md = self.modules[moduleName]
deps = dict()
for k, v in six.iteritems(md.deps):
if k not in settings:
settings[k] = self._get_or_create_a(v.type)
if settings[k] is None:
if not v.allow_null:
raise ValueError("`null' not allowed for %s" % k)
elif settings[k] not in self.insts:
raise ValueError("No such instance %s" % settings[k])
else:
settings[k] = self.insts[settings[k]].object
deps[k] = settings[k]
for k, v in six.iteritems(md.vsettings):
if k not in settings:
settings[k] = v.default
if v.default is None:
self.l.warn('%s:%s not set' % (name, k))
self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
cl = get_by_path(md.implementedBy)
il = logging.getLogger(name)
obj = cl(settings, il)
self.register_instance(name, moduleName, obj, settings, deps)
return obj
|
Creates an instance of <moduleName> at <name> with
<settings>.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/core.py#L247-L278
|
[
"def _get_or_create_a(self, _type):\n \"\"\" Gets or creates an instance of type <_type> \"\"\"\n self.l.debug(\"get_or_create_a: %s\" % _type)\n stack = [Manager.GoCa_Plan(self, {_type: ()})]\n while stack:\n p = stack.pop()\n if p.finished:\n p.execute()\n return p.get_a(_type)\n for c in p.branches():\n stack.append(c)\n raise NotImplementedError\n",
"def register_instance(self, name, moduleName, obj, settings, deps):\n md = self.modules[moduleName]\n self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)\n for mn in chain(md.inherits, (moduleName,)):\n if mn not in self.insts_implementing:\n self.insts_implementing[mn] = set()\n self.insts_implementing[mn].add(name)\n if md.run:\n self.to_stop.append(name)\n self.daemons.append(name)\n if self.running:\n self._run_instance(name)\n elif hasattr(obj, 'stop'):\n self.to_stop.append(name)\n"
] |
class Manager(Module):
def __init__(self, logger=None):
if logger is None:
logger = logging.getLogger(object.__repr__(self))
super(Manager, self).__init__({}, logger)
self.running = False
self.running_event = threading.Event()
self.modules = dict()
self.to_stop = list() # objects to stop
self.daemons = list() # and to join
self.valueTypes = {'str': str,
'float': float,
'bool': bool,
'int': int}
self.insts = dict()
# module -> concrete modules implementing module
self.modules_implementing = dict()
self.insts_implementing = dict()
self.add_module_definition('module', ModuleDefinition())
self.add_module_definition('manager', ModuleDefinition())
self.add_module_definition('threadPool', ModuleDefinition(
vsettings={'minFree': VSettingDefinition('int', 4),
'maxFree': VSettingDefinition('int', 8),
'min': VSettingDefinition('int', 8)},
implementedBy='mirte.threadPool.ThreadPool'))
self.register_instance('manager', 'manager', self, {}, {})
self.create_instance('threadPool', 'threadPool', {})
self.sleep_event = KeyboardInterruptableEvent()
# set of paths of the mirteFiles that already have been loaded
self.loaded_mirteFiles = set([])
def _get_all(self, _type):
""" Gets all instances implementing type <_type> """
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
if not self.insts_implementing.get(_type, None):
raise ValueError("No instance implementing %s" % _type)
return self.insts_implementing[_type]
def get_a(self, _type):
""" Gets an instance implementing type <_type> """
return self.insts[self._get_a(_type)].object
def _get_a(self, _type):
""" Gets an instance implementing type <_type> """
tmp = self._get_all(_type)
ret = pick(tmp)
if len(tmp) != 1:
self.l.warn(("get_a: %s all implement %s; " +
"picking %s") % (tmp, _type, ret))
return ret
def got_a(self, _type):
""" Returns whether there is an instance implementing <_type>
"""
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
return (_type in self.insts_implementing and
self.insts_implementing[_type])
class GoCa_Plan(object):
""" A partial plan for a get_or_create_a call """
def __init__(self, man, targets, insts=None,
insts_implementing=None):
self.man = man
self.targets = targets
self.insts = dict() if insts is None else insts
self.insts_implementing = (dict() if insts_implementing
is None else insts_implementing)
def free_instance_name_like(self, name):
if (name not in self.insts and
name not in self.man.insts):
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if (shot not in self.insts and
name not in self.man.insts):
return shot
suffix += 1
def got_a(self, _type):
if self.man.got_a(_type):
return True
return (_type in self.insts_implementing and
self.insts_implementing[_type])
def get_all(self, _type):
ret = list()
if self.man.got_a(_type):
ret.extend(self.man._get_all(_type))
if _type in self.insts_implementing:
ret.extend(self.insts_implementing[_type])
return ret
def get_a(self, _type):
return pick(self.get_all(_type))
@property
def finished(self):
return not self.targets
def plan_a(self, mod):
name = self.free_instance_name_like(mod)
self.insts[name] = (name, mod, {})
md = self.man.modules[mod]
for mod2 in chain(md.inherits, (mod,)):
if mod2 not in self.insts_implementing:
self.insts_implementing[mod2] = list()
self.insts_implementing[mod2].append(name)
for depName, dep in six.iteritems(md.deps):
if dep.type in self.targets:
self.targets[dep.type].append(
(name, depName))
else:
self.targets[dep.type] = [
(name, depName)]
return name
def branches(self):
choices = dict()
for target in self.targets:
if self.got_a(target):
choices[target] = [(True, name) for
name in self.get_all(target)]
continue
choices[target] = [(False, name) for name
in self.man.modules_implementing[
target]]
choices_t = list(six.iteritems(choices))
for choice in product(*[list(range(len(v)))
for k, v in choices_t]):
plan2 = Manager.GoCa_Plan(self.man, dict(),
dict(self.insts),
dict(self.insts_implementing))
tmp = [(choices_t[n][0], choices_t[n][1][m])
for n, m in enumerate(choice)]
for target, inst_or_mod in tmp:
if inst_or_mod[0]:
name = inst_or_mod[1]
else:
name = plan2.plan_a(inst_or_mod[1])
for _in, depName in self.targets[target]:
plan2.insts[_in][2][depName] = name
yield plan2
def execute(self):
insts = frozenset(six.iterkeys(self.insts))
inst_list = tuple(sort_by_successors(
insts,
lambda inst: [self.insts[inst][2][k] for k
in self.man.modules[self.insts[inst][1]].deps
if self.insts[inst][2][k] in insts]
))
for name in reversed(inst_list):
self.man.create_instance(*self.insts[name])
def get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
return self.insts[self._get_or_create_a(_type)].object
def _get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
self.l.debug("get_or_create_a: %s" % _type)
stack = [Manager.GoCa_Plan(self, {_type: ()})]
while stack:
p = stack.pop()
if p.finished:
p.execute()
return p.get_a(_type)
for c in p.branches():
stack.append(c)
raise NotImplementedError
def free_instance_name_like(self, name):
if name not in self.insts:
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if shot not in self.insts:
return shot
suffix += 1
def add_module_definition(self, name, definition):
if name in self.modules:
raise ValueError("Duplicate module name")
self.modules[name] = definition
if definition.implementedBy is not None:
for t in chain(definition.inherits, (name,)):
if t not in self.modules_implementing:
self.insts_implementing[t] = set()
self.modules_implementing[t] = set()
self.modules_implementing[t].add(name)
def update_instance(self, name, settings):
""" Updates settings of instance <name> with the
dictionary <settings>. """
if name not in self.insts:
raise ValueError("There's no instance named %s" % name)
if 'module' in settings:
raise ValueError(("Can't change module of existing instan"
+ "ce %s") % name)
self.l.info('update instance %-15s' % (name))
for k, v in six.iteritems(settings):
self.change_setting(name, k, v)
def register_instance(self, name, moduleName, obj, settings, deps):
md = self.modules[moduleName]
self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)
for mn in chain(md.inherits, (moduleName,)):
if mn not in self.insts_implementing:
self.insts_implementing[mn] = set()
self.insts_implementing[mn].add(name)
if md.run:
self.to_stop.append(name)
self.daemons.append(name)
if self.running:
self._run_instance(name)
elif hasattr(obj, 'stop'):
self.to_stop.append(name)
def _run_instance(self, name):
ii = self.insts[name]
self.insts['threadPool'].object.execute_named(
self._daemon_entry, "mirte run %s" % name, ii)
def _daemon_entry(self, ii):
try:
ii.object.run()
except Exception:
self.l.exception(("Module %s exited " +
"abnormally") % ii.name)
return
self.l.info("Module %s exited normally" % ii.name)
def run(self):
assert not self.running
self.running = True
self.running_event.set()
tp = self.insts['threadPool'].object
tp.start()
# Note that self.daemons is already dependency ordered for us
for name in self.daemons:
self._run_instance(name)
while self.running:
try:
self.sleep_event.wait()
except KeyboardInterrupt:
self.l.warn("Keyboard interrupt")
self.stop()
self.l.info("Woke up")
self.l.info("Stopping modules")
for name in reversed(self.to_stop):
ii = self.insts[name]
self.l.info(" %s" % ii.name)
ii.object.stop()
self.l.info("Joining threadPool")
tp.join()
def change_setting(self, instance_name, key, raw_value):
""" Change the settings <key> to <raw_value> of an instance
named <instance_name>. <raw_value> should be a string and
is properly converted. """
ii = self.insts[instance_name]
mo = self.modules[ii.module]
if key in mo.deps:
if raw_value not in self.insts:
raise ValueError("No such instance %s" % raw_value)
vii = self.insts[raw_value]
vmo = self.modules[vii.module]
if not (mo.deps[key].type in vmo.inherits or
mo.deps[key].type == vii.module):
raise ValueError("%s isn't a %s" % (
raw_value, mo.deps[key].type))
value = vii.object
elif key in mo.vsettings:
value = self.valueTypes[mo.vsettings[key].type](
raw_value)
else:
raise ValueError("No such settings %s" % key)
self.l.info("Changing %s.%s to %s" % (instance_name,
key,
raw_value))
ii.settings[key] = value
ii.object.change_setting(key, value)
def stop(self):
if not self.running:
return
self.running_event.clear()
self.running = False
self.sleep_event.set()
|
bwesterb/mirte
|
src/core.py
|
Manager.change_setting
|
python
|
def change_setting(self, instance_name, key, raw_value):
ii = self.insts[instance_name]
mo = self.modules[ii.module]
if key in mo.deps:
if raw_value not in self.insts:
raise ValueError("No such instance %s" % raw_value)
vii = self.insts[raw_value]
vmo = self.modules[vii.module]
if not (mo.deps[key].type in vmo.inherits or
mo.deps[key].type == vii.module):
raise ValueError("%s isn't a %s" % (
raw_value, mo.deps[key].type))
value = vii.object
elif key in mo.vsettings:
value = self.valueTypes[mo.vsettings[key].type](
raw_value)
else:
raise ValueError("No such settings %s" % key)
self.l.info("Changing %s.%s to %s" % (instance_name,
key,
raw_value))
ii.settings[key] = value
ii.object.change_setting(key, value)
|
Change the settings <key> to <raw_value> of an instance
named <instance_name>. <raw_value> should be a string and
is properly converted.
|
train
|
https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/core.py#L333-L358
| null |
class Manager(Module):
def __init__(self, logger=None):
if logger is None:
logger = logging.getLogger(object.__repr__(self))
super(Manager, self).__init__({}, logger)
self.running = False
self.running_event = threading.Event()
self.modules = dict()
self.to_stop = list() # objects to stop
self.daemons = list() # and to join
self.valueTypes = {'str': str,
'float': float,
'bool': bool,
'int': int}
self.insts = dict()
# module -> concrete modules implementing module
self.modules_implementing = dict()
self.insts_implementing = dict()
self.add_module_definition('module', ModuleDefinition())
self.add_module_definition('manager', ModuleDefinition())
self.add_module_definition('threadPool', ModuleDefinition(
vsettings={'minFree': VSettingDefinition('int', 4),
'maxFree': VSettingDefinition('int', 8),
'min': VSettingDefinition('int', 8)},
implementedBy='mirte.threadPool.ThreadPool'))
self.register_instance('manager', 'manager', self, {}, {})
self.create_instance('threadPool', 'threadPool', {})
self.sleep_event = KeyboardInterruptableEvent()
# set of paths of the mirteFiles that already have been loaded
self.loaded_mirteFiles = set([])
def _get_all(self, _type):
""" Gets all instances implementing type <_type> """
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
if not self.insts_implementing.get(_type, None):
raise ValueError("No instance implementing %s" % _type)
return self.insts_implementing[_type]
def get_a(self, _type):
""" Gets an instance implementing type <_type> """
return self.insts[self._get_a(_type)].object
def _get_a(self, _type):
""" Gets an instance implementing type <_type> """
tmp = self._get_all(_type)
ret = pick(tmp)
if len(tmp) != 1:
self.l.warn(("get_a: %s all implement %s; " +
"picking %s") % (tmp, _type, ret))
return ret
def got_a(self, _type):
""" Returns whether there is an instance implementing <_type>
"""
if _type not in self.modules:
raise ValueError("No such module, %s" % _type)
return (_type in self.insts_implementing and
self.insts_implementing[_type])
class GoCa_Plan(object):
""" A partial plan for a get_or_create_a call """
def __init__(self, man, targets, insts=None,
insts_implementing=None):
self.man = man
self.targets = targets
self.insts = dict() if insts is None else insts
self.insts_implementing = (dict() if insts_implementing
is None else insts_implementing)
def free_instance_name_like(self, name):
if (name not in self.insts and
name not in self.man.insts):
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if (shot not in self.insts and
name not in self.man.insts):
return shot
suffix += 1
def got_a(self, _type):
if self.man.got_a(_type):
return True
return (_type in self.insts_implementing and
self.insts_implementing[_type])
def get_all(self, _type):
ret = list()
if self.man.got_a(_type):
ret.extend(self.man._get_all(_type))
if _type in self.insts_implementing:
ret.extend(self.insts_implementing[_type])
return ret
def get_a(self, _type):
return pick(self.get_all(_type))
@property
def finished(self):
return not self.targets
def plan_a(self, mod):
name = self.free_instance_name_like(mod)
self.insts[name] = (name, mod, {})
md = self.man.modules[mod]
for mod2 in chain(md.inherits, (mod,)):
if mod2 not in self.insts_implementing:
self.insts_implementing[mod2] = list()
self.insts_implementing[mod2].append(name)
for depName, dep in six.iteritems(md.deps):
if dep.type in self.targets:
self.targets[dep.type].append(
(name, depName))
else:
self.targets[dep.type] = [
(name, depName)]
return name
def branches(self):
choices = dict()
for target in self.targets:
if self.got_a(target):
choices[target] = [(True, name) for
name in self.get_all(target)]
continue
choices[target] = [(False, name) for name
in self.man.modules_implementing[
target]]
choices_t = list(six.iteritems(choices))
for choice in product(*[list(range(len(v)))
for k, v in choices_t]):
plan2 = Manager.GoCa_Plan(self.man, dict(),
dict(self.insts),
dict(self.insts_implementing))
tmp = [(choices_t[n][0], choices_t[n][1][m])
for n, m in enumerate(choice)]
for target, inst_or_mod in tmp:
if inst_or_mod[0]:
name = inst_or_mod[1]
else:
name = plan2.plan_a(inst_or_mod[1])
for _in, depName in self.targets[target]:
plan2.insts[_in][2][depName] = name
yield plan2
def execute(self):
insts = frozenset(six.iterkeys(self.insts))
inst_list = tuple(sort_by_successors(
insts,
lambda inst: [self.insts[inst][2][k] for k
in self.man.modules[self.insts[inst][1]].deps
if self.insts[inst][2][k] in insts]
))
for name in reversed(inst_list):
self.man.create_instance(*self.insts[name])
def get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
return self.insts[self._get_or_create_a(_type)].object
def _get_or_create_a(self, _type):
""" Gets or creates an instance of type <_type> """
self.l.debug("get_or_create_a: %s" % _type)
stack = [Manager.GoCa_Plan(self, {_type: ()})]
while stack:
p = stack.pop()
if p.finished:
p.execute()
return p.get_a(_type)
for c in p.branches():
stack.append(c)
raise NotImplementedError
def free_instance_name_like(self, name):
if name not in self.insts:
return name
suffix = 2
while True:
shot = "%s-%s" % (name, suffix)
if shot not in self.insts:
return shot
suffix += 1
def add_module_definition(self, name, definition):
if name in self.modules:
raise ValueError("Duplicate module name")
self.modules[name] = definition
if definition.implementedBy is not None:
for t in chain(definition.inherits, (name,)):
if t not in self.modules_implementing:
self.insts_implementing[t] = set()
self.modules_implementing[t] = set()
self.modules_implementing[t].add(name)
def update_instance(self, name, settings):
""" Updates settings of instance <name> with the
dictionary <settings>. """
if name not in self.insts:
raise ValueError("There's no instance named %s" % name)
if 'module' in settings:
raise ValueError(("Can't change module of existing instan"
+ "ce %s") % name)
self.l.info('update instance %-15s' % (name))
for k, v in six.iteritems(settings):
self.change_setting(name, k, v)
def create_instance(self, name, moduleName, settings):
""" Creates an instance of <moduleName> at <name> with
<settings>. """
if name in self.insts:
raise ValueError("There's already an instance named %s" %
name)
if moduleName not in self.modules:
raise ValueError("There's no module %s" % moduleName)
md = self.modules[moduleName]
deps = dict()
for k, v in six.iteritems(md.deps):
if k not in settings:
settings[k] = self._get_or_create_a(v.type)
if settings[k] is None:
if not v.allow_null:
raise ValueError("`null' not allowed for %s" % k)
elif settings[k] not in self.insts:
raise ValueError("No such instance %s" % settings[k])
else:
settings[k] = self.insts[settings[k]].object
deps[k] = settings[k]
for k, v in six.iteritems(md.vsettings):
if k not in settings:
settings[k] = v.default
if v.default is None:
self.l.warn('%s:%s not set' % (name, k))
self.l.info('create_instance %-15s %s' % (name, md.implementedBy))
cl = get_by_path(md.implementedBy)
il = logging.getLogger(name)
obj = cl(settings, il)
self.register_instance(name, moduleName, obj, settings, deps)
return obj
def register_instance(self, name, moduleName, obj, settings, deps):
md = self.modules[moduleName]
self.insts[name] = InstanceInfo(name, moduleName, obj, settings, deps)
for mn in chain(md.inherits, (moduleName,)):
if mn not in self.insts_implementing:
self.insts_implementing[mn] = set()
self.insts_implementing[mn].add(name)
if md.run:
self.to_stop.append(name)
self.daemons.append(name)
if self.running:
self._run_instance(name)
elif hasattr(obj, 'stop'):
self.to_stop.append(name)
def _run_instance(self, name):
ii = self.insts[name]
self.insts['threadPool'].object.execute_named(
self._daemon_entry, "mirte run %s" % name, ii)
def _daemon_entry(self, ii):
try:
ii.object.run()
except Exception:
self.l.exception(("Module %s exited " +
"abnormally") % ii.name)
return
self.l.info("Module %s exited normally" % ii.name)
def run(self):
assert not self.running
self.running = True
self.running_event.set()
tp = self.insts['threadPool'].object
tp.start()
# Note that self.daemons is already dependency ordered for us
for name in self.daemons:
self._run_instance(name)
while self.running:
try:
self.sleep_event.wait()
except KeyboardInterrupt:
self.l.warn("Keyboard interrupt")
self.stop()
self.l.info("Woke up")
self.l.info("Stopping modules")
for name in reversed(self.to_stop):
ii = self.insts[name]
self.l.info(" %s" % ii.name)
ii.object.stop()
self.l.info("Joining threadPool")
tp.join()
def stop(self):
if not self.running:
return
self.running_event.clear()
self.running = False
self.sleep_event.set()
|
mrallen1/pygett
|
pygett/request.py
|
BaseRequest.get
|
python
|
def get(self, endpoint, *args, **kwargs):
endpoint = self.base_url + endpoint
return self._make_request(endpoint, type='GET')
|
**get**
Make a GET call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
Output:
* A :py:mod:`pygett.request.GettResponse` object
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/request.py#L45-L58
|
[
"def _make_request(self, endpoint, type='GET'):\n pass\n",
"def _make_request(self, endpoint, **kwargs):\n status_code = None\n response = None\n\n self.endpoint = endpoint\n self.__dict__.update(kwargs)\n\n if self.type == \"GET\":\n response = requests.get(self.endpoint)\n elif self.type == \"POST\":\n response = requests.post(self.endpoint, data=self.data)\n elif self.type == \"PUT\":\n response = requests.put(self.endpoint, data=self.data)\n else:\n raise NotImplementedError(\"%s is not supported\" % self.type)\n\n if response.status_code == requests.codes.ok:\n return GettResponse(response.status_code, response.content)\n else:\n raise GettError(response.status_code, self.endpoint, response.content)\n"
] |
class BaseRequest(object):
"""
Base request class
"""
def __init__(self, *args, **kwargs):
self.base_url = "https://open.ge.tt/1"
def _make_request(self, endpoint, type='GET'):
pass
def post(self, endpoint, d, *args, **kwargs):
"""
**post**
Make a POST call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
* POST data
**NOTE**: Passed POST data will be automatically serialized to a JSON string
if it's not already a string
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
endpoint = self.base_url + endpoint
if not isinstance(d, str):
d = json.dumps(d)
return self._make_request(endpoint, type='POST', data=d)
def put(self, endpoint, d, *args, **kwargs):
"""
**put**
Make a PUT call to a remove endpoint
Input:
* An absolute endpoint
* A data stream
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
return self._make_request(endpoint, type='PUT', data=d)
|
mrallen1/pygett
|
pygett/request.py
|
BaseRequest.post
|
python
|
def post(self, endpoint, d, *args, **kwargs):
endpoint = self.base_url + endpoint
if not isinstance(d, str):
d = json.dumps(d)
return self._make_request(endpoint, type='POST', data=d)
|
**post**
Make a POST call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
* POST data
**NOTE**: Passed POST data will be automatically serialized to a JSON string
if it's not already a string
Output:
* A :py:mod:`pygett.request.GettResponse` object
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/request.py#L60-L80
|
[
"def _make_request(self, endpoint, type='GET'):\n pass\n",
"def _make_request(self, endpoint, **kwargs):\n status_code = None\n response = None\n\n self.endpoint = endpoint\n self.__dict__.update(kwargs)\n\n if self.type == \"GET\":\n response = requests.get(self.endpoint)\n elif self.type == \"POST\":\n response = requests.post(self.endpoint, data=self.data)\n elif self.type == \"PUT\":\n response = requests.put(self.endpoint, data=self.data)\n else:\n raise NotImplementedError(\"%s is not supported\" % self.type)\n\n if response.status_code == requests.codes.ok:\n return GettResponse(response.status_code, response.content)\n else:\n raise GettError(response.status_code, self.endpoint, response.content)\n"
] |
class BaseRequest(object):
"""
Base request class
"""
def __init__(self, *args, **kwargs):
self.base_url = "https://open.ge.tt/1"
def _make_request(self, endpoint, type='GET'):
pass
def get(self, endpoint, *args, **kwargs):
"""
**get**
Make a GET call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
endpoint = self.base_url + endpoint
return self._make_request(endpoint, type='GET')
def put(self, endpoint, d, *args, **kwargs):
"""
**put**
Make a PUT call to a remove endpoint
Input:
* An absolute endpoint
* A data stream
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
return self._make_request(endpoint, type='PUT', data=d)
|
mrallen1/pygett
|
pygett/request.py
|
BaseRequest.put
|
python
|
def put(self, endpoint, d, *args, **kwargs):
return self._make_request(endpoint, type='PUT', data=d)
|
**put**
Make a PUT call to a remove endpoint
Input:
* An absolute endpoint
* A data stream
Output:
* A :py:mod:`pygett.request.GettResponse` object
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/request.py#L82-L96
|
[
"def _make_request(self, endpoint, type='GET'):\n pass\n",
"def _make_request(self, endpoint, **kwargs):\n status_code = None\n response = None\n\n self.endpoint = endpoint\n self.__dict__.update(kwargs)\n\n if self.type == \"GET\":\n response = requests.get(self.endpoint)\n elif self.type == \"POST\":\n response = requests.post(self.endpoint, data=self.data)\n elif self.type == \"PUT\":\n response = requests.put(self.endpoint, data=self.data)\n else:\n raise NotImplementedError(\"%s is not supported\" % self.type)\n\n if response.status_code == requests.codes.ok:\n return GettResponse(response.status_code, response.content)\n else:\n raise GettError(response.status_code, self.endpoint, response.content)\n"
] |
class BaseRequest(object):
"""
Base request class
"""
def __init__(self, *args, **kwargs):
self.base_url = "https://open.ge.tt/1"
def _make_request(self, endpoint, type='GET'):
pass
def get(self, endpoint, *args, **kwargs):
"""
**get**
Make a GET call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
endpoint = self.base_url + endpoint
return self._make_request(endpoint, type='GET')
def post(self, endpoint, d, *args, **kwargs):
"""
**post**
Make a POST call to a remote endpoint
Input:
* An endpoint relative to the ``base_url``
* POST data
**NOTE**: Passed POST data will be automatically serialized to a JSON string
if it's not already a string
Output:
* A :py:mod:`pygett.request.GettResponse` object
"""
endpoint = self.base_url + endpoint
if not isinstance(d, str):
d = json.dumps(d)
return self._make_request(endpoint, type='POST', data=d)
|
mrallen1/pygett
|
pygett/files.py
|
GettFile.contents
|
python
|
def contents(self):
response = GettRequest().get("/files/%s/%s/blob" % (self.sharename, self.fileid))
return response.response
|
This method downloads the contents of the file represented by a `GettFile` object's metadata.
Input:
* None
Output:
* A byte stream
**NOTE**: You are responsible for handling any encoding/decoding which may be necessary.
Example::
file = client.get_file("4ddfds", 0)
print file.contents()
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/files.py#L48-L67
|
[
"def get(self, endpoint, *args, **kwargs):\n \"\"\"\n **get**\n\n Make a GET call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n return self._make_request(endpoint, type='GET')\n"
] |
class GettFile(object):
"""
Encapsulate a file in the Gett service.
**Attributes**
This object has the following attributes:
- ``fileid`` - A file id as assigned by the Gett service
- ``sharename`` - The sharename in which this file is contained
- ``downloads`` - The number of downloads of this file
- ``getturl`` - The URL at which this file can be viewed in a browser
- ``filename`` - The user specified filename
- ``readystate`` - The Gett state of this file
During file uploads, the following attributes will be set:
- ``put_upload_url`` - A URL suitable for use with the PUT HTTP verb (see ``send_file()``)
- ``post_upload_url`` - A URL suitable for use with the POST HTTP verb
"""
def __init__(self, user, **kwargs):
self.user = user
self.fileid = None
self.sharename = None
self.downloads = None
self.getturl = None
self.created = None
self.filename = None
self.readystate = None
self.__dict__.update(kwargs)
if 'upload' in kwargs:
self.put_upload_url = kwargs['upload']['puturl']
self.post_upload_url = kwargs['upload']['posturl']
else:
self.put_upload_url = None
self.post_upload_url = None
def __repr__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def __str__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def thumbnail(self):
"""
This method returns a thumbnail representation of the file if the data is a supported graphics format.
Input:
* None
Output:
* A byte stream representing a thumbnail of a support graphics file
Example::
file = client.get_file("4ddfds", 0)
open("thumbnail.jpg", "wb").write(file.thumbnail())
"""
response = GettRequest().get("/files/%s/%s/blob/thumb" % (self.sharename, self.fileid))
return response.response
def destroy(self):
"""
This method removes the file's content and metadata from the Gett service. There is no way to recover
the data once this method has successfully completed.
Input:
* None
Output:
* ``True``
Example::
client.get_file("4ddfds", 0).destroy()
"""
response = GettRequest().post(("/files/%s/%s/destroy?accesstoken=%s" % self.user.access_token()), None)
if response.http_status == 200:
return True
def upload_url(self):
"""
This method generates URLs which allow overwriting a file's content with new content. The output is suitable
for use in the ``send_data()`` method below.
Input:
* None
Output:
* A URL (string)
Example::
file = client.get_file("4ddfds", 0)
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
"""
if self.put_upload_url:
return self.put_upload_url
else:
response = GettRequest().get("/files/%s/%s/upload?accesstoken=%s" % (self.sharename, self.fileid, self.user.access_token()))
if response.http_status == 200:
return response.response['puturl']
def refresh(self):
"""
Retrieve current file metadata from the Gett service.
Input:
* None
Output:
* None
Example::
file = client.get_file("4ddfds", 0)
print "File size: %s" % file.size # File size: 96
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
file.refresh()
print "File size: %s" % file.size # File size: 109
"""
response = GettRequest().get("/files/%s/%s" % (self.sharename, self.fileid))
if response.http_status == 200:
self.__init__(self.user, response.response)
def send_data(self, **kwargs):
"""
This method transmits data to the Gett service.
Input:
* ``put_url`` A PUT url to use when transmitting the data (required)
* ``data`` A byte stream (required)
Output:
* ``True``
Example::
if file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read()):
print "Your file has been uploaded."
"""
put_url = None
if 'put_url' in kwargs:
put_url = kwargs['put_url']
else:
put_url = self.put_upload_url
if 'data' not in kwargs:
raise AttributeError("'data' parameter is required")
if not put_url:
raise AttributeError("'put_url' cannot be None")
if not isinstance(kwargs['data'], str):
raise TypeError("'data' parameter must be of type 'str'")
response = GettRequest().put(put_url, kwargs['data'])
if response.http_status == 200:
return True
|
mrallen1/pygett
|
pygett/files.py
|
GettFile.thumbnail
|
python
|
def thumbnail(self):
response = GettRequest().get("/files/%s/%s/blob/thumb" % (self.sharename, self.fileid))
return response.response
|
This method returns a thumbnail representation of the file if the data is a supported graphics format.
Input:
* None
Output:
* A byte stream representing a thumbnail of a support graphics file
Example::
file = client.get_file("4ddfds", 0)
open("thumbnail.jpg", "wb").write(file.thumbnail())
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/files.py#L69-L86
|
[
"def get(self, endpoint, *args, **kwargs):\n \"\"\"\n **get**\n\n Make a GET call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n return self._make_request(endpoint, type='GET')\n"
] |
class GettFile(object):
"""
Encapsulate a file in the Gett service.
**Attributes**
This object has the following attributes:
- ``fileid`` - A file id as assigned by the Gett service
- ``sharename`` - The sharename in which this file is contained
- ``downloads`` - The number of downloads of this file
- ``getturl`` - The URL at which this file can be viewed in a browser
- ``filename`` - The user specified filename
- ``readystate`` - The Gett state of this file
During file uploads, the following attributes will be set:
- ``put_upload_url`` - A URL suitable for use with the PUT HTTP verb (see ``send_file()``)
- ``post_upload_url`` - A URL suitable for use with the POST HTTP verb
"""
def __init__(self, user, **kwargs):
self.user = user
self.fileid = None
self.sharename = None
self.downloads = None
self.getturl = None
self.created = None
self.filename = None
self.readystate = None
self.__dict__.update(kwargs)
if 'upload' in kwargs:
self.put_upload_url = kwargs['upload']['puturl']
self.post_upload_url = kwargs['upload']['posturl']
else:
self.put_upload_url = None
self.post_upload_url = None
def __repr__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def __str__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def contents(self):
"""
This method downloads the contents of the file represented by a `GettFile` object's metadata.
Input:
* None
Output:
* A byte stream
**NOTE**: You are responsible for handling any encoding/decoding which may be necessary.
Example::
file = client.get_file("4ddfds", 0)
print file.contents()
"""
response = GettRequest().get("/files/%s/%s/blob" % (self.sharename, self.fileid))
return response.response
def destroy(self):
"""
This method removes the file's content and metadata from the Gett service. There is no way to recover
the data once this method has successfully completed.
Input:
* None
Output:
* ``True``
Example::
client.get_file("4ddfds", 0).destroy()
"""
response = GettRequest().post(("/files/%s/%s/destroy?accesstoken=%s" % self.user.access_token()), None)
if response.http_status == 200:
return True
def upload_url(self):
"""
This method generates URLs which allow overwriting a file's content with new content. The output is suitable
for use in the ``send_data()`` method below.
Input:
* None
Output:
* A URL (string)
Example::
file = client.get_file("4ddfds", 0)
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
"""
if self.put_upload_url:
return self.put_upload_url
else:
response = GettRequest().get("/files/%s/%s/upload?accesstoken=%s" % (self.sharename, self.fileid, self.user.access_token()))
if response.http_status == 200:
return response.response['puturl']
def refresh(self):
"""
Retrieve current file metadata from the Gett service.
Input:
* None
Output:
* None
Example::
file = client.get_file("4ddfds", 0)
print "File size: %s" % file.size # File size: 96
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
file.refresh()
print "File size: %s" % file.size # File size: 109
"""
response = GettRequest().get("/files/%s/%s" % (self.sharename, self.fileid))
if response.http_status == 200:
self.__init__(self.user, response.response)
def send_data(self, **kwargs):
"""
This method transmits data to the Gett service.
Input:
* ``put_url`` A PUT url to use when transmitting the data (required)
* ``data`` A byte stream (required)
Output:
* ``True``
Example::
if file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read()):
print "Your file has been uploaded."
"""
put_url = None
if 'put_url' in kwargs:
put_url = kwargs['put_url']
else:
put_url = self.put_upload_url
if 'data' not in kwargs:
raise AttributeError("'data' parameter is required")
if not put_url:
raise AttributeError("'put_url' cannot be None")
if not isinstance(kwargs['data'], str):
raise TypeError("'data' parameter must be of type 'str'")
response = GettRequest().put(put_url, kwargs['data'])
if response.http_status == 200:
return True
|
mrallen1/pygett
|
pygett/files.py
|
GettFile.upload_url
|
python
|
def upload_url(self):
if self.put_upload_url:
return self.put_upload_url
else:
response = GettRequest().get("/files/%s/%s/upload?accesstoken=%s" % (self.sharename, self.fileid, self.user.access_token()))
if response.http_status == 200:
return response.response['puturl']
|
This method generates URLs which allow overwriting a file's content with new content. The output is suitable
for use in the ``send_data()`` method below.
Input:
* None
Output:
* A URL (string)
Example::
file = client.get_file("4ddfds", 0)
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/files.py#L108-L130
|
[
"def get(self, endpoint, *args, **kwargs):\n \"\"\"\n **get**\n\n Make a GET call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n return self._make_request(endpoint, type='GET')\n"
] |
class GettFile(object):
"""
Encapsulate a file in the Gett service.
**Attributes**
This object has the following attributes:
- ``fileid`` - A file id as assigned by the Gett service
- ``sharename`` - The sharename in which this file is contained
- ``downloads`` - The number of downloads of this file
- ``getturl`` - The URL at which this file can be viewed in a browser
- ``filename`` - The user specified filename
- ``readystate`` - The Gett state of this file
During file uploads, the following attributes will be set:
- ``put_upload_url`` - A URL suitable for use with the PUT HTTP verb (see ``send_file()``)
- ``post_upload_url`` - A URL suitable for use with the POST HTTP verb
"""
def __init__(self, user, **kwargs):
self.user = user
self.fileid = None
self.sharename = None
self.downloads = None
self.getturl = None
self.created = None
self.filename = None
self.readystate = None
self.__dict__.update(kwargs)
if 'upload' in kwargs:
self.put_upload_url = kwargs['upload']['puturl']
self.post_upload_url = kwargs['upload']['posturl']
else:
self.put_upload_url = None
self.post_upload_url = None
def __repr__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def __str__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def contents(self):
"""
This method downloads the contents of the file represented by a `GettFile` object's metadata.
Input:
* None
Output:
* A byte stream
**NOTE**: You are responsible for handling any encoding/decoding which may be necessary.
Example::
file = client.get_file("4ddfds", 0)
print file.contents()
"""
response = GettRequest().get("/files/%s/%s/blob" % (self.sharename, self.fileid))
return response.response
def thumbnail(self):
"""
This method returns a thumbnail representation of the file if the data is a supported graphics format.
Input:
* None
Output:
* A byte stream representing a thumbnail of a support graphics file
Example::
file = client.get_file("4ddfds", 0)
open("thumbnail.jpg", "wb").write(file.thumbnail())
"""
response = GettRequest().get("/files/%s/%s/blob/thumb" % (self.sharename, self.fileid))
return response.response
def destroy(self):
"""
This method removes the file's content and metadata from the Gett service. There is no way to recover
the data once this method has successfully completed.
Input:
* None
Output:
* ``True``
Example::
client.get_file("4ddfds", 0).destroy()
"""
response = GettRequest().post(("/files/%s/%s/destroy?accesstoken=%s" % self.user.access_token()), None)
if response.http_status == 200:
return True
def refresh(self):
"""
Retrieve current file metadata from the Gett service.
Input:
* None
Output:
* None
Example::
file = client.get_file("4ddfds", 0)
print "File size: %s" % file.size # File size: 96
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
file.refresh()
print "File size: %s" % file.size # File size: 109
"""
response = GettRequest().get("/files/%s/%s" % (self.sharename, self.fileid))
if response.http_status == 200:
self.__init__(self.user, response.response)
def send_data(self, **kwargs):
"""
This method transmits data to the Gett service.
Input:
* ``put_url`` A PUT url to use when transmitting the data (required)
* ``data`` A byte stream (required)
Output:
* ``True``
Example::
if file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read()):
print "Your file has been uploaded."
"""
put_url = None
if 'put_url' in kwargs:
put_url = kwargs['put_url']
else:
put_url = self.put_upload_url
if 'data' not in kwargs:
raise AttributeError("'data' parameter is required")
if not put_url:
raise AttributeError("'put_url' cannot be None")
if not isinstance(kwargs['data'], str):
raise TypeError("'data' parameter must be of type 'str'")
response = GettRequest().put(put_url, kwargs['data'])
if response.http_status == 200:
return True
|
mrallen1/pygett
|
pygett/files.py
|
GettFile.send_data
|
python
|
def send_data(self, **kwargs):
put_url = None
if 'put_url' in kwargs:
put_url = kwargs['put_url']
else:
put_url = self.put_upload_url
if 'data' not in kwargs:
raise AttributeError("'data' parameter is required")
if not put_url:
raise AttributeError("'put_url' cannot be None")
if not isinstance(kwargs['data'], str):
raise TypeError("'data' parameter must be of type 'str'")
response = GettRequest().put(put_url, kwargs['data'])
if response.http_status == 200:
return True
|
This method transmits data to the Gett service.
Input:
* ``put_url`` A PUT url to use when transmitting the data (required)
* ``data`` A byte stream (required)
Output:
* ``True``
Example::
if file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read()):
print "Your file has been uploaded."
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/files.py#L155-L189
|
[
"def put(self, endpoint, d, *args, **kwargs):\n \"\"\"\n **put**\n\n Make a PUT call to a remove endpoint\n\n Input:\n * An absolute endpoint\n * A data stream\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n\n return self._make_request(endpoint, type='PUT', data=d)\n"
] |
class GettFile(object):
"""
Encapsulate a file in the Gett service.
**Attributes**
This object has the following attributes:
- ``fileid`` - A file id as assigned by the Gett service
- ``sharename`` - The sharename in which this file is contained
- ``downloads`` - The number of downloads of this file
- ``getturl`` - The URL at which this file can be viewed in a browser
- ``filename`` - The user specified filename
- ``readystate`` - The Gett state of this file
During file uploads, the following attributes will be set:
- ``put_upload_url`` - A URL suitable for use with the PUT HTTP verb (see ``send_file()``)
- ``post_upload_url`` - A URL suitable for use with the POST HTTP verb
"""
def __init__(self, user, **kwargs):
self.user = user
self.fileid = None
self.sharename = None
self.downloads = None
self.getturl = None
self.created = None
self.filename = None
self.readystate = None
self.__dict__.update(kwargs)
if 'upload' in kwargs:
self.put_upload_url = kwargs['upload']['puturl']
self.post_upload_url = kwargs['upload']['posturl']
else:
self.put_upload_url = None
self.post_upload_url = None
def __repr__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def __str__(self):
return "<GettFile: %s (%s/%s)>" % (self.filename, self.sharename, self.fileid)
def contents(self):
"""
This method downloads the contents of the file represented by a `GettFile` object's metadata.
Input:
* None
Output:
* A byte stream
**NOTE**: You are responsible for handling any encoding/decoding which may be necessary.
Example::
file = client.get_file("4ddfds", 0)
print file.contents()
"""
response = GettRequest().get("/files/%s/%s/blob" % (self.sharename, self.fileid))
return response.response
def thumbnail(self):
"""
This method returns a thumbnail representation of the file if the data is a supported graphics format.
Input:
* None
Output:
* A byte stream representing a thumbnail of a support graphics file
Example::
file = client.get_file("4ddfds", 0)
open("thumbnail.jpg", "wb").write(file.thumbnail())
"""
response = GettRequest().get("/files/%s/%s/blob/thumb" % (self.sharename, self.fileid))
return response.response
def destroy(self):
"""
This method removes the file's content and metadata from the Gett service. There is no way to recover
the data once this method has successfully completed.
Input:
* None
Output:
* ``True``
Example::
client.get_file("4ddfds", 0).destroy()
"""
response = GettRequest().post(("/files/%s/%s/destroy?accesstoken=%s" % self.user.access_token()), None)
if response.http_status == 200:
return True
def upload_url(self):
"""
This method generates URLs which allow overwriting a file's content with new content. The output is suitable
for use in the ``send_data()`` method below.
Input:
* None
Output:
* A URL (string)
Example::
file = client.get_file("4ddfds", 0)
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
"""
if self.put_upload_url:
return self.put_upload_url
else:
response = GettRequest().get("/files/%s/%s/upload?accesstoken=%s" % (self.sharename, self.fileid, self.user.access_token()))
if response.http_status == 200:
return response.response['puturl']
def refresh(self):
"""
Retrieve current file metadata from the Gett service.
Input:
* None
Output:
* None
Example::
file = client.get_file("4ddfds", 0)
print "File size: %s" % file.size # File size: 96
file.send_data(put_url=file.upload_url, data=open("example.txt", "rb").read())
file.refresh()
print "File size: %s" % file.size # File size: 109
"""
response = GettRequest().get("/files/%s/%s" % (self.sharename, self.fileid))
if response.http_status == 200:
self.__init__(self.user, response.response)
|
mrallen1/pygett
|
pygett/shares.py
|
GettShare.update
|
python
|
def update(self, **kwargs):
if 'title' in kwargs:
params = {"title": kwargs['title']}
else:
params = {"title": None}
response = GettRequest().post("/shares/%s/update?accesstoken=%s" % (self.sharename, self.user.access_token()), params)
if response.http_status == 200:
self.__init__(self.user, **response.response)
|
Add, remove or modify a share's title.
Input:
* ``title`` The share title, if any (optional)
**NOTE**: Passing ``None`` or calling this method with an empty argument list will remove the share's title.
Output:
* None
Example::
share = client.get_share("4ddfds")
share.update(title="Example") # Set title to Example
share.update() # Remove title
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/shares.py#L38-L64
|
[
"def post(self, endpoint, d, *args, **kwargs):\n \"\"\"\n **post**\n\n Make a POST call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n * POST data\n\n **NOTE**: Passed POST data will be automatically serialized to a JSON string\n if it's not already a string\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n if not isinstance(d, str):\n d = json.dumps(d)\n\n return self._make_request(endpoint, type='POST', data=d)\n",
"def __init__(self, user, **kwargs):\n self.user = user\n self.sharename = None\n self.title = None\n self.created = None\n self.files = list()\n\n if 'files' in kwargs:\n files = kwargs['files']\n del kwargs['files']\n for f in files:\n if not 'sharename' in f:\n f['sharename'] = kwargs['sharename']\n self.files.append(GettFile(self.user, **f))\n\n self.__dict__.update(kwargs)\n"
] |
class GettShare(object):
    """
    Encapsulate a share in the Gett service.
    **Attributes**
    - ``sharename`` The sharename
    - ``title`` The share title (if any)
    - ``created`` Unix epoch seconds when the share was created
    - ``files`` A list of all files contained in a share as :py:mod:`pygett.files.GettFile` objects
    """
    def __init__(self, user, **kwargs):
        # Populate attributes from a raw API response dict.  ``user`` is
        # kept so later calls can reuse its access token.
        self.user = user
        self.sharename = None
        self.title = None
        self.created = None
        self.files = list()
        if 'files' in kwargs:
            # Wrap each raw file dict in a GettFile.  Nested file records
            # may omit the sharename, so backfill it from the share itself
            # before constructing the object.
            files = kwargs['files']
            del kwargs['files']
            for f in files:
                if not 'sharename' in f:
                    f['sharename'] = kwargs['sharename']
                self.files.append(GettFile(self.user, **f))
        # Any remaining response keys (sharename, title, created, ...)
        # become instance attributes verbatim.
        self.__dict__.update(kwargs)
    def __repr__(self):
        return "<GettShare: %s>" % self.sharename
    def __str__(self):
        return "<GettShare: %s>" % self.sharename
    def destroy(self):
        """
        This method removes this share and all of its associated files. There is no way to recover a share or its contents
        once this method has been called.
        Input:
        * None
        Output:
        * ``True``
        Example::
        client.get_share("4ddfds").destroy()
        """
        # POST with an empty body; only a 200 status returns an explicit
        # True (any other status falls through and returns None).
        response = GettRequest().post("/shares/%s/destroy?accesstoken=%s" % (self.sharename, self.user.access_token()), None)
        if response.http_status == 200:
            return True
    def refresh(self):
        """
        This method refreshes the object with current metadata from the Gett service.
        Input:
        * None
        Output:
        * None
        Example::
        share = client.get_share("4ddfds")
        print share.files[0].filename # prints 'foobar'
        if share.files[0].destroy():
        share.refresh()
        print share.files[0].filename # now prints 'barbaz'
        """
        # Re-fetch the share and re-run __init__ to overwrite all local
        # attributes in place with the fresh server-side state.
        response = GettRequest().get("/shares/%s" % self.sharename)
        if response.http_status == 200:
            self.__init__(self.user, **response.response)
|
mrallen1/pygett
|
pygett/shares.py
|
GettShare.destroy
|
python
|
def destroy(self):
    """Permanently delete this share and every file it contains.

    Irreversible.  Returns ``True`` on an HTTP 200 reply; otherwise
    falls through and returns ``None``.
    """
    endpoint = "/shares/%s/destroy?accesstoken=%s" % (
        self.sharename, self.user.access_token())
    reply = GettRequest().post(endpoint, None)
    if reply.http_status == 200:
        return True
|
This method removes this share and all of its associated files. There is no way to recover a share or its contents
once this method has been called.
Input:
* None
Output:
* ``True``
Example::
client.get_share("4ddfds").destroy()
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/shares.py#L66-L84
|
[
"def post(self, endpoint, d, *args, **kwargs):\n \"\"\"\n **post**\n\n Make a POST call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n * POST data\n\n **NOTE**: Passed POST data will be automatically serialized to a JSON string\n if it's not already a string\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n if not isinstance(d, str):\n d = json.dumps(d)\n\n return self._make_request(endpoint, type='POST', data=d)\n"
] |
class GettShare(object):
"""
Encapsulate a share in the Gett service.
**Attributes**
- ``sharename`` The sharename
- ``title`` The share title (if any)
- ``created`` Unix epoch seconds when the share was created
- ``files`` A list of all files contained in a share as :py:mod:`pygett.files.GettFile` objects
"""
def __init__(self, user, **kwargs):
self.user = user
self.sharename = None
self.title = None
self.created = None
self.files = list()
if 'files' in kwargs:
files = kwargs['files']
del kwargs['files']
for f in files:
if not 'sharename' in f:
f['sharename'] = kwargs['sharename']
self.files.append(GettFile(self.user, **f))
self.__dict__.update(kwargs)
def __repr__(self):
return "<GettShare: %s>" % self.sharename
def __str__(self):
return "<GettShare: %s>" % self.sharename
def update(self, **kwargs):
"""
Add, remove or modify a share's title.
Input:
* ``title`` The share title, if any (optional)
**NOTE**: Passing ``None`` or calling this method with an empty argument list will remove the share's title.
Output:
* None
Example::
share = client.get_share("4ddfds")
share.update(title="Example") # Set title to Example
share.update() # Remove title
"""
if 'title' in kwargs:
params = {"title": kwargs['title']}
else:
params = {"title": None}
response = GettRequest().post("/shares/%s/update?accesstoken=%s" % (self.sharename, self.user.access_token()), params)
if response.http_status == 200:
self.__init__(self.user, **response.response)
def refresh(self):
"""
This method refreshes the object with current metadata from the Gett service.
Input:
* None
Output:
* None
Example::
share = client.get_share("4ddfds")
print share.files[0].filename # prints 'foobar'
if share.files[0].destroy():
share.refresh()
print share.files[0].filename # now prints 'barbaz'
"""
response = GettRequest().get("/shares/%s" % self.sharename)
if response.http_status == 200:
self.__init__(self.user, **response.response)
|
mrallen1/pygett
|
pygett/shares.py
|
GettShare.refresh
|
python
|
def refresh(self):
    """Reload this object's metadata from the Gett service.

    On an HTTP 200 reply, re-runs ``__init__`` so every attribute is
    overwritten in place with current server-side state.
    """
    reply = GettRequest().get("/shares/%s" % self.sharename)
    if reply.http_status == 200:
        self.__init__(self.user, **reply.response)
|
This method refreshes the object with current metadata from the Gett service.
Input:
* None
Output:
* None
Example::
share = client.get_share("4ddfds")
print share.files[0].filename # prints 'foobar'
if share.files[0].destroy():
share.refresh()
print share.files[0].filename # now prints 'barbaz'
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/shares.py#L86-L107
|
[
"def get(self, endpoint, *args, **kwargs):\n \"\"\"\n **get**\n\n Make a GET call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n return self._make_request(endpoint, type='GET')\n",
"def __init__(self, user, **kwargs):\n self.user = user\n self.sharename = None\n self.title = None\n self.created = None\n self.files = list()\n\n if 'files' in kwargs:\n files = kwargs['files']\n del kwargs['files']\n for f in files:\n if not 'sharename' in f:\n f['sharename'] = kwargs['sharename']\n self.files.append(GettFile(self.user, **f))\n\n self.__dict__.update(kwargs)\n"
] |
class GettShare(object):
"""
Encapsulate a share in the Gett service.
**Attributes**
- ``sharename`` The sharename
- ``title`` The share title (if any)
- ``created`` Unix epoch seconds when the share was created
- ``files`` A list of all files contained in a share as :py:mod:`pygett.files.GettFile` objects
"""
def __init__(self, user, **kwargs):
self.user = user
self.sharename = None
self.title = None
self.created = None
self.files = list()
if 'files' in kwargs:
files = kwargs['files']
del kwargs['files']
for f in files:
if not 'sharename' in f:
f['sharename'] = kwargs['sharename']
self.files.append(GettFile(self.user, **f))
self.__dict__.update(kwargs)
def __repr__(self):
return "<GettShare: %s>" % self.sharename
def __str__(self):
return "<GettShare: %s>" % self.sharename
def update(self, **kwargs):
"""
Add, remove or modify a share's title.
Input:
* ``title`` The share title, if any (optional)
**NOTE**: Passing ``None`` or calling this method with an empty argument list will remove the share's title.
Output:
* None
Example::
share = client.get_share("4ddfds")
share.update(title="Example") # Set title to Example
share.update() # Remove title
"""
if 'title' in kwargs:
params = {"title": kwargs['title']}
else:
params = {"title": None}
response = GettRequest().post("/shares/%s/update?accesstoken=%s" % (self.sharename, self.user.access_token()), params)
if response.http_status == 200:
self.__init__(self.user, **response.response)
def destroy(self):
"""
This method removes this share and all of its associated files. There is no way to recover a share or its contents
once this method has been called.
Input:
* None
Output:
* ``True``
Example::
client.get_share("4ddfds").destroy()
"""
response = GettRequest().post("/shares/%s/destroy?accesstoken=%s" % (self.sharename, self.user.access_token()), None)
if response.http_status == 200:
return True
|
mrallen1/pygett
|
pygett/base.py
|
Gett.get_shares
|
python
|
def get_shares(self, **kwargs):
    """Fetch *all* shares as a dict keyed by sharename.

    Optional keywords ``skip`` and ``limit`` are forwarded to the API.
    Returns an empty dict on a non-200 response.
    """
    reply = self._get_shares(**kwargs)
    if reply.http_status != 200:
        return dict()
    return dict(
        (item['sharename'], GettShare(self.user, **item))
        for item in reply.response
    )
|
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a dict where keys are sharenames and the values are corresponding :py:mod:`pygett.shares.GettShare` objects
Example::
shares = client.get_shares()
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/base.py#L63-L87
|
[
"def _get_shares(self, **kwargs):\n endpoint = \"/shares?accesstoken=%s\" % self.user.access_token()\n if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:\n endpoint = endpoint + \"&limit=%d\" % kwargs['limit']\n if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:\n endpoint = endpoint + \"&skip=%d\" % kwargs['skip']\n\n return GettRequest().get(endpoint)\n"
] |
class Gett(object):
    """
    Base client object
    Requires the following keyword arguments:
    - ``apikey`` - The API key assigned to an application by Gett
    - ``email`` - The email address linked to the API key
    - ``password`` - The password linked to the API key
    **Attribute**
    - ``user`` - a :py:mod:`pygett.user.GettUser` object
    """
    def __init__(self, *args, **kwargs):
        # All three credentials are mandatory; _check_params raises
        # AttributeError on any missing/invalid entry before the user
        # object is constructed.
        self.required_params = [
            'apikey',
            'email',
            'password'
        ]
        self._check_params(**kwargs)
        self.user = GettUser(kwargs['apikey'], kwargs['email'], kwargs['password'])
    def _check_params(self, **kwargs):
        # Validate constructor keyword arguments: presence, truthiness,
        # and basic per-parameter type/format checks.
        if not kwargs:
            raise AttributeError('Missing required parameters: %s' % self.required_params)
        for param in self.required_params:
            if param not in kwargs:
                raise AttributeError('Missing required parameter %s' % param)
        for k, v in kwargs.items():
            if not v:
                raise AttributeError('Parameter %s must not be None' % k)
            if k == 'apikey':
                if not isinstance(v, str):
                    raise AttributeError("Parameter 'apikey' must be a string")
            if k == 'email':
                # Loose sanity check only: "something@something".
                if not re.search(r'\w+@\w+', v):
                    raise AttributeError("Parameter 'email' must be an email address")
            if k == 'password':
                if not isinstance(v, str):
                    raise AttributeError("Parameter 'password' must be a string")
    def _get_shares(self, **kwargs):
        # Build the /shares endpoint; limit/skip query parameters are
        # appended only when given as positive integers.
        endpoint = "/shares?accesstoken=%s" % self.user.access_token()
        if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:
            endpoint = endpoint + "&limit=%d" % kwargs['limit']
        if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:
            endpoint = endpoint + "&skip=%d" % kwargs['skip']
        return GettRequest().get(endpoint)
    def get_shares_list(self, **kwargs):
        """
        Gets *all* shares.
        Input:
        * ``skip`` the number of shares to skip (optional)
        * ``limit`` the maximum number of shares to return (optional)
        Output:
        * a list of :py:mod:`pygett.shares.GettShare` objects
        Example::
        shares_list = client.get_shares_list()
        """
        # Returns an empty list on a non-200 response.
        response = self._get_shares(**kwargs)
        rv = list()
        if response.http_status == 200:
            for share in response.response:
                rv.append(GettShare(self.user, **share))
        return rv
    def get_share(self, sharename):
        """
        Get a specific share. Does not require authentication.
        Input:
        * A sharename
        Output:
        * A :py:mod:`pygett.shares.GettShare` object
        Example::
        share = client.get_share("4ddfds")
        """
        # Non-200 responses fall through and return None.
        response = GettRequest().get("/shares/%s" % sharename)
        if response.http_status == 200:
            return GettShare(self.user, **response.response)
    def get_file(self, sharename, fileid):
        """
        Get a specific file. Does not require authentication.
        Input:
        * A sharename
        * A fileid - must be an integer
        Output:
        * A :py:mod:`pygett.files.GettFile` object
        Example::
        file = client.get_file("4ddfds", 0)
        """
        # Fail fast before any network call if fileid is not an int.
        if not isinstance(fileid, int):
            raise TypeError("'fileid' must be an integer")
        response = GettRequest().get("/files/%s/%d" % (sharename, fileid))
        if response.http_status == 200:
            return GettFile(self.user, **response.response)
    def create_share(self, **kwargs):
        """
        Create a new share. Takes a keyword argument.
        Input:
        * ``title`` optional share title (optional)
        Output:
        * A :py:mod:`pygett.shares.GettShare` object
        Example::
        new_share = client.create_share( title="Example Title" )
        """
        # params stays None (no POST body) unless a title was supplied.
        params = None
        if 'title' in kwargs:
            params = {"title": kwargs['title']}
        response = GettRequest().post(("/shares/create?accesstoken=%s" % self.user.access_token()), params)
        if response.http_status == 200:
            return GettShare(self.user, **response.response)
    def upload_file(self, **kwargs):
        """
        Upload a file to the Gett service. Takes keyword arguments.
        Input:
        * ``filename`` the filename to use in the Gett service (required)
        * ``data`` the file contents to store in the Gett service (required) - must be a string
        * ``sharename`` the name of the share in which to store the data (optional); if not given, a new share will be created.
        * ``title`` the share title to use if a new share is created (optional)
        Output:
        * A :py:mod:`pygett.files.GettFile` object
        Example::
        file = client.upload_file(filename="foo", data=open("foo.txt").read())
        """
        # filename and data are both mandatory.
        params = None
        if 'filename' not in kwargs:
            raise AttributeError("Parameter 'filename' must be given")
        else:
            params = {
                "filename": kwargs['filename']
            }
        if 'data' not in kwargs:
            raise AttributeError("Parameter 'data' must be given")
        # Resolve the target share: use the given sharename, or create a
        # fresh share (optionally titled) to hold the file.
        sharename = None
        if 'sharename' not in kwargs:
            share = None
            if 'title' in kwargs:
                share = self.create_share(title=kwargs['title'])
            else:
                share = self.create_share()
            sharename = share.sharename
        else:
            sharename = kwargs['sharename']
        # Register the file with the service, then push its contents.
        response = GettRequest().post("/files/%s/create?accesstoken=%s" % (sharename, self.user.access_token()), params)
        f = None
        if response.http_status == 200:
            # The create response may omit sharename; backfill it so the
            # GettFile knows which share it belongs to.
            if 'sharename' not in response.response:
                response.response['sharename'] = sharename
            f = GettFile(self.user, **response.response)
            # Only return the file object if the data upload succeeded;
            # otherwise fall through and return None.
            if f.send_data(data=kwargs['data']):
                return f
|
mrallen1/pygett
|
pygett/base.py
|
Gett.get_shares_list
|
python
|
def get_shares_list(self, **kwargs):
    """Fetch *all* shares as a list of GettShare objects.

    Optional keywords ``skip`` and ``limit`` are forwarded to the API.
    Returns an empty list on a non-200 response.
    """
    reply = self._get_shares(**kwargs)
    if reply.http_status != 200:
        return list()
    return [GettShare(self.user, **item) for item in reply.response]
|
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a list of :py:mod:`pygett.shares.GettShare` objects
Example::
shares_list = client.get_shares_list()
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/base.py#L89-L113
|
[
"def _get_shares(self, **kwargs):\n endpoint = \"/shares?accesstoken=%s\" % self.user.access_token()\n if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:\n endpoint = endpoint + \"&limit=%d\" % kwargs['limit']\n if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:\n endpoint = endpoint + \"&skip=%d\" % kwargs['skip']\n\n return GettRequest().get(endpoint)\n"
] |
class Gett(object):
"""
Base client object
Requires the following keyword arguments:
- ``apikey`` - The API key assigned to an application by Gett
- ``email`` - The email address linked to the API key
- ``password`` - The password linked to the API key
**Attribute**
- ``user`` - a :py:mod:`pygett.user.GettUser` object
"""
def __init__(self, *args, **kwargs):
self.required_params = [
'apikey',
'email',
'password'
]
self._check_params(**kwargs)
self.user = GettUser(kwargs['apikey'], kwargs['email'], kwargs['password'])
def _check_params(self, **kwargs):
if not kwargs:
raise AttributeError('Missing required parameters: %s' % self.required_params)
for param in self.required_params:
if param not in kwargs:
raise AttributeError('Missing required parameter %s' % param)
for k, v in kwargs.items():
if not v:
raise AttributeError('Parameter %s must not be None' % k)
if k == 'apikey':
if not isinstance(v, str):
raise AttributeError("Parameter 'apikey' must be a string")
if k == 'email':
if not re.search(r'\w+@\w+', v):
raise AttributeError("Parameter 'email' must be an email address")
if k == 'password':
if not isinstance(v, str):
raise AttributeError("Parameter 'password' must be a string")
def _get_shares(self, **kwargs):
endpoint = "/shares?accesstoken=%s" % self.user.access_token()
if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:
endpoint = endpoint + "&limit=%d" % kwargs['limit']
if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:
endpoint = endpoint + "&skip=%d" % kwargs['skip']
return GettRequest().get(endpoint)
def get_shares(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a dict where keys are sharenames and the values are corresponding :py:mod:`pygett.shares.GettShare` objects
Example::
shares = client.get_shares()
"""
response = self._get_shares(**kwargs)
rv = dict()
if response.http_status == 200:
for share in response.response:
rv[share['sharename']] = GettShare(self.user, **share)
return rv
def get_share(self, sharename):
"""
Get a specific share. Does not require authentication.
Input:
* A sharename
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
share = client.get_share("4ddfds")
"""
response = GettRequest().get("/shares/%s" % sharename)
if response.http_status == 200:
return GettShare(self.user, **response.response)
def get_file(self, sharename, fileid):
"""
Get a specific file. Does not require authentication.
Input:
* A sharename
* A fileid - must be an integer
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.get_file("4ddfds", 0)
"""
if not isinstance(fileid, int):
raise TypeError("'fileid' must be an integer")
response = GettRequest().get("/files/%s/%d" % (sharename, fileid))
if response.http_status == 200:
return GettFile(self.user, **response.response)
def create_share(self, **kwargs):
"""
Create a new share. Takes a keyword argument.
Input:
* ``title`` optional share title (optional)
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
new_share = client.create_share( title="Example Title" )
"""
params = None
if 'title' in kwargs:
params = {"title": kwargs['title']}
response = GettRequest().post(("/shares/create?accesstoken=%s" % self.user.access_token()), params)
if response.http_status == 200:
return GettShare(self.user, **response.response)
def upload_file(self, **kwargs):
"""
Upload a file to the Gett service. Takes keyword arguments.
Input:
* ``filename`` the filename to use in the Gett service (required)
* ``data`` the file contents to store in the Gett service (required) - must be a string
* ``sharename`` the name of the share in which to store the data (optional); if not given, a new share will be created.
* ``title`` the share title to use if a new share is created (optional)
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.upload_file(filaname="foo", data=open("foo.txt").read())
"""
params = None
if 'filename' not in kwargs:
raise AttributeError("Parameter 'filename' must be given")
else:
params = {
"filename": kwargs['filename']
}
if 'data' not in kwargs:
raise AttributeError("Parameter 'data' must be given")
sharename = None
if 'sharename' not in kwargs:
share = None
if 'title' in kwargs:
share = self.create_share(title=kwargs['title'])
else:
share = self.create_share()
sharename = share.sharename
else:
sharename = kwargs['sharename']
response = GettRequest().post("/files/%s/create?accesstoken=%s" % (sharename, self.user.access_token()), params)
f = None
if response.http_status == 200:
if 'sharename' not in response.response:
response.response['sharename'] = sharename
f = GettFile(self.user, **response.response)
if f.send_data(data=kwargs['data']):
return f
|
mrallen1/pygett
|
pygett/base.py
|
Gett.get_share
|
python
|
def get_share(self, sharename):
    """Fetch one share by name (no authentication required).

    Returns a GettShare on an HTTP 200 reply; otherwise falls through
    and returns ``None``.
    """
    reply = GettRequest().get("/shares/%s" % sharename)
    if reply.http_status == 200:
        return GettShare(self.user, **reply.response)
|
Get a specific share. Does not require authentication.
Input:
* A sharename
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
share = client.get_share("4ddfds")
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/base.py#L115-L133
|
[
"def get(self, endpoint, *args, **kwargs):\n \"\"\"\n **get**\n\n Make a GET call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n return self._make_request(endpoint, type='GET')\n"
] |
class Gett(object):
"""
Base client object
Requires the following keyword arguments:
- ``apikey`` - The API key assigned to an application by Gett
- ``email`` - The email address linked to the API key
- ``password`` - The password linked to the API key
**Attribute**
- ``user`` - a :py:mod:`pygett.user.GettUser` object
"""
def __init__(self, *args, **kwargs):
self.required_params = [
'apikey',
'email',
'password'
]
self._check_params(**kwargs)
self.user = GettUser(kwargs['apikey'], kwargs['email'], kwargs['password'])
def _check_params(self, **kwargs):
if not kwargs:
raise AttributeError('Missing required parameters: %s' % self.required_params)
for param in self.required_params:
if param not in kwargs:
raise AttributeError('Missing required parameter %s' % param)
for k, v in kwargs.items():
if not v:
raise AttributeError('Parameter %s must not be None' % k)
if k == 'apikey':
if not isinstance(v, str):
raise AttributeError("Parameter 'apikey' must be a string")
if k == 'email':
if not re.search(r'\w+@\w+', v):
raise AttributeError("Parameter 'email' must be an email address")
if k == 'password':
if not isinstance(v, str):
raise AttributeError("Parameter 'password' must be a string")
def _get_shares(self, **kwargs):
endpoint = "/shares?accesstoken=%s" % self.user.access_token()
if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:
endpoint = endpoint + "&limit=%d" % kwargs['limit']
if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:
endpoint = endpoint + "&skip=%d" % kwargs['skip']
return GettRequest().get(endpoint)
def get_shares(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a dict where keys are sharenames and the values are corresponding :py:mod:`pygett.shares.GettShare` objects
Example::
shares = client.get_shares()
"""
response = self._get_shares(**kwargs)
rv = dict()
if response.http_status == 200:
for share in response.response:
rv[share['sharename']] = GettShare(self.user, **share)
return rv
def get_shares_list(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a list of :py:mod:`pygett.shares.GettShare` objects
Example::
shares_list = client.get_shares_list()
"""
response = self._get_shares(**kwargs)
rv = list()
if response.http_status == 200:
for share in response.response:
rv.append(GettShare(self.user, **share))
return rv
def get_file(self, sharename, fileid):
"""
Get a specific file. Does not require authentication.
Input:
* A sharename
* A fileid - must be an integer
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.get_file("4ddfds", 0)
"""
if not isinstance(fileid, int):
raise TypeError("'fileid' must be an integer")
response = GettRequest().get("/files/%s/%d" % (sharename, fileid))
if response.http_status == 200:
return GettFile(self.user, **response.response)
def create_share(self, **kwargs):
"""
Create a new share. Takes a keyword argument.
Input:
* ``title`` optional share title (optional)
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
new_share = client.create_share( title="Example Title" )
"""
params = None
if 'title' in kwargs:
params = {"title": kwargs['title']}
response = GettRequest().post(("/shares/create?accesstoken=%s" % self.user.access_token()), params)
if response.http_status == 200:
return GettShare(self.user, **response.response)
def upload_file(self, **kwargs):
"""
Upload a file to the Gett service. Takes keyword arguments.
Input:
* ``filename`` the filename to use in the Gett service (required)
* ``data`` the file contents to store in the Gett service (required) - must be a string
* ``sharename`` the name of the share in which to store the data (optional); if not given, a new share will be created.
* ``title`` the share title to use if a new share is created (optional)
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.upload_file(filaname="foo", data=open("foo.txt").read())
"""
params = None
if 'filename' not in kwargs:
raise AttributeError("Parameter 'filename' must be given")
else:
params = {
"filename": kwargs['filename']
}
if 'data' not in kwargs:
raise AttributeError("Parameter 'data' must be given")
sharename = None
if 'sharename' not in kwargs:
share = None
if 'title' in kwargs:
share = self.create_share(title=kwargs['title'])
else:
share = self.create_share()
sharename = share.sharename
else:
sharename = kwargs['sharename']
response = GettRequest().post("/files/%s/create?accesstoken=%s" % (sharename, self.user.access_token()), params)
f = None
if response.http_status == 200:
if 'sharename' not in response.response:
response.response['sharename'] = sharename
f = GettFile(self.user, **response.response)
if f.send_data(data=kwargs['data']):
return f
|
mrallen1/pygett
|
pygett/base.py
|
Gett.get_file
|
python
|
def get_file(self, sharename, fileid):
    """Fetch one file's metadata (no authentication required).

    ``fileid`` must be an int; anything else raises TypeError before
    any network call.  Returns a GettFile on an HTTP 200 reply,
    otherwise falls through and returns ``None``.
    """
    if not isinstance(fileid, int):
        raise TypeError("'fileid' must be an integer")
    reply = GettRequest().get("/files/%s/%d" % (sharename, fileid))
    if reply.http_status == 200:
        return GettFile(self.user, **reply.response)
|
Get a specific file. Does not require authentication.
Input:
* A sharename
* A fileid - must be an integer
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.get_file("4ddfds", 0)
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/base.py#L135-L157
|
[
"def get(self, endpoint, *args, **kwargs):\n \"\"\"\n **get**\n\n Make a GET call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n return self._make_request(endpoint, type='GET')\n"
] |
class Gett(object):
"""
Base client object
Requires the following keyword arguments:
- ``apikey`` - The API key assigned to an application by Gett
- ``email`` - The email address linked to the API key
- ``password`` - The password linked to the API key
**Attribute**
- ``user`` - a :py:mod:`pygett.user.GettUser` object
"""
def __init__(self, *args, **kwargs):
self.required_params = [
'apikey',
'email',
'password'
]
self._check_params(**kwargs)
self.user = GettUser(kwargs['apikey'], kwargs['email'], kwargs['password'])
def _check_params(self, **kwargs):
if not kwargs:
raise AttributeError('Missing required parameters: %s' % self.required_params)
for param in self.required_params:
if param not in kwargs:
raise AttributeError('Missing required parameter %s' % param)
for k, v in kwargs.items():
if not v:
raise AttributeError('Parameter %s must not be None' % k)
if k == 'apikey':
if not isinstance(v, str):
raise AttributeError("Parameter 'apikey' must be a string")
if k == 'email':
if not re.search(r'\w+@\w+', v):
raise AttributeError("Parameter 'email' must be an email address")
if k == 'password':
if not isinstance(v, str):
raise AttributeError("Parameter 'password' must be a string")
def _get_shares(self, **kwargs):
endpoint = "/shares?accesstoken=%s" % self.user.access_token()
if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:
endpoint = endpoint + "&limit=%d" % kwargs['limit']
if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:
endpoint = endpoint + "&skip=%d" % kwargs['skip']
return GettRequest().get(endpoint)
def get_shares(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a dict where keys are sharenames and the values are corresponding :py:mod:`pygett.shares.GettShare` objects
Example::
shares = client.get_shares()
"""
response = self._get_shares(**kwargs)
rv = dict()
if response.http_status == 200:
for share in response.response:
rv[share['sharename']] = GettShare(self.user, **share)
return rv
def get_shares_list(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a list of :py:mod:`pygett.shares.GettShare` objects
Example::
shares_list = client.get_shares_list()
"""
response = self._get_shares(**kwargs)
rv = list()
if response.http_status == 200:
for share in response.response:
rv.append(GettShare(self.user, **share))
return rv
def get_share(self, sharename):
"""
Get a specific share. Does not require authentication.
Input:
* A sharename
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
share = client.get_share("4ddfds")
"""
response = GettRequest().get("/shares/%s" % sharename)
if response.http_status == 200:
return GettShare(self.user, **response.response)
def create_share(self, **kwargs):
"""
Create a new share. Takes a keyword argument.
Input:
* ``title`` optional share title (optional)
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
new_share = client.create_share( title="Example Title" )
"""
params = None
if 'title' in kwargs:
params = {"title": kwargs['title']}
response = GettRequest().post(("/shares/create?accesstoken=%s" % self.user.access_token()), params)
if response.http_status == 200:
return GettShare(self.user, **response.response)
def upload_file(self, **kwargs):
"""
Upload a file to the Gett service. Takes keyword arguments.
Input:
* ``filename`` the filename to use in the Gett service (required)
* ``data`` the file contents to store in the Gett service (required) - must be a string
* ``sharename`` the name of the share in which to store the data (optional); if not given, a new share will be created.
* ``title`` the share title to use if a new share is created (optional)
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.upload_file(filaname="foo", data=open("foo.txt").read())
"""
params = None
if 'filename' not in kwargs:
raise AttributeError("Parameter 'filename' must be given")
else:
params = {
"filename": kwargs['filename']
}
if 'data' not in kwargs:
raise AttributeError("Parameter 'data' must be given")
sharename = None
if 'sharename' not in kwargs:
share = None
if 'title' in kwargs:
share = self.create_share(title=kwargs['title'])
else:
share = self.create_share()
sharename = share.sharename
else:
sharename = kwargs['sharename']
response = GettRequest().post("/files/%s/create?accesstoken=%s" % (sharename, self.user.access_token()), params)
f = None
if response.http_status == 200:
if 'sharename' not in response.response:
response.response['sharename'] = sharename
f = GettFile(self.user, **response.response)
if f.send_data(data=kwargs['data']):
return f
|
mrallen1/pygett
|
pygett/base.py
|
Gett.create_share
|
python
|
def create_share(self, **kwargs):
    """Create a new share, optionally titled.

    When no ``title`` keyword is given the POST body is ``None``.
    Returns a GettShare on an HTTP 200 reply; otherwise falls through
    and returns ``None``.
    """
    payload = {"title": kwargs['title']} if 'title' in kwargs else None
    endpoint = "/shares/create?accesstoken=%s" % self.user.access_token()
    reply = GettRequest().post(endpoint, payload)
    if reply.http_status == 200:
        return GettShare(self.user, **reply.response)
|
Create a new share. Takes a keyword argument.
Input:
* ``title`` optional share title (optional)
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
new_share = client.create_share( title="Example Title" )
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/base.py#L159-L181
|
[
"def post(self, endpoint, d, *args, **kwargs):\n \"\"\"\n **post**\n\n Make a POST call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n * POST data\n\n **NOTE**: Passed POST data will be automatically serialized to a JSON string\n if it's not already a string\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n if not isinstance(d, str):\n d = json.dumps(d)\n\n return self._make_request(endpoint, type='POST', data=d)\n"
] |
class Gett(object):
"""
Base client object
Requires the following keyword arguments:
- ``apikey`` - The API key assigned to an application by Gett
- ``email`` - The email address linked to the API key
- ``password`` - The password linked to the API key
**Attribute**
- ``user`` - a :py:mod:`pygett.user.GettUser` object
"""
def __init__(self, *args, **kwargs):
self.required_params = [
'apikey',
'email',
'password'
]
self._check_params(**kwargs)
self.user = GettUser(kwargs['apikey'], kwargs['email'], kwargs['password'])
def _check_params(self, **kwargs):
if not kwargs:
raise AttributeError('Missing required parameters: %s' % self.required_params)
for param in self.required_params:
if param not in kwargs:
raise AttributeError('Missing required parameter %s' % param)
for k, v in kwargs.items():
if not v:
raise AttributeError('Parameter %s must not be None' % k)
if k == 'apikey':
if not isinstance(v, str):
raise AttributeError("Parameter 'apikey' must be a string")
if k == 'email':
if not re.search(r'\w+@\w+', v):
raise AttributeError("Parameter 'email' must be an email address")
if k == 'password':
if not isinstance(v, str):
raise AttributeError("Parameter 'password' must be a string")
def _get_shares(self, **kwargs):
endpoint = "/shares?accesstoken=%s" % self.user.access_token()
if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:
endpoint = endpoint + "&limit=%d" % kwargs['limit']
if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:
endpoint = endpoint + "&skip=%d" % kwargs['skip']
return GettRequest().get(endpoint)
def get_shares(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a dict where keys are sharenames and the values are corresponding :py:mod:`pygett.shares.GettShare` objects
Example::
shares = client.get_shares()
"""
response = self._get_shares(**kwargs)
rv = dict()
if response.http_status == 200:
for share in response.response:
rv[share['sharename']] = GettShare(self.user, **share)
return rv
def get_shares_list(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a list of :py:mod:`pygett.shares.GettShare` objects
Example::
shares_list = client.get_shares_list()
"""
response = self._get_shares(**kwargs)
rv = list()
if response.http_status == 200:
for share in response.response:
rv.append(GettShare(self.user, **share))
return rv
def get_share(self, sharename):
"""
Get a specific share. Does not require authentication.
Input:
* A sharename
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
share = client.get_share("4ddfds")
"""
response = GettRequest().get("/shares/%s" % sharename)
if response.http_status == 200:
return GettShare(self.user, **response.response)
def get_file(self, sharename, fileid):
"""
Get a specific file. Does not require authentication.
Input:
* A sharename
* A fileid - must be an integer
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.get_file("4ddfds", 0)
"""
if not isinstance(fileid, int):
raise TypeError("'fileid' must be an integer")
response = GettRequest().get("/files/%s/%d" % (sharename, fileid))
if response.http_status == 200:
return GettFile(self.user, **response.response)
def upload_file(self, **kwargs):
"""
Upload a file to the Gett service. Takes keyword arguments.
Input:
* ``filename`` the filename to use in the Gett service (required)
* ``data`` the file contents to store in the Gett service (required) - must be a string
* ``sharename`` the name of the share in which to store the data (optional); if not given, a new share will be created.
* ``title`` the share title to use if a new share is created (optional)
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.upload_file(filaname="foo", data=open("foo.txt").read())
"""
params = None
if 'filename' not in kwargs:
raise AttributeError("Parameter 'filename' must be given")
else:
params = {
"filename": kwargs['filename']
}
if 'data' not in kwargs:
raise AttributeError("Parameter 'data' must be given")
sharename = None
if 'sharename' not in kwargs:
share = None
if 'title' in kwargs:
share = self.create_share(title=kwargs['title'])
else:
share = self.create_share()
sharename = share.sharename
else:
sharename = kwargs['sharename']
response = GettRequest().post("/files/%s/create?accesstoken=%s" % (sharename, self.user.access_token()), params)
f = None
if response.http_status == 200:
if 'sharename' not in response.response:
response.response['sharename'] = sharename
f = GettFile(self.user, **response.response)
if f.send_data(data=kwargs['data']):
return f
|
mrallen1/pygett
|
pygett/base.py
|
Gett.upload_file
|
python
|
def upload_file(self, **kwargs):
params = None
if 'filename' not in kwargs:
raise AttributeError("Parameter 'filename' must be given")
else:
params = {
"filename": kwargs['filename']
}
if 'data' not in kwargs:
raise AttributeError("Parameter 'data' must be given")
sharename = None
if 'sharename' not in kwargs:
share = None
if 'title' in kwargs:
share = self.create_share(title=kwargs['title'])
else:
share = self.create_share()
sharename = share.sharename
else:
sharename = kwargs['sharename']
response = GettRequest().post("/files/%s/create?accesstoken=%s" % (sharename, self.user.access_token()), params)
f = None
if response.http_status == 200:
if 'sharename' not in response.response:
response.response['sharename'] = sharename
f = GettFile(self.user, **response.response)
if f.send_data(data=kwargs['data']):
return f
|
Upload a file to the Gett service. Takes keyword arguments.
Input:
* ``filename`` the filename to use in the Gett service (required)
* ``data`` the file contents to store in the Gett service (required) - must be a string
* ``sharename`` the name of the share in which to store the data (optional); if not given, a new share will be created.
* ``title`` the share title to use if a new share is created (optional)
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.upload_file(filaname="foo", data=open("foo.txt").read())
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/base.py#L183-L230
|
[
"def create_share(self, **kwargs):\n \"\"\"\n Create a new share. Takes a keyword argument.\n\n Input:\n * ``title`` optional share title (optional)\n\n Output:\n * A :py:mod:`pygett.shares.GettShare` object\n\n Example::\n\n new_share = client.create_share( title=\"Example Title\" )\n \"\"\"\n\n params = None\n if 'title' in kwargs:\n params = {\"title\": kwargs['title']}\n\n response = GettRequest().post((\"/shares/create?accesstoken=%s\" % self.user.access_token()), params)\n\n if response.http_status == 200:\n return GettShare(self.user, **response.response)\n",
"def send_data(self, **kwargs):\n \"\"\"\n This method transmits data to the Gett service.\n\n Input:\n * ``put_url`` A PUT url to use when transmitting the data (required)\n * ``data`` A byte stream (required)\n\n Output:\n * ``True``\n\n Example::\n\n if file.send_data(put_url=file.upload_url, data=open(\"example.txt\", \"rb\").read()):\n print \"Your file has been uploaded.\"\n \"\"\"\n put_url = None\n if 'put_url' in kwargs:\n put_url = kwargs['put_url']\n else:\n put_url = self.put_upload_url\n\n if 'data' not in kwargs:\n raise AttributeError(\"'data' parameter is required\")\n\n if not put_url:\n raise AttributeError(\"'put_url' cannot be None\")\n\n if not isinstance(kwargs['data'], str):\n raise TypeError(\"'data' parameter must be of type 'str'\")\n\n response = GettRequest().put(put_url, kwargs['data'])\n\n if response.http_status == 200:\n return True\n",
"def post(self, endpoint, d, *args, **kwargs):\n \"\"\"\n **post**\n\n Make a POST call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n * POST data\n\n **NOTE**: Passed POST data will be automatically serialized to a JSON string\n if it's not already a string\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n if not isinstance(d, str):\n d = json.dumps(d)\n\n return self._make_request(endpoint, type='POST', data=d)\n"
] |
class Gett(object):
"""
Base client object
Requires the following keyword arguments:
- ``apikey`` - The API key assigned to an application by Gett
- ``email`` - The email address linked to the API key
- ``password`` - The password linked to the API key
**Attribute**
- ``user`` - a :py:mod:`pygett.user.GettUser` object
"""
def __init__(self, *args, **kwargs):
self.required_params = [
'apikey',
'email',
'password'
]
self._check_params(**kwargs)
self.user = GettUser(kwargs['apikey'], kwargs['email'], kwargs['password'])
def _check_params(self, **kwargs):
if not kwargs:
raise AttributeError('Missing required parameters: %s' % self.required_params)
for param in self.required_params:
if param not in kwargs:
raise AttributeError('Missing required parameter %s' % param)
for k, v in kwargs.items():
if not v:
raise AttributeError('Parameter %s must not be None' % k)
if k == 'apikey':
if not isinstance(v, str):
raise AttributeError("Parameter 'apikey' must be a string")
if k == 'email':
if not re.search(r'\w+@\w+', v):
raise AttributeError("Parameter 'email' must be an email address")
if k == 'password':
if not isinstance(v, str):
raise AttributeError("Parameter 'password' must be a string")
def _get_shares(self, **kwargs):
endpoint = "/shares?accesstoken=%s" % self.user.access_token()
if 'limit' in kwargs and isinstance(kwargs['limit'], int) and kwargs['limit'] > 0:
endpoint = endpoint + "&limit=%d" % kwargs['limit']
if 'skip' in kwargs and isinstance(kwargs['skip'], int) and kwargs['skip'] > 0:
endpoint = endpoint + "&skip=%d" % kwargs['skip']
return GettRequest().get(endpoint)
def get_shares(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a dict where keys are sharenames and the values are corresponding :py:mod:`pygett.shares.GettShare` objects
Example::
shares = client.get_shares()
"""
response = self._get_shares(**kwargs)
rv = dict()
if response.http_status == 200:
for share in response.response:
rv[share['sharename']] = GettShare(self.user, **share)
return rv
def get_shares_list(self, **kwargs):
"""
Gets *all* shares.
Input:
* ``skip`` the number of shares to skip (optional)
* ``limit`` the maximum number of shares to return (optional)
Output:
* a list of :py:mod:`pygett.shares.GettShare` objects
Example::
shares_list = client.get_shares_list()
"""
response = self._get_shares(**kwargs)
rv = list()
if response.http_status == 200:
for share in response.response:
rv.append(GettShare(self.user, **share))
return rv
def get_share(self, sharename):
"""
Get a specific share. Does not require authentication.
Input:
* A sharename
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
share = client.get_share("4ddfds")
"""
response = GettRequest().get("/shares/%s" % sharename)
if response.http_status == 200:
return GettShare(self.user, **response.response)
def get_file(self, sharename, fileid):
"""
Get a specific file. Does not require authentication.
Input:
* A sharename
* A fileid - must be an integer
Output:
* A :py:mod:`pygett.files.GettFile` object
Example::
file = client.get_file("4ddfds", 0)
"""
if not isinstance(fileid, int):
raise TypeError("'fileid' must be an integer")
response = GettRequest().get("/files/%s/%d" % (sharename, fileid))
if response.http_status == 200:
return GettFile(self.user, **response.response)
def create_share(self, **kwargs):
"""
Create a new share. Takes a keyword argument.
Input:
* ``title`` optional share title (optional)
Output:
* A :py:mod:`pygett.shares.GettShare` object
Example::
new_share = client.create_share( title="Example Title" )
"""
params = None
if 'title' in kwargs:
params = {"title": kwargs['title']}
response = GettRequest().post(("/shares/create?accesstoken=%s" % self.user.access_token()), params)
if response.http_status == 200:
return GettShare(self.user, **response.response)
|
mrallen1/pygett
|
pygett/user.py
|
GettUser.login
|
python
|
def login(self, **params):
if not params:
params = {
"apikey": self.apikey,
"email": self.email,
"password": self.password
}
response = GettRequest().post("/users/login", params)
if response.http_status == 200:
self._access_token = response.response['accesstoken']
self.refresh_token = response.response['refreshtoken']
self.access_token_expires = int(time()) + response.response['expires']
self.userid = response.response['user']['userid']
self.fullname = response.response['user']['fullname']
self.storage_used = response.response['user']['storage']['used']
self.storage_limit = response.response['user']['storage']['limit']
return True
|
**login**
Use the current credentials to get a valid Gett access token.
Input:
* A dict of parameters to use for the login attempt (optional)
Output:
* ``True``
Example::
if client.user.login():
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/user.py#L49-L85
|
[
"def post(self, endpoint, d, *args, **kwargs):\n \"\"\"\n **post**\n\n Make a POST call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n * POST data\n\n **NOTE**: Passed POST data will be automatically serialized to a JSON string\n if it's not already a string\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n if not isinstance(d, str):\n d = json.dumps(d)\n\n return self._make_request(endpoint, type='POST', data=d)\n"
] |
class GettUser(object):
"""
Encapsulates Gett user functionality
**Attributes**
- ``apikey`` The API key assigned by Gett for an application
- ``email`` The email linked to the API key
- ``password`` The password linked to the API key
After a successful login the following attributes are populated:
- ``refresh_token`` Used to get a new valid access token without requiring the API key, email and password
- ``access_token_expires`` - Epoch seconds until the current access token is no longer valid. Typically 86400 seconds from login. (Suitable for use with ``time.localtime()``)
- ``access_token_grace`` - How many seconds before an access token is scheduled to expire to attempt a renewal. (Defaults to 3600 seconds)
- ``userid`` - User ID string supplied by Gett
- ``fullname`` - The full name linked to an authenticated user account
- ``storage_used`` - The amount of storage consumed (in total) for this user account. (Unit: bytes)
- ``storage_limit`` - The maximum number of bytes available for storage. (Unit: bytes)
"""
def __init__(self, apikey, email, password):
self.apikey = apikey
self.email = email
self.password = password
self._access_token = None
self.refresh_token = None
self.access_token_expires = None
self.access_token_grace = 3600
self.userid = None
self.fullname = None
self.storage_used = None
self.storage_limit = None
def __str__(self):
if self.fullname:
return "<GettUser: %s>" % self.fullname
else:
return "<GettUser: %s (not logged in)>" % self.email
def __repr__(self):
if self.fullname:
return "<GettUser: %s>" % self.fullname
else:
return "<GettUser: %s (not logged in)>" % self.email
def access_token(self):
"""
**access_token**
Returns a valid access token. If the user is not currently logged in, attempts to do so.
If the current time exceeds the grace period, attempts to retrieve a new access token.
Input:
* None
Output:
* A valid access token
Example::
print "Your access token is currently %s" % client.user.access_token()
"""
if not self._access_token:
self.login()
if time() > (self.access_token_expires - self.access_token_grace):
self.login({"refreshtoken": self.refresh_token})
return self._access_token
def refresh(self):
"""
**refresh**
Refresh this user object with data from the Gett service
Input:
* None
Output:
* ``True``
Example::
if client.user.refresh():
print "User data refreshed!"
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
"""
response = GettRequest().get("/users/me?accesstoken=%s" % self.access_token())
if response.http_status == 200:
self.userid = response.response['userid']
self.fullname = response.response['fullname']
self.storage_used = response.response['storage']['used']
self.storage_limit = response.response['storage']['limit']
return True
|
mrallen1/pygett
|
pygett/user.py
|
GettUser.access_token
|
python
|
def access_token(self):
if not self._access_token:
self.login()
if time() > (self.access_token_expires - self.access_token_grace):
self.login({"refreshtoken": self.refresh_token})
return self._access_token
|
**access_token**
Returns a valid access token. If the user is not currently logged in, attempts to do so.
If the current time exceeds the grace period, attempts to retrieve a new access token.
Input:
* None
Output:
* A valid access token
Example::
print "Your access token is currently %s" % client.user.access_token()
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/user.py#L87-L110
|
[
"def login(self, **params):\n \"\"\"\n **login**\n\n Use the current credentials to get a valid Gett access token.\n\n Input:\n * A dict of parameters to use for the login attempt (optional)\n\n Output:\n * ``True``\n\n Example::\n\n if client.user.login():\n print \"You have %s bytes of storage remaining.\" % ( client.user.storage_limit - client_user.storage_used )\n \"\"\"\n\n if not params:\n params = {\n \"apikey\": self.apikey,\n \"email\": self.email,\n \"password\": self.password\n }\n\n response = GettRequest().post(\"/users/login\", params)\n\n if response.http_status == 200:\n self._access_token = response.response['accesstoken']\n self.refresh_token = response.response['refreshtoken']\n self.access_token_expires = int(time()) + response.response['expires']\n self.userid = response.response['user']['userid']\n self.fullname = response.response['user']['fullname']\n self.storage_used = response.response['user']['storage']['used']\n self.storage_limit = response.response['user']['storage']['limit']\n\n return True\n"
] |
class GettUser(object):
"""
Encapsulates Gett user functionality
**Attributes**
- ``apikey`` The API key assigned by Gett for an application
- ``email`` The email linked to the API key
- ``password`` The password linked to the API key
After a successful login the following attributes are populated:
- ``refresh_token`` Used to get a new valid access token without requiring the API key, email and password
- ``access_token_expires`` - Epoch seconds until the current access token is no longer valid. Typically 86400 seconds from login. (Suitable for use with ``time.localtime()``)
- ``access_token_grace`` - How many seconds before an access token is scheduled to expire to attempt a renewal. (Defaults to 3600 seconds)
- ``userid`` - User ID string supplied by Gett
- ``fullname`` - The full name linked to an authenticated user account
- ``storage_used`` - The amount of storage consumed (in total) for this user account. (Unit: bytes)
- ``storage_limit`` - The maximum number of bytes available for storage. (Unit: bytes)
"""
def __init__(self, apikey, email, password):
self.apikey = apikey
self.email = email
self.password = password
self._access_token = None
self.refresh_token = None
self.access_token_expires = None
self.access_token_grace = 3600
self.userid = None
self.fullname = None
self.storage_used = None
self.storage_limit = None
def __str__(self):
if self.fullname:
return "<GettUser: %s>" % self.fullname
else:
return "<GettUser: %s (not logged in)>" % self.email
def __repr__(self):
if self.fullname:
return "<GettUser: %s>" % self.fullname
else:
return "<GettUser: %s (not logged in)>" % self.email
def login(self, **params):
"""
**login**
Use the current credentials to get a valid Gett access token.
Input:
* A dict of parameters to use for the login attempt (optional)
Output:
* ``True``
Example::
if client.user.login():
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
"""
if not params:
params = {
"apikey": self.apikey,
"email": self.email,
"password": self.password
}
response = GettRequest().post("/users/login", params)
if response.http_status == 200:
self._access_token = response.response['accesstoken']
self.refresh_token = response.response['refreshtoken']
self.access_token_expires = int(time()) + response.response['expires']
self.userid = response.response['user']['userid']
self.fullname = response.response['user']['fullname']
self.storage_used = response.response['user']['storage']['used']
self.storage_limit = response.response['user']['storage']['limit']
return True
def refresh(self):
"""
**refresh**
Refresh this user object with data from the Gett service
Input:
* None
Output:
* ``True``
Example::
if client.user.refresh():
print "User data refreshed!"
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
"""
response = GettRequest().get("/users/me?accesstoken=%s" % self.access_token())
if response.http_status == 200:
self.userid = response.response['userid']
self.fullname = response.response['fullname']
self.storage_used = response.response['storage']['used']
self.storage_limit = response.response['storage']['limit']
return True
|
mrallen1/pygett
|
pygett/user.py
|
GettUser.refresh
|
python
|
def refresh(self):
response = GettRequest().get("/users/me?accesstoken=%s" % self.access_token())
if response.http_status == 200:
self.userid = response.response['userid']
self.fullname = response.response['fullname']
self.storage_used = response.response['storage']['used']
self.storage_limit = response.response['storage']['limit']
return True
|
**refresh**
Refresh this user object with data from the Gett service
Input:
* None
Output:
* ``True``
Example::
if client.user.refresh():
print "User data refreshed!"
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
|
train
|
https://github.com/mrallen1/pygett/blob/1e21f8674a3634a901af054226670174b5ce2d87/pygett/user.py#L112-L139
|
[
"def get(self, endpoint, *args, **kwargs):\n \"\"\"\n **get**\n\n Make a GET call to a remote endpoint\n\n Input:\n * An endpoint relative to the ``base_url``\n\n Output:\n * A :py:mod:`pygett.request.GettResponse` object\n \"\"\"\n endpoint = self.base_url + endpoint\n return self._make_request(endpoint, type='GET')\n",
"def access_token(self):\n \"\"\"\n **access_token**\n\n Returns a valid access token. If the user is not currently logged in, attempts to do so.\n If the current time exceeds the grace period, attempts to retrieve a new access token.\n\n Input:\n * None\n\n Output:\n * A valid access token\n\n Example::\n\n print \"Your access token is currently %s\" % client.user.access_token()\n \"\"\"\n if not self._access_token:\n self.login()\n\n if time() > (self.access_token_expires - self.access_token_grace):\n self.login({\"refreshtoken\": self.refresh_token})\n\n return self._access_token\n"
] |
class GettUser(object):
"""
Encapsulates Gett user functionality
**Attributes**
- ``apikey`` The API key assigned by Gett for an application
- ``email`` The email linked to the API key
- ``password`` The password linked to the API key
After a successful login the following attributes are populated:
- ``refresh_token`` Used to get a new valid access token without requiring the API key, email and password
- ``access_token_expires`` - Epoch seconds until the current access token is no longer valid. Typically 86400 seconds from login. (Suitable for use with ``time.localtime()``)
- ``access_token_grace`` - How many seconds before an access token is scheduled to expire to attempt a renewal. (Defaults to 3600 seconds)
- ``userid`` - User ID string supplied by Gett
- ``fullname`` - The full name linked to an authenticated user account
- ``storage_used`` - The amount of storage consumed (in total) for this user account. (Unit: bytes)
- ``storage_limit`` - The maximum number of bytes available for storage. (Unit: bytes)
"""
def __init__(self, apikey, email, password):
self.apikey = apikey
self.email = email
self.password = password
self._access_token = None
self.refresh_token = None
self.access_token_expires = None
self.access_token_grace = 3600
self.userid = None
self.fullname = None
self.storage_used = None
self.storage_limit = None
def __str__(self):
if self.fullname:
return "<GettUser: %s>" % self.fullname
else:
return "<GettUser: %s (not logged in)>" % self.email
def __repr__(self):
if self.fullname:
return "<GettUser: %s>" % self.fullname
else:
return "<GettUser: %s (not logged in)>" % self.email
def login(self, **params):
"""
**login**
Use the current credentials to get a valid Gett access token.
Input:
* A dict of parameters to use for the login attempt (optional)
Output:
* ``True``
Example::
if client.user.login():
print "You have %s bytes of storage remaining." % ( client.user.storage_limit - client_user.storage_used )
"""
if not params:
params = {
"apikey": self.apikey,
"email": self.email,
"password": self.password
}
response = GettRequest().post("/users/login", params)
if response.http_status == 200:
self._access_token = response.response['accesstoken']
self.refresh_token = response.response['refreshtoken']
self.access_token_expires = int(time()) + response.response['expires']
self.userid = response.response['user']['userid']
self.fullname = response.response['user']['fullname']
self.storage_used = response.response['user']['storage']['used']
self.storage_limit = response.response['user']['storage']['limit']
return True
def access_token(self):
"""
**access_token**
Returns a valid access token. If the user is not currently logged in, attempts to do so.
If the current time exceeds the grace period, attempts to retrieve a new access token.
Input:
* None
Output:
* A valid access token
Example::
print "Your access token is currently %s" % client.user.access_token()
"""
if not self._access_token:
self.login()
if time() > (self.access_token_expires - self.access_token_grace):
self.login({"refreshtoken": self.refresh_token})
return self._access_token
|
hkff/FodtlMon
|
fodtlmon/fotl/fotl.py
|
IPredicate.eval
|
python
|
def eval(self, valuation=None, trace=None):
args2 = []
for a in self.args:
if isinstance(a, Function) or isinstance(a, IPredicate):
args2.append(Constant(a.eval(valuation=valuation, trace=trace)))
elif isinstance(a, Variable):
found = False
for v in valuation:
if str(v.var) == a.name:
args2.append(Constant(str(v.value.name)))
found = True
break
if not found:
raise Exception("IPredicate instantiation failed : missing vars")
else:
args2.append(Constant(a))
return args2
|
This method should be always called by subclasses
:param valuation
:param trace
:return: Arguments evaluation
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/fotl/fotl.py#L154-L178
| null |
class IPredicate(Exp, metaclass=MetaBase):
"""
Interpreted Predicate
"""
def __init__(self, *args):
# IMPORTANT : Test the size of args, because of subclass super call
self.args = list(args[0] if len(args) == 1 else args)
def __str__(self):
return "%s(%s)" % (self.__class__.__name__, ",".join([str(x) for x in self.args]))
def toCODE(self):
return "%s(%s)" % (self.__class__.__name__, ",".join([str(x) for x in self.args]))
@staticmethod
def get_class_from_name(klass):
return next(filter(lambda x: not issubclass(x, Function) and x.__name__ == klass, MetaBase.classes), None)
|
hkff/FodtlMon
|
fodtlmon/fotl/fotl.py
|
Function.eval
|
python
|
def eval(self, valuation=None, trace=None):
return super().eval(valuation=valuation, trace=trace)
|
This method should be override to return some value
:param valuation
:param trace
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/fotl/fotl.py#L201-L208
|
[
"def eval(self, valuation=None, trace=None):\n \"\"\"\n This method should be always called by subclasses\n :param valuation\n :param trace\n :return: Arguments evaluation\n \"\"\"\n args2 = []\n for a in self.args:\n if isinstance(a, Function) or isinstance(a, IPredicate):\n args2.append(Constant(a.eval(valuation=valuation, trace=trace)))\n\n elif isinstance(a, Variable):\n found = False\n for v in valuation:\n if str(v.var) == a.name:\n args2.append(Constant(str(v.value.name)))\n found = True\n break\n if not found:\n raise Exception(\"IPredicate instantiation failed : missing vars\")\n else:\n args2.append(Constant(a))\n\n return args2\n"
] |
class Function(IPredicate):
"""
Function
"""
@staticmethod
def get_class_from_name(klass):
return next(filter(lambda x: issubclass(x, Function) and x.__name__ == klass, MetaBase.classes), None)
|
hkff/FodtlMon
|
fodtlmon/tools/color.py
|
_pad_input
|
python
|
def _pad_input(incoming):
incoming_expanded = incoming.replace('{', '{{').replace('}', '}}')
for key in _BASE_CODES:
before, after = '{{%s}}' % key, '{%s}' % key
if before in incoming_expanded:
incoming_expanded = incoming_expanded.replace(before, after)
return incoming_expanded
|
Avoid IndexError and KeyError by ignoring un-related fields.
Example: '{0}{autored}' becomes '{{0}}{autored}'.
Positional arguments:
incoming -- the input unicode value.
Returns:
Padded unicode value.
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/tools/color.py#L218-L234
| null |
"""Colorful worry-free console applications for Linux, Mac OS X, and Windows.
Supported natively on Linux and Mac OSX (Just Works), and on Windows it works the same if Windows.enable() is called.
Gives you expected and sane results from methods like len() and .capitalize().
https://github.com/Robpol86/colorclass
https://pypi.python.org/pypi/colorclass
"""
import atexit
from collections import Mapping
import ctypes
import os
import re
import sys
if os.name == 'nt':
import ctypes.wintypes
__author__ = '@Robpol86'
__license__ = 'MIT'
__version__ = '1.1.1'
_BASE_CODES = {
'/all': 0, 'b': 1, 'f': 2, 'i': 3, 'u': 4, 'flash': 5, 'outline': 6, 'negative': 7, 'invis': 8, 'strike': 9,
'/b': 22, '/f': 22, '/i': 23, '/u': 24, '/flash': 25, '/outline': 26, '/negative': 27, '/invis': 28,
'/strike': 29, '/fg': 39, '/bg': 49,
'black': 30, 'red': 31, 'green': 32, 'yellow': 33, 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37,
'bgblack': 40, 'bgred': 41, 'bggreen': 42, 'bgyellow': 43, 'bgblue': 44, 'bgmagenta': 45, 'bgcyan': 46,
'bgwhite': 47,
'hiblack': 90, 'hired': 91, 'higreen': 92, 'hiyellow': 93, 'hiblue': 94, 'himagenta': 95, 'hicyan': 96,
'hiwhite': 97,
'hibgblack': 100, 'hibgred': 101, 'hibggreen': 102, 'hibgyellow': 103, 'hibgblue': 104, 'hibgmagenta': 105,
'hibgcyan': 106, 'hibgwhite': 107,
'autored': None, 'autoblack': None, 'automagenta': None, 'autowhite': None, 'autoblue': None, 'autoyellow': None,
'autogreen': None, 'autocyan': None,
'autobgred': None, 'autobgblack': None, 'autobgmagenta': None, 'autobgwhite': None, 'autobgblue': None,
'autobgyellow': None, 'autobggreen': None, 'autobgcyan': None,
'/black': 39, '/red': 39, '/green': 39, '/yellow': 39, '/blue': 39, '/magenta': 39, '/cyan': 39, '/white': 39,
'/hiblack': 39, '/hired': 39, '/higreen': 39, '/hiyellow': 39, '/hiblue': 39, '/himagenta': 39, '/hicyan': 39,
'/hiwhite': 39,
'/bgblack': 49, '/bgred': 49, '/bggreen': 49, '/bgyellow': 49, '/bgblue': 49, '/bgmagenta': 49, '/bgcyan': 49,
'/bgwhite': 49, '/hibgblack': 49, '/hibgred': 49, '/hibggreen': 49, '/hibgyellow': 49, '/hibgblue': 49,
'/hibgmagenta': 49, '/hibgcyan': 49, '/hibgwhite': 49,
'/autored': 39, '/autoblack': 39, '/automagenta': 39, '/autowhite': 39, '/autoblue': 39, '/autoyellow': 39,
'/autogreen': 39, '/autocyan': 39,
'/autobgred': 49, '/autobgblack': 49, '/autobgmagenta': 49, '/autobgwhite': 49, '/autobgblue': 49,
'/autobgyellow': 49, '/autobggreen': 49, '/autobgcyan': 49,
}
_WINDOWS_CODES = {
'/all': -33, '/fg': -39, '/bg': -49,
'black': 0, 'red': 4, 'green': 2, 'yellow': 6, 'blue': 1, 'magenta': 5, 'cyan': 3, 'white': 7,
'bgblack': -8, 'bgred': 64, 'bggreen': 32, 'bgyellow': 96, 'bgblue': 16, 'bgmagenta': 80, 'bgcyan': 48,
'bgwhite': 112,
'hiblack': 8, 'hired': 12, 'higreen': 10, 'hiyellow': 14, 'hiblue': 9, 'himagenta': 13, 'hicyan': 11, 'hiwhite': 15,
'hibgblack': 128, 'hibgred': 192, 'hibggreen': 160, 'hibgyellow': 224, 'hibgblue': 144, 'hibgmagenta': 208,
'hibgcyan': 176, 'hibgwhite': 240,
'/black': -39, '/red': -39, '/green': -39, '/yellow': -39, '/blue': -39, '/magenta': -39, '/cyan': -39,
'/white': -39, '/hiblack': -39, '/hired': -39, '/higreen': -39, '/hiyellow': -39, '/hiblue': -39, '/himagenta': -39,
'/hicyan': -39, '/hiwhite': -39,
'/bgblack': -49, '/bgred': -49, '/bggreen': -49, '/bgyellow': -49, '/bgblue': -49, '/bgmagenta': -49,
'/bgcyan': -49, '/bgwhite': -49, '/hibgblack': -49, '/hibgred': -49, '/hibggreen': -49, '/hibgyellow': -49,
'/hibgblue': -49, '/hibgmagenta': -49, '/hibgcyan': -49, '/hibgwhite': -49,
}
_RE_GROUP_SEARCH = re.compile(r'(?:\033\[[\d;]+m)+')
_RE_NUMBER_SEARCH = re.compile(r'\033\[([\d;]+)m')
_RE_SPLIT = re.compile(r'(\033\[[\d;]+m)')
PARENT_CLASS = unicode if sys.version_info[0] == 2 else str
class _AutoCodes(Mapping):
    """Read-only mapping of tag names to ANSI codes (based on colorclass.CODES).

    Resolves closing tags and the 16 'auto*' color tags dynamically: each auto
    tag expands to its dark or bright variant depending on the
    LIGHT_BACKGROUND class flag.
    """
    DISABLE_COLORS = False  # When True, _parse_input() expands all tags to ''.
    LIGHT_BACKGROUND = False  # When True, 'auto*' tags pick the dark variant.
    # The 16 tag names whose codes are computed via the properties below
    # instead of being read straight out of _BASE_CODES (where they are None).
    _AUTO_TAGS = frozenset(
        prefix + color
        for prefix in ('auto', 'autobg')
        for color in ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
    )
    def __init__(self):
        self.__dict = _BASE_CODES.copy()
    def __getitem__(self, item):
        # 'auto*' tags are backed by the generated properties below; every
        # other tag is a plain dict lookup (unknown tags raise KeyError).
        if item in self._AUTO_TAGS:
            return getattr(self, item)
        return self.__dict[item]
    def __iter__(self):
        return iter(self.__dict)
    def __len__(self):
        return len(self.__dict)
    def _make_auto(key):
        """Build a property resolving *key* to its dark or bright ANSI code.

        Executed while the class body runs (note: no `self`); deleted below so
        it never becomes part of the public interface.
        """
        def fget(self):
            return self.__dict[key if _AutoCodes.LIGHT_BACKGROUND else 'hi' + key]
        fget.__doc__ = "Returns automatic '{0}' color depending on background color.".format(key)
        return property(fget)
    autoblack = _make_auto('black')
    autored = _make_auto('red')
    autogreen = _make_auto('green')
    autoyellow = _make_auto('yellow')
    autoblue = _make_auto('blue')
    automagenta = _make_auto('magenta')
    autocyan = _make_auto('cyan')
    autowhite = _make_auto('white')
    autobgblack = _make_auto('bgblack')
    autobgred = _make_auto('bgred')
    autobggreen = _make_auto('bggreen')
    autobgyellow = _make_auto('bgyellow')
    autobgblue = _make_auto('bgblue')
    autobgmagenta = _make_auto('bgmagenta')
    autobgcyan = _make_auto('bgcyan')
    autobgwhite = _make_auto('bgwhite')
    del _make_auto
def _parse_input(incoming):
    """Performs the actual conversion of tags to ANSI escaped codes.

    Provides a version of the input without any colors for len() and other methods.

    Positional arguments:
    incoming -- the input unicode value.

    Returns:
    2-item tuple. First item is the parsed output. Second item is a version of the input without any colors.
    """
    tag_codes = dict((name, code) for name, code in _AutoCodes().items() if '{%s}' % name in incoming)
    replacements = dict(
        (name, '' if _AutoCodes.DISABLE_COLORS else '\033[{0}m'.format(code))
        for name, code in tag_codes.items()
    )
    with_colors = _pad_input(incoming).format(**replacements)
    # Collapse adjacent escape runs: '\033[1m\033[31m' -> '\033[1;31m'.
    runs = sorted(set(_RE_GROUP_SEARCH.findall(with_colors)), key=len, reverse=True)
    compacted = []
    for run in runs:
        numbers = [num for match in _RE_NUMBER_SEARCH.findall(run) for num in match.split(';')]
        compacted.append('\033[{0}m'.format(';'.join(numbers)))
    assert len(compacted) == len(runs)  # Sanity check.
    simplified = with_colors
    for run, replacement in zip(runs, compacted):
        simplified = simplified.replace(run, replacement)
    without_colors = _RE_SPLIT.sub('', simplified)
    # Strip any remaining color codes.
    if _AutoCodes.DISABLE_COLORS:
        simplified = _RE_NUMBER_SEARCH.sub('', simplified)
    return simplified, without_colors
def disable_all_colors():
    """Disable all colors. Strips any color tags or codes."""
    # Read during tag expansion; every tag then expands to an empty string.
    _AutoCodes.DISABLE_COLORS = True
def set_light_background():
    """Chooses dark colors for all 'auto'-prefixed codes for readability on light backgrounds."""
    # Also re-enables colors in case disable_all_colors() was called earlier.
    _AutoCodes.DISABLE_COLORS = False
    _AutoCodes.LIGHT_BACKGROUND = True
def set_dark_background():
    """Chooses bright colors for all 'auto'-prefixed codes for readability on dark backgrounds."""
    # Also re-enables colors in case disable_all_colors() was called earlier.
    _AutoCodes.DISABLE_COLORS = False
    _AutoCodes.LIGHT_BACKGROUND = False
def list_tags():
    """Lists the available tags.
    Returns:
    Tuple of tuples. Child tuples are four items: ('opening tag', 'closing tag', main ansi value, closing ansi value).
    """
    codes = _AutoCodes()
    # Pair each opening tag with its closing tag and both ANSI values.
    grouped = set([(k, '/{0}'.format(k), codes[k], codes['/{0}'.format(k)]) for k in codes if not k.startswith('/')])
    # Add half-tags like /all.
    found = [c for r in grouped for c in r[:2]]
    missing = set([('', r[0], None, r[1]) if r[0].startswith('/') else (r[0], '', r[1], None)
                   for r in _AutoCodes().items() if r[0] not in found])
    grouped |= missing
    # Sort. Each pass pulls one category out of `grouped` (which shrinks) and
    # appends it to `payload` in display order.
    payload = sorted([i for i in grouped if i[2] is None], key=lambda x: x[3])  # /all /fg /bg
    grouped -= set(payload)
    payload.extend(sorted([i for i in grouped if i[2] < 10], key=lambda x: x[2]))  # b i u flash
    grouped -= set(payload)
    payload.extend(sorted([i for i in grouped if i[0].startswith('auto')], key=lambda x: x[2]))  # auto colors
    grouped -= set(payload)
    payload.extend(sorted([i for i in grouped if not i[0].startswith('hi')], key=lambda x: x[2]))  # dark colors
    grouped -= set(payload)
    payload.extend(sorted(grouped, key=lambda x: x[2]))  # light colors
    return tuple(payload)
class Color(PARENT_CLASS):
    """Unicode (str in Python3) subclass with ANSI terminal text color support.
    Example syntax: Color('{red}Sample Text{/red}')
    For a list of codes, call: colorclass.list_tags()
    Instances carry two parallel values: ``value_colors`` (text with ANSI
    codes embedded) and ``value_no_colors`` (plain text). Length, search,
    predicate, and padding methods operate on the plain text so escape codes
    do not skew the results.
    """
    def __new__(cls, *args, **kwargs):
        parent_class = cls.__bases__[0]
        value_markup = args[0] if args else parent_class()  # No args -> empty string.
        value_colors, value_no_colors = _parse_input(value_markup)
        if args:
            args = [value_colors] + list(args[1:])
        obj = parent_class.__new__(cls, *args, **kwargs)
        obj.value_colors, obj.value_no_colors = value_colors, value_no_colors
        obj.has_colors = bool(_RE_NUMBER_SEARCH.match(value_colors))
        return obj
    def __len__(self):
        # Visible length only, without ANSI escape codes.
        return self.value_no_colors.__len__()
    def _map_text(self, func):
        """Apply *func* to every plain-text segment, leaving ANSI codes untouched."""
        split = _RE_SPLIT.split(self.value_colors)
        for i in range(len(split)):
            if _RE_SPLIT.match(split[i]):
                continue  # ANSI escape sequence; skip.
            split[i] = func(PARENT_CLASS(split[i]))
        return Color().join(split)
    def _pad(self, method_name, width, fillchar=None):
        """Pad using the color-free length, then substitute the colored text back in."""
        plain = PARENT_CLASS(self.value_no_colors)
        method = getattr(plain, method_name)
        result = method(width) if fillchar is None else method(width, fillchar)
        return result.replace(self.value_no_colors, self.value_colors)
    def capitalize(self):
        return self._map_text(lambda s: s.capitalize())
    def center(self, width, fillchar=None):
        return self._pad('center', width, fillchar)
    def count(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).count(*args, **kwargs)
    def endswith(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).endswith(*args, **kwargs)
    def find(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).find(*args, **kwargs)
    def format(*args, **kwargs):
        # Intentionally no explicit `self`: args[0] is the instance, so the
        # method also works when invoked unbound.
        return Color(super(Color, args[0]).format(*args[1:], **kwargs))
    def index(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).index(*args, **kwargs)
    def isalnum(self):
        return PARENT_CLASS(self.value_no_colors).isalnum()
    def isalpha(self):
        return PARENT_CLASS(self.value_no_colors).isalpha()
    def isdecimal(self):
        return PARENT_CLASS(self.value_no_colors).isdecimal()
    def isdigit(self):
        return PARENT_CLASS(self.value_no_colors).isdigit()
    def isnumeric(self):
        return PARENT_CLASS(self.value_no_colors).isnumeric()
    def isspace(self):
        return PARENT_CLASS(self.value_no_colors).isspace()
    def istitle(self):
        return PARENT_CLASS(self.value_no_colors).istitle()
    def isupper(self):
        return PARENT_CLASS(self.value_no_colors).isupper()
    def ljust(self, width, fillchar=None):
        return self._pad('ljust', width, fillchar)
    def rfind(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).rfind(*args, **kwargs)
    def rindex(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).rindex(*args, **kwargs)
    def rjust(self, width, fillchar=None):
        return self._pad('rjust', width, fillchar)
    def splitlines(self):
        return [Color(l) for l in PARENT_CLASS(self.value_colors).splitlines()]
    def startswith(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).startswith(*args, **kwargs)
    def swapcase(self):
        return self._map_text(lambda s: s.swapcase())
    def title(self):
        return self._map_text(lambda s: s.title())
    def translate(self, table):
        return self._map_text(lambda s: s.translate(table))
    def upper(self):
        return self._map_text(lambda s: s.upper())
    def zfill(self, width):
        if not self.value_no_colors:
            return PARENT_CLASS().zfill(width)  # Nothing visible; plain zero fill.
        split = _RE_SPLIT.split(self.value_colors)
        filled = PARENT_CLASS(self.value_no_colors).zfill(width)
        if len(split) == 1:
            return filled  # No ANSI codes present at all.
        # Re-insert the zero padding: if the string starts with an ANSI code
        # (split[0] is empty, code sits in split[1]), pad after that code;
        # otherwise pad in front of everything.
        padding = filled.replace(self.value_no_colors, '')
        if not split[0]:
            split[2] = padding + split[2]
        else:
            split[0] = padding + split[0]
        return Color().join(split)
class Windows(object):
    """Enable and disable Windows support for ANSI color character codes.
    Call static method Windows.enable() to enable color support for the remainder of the process' lifetime.
    This class is also a context manager. You can do this:
    with Windows():
        print(Color('{autored}Test{/autored}'))
    Or this:
    with Windows(auto_colors=True):
        print(Color('{autored}Test{/autored}'))
    """
    @staticmethod
    def disable():
        """Restore sys.stderr and sys.stdout to their original objects. Resets colors to their original values.
        Returns True if the streams were restored, False on non-Windows platforms or when not enabled.
        """
        if os.name != 'nt' or not Windows.is_enabled():
            return False
        getattr(sys.stderr, '_reset_colors', lambda: False)()
        getattr(sys.stdout, '_reset_colors', lambda: False)()
        if isinstance(sys.stderr, _WindowsStream):
            sys.stderr = getattr(sys.stderr, 'original_stream')
        # Bug fix: the original re-tested sys.stderr here, so a replaced
        # sys.stdout was never restored once stderr had been swapped back.
        if isinstance(sys.stdout, _WindowsStream):
            sys.stdout = getattr(sys.stdout, 'original_stream')
        return True
    @staticmethod
    def is_enabled():
        """Returns True if either stderr or stdout has colors enabled."""
        return isinstance(sys.stderr, _WindowsStream) or isinstance(sys.stdout, _WindowsStream)
    @staticmethod
    def enable(auto_colors=False, reset_atexit=False):
        """Enables color text with print() or sys.stdout.write() (stderr too).
        Keyword arguments:
        auto_colors -- automatically selects dark or light colors based on current terminal's background color. Only
            works with {autored} and related tags.
        reset_atexit -- resets original colors upon Python exit (in case you forget to reset it yourself with a closing
            tag).
        Returns True when the streams were replaced, False otherwise (e.g. on non-Windows platforms).
        """
        if os.name != 'nt':
            return False
        # Overwrite stream references.
        if not isinstance(sys.stderr, _WindowsStream):
            sys.stderr.flush()
            sys.stderr = _WindowsStream(stderr=True)
        if not isinstance(sys.stdout, _WindowsStream):
            sys.stdout.flush()
            sys.stdout = _WindowsStream(stderr=False)
        if not isinstance(sys.stderr, _WindowsStream) and not isinstance(sys.stdout, _WindowsStream):
            return False
        # Automatically select which colors to display.
        bg_color = getattr(sys.stdout, 'default_bg', getattr(sys.stderr, 'default_bg', None))
        if auto_colors and bg_color is not None:
            # These console attribute values are treated as light backgrounds.
            set_light_background() if bg_color in (112, 96, 240, 176, 224, 208, 160) else set_dark_background()
        # Reset on exit if requested.
        if reset_atexit:
            atexit.register(lambda: Windows.disable())
        return True
    def __init__(self, auto_colors=False):
        self.auto_colors = auto_colors
    def __enter__(self):
        Windows.enable(auto_colors=self.auto_colors)
    def __exit__(self, *_):
        Windows.disable()
class _WindowsCSBI(object):
    """Interfaces with Windows CONSOLE_SCREEN_BUFFER_INFO API/DLL calls. Gets info for stderr and stdout.
    References:
    https://code.google.com/p/colorama/issues/detail?id=47.
    pytest's py project: py/_io/terminalwriter.py.
    Class variables:
    CSBI -- ConsoleScreenBufferInfo class/struct (not instance, the class definition itself) defined in _define_csbi().
    HANDLE_STDERR -- GetStdHandle() return integer for stderr.
    HANDLE_STDOUT -- GetStdHandle() return integer for stdout.
    WINDLL -- my own loaded instance of ctypes.WinDLL.
    """
    CSBI = None
    HANDLE_STDERR = None
    HANDLE_STDOUT = None
    # NOTE(review): on non-Windows platforms ctypes has no WinDLL, so this is a
    # loader built around None; it must only be used after an os.name == 'nt' check.
    WINDLL = ctypes.LibraryLoader(getattr(ctypes, 'WinDLL', None))
    @staticmethod
    def _define_csbi():
        """Defines structs and populates _WindowsCSBI.CSBI."""
        if _WindowsCSBI.CSBI is not None:
            return  # Already defined; this method is idempotent.
        class COORD(ctypes.Structure):
            """Windows COORD structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms682119"""
            _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
        class SmallRECT(ctypes.Structure):
            """Windows SMALL_RECT structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms686311"""
            _fields_ = [('Left', ctypes.c_short), ('Top', ctypes.c_short), ('Right', ctypes.c_short),
                        ('Bottom', ctypes.c_short)]
        class ConsoleScreenBufferInfo(ctypes.Structure):
            """Windows CONSOLE_SCREEN_BUFFER_INFO structure.
            http://msdn.microsoft.com/en-us/library/windows/desktop/ms682093
            """
            _fields_ = [
                ('dwSize', COORD),
                ('dwCursorPosition', COORD),
                ('wAttributes', ctypes.wintypes.WORD),
                ('srWindow', SmallRECT),
                ('dwMaximumWindowSize', COORD)
            ]
        _WindowsCSBI.CSBI = ConsoleScreenBufferInfo
    @staticmethod
    def initialize():
        """Initializes the WINDLL resource and populated the CSBI class variable."""
        _WindowsCSBI._define_csbi()
        # -12 and -11 are the win32 STD_ERROR_HANDLE / STD_OUTPUT_HANDLE constants.
        _WindowsCSBI.HANDLE_STDERR = _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-12)
        _WindowsCSBI.HANDLE_STDOUT = _WindowsCSBI.HANDLE_STDOUT or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-11)
        if _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes:
            return  # argtypes already configured on a previous call.
        _WindowsCSBI.WINDLL.kernel32.GetStdHandle.argtypes = [ctypes.wintypes.DWORD]
        _WindowsCSBI.WINDLL.kernel32.GetStdHandle.restype = ctypes.wintypes.HANDLE
        _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.restype = ctypes.wintypes.BOOL
        _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes = [
            ctypes.wintypes.HANDLE, ctypes.POINTER(_WindowsCSBI.CSBI)
        ]
    @staticmethod
    def get_info(handle):
        """Get information about this current console window (for Microsoft Windows only).
        Raises IOError if attempt to get information fails (if there is no console window).
        Don't forget to call _WindowsCSBI.initialize() once in your application before calling this method.
        Positional arguments:
        handle -- either _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.HANDLE_STDOUT.
        Returns:
        Dictionary with different integer values. Keys are:
            buffer_width -- width of the buffer (Screen Buffer Size in cmd.exe layout tab).
            buffer_height -- height of the buffer (Screen Buffer Size in cmd.exe layout tab).
            terminal_width -- width of the terminal window.
            terminal_height -- height of the terminal window.
            bg_color -- current background color (http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088).
            fg_color -- current text color code.
        """
        # Query Win32 API.
        csbi = _WindowsCSBI.CSBI()
        try:
            if not _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo(handle, ctypes.byref(csbi)):
                raise IOError('Unable to get console screen buffer info from win32 API.')
        except ctypes.ArgumentError:
            raise IOError('Unable to get console screen buffer info from win32 API.')
        # Parse data.
        result = dict(
            buffer_width=int(csbi.dwSize.X - 1),
            buffer_height=int(csbi.dwSize.Y),
            terminal_width=int(csbi.srWindow.Right - csbi.srWindow.Left),
            terminal_height=int(csbi.srWindow.Bottom - csbi.srWindow.Top),
            bg_color=int(csbi.wAttributes & 240),  # Bits 4-7 of the attribute word.
            fg_color=int(csbi.wAttributes % 16),  # Low nibble of the attribute word.
        )
        return result
class _WindowsStream(object):
    """Replacement stream (overwrites sys.stdout and sys.stderr). When writing or printing, ANSI codes are converted.
    ANSI (Linux/Unix) color codes are converted into win32 system calls, changing the next character's color before
    printing it. Resources referenced:
    https://github.com/tartley/colorama
    http://www.cplusplus.com/articles/2ywTURfi/
    http://thomasfischer.biz/python-and-windows-terminal-colors/
    http://stackoverflow.com/questions/17125440/c-win32-console-color
    http://www.tysos.org/svn/trunk/mono/corlib/System/WindowsConsoleDriver.cs
    http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
    http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088#_win32_character_attributes
    Class variables:
    ALL_BG_CODES -- list of background Windows codes. Used to determine if requested color is foreground or background.
    COMPILED_CODES -- 'translation' dictionary. Keys are ANSI codes (values of _BASE_CODES), values are Windows codes.
    STD_ERROR_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
    STD_OUTPUT_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
    Instance variables:
    original_stream -- the original stream to write non-code text to.
    win32_stream_handle -- handle to the Windows stderr or stdout device. Used by other Windows functions.
    default_fg -- the foreground Windows color code at the time of instantiation.
    default_bg -- the background Windows color code at the time of instantiation.
    """
    ALL_BG_CODES = [v for k, v in _WINDOWS_CODES.items() if k.startswith('bg') or k.startswith('hibg')]
    COMPILED_CODES = dict((v, _WINDOWS_CODES[k]) for k, v in _BASE_CODES.items() if k in _WINDOWS_CODES)
    def __init__(self, stderr=False):
        # Capture the console's current colors so closing tags can restore them.
        _WindowsCSBI.initialize()
        self.original_stream = sys.stderr if stderr else sys.stdout
        self.win32_stream_handle = _WindowsCSBI.HANDLE_STDERR if stderr else _WindowsCSBI.HANDLE_STDOUT
        self.default_fg, self.default_bg = self._get_colors()
    def __getattr__(self, item):
        """If an attribute/function/etc is not defined in this function, retrieve the one from the original stream.
        Fixes ipython arrow key presses.
        """
        return getattr(self.original_stream, item)
    def _get_colors(self):
        """Returns a tuple of two integers representing current colors: (foreground, background)."""
        try:
            csbi = _WindowsCSBI.get_info(self.win32_stream_handle)
            return csbi['fg_color'], csbi['bg_color']
        except IOError:
            # No console attached; fall back to white-on-black defaults.
            return 7, 0
    def _reset_colors(self):
        """Sets the foreground and background colors to their original values (when class was instantiated)."""
        self._set_color(-33)  # -33 is the '/all' sentinel in _WINDOWS_CODES.
    def _set_color(self, color_code):
        """Changes the foreground and background colors for subsequently printed characters.
        Since setting a color requires including both foreground and background codes (merged), setting just the
        foreground color resets the background color to black, and vice versa.
        This function first gets the current background and foreground colors, merges in the requested color code, and
        sets the result.
        However if we need to remove just the foreground color but leave the background color the same (or vice versa)
        such as when {/red} is used, we must merge the default foreground color with the current background color. This
        is the reason for those negative values.
        Positional arguments:
        color_code -- integer color code from _WINDOWS_CODES.
        """
        # Get current color code.
        current_fg, current_bg = self._get_colors()
        # Handle special negative codes. Also determine the final color code.
        if color_code == -39:
            final_color_code = self.default_fg | current_bg  # Reset the foreground only.
        elif color_code == -49:
            final_color_code = current_fg | self.default_bg  # Reset the background only.
        elif color_code == -33:
            final_color_code = self.default_fg | self.default_bg  # Reset both.
        elif color_code == -8:
            final_color_code = current_fg  # Black background.
        else:
            new_is_bg = color_code in self.ALL_BG_CODES
            final_color_code = color_code | (current_fg if new_is_bg else current_bg)
        # Set new code.
        _WindowsCSBI.WINDLL.kernel32.SetConsoleTextAttribute(self.win32_stream_handle, final_color_code)
    def write(self, p_str):
        # Alternate between plain-text segments (forwarded to the original
        # stream) and ANSI escape sequences (translated into win32 calls).
        for segment in _RE_SPLIT.split(p_str):
            if not segment:
                # Empty string. p_str probably starts with colors so the first item is always ''.
                continue
            if not _RE_SPLIT.match(segment):
                # No color codes, print regular text.
                self.original_stream.write(segment)
                self.original_stream.flush()
                continue
            for color_code in (int(c) for c in _RE_NUMBER_SEARCH.findall(segment)[0].split(';')):
                if color_code in self.COMPILED_CODES:
                    self._set_color(self.COMPILED_CODES[color_code])
|
hkff/FodtlMon
|
fodtlmon/tools/color.py
|
_parse_input
|
python
|
def _parse_input(incoming):
codes = dict((k, v) for k, v in _AutoCodes().items() if '{%s}' % k in incoming)
color_codes = dict((k, '' if _AutoCodes.DISABLE_COLORS else '\033[{0}m'.format(v)) for k, v in codes.items())
incoming_padded = _pad_input(incoming)
output_colors = incoming_padded.format(**color_codes)
# Simplify: '{b}{red}' -> '\033[1m\033[31m' -> '\033[1;31m'
groups = sorted(set(_RE_GROUP_SEARCH.findall(output_colors)), key=len, reverse=True) # Get codes, grouped adjacent.
groups_simplified = [[x for n in _RE_NUMBER_SEARCH.findall(i) for x in n.split(';')] for i in groups]
groups_compiled = ['\033[{0}m'.format(';'.join(g)) for g in groups_simplified] # Final codes.
assert len(groups_compiled) == len(groups) # For testing.
output_colors_simplified = output_colors
for i in range(len(groups)):
output_colors_simplified = output_colors_simplified.replace(groups[i], groups_compiled[i])
output_no_colors = _RE_SPLIT.sub('', output_colors_simplified)
# Strip any remaining color codes.
if _AutoCodes.DISABLE_COLORS:
output_colors_simplified = _RE_NUMBER_SEARCH.sub('', output_colors_simplified)
return output_colors_simplified, output_no_colors
|
Performs the actual conversion of tags to ANSI escaped codes.
Provides a version of the input without any colors for len() and other methods.
Positional arguments:
incoming -- the input unicode value.
Returns:
2-item tuple. First item is the parsed output. Second item is a version of the input without any colors.
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/tools/color.py#L237-L267
|
[
"def _pad_input(incoming):\n \"\"\"Avoid IndexError and KeyError by ignoring un-related fields.\n\n Example: '{0}{autored}' becomes '{{0}}{autored}'.\n\n Positional arguments:\n incoming -- the input unicode value.\n\n Returns:\n Padded unicode value.\n \"\"\"\n incoming_expanded = incoming.replace('{', '{{').replace('}', '}}')\n for key in _BASE_CODES:\n before, after = '{{%s}}' % key, '{%s}' % key\n if before in incoming_expanded:\n incoming_expanded = incoming_expanded.replace(before, after)\n return incoming_expanded\n"
] |
"""Colorful worry-free console applications for Linux, Mac OS X, and Windows.
Supported natively on Linux and Mac OSX (Just Works), and on Windows it works the same if Windows.enable() is called.
Gives you expected and sane results from methods like len() and .capitalize().
https://github.com/Robpol86/colorclass
https://pypi.python.org/pypi/colorclass
"""
import atexit
from collections import Mapping
import ctypes
import os
import re
import sys
if os.name == 'nt':
import ctypes.wintypes
__author__ = '@Robpol86'
__license__ = 'MIT'
__version__ = '1.1.1'
# Tag name -> ANSI SGR code. 'auto*' tags map to None and are resolved at
# lookup time by _AutoCodes based on the detected background brightness.
_BASE_CODES = {
    '/all': 0, 'b': 1, 'f': 2, 'i': 3, 'u': 4, 'flash': 5, 'outline': 6, 'negative': 7, 'invis': 8, 'strike': 9,
    '/b': 22, '/f': 22, '/i': 23, '/u': 24, '/flash': 25, '/outline': 26, '/negative': 27, '/invis': 28,
    '/strike': 29, '/fg': 39, '/bg': 49,
    'black': 30, 'red': 31, 'green': 32, 'yellow': 33, 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37,
    'bgblack': 40, 'bgred': 41, 'bggreen': 42, 'bgyellow': 43, 'bgblue': 44, 'bgmagenta': 45, 'bgcyan': 46,
    'bgwhite': 47,
    'hiblack': 90, 'hired': 91, 'higreen': 92, 'hiyellow': 93, 'hiblue': 94, 'himagenta': 95, 'hicyan': 96,
    'hiwhite': 97,
    'hibgblack': 100, 'hibgred': 101, 'hibggreen': 102, 'hibgyellow': 103, 'hibgblue': 104, 'hibgmagenta': 105,
    'hibgcyan': 106, 'hibgwhite': 107,
    'autored': None, 'autoblack': None, 'automagenta': None, 'autowhite': None, 'autoblue': None, 'autoyellow': None,
    'autogreen': None, 'autocyan': None,
    'autobgred': None, 'autobgblack': None, 'autobgmagenta': None, 'autobgwhite': None, 'autobgblue': None,
    'autobgyellow': None, 'autobggreen': None, 'autobgcyan': None,
    '/black': 39, '/red': 39, '/green': 39, '/yellow': 39, '/blue': 39, '/magenta': 39, '/cyan': 39, '/white': 39,
    '/hiblack': 39, '/hired': 39, '/higreen': 39, '/hiyellow': 39, '/hiblue': 39, '/himagenta': 39, '/hicyan': 39,
    '/hiwhite': 39,
    '/bgblack': 49, '/bgred': 49, '/bggreen': 49, '/bgyellow': 49, '/bgblue': 49, '/bgmagenta': 49, '/bgcyan': 49,
    '/bgwhite': 49, '/hibgblack': 49, '/hibgred': 49, '/hibggreen': 49, '/hibgyellow': 49, '/hibgblue': 49,
    '/hibgmagenta': 49, '/hibgcyan': 49, '/hibgwhite': 49,
    '/autored': 39, '/autoblack': 39, '/automagenta': 39, '/autowhite': 39, '/autoblue': 39, '/autoyellow': 39,
    '/autogreen': 39, '/autocyan': 39,
    '/autobgred': 49, '/autobgblack': 49, '/autobgmagenta': 49, '/autobgwhite': 49, '/autobgblue': 49,
    '/autobgyellow': 49, '/autobggreen': 49, '/autobgcyan': 49,
}
# Translation of color tags to Windows console text attribute values.
# Negative values are sentinels interpreted by _WindowsStream._set_color().
_WINDOWS_CODES = {
    '/all': -33, '/fg': -39, '/bg': -49,
    'black': 0, 'red': 4, 'green': 2, 'yellow': 6, 'blue': 1, 'magenta': 5, 'cyan': 3, 'white': 7,
    'bgblack': -8, 'bgred': 64, 'bggreen': 32, 'bgyellow': 96, 'bgblue': 16, 'bgmagenta': 80, 'bgcyan': 48,
    'bgwhite': 112,
    'hiblack': 8, 'hired': 12, 'higreen': 10, 'hiyellow': 14, 'hiblue': 9, 'himagenta': 13, 'hicyan': 11, 'hiwhite': 15,
    'hibgblack': 128, 'hibgred': 192, 'hibggreen': 160, 'hibgyellow': 224, 'hibgblue': 144, 'hibgmagenta': 208,
    'hibgcyan': 176, 'hibgwhite': 240,
    '/black': -39, '/red': -39, '/green': -39, '/yellow': -39, '/blue': -39, '/magenta': -39, '/cyan': -39,
    '/white': -39, '/hiblack': -39, '/hired': -39, '/higreen': -39, '/hiyellow': -39, '/hiblue': -39, '/himagenta': -39,
    '/hicyan': -39, '/hiwhite': -39,
    '/bgblack': -49, '/bgred': -49, '/bggreen': -49, '/bgyellow': -49, '/bgblue': -49, '/bgmagenta': -49,
    '/bgcyan': -49, '/bgwhite': -49, '/hibgblack': -49, '/hibgred': -49, '/hibggreen': -49, '/hibgyellow': -49,
    '/hibgblue': -49, '/hibgmagenta': -49, '/hibgcyan': -49, '/hibgwhite': -49,
}
# Regexes for finding, extracting, and splitting on ANSI escape sequences.
_RE_GROUP_SEARCH = re.compile(r'(?:\033\[[\d;]+m)+')
_RE_NUMBER_SEARCH = re.compile(r'\033\[([\d;]+)m')
_RE_SPLIT = re.compile(r'(\033\[[\d;]+m)')
# Base string type Color subclasses: unicode on Python 2, str on Python 3.
PARENT_CLASS = unicode if sys.version_info[0] == 2 else str
class _AutoCodes(Mapping):
"""Read-only subclass of dict, resolves closing tags (based on colorclass.CODES) and automatic colors."""
DISABLE_COLORS = False
LIGHT_BACKGROUND = False
def __init__(self):
self.__dict = _BASE_CODES.copy()
def __getitem__(self, item):
if item == 'autoblack':
answer = self.autoblack
elif item == 'autored':
answer = self.autored
elif item == 'autogreen':
answer = self.autogreen
elif item == 'autoyellow':
answer = self.autoyellow
elif item == 'autoblue':
answer = self.autoblue
elif item == 'automagenta':
answer = self.automagenta
elif item == 'autocyan':
answer = self.autocyan
elif item == 'autowhite':
answer = self.autowhite
elif item == 'autobgblack':
answer = self.autobgblack
elif item == 'autobgred':
answer = self.autobgred
elif item == 'autobggreen':
answer = self.autobggreen
elif item == 'autobgyellow':
answer = self.autobgyellow
elif item == 'autobgblue':
answer = self.autobgblue
elif item == 'autobgmagenta':
answer = self.autobgmagenta
elif item == 'autobgcyan':
answer = self.autobgcyan
elif item == 'autobgwhite':
answer = self.autobgwhite
else:
answer = self.__dict[item]
return answer
def __iter__(self):
return iter(self.__dict)
def __len__(self):
return len(self.__dict)
@property
def autoblack(self):
"""Returns automatic black foreground color depending on background color."""
return self.__dict['black' if _AutoCodes.LIGHT_BACKGROUND else 'hiblack']
@property
def autored(self):
"""Returns automatic red foreground color depending on background color."""
return self.__dict['red' if _AutoCodes.LIGHT_BACKGROUND else 'hired']
@property
def autogreen(self):
"""Returns automatic green foreground color depending on background color."""
return self.__dict['green' if _AutoCodes.LIGHT_BACKGROUND else 'higreen']
@property
def autoyellow(self):
"""Returns automatic yellow foreground color depending on background color."""
return self.__dict['yellow' if _AutoCodes.LIGHT_BACKGROUND else 'hiyellow']
@property
def autoblue(self):
"""Returns automatic blue foreground color depending on background color."""
return self.__dict['blue' if _AutoCodes.LIGHT_BACKGROUND else 'hiblue']
@property
def automagenta(self):
"""Returns automatic magenta foreground color depending on background color."""
return self.__dict['magenta' if _AutoCodes.LIGHT_BACKGROUND else 'himagenta']
@property
def autocyan(self):
"""Returns automatic cyan foreground color depending on background color."""
return self.__dict['cyan' if _AutoCodes.LIGHT_BACKGROUND else 'hicyan']
@property
def autowhite(self):
"""Returns automatic white foreground color depending on background color."""
return self.__dict['white' if _AutoCodes.LIGHT_BACKGROUND else 'hiwhite']
@property
def autobgblack(self):
"""Returns automatic black background color depending on background color."""
return self.__dict['bgblack' if _AutoCodes.LIGHT_BACKGROUND else 'hibgblack']
@property
def autobgred(self):
"""Returns automatic red background color depending on background color."""
return self.__dict['bgred' if _AutoCodes.LIGHT_BACKGROUND else 'hibgred']
@property
def autobggreen(self):
"""Returns automatic green background color depending on background color."""
return self.__dict['bggreen' if _AutoCodes.LIGHT_BACKGROUND else 'hibggreen']
@property
def autobgyellow(self):
"""Returns automatic yellow background color depending on background color."""
return self.__dict['bgyellow' if _AutoCodes.LIGHT_BACKGROUND else 'hibgyellow']
@property
def autobgblue(self):
"""Returns automatic blue background color depending on background color."""
return self.__dict['bgblue' if _AutoCodes.LIGHT_BACKGROUND else 'hibgblue']
@property
def autobgmagenta(self):
    """Magenta background code suited to the terminal background (high-intensity on dark)."""
    key = 'bgmagenta' if _AutoCodes.LIGHT_BACKGROUND else 'hibgmagenta'
    return self.__dict[key]
@property
def autobgcyan(self):
    """Cyan background code suited to the terminal background (high-intensity on dark)."""
    key = 'bgcyan' if _AutoCodes.LIGHT_BACKGROUND else 'hibgcyan'
    return self.__dict[key]
@property
def autobgwhite(self):
    """White background code suited to the terminal background (high-intensity on dark)."""
    key = 'bgwhite' if _AutoCodes.LIGHT_BACKGROUND else 'hibgwhite'
    return self.__dict[key]
def _pad_input(incoming):
    """Escape brace characters so str.format only substitutes recognized color tags.

    Every '{' and '}' is doubled first, then the doubling is undone around each
    known tag name. Example: '{0}{autored}' becomes '{{0}}{autored}'.

    Positional arguments:
    incoming -- the input unicode value.

    Returns:
    Padded unicode value.
    """
    padded = incoming.replace('{', '{{').replace('}', '}}')
    for tag in _BASE_CODES:
        doubled = '{{%s}}' % tag
        if doubled in padded:
            padded = padded.replace(doubled, '{%s}' % tag)
    return padded
def disable_all_colors():
    """Disable all colors globally. Any color tags or ANSI codes are stripped from output instead of rendered."""
    _AutoCodes.DISABLE_COLORS = True
def set_light_background():
    """Choose dark colors for all 'auto'-prefixed codes for readability on light backgrounds."""
    _AutoCodes.DISABLE_COLORS = False  # also re-enables colors if previously disabled
    _AutoCodes.LIGHT_BACKGROUND = True
def set_dark_background():
    """Choose light (high-intensity) colors for all 'auto'-prefixed codes for readability on dark backgrounds."""
    _AutoCodes.DISABLE_COLORS = False  # also re-enables colors if previously disabled
    _AutoCodes.LIGHT_BACKGROUND = False
def list_tags():
    """Lists the available tags.

    Returns:
    Tuple of tuples. Child tuples are four items: ('opening tag', 'closing tag', main ansi value, closing ansi value).
    """
    codes = _AutoCodes()
    # Pair each opening tag with its '/name' closer and both ANSI values.
    grouped = set([(k, '/{0}'.format(k), codes[k], codes['/{0}'.format(k)]) for k in codes if not k.startswith('/')])
    # Add half-tags like /all.
    found = [c for r in grouped for c in r[:2]]
    missing = set([('', r[0], None, r[1]) if r[0].startswith('/') else (r[0], '', r[1], None)
                   for r in _AutoCodes().items() if r[0] not in found])
    grouped |= missing
    # Sort. Each pass pulls one category out of `grouped` in display order.
    payload = sorted([i for i in grouped if i[2] is None], key=lambda x: x[3])  # /all /fg /bg
    grouped -= set(payload)
    payload.extend(sorted([i for i in grouped if i[2] < 10], key=lambda x: x[2]))  # b i u flash
    grouped -= set(payload)
    payload.extend(sorted([i for i in grouped if i[0].startswith('auto')], key=lambda x: x[2]))  # auto colors
    grouped -= set(payload)
    payload.extend(sorted([i for i in grouped if not i[0].startswith('hi')], key=lambda x: x[2]))  # dark colors
    grouped -= set(payload)
    payload.extend(sorted(grouped, key=lambda x: x[2]))  # light colors
    return tuple(payload)
class Color(PARENT_CLASS):
    """Unicode (str in Python3) subclass with ANSI terminal text color support.

    Example syntax: Color('{red}Sample Text{/red}')

    For a list of codes, call: colorclass.list_tags()
    """

    # Pattern used throughout: the instance holds two parallel renderings of the
    # markup -- value_colors (with ANSI escapes) and value_no_colors (escapes
    # stripped). Query methods (len, count, find, is*...) delegate to the
    # color-free text so escape bytes don't skew results; padding methods pad
    # the color-free text and then swap the colored text back in.

    def __new__(cls, *args, **kwargs):
        parent_class = cls.__bases__[0]
        value_markup = args[0] if args else parent_class()
        value_colors, value_no_colors = _parse_input(value_markup)
        if args:
            args = [value_colors] + list(args[1:])
        obj = parent_class.__new__(cls, *args, **kwargs)
        obj.value_colors, obj.value_no_colors = value_colors, value_no_colors
        obj.has_colors = bool(_RE_NUMBER_SEARCH.match(value_colors))
        return obj

    def __len__(self):
        return self.value_no_colors.__len__()

    def capitalize(self):
        # Apply the transform to text segments only, leaving escape codes intact.
        split = _RE_SPLIT.split(self.value_colors)
        for i in range(len(split)):
            if _RE_SPLIT.match(split[i]):
                continue
            split[i] = PARENT_CLASS(split[i]).capitalize()
        return Color().join(split)

    def center(self, width, fillchar=None):
        if fillchar is not None:
            result = PARENT_CLASS(self.value_no_colors).center(width, fillchar)
        else:
            result = PARENT_CLASS(self.value_no_colors).center(width)
        # Re-insert the colored text between the computed padding.
        return result.replace(self.value_no_colors, self.value_colors)

    def count(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).count(*args, **kwargs)

    def endswith(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).endswith(*args, **kwargs)

    def find(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).find(*args, **kwargs)

    def format(*args, **kwargs):
        # NOTE: deliberately no explicit 'self' -- allows '{0}'.format-style
        # positional use; args[0] is the instance. Result is re-parsed for tags.
        return Color(super(Color, args[0]).format(*args[1:], **kwargs))

    def index(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).index(*args, **kwargs)

    def isalnum(self):
        return PARENT_CLASS(self.value_no_colors).isalnum()

    def isalpha(self):
        return PARENT_CLASS(self.value_no_colors).isalpha()

    def isdecimal(self):
        return PARENT_CLASS(self.value_no_colors).isdecimal()

    def isdigit(self):
        return PARENT_CLASS(self.value_no_colors).isdigit()

    def isnumeric(self):
        return PARENT_CLASS(self.value_no_colors).isnumeric()

    def isspace(self):
        return PARENT_CLASS(self.value_no_colors).isspace()

    def istitle(self):
        return PARENT_CLASS(self.value_no_colors).istitle()

    def isupper(self):
        return PARENT_CLASS(self.value_no_colors).isupper()

    def ljust(self, width, fillchar=None):
        if fillchar is not None:
            result = PARENT_CLASS(self.value_no_colors).ljust(width, fillchar)
        else:
            result = PARENT_CLASS(self.value_no_colors).ljust(width)
        return result.replace(self.value_no_colors, self.value_colors)

    def rfind(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).rfind(*args, **kwargs)

    def rindex(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).rindex(*args, **kwargs)

    def rjust(self, width, fillchar=None):
        if fillchar is not None:
            result = PARENT_CLASS(self.value_no_colors).rjust(width, fillchar)
        else:
            result = PARENT_CLASS(self.value_no_colors).rjust(width)
        return result.replace(self.value_no_colors, self.value_colors)

    def splitlines(self):
        # Each line keeps whatever escape codes fall within it.
        return [Color(l) for l in PARENT_CLASS(self.value_colors).splitlines()]

    def startswith(self, *args, **kwargs):
        return PARENT_CLASS(self.value_no_colors).startswith(*args, **kwargs)

    def swapcase(self):
        split = _RE_SPLIT.split(self.value_colors)
        for i in range(len(split)):
            if _RE_SPLIT.match(split[i]):
                continue
            split[i] = PARENT_CLASS(split[i]).swapcase()
        return Color().join(split)

    def title(self):
        split = _RE_SPLIT.split(self.value_colors)
        for i in range(len(split)):
            if _RE_SPLIT.match(split[i]):
                continue
            split[i] = PARENT_CLASS(split[i]).title()
        return Color().join(split)

    def translate(self, table):
        split = _RE_SPLIT.split(self.value_colors)
        for i in range(len(split)):
            if _RE_SPLIT.match(split[i]):
                continue
            split[i] = PARENT_CLASS(split[i]).translate(table)
        return Color().join(split)

    def upper(self):
        split = _RE_SPLIT.split(self.value_colors)
        for i in range(len(split)):
            if _RE_SPLIT.match(split[i]):
                continue
            split[i] = PARENT_CLASS(split[i]).upper()
        return Color().join(split)

    def zfill(self, width):
        if not self.value_no_colors:
            return PARENT_CLASS().zfill(width)
        split = _RE_SPLIT.split(self.value_colors)
        filled = PARENT_CLASS(self.value_no_colors).zfill(width)
        if len(split) == 1:
            return filled
        # Insert the zero padding after a leading escape code (split[0] empty
        # means the string started with a code), otherwise before the text.
        padding = filled.replace(self.value_no_colors, '')
        if not split[0]:
            split[2] = padding + split[2]
        else:
            split[0] = padding + split[0]
        return Color().join(split)
class Windows(object):
    """Enable and disable Windows support for ANSI color character codes.

    Call static method Windows.enable() to enable color support for the remainder of the process' lifetime.

    This class is also a context manager. You can do this:
    with Windows():
        print(Color('{autored}Test{/autored}'))

    Or this:
    with Windows(auto_colors=True):
        print(Color('{autored}Test{/autored}'))
    """

    @staticmethod
    def disable():
        """Restore sys.stderr and sys.stdout to their original objects. Resets colors to their original values.

        Returns True if streams were restored, False if there was nothing to do (non-Windows or not enabled).
        """
        if os.name != 'nt' or not Windows.is_enabled():
            return False
        getattr(sys.stderr, '_reset_colors', lambda: False)()
        getattr(sys.stdout, '_reset_colors', lambda: False)()
        if isinstance(sys.stderr, _WindowsStream):
            sys.stderr = getattr(sys.stderr, 'original_stream')
        # BUG FIX: this previously re-tested sys.stderr (already restored on the
        # line above, so the check was always False) and sys.stdout was never
        # unwrapped. Test the stream actually being restored.
        if isinstance(sys.stdout, _WindowsStream):
            sys.stdout = getattr(sys.stdout, 'original_stream')
        return True

    @staticmethod
    def is_enabled():
        """Returns True if either stderr or stdout has colors enabled."""
        return isinstance(sys.stderr, _WindowsStream) or isinstance(sys.stdout, _WindowsStream)

    @staticmethod
    def enable(auto_colors=False, reset_atexit=False):
        """Enables color text with print() or sys.stdout.write() (stderr too).

        Keyword arguments:
        auto_colors -- automatically selects dark or light colors based on current terminal's background color. Only
            works with {autored} and related tags.
        reset_atexit -- resets original colors upon Python exit (in case you forget to reset it yourself with a closing
            tag).

        Returns True if the streams are wrapped, False on non-Windows platforms or on failure.
        """
        if os.name != 'nt':
            return False
        # Overwrite stream references.
        if not isinstance(sys.stderr, _WindowsStream):
            sys.stderr.flush()
            sys.stderr = _WindowsStream(stderr=True)
        if not isinstance(sys.stdout, _WindowsStream):
            sys.stdout.flush()
            sys.stdout = _WindowsStream(stderr=False)
        if not isinstance(sys.stderr, _WindowsStream) and not isinstance(sys.stdout, _WindowsStream):
            return False
        # Automatically select which colors to display.
        bg_color = getattr(sys.stdout, 'default_bg', getattr(sys.stderr, 'default_bg', None))
        if auto_colors and bg_color is not None:
            # These Win32 background attribute values correspond to light backgrounds.
            set_light_background() if bg_color in (112, 96, 240, 176, 224, 208, 160) else set_dark_background()
        # Reset on exit if requested.
        if reset_atexit:
            atexit.register(lambda: Windows.disable())
        return True

    def __init__(self, auto_colors=False):
        # auto_colors is forwarded to Windows.enable() when used as a context manager.
        self.auto_colors = auto_colors

    def __enter__(self):
        Windows.enable(auto_colors=self.auto_colors)

    def __exit__(self, *_):
        Windows.disable()
class _WindowsCSBI(object):
    """Interfaces with Windows CONSOLE_SCREEN_BUFFER_INFO API/DLL calls. Gets info for stderr and stdout.

    References:
    https://code.google.com/p/colorama/issues/detail?id=47.
    pytest's py project: py/_io/terminalwriter.py.

    Class variables:
    CSBI -- ConsoleScreenBufferInfo class/struct (not instance, the class definition itself) defined in _define_csbi().
    HANDLE_STDERR -- GetStdHandle() return integer for stderr.
    HANDLE_STDOUT -- GetStdHandle() return integer for stdout.
    WINDLL -- my own loaded instance of ctypes.WinDLL.
    """

    CSBI = None
    HANDLE_STDERR = None
    HANDLE_STDOUT = None
    # On non-Windows platforms ctypes.WinDLL does not exist; getattr keeps this
    # module importable there (the loader is simply never used).
    WINDLL = ctypes.LibraryLoader(getattr(ctypes, 'WinDLL', None))

    @staticmethod
    def _define_csbi():
        """Defines structs and populates _WindowsCSBI.CSBI."""
        if _WindowsCSBI.CSBI is not None:
            return

        class COORD(ctypes.Structure):
            """Windows COORD structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms682119"""
            _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]

        class SmallRECT(ctypes.Structure):
            """Windows SMALL_RECT structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms686311"""
            _fields_ = [('Left', ctypes.c_short), ('Top', ctypes.c_short), ('Right', ctypes.c_short),
                        ('Bottom', ctypes.c_short)]

        class ConsoleScreenBufferInfo(ctypes.Structure):
            """Windows CONSOLE_SCREEN_BUFFER_INFO structure.
            http://msdn.microsoft.com/en-us/library/windows/desktop/ms682093
            """
            _fields_ = [
                ('dwSize', COORD),
                ('dwCursorPosition', COORD),
                ('wAttributes', ctypes.wintypes.WORD),
                ('srWindow', SmallRECT),
                ('dwMaximumWindowSize', COORD)
            ]

        _WindowsCSBI.CSBI = ConsoleScreenBufferInfo

    @staticmethod
    def initialize():
        """Initializes the WINDLL resource and populated the CSBI class variable."""
        _WindowsCSBI._define_csbi()
        # -12 and -11 are the Win32 STD_ERROR_HANDLE / STD_OUTPUT_HANDLE constants.
        _WindowsCSBI.HANDLE_STDERR = _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-12)
        _WindowsCSBI.HANDLE_STDOUT = _WindowsCSBI.HANDLE_STDOUT or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-11)
        # argtypes already set means initialization happened before; do it once.
        if _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes:
            return
        _WindowsCSBI.WINDLL.kernel32.GetStdHandle.argtypes = [ctypes.wintypes.DWORD]
        _WindowsCSBI.WINDLL.kernel32.GetStdHandle.restype = ctypes.wintypes.HANDLE
        _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.restype = ctypes.wintypes.BOOL
        _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes = [
            ctypes.wintypes.HANDLE, ctypes.POINTER(_WindowsCSBI.CSBI)
        ]

    @staticmethod
    def get_info(handle):
        """Get information about this current console window (for Microsoft Windows only).

        Raises IOError if attempt to get information fails (if there is no console window).

        Don't forget to call _WindowsCSBI.initialize() once in your application before calling this method.

        Positional arguments:
        handle -- either _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.HANDLE_STDOUT.

        Returns:
        Dictionary with different integer values. Keys are:
            buffer_width -- width of the buffer (Screen Buffer Size in cmd.exe layout tab).
            buffer_height -- height of the buffer (Screen Buffer Size in cmd.exe layout tab).
            terminal_width -- width of the terminal window.
            terminal_height -- height of the terminal window.
            bg_color -- current background color (http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088).
            fg_color -- current text color code.
        """
        # Query Win32 API.
        csbi = _WindowsCSBI.CSBI()
        try:
            if not _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo(handle, ctypes.byref(csbi)):
                raise IOError('Unable to get console screen buffer info from win32 API.')
        except ctypes.ArgumentError:
            raise IOError('Unable to get console screen buffer info from win32 API.')
        # Parse data. wAttributes packs fg in the low nibble, bg in bits 4-7 (mask 240).
        result = dict(
            buffer_width=int(csbi.dwSize.X - 1),
            buffer_height=int(csbi.dwSize.Y),
            terminal_width=int(csbi.srWindow.Right - csbi.srWindow.Left),
            terminal_height=int(csbi.srWindow.Bottom - csbi.srWindow.Top),
            bg_color=int(csbi.wAttributes & 240),
            fg_color=int(csbi.wAttributes % 16),
        )
        return result
class _WindowsStream(object):
    """Replacement stream (overwrites sys.stdout and sys.stderr). When writing or printing, ANSI codes are converted.

    ANSI (Linux/Unix) color codes are converted into win32 system calls, changing the next character's color before
    printing it. Resources referenced:
    https://github.com/tartley/colorama
    http://www.cplusplus.com/articles/2ywTURfi/
    http://thomasfischer.biz/python-and-windows-terminal-colors/
    http://stackoverflow.com/questions/17125440/c-win32-console-color
    http://www.tysos.org/svn/trunk/mono/corlib/System/WindowsConsoleDriver.cs
    http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
    http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088#_win32_character_attributes

    Class variables:
    ALL_BG_CODES -- list of background Windows codes. Used to determine if requested color is foreground or background.
    COMPILED_CODES -- 'translation' dictionary. Keys are ANSI codes (values of _BASE_CODES), values are Windows codes.
    STD_ERROR_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
    STD_OUTPUT_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231

    Instance variables:
    original_stream -- the original stream to write non-code text to.
    win32_stream_handle -- handle to the Windows stderr or stdout device. Used by other Windows functions.
    default_fg -- the foreground Windows color code at the time of instantiation.
    default_bg -- the background Windows color code at the time of instantiation.
    """

    ALL_BG_CODES = [v for k, v in _WINDOWS_CODES.items() if k.startswith('bg') or k.startswith('hibg')]
    COMPILED_CODES = dict((v, _WINDOWS_CODES[k]) for k, v in _BASE_CODES.items() if k in _WINDOWS_CODES)

    def __init__(self, stderr=False):
        # stderr -- wrap sys.stderr when True, sys.stdout otherwise.
        _WindowsCSBI.initialize()
        self.original_stream = sys.stderr if stderr else sys.stdout
        self.win32_stream_handle = _WindowsCSBI.HANDLE_STDERR if stderr else _WindowsCSBI.HANDLE_STDOUT
        self.default_fg, self.default_bg = self._get_colors()

    def __getattr__(self, item):
        """If an attribute/function/etc is not defined in this function, retrieve the one from the original stream.

        Fixes ipython arrow key presses.
        """
        return getattr(self.original_stream, item)

    def _get_colors(self):
        """Returns a tuple of two integers representing current colors: (foreground, background)."""
        try:
            csbi = _WindowsCSBI.get_info(self.win32_stream_handle)
            return csbi['fg_color'], csbi['bg_color']
        except IOError:
            # No console attached (e.g. redirected output): fall back to white on black.
            return 7, 0

    def _reset_colors(self):
        """Sets the foreground and background colors to their original values (when class was instantiated)."""
        self._set_color(-33)

    def _set_color(self, color_code):
        """Changes the foreground and background colors for subsequently printed characters.

        Since setting a color requires including both foreground and background codes (merged), setting just the
        foreground color resets the background color to black, and vice versa.

        This function first gets the current background and foreground colors, merges in the requested color code, and
        sets the result.

        However if we need to remove just the foreground color but leave the background color the same (or vice versa)
        such as when {/red} is used, we must merge the default foreground color with the current background color. This
        is the reason for those negative values.

        Positional arguments:
        color_code -- integer color code from _WINDOWS_CODES.
        """
        # Get current color code.
        current_fg, current_bg = self._get_colors()
        # Handle special negative codes. Also determine the final color code.
        if color_code == -39:
            final_color_code = self.default_fg | current_bg  # Reset the foreground only.
        elif color_code == -49:
            final_color_code = current_fg | self.default_bg  # Reset the background only.
        elif color_code == -33:
            final_color_code = self.default_fg | self.default_bg  # Reset both.
        elif color_code == -8:
            final_color_code = current_fg  # Black background.
        else:
            new_is_bg = color_code in self.ALL_BG_CODES
            final_color_code = color_code | (current_fg if new_is_bg else current_bg)
        # Set new code.
        _WindowsCSBI.WINDLL.kernel32.SetConsoleTextAttribute(self.win32_stream_handle, final_color_code)

    def write(self, p_str):
        """Write to the stream, converting any embedded ANSI color codes into Win32 attribute calls."""
        # _RE_SPLIT alternates plain-text and escape-code segments.
        for segment in _RE_SPLIT.split(p_str):
            if not segment:
                # Empty string. p_str probably starts with colors so the first item is always ''.
                continue
            if not _RE_SPLIT.match(segment):
                # No color codes, print regular text.
                self.original_stream.write(segment)
                self.original_stream.flush()
                continue
            for color_code in (int(c) for c in _RE_NUMBER_SEARCH.findall(segment)[0].split(';')):
                if color_code in self.COMPILED_CODES:
                    self._set_color(self.COMPILED_CODES[color_code])
|
hkff/FodtlMon
|
fodtlmon/tools/color.py
|
list_tags
|
python
|
def list_tags():
codes = _AutoCodes()
grouped = set([(k, '/{0}'.format(k), codes[k], codes['/{0}'.format(k)]) for k in codes if not k.startswith('/')])
# Add half-tags like /all.
found = [c for r in grouped for c in r[:2]]
missing = set([('', r[0], None, r[1]) if r[0].startswith('/') else (r[0], '', r[1], None)
for r in _AutoCodes().items() if r[0] not in found])
grouped |= missing
# Sort.
payload = sorted([i for i in grouped if i[2] is None], key=lambda x: x[3]) # /all /fg /bg
grouped -= set(payload)
payload.extend(sorted([i for i in grouped if i[2] < 10], key=lambda x: x[2])) # b i u flash
grouped -= set(payload)
payload.extend(sorted([i for i in grouped if i[0].startswith('auto')], key=lambda x: x[2])) # auto colors
grouped -= set(payload)
payload.extend(sorted([i for i in grouped if not i[0].startswith('hi')], key=lambda x: x[2])) # dark colors
grouped -= set(payload)
payload.extend(sorted(grouped, key=lambda x: x[2])) # light colors
return tuple(payload)
|
Lists the available tags.
Returns:
Tuple of tuples. Child tuples are four items: ('opening tag', 'closing tag', main ansi value, closing ansi value).
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/tools/color.py#L287-L312
| null |
"""Colorful worry-free console applications for Linux, Mac OS X, and Windows.
Supported natively on Linux and Mac OSX (Just Works), and on Windows it works the same if Windows.enable() is called.
Gives you expected and sane results from methods like len() and .capitalize().
https://github.com/Robpol86/colorclass
https://pypi.python.org/pypi/colorclass
"""
import atexit
import ctypes
import os
import re
import sys

# 'Mapping' moved to collections.abc in Python 3.3 and the 'collections'
# alias was removed in Python 3.10; fall back for Python 2.
try:
    from collections.abc import Mapping
except ImportError:  # Python 2
    from collections import Mapping

if os.name == 'nt':
    import ctypes.wintypes
# Package metadata.
__author__ = '@Robpol86'
__license__ = 'MIT'
__version__ = '1.1.1'
# Tag name -> ANSI SGR code. 'auto*' entries are None placeholders; their real
# value is resolved at lookup time by _AutoCodes based on LIGHT_BACKGROUND.
# Closing tags ('/name') map to the SGR reset code for that attribute class.
_BASE_CODES = {
    '/all': 0, 'b': 1, 'f': 2, 'i': 3, 'u': 4, 'flash': 5, 'outline': 6, 'negative': 7, 'invis': 8, 'strike': 9,
    '/b': 22, '/f': 22, '/i': 23, '/u': 24, '/flash': 25, '/outline': 26, '/negative': 27, '/invis': 28,
    '/strike': 29, '/fg': 39, '/bg': 49,

    'black': 30, 'red': 31, 'green': 32, 'yellow': 33, 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37,

    'bgblack': 40, 'bgred': 41, 'bggreen': 42, 'bgyellow': 43, 'bgblue': 44, 'bgmagenta': 45, 'bgcyan': 46,
    'bgwhite': 47,

    'hiblack': 90, 'hired': 91, 'higreen': 92, 'hiyellow': 93, 'hiblue': 94, 'himagenta': 95, 'hicyan': 96,
    'hiwhite': 97,

    'hibgblack': 100, 'hibgred': 101, 'hibggreen': 102, 'hibgyellow': 103, 'hibgblue': 104, 'hibgmagenta': 105,
    'hibgcyan': 106, 'hibgwhite': 107,

    'autored': None, 'autoblack': None, 'automagenta': None, 'autowhite': None, 'autoblue': None, 'autoyellow': None,
    'autogreen': None, 'autocyan': None,

    'autobgred': None, 'autobgblack': None, 'autobgmagenta': None, 'autobgwhite': None, 'autobgblue': None,
    'autobgyellow': None, 'autobggreen': None, 'autobgcyan': None,

    '/black': 39, '/red': 39, '/green': 39, '/yellow': 39, '/blue': 39, '/magenta': 39, '/cyan': 39, '/white': 39,
    '/hiblack': 39, '/hired': 39, '/higreen': 39, '/hiyellow': 39, '/hiblue': 39, '/himagenta': 39, '/hicyan': 39,
    '/hiwhite': 39,

    '/bgblack': 49, '/bgred': 49, '/bggreen': 49, '/bgyellow': 49, '/bgblue': 49, '/bgmagenta': 49, '/bgcyan': 49,
    '/bgwhite': 49, '/hibgblack': 49, '/hibgred': 49, '/hibggreen': 49, '/hibgyellow': 49, '/hibgblue': 49,
    '/hibgmagenta': 49, '/hibgcyan': 49, '/hibgwhite': 49,

    '/autored': 39, '/autoblack': 39, '/automagenta': 39, '/autowhite': 39, '/autoblue': 39, '/autoyellow': 39,
    '/autogreen': 39, '/autocyan': 39,

    '/autobgred': 49, '/autobgblack': 49, '/autobgmagenta': 49, '/autobgwhite': 49, '/autobgblue': 49,
    '/autobgyellow': 49, '/autobggreen': 49, '/autobgcyan': 49,
}

# Tag name -> Win32 console attribute value (negative values are special
# "reset" sentinels interpreted by _WindowsStream._set_color).
_WINDOWS_CODES = {
    '/all': -33, '/fg': -39, '/bg': -49,

    'black': 0, 'red': 4, 'green': 2, 'yellow': 6, 'blue': 1, 'magenta': 5, 'cyan': 3, 'white': 7,

    'bgblack': -8, 'bgred': 64, 'bggreen': 32, 'bgyellow': 96, 'bgblue': 16, 'bgmagenta': 80, 'bgcyan': 48,
    'bgwhite': 112,

    'hiblack': 8, 'hired': 12, 'higreen': 10, 'hiyellow': 14, 'hiblue': 9, 'himagenta': 13, 'hicyan': 11, 'hiwhite': 15,

    'hibgblack': 128, 'hibgred': 192, 'hibggreen': 160, 'hibgyellow': 224, 'hibgblue': 144, 'hibgmagenta': 208,
    'hibgcyan': 176, 'hibgwhite': 240,

    '/black': -39, '/red': -39, '/green': -39, '/yellow': -39, '/blue': -39, '/magenta': -39, '/cyan': -39,
    '/white': -39, '/hiblack': -39, '/hired': -39, '/higreen': -39, '/hiyellow': -39, '/hiblue': -39, '/himagenta': -39,
    '/hicyan': -39, '/hiwhite': -39,

    '/bgblack': -49, '/bgred': -49, '/bggreen': -49, '/bgyellow': -49, '/bgblue': -49, '/bgmagenta': -49,
    '/bgcyan': -49, '/bgwhite': -49, '/hibgblack': -49, '/hibgred': -49, '/hibggreen': -49, '/hibgyellow': -49,
    '/hibgblue': -49, '/hibgmagenta': -49, '/hibgcyan': -49, '/hibgwhite': -49,
}

# Regexes over rendered ANSI escape sequences: runs of adjacent codes, the
# numeric payload of one code, and a capturing splitter (keeps the codes).
_RE_GROUP_SEARCH = re.compile(r'(?:\033\[[\d;]+m)+')
_RE_NUMBER_SEARCH = re.compile(r'\033\[([\d;]+)m')
_RE_SPLIT = re.compile(r'(\033\[[\d;]+m)')

# 'unicode' is only evaluated on Python 2 (conditional expression short-circuits).
PARENT_CLASS = unicode if sys.version_info[0] == 2 else str
class _AutoCodes(Mapping):
"""Read-only subclass of dict, resolves closing tags (based on colorclass.CODES) and automatic colors."""
DISABLE_COLORS = False
LIGHT_BACKGROUND = False
def __init__(self):
self.__dict = _BASE_CODES.copy()
def __getitem__(self, item):
if item == 'autoblack':
answer = self.autoblack
elif item == 'autored':
answer = self.autored
elif item == 'autogreen':
answer = self.autogreen
elif item == 'autoyellow':
answer = self.autoyellow
elif item == 'autoblue':
answer = self.autoblue
elif item == 'automagenta':
answer = self.automagenta
elif item == 'autocyan':
answer = self.autocyan
elif item == 'autowhite':
answer = self.autowhite
elif item == 'autobgblack':
answer = self.autobgblack
elif item == 'autobgred':
answer = self.autobgred
elif item == 'autobggreen':
answer = self.autobggreen
elif item == 'autobgyellow':
answer = self.autobgyellow
elif item == 'autobgblue':
answer = self.autobgblue
elif item == 'autobgmagenta':
answer = self.autobgmagenta
elif item == 'autobgcyan':
answer = self.autobgcyan
elif item == 'autobgwhite':
answer = self.autobgwhite
else:
answer = self.__dict[item]
return answer
def __iter__(self):
return iter(self.__dict)
def __len__(self):
return len(self.__dict)
@property
def autoblack(self):
"""Returns automatic black foreground color depending on background color."""
return self.__dict['black' if _AutoCodes.LIGHT_BACKGROUND else 'hiblack']
@property
def autored(self):
"""Returns automatic red foreground color depending on background color."""
return self.__dict['red' if _AutoCodes.LIGHT_BACKGROUND else 'hired']
@property
def autogreen(self):
"""Returns automatic green foreground color depending on background color."""
return self.__dict['green' if _AutoCodes.LIGHT_BACKGROUND else 'higreen']
@property
def autoyellow(self):
"""Returns automatic yellow foreground color depending on background color."""
return self.__dict['yellow' if _AutoCodes.LIGHT_BACKGROUND else 'hiyellow']
@property
def autoblue(self):
"""Returns automatic blue foreground color depending on background color."""
return self.__dict['blue' if _AutoCodes.LIGHT_BACKGROUND else 'hiblue']
@property
def automagenta(self):
"""Returns automatic magenta foreground color depending on background color."""
return self.__dict['magenta' if _AutoCodes.LIGHT_BACKGROUND else 'himagenta']
@property
def autocyan(self):
"""Returns automatic cyan foreground color depending on background color."""
return self.__dict['cyan' if _AutoCodes.LIGHT_BACKGROUND else 'hicyan']
@property
def autowhite(self):
"""Returns automatic white foreground color depending on background color."""
return self.__dict['white' if _AutoCodes.LIGHT_BACKGROUND else 'hiwhite']
@property
def autobgblack(self):
"""Returns automatic black background color depending on background color."""
return self.__dict['bgblack' if _AutoCodes.LIGHT_BACKGROUND else 'hibgblack']
@property
def autobgred(self):
"""Returns automatic red background color depending on background color."""
return self.__dict['bgred' if _AutoCodes.LIGHT_BACKGROUND else 'hibgred']
@property
def autobggreen(self):
"""Returns automatic green background color depending on background color."""
return self.__dict['bggreen' if _AutoCodes.LIGHT_BACKGROUND else 'hibggreen']
@property
def autobgyellow(self):
"""Returns automatic yellow background color depending on background color."""
return self.__dict['bgyellow' if _AutoCodes.LIGHT_BACKGROUND else 'hibgyellow']
@property
def autobgblue(self):
"""Returns automatic blue background color depending on background color."""
return self.__dict['bgblue' if _AutoCodes.LIGHT_BACKGROUND else 'hibgblue']
@property
def autobgmagenta(self):
"""Returns automatic magenta background color depending on background color."""
return self.__dict['bgmagenta' if _AutoCodes.LIGHT_BACKGROUND else 'hibgmagenta']
@property
def autobgcyan(self):
"""Returns automatic cyan background color depending on background color."""
return self.__dict['bgcyan' if _AutoCodes.LIGHT_BACKGROUND else 'hibgcyan']
@property
def autobgwhite(self):
"""Returns automatic white background color depending on background color."""
return self.__dict['bgwhite' if _AutoCodes.LIGHT_BACKGROUND else 'hibgwhite']
def _pad_input(incoming):
    """Escape unrelated brace fields so str.format leaves them alone.

    Doubles every brace, then restores single braces around recognized color
    tag names only. Example: '{0}{autored}' becomes '{{0}}{autored}'.

    Positional arguments:
    incoming -- the input unicode value.

    Returns:
    Padded unicode value.
    """
    escaped = incoming.replace('{', '{{').replace('}', '}}')
    for name in _BASE_CODES:
        escaped = escaped.replace('{{%s}}' % name, '{%s}' % name)
    return escaped
def _parse_input(incoming):
    """Performs the actual conversion of tags to ANSI escaped codes.

    Provides a version of the input without any colors for len() and other methods.

    Positional arguments:
    incoming -- the input unicode value.

    Returns:
    2-item tuple. First item is the parsed output. Second item is a version of the input without any colors.
    """
    # Only resolve the tags actually present in the input.
    codes = dict((k, v) for k, v in _AutoCodes().items() if '{%s}' % k in incoming)
    color_codes = dict((k, '' if _AutoCodes.DISABLE_COLORS else '\033[{0}m'.format(v)) for k, v in codes.items())
    incoming_padded = _pad_input(incoming)
    output_colors = incoming_padded.format(**color_codes)

    # Simplify: '{b}{red}' -> '\033[1m\033[31m' -> '\033[1;31m'
    # Longest runs first so shorter runs embedded in them aren't replaced early.
    groups = sorted(set(_RE_GROUP_SEARCH.findall(output_colors)), key=len, reverse=True)  # Get codes, grouped adjacent.
    groups_simplified = [[x for n in _RE_NUMBER_SEARCH.findall(i) for x in n.split(';')] for i in groups]
    groups_compiled = ['\033[{0}m'.format(';'.join(g)) for g in groups_simplified]  # Final codes.
    assert len(groups_compiled) == len(groups)  # For testing.
    output_colors_simplified = output_colors
    for i in range(len(groups)):
        output_colors_simplified = output_colors_simplified.replace(groups[i], groups_compiled[i])
    output_no_colors = _RE_SPLIT.sub('', output_colors_simplified)

    # Strip any remaining color codes.
    if _AutoCodes.DISABLE_COLORS:
        output_colors_simplified = _RE_NUMBER_SEARCH.sub('', output_colors_simplified)

    return output_colors_simplified, output_no_colors
def disable_all_colors():
    """Disable all colors globally. Any color tags or ANSI codes are stripped from output instead of rendered."""
    _AutoCodes.DISABLE_COLORS = True
def set_light_background():
    """Choose dark colors for all 'auto'-prefixed codes for readability on light backgrounds."""
    _AutoCodes.DISABLE_COLORS = False  # also re-enables colors if previously disabled
    _AutoCodes.LIGHT_BACKGROUND = True
def set_dark_background():
    """Choose light (high-intensity) colors for all 'auto'-prefixed codes for readability on dark backgrounds."""
    _AutoCodes.DISABLE_COLORS = False  # also re-enables colors if previously disabled
    _AutoCodes.LIGHT_BACKGROUND = False
class Color(PARENT_CLASS):
"""Unicode (str in Python3) subclass with ANSI terminal text color support.
Example syntax: Color('{red}Sample Text{/red}')
For a list of codes, call: colorclass.list_tags()
"""
def __new__(cls, *args, **kwargs):
parent_class = cls.__bases__[0]
value_markup = args[0] if args else parent_class()
value_colors, value_no_colors = _parse_input(value_markup)
if args:
args = [value_colors] + list(args[1:])
obj = parent_class.__new__(cls, *args, **kwargs)
obj.value_colors, obj.value_no_colors = value_colors, value_no_colors
obj.has_colors = bool(_RE_NUMBER_SEARCH.match(value_colors))
return obj
def __len__(self):
return self.value_no_colors.__len__()
def capitalize(self):
split = _RE_SPLIT.split(self.value_colors)
for i in range(len(split)):
if _RE_SPLIT.match(split[i]):
continue
split[i] = PARENT_CLASS(split[i]).capitalize()
return Color().join(split)
def center(self, width, fillchar=None):
if fillchar is not None:
result = PARENT_CLASS(self.value_no_colors).center(width, fillchar)
else:
result = PARENT_CLASS(self.value_no_colors).center(width)
return result.replace(self.value_no_colors, self.value_colors)
def count(self, *args, **kwargs):
return PARENT_CLASS(self.value_no_colors).count(*args, **kwargs)
def endswith(self, *args, **kwargs):
return PARENT_CLASS(self.value_no_colors).endswith(*args, **kwargs)
def find(self, *args, **kwargs):
return PARENT_CLASS(self.value_no_colors).find(*args, **kwargs)
def format(*args, **kwargs):
return Color(super(Color, args[0]).format(*args[1:], **kwargs))
def index(self, *args, **kwargs):
return PARENT_CLASS(self.value_no_colors).index(*args, **kwargs)
def isalnum(self):
return PARENT_CLASS(self.value_no_colors).isalnum()
def isalpha(self):
return PARENT_CLASS(self.value_no_colors).isalpha()
def isdecimal(self):
return PARENT_CLASS(self.value_no_colors).isdecimal()
def isdigit(self):
return PARENT_CLASS(self.value_no_colors).isdigit()
def isnumeric(self):
return PARENT_CLASS(self.value_no_colors).isnumeric()
def isspace(self):
return PARENT_CLASS(self.value_no_colors).isspace()
def istitle(self):
return PARENT_CLASS(self.value_no_colors).istitle()
def isupper(self):
return PARENT_CLASS(self.value_no_colors).isupper()
def ljust(self, width, fillchar=None):
if fillchar is not None:
result = PARENT_CLASS(self.value_no_colors).ljust(width, fillchar)
else:
result = PARENT_CLASS(self.value_no_colors).ljust(width)
return result.replace(self.value_no_colors, self.value_colors)
def rfind(self, *args, **kwargs):
return PARENT_CLASS(self.value_no_colors).rfind(*args, **kwargs)
def rindex(self, *args, **kwargs):
return PARENT_CLASS(self.value_no_colors).rindex(*args, **kwargs)
def rjust(self, width, fillchar=None):
if fillchar is not None:
result = PARENT_CLASS(self.value_no_colors).rjust(width, fillchar)
else:
result = PARENT_CLASS(self.value_no_colors).rjust(width)
return result.replace(self.value_no_colors, self.value_colors)
def splitlines(self):
return [Color(l) for l in PARENT_CLASS(self.value_colors).splitlines()]
def startswith(self, *args, **kwargs):
return PARENT_CLASS(self.value_no_colors).startswith(*args, **kwargs)
def swapcase(self):
split = _RE_SPLIT.split(self.value_colors)
for i in range(len(split)):
if _RE_SPLIT.match(split[i]):
continue
split[i] = PARENT_CLASS(split[i]).swapcase()
return Color().join(split)
def title(self):
split = _RE_SPLIT.split(self.value_colors)
for i in range(len(split)):
if _RE_SPLIT.match(split[i]):
continue
split[i] = PARENT_CLASS(split[i]).title()
return Color().join(split)
def translate(self, table):
split = _RE_SPLIT.split(self.value_colors)
for i in range(len(split)):
if _RE_SPLIT.match(split[i]):
continue
split[i] = PARENT_CLASS(split[i]).translate(table)
return Color().join(split)
def upper(self):
split = _RE_SPLIT.split(self.value_colors)
for i in range(len(split)):
if _RE_SPLIT.match(split[i]):
continue
split[i] = PARENT_CLASS(split[i]).upper()
return Color().join(split)
def zfill(self, width):
if not self.value_no_colors:
return PARENT_CLASS().zfill(width)
split = _RE_SPLIT.split(self.value_colors)
filled = PARENT_CLASS(self.value_no_colors).zfill(width)
if len(split) == 1:
return filled
padding = filled.replace(self.value_no_colors, '')
if not split[0]:
split[2] = padding + split[2]
else:
split[0] = padding + split[0]
return Color().join(split)
class Windows(object):
"""Enable and disable Windows support for ANSI color character codes.
Call static method Windows.enable() to enable color support for the remainder of the process' lifetime.
This class is also a context manager. You can do this:
with Windows():
print(Color('{autored}Test{/autored}'))
Or this:
with Windows(auto_colors=True):
print(Color('{autored}Test{/autored}'))
"""
@staticmethod
def disable():
"""Restore sys.stderr and sys.stdout to their original objects. Resets colors to their original values."""
if os.name != 'nt' or not Windows.is_enabled():
return False
getattr(sys.stderr, '_reset_colors', lambda: False)()
getattr(sys.stdout, '_reset_colors', lambda: False)()
if isinstance(sys.stderr, _WindowsStream):
sys.stderr = getattr(sys.stderr, 'original_stream')
if isinstance(sys.stderr, _WindowsStream):
sys.stdout = getattr(sys.stdout, 'original_stream')
return True
@staticmethod
def is_enabled():
"""Returns True if either stderr or stdout has colors enabled."""
return isinstance(sys.stderr, _WindowsStream) or isinstance(sys.stdout, _WindowsStream)
@staticmethod
def enable(auto_colors=False, reset_atexit=False):
"""Enables color text with print() or sys.stdout.write() (stderr too).
Keyword arguments:
auto_colors -- automatically selects dark or light colors based on current terminal's background color. Only
works with {autored} and related tags.
reset_atexit -- resets original colors upon Python exit (in case you forget to reset it yourself with a closing
tag).
"""
if os.name != 'nt':
return False
# Overwrite stream references.
if not isinstance(sys.stderr, _WindowsStream):
sys.stderr.flush()
sys.stderr = _WindowsStream(stderr=True)
if not isinstance(sys.stdout, _WindowsStream):
sys.stdout.flush()
sys.stdout = _WindowsStream(stderr=False)
if not isinstance(sys.stderr, _WindowsStream) and not isinstance(sys.stdout, _WindowsStream):
return False
# Automatically select which colors to display.
bg_color = getattr(sys.stdout, 'default_bg', getattr(sys.stderr, 'default_bg', None))
if auto_colors and bg_color is not None:
set_light_background() if bg_color in (112, 96, 240, 176, 224, 208, 160) else set_dark_background()
# Reset on exit if requested.
if reset_atexit:
atexit.register(lambda: Windows.disable())
return True
def __init__(self, auto_colors=False):
self.auto_colors = auto_colors
def __enter__(self):
Windows.enable(auto_colors=self.auto_colors)
def __exit__(self, *_):
Windows.disable()
class _WindowsCSBI(object):
"""Interfaces with Windows CONSOLE_SCREEN_BUFFER_INFO API/DLL calls. Gets info for stderr and stdout.
References:
https://code.google.com/p/colorama/issues/detail?id=47.
pytest's py project: py/_io/terminalwriter.py.
Class variables:
CSBI -- ConsoleScreenBufferInfo class/struct (not instance, the class definition itself) defined in _define_csbi().
HANDLE_STDERR -- GetStdHandle() return integer for stderr.
HANDLE_STDOUT -- GetStdHandle() return integer for stdout.
WINDLL -- my own loaded instance of ctypes.WinDLL.
"""
CSBI = None
HANDLE_STDERR = None
HANDLE_STDOUT = None
WINDLL = ctypes.LibraryLoader(getattr(ctypes, 'WinDLL', None))
@staticmethod
def _define_csbi():
"""Defines structs and populates _WindowsCSBI.CSBI."""
if _WindowsCSBI.CSBI is not None:
return
class COORD(ctypes.Structure):
"""Windows COORD structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms682119"""
_fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
class SmallRECT(ctypes.Structure):
"""Windows SMALL_RECT structure. http://msdn.microsoft.com/en-us/library/windows/desktop/ms686311"""
_fields_ = [('Left', ctypes.c_short), ('Top', ctypes.c_short), ('Right', ctypes.c_short),
('Bottom', ctypes.c_short)]
class ConsoleScreenBufferInfo(ctypes.Structure):
"""Windows CONSOLE_SCREEN_BUFFER_INFO structure.
http://msdn.microsoft.com/en-us/library/windows/desktop/ms682093
"""
_fields_ = [
('dwSize', COORD),
('dwCursorPosition', COORD),
('wAttributes', ctypes.wintypes.WORD),
('srWindow', SmallRECT),
('dwMaximumWindowSize', COORD)
]
_WindowsCSBI.CSBI = ConsoleScreenBufferInfo
@staticmethod
def initialize():
"""Initializes the WINDLL resource and populated the CSBI class variable."""
_WindowsCSBI._define_csbi()
_WindowsCSBI.HANDLE_STDERR = _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-12)
_WindowsCSBI.HANDLE_STDOUT = _WindowsCSBI.HANDLE_STDOUT or _WindowsCSBI.WINDLL.kernel32.GetStdHandle(-11)
if _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes:
return
_WindowsCSBI.WINDLL.kernel32.GetStdHandle.argtypes = [ctypes.wintypes.DWORD]
_WindowsCSBI.WINDLL.kernel32.GetStdHandle.restype = ctypes.wintypes.HANDLE
_WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.restype = ctypes.wintypes.BOOL
_WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo.argtypes = [
ctypes.wintypes.HANDLE, ctypes.POINTER(_WindowsCSBI.CSBI)
]
@staticmethod
def get_info(handle):
"""Get information about this current console window (for Microsoft Windows only).
Raises IOError if attempt to get information fails (if there is no console window).
Don't forget to call _WindowsCSBI.initialize() once in your application before calling this method.
Positional arguments:
handle -- either _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.HANDLE_STDOUT.
Returns:
Dictionary with different integer values. Keys are:
buffer_width -- width of the buffer (Screen Buffer Size in cmd.exe layout tab).
buffer_height -- height of the buffer (Screen Buffer Size in cmd.exe layout tab).
terminal_width -- width of the terminal window.
terminal_height -- height of the terminal window.
bg_color -- current background color (http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088).
fg_color -- current text color code.
"""
# Query Win32 API.
csbi = _WindowsCSBI.CSBI()
try:
if not _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo(handle, ctypes.byref(csbi)):
raise IOError('Unable to get console screen buffer info from win32 API.')
except ctypes.ArgumentError:
raise IOError('Unable to get console screen buffer info from win32 API.')
# Parse data.
result = dict(
buffer_width=int(csbi.dwSize.X - 1),
buffer_height=int(csbi.dwSize.Y),
terminal_width=int(csbi.srWindow.Right - csbi.srWindow.Left),
terminal_height=int(csbi.srWindow.Bottom - csbi.srWindow.Top),
bg_color=int(csbi.wAttributes & 240),
fg_color=int(csbi.wAttributes % 16),
)
return result
class _WindowsStream(object):
"""Replacement stream (overwrites sys.stdout and sys.stderr). When writing or printing, ANSI codes are converted.
ANSI (Linux/Unix) color codes are converted into win32 system calls, changing the next character's color before
printing it. Resources referenced:
https://github.com/tartley/colorama
http://www.cplusplus.com/articles/2ywTURfi/
http://thomasfischer.biz/python-and-windows-terminal-colors/
http://stackoverflow.com/questions/17125440/c-win32-console-color
http://www.tysos.org/svn/trunk/mono/corlib/System/WindowsConsoleDriver.cs
http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088#_win32_character_attributes
Class variables:
ALL_BG_CODES -- list of background Windows codes. Used to determine if requested color is foreground or background.
COMPILED_CODES -- 'translation' dictionary. Keys are ANSI codes (values of _BASE_CODES), values are Windows codes.
STD_ERROR_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
STD_OUTPUT_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
Instance variables:
original_stream -- the original stream to write non-code text to.
win32_stream_handle -- handle to the Windows stderr or stdout device. Used by other Windows functions.
default_fg -- the foreground Windows color code at the time of instantiation.
default_bg -- the background Windows color code at the time of instantiation.
"""
ALL_BG_CODES = [v for k, v in _WINDOWS_CODES.items() if k.startswith('bg') or k.startswith('hibg')]
COMPILED_CODES = dict((v, _WINDOWS_CODES[k]) for k, v in _BASE_CODES.items() if k in _WINDOWS_CODES)
def __init__(self, stderr=False):
_WindowsCSBI.initialize()
self.original_stream = sys.stderr if stderr else sys.stdout
self.win32_stream_handle = _WindowsCSBI.HANDLE_STDERR if stderr else _WindowsCSBI.HANDLE_STDOUT
self.default_fg, self.default_bg = self._get_colors()
def __getattr__(self, item):
"""If an attribute/function/etc is not defined in this function, retrieve the one from the original stream.
Fixes ipython arrow key presses.
"""
return getattr(self.original_stream, item)
def _get_colors(self):
"""Returns a tuple of two integers representing current colors: (foreground, background)."""
try:
csbi = _WindowsCSBI.get_info(self.win32_stream_handle)
return csbi['fg_color'], csbi['bg_color']
except IOError:
return 7, 0
def _reset_colors(self):
"""Sets the foreground and background colors to their original values (when class was instantiated)."""
self._set_color(-33)
def _set_color(self, color_code):
"""Changes the foreground and background colors for subsequently printed characters.
Since setting a color requires including both foreground and background codes (merged), setting just the
foreground color resets the background color to black, and vice versa.
This function first gets the current background and foreground colors, merges in the requested color code, and
sets the result.
However if we need to remove just the foreground color but leave the background color the same (or vice versa)
such as when {/red} is used, we must merge the default foreground color with the current background color. This
is the reason for those negative values.
Positional arguments:
color_code -- integer color code from _WINDOWS_CODES.
"""
# Get current color code.
current_fg, current_bg = self._get_colors()
# Handle special negative codes. Also determine the final color code.
if color_code == -39:
final_color_code = self.default_fg | current_bg # Reset the foreground only.
elif color_code == -49:
final_color_code = current_fg | self.default_bg # Reset the background only.
elif color_code == -33:
final_color_code = self.default_fg | self.default_bg # Reset both.
elif color_code == -8:
final_color_code = current_fg # Black background.
else:
new_is_bg = color_code in self.ALL_BG_CODES
final_color_code = color_code | (current_fg if new_is_bg else current_bg)
# Set new code.
_WindowsCSBI.WINDLL.kernel32.SetConsoleTextAttribute(self.win32_stream_handle, final_color_code)
def write(self, p_str):
for segment in _RE_SPLIT.split(p_str):
if not segment:
# Empty string. p_str probably starts with colors so the first item is always ''.
continue
if not _RE_SPLIT.match(segment):
# No color codes, print regular text.
self.original_stream.write(segment)
self.original_stream.flush()
continue
for color_code in (int(c) for c in _RE_NUMBER_SEARCH.findall(segment)[0].split(';')):
if color_code in self.COMPILED_CODES:
self._set_color(self.COMPILED_CODES[color_code])
|
hkff/FodtlMon
|
fodtlmon/tools/color.py
|
Windows.disable
|
python
|
def disable():
if os.name != 'nt' or not Windows.is_enabled():
return False
getattr(sys.stderr, '_reset_colors', lambda: False)()
getattr(sys.stdout, '_reset_colors', lambda: False)()
if isinstance(sys.stderr, _WindowsStream):
sys.stderr = getattr(sys.stderr, 'original_stream')
if isinstance(sys.stderr, _WindowsStream):
sys.stdout = getattr(sys.stdout, 'original_stream')
return True
|
Restore sys.stderr and sys.stdout to their original objects. Resets colors to their original values.
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/tools/color.py#L483-L496
| null |
class Windows(object):
"""Enable and disable Windows support for ANSI color character codes.
Call static method Windows.enable() to enable color support for the remainder of the process' lifetime.
This class is also a context manager. You can do this:
with Windows():
print(Color('{autored}Test{/autored}'))
Or this:
with Windows(auto_colors=True):
print(Color('{autored}Test{/autored}'))
"""
@staticmethod
@staticmethod
def is_enabled():
"""Returns True if either stderr or stdout has colors enabled."""
return isinstance(sys.stderr, _WindowsStream) or isinstance(sys.stdout, _WindowsStream)
@staticmethod
def enable(auto_colors=False, reset_atexit=False):
"""Enables color text with print() or sys.stdout.write() (stderr too).
Keyword arguments:
auto_colors -- automatically selects dark or light colors based on current terminal's background color. Only
works with {autored} and related tags.
reset_atexit -- resets original colors upon Python exit (in case you forget to reset it yourself with a closing
tag).
"""
if os.name != 'nt':
return False
# Overwrite stream references.
if not isinstance(sys.stderr, _WindowsStream):
sys.stderr.flush()
sys.stderr = _WindowsStream(stderr=True)
if not isinstance(sys.stdout, _WindowsStream):
sys.stdout.flush()
sys.stdout = _WindowsStream(stderr=False)
if not isinstance(sys.stderr, _WindowsStream) and not isinstance(sys.stdout, _WindowsStream):
return False
# Automatically select which colors to display.
bg_color = getattr(sys.stdout, 'default_bg', getattr(sys.stderr, 'default_bg', None))
if auto_colors and bg_color is not None:
set_light_background() if bg_color in (112, 96, 240, 176, 224, 208, 160) else set_dark_background()
# Reset on exit if requested.
if reset_atexit:
atexit.register(lambda: Windows.disable())
return True
def __init__(self, auto_colors=False):
self.auto_colors = auto_colors
def __enter__(self):
Windows.enable(auto_colors=self.auto_colors)
def __exit__(self, *_):
Windows.disable()
|
hkff/FodtlMon
|
fodtlmon/tools/color.py
|
Windows.enable
|
python
|
def enable(auto_colors=False, reset_atexit=False):
if os.name != 'nt':
return False
# Overwrite stream references.
if not isinstance(sys.stderr, _WindowsStream):
sys.stderr.flush()
sys.stderr = _WindowsStream(stderr=True)
if not isinstance(sys.stdout, _WindowsStream):
sys.stdout.flush()
sys.stdout = _WindowsStream(stderr=False)
if not isinstance(sys.stderr, _WindowsStream) and not isinstance(sys.stdout, _WindowsStream):
return False
# Automatically select which colors to display.
bg_color = getattr(sys.stdout, 'default_bg', getattr(sys.stderr, 'default_bg', None))
if auto_colors and bg_color is not None:
set_light_background() if bg_color in (112, 96, 240, 176, 224, 208, 160) else set_dark_background()
# Reset on exit if requested.
if reset_atexit:
atexit.register(lambda: Windows.disable())
return True
|
Enables color text with print() or sys.stdout.write() (stderr too).
Keyword arguments:
auto_colors -- automatically selects dark or light colors based on current terminal's background color. Only
works with {autored} and related tags.
reset_atexit -- resets original colors upon Python exit (in case you forget to reset it yourself with a closing
tag).
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/tools/color.py#L504-L535
| null |
class Windows(object):
"""Enable and disable Windows support for ANSI color character codes.
Call static method Windows.enable() to enable color support for the remainder of the process' lifetime.
This class is also a context manager. You can do this:
with Windows():
print(Color('{autored}Test{/autored}'))
Or this:
with Windows(auto_colors=True):
print(Color('{autored}Test{/autored}'))
"""
@staticmethod
def disable():
"""Restore sys.stderr and sys.stdout to their original objects. Resets colors to their original values."""
if os.name != 'nt' or not Windows.is_enabled():
return False
getattr(sys.stderr, '_reset_colors', lambda: False)()
getattr(sys.stdout, '_reset_colors', lambda: False)()
if isinstance(sys.stderr, _WindowsStream):
sys.stderr = getattr(sys.stderr, 'original_stream')
if isinstance(sys.stderr, _WindowsStream):
sys.stdout = getattr(sys.stdout, 'original_stream')
return True
@staticmethod
def is_enabled():
"""Returns True if either stderr or stdout has colors enabled."""
return isinstance(sys.stderr, _WindowsStream) or isinstance(sys.stdout, _WindowsStream)
@staticmethod
def __init__(self, auto_colors=False):
self.auto_colors = auto_colors
def __enter__(self):
Windows.enable(auto_colors=self.auto_colors)
def __exit__(self, *_):
Windows.disable()
|
hkff/FodtlMon
|
fodtlmon/tools/color.py
|
_WindowsStream._get_colors
|
python
|
def _get_colors(self):
try:
csbi = _WindowsCSBI.get_info(self.win32_stream_handle)
return csbi['fg_color'], csbi['bg_color']
except IOError:
return 7, 0
|
Returns a tuple of two integers representing current colors: (foreground, background).
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/tools/color.py#L693-L699
|
[
"def get_info(handle):\n \"\"\"Get information about this current console window (for Microsoft Windows only).\n\n Raises IOError if attempt to get information fails (if there is no console window).\n\n Don't forget to call _WindowsCSBI.initialize() once in your application before calling this method.\n\n Positional arguments:\n handle -- either _WindowsCSBI.HANDLE_STDERR or _WindowsCSBI.HANDLE_STDOUT.\n\n Returns:\n Dictionary with different integer values. Keys are:\n buffer_width -- width of the buffer (Screen Buffer Size in cmd.exe layout tab).\n buffer_height -- height of the buffer (Screen Buffer Size in cmd.exe layout tab).\n terminal_width -- width of the terminal window.\n terminal_height -- height of the terminal window.\n bg_color -- current background color (http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088).\n fg_color -- current text color code.\n \"\"\"\n # Query Win32 API.\n csbi = _WindowsCSBI.CSBI()\n try:\n if not _WindowsCSBI.WINDLL.kernel32.GetConsoleScreenBufferInfo(handle, ctypes.byref(csbi)):\n raise IOError('Unable to get console screen buffer info from win32 API.')\n except ctypes.ArgumentError:\n raise IOError('Unable to get console screen buffer info from win32 API.')\n\n # Parse data.\n result = dict(\n buffer_width=int(csbi.dwSize.X - 1),\n buffer_height=int(csbi.dwSize.Y),\n terminal_width=int(csbi.srWindow.Right - csbi.srWindow.Left),\n terminal_height=int(csbi.srWindow.Bottom - csbi.srWindow.Top),\n bg_color=int(csbi.wAttributes & 240),\n fg_color=int(csbi.wAttributes % 16),\n )\n return result\n"
] |
class _WindowsStream(object):
"""Replacement stream (overwrites sys.stdout and sys.stderr). When writing or printing, ANSI codes are converted.
ANSI (Linux/Unix) color codes are converted into win32 system calls, changing the next character's color before
printing it. Resources referenced:
https://github.com/tartley/colorama
http://www.cplusplus.com/articles/2ywTURfi/
http://thomasfischer.biz/python-and-windows-terminal-colors/
http://stackoverflow.com/questions/17125440/c-win32-console-color
http://www.tysos.org/svn/trunk/mono/corlib/System/WindowsConsoleDriver.cs
http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088#_win32_character_attributes
Class variables:
ALL_BG_CODES -- list of background Windows codes. Used to determine if requested color is foreground or background.
COMPILED_CODES -- 'translation' dictionary. Keys are ANSI codes (values of _BASE_CODES), values are Windows codes.
STD_ERROR_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
STD_OUTPUT_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
Instance variables:
original_stream -- the original stream to write non-code text to.
win32_stream_handle -- handle to the Windows stderr or stdout device. Used by other Windows functions.
default_fg -- the foreground Windows color code at the time of instantiation.
default_bg -- the background Windows color code at the time of instantiation.
"""
ALL_BG_CODES = [v for k, v in _WINDOWS_CODES.items() if k.startswith('bg') or k.startswith('hibg')]
COMPILED_CODES = dict((v, _WINDOWS_CODES[k]) for k, v in _BASE_CODES.items() if k in _WINDOWS_CODES)
def __init__(self, stderr=False):
_WindowsCSBI.initialize()
self.original_stream = sys.stderr if stderr else sys.stdout
self.win32_stream_handle = _WindowsCSBI.HANDLE_STDERR if stderr else _WindowsCSBI.HANDLE_STDOUT
self.default_fg, self.default_bg = self._get_colors()
def __getattr__(self, item):
"""If an attribute/function/etc is not defined in this function, retrieve the one from the original stream.
Fixes ipython arrow key presses.
"""
return getattr(self.original_stream, item)
def _reset_colors(self):
"""Sets the foreground and background colors to their original values (when class was instantiated)."""
self._set_color(-33)
def _set_color(self, color_code):
"""Changes the foreground and background colors for subsequently printed characters.
Since setting a color requires including both foreground and background codes (merged), setting just the
foreground color resets the background color to black, and vice versa.
This function first gets the current background and foreground colors, merges in the requested color code, and
sets the result.
However if we need to remove just the foreground color but leave the background color the same (or vice versa)
such as when {/red} is used, we must merge the default foreground color with the current background color. This
is the reason for those negative values.
Positional arguments:
color_code -- integer color code from _WINDOWS_CODES.
"""
# Get current color code.
current_fg, current_bg = self._get_colors()
# Handle special negative codes. Also determine the final color code.
if color_code == -39:
final_color_code = self.default_fg | current_bg # Reset the foreground only.
elif color_code == -49:
final_color_code = current_fg | self.default_bg # Reset the background only.
elif color_code == -33:
final_color_code = self.default_fg | self.default_bg # Reset both.
elif color_code == -8:
final_color_code = current_fg # Black background.
else:
new_is_bg = color_code in self.ALL_BG_CODES
final_color_code = color_code | (current_fg if new_is_bg else current_bg)
# Set new code.
_WindowsCSBI.WINDLL.kernel32.SetConsoleTextAttribute(self.win32_stream_handle, final_color_code)
def write(self, p_str):
for segment in _RE_SPLIT.split(p_str):
if not segment:
# Empty string. p_str probably starts with colors so the first item is always ''.
continue
if not _RE_SPLIT.match(segment):
# No color codes, print regular text.
self.original_stream.write(segment)
self.original_stream.flush()
continue
for color_code in (int(c) for c in _RE_NUMBER_SEARCH.findall(segment)[0].split(';')):
if color_code in self.COMPILED_CODES:
self._set_color(self.COMPILED_CODES[color_code])
|
hkff/FodtlMon
|
fodtlmon/tools/color.py
|
_WindowsStream._set_color
|
python
|
def _set_color(self, color_code):
# Get current color code.
current_fg, current_bg = self._get_colors()
# Handle special negative codes. Also determine the final color code.
if color_code == -39:
final_color_code = self.default_fg | current_bg # Reset the foreground only.
elif color_code == -49:
final_color_code = current_fg | self.default_bg # Reset the background only.
elif color_code == -33:
final_color_code = self.default_fg | self.default_bg # Reset both.
elif color_code == -8:
final_color_code = current_fg # Black background.
else:
new_is_bg = color_code in self.ALL_BG_CODES
final_color_code = color_code | (current_fg if new_is_bg else current_bg)
# Set new code.
_WindowsCSBI.WINDLL.kernel32.SetConsoleTextAttribute(self.win32_stream_handle, final_color_code)
|
Changes the foreground and background colors for subsequently printed characters.
Since setting a color requires including both foreground and background codes (merged), setting just the
foreground color resets the background color to black, and vice versa.
This function first gets the current background and foreground colors, merges in the requested color code, and
sets the result.
However if we need to remove just the foreground color but leave the background color the same (or vice versa)
such as when {/red} is used, we must merge the default foreground color with the current background color. This
is the reason for those negative values.
Positional arguments:
color_code -- integer color code from _WINDOWS_CODES.
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/tools/color.py#L705-L738
| null |
class _WindowsStream(object):
"""Replacement stream (overwrites sys.stdout and sys.stderr). When writing or printing, ANSI codes are converted.
ANSI (Linux/Unix) color codes are converted into win32 system calls, changing the next character's color before
printing it. Resources referenced:
https://github.com/tartley/colorama
http://www.cplusplus.com/articles/2ywTURfi/
http://thomasfischer.biz/python-and-windows-terminal-colors/
http://stackoverflow.com/questions/17125440/c-win32-console-color
http://www.tysos.org/svn/trunk/mono/corlib/System/WindowsConsoleDriver.cs
http://stackoverflow.com/questions/287871/print-in-terminal-with-colors-using-python
http://msdn.microsoft.com/en-us/library/windows/desktop/ms682088#_win32_character_attributes
Class variables:
ALL_BG_CODES -- list of background Windows codes. Used to determine if requested color is foreground or background.
COMPILED_CODES -- 'translation' dictionary. Keys are ANSI codes (values of _BASE_CODES), values are Windows codes.
STD_ERROR_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
STD_OUTPUT_HANDLE -- http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
Instance variables:
original_stream -- the original stream to write non-code text to.
win32_stream_handle -- handle to the Windows stderr or stdout device. Used by other Windows functions.
default_fg -- the foreground Windows color code at the time of instantiation.
default_bg -- the background Windows color code at the time of instantiation.
"""
ALL_BG_CODES = [v for k, v in _WINDOWS_CODES.items() if k.startswith('bg') or k.startswith('hibg')]
COMPILED_CODES = dict((v, _WINDOWS_CODES[k]) for k, v in _BASE_CODES.items() if k in _WINDOWS_CODES)
def __init__(self, stderr=False):
_WindowsCSBI.initialize()
self.original_stream = sys.stderr if stderr else sys.stdout
self.win32_stream_handle = _WindowsCSBI.HANDLE_STDERR if stderr else _WindowsCSBI.HANDLE_STDOUT
self.default_fg, self.default_bg = self._get_colors()
def __getattr__(self, item):
"""If an attribute/function/etc is not defined in this function, retrieve the one from the original stream.
Fixes ipython arrow key presses.
"""
return getattr(self.original_stream, item)
def _get_colors(self):
"""Returns a tuple of two integers representing current colors: (foreground, background)."""
try:
csbi = _WindowsCSBI.get_info(self.win32_stream_handle)
return csbi['fg_color'], csbi['bg_color']
except IOError:
return 7, 0
def _reset_colors(self):
"""Sets the foreground and background colors to their original values (when class was instantiated)."""
self._set_color(-33)
def write(self, p_str):
for segment in _RE_SPLIT.split(p_str):
if not segment:
# Empty string. p_str probably starts with colors so the first item is always ''.
continue
if not _RE_SPLIT.match(segment):
# No color codes, print regular text.
self.original_stream.write(segment)
self.original_stream.flush()
continue
for color_code in (int(c) for c in _RE_NUMBER_SEARCH.findall(segment)[0].split(';')):
if color_code in self.COMPILED_CODES:
self._set_color(self.COMPILED_CODES[color_code])
|
hkff/FodtlMon
|
fodtlmon/dtl/systemd.py
|
Actor.run
|
python
|
def run(self, once=True):
print("\n- Actor %s " % self.name)
for m in self.submons:
res = m.monitor(once=once)
print(" | Submonitor %s : %s" % (self.name, res))
# print("%s %s %s %s lst %s" % (self.name, m.fid, m.last, m.counter, m.last))
self.get_kv().update(KVector.Entry(m.fid, agent=self.name, value=m.last, timestamp=m.counter))
res = self.monitor.monitor(once=once)
print(" Main monitor %s : %s" % (self.name, res))
|
Run main monitor and all sub monitors
:param : once
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/dtl/systemd.py#L93-L106
|
[
"def get_kv(self):\n return self.monitor.get_kv()\n"
] |
class Actor:
"""
Actor class
"""
class Event:
"""
Internal actor event
in : actor-> event coming from actor
out : ->actor sending event to actor
"""
class EventType(Enum):
IN = "in",
OUT = "out"
EMPTY = "EMPTY"
def __init__(self, target="", e_type=EventType.EMPTY):
self.target = target
self.e_type = e_type
def __str__(self):
if self.e_type is Actor.Event.EventType.IN:
return "%s->" % self.target
elif self.e_type is Actor.Event.EventType.OUT:
return "->%s" % self.target
else:
return "->"
@staticmethod
def parse(ste):
ste = ste.strip()
if ste == "" or ste == "->":
return Actor.Event(e_type=Actor.Event.EventType.EMPTY)
else:
res = ste.split("->")
if len(res) < 2:
raise Exception("Malformed Actor internal event !")
if res[0] == "":
return Actor.Event(target=res[1], e_type=Actor.Event.EventType.OUT)
else:
return Actor.Event(target=res[0], e_type=Actor.Event.EventType.IN)
def __init__(self, name="", formula=None, trace=None, events=None, speed=1):
self.name = name
self.formula = formula
self.trace = trace
self.monitor = None
self.submons = []
self.events = [] if events is None else events
self.speed = speed
def __str__(self):
evs = "[%s]" % ",".join(str(e) for e in self.events)
return "{%s; %s; %s; %s; %s; %s}" % (self.name, self.formula, self.trace, self.monitor, self.submons, evs)
def update_kv(self, kv):
"""
Update the KV of the main monitor
:param kv:
:return:
"""
self.monitor.update_kv(kv)
def get_kv(self):
return self.monitor.get_kv()
# print(self.get_kv())
def push_event(self, event):
self.monitor.push_event(event)
|
hkff/FodtlMon
|
fodtlmon/dtl/systemd.py
|
System.add_actors
|
python
|
def add_actors(self, actor):
if isinstance(actor, list):
self.actors.extend(actor)
elif isinstance(actor, Actor):
self.actors.append(actor)
return self
|
Add an actor / actor list to the system's actors
:param actor: Actor | list<Actor>
:return: self
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/dtl/systemd.py#L137-L147
| null |
class System:
"""
Distributed system representation
"""
def __init__(self, actors=None, kv_implementation=KVector):
"""
Init the system
coms is a dictionary that contains
:param actors: actors list
:param kv_implementation: the Knowledge vector implementation (IKVector)
:return:
"""
self.actors = [] if actors is None else actors
self.mons = []
self.turn = 0
self.kv_implementation = kv_implementation
self.coms = {}
def __str__(self):
return " | ".join([str(a) for a in self.actors])
def get_actor(self, name):
"""
Get an actor by name
:param name: str
:return:
"""
return next((x for x in self.actors if x.name == name), None)
def generate_monitors(self):
"""
Generate monitors for each actor in the system
:return:
"""
submons = []
for a in self.actors:
# Get all remote formula
remotes = a.formula.walk(filter_type=At)
# Compute formula hash
for f in remotes:
f.compute_hash(sid=a.name)
# Create the global monitor for the actor
a.monitor = Dtlmon(a.formula, a.trace)
# Create the remote sub monitors for each @Formula
for f in remotes:
remote_actor = self.get_actor(f.agent)
remote_actor.submons.append(Dtlmon(formula=f.inner, trace=remote_actor.trace, parent=remote_actor.monitor, fid=f.fid))
submons.append({"fid": f.fid, "actor": remote_actor.name})
# Create the com entry in the system
for a2 in self.actors:
self.coms["%s->%s" % (a.name, a2.name)] = []
self.coms["%s->%s" % (a2.name, a.name)] = []
# Create the general KV structure
kv = self.kv_implementation()
for m in submons:
kv.add_entry(self.kv_implementation.Entry(m["fid"], agent=m["actor"], value=Boolean3.Unknown, timestamp=0))
# Add a copy of KV structure for each actor
for a in self.actors:
a.monitor.KV = copy.deepcopy(kv)
def run(self, once=True):
"""
Run the system
:param once
:return:
"""
print(Color("{autored}\n====== System round %s ======{/red}" % self.turn))
print(Color("{autoblue}== Updating actors events...{/blue}"))
# Handling OUT messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.OUT:
# register
# print("Sending from %s to %s %s" % (a.name, e.target, a.get_kv()))
if e.target is "*":
print("Broadcasting")
for ac in self.actors:
self.coms["%s->%s" % (a.name, ac.name)].append(copy.deepcopy(a.get_kv()))
else:
self.coms["%s->%s" % (a.name, e.target)].append(copy.deepcopy(a.get_kv()))
# Handling IN messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.IN:
# Update KV and check pop the send
print("%s received a message from %s ..." % (a.name, e.target))
if len(self.coms["%s->%s" % (e.target, a.name)]) > 0:
a.update_kv(self.coms["%s->%s" % (e.target, a.name)].pop(0))
print(" - IN %s %s" % (a.name, a.get_kv()))
else:
print(" - Error %s trying to receive a message from %s that was not sent by %s" % (a.name, e.target, e.target))
print(Color("{autoblue}\n== Running monitors on each actor...{/blue}"))
for a in self.actors:
for i in range(a.speed):
a.run(once=once)
self.turn += 1
def update_events(self, e):
"""
Update KV of each actor
:param e:
:return:
"""
for a in self.actors:
a.update_kv(e)
@staticmethod
def parseJSON(js):
"""
{
kv_type : "",
type : "",
actors : <Actors list>
[
{
actorName : <String>,
formula: <String>,
events: ["->b", "b->"],
trace: [],
speed: 1,2,3...
}
]
}
:param json:
:return:
"""
decoded = json.JSONDecoder().decode(js)
actors = decoded.get("actors")
if actors is None:
raise Exception("No actors found in the system !")
s = System()
for a in actors:
# Getting actor info
a_name = a.get("name")
a_formula = a.get("formula")
a_trace = a.get("trace")
sa_events = a.get("events")
a_events = []
a_speed = 1 if a.get("speed") is None else int(a["speed"])
# Parsing actor info
for e in sa_events:
tmp = e.split("|")
tmp2 = []
for x in tmp:
tmp2.append(Actor.Event.parse(x))
a_events.append(tmp2)
a_formula = eval(a_formula)
a_trace = Trace.parse(a_trace)
# Creating the actor
actor = Actor(name=a_name, formula=a_formula, trace=a_trace, events=a_events, speed=a_speed)
# Add actor to the system
s.add_actors(actor)
s.generate_monitors()
return s
|
hkff/FodtlMon
|
fodtlmon/dtl/systemd.py
|
System.get_actor
|
python
|
def get_actor(self, name):
return next((x for x in self.actors if x.name == name), None)
|
Get an actor by name
:param name: str
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/dtl/systemd.py#L149-L155
| null |
class System:
"""
Distributed system representation
"""
def __init__(self, actors=None, kv_implementation=KVector):
"""
Init the system
coms is a dictionary that contains
:param actors: actors list
:param kv_implementation: the Knowledge vector implementation (IKVector)
:return:
"""
self.actors = [] if actors is None else actors
self.mons = []
self.turn = 0
self.kv_implementation = kv_implementation
self.coms = {}
def __str__(self):
return " | ".join([str(a) for a in self.actors])
def add_actors(self, actor):
"""
Add an actor / actor list to the system's actors
:param actor: Actor | list<Actor>
:return: self
"""
if isinstance(actor, list):
self.actors.extend(actor)
elif isinstance(actor, Actor):
self.actors.append(actor)
return self
def generate_monitors(self):
"""
Generate monitors for each actor in the system
:return:
"""
submons = []
for a in self.actors:
# Get all remote formula
remotes = a.formula.walk(filter_type=At)
# Compute formula hash
for f in remotes:
f.compute_hash(sid=a.name)
# Create the global monitor for the actor
a.monitor = Dtlmon(a.formula, a.trace)
# Create the remote sub monitors for each @Formula
for f in remotes:
remote_actor = self.get_actor(f.agent)
remote_actor.submons.append(Dtlmon(formula=f.inner, trace=remote_actor.trace, parent=remote_actor.monitor, fid=f.fid))
submons.append({"fid": f.fid, "actor": remote_actor.name})
# Create the com entry in the system
for a2 in self.actors:
self.coms["%s->%s" % (a.name, a2.name)] = []
self.coms["%s->%s" % (a2.name, a.name)] = []
# Create the general KV structure
kv = self.kv_implementation()
for m in submons:
kv.add_entry(self.kv_implementation.Entry(m["fid"], agent=m["actor"], value=Boolean3.Unknown, timestamp=0))
# Add a copy of KV structure for each actor
for a in self.actors:
a.monitor.KV = copy.deepcopy(kv)
def run(self, once=True):
"""
Run the system
:param once
:return:
"""
print(Color("{autored}\n====== System round %s ======{/red}" % self.turn))
print(Color("{autoblue}== Updating actors events...{/blue}"))
# Handling OUT messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.OUT:
# register
# print("Sending from %s to %s %s" % (a.name, e.target, a.get_kv()))
if e.target is "*":
print("Broadcasting")
for ac in self.actors:
self.coms["%s->%s" % (a.name, ac.name)].append(copy.deepcopy(a.get_kv()))
else:
self.coms["%s->%s" % (a.name, e.target)].append(copy.deepcopy(a.get_kv()))
# Handling IN messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.IN:
# Update KV and check pop the send
print("%s received a message from %s ..." % (a.name, e.target))
if len(self.coms["%s->%s" % (e.target, a.name)]) > 0:
a.update_kv(self.coms["%s->%s" % (e.target, a.name)].pop(0))
print(" - IN %s %s" % (a.name, a.get_kv()))
else:
print(" - Error %s trying to receive a message from %s that was not sent by %s" % (a.name, e.target, e.target))
print(Color("{autoblue}\n== Running monitors on each actor...{/blue}"))
for a in self.actors:
for i in range(a.speed):
a.run(once=once)
self.turn += 1
def update_events(self, e):
"""
Update KV of each actor
:param e:
:return:
"""
for a in self.actors:
a.update_kv(e)
@staticmethod
def parseJSON(js):
"""
{
kv_type : "",
type : "",
actors : <Actors list>
[
{
actorName : <String>,
formula: <String>,
events: ["->b", "b->"],
trace: [],
speed: 1,2,3...
}
]
}
:param json:
:return:
"""
decoded = json.JSONDecoder().decode(js)
actors = decoded.get("actors")
if actors is None:
raise Exception("No actors found in the system !")
s = System()
for a in actors:
# Getting actor info
a_name = a.get("name")
a_formula = a.get("formula")
a_trace = a.get("trace")
sa_events = a.get("events")
a_events = []
a_speed = 1 if a.get("speed") is None else int(a["speed"])
# Parsing actor info
for e in sa_events:
tmp = e.split("|")
tmp2 = []
for x in tmp:
tmp2.append(Actor.Event.parse(x))
a_events.append(tmp2)
a_formula = eval(a_formula)
a_trace = Trace.parse(a_trace)
# Creating the actor
actor = Actor(name=a_name, formula=a_formula, trace=a_trace, events=a_events, speed=a_speed)
# Add actor to the system
s.add_actors(actor)
s.generate_monitors()
return s
|
hkff/FodtlMon
|
fodtlmon/dtl/systemd.py
|
System.generate_monitors
|
python
|
def generate_monitors(self):
submons = []
for a in self.actors:
# Get all remote formula
remotes = a.formula.walk(filter_type=At)
# Compute formula hash
for f in remotes:
f.compute_hash(sid=a.name)
# Create the global monitor for the actor
a.monitor = Dtlmon(a.formula, a.trace)
# Create the remote sub monitors for each @Formula
for f in remotes:
remote_actor = self.get_actor(f.agent)
remote_actor.submons.append(Dtlmon(formula=f.inner, trace=remote_actor.trace, parent=remote_actor.monitor, fid=f.fid))
submons.append({"fid": f.fid, "actor": remote_actor.name})
# Create the com entry in the system
for a2 in self.actors:
self.coms["%s->%s" % (a.name, a2.name)] = []
self.coms["%s->%s" % (a2.name, a.name)] = []
# Create the general KV structure
kv = self.kv_implementation()
for m in submons:
kv.add_entry(self.kv_implementation.Entry(m["fid"], agent=m["actor"], value=Boolean3.Unknown, timestamp=0))
# Add a copy of KV structure for each actor
for a in self.actors:
a.monitor.KV = copy.deepcopy(kv)
|
Generate monitors for each actor in the system
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/dtl/systemd.py#L157-L191
|
[
"def get_actor(self, name):\n \"\"\"\n Get an actor by name\n :param name: str\n :return:\n \"\"\"\n return next((x for x in self.actors if x.name == name), None)\n"
] |
class System:
"""
Distributed system representation
"""
def __init__(self, actors=None, kv_implementation=KVector):
"""
Init the system
coms is a dictionary that contains
:param actors: actors list
:param kv_implementation: the Knowledge vector implementation (IKVector)
:return:
"""
self.actors = [] if actors is None else actors
self.mons = []
self.turn = 0
self.kv_implementation = kv_implementation
self.coms = {}
def __str__(self):
return " | ".join([str(a) for a in self.actors])
def add_actors(self, actor):
"""
Add an actor / actor list to the system's actors
:param actor: Actor | list<Actor>
:return: self
"""
if isinstance(actor, list):
self.actors.extend(actor)
elif isinstance(actor, Actor):
self.actors.append(actor)
return self
def get_actor(self, name):
"""
Get an actor by name
:param name: str
:return:
"""
return next((x for x in self.actors if x.name == name), None)
def run(self, once=True):
"""
Run the system
:param once
:return:
"""
print(Color("{autored}\n====== System round %s ======{/red}" % self.turn))
print(Color("{autoblue}== Updating actors events...{/blue}"))
# Handling OUT messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.OUT:
# register
# print("Sending from %s to %s %s" % (a.name, e.target, a.get_kv()))
if e.target is "*":
print("Broadcasting")
for ac in self.actors:
self.coms["%s->%s" % (a.name, ac.name)].append(copy.deepcopy(a.get_kv()))
else:
self.coms["%s->%s" % (a.name, e.target)].append(copy.deepcopy(a.get_kv()))
# Handling IN messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.IN:
# Update KV and check pop the send
print("%s received a message from %s ..." % (a.name, e.target))
if len(self.coms["%s->%s" % (e.target, a.name)]) > 0:
a.update_kv(self.coms["%s->%s" % (e.target, a.name)].pop(0))
print(" - IN %s %s" % (a.name, a.get_kv()))
else:
print(" - Error %s trying to receive a message from %s that was not sent by %s" % (a.name, e.target, e.target))
print(Color("{autoblue}\n== Running monitors on each actor...{/blue}"))
for a in self.actors:
for i in range(a.speed):
a.run(once=once)
self.turn += 1
def update_events(self, e):
"""
Update KV of each actor
:param e:
:return:
"""
for a in self.actors:
a.update_kv(e)
@staticmethod
def parseJSON(js):
"""
{
kv_type : "",
type : "",
actors : <Actors list>
[
{
actorName : <String>,
formula: <String>,
events: ["->b", "b->"],
trace: [],
speed: 1,2,3...
}
]
}
:param json:
:return:
"""
decoded = json.JSONDecoder().decode(js)
actors = decoded.get("actors")
if actors is None:
raise Exception("No actors found in the system !")
s = System()
for a in actors:
# Getting actor info
a_name = a.get("name")
a_formula = a.get("formula")
a_trace = a.get("trace")
sa_events = a.get("events")
a_events = []
a_speed = 1 if a.get("speed") is None else int(a["speed"])
# Parsing actor info
for e in sa_events:
tmp = e.split("|")
tmp2 = []
for x in tmp:
tmp2.append(Actor.Event.parse(x))
a_events.append(tmp2)
a_formula = eval(a_formula)
a_trace = Trace.parse(a_trace)
# Creating the actor
actor = Actor(name=a_name, formula=a_formula, trace=a_trace, events=a_events, speed=a_speed)
# Add actor to the system
s.add_actors(actor)
s.generate_monitors()
return s
|
hkff/FodtlMon
|
fodtlmon/dtl/systemd.py
|
System.run
|
python
|
def run(self, once=True):
print(Color("{autored}\n====== System round %s ======{/red}" % self.turn))
print(Color("{autoblue}== Updating actors events...{/blue}"))
# Handling OUT messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.OUT:
# register
# print("Sending from %s to %s %s" % (a.name, e.target, a.get_kv()))
if e.target is "*":
print("Broadcasting")
for ac in self.actors:
self.coms["%s->%s" % (a.name, ac.name)].append(copy.deepcopy(a.get_kv()))
else:
self.coms["%s->%s" % (a.name, e.target)].append(copy.deepcopy(a.get_kv()))
# Handling IN messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.IN:
# Update KV and check pop the send
print("%s received a message from %s ..." % (a.name, e.target))
if len(self.coms["%s->%s" % (e.target, a.name)]) > 0:
a.update_kv(self.coms["%s->%s" % (e.target, a.name)].pop(0))
print(" - IN %s %s" % (a.name, a.get_kv()))
else:
print(" - Error %s trying to receive a message from %s that was not sent by %s" % (a.name, e.target, e.target))
print(Color("{autoblue}\n== Running monitors on each actor...{/blue}"))
for a in self.actors:
for i in range(a.speed):
a.run(once=once)
self.turn += 1
|
Run the system
:param once
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/dtl/systemd.py#L193-L234
| null |
class System:
"""
Distributed system representation
"""
def __init__(self, actors=None, kv_implementation=KVector):
"""
Init the system
coms is a dictionary that contains
:param actors: actors list
:param kv_implementation: the Knowledge vector implementation (IKVector)
:return:
"""
self.actors = [] if actors is None else actors
self.mons = []
self.turn = 0
self.kv_implementation = kv_implementation
self.coms = {}
def __str__(self):
return " | ".join([str(a) for a in self.actors])
def add_actors(self, actor):
"""
Add an actor / actor list to the system's actors
:param actor: Actor | list<Actor>
:return: self
"""
if isinstance(actor, list):
self.actors.extend(actor)
elif isinstance(actor, Actor):
self.actors.append(actor)
return self
def get_actor(self, name):
"""
Get an actor by name
:param name: str
:return:
"""
return next((x for x in self.actors if x.name == name), None)
def generate_monitors(self):
"""
Generate monitors for each actor in the system
:return:
"""
submons = []
for a in self.actors:
# Get all remote formula
remotes = a.formula.walk(filter_type=At)
# Compute formula hash
for f in remotes:
f.compute_hash(sid=a.name)
# Create the global monitor for the actor
a.monitor = Dtlmon(a.formula, a.trace)
# Create the remote sub monitors for each @Formula
for f in remotes:
remote_actor = self.get_actor(f.agent)
remote_actor.submons.append(Dtlmon(formula=f.inner, trace=remote_actor.trace, parent=remote_actor.monitor, fid=f.fid))
submons.append({"fid": f.fid, "actor": remote_actor.name})
# Create the com entry in the system
for a2 in self.actors:
self.coms["%s->%s" % (a.name, a2.name)] = []
self.coms["%s->%s" % (a2.name, a.name)] = []
# Create the general KV structure
kv = self.kv_implementation()
for m in submons:
kv.add_entry(self.kv_implementation.Entry(m["fid"], agent=m["actor"], value=Boolean3.Unknown, timestamp=0))
# Add a copy of KV structure for each actor
for a in self.actors:
a.monitor.KV = copy.deepcopy(kv)
def update_events(self, e):
"""
Update KV of each actor
:param e:
:return:
"""
for a in self.actors:
a.update_kv(e)
@staticmethod
def parseJSON(js):
"""
{
kv_type : "",
type : "",
actors : <Actors list>
[
{
actorName : <String>,
formula: <String>,
events: ["->b", "b->"],
trace: [],
speed: 1,2,3...
}
]
}
:param json:
:return:
"""
decoded = json.JSONDecoder().decode(js)
actors = decoded.get("actors")
if actors is None:
raise Exception("No actors found in the system !")
s = System()
for a in actors:
# Getting actor info
a_name = a.get("name")
a_formula = a.get("formula")
a_trace = a.get("trace")
sa_events = a.get("events")
a_events = []
a_speed = 1 if a.get("speed") is None else int(a["speed"])
# Parsing actor info
for e in sa_events:
tmp = e.split("|")
tmp2 = []
for x in tmp:
tmp2.append(Actor.Event.parse(x))
a_events.append(tmp2)
a_formula = eval(a_formula)
a_trace = Trace.parse(a_trace)
# Creating the actor
actor = Actor(name=a_name, formula=a_formula, trace=a_trace, events=a_events, speed=a_speed)
# Add actor to the system
s.add_actors(actor)
s.generate_monitors()
return s
|
hkff/FodtlMon
|
fodtlmon/dtl/systemd.py
|
System.parseJSON
|
python
|
def parseJSON(js):
decoded = json.JSONDecoder().decode(js)
actors = decoded.get("actors")
if actors is None:
raise Exception("No actors found in the system !")
s = System()
for a in actors:
# Getting actor info
a_name = a.get("name")
a_formula = a.get("formula")
a_trace = a.get("trace")
sa_events = a.get("events")
a_events = []
a_speed = 1 if a.get("speed") is None else int(a["speed"])
# Parsing actor info
for e in sa_events:
tmp = e.split("|")
tmp2 = []
for x in tmp:
tmp2.append(Actor.Event.parse(x))
a_events.append(tmp2)
a_formula = eval(a_formula)
a_trace = Trace.parse(a_trace)
# Creating the actor
actor = Actor(name=a_name, formula=a_formula, trace=a_trace, events=a_events, speed=a_speed)
# Add actor to the system
s.add_actors(actor)
s.generate_monitors()
return s
|
{
kv_type : "",
type : "",
actors : <Actors list>
[
{
actorName : <String>,
formula: <String>,
events: ["->b", "b->"],
trace: [],
speed: 1,2,3...
}
]
}
:param json:
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/dtl/systemd.py#L246-L295
|
[
"def add_actors(self, actor):\n \"\"\"\n Add an actor / actor list to the system's actors\n :param actor: Actor | list<Actor>\n :return: self\n \"\"\"\n if isinstance(actor, list):\n self.actors.extend(actor)\n elif isinstance(actor, Actor):\n self.actors.append(actor)\n return self\n",
"def generate_monitors(self):\n \"\"\"\n Generate monitors for each actor in the system\n :return:\n \"\"\"\n submons = []\n for a in self.actors:\n # Get all remote formula\n remotes = a.formula.walk(filter_type=At)\n # Compute formula hash\n for f in remotes:\n f.compute_hash(sid=a.name)\n\n # Create the global monitor for the actor\n a.monitor = Dtlmon(a.formula, a.trace)\n\n # Create the remote sub monitors for each @Formula\n for f in remotes:\n remote_actor = self.get_actor(f.agent)\n remote_actor.submons.append(Dtlmon(formula=f.inner, trace=remote_actor.trace, parent=remote_actor.monitor, fid=f.fid))\n submons.append({\"fid\": f.fid, \"actor\": remote_actor.name})\n\n # Create the com entry in the system\n for a2 in self.actors:\n self.coms[\"%s->%s\" % (a.name, a2.name)] = []\n self.coms[\"%s->%s\" % (a2.name, a.name)] = []\n\n # Create the general KV structure\n kv = self.kv_implementation()\n for m in submons:\n kv.add_entry(self.kv_implementation.Entry(m[\"fid\"], agent=m[\"actor\"], value=Boolean3.Unknown, timestamp=0))\n\n # Add a copy of KV structure for each actor\n for a in self.actors:\n a.monitor.KV = copy.deepcopy(kv)\n"
] |
class System:
"""
Distributed system representation
"""
def __init__(self, actors=None, kv_implementation=KVector):
"""
Init the system
coms is a dictionary that contains
:param actors: actors list
:param kv_implementation: the Knowledge vector implementation (IKVector)
:return:
"""
self.actors = [] if actors is None else actors
self.mons = []
self.turn = 0
self.kv_implementation = kv_implementation
self.coms = {}
def __str__(self):
return " | ".join([str(a) for a in self.actors])
def add_actors(self, actor):
"""
Add an actor / actor list to the system's actors
:param actor: Actor | list<Actor>
:return: self
"""
if isinstance(actor, list):
self.actors.extend(actor)
elif isinstance(actor, Actor):
self.actors.append(actor)
return self
def get_actor(self, name):
"""
Get an actor by name
:param name: str
:return:
"""
return next((x for x in self.actors if x.name == name), None)
def generate_monitors(self):
"""
Generate monitors for each actor in the system
:return:
"""
submons = []
for a in self.actors:
# Get all remote formula
remotes = a.formula.walk(filter_type=At)
# Compute formula hash
for f in remotes:
f.compute_hash(sid=a.name)
# Create the global monitor for the actor
a.monitor = Dtlmon(a.formula, a.trace)
# Create the remote sub monitors for each @Formula
for f in remotes:
remote_actor = self.get_actor(f.agent)
remote_actor.submons.append(Dtlmon(formula=f.inner, trace=remote_actor.trace, parent=remote_actor.monitor, fid=f.fid))
submons.append({"fid": f.fid, "actor": remote_actor.name})
# Create the com entry in the system
for a2 in self.actors:
self.coms["%s->%s" % (a.name, a2.name)] = []
self.coms["%s->%s" % (a2.name, a.name)] = []
# Create the general KV structure
kv = self.kv_implementation()
for m in submons:
kv.add_entry(self.kv_implementation.Entry(m["fid"], agent=m["actor"], value=Boolean3.Unknown, timestamp=0))
# Add a copy of KV structure for each actor
for a in self.actors:
a.monitor.KV = copy.deepcopy(kv)
def run(self, once=True):
"""
Run the system
:param once
:return:
"""
print(Color("{autored}\n====== System round %s ======{/red}" % self.turn))
print(Color("{autoblue}== Updating actors events...{/blue}"))
# Handling OUT messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.OUT:
# register
# print("Sending from %s to %s %s" % (a.name, e.target, a.get_kv()))
if e.target is "*":
print("Broadcasting")
for ac in self.actors:
self.coms["%s->%s" % (a.name, ac.name)].append(copy.deepcopy(a.get_kv()))
else:
self.coms["%s->%s" % (a.name, e.target)].append(copy.deepcopy(a.get_kv()))
# Handling IN messages
for a in self.actors:
if self.turn < len(a.events):
es = a.events[self.turn]
for e in es:
if e.e_type == Actor.Event.EventType.IN:
# Update KV and check pop the send
print("%s received a message from %s ..." % (a.name, e.target))
if len(self.coms["%s->%s" % (e.target, a.name)]) > 0:
a.update_kv(self.coms["%s->%s" % (e.target, a.name)].pop(0))
print(" - IN %s %s" % (a.name, a.get_kv()))
else:
print(" - Error %s trying to receive a message from %s that was not sent by %s" % (a.name, e.target, e.target))
print(Color("{autoblue}\n== Running monitors on each actor...{/blue}"))
for a in self.actors:
for i in range(a.speed):
a.run(once=once)
self.turn += 1
def update_events(self, e):
"""
Update KV of each actor
:param e:
:return:
"""
for a in self.actors:
a.update_kv(e)
@staticmethod
|
hkff/FodtlMon
|
fodtlmon/ltl/ltl.py
|
B3
|
python
|
def B3(formula):
if isinstance(formula, true) or formula is True or formula == Boolean3.Top.name or formula == Boolean3.Top.value:
return Boolean3.Top
if isinstance(formula, false) or formula is False or formula == Boolean3.Bottom.name or formula == Boolean3.Bottom.value:
return Boolean3.Bottom
else:
return Boolean3.Unknown
|
Rewrite formula eval result into Boolean3
:param formula:
:return: Boolean3
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/ltl/ltl.py#L626-L637
| null |
"""
ltl
Copyright (C) 2015 Walid Benghabrit
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
__author__ = 'walid'
from enum import Enum
#############################
# Abstract operators
#############################
class Formula:
"""
Abstract formula
"""
symbol = ""
tspass = ""
ltlfo = ""
code = ""
def toTSPASS(self):
return str(self)
def toLTLFO(self):
return str(self)
def prefix_print(self):
return str(self)
def toCODE(self):
return self.__class__.__name__ + "()"
def reduce(self):
pass
def eval(self):
return self
def clos(self):
pass
def nnf(self):
pass
def and_(self, exp):
return And(self, exp)
def or_(self, exp):
return Or(self, exp)
def size(self):
return 1 + sum([s.size() for s in self.children()])
def children(self):
return []
def walk(self, filters: str=None, filter_type: type=None, pprint=False, depth=-1):
"""
Iterate tree in pre-order wide-first search order
:param filters: filter by python expression
:param filter_type: Filter by class
:return:
"""
children = self.children()
if children is None:
children = []
res = []
if depth == 0:
return res
elif depth != -1:
depth -= 1
for child in children:
if isinstance(child, Formula):
tmp = child.walk(filters=filters, filter_type=filter_type, pprint=pprint, depth=depth)
if tmp:
res.extend(tmp)
if filter_type is None:
if filters is not None:
if eval(filters) is True:
res.append(self)
else:
res.append(self)
elif isinstance(self, filter_type):
if filters is not None:
if eval(filters) is True:
res.append(self)
else:
res.append(self)
if pprint:
res = [str(x) + " " for x in res]
res = "\n".join(res)
return res
class Exp(Formula):
pass
class Atom(Exp):
"""
Atom
"""
symbol = ""
def __str__(self):
return str(self.symbol)
class true(Atom):
"""
True
"""
symbol = "true"
def eval(self):
return true()
# def and_(self, exp):
# if isinstance(exp, true): return true()
# elif isinstance(exp, false): return false()
# else: return exp
#
# def or_(self, exp):
# return self
class false(Atom):
"""
False
"""
symbol = "false"
def eval(self):
return false()
# def and_(self, exp):
# return self
#
# def or_(self, exp):
# if isinstance(exp, true): return true()
# elif isinstance(exp, false): return false()
# else: return exp
class Parameter(Exp):
"""
Parameter
"""
def __init__(self, name=""):
self.name = str(name)
def __str__(self):
return "%s" % self.name
def equal(self, o):
return (o is not None) and isinstance(o, Parameter) and (o.name == self.name)
def toCODE(self):
return "%s('%s')" % (self.__class__.__name__, self.name)
@staticmethod
def parse(string: str, cts=False):
string = string.strip()
if (string.startswith("'") and string.endswith("'")) or (string.startswith('"') and string.endswith('"')):
return Constant(string[1:-1])
elif cts:
return Constant(string)
else:
return Variable(string)
class Variable(Parameter):
"""
Data variable
"""
def equal(self, o):
return (o is not None) and (isinstance(o, Variable) and (o.name == self.name))
def toLTLFO(self):
return "%s" % self.name
V = Variable
class Constant(Parameter):
"""
Constant
"""
def __init__(self, name=""):
super().__init__(name=name)
if self.name.startswith("'") and self.name.endswith("'"):
self.name = self.name[1:-1]
def equal(self, o):
if isinstance(o, Regexp):
return o.equal(self)
return (o is not None) and (isinstance(o, Constant) and (str(o.name) == str(self.name)))
def toLTLFO(self):
return "'%s'" % self.name
def __str__(self):
return "'%s'" % self.name
C = Constant
class Regexp(Constant):
"""
regexp
"""
def equal(self, o):
try:
if o is not None:
p = re.compile(str(self.name))
return False if p.match(o.name) is None else True
except:
return False
class Predicate(Exp):
"""
Predicate
"""
def __init__(self, name="", args=None):
if args is None:
p = Predicate.parse(name)
self.name = p.name
self.args = p.args
else:
self.name = name
self.args = args
def __str__(self):
args = ",".join([str(p) for p in self.args])
return "%s(%s)" % (self.name, args)
@staticmethod
def parse(string: str, cts=False):
string = string.strip()
if string.endswith(")"):
name = string[0: string.find("(")]
args = string[string.find("(")+1:-1].split(",")
arguments = []
for ar in args:
if ar != '':
arguments.append(Parameter.parse(ar, cts=cts))
else:
print("Invalid predicate format !")
return
return Predicate(name, arguments)
def equal(self, p):
res = False
if isinstance(p, Predicate):
res = (p.name == self.name) and (len(p.args) == len(self.args))
if res:
for a1, a2 in zip(self.args, p.args):
if not a1.equal(a2):
return False
return res
def toLTLFO(self):
args = ",".join([p.toLTLFO() for p in self.args])
return "%s(%s)" % (self.name, args)
def toCODE(self):
args = ",".join([p.toCODE() for p in self.args])
return "%s('%s', %s)" % (self.__class__.__name__, self.name, "[" + args + "]")
def children(self):
return self.args
def isIn(self, preds):
for x in preds:
if self.equal(x):
return True
return False
def instantiate(self, valuation):
p = Predicate(name=self.name, args=[])
for x in self.args:
if isinstance(x, Variable):
# Lookup in valuation
found = False
for v in valuation:
if str(v.var) == x.name:
p.args.append(Constant(str(v.value.name)))
found = True
break
if not found:
# raise Exception("Predicate instantiation failed : missing vars")
# p.args.append(Variable(str(x.name)))
return None
elif isinstance(x, Constant):
p.args.append(x)
return p
P = Predicate
class UExp(Exp):
"""
Unary expression
"""
symbol = ""
def __init__(self, inner=None):
self.inner = inner
def __str__(self):
return "%s(%s)" % (self.symbol, self.inner)
def prefix_print(self):
return "(%s %s)" % (self.symbol, self.inner.prefix_print())
def toTSPASS(self):
return "(%s %s)" % (self.tspass, self.inner.toTSPASS())
def toLTLFO(self):
return "(%s %s)" % (self.ltlfo, self.inner.toLTLFO())
def toCODE(self):
return "%s(%s)" % (self.__class__.__name__, self.inner.toCODE())
def children(self):
return [self.inner]
class BExp(Exp):
"""
Binary expression
"""
symbol = ""
def __init__(self, left=None, right=None):
self.left = left
self.right = right
def __str__(self):
return "(%s %s %s)" % (self.left, self.symbol, self.right)
def prefix_print(self):
return "(%s %s %s)" % (self.symbol, self.left, self.right)
def toTSPASS(self):
return "(%s %s %s)" % (self.left.toTSPASS(), self.tspass, self.right.toTSPASS())
def toLTLFO(self):
return "(%s %s %s)" % (self.left.toLTLFO(), self.ltlfo, self.right.toLTLFO())
def toCODE(self):
return "%s(%s,%s)" % (self.__class__.__name__, self.left.toCODE(), self.right.toCODE())
def children(self):
return [self.left, self.right]
#############################
# LTL Operators
#############################
##
# Propositional operators
##
class And(BExp):
symbol = "and"
tspass = "&&"
ltlfo = "/\\"
def eval(self):
# return self.left.eval().and_(self.right.eval())
if isinstance(self.left, true) or self.left is Boolean3.Top:
return self.right
elif isinstance(self.left, false) or self.left is Boolean3.Bottom:
return false()
else:
if isinstance(self.right, true) or self.right is Boolean3.Top: return self.left
elif isinstance(self.right, false) or self.right is Boolean3.Bottom: return false()
else: return self
# def and_(self, exp):
# return self.left.and_(self.right)
#
# def or_(self, exp):
# return self.left.or_(self.right)
class Or(BExp):
symbol = "or"
tspass = "||"
ltlfo = "\/"
def eval(self):
# return self.left.eval().or_(self.right.eval())
if isinstance(self.left, true) or self.left is Boolean3.Top:
return true()
elif isinstance(self.left, false) or self.left is Boolean3.Bottom:
return self.right
else:
if isinstance(self.right, true) or self.right is Boolean3.Top: return true()
elif isinstance(self.right, false) or self.right is Boolean3.Bottom: return self.left
else: return self
# def and_(self, exp):
# return self.left.and_(self.right)
#
# def or_(self, exp):
# return self.left.or_(self.right)
class Neg(UExp):
symbol = "not"
tspass = "~"
ltlfo = "~"
def eval(self):
if isinstance(self.inner, true) or self.inner is Boolean3.Top: return false()
elif isinstance(self.inner, false) or self.inner is Boolean3.Bottom: return true()
elif isinstance(self.inner, Neg): return self.inner.inner
else: return self
Not = Neg
class Imply(Or):
symbol = "=>"
tspass = "=>"
ltlfo = "=>"
def __init__(self, left=None, right=None):
super().__init__(Neg(left), right)
def __str__(self):
return "(%s %s %s)" % (self.left.inner, self.symbol, self.right)
def toCODE(self):
return "%s(%s,%s)" % (self.__class__.__name__, self.left.inner.toCODE(), self.right.toCODE())
##
# Temporal operators
##
# Always
class Always(UExp):
symbol = "always"
tspass = "always"
ltlfo = "G"
class G(Always):
symbol = "G"
# Future
class Future(UExp):
symbol = "future"
tspass = "sometime"
ltlfo = "F"
class F(Future):
symbol = "F"
# Next
class Next(UExp):
symbol = "next"
tspass = "next"
ltlfo = "X"
class X(Next):
symbol = "X"
# Until
class Until(BExp):
symbol = "until"
tspass = "until"
ltlfo = "U"
class U(Until):
symbol = "U"
# Release
class Release(BExp):
symbol = "release"
tspass = "unless"
ltlfo = ""
def toLTLFO(self):
""" Change to until form """
return "~(~(%s) U ~(%s))" % (self.left.toLTLFO(), self.right.toLTLFO())
class R(Release):
symbol = "R"
#############################
# Trace / Events
#############################
class Event:
"""
Event that contains a set of predicates
"""
def __init__(self, predicates=None, step="0"):
self.predicates = [] if predicates is None else predicates
self.step = step
def __str__(self):
return "{" + " | ".join([str(p) for p in self.predicates]) + "}"
@staticmethod
def parse(string):
string = string.strip()
predicates = []
if string.startswith("{") and string.endswith("}"):
prs = string[1:-1].split("|")
if len(prs) == 1 and prs[0] is "":
return Event()
for p in prs:
predicates.append(Predicate.parse(p, cts=True))
else:
print("Invalid event format ! A trace should be between {}")
return
return Event(predicates)
def push_predicate(self, predicate):
self.predicates.append(predicate)
return self
def contains(self, predicate):
for p in self.predicates:
if isinstance(p, Predicate):
if p.equal(predicate):
return True
return False
p = push_predicate
def toLTLFO(self):
return "{" + ",".join([p.toLTLFO() for p in self.predicates]) + "}"
class Trace:
"""
Trace that contains a set of event
"""
def __init__(self, events=None):
self.events = [] if events is None else events
def __str__(self):
return ";".join([str(e) for e in self.events])
@staticmethod
def parse(string):
string = string.strip()
events = []
evs = string.split(";")
[events.append(Event.parse(e)) if e != "" else None for e in evs]
return Trace(events)
def push_event(self, event):
self.events.append(event)
return self
def contains(self, f):
if isinstance(f, Event):
return f in self.events
elif isinstance(f, Predicate):
for e in self.events:
if e.contains(f): return True
return False
else:
return False
e = push_event
def toLTLFO(self):
return ",".join([e.toLTLFO() for e in self.events])
#############################
# Three valued boolean
#############################
class Boolean3(Enum):
"""
Boolean3 values
"""
Top = "\u22A4"
Bottom = "\u22A5"
Unknown = "?"
def __str__(self):
return self.value
|
hkff/FodtlMon
|
fodtlmon/ltl/ltl.py
|
Formula.walk
|
python
|
def walk(self, filters: str=None, filter_type: type=None, pprint=False, depth=-1):
children = self.children()
if children is None:
children = []
res = []
if depth == 0:
return res
elif depth != -1:
depth -= 1
for child in children:
if isinstance(child, Formula):
tmp = child.walk(filters=filters, filter_type=filter_type, pprint=pprint, depth=depth)
if tmp:
res.extend(tmp)
if filter_type is None:
if filters is not None:
if eval(filters) is True:
res.append(self)
else:
res.append(self)
elif isinstance(self, filter_type):
if filters is not None:
if eval(filters) is True:
res.append(self)
else:
res.append(self)
if pprint:
res = [str(x) + " " for x in res]
res = "\n".join(res)
return res
|
Iterate tree in pre-order wide-first search order
:param filters: filter by python expression
:param filter_type: Filter by class
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/ltl/ltl.py#L73-L113
|
[
"def children(self):\n return []\n"
] |
class Formula:
"""
Abstract formula
"""
symbol = ""
tspass = ""
ltlfo = ""
code = ""
def toTSPASS(self):
return str(self)
def toLTLFO(self):
return str(self)
def prefix_print(self):
return str(self)
def toCODE(self):
return self.__class__.__name__ + "()"
def reduce(self):
pass
def eval(self):
return self
def clos(self):
pass
def nnf(self):
pass
def and_(self, exp):
return And(self, exp)
def or_(self, exp):
return Or(self, exp)
def size(self):
return 1 + sum([s.size() for s in self.children()])
def children(self):
return []
|
hkff/FodtlMon
|
fodtlmon/ltl/ltlmon.py
|
ltlfo2mon
|
python
|
def ltlfo2mon(formula:Formula, trace:Trace):
fl = formula.toLTLFO() if isinstance(formula, Formula) else formula
tr = trace.toLTLFO() if isinstance(trace, Trace) else trace
cmd = "echo \"%s\" | java -jar fodtlmon/tools/ltlfo2mon.jar -p \"%s\"" % (tr, fl)
p = os.popen(cmd)
res = p.readline()[:-1]
p.close()
return res
|
Run ltlfo2mon
:param formula: Formula | ltlfo string formula
:param trace: Trace | ltlfo string trace
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/ltl/ltlmon.py#L133-L146
| null |
"""
ltlmon LTL monitor
Copyright (C) 2015 Walid Benghabrit
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'walid'
from fodtlmon.ltl.ltl import *
from fodtlmon.parser.Parser import *
import os
import copy
import time
DEBUG = False
def Debug(*args):
if DEBUG:
print(*args)
class Mon:
"""
Abstract monitor
"""
AP = []
def __init__(self, formula, trace):
self.formula = FodtlParser.parse(formula) if isinstance(formula, str) else formula
self.trace = trace
self.counter = 0
self.last = Boolean3.Unknown
self.counter2 = 0
self.rewrite = copy.deepcopy(self.formula)
def monitor(self, *args, **kargs):
pass
def prg(self, *args, **kargs):
pass
def push_event(self, event):
self.trace.push_event(event)
def reset(self):
self.rewrite = copy.deepcopy(self.formula)
class Ltlmon(Mon):
"""
LTL monitor using progression technique.
"""
def monitor(self, once=False, debug=False, struct_res=False):
if debug:
start_time = time.time()
for e in self.trace.events[self.counter:]:
if self.last == Boolean3.Top or self.last == Boolean3.Bottom:
break
else:
self.counter += 1
self.counter2 += 1
self.rewrite = self.prg(self.rewrite, e)
# Debug(self.rewrite)
self.last = B3(self.rewrite.eval()) if isinstance(self.rewrite, Formula) else self.rewrite
if once: break
if struct_res:
ret = {"result": self.last, "at": self.counter2, "step": self.counter}
else:
ret = "Result Progression: %s after %s events." % (self.last, self.counter)
# print(ret)
if debug:
exec_time = time.time() - start_time
print("Execution time : %5.4f ms" % (exec_time*1000))
return ret
def prg(self, formula, event, valuation=None):
# print(formula)
if isinstance(formula, Predicate):
# Todo : Check if Predicate is in AP
res = true() if event.contains(formula) else false()
elif isinstance(formula, true):
res = true()
elif isinstance(formula, false):
res = false()
elif isinstance(formula, Neg):
res = Neg(self.prg(formula.inner, event, valuation)).eval()
elif isinstance(formula, Or):
res = Or(self.prg(formula.left, event, valuation), self.prg(formula.right, event, valuation)).eval()
elif isinstance(formula, And):
res = And(self.prg(formula.left, event, valuation), self.prg(formula.right, event, valuation)).eval()
elif isinstance(formula, Always):
res = And(self.prg(formula.inner, event, valuation), G(formula.inner)).eval()
elif isinstance(formula, Future):
res = Or(self.prg(formula.inner, event, valuation), F(formula.inner)).eval()
elif isinstance(formula, Until):
res = Or(self.prg(formula.right, event, valuation),
And(self.prg(formula.left, event, valuation), U(formula.left, formula.right)).eval()).eval()
elif isinstance(formula, Release):
res = Or(self.prg(formula.left, event, valuation),
And(self.prg(formula.right, event, valuation), R(formula.left, formula.right)).eval()).eval()
elif isinstance(formula, Next):
res = formula.inner
else:
raise Exception("Error %s of type %s" % (formula, type(formula)))
return res
|
hkff/FodtlMon
|
fodtlmon/mon.py
|
main
|
python
|
def main(argv):
input_file = ""
output_file = ""
monitor = None
formula = None
trace = None
iformula = None
itrace = None
isys = None
online = False
fuzzer = False
l2m = False
debug = False
rounds = 1
server_port = 8080
webservice = False
help_str_extended = "fodtlmon V 0.1 .\n" + \
"For more information see fodtlmon home page\n Usage : mon.py [OPTIONS] formula trace" + \
"\n -h \t--help " + "\t display this help and exit" + \
"\n -i \t--input= [file] " + "\t the input file" + \
"\n -o \t--output= [path]" + "\t the output file" + \
"\n -f \t--formula " + "\t the formula" + \
"\n \t--iformula " + "\t path to file that contains the formula" + \
"\n -t \t--trace " + "\t the trace" + \
"\n \t--itrace " + "\t path to file that contains the trace" + \
"\n -1 \t--ltl " + "\t use LTL monitor" + \
"\n \t--l2m " + "\t call ltl2mon also" + \
"\n -2 \t--fotl " + "\t use FOTL monitor" + \
"\n -3 \t--dtl " + "\t use DTL monitor" + \
"\n -4 \t--fodtl " + "\t use FODTL monitor" + \
"\n \t--sys= [file] " + "\t Run a system from json file" + \
"\n \t--rounds= int " + "\t Number of rounds to run in the system" + \
"\n -z \t--fuzzer " + "\t run fuzzing tester" + \
"\n -d \t--debug " + "\t enable debug mode" + \
"\n \t--server " + "\t start web service" + \
"\n \t--port= int " + "\t server port number" + \
"\n\nReport fodtlmon bugs to walid.benghabrit@mines-nantes.fr" + \
"\nfodtlmon home page: <https://github.com/hkff/fodtlmon>" + \
"\nfodtlmon is a free software released under GPL 3"
# Checking options
try:
opts, args = getopt.getopt(argv[1:], "hi:o:f:t:1234zd",
["help", "input=", "output=", "trace=", "formula=" "ltl", "fotl", "dtl",
"fodtl", "sys=", "fuzzer", "itrace=", "iformula=", "rounds=", "l2m", "debug",
"server", "port="])
except getopt.GetoptError:
print(help_str_extended)
sys.exit(2)
if len(opts) == 0:
print(help_str_extended)
# Handling options
for opt, arg in opts:
if opt in ("-h", "--help"):
print(help_str_extended)
sys.exit()
elif opt in ("-i", "--input"):
input_file = arg
elif opt in ("-o", "--output"):
output_file = arg
elif opt in ("-1", "--ltl"):
monitor = Ltlmon
elif opt in ("-2", "--fotl"):
monitor = Fotlmon
elif opt in ("-3", "--dtl"):
monitor = Dtlmon
elif opt in ("-4", "--fodtl"):
monitor = Fodtlmon
elif opt in ("-f", "--formula"):
formula = arg
elif opt in ("-t", "--trace"):
trace = arg
elif opt in "--sys":
isys = arg
elif opt in "--rounds":
rounds = int(arg)
elif opt in ("-z", "--fuzzer"):
fuzzer = True
elif opt in "--iformula":
iformula = arg
elif opt in "--itrace":
itrace = arg
elif opt in "--l2m":
l2m = True
elif opt in ("-d", "--debug"):
debug = True
elif opt in "--server":
webservice = True
elif opt in "--port":
server_port = int(arg)
if webservice:
Webservice.start(server_port)
return
if fuzzer:
if monitor is Ltlmon:
run_ltl_tests(monitor="ltl", alphabet=["P"], constants=["a", "b", "c"], trace_lenght=10000, formula_depth=5,
formula_nbr=10000, debug=debug)
elif monitor is Dtlmon:
run_dtl_tests()
return
if itrace is not None:
with open(itrace, "r") as f:
trace = f.read()
if iformula is not None:
with open(iformula, "r") as f:
formula = f.read()
if isys is not None:
with open(isys, "r") as f:
js = f.read()
s = System.parseJSON(js)
for x in range(rounds):
s.run()
return
# print(argv)
if None not in (monitor, trace, formula):
tr = Trace().parse(trace)
fl = eval(formula[1:]) if formula.startswith(":") else FodtlParser.parse(formula)
mon = monitor(fl, tr)
res = mon.monitor(debug=debug)
print("")
print("Trace : %s" % tr)
print("Formula : %s" % fl)
print("Code : %s" % fl.toCODE())
print("PPrint : %s" % fl.prefix_print())
print("TSPASS : %s" % fl.toTSPASS())
print("LTLFO : %s" % fl.toLTLFO())
print("Result : %s" % res)
if l2m:
print(fl.toLTLFO())
res = ltlfo2mon(fl.toLTLFO(), tr.toLTLFO())
print("ltl2mon : %s" % res)
|
Main mon
:param argv: console arguments
:return:
|
train
|
https://github.com/hkff/FodtlMon/blob/0c9015a1a1f0a4a64d52945c86b45441d5871c56/fodtlmon/mon.py#L35-L179
|
[
"def ltlfo2mon(formula:Formula, trace:Trace):\n \"\"\"\n Run ltlfo2mon\n :param formula: Formula | ltlfo string formula\n :param trace: Trace | ltlfo string trace\n :return:\n \"\"\"\n fl = formula.toLTLFO() if isinstance(formula, Formula) else formula\n tr = trace.toLTLFO() if isinstance(trace, Trace) else trace\n cmd = \"echo \\\"%s\\\" | java -jar fodtlmon/tools/ltlfo2mon.jar -p \\\"%s\\\"\" % (tr, fl)\n p = os.popen(cmd)\n res = p.readline()[:-1]\n p.close()\n return res\n",
"def parse(string):\n string = string.strip()\n events = []\n evs = string.split(\";\")\n [events.append(Event.parse(e)) if e != \"\" else None for e in evs]\n return Trace(events)\n"
] |
#!/usr/bin/python3.4
"""
fodtlmon version 1.0
Copyright (C) 2015 Walid Benghabrit
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from fodtlmon.webservice import webservice
__author__ = 'walid'
import sys
import getopt
from fodtlmon.ltl.test import *
from fodtlmon.dtl.test import *
from fodtlmon.fotl.test import *
from fodtlmon.fodtl.test import *
from fodtlmon.parser.Parser import *
from fodtlmon.webservice.webservice import *
###################
# Main
###################
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
InterProcessResult.add_result
|
python
|
def add_result(self, _type, test, exc_info=None):
if exc_info is not None:
exc_info = FrozenExcInfo(exc_info)
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((_type, test, exc_info))
|
Adds the given result to the list
:param _type: type of the state of the test (TestState.failure, TestState.error, ...)
:param test: the test
:param exc_info: additional execution information
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L58-L70
| null |
class InterProcessResult(unittest.result.TestResult):
"""
A TestResult implementation to put results in a queue, for another thread to consume
"""
def __init__(self, result_queue: queue.Queue):
super().__init__()
self.result_queue = result_queue
self.start_time = self.stop_time = None
def startTest(self, test: unittest.case.TestCase) -> None:
"""
Saves the time before starting the test
:param test: the test that is going to be run
"""
self.start_time = time.time()
def addSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.success, test)
def addFailure(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.failure, test, exc_info)
def addError(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.error, test, exc_info)
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.expected_failure, test, err)
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.unexpected_success, test)
def addSkip(self, test: unittest.case.TestCase, reason: str):
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param reason: the reason why the test was skipped
"""
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((TestState.skipped, test, reason))
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
InterProcessResult.addSuccess
|
python
|
def addSuccess(self, test: unittest.case.TestCase) -> None:
# noinspection PyTypeChecker
self.add_result(TestState.success, test)
|
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L72-L79
|
[
"def add_result(self, _type, test, exc_info=None):\n \"\"\"\n Adds the given result to the list\n\n :param _type: type of the state of the test (TestState.failure, TestState.error, ...)\n :param test: the test\n :param exc_info: additional execution information\n \"\"\"\n if exc_info is not None:\n exc_info = FrozenExcInfo(exc_info)\n test.time_taken = time.time() - self.start_time\n test._outcome = None\n self.result_queue.put((_type, test, exc_info))\n"
] |
class InterProcessResult(unittest.result.TestResult):
"""
A TestResult implementation to put results in a queue, for another thread to consume
"""
def __init__(self, result_queue: queue.Queue):
super().__init__()
self.result_queue = result_queue
self.start_time = self.stop_time = None
def startTest(self, test: unittest.case.TestCase) -> None:
"""
Saves the time before starting the test
:param test: the test that is going to be run
"""
self.start_time = time.time()
def add_result(self, _type, test, exc_info=None):
"""
Adds the given result to the list
:param _type: type of the state of the test (TestState.failure, TestState.error, ...)
:param test: the test
:param exc_info: additional execution information
"""
if exc_info is not None:
exc_info = FrozenExcInfo(exc_info)
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((_type, test, exc_info))
def addFailure(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.failure, test, exc_info)
def addError(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.error, test, exc_info)
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.expected_failure, test, err)
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.unexpected_success, test)
def addSkip(self, test: unittest.case.TestCase, reason: str):
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param reason: the reason why the test was skipped
"""
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((TestState.skipped, test, reason))
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
InterProcessResult.addFailure
|
python
|
def addFailure(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
# noinspection PyTypeChecker
self.add_result(TestState.failure, test, exc_info)
|
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L81-L89
|
[
"def add_result(self, _type, test, exc_info=None):\n \"\"\"\n Adds the given result to the list\n\n :param _type: type of the state of the test (TestState.failure, TestState.error, ...)\n :param test: the test\n :param exc_info: additional execution information\n \"\"\"\n if exc_info is not None:\n exc_info = FrozenExcInfo(exc_info)\n test.time_taken = time.time() - self.start_time\n test._outcome = None\n self.result_queue.put((_type, test, exc_info))\n"
] |
class InterProcessResult(unittest.result.TestResult):
"""
A TestResult implementation to put results in a queue, for another thread to consume
"""
def __init__(self, result_queue: queue.Queue):
super().__init__()
self.result_queue = result_queue
self.start_time = self.stop_time = None
def startTest(self, test: unittest.case.TestCase) -> None:
"""
Saves the time before starting the test
:param test: the test that is going to be run
"""
self.start_time = time.time()
def add_result(self, _type, test, exc_info=None):
"""
Adds the given result to the list
:param _type: type of the state of the test (TestState.failure, TestState.error, ...)
:param test: the test
:param exc_info: additional execution information
"""
if exc_info is not None:
exc_info = FrozenExcInfo(exc_info)
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((_type, test, exc_info))
def addSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.success, test)
def addError(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.error, test, exc_info)
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.expected_failure, test, err)
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.unexpected_success, test)
def addSkip(self, test: unittest.case.TestCase, reason: str):
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param reason: the reason why the test was skipped
"""
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((TestState.skipped, test, reason))
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
InterProcessResult.addError
|
python
|
def addError(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
# noinspection PyTypeChecker
self.add_result(TestState.error, test, exc_info)
|
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L91-L99
|
[
"def add_result(self, _type, test, exc_info=None):\n \"\"\"\n Adds the given result to the list\n\n :param _type: type of the state of the test (TestState.failure, TestState.error, ...)\n :param test: the test\n :param exc_info: additional execution information\n \"\"\"\n if exc_info is not None:\n exc_info = FrozenExcInfo(exc_info)\n test.time_taken = time.time() - self.start_time\n test._outcome = None\n self.result_queue.put((_type, test, exc_info))\n"
] |
class InterProcessResult(unittest.result.TestResult):
"""
A TestResult implementation to put results in a queue, for another thread to consume
"""
def __init__(self, result_queue: queue.Queue):
super().__init__()
self.result_queue = result_queue
self.start_time = self.stop_time = None
def startTest(self, test: unittest.case.TestCase) -> None:
"""
Saves the time before starting the test
:param test: the test that is going to be run
"""
self.start_time = time.time()
def add_result(self, _type, test, exc_info=None):
"""
Adds the given result to the list
:param _type: type of the state of the test (TestState.failure, TestState.error, ...)
:param test: the test
:param exc_info: additional execution information
"""
if exc_info is not None:
exc_info = FrozenExcInfo(exc_info)
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((_type, test, exc_info))
def addSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.success, test)
def addFailure(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.failure, test, exc_info)
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.expected_failure, test, err)
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.unexpected_success, test)
def addSkip(self, test: unittest.case.TestCase, reason: str):
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param reason: the reason why the test was skipped
"""
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((TestState.skipped, test, reason))
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
InterProcessResult.addExpectedFailure
|
python
|
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
# noinspection PyTypeChecker
self.add_result(TestState.expected_failure, test, err)
|
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback)
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L101-L109
|
[
"def add_result(self, _type, test, exc_info=None):\n \"\"\"\n Adds the given result to the list\n\n :param _type: type of the state of the test (TestState.failure, TestState.error, ...)\n :param test: the test\n :param exc_info: additional execution information\n \"\"\"\n if exc_info is not None:\n exc_info = FrozenExcInfo(exc_info)\n test.time_taken = time.time() - self.start_time\n test._outcome = None\n self.result_queue.put((_type, test, exc_info))\n"
] |
class InterProcessResult(unittest.result.TestResult):
"""
A TestResult implementation to put results in a queue, for another thread to consume
"""
def __init__(self, result_queue: queue.Queue):
super().__init__()
self.result_queue = result_queue
self.start_time = self.stop_time = None
def startTest(self, test: unittest.case.TestCase) -> None:
"""
Saves the time before starting the test
:param test: the test that is going to be run
"""
self.start_time = time.time()
def add_result(self, _type, test, exc_info=None):
"""
Adds the given result to the list
:param _type: type of the state of the test (TestState.failure, TestState.error, ...)
:param test: the test
:param exc_info: additional execution information
"""
if exc_info is not None:
exc_info = FrozenExcInfo(exc_info)
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((_type, test, exc_info))
def addSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.success, test)
def addFailure(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.failure, test, exc_info)
def addError(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.error, test, exc_info)
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.unexpected_success, test)
def addSkip(self, test: unittest.case.TestCase, reason: str):
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param reason: the reason why the test was skipped
"""
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((TestState.skipped, test, reason))
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
InterProcessResult.addUnexpectedSuccess
|
python
|
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None:
# noinspection PyTypeChecker
self.add_result(TestState.unexpected_success, test)
|
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L111-L118
|
[
"def add_result(self, _type, test, exc_info=None):\n \"\"\"\n Adds the given result to the list\n\n :param _type: type of the state of the test (TestState.failure, TestState.error, ...)\n :param test: the test\n :param exc_info: additional execution information\n \"\"\"\n if exc_info is not None:\n exc_info = FrozenExcInfo(exc_info)\n test.time_taken = time.time() - self.start_time\n test._outcome = None\n self.result_queue.put((_type, test, exc_info))\n"
] |
class InterProcessResult(unittest.result.TestResult):
"""
A TestResult implementation to put results in a queue, for another thread to consume
"""
def __init__(self, result_queue: queue.Queue):
super().__init__()
self.result_queue = result_queue
self.start_time = self.stop_time = None
def startTest(self, test: unittest.case.TestCase) -> None:
"""
Saves the time before starting the test
:param test: the test that is going to be run
"""
self.start_time = time.time()
def add_result(self, _type, test, exc_info=None):
"""
Adds the given result to the list
:param _type: type of the state of the test (TestState.failure, TestState.error, ...)
:param test: the test
:param exc_info: additional execution information
"""
if exc_info is not None:
exc_info = FrozenExcInfo(exc_info)
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((_type, test, exc_info))
def addSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.success, test)
def addFailure(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.failure, test, exc_info)
def addError(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.error, test, exc_info)
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.expected_failure, test, err)
def addSkip(self, test: unittest.case.TestCase, reason: str):
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param reason: the reason why the test was skipped
"""
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((TestState.skipped, test, reason))
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
InterProcessResult.addSkip
|
python
|
def addSkip(self, test: unittest.case.TestCase, reason: str):
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((TestState.skipped, test, reason))
|
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param reason: the reason why the test was skipped
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L120-L129
| null |
class InterProcessResult(unittest.result.TestResult):
"""
A TestResult implementation to put results in a queue, for another thread to consume
"""
def __init__(self, result_queue: queue.Queue):
super().__init__()
self.result_queue = result_queue
self.start_time = self.stop_time = None
def startTest(self, test: unittest.case.TestCase) -> None:
"""
Saves the time before starting the test
:param test: the test that is going to be run
"""
self.start_time = time.time()
def add_result(self, _type, test, exc_info=None):
"""
Adds the given result to the list
:param _type: type of the state of the test (TestState.failure, TestState.error, ...)
:param test: the test
:param exc_info: additional execution information
"""
if exc_info is not None:
exc_info = FrozenExcInfo(exc_info)
test.time_taken = time.time() - self.start_time
test._outcome = None
self.result_queue.put((_type, test, exc_info))
def addSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.success, test)
def addFailure(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.failure, test, exc_info)
def addError(self, test: unittest.case.TestCase, exc_info: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param exc_info: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.error, test, exc_info)
def addExpectedFailure(self, test: unittest.case.TestCase, err: tuple) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
:param err: tuple of the form (Exception class, Exception instance, traceback)
"""
# noinspection PyTypeChecker
self.add_result(TestState.expected_failure, test, err)
def addUnexpectedSuccess(self, test: unittest.case.TestCase) -> None:
"""
Transforms the test in a serializable version of it and sends it to a queue for further analysis
:param test: the test to save
"""
# noinspection PyTypeChecker
self.add_result(TestState.unexpected_success, test)
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
ResultCollector.addError
|
python
|
def addError(self, test, err):
super().addError(test, err)
self.test_info(test)
self._call_test_results('addError', test, err)
|
registers a test as error
:param test: test to register
:param err: error the test gave
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L211-L220
|
[
"def _call_test_results(self, method_name, *args, **kwargs):\n \"\"\"\n calls the given method on every test results instances\n\n :param method_name: name of the method to call\n :param args: arguments to pass to the method\n :param kwargs: keyword arguments to pass to the method\n \"\"\"\n method = operator.methodcaller(method_name, *args, **kwargs)\n for testResult in self.test_results:\n method(testResult)\n",
"def test_info(self, test):\n \"\"\"\n writes test description on the stream used for reporting\n\n :param test: test for which to display information\n \"\"\"\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n"
] |
class ResultCollector(threading.Thread, unittest.result.TestResult):
"""
Results handler. Given a report queue, will reform a complete report from it as what would come from a run
of unittest.TestResult
:param stream: stream on which to write information
:param descriptions: whether to display tests descriptions or not
:param verbosity: the verbosity used for the test result reporters
:param result_queue: queue form which to get the test results
:param test_results: list of testResults instances to use
:param tests: list of tests that are currently run
"""
def __init__(self, stream=None, descriptions=None, verbosity=None, *, result_queue: queue.Queue, test_results,
tests):
threading.Thread.__init__(self)
unittest.result.TestResult.__init__(self, stream, descriptions, verbosity)
self.test_results = test_results
for testResult in self.test_results:
if hasattr(testResult, "separator1"):
self.separator1 = testResult.separator1
break
for testResult in self.test_results:
if hasattr(testResult, "separator2"):
self.separator2 = testResult.separator2
break
self.result_queue = result_queue
self.cleanup = False
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.stream = stream
self.descriptions = descriptions
self.tests = tests
def end_collection(self) -> None:
""" Tells the thread that is it time to end """
self.cleanup = True
def _call_test_results(self, method_name, *args, **kwargs):
"""
calls the given method on every test results instances
:param method_name: name of the method to call
:param args: arguments to pass to the method
:param kwargs: keyword arguments to pass to the method
"""
method = operator.methodcaller(method_name, *args, **kwargs)
for testResult in self.test_results:
method(testResult)
# noinspection PyPep8Naming
def getDescription(self, test):
"""
Get the description of the test
:param test: test from which to get the description
:return: description of the test
"""
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def test_info(self, test):
"""
writes test description on the stream used for reporting
:param test: test for which to display information
"""
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addExpectedFailure(self, test, err):
"""
registers as test as expected failure
:param test: test to register
:param err: error the test gave
"""
super().addExpectedFailure(test, err)
self.test_info(test)
self._call_test_results('addExpectedFailure', test, err)
def addFailure(self, test, err):
"""
registers a test as failure
:param test: test to register
:param err: error the test gave
"""
super().addFailure(test, err)
self.test_info(test)
self._call_test_results('addFailure', test, err)
def addSkip(self, test, reason):
"""
registers a test as skipped
:param test: test to register
:param reason: reason why the test was skipped
"""
super().addSkip(test, reason)
self.test_info(test)
self._call_test_results('addSkip', test, reason)
def addSuccess(self, test):
"""
registers a test as successful
:param test: test to register
"""
super().addSuccess(test)
self.test_info(test)
self._call_test_results('addSuccess', test)
def addUnexpectedSuccess(self, test):
"""
registers a test as an unexpected success
:param test: test to register
"""
super().addUnexpectedSuccess(test)
self.test_info(test)
self._call_test_results('addUnexpectedSuccess', test)
def printErrors(self):
"""
print test report
"""
self._call_test_results('printErrors')
def run(self) -> None:
"""
processes entries in the queue until told to stop
"""
while not self.cleanup:
try:
result, test, additional_info = self.result_queue.get(timeout=1)
except queue.Empty:
continue
self.result_queue.task_done()
if result == TestState.serialization_failure:
test = self.tests[test]
warnings.warn("Serialization error: {} on test {}".format(
additional_info, test), SerializationWarning)
test(self)
else:
self.testsRun += 1
if result == TestState.success:
self.addSuccess(test)
elif result == TestState.failure:
self.addFailure(test, additional_info)
elif result == TestState.error:
self.addError(test, additional_info)
elif result == TestState.skipped:
self.addSkip(test, additional_info)
elif result == TestState.expected_failure:
self.addExpectedFailure(test, additional_info)
elif result == TestState.unexpected_success:
self.addUnexpectedSuccess(test)
else:
raise Exception("This is not a valid test type :", result)
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
ResultCollector.addExpectedFailure
|
python
|
def addExpectedFailure(self, test, err):
super().addExpectedFailure(test, err)
self.test_info(test)
self._call_test_results('addExpectedFailure', test, err)
|
registers as test as expected failure
:param test: test to register
:param err: error the test gave
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L222-L231
|
[
"def _call_test_results(self, method_name, *args, **kwargs):\n \"\"\"\n calls the given method on every test results instances\n\n :param method_name: name of the method to call\n :param args: arguments to pass to the method\n :param kwargs: keyword arguments to pass to the method\n \"\"\"\n method = operator.methodcaller(method_name, *args, **kwargs)\n for testResult in self.test_results:\n method(testResult)\n",
"def test_info(self, test):\n \"\"\"\n writes test description on the stream used for reporting\n\n :param test: test for which to display information\n \"\"\"\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n"
] |
class ResultCollector(threading.Thread, unittest.result.TestResult):
"""
Results handler. Given a report queue, will reform a complete report from it as what would come from a run
of unittest.TestResult
:param stream: stream on which to write information
:param descriptions: whether to display tests descriptions or not
:param verbosity: the verbosity used for the test result reporters
:param result_queue: queue form which to get the test results
:param test_results: list of testResults instances to use
:param tests: list of tests that are currently run
"""
def __init__(self, stream=None, descriptions=None, verbosity=None, *, result_queue: queue.Queue, test_results,
tests):
threading.Thread.__init__(self)
unittest.result.TestResult.__init__(self, stream, descriptions, verbosity)
self.test_results = test_results
for testResult in self.test_results:
if hasattr(testResult, "separator1"):
self.separator1 = testResult.separator1
break
for testResult in self.test_results:
if hasattr(testResult, "separator2"):
self.separator2 = testResult.separator2
break
self.result_queue = result_queue
self.cleanup = False
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.stream = stream
self.descriptions = descriptions
self.tests = tests
def end_collection(self) -> None:
""" Tells the thread that is it time to end """
self.cleanup = True
def _call_test_results(self, method_name, *args, **kwargs):
"""
calls the given method on every test results instances
:param method_name: name of the method to call
:param args: arguments to pass to the method
:param kwargs: keyword arguments to pass to the method
"""
method = operator.methodcaller(method_name, *args, **kwargs)
for testResult in self.test_results:
method(testResult)
# noinspection PyPep8Naming
def getDescription(self, test):
"""
Get the description of the test
:param test: test from which to get the description
:return: description of the test
"""
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def test_info(self, test):
"""
writes test description on the stream used for reporting
:param test: test for which to display information
"""
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addError(self, test, err):
"""
registers a test as error
:param test: test to register
:param err: error the test gave
"""
super().addError(test, err)
self.test_info(test)
self._call_test_results('addError', test, err)
def addFailure(self, test, err):
"""
registers a test as failure
:param test: test to register
:param err: error the test gave
"""
super().addFailure(test, err)
self.test_info(test)
self._call_test_results('addFailure', test, err)
def addSkip(self, test, reason):
"""
registers a test as skipped
:param test: test to register
:param reason: reason why the test was skipped
"""
super().addSkip(test, reason)
self.test_info(test)
self._call_test_results('addSkip', test, reason)
def addSuccess(self, test):
"""
registers a test as successful
:param test: test to register
"""
super().addSuccess(test)
self.test_info(test)
self._call_test_results('addSuccess', test)
def addUnexpectedSuccess(self, test):
"""
registers a test as an unexpected success
:param test: test to register
"""
super().addUnexpectedSuccess(test)
self.test_info(test)
self._call_test_results('addUnexpectedSuccess', test)
def printErrors(self):
"""
print test report
"""
self._call_test_results('printErrors')
def run(self) -> None:
"""
processes entries in the queue until told to stop
"""
while not self.cleanup:
try:
result, test, additional_info = self.result_queue.get(timeout=1)
except queue.Empty:
continue
self.result_queue.task_done()
if result == TestState.serialization_failure:
test = self.tests[test]
warnings.warn("Serialization error: {} on test {}".format(
additional_info, test), SerializationWarning)
test(self)
else:
self.testsRun += 1
if result == TestState.success:
self.addSuccess(test)
elif result == TestState.failure:
self.addFailure(test, additional_info)
elif result == TestState.error:
self.addError(test, additional_info)
elif result == TestState.skipped:
self.addSkip(test, additional_info)
elif result == TestState.expected_failure:
self.addExpectedFailure(test, additional_info)
elif result == TestState.unexpected_success:
self.addUnexpectedSuccess(test)
else:
raise Exception("This is not a valid test type :", result)
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
ResultCollector.addFailure
|
python
|
def addFailure(self, test, err):
super().addFailure(test, err)
self.test_info(test)
self._call_test_results('addFailure', test, err)
|
registers a test as failure
:param test: test to register
:param err: error the test gave
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L233-L242
|
[
"def _call_test_results(self, method_name, *args, **kwargs):\n \"\"\"\n calls the given method on every test results instances\n\n :param method_name: name of the method to call\n :param args: arguments to pass to the method\n :param kwargs: keyword arguments to pass to the method\n \"\"\"\n method = operator.methodcaller(method_name, *args, **kwargs)\n for testResult in self.test_results:\n method(testResult)\n",
"def test_info(self, test):\n \"\"\"\n writes test description on the stream used for reporting\n\n :param test: test for which to display information\n \"\"\"\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n"
] |
class ResultCollector(threading.Thread, unittest.result.TestResult):
"""
Results handler. Given a report queue, will reform a complete report from it as what would come from a run
of unittest.TestResult
:param stream: stream on which to write information
:param descriptions: whether to display tests descriptions or not
:param verbosity: the verbosity used for the test result reporters
:param result_queue: queue form which to get the test results
:param test_results: list of testResults instances to use
:param tests: list of tests that are currently run
"""
def __init__(self, stream=None, descriptions=None, verbosity=None, *, result_queue: queue.Queue, test_results,
tests):
threading.Thread.__init__(self)
unittest.result.TestResult.__init__(self, stream, descriptions, verbosity)
self.test_results = test_results
for testResult in self.test_results:
if hasattr(testResult, "separator1"):
self.separator1 = testResult.separator1
break
for testResult in self.test_results:
if hasattr(testResult, "separator2"):
self.separator2 = testResult.separator2
break
self.result_queue = result_queue
self.cleanup = False
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.stream = stream
self.descriptions = descriptions
self.tests = tests
def end_collection(self) -> None:
""" Tells the thread that is it time to end """
self.cleanup = True
def _call_test_results(self, method_name, *args, **kwargs):
"""
calls the given method on every test results instances
:param method_name: name of the method to call
:param args: arguments to pass to the method
:param kwargs: keyword arguments to pass to the method
"""
method = operator.methodcaller(method_name, *args, **kwargs)
for testResult in self.test_results:
method(testResult)
# noinspection PyPep8Naming
def getDescription(self, test):
"""
Get the description of the test
:param test: test from which to get the description
:return: description of the test
"""
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def test_info(self, test):
"""
writes test description on the stream used for reporting
:param test: test for which to display information
"""
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addError(self, test, err):
"""
registers a test as error
:param test: test to register
:param err: error the test gave
"""
super().addError(test, err)
self.test_info(test)
self._call_test_results('addError', test, err)
def addExpectedFailure(self, test, err):
"""
registers as test as expected failure
:param test: test to register
:param err: error the test gave
"""
super().addExpectedFailure(test, err)
self.test_info(test)
self._call_test_results('addExpectedFailure', test, err)
def addSkip(self, test, reason):
"""
registers a test as skipped
:param test: test to register
:param reason: reason why the test was skipped
"""
super().addSkip(test, reason)
self.test_info(test)
self._call_test_results('addSkip', test, reason)
def addSuccess(self, test):
"""
registers a test as successful
:param test: test to register
"""
super().addSuccess(test)
self.test_info(test)
self._call_test_results('addSuccess', test)
def addUnexpectedSuccess(self, test):
"""
registers a test as an unexpected success
:param test: test to register
"""
super().addUnexpectedSuccess(test)
self.test_info(test)
self._call_test_results('addUnexpectedSuccess', test)
def printErrors(self):
"""
print test report
"""
self._call_test_results('printErrors')
def run(self) -> None:
"""
processes entries in the queue until told to stop
"""
while not self.cleanup:
try:
result, test, additional_info = self.result_queue.get(timeout=1)
except queue.Empty:
continue
self.result_queue.task_done()
if result == TestState.serialization_failure:
test = self.tests[test]
warnings.warn("Serialization error: {} on test {}".format(
additional_info, test), SerializationWarning)
test(self)
else:
self.testsRun += 1
if result == TestState.success:
self.addSuccess(test)
elif result == TestState.failure:
self.addFailure(test, additional_info)
elif result == TestState.error:
self.addError(test, additional_info)
elif result == TestState.skipped:
self.addSkip(test, additional_info)
elif result == TestState.expected_failure:
self.addExpectedFailure(test, additional_info)
elif result == TestState.unexpected_success:
self.addUnexpectedSuccess(test)
else:
raise Exception("This is not a valid test type :", result)
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
ResultCollector.addSkip
|
python
|
def addSkip(self, test, reason):
super().addSkip(test, reason)
self.test_info(test)
self._call_test_results('addSkip', test, reason)
|
registers a test as skipped
:param test: test to register
:param reason: reason why the test was skipped
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L244-L253
|
[
"def _call_test_results(self, method_name, *args, **kwargs):\n \"\"\"\n calls the given method on every test results instances\n\n :param method_name: name of the method to call\n :param args: arguments to pass to the method\n :param kwargs: keyword arguments to pass to the method\n \"\"\"\n method = operator.methodcaller(method_name, *args, **kwargs)\n for testResult in self.test_results:\n method(testResult)\n",
"def test_info(self, test):\n \"\"\"\n writes test description on the stream used for reporting\n\n :param test: test for which to display information\n \"\"\"\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n"
] |
class ResultCollector(threading.Thread, unittest.result.TestResult):
"""
Results handler. Given a report queue, will reform a complete report from it as what would come from a run
of unittest.TestResult
:param stream: stream on which to write information
:param descriptions: whether to display tests descriptions or not
:param verbosity: the verbosity used for the test result reporters
:param result_queue: queue form which to get the test results
:param test_results: list of testResults instances to use
:param tests: list of tests that are currently run
"""
def __init__(self, stream=None, descriptions=None, verbosity=None, *, result_queue: queue.Queue, test_results,
tests):
threading.Thread.__init__(self)
unittest.result.TestResult.__init__(self, stream, descriptions, verbosity)
self.test_results = test_results
for testResult in self.test_results:
if hasattr(testResult, "separator1"):
self.separator1 = testResult.separator1
break
for testResult in self.test_results:
if hasattr(testResult, "separator2"):
self.separator2 = testResult.separator2
break
self.result_queue = result_queue
self.cleanup = False
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.stream = stream
self.descriptions = descriptions
self.tests = tests
def end_collection(self) -> None:
""" Tells the thread that is it time to end """
self.cleanup = True
def _call_test_results(self, method_name, *args, **kwargs):
"""
calls the given method on every test results instances
:param method_name: name of the method to call
:param args: arguments to pass to the method
:param kwargs: keyword arguments to pass to the method
"""
method = operator.methodcaller(method_name, *args, **kwargs)
for testResult in self.test_results:
method(testResult)
# noinspection PyPep8Naming
def getDescription(self, test):
"""
Get the description of the test
:param test: test from which to get the description
:return: description of the test
"""
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def test_info(self, test):
"""
writes test description on the stream used for reporting
:param test: test for which to display information
"""
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addError(self, test, err):
"""
registers a test as error
:param test: test to register
:param err: error the test gave
"""
super().addError(test, err)
self.test_info(test)
self._call_test_results('addError', test, err)
def addExpectedFailure(self, test, err):
"""
registers as test as expected failure
:param test: test to register
:param err: error the test gave
"""
super().addExpectedFailure(test, err)
self.test_info(test)
self._call_test_results('addExpectedFailure', test, err)
def addFailure(self, test, err):
"""
registers a test as failure
:param test: test to register
:param err: error the test gave
"""
super().addFailure(test, err)
self.test_info(test)
self._call_test_results('addFailure', test, err)
def addSuccess(self, test):
"""
registers a test as successful
:param test: test to register
"""
super().addSuccess(test)
self.test_info(test)
self._call_test_results('addSuccess', test)
def addUnexpectedSuccess(self, test):
"""
registers a test as an unexpected success
:param test: test to register
"""
super().addUnexpectedSuccess(test)
self.test_info(test)
self._call_test_results('addUnexpectedSuccess', test)
def printErrors(self):
"""
print test report
"""
self._call_test_results('printErrors')
def run(self) -> None:
"""
processes entries in the queue until told to stop
"""
while not self.cleanup:
try:
result, test, additional_info = self.result_queue.get(timeout=1)
except queue.Empty:
continue
self.result_queue.task_done()
if result == TestState.serialization_failure:
test = self.tests[test]
warnings.warn("Serialization error: {} on test {}".format(
additional_info, test), SerializationWarning)
test(self)
else:
self.testsRun += 1
if result == TestState.success:
self.addSuccess(test)
elif result == TestState.failure:
self.addFailure(test, additional_info)
elif result == TestState.error:
self.addError(test, additional_info)
elif result == TestState.skipped:
self.addSkip(test, additional_info)
elif result == TestState.expected_failure:
self.addExpectedFailure(test, additional_info)
elif result == TestState.unexpected_success:
self.addUnexpectedSuccess(test)
else:
raise Exception("This is not a valid test type :", result)
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
ResultCollector.addSuccess
|
python
|
def addSuccess(self, test):
super().addSuccess(test)
self.test_info(test)
self._call_test_results('addSuccess', test)
|
registers a test as successful
:param test: test to register
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L255-L263
|
[
"def _call_test_results(self, method_name, *args, **kwargs):\n \"\"\"\n calls the given method on every test results instances\n\n :param method_name: name of the method to call\n :param args: arguments to pass to the method\n :param kwargs: keyword arguments to pass to the method\n \"\"\"\n method = operator.methodcaller(method_name, *args, **kwargs)\n for testResult in self.test_results:\n method(testResult)\n",
"def test_info(self, test):\n \"\"\"\n writes test description on the stream used for reporting\n\n :param test: test for which to display information\n \"\"\"\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n"
] |
class ResultCollector(threading.Thread, unittest.result.TestResult):
"""
Results handler. Given a report queue, will reform a complete report from it as what would come from a run
of unittest.TestResult
:param stream: stream on which to write information
:param descriptions: whether to display tests descriptions or not
:param verbosity: the verbosity used for the test result reporters
:param result_queue: queue form which to get the test results
:param test_results: list of testResults instances to use
:param tests: list of tests that are currently run
"""
def __init__(self, stream=None, descriptions=None, verbosity=None, *, result_queue: queue.Queue, test_results,
tests):
threading.Thread.__init__(self)
unittest.result.TestResult.__init__(self, stream, descriptions, verbosity)
self.test_results = test_results
for testResult in self.test_results:
if hasattr(testResult, "separator1"):
self.separator1 = testResult.separator1
break
for testResult in self.test_results:
if hasattr(testResult, "separator2"):
self.separator2 = testResult.separator2
break
self.result_queue = result_queue
self.cleanup = False
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.stream = stream
self.descriptions = descriptions
self.tests = tests
def end_collection(self) -> None:
""" Tells the thread that is it time to end """
self.cleanup = True
def _call_test_results(self, method_name, *args, **kwargs):
"""
calls the given method on every test results instances
:param method_name: name of the method to call
:param args: arguments to pass to the method
:param kwargs: keyword arguments to pass to the method
"""
method = operator.methodcaller(method_name, *args, **kwargs)
for testResult in self.test_results:
method(testResult)
# noinspection PyPep8Naming
def getDescription(self, test):
"""
Get the description of the test
:param test: test from which to get the description
:return: description of the test
"""
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def test_info(self, test):
"""
writes test description on the stream used for reporting
:param test: test for which to display information
"""
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addError(self, test, err):
"""
registers a test as error
:param test: test to register
:param err: error the test gave
"""
super().addError(test, err)
self.test_info(test)
self._call_test_results('addError', test, err)
def addExpectedFailure(self, test, err):
"""
registers as test as expected failure
:param test: test to register
:param err: error the test gave
"""
super().addExpectedFailure(test, err)
self.test_info(test)
self._call_test_results('addExpectedFailure', test, err)
def addFailure(self, test, err):
"""
registers a test as failure
:param test: test to register
:param err: error the test gave
"""
super().addFailure(test, err)
self.test_info(test)
self._call_test_results('addFailure', test, err)
def addSkip(self, test, reason):
"""
registers a test as skipped
:param test: test to register
:param reason: reason why the test was skipped
"""
super().addSkip(test, reason)
self.test_info(test)
self._call_test_results('addSkip', test, reason)
def addUnexpectedSuccess(self, test):
"""
registers a test as an unexpected success
:param test: test to register
"""
super().addUnexpectedSuccess(test)
self.test_info(test)
self._call_test_results('addUnexpectedSuccess', test)
def printErrors(self):
"""
print test report
"""
self._call_test_results('printErrors')
def run(self) -> None:
"""
processes entries in the queue until told to stop
"""
while not self.cleanup:
try:
result, test, additional_info = self.result_queue.get(timeout=1)
except queue.Empty:
continue
self.result_queue.task_done()
if result == TestState.serialization_failure:
test = self.tests[test]
warnings.warn("Serialization error: {} on test {}".format(
additional_info, test), SerializationWarning)
test(self)
else:
self.testsRun += 1
if result == TestState.success:
self.addSuccess(test)
elif result == TestState.failure:
self.addFailure(test, additional_info)
elif result == TestState.error:
self.addError(test, additional_info)
elif result == TestState.skipped:
self.addSkip(test, additional_info)
elif result == TestState.expected_failure:
self.addExpectedFailure(test, additional_info)
elif result == TestState.unexpected_success:
self.addUnexpectedSuccess(test)
else:
raise Exception("This is not a valid test type :", result)
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
ResultCollector.addUnexpectedSuccess
|
python
|
def addUnexpectedSuccess(self, test):
super().addUnexpectedSuccess(test)
self.test_info(test)
self._call_test_results('addUnexpectedSuccess', test)
|
registers a test as an unexpected success
:param test: test to register
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L265-L273
|
[
"def _call_test_results(self, method_name, *args, **kwargs):\n \"\"\"\n calls the given method on every test results instances\n\n :param method_name: name of the method to call\n :param args: arguments to pass to the method\n :param kwargs: keyword arguments to pass to the method\n \"\"\"\n method = operator.methodcaller(method_name, *args, **kwargs)\n for testResult in self.test_results:\n method(testResult)\n",
"def test_info(self, test):\n \"\"\"\n writes test description on the stream used for reporting\n\n :param test: test for which to display information\n \"\"\"\n if self.showAll:\n self.stream.write(self.getDescription(test))\n self.stream.write(\" ... \")\n self.stream.flush()\n"
] |
class ResultCollector(threading.Thread, unittest.result.TestResult):
"""
Results handler. Given a report queue, will reform a complete report from it as what would come from a run
of unittest.TestResult
:param stream: stream on which to write information
:param descriptions: whether to display tests descriptions or not
:param verbosity: the verbosity used for the test result reporters
:param result_queue: queue form which to get the test results
:param test_results: list of testResults instances to use
:param tests: list of tests that are currently run
"""
def __init__(self, stream=None, descriptions=None, verbosity=None, *, result_queue: queue.Queue, test_results,
tests):
threading.Thread.__init__(self)
unittest.result.TestResult.__init__(self, stream, descriptions, verbosity)
self.test_results = test_results
for testResult in self.test_results:
if hasattr(testResult, "separator1"):
self.separator1 = testResult.separator1
break
for testResult in self.test_results:
if hasattr(testResult, "separator2"):
self.separator2 = testResult.separator2
break
self.result_queue = result_queue
self.cleanup = False
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.stream = stream
self.descriptions = descriptions
self.tests = tests
def end_collection(self) -> None:
""" Tells the thread that is it time to end """
self.cleanup = True
def _call_test_results(self, method_name, *args, **kwargs):
"""
calls the given method on every test results instances
:param method_name: name of the method to call
:param args: arguments to pass to the method
:param kwargs: keyword arguments to pass to the method
"""
method = operator.methodcaller(method_name, *args, **kwargs)
for testResult in self.test_results:
method(testResult)
# noinspection PyPep8Naming
def getDescription(self, test):
"""
Get the description of the test
:param test: test from which to get the description
:return: description of the test
"""
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def test_info(self, test):
"""
writes test description on the stream used for reporting
:param test: test for which to display information
"""
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addError(self, test, err):
"""
registers a test as error
:param test: test to register
:param err: error the test gave
"""
super().addError(test, err)
self.test_info(test)
self._call_test_results('addError', test, err)
def addExpectedFailure(self, test, err):
"""
registers as test as expected failure
:param test: test to register
:param err: error the test gave
"""
super().addExpectedFailure(test, err)
self.test_info(test)
self._call_test_results('addExpectedFailure', test, err)
def addFailure(self, test, err):
"""
registers a test as failure
:param test: test to register
:param err: error the test gave
"""
super().addFailure(test, err)
self.test_info(test)
self._call_test_results('addFailure', test, err)
def addSkip(self, test, reason):
"""
registers a test as skipped
:param test: test to register
:param reason: reason why the test was skipped
"""
super().addSkip(test, reason)
self.test_info(test)
self._call_test_results('addSkip', test, reason)
def addSuccess(self, test):
"""
registers a test as successful
:param test: test to register
"""
super().addSuccess(test)
self.test_info(test)
self._call_test_results('addSuccess', test)
def printErrors(self):
"""
print test report
"""
self._call_test_results('printErrors')
def run(self) -> None:
"""
processes entries in the queue until told to stop
"""
while not self.cleanup:
try:
result, test, additional_info = self.result_queue.get(timeout=1)
except queue.Empty:
continue
self.result_queue.task_done()
if result == TestState.serialization_failure:
test = self.tests[test]
warnings.warn("Serialization error: {} on test {}".format(
additional_info, test), SerializationWarning)
test(self)
else:
self.testsRun += 1
if result == TestState.success:
self.addSuccess(test)
elif result == TestState.failure:
self.addFailure(test, additional_info)
elif result == TestState.error:
self.addError(test, additional_info)
elif result == TestState.skipped:
self.addSkip(test, additional_info)
elif result == TestState.expected_failure:
self.addExpectedFailure(test, additional_info)
elif result == TestState.unexpected_success:
self.addUnexpectedSuccess(test)
else:
raise Exception("This is not a valid test type :", result)
|
BenjaminSchubert/NitPycker
|
nitpycker/result.py
|
ResultCollector.run
|
python
|
def run(self) -> None:
while not self.cleanup:
try:
result, test, additional_info = self.result_queue.get(timeout=1)
except queue.Empty:
continue
self.result_queue.task_done()
if result == TestState.serialization_failure:
test = self.tests[test]
warnings.warn("Serialization error: {} on test {}".format(
additional_info, test), SerializationWarning)
test(self)
else:
self.testsRun += 1
if result == TestState.success:
self.addSuccess(test)
elif result == TestState.failure:
self.addFailure(test, additional_info)
elif result == TestState.error:
self.addError(test, additional_info)
elif result == TestState.skipped:
self.addSkip(test, additional_info)
elif result == TestState.expected_failure:
self.addExpectedFailure(test, additional_info)
elif result == TestState.unexpected_success:
self.addUnexpectedSuccess(test)
else:
raise Exception("This is not a valid test type :", result)
|
processes entries in the queue until told to stop
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/result.py#L281-L315
|
[
"def addError(self, test, err):\n \"\"\"\n registers a test as error\n\n :param test: test to register\n :param err: error the test gave\n \"\"\"\n super().addError(test, err)\n self.test_info(test)\n self._call_test_results('addError', test, err)\n",
"def addExpectedFailure(self, test, err):\n \"\"\"\n registers as test as expected failure\n\n :param test: test to register\n :param err: error the test gave\n \"\"\"\n super().addExpectedFailure(test, err)\n self.test_info(test)\n self._call_test_results('addExpectedFailure', test, err)\n",
"def addFailure(self, test, err):\n \"\"\"\n registers a test as failure\n\n :param test: test to register\n :param err: error the test gave\n \"\"\"\n super().addFailure(test, err)\n self.test_info(test)\n self._call_test_results('addFailure', test, err)\n",
"def addSkip(self, test, reason):\n \"\"\"\n registers a test as skipped\n\n :param test: test to register\n :param reason: reason why the test was skipped\n \"\"\"\n super().addSkip(test, reason)\n self.test_info(test)\n self._call_test_results('addSkip', test, reason)\n",
"def addSuccess(self, test):\n \"\"\"\n registers a test as successful\n\n :param test: test to register\n \"\"\"\n super().addSuccess(test)\n self.test_info(test)\n self._call_test_results('addSuccess', test)\n",
"def addUnexpectedSuccess(self, test):\n \"\"\"\n registers a test as an unexpected success\n\n :param test: test to register\n \"\"\"\n super().addUnexpectedSuccess(test)\n self.test_info(test)\n self._call_test_results('addUnexpectedSuccess', test)\n"
] |
class ResultCollector(threading.Thread, unittest.result.TestResult):
"""
Results handler. Given a report queue, will reform a complete report from it as what would come from a run
of unittest.TestResult
:param stream: stream on which to write information
:param descriptions: whether to display tests descriptions or not
:param verbosity: the verbosity used for the test result reporters
:param result_queue: queue form which to get the test results
:param test_results: list of testResults instances to use
:param tests: list of tests that are currently run
"""
def __init__(self, stream=None, descriptions=None, verbosity=None, *, result_queue: queue.Queue, test_results,
tests):
threading.Thread.__init__(self)
unittest.result.TestResult.__init__(self, stream, descriptions, verbosity)
self.test_results = test_results
for testResult in self.test_results:
if hasattr(testResult, "separator1"):
self.separator1 = testResult.separator1
break
for testResult in self.test_results:
if hasattr(testResult, "separator2"):
self.separator2 = testResult.separator2
break
self.result_queue = result_queue
self.cleanup = False
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.stream = stream
self.descriptions = descriptions
self.tests = tests
def end_collection(self) -> None:
""" Tells the thread that is it time to end """
self.cleanup = True
def _call_test_results(self, method_name, *args, **kwargs):
"""
calls the given method on every test results instances
:param method_name: name of the method to call
:param args: arguments to pass to the method
:param kwargs: keyword arguments to pass to the method
"""
method = operator.methodcaller(method_name, *args, **kwargs)
for testResult in self.test_results:
method(testResult)
# noinspection PyPep8Naming
def getDescription(self, test):
"""
Get the description of the test
:param test: test from which to get the description
:return: description of the test
"""
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def test_info(self, test):
"""
writes test description on the stream used for reporting
:param test: test for which to display information
"""
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addError(self, test, err):
"""
registers a test as error
:param test: test to register
:param err: error the test gave
"""
super().addError(test, err)
self.test_info(test)
self._call_test_results('addError', test, err)
def addExpectedFailure(self, test, err):
"""
registers as test as expected failure
:param test: test to register
:param err: error the test gave
"""
super().addExpectedFailure(test, err)
self.test_info(test)
self._call_test_results('addExpectedFailure', test, err)
def addFailure(self, test, err):
"""
registers a test as failure
:param test: test to register
:param err: error the test gave
"""
super().addFailure(test, err)
self.test_info(test)
self._call_test_results('addFailure', test, err)
def addSkip(self, test, reason):
"""
registers a test as skipped
:param test: test to register
:param reason: reason why the test was skipped
"""
super().addSkip(test, reason)
self.test_info(test)
self._call_test_results('addSkip', test, reason)
def addSuccess(self, test):
"""
registers a test as successful
:param test: test to register
"""
super().addSuccess(test)
self.test_info(test)
self._call_test_results('addSuccess', test)
def addUnexpectedSuccess(self, test):
"""
registers a test as an unexpected success
:param test: test to register
"""
super().addUnexpectedSuccess(test)
self.test_info(test)
self._call_test_results('addUnexpectedSuccess', test)
def printErrors(self):
"""
print test report
"""
self._call_test_results('printErrors')
|
BenjaminSchubert/NitPycker
|
nitpycker/runner.py
|
ParallelRunner._makeResult
|
python
|
def _makeResult(self):
return [reporter(self.stream, self.descriptions, self.verbosity) for reporter in self.resultclass]
|
instantiates the result class reporters
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/runner.py#L108-L110
| null |
class ParallelRunner:
"""
A parallel test runner for unittest
:param stream: stream to use for tests reporting
:param descriptions: whether to display descriptions or not
:param verbosity: verbosity of the test result reporters
:param failfast: stops the test on the first error or failure
:param buffer: whether to buffer output of the test or not. On buffering, this
will discard output for passing tests and display normally on failing tests
:param resultclass: single or list of result class to use
:param warnings: warning filter that should be used while running the tests
:param tb_locals: if true, local variables will be shown in tracebacks
:param process_number: number of processes to use for running the tests
"""
# TODO implement buffering
# TODO implement failfast
# TODO implement warnings
# TODO verify tb_locals works
resultclass = (TextTestResult,)
result_collector_class = ResultCollector
class Process(multiprocessing.Process):
"""
A simple test runner for a TestSuite.
:param index: index to find the test
:param test: the unittest.TestSuite to run
:param results_queue: a queue where to put the results once done
:param manager: the plugin manager to be called before and after the run
:param task_done_notifier: semaphore to acquire to notify from end of task
:param kwargs: additional arguments to pass to the process
"""
def __init__(self, index: int, test: unittest.TestSuite, results_queue: queue.Queue,
task_done_notifier: threading.Semaphore, **kwargs):
super().__init__(**kwargs)
self.index = index
self.test = test
self.results = InterProcessResult(results_queue)
self.results_queue = results_queue
self.task_done = task_done_notifier
def run(self) -> None:
""" Launches the test and notifies of the result """
try:
self.test(self.results)
except (PicklingError, TypeError) as exc: # PicklingError is in Python 3.4, TypeError in Python 3.5
self.results_queue.put((TestState.serialization_failure, self.index, exc))
finally:
self.task_done.release()
def __init__(self, stream=None, descriptions=True, verbosity=1, failfast=False, buffer=False, resultclass=None,
warnings=None, *, tb_locals=False, process_number=multiprocessing.cpu_count(),
result_collector_class=None):
if stream is None:
stream = sys.stderr
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
self.tb_locals = tb_locals
self.warnings = warnings
self.process_number = process_number
if resultclass is not None:
if isinstance(resultclass, collections.Iterable):
self.resultclass = resultclass
else:
self.resultclass = (resultclass,)
if result_collector_class is not None:
self.result_collector_class = result_collector_class
# noinspection PyPep8Naming
@staticmethod
def module_can_run_parallel(test_module: unittest.TestSuite) -> bool:
"""
Checks if a given module of tests can be run in parallel or not
:param test_module: the module to run
:return: True if the module can be run on parallel, False otherwise
"""
for test_class in test_module:
# if the test is already failed, we just don't filter it
# and let the test runner deal with it later.
if hasattr(unittest.loader, '_FailedTest'): # import failure in python 3.4.5+
# noinspection PyProtectedMember
if isinstance(test_class, unittest.loader._FailedTest):
continue
if not isinstance(test_class, collections.Iterable): # likely an import failure in python 3.4.4-
# before python 3.4.5, test import failures were not serializable.
# We are unable to be sure that this is a module import failure, but it very likely is
# if this is the case, we'll just run this locally and see
raise TestClassNotIterable()
for test_case in test_class:
return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)
@staticmethod
def class_can_run_parallel(test_class: unittest.TestSuite) -> bool:
"""
Checks if a given class of tests can be run in parallel or not
:param test_class: the class to run
:return: True if te class can be run in parallel, False otherwise
"""
for test_case in test_class:
return not getattr(test_case, "__no_parallel__", False)
def collect_tests(self, tests):
"""
split all tests into chunks to be executed on multiple processes
:param tests: tests that need to be run
:return: list of tests suites, test that need to be run locally
"""
test_suites = []
local_test_suites = unittest.TestSuite()
for test_module in tests:
try:
can_run_parallel = self.module_can_run_parallel(test_module)
except TestClassNotIterable:
local_test_suites.addTest(test_module)
continue
else:
if not can_run_parallel:
test_suites.append(test_module)
continue
for test_class in test_module:
if not self.class_can_run_parallel(test_class):
test_suites.append(test_class)
continue
for _test in test_class:
test_suite = unittest.TestSuite()
test_suite.addTest(_test)
test_suites.append(test_suite)
return test_suites, local_test_suites
def print_summary(self, result, time_taken):
"""
Prints the test summary, how many tests failed, how long it took, etc
:param result: result class to use to print summary
:param time_taken: the time all tests took to run
"""
if hasattr(result, "separator2"):
self.stream.writeln(result.separator2)
self.stream.writeln("Ran {number_of_tests} test{s} in {time:.3f}s\n".format(
number_of_tests=result.testsRun, s="s" if result.testsRun != 1 else "", time=time_taken
))
info = []
if not result.wasSuccessful():
self.stream.write("FAILED")
if result.failures:
info.append("failures={}".format(len(result.failures)))
if result.errors:
info.append("errors={}".format(len(result.errors)))
else:
self.stream.write("OK")
if result.skipped:
info.append("skipped={}".format(len(result.skipped)))
if result.expectedFailures:
info.append("expected failures={}".format(len(result.expectedFailures)))
if result.unexpectedSuccesses:
info.append("unexpected successes={}".format(len(result.unexpectedSuccesses)))
if info:
self.stream.writeln(" ({})".format(", ".join(info)))
else:
self.stream.write("\n")
def run(self, test: unittest.TestSuite):
"""
Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
Will then wait for the result and return them
:param test: the TestSuite to run
:return: a summary of the test run
"""
start_time = time.time()
process = []
resource_manager = multiprocessing.Manager()
results_queue = resource_manager.Queue()
tasks_running = resource_manager.BoundedSemaphore(self.process_number)
test_suites, local_test_suites = self.collect_tests(test)
results_collector = ResultCollector(
self.stream, self.descriptions, self.verbosity,
result_queue=results_queue, test_results=self._makeResult(),
tests=test_suites
)
results_collector.start()
for index, suite in enumerate(test_suites):
tasks_running.acquire()
x = self.Process(index, suite, results_queue, tasks_running)
x.start()
process.append(x)
local_test_suites.run(results_collector)
for i in process:
i.join()
results_queue.join()
results_collector.end_collection()
results_collector.join()
results_collector.printErrors()
self.print_summary(results_collector, time.time() - start_time)
return results_collector
|
BenjaminSchubert/NitPycker
|
nitpycker/runner.py
|
ParallelRunner.module_can_run_parallel
|
python
|
def module_can_run_parallel(test_module: unittest.TestSuite) -> bool:
for test_class in test_module:
# if the test is already failed, we just don't filter it
# and let the test runner deal with it later.
if hasattr(unittest.loader, '_FailedTest'): # import failure in python 3.4.5+
# noinspection PyProtectedMember
if isinstance(test_class, unittest.loader._FailedTest):
continue
if not isinstance(test_class, collections.Iterable): # likely an import failure in python 3.4.4-
# before python 3.4.5, test import failures were not serializable.
# We are unable to be sure that this is a module import failure, but it very likely is
# if this is the case, we'll just run this locally and see
raise TestClassNotIterable()
for test_case in test_class:
return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)
|
Checks if a given module of tests can be run in parallel or not
:param test_module: the module to run
:return: True if the module can be run on parallel, False otherwise
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/runner.py#L113-L135
| null |
class ParallelRunner:
"""
A parallel test runner for unittest
:param stream: stream to use for tests reporting
:param descriptions: whether to display descriptions or not
:param verbosity: verbosity of the test result reporters
:param failfast: stops the test on the first error or failure
:param buffer: whether to buffer output of the test or not. On buffering, this
will discard output for passing tests and display normally on failing tests
:param resultclass: single or list of result class to use
:param warnings: warning filter that should be used while running the tests
:param tb_locals: if true, local variables will be shown in tracebacks
:param process_number: number of processes to use for running the tests
"""
# TODO implement buffering
# TODO implement failfast
# TODO implement warnings
# TODO verify tb_locals works
resultclass = (TextTestResult,)
result_collector_class = ResultCollector
class Process(multiprocessing.Process):
"""
A simple test runner for a TestSuite.
:param index: index to find the test
:param test: the unittest.TestSuite to run
:param results_queue: a queue where to put the results once done
:param manager: the plugin manager to be called before and after the run
:param task_done_notifier: semaphore to acquire to notify from end of task
:param kwargs: additional arguments to pass to the process
"""
def __init__(self, index: int, test: unittest.TestSuite, results_queue: queue.Queue,
task_done_notifier: threading.Semaphore, **kwargs):
super().__init__(**kwargs)
self.index = index
self.test = test
self.results = InterProcessResult(results_queue)
self.results_queue = results_queue
self.task_done = task_done_notifier
def run(self) -> None:
""" Launches the test and notifies of the result """
try:
self.test(self.results)
except (PicklingError, TypeError) as exc: # PicklingError is in Python 3.4, TypeError in Python 3.5
self.results_queue.put((TestState.serialization_failure, self.index, exc))
finally:
self.task_done.release()
def __init__(self, stream=None, descriptions=True, verbosity=1, failfast=False, buffer=False, resultclass=None,
warnings=None, *, tb_locals=False, process_number=multiprocessing.cpu_count(),
result_collector_class=None):
if stream is None:
stream = sys.stderr
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
self.tb_locals = tb_locals
self.warnings = warnings
self.process_number = process_number
if resultclass is not None:
if isinstance(resultclass, collections.Iterable):
self.resultclass = resultclass
else:
self.resultclass = (resultclass,)
if result_collector_class is not None:
self.result_collector_class = result_collector_class
# noinspection PyPep8Naming
def _makeResult(self):
""" instantiates the result class reporters """
return [reporter(self.stream, self.descriptions, self.verbosity) for reporter in self.resultclass]
@staticmethod
@staticmethod
def class_can_run_parallel(test_class: unittest.TestSuite) -> bool:
"""
Checks if a given class of tests can be run in parallel or not
:param test_class: the class to run
:return: True if te class can be run in parallel, False otherwise
"""
for test_case in test_class:
return not getattr(test_case, "__no_parallel__", False)
def collect_tests(self, tests):
"""
split all tests into chunks to be executed on multiple processes
:param tests: tests that need to be run
:return: list of tests suites, test that need to be run locally
"""
test_suites = []
local_test_suites = unittest.TestSuite()
for test_module in tests:
try:
can_run_parallel = self.module_can_run_parallel(test_module)
except TestClassNotIterable:
local_test_suites.addTest(test_module)
continue
else:
if not can_run_parallel:
test_suites.append(test_module)
continue
for test_class in test_module:
if not self.class_can_run_parallel(test_class):
test_suites.append(test_class)
continue
for _test in test_class:
test_suite = unittest.TestSuite()
test_suite.addTest(_test)
test_suites.append(test_suite)
return test_suites, local_test_suites
def print_summary(self, result, time_taken):
"""
Prints the test summary, how many tests failed, how long it took, etc
:param result: result class to use to print summary
:param time_taken: the time all tests took to run
"""
if hasattr(result, "separator2"):
self.stream.writeln(result.separator2)
self.stream.writeln("Ran {number_of_tests} test{s} in {time:.3f}s\n".format(
number_of_tests=result.testsRun, s="s" if result.testsRun != 1 else "", time=time_taken
))
info = []
if not result.wasSuccessful():
self.stream.write("FAILED")
if result.failures:
info.append("failures={}".format(len(result.failures)))
if result.errors:
info.append("errors={}".format(len(result.errors)))
else:
self.stream.write("OK")
if result.skipped:
info.append("skipped={}".format(len(result.skipped)))
if result.expectedFailures:
info.append("expected failures={}".format(len(result.expectedFailures)))
if result.unexpectedSuccesses:
info.append("unexpected successes={}".format(len(result.unexpectedSuccesses)))
if info:
self.stream.writeln(" ({})".format(", ".join(info)))
else:
self.stream.write("\n")
def run(self, test: unittest.TestSuite):
"""
Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
Will then wait for the result and return them
:param test: the TestSuite to run
:return: a summary of the test run
"""
start_time = time.time()
process = []
resource_manager = multiprocessing.Manager()
results_queue = resource_manager.Queue()
tasks_running = resource_manager.BoundedSemaphore(self.process_number)
test_suites, local_test_suites = self.collect_tests(test)
results_collector = ResultCollector(
self.stream, self.descriptions, self.verbosity,
result_queue=results_queue, test_results=self._makeResult(),
tests=test_suites
)
results_collector.start()
for index, suite in enumerate(test_suites):
tasks_running.acquire()
x = self.Process(index, suite, results_queue, tasks_running)
x.start()
process.append(x)
local_test_suites.run(results_collector)
for i in process:
i.join()
results_queue.join()
results_collector.end_collection()
results_collector.join()
results_collector.printErrors()
self.print_summary(results_collector, time.time() - start_time)
return results_collector
|
BenjaminSchubert/NitPycker
|
nitpycker/runner.py
|
ParallelRunner.class_can_run_parallel
|
python
|
def class_can_run_parallel(test_class: unittest.TestSuite) -> bool:
for test_case in test_class:
return not getattr(test_case, "__no_parallel__", False)
|
Checks if a given class of tests can be run in parallel or not
:param test_class: the class to run
:return: True if te class can be run in parallel, False otherwise
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/runner.py#L138-L146
| null |
class ParallelRunner:
"""
A parallel test runner for unittest
:param stream: stream to use for tests reporting
:param descriptions: whether to display descriptions or not
:param verbosity: verbosity of the test result reporters
:param failfast: stops the test on the first error or failure
:param buffer: whether to buffer output of the test or not. On buffering, this
will discard output for passing tests and display normally on failing tests
:param resultclass: single or list of result class to use
:param warnings: warning filter that should be used while running the tests
:param tb_locals: if true, local variables will be shown in tracebacks
:param process_number: number of processes to use for running the tests
"""
# TODO implement buffering
# TODO implement failfast
# TODO implement warnings
# TODO verify tb_locals works
resultclass = (TextTestResult,)
result_collector_class = ResultCollector
class Process(multiprocessing.Process):
"""
A simple test runner for a TestSuite.
:param index: index to find the test
:param test: the unittest.TestSuite to run
:param results_queue: a queue where to put the results once done
:param manager: the plugin manager to be called before and after the run
:param task_done_notifier: semaphore to acquire to notify from end of task
:param kwargs: additional arguments to pass to the process
"""
def __init__(self, index: int, test: unittest.TestSuite, results_queue: queue.Queue,
task_done_notifier: threading.Semaphore, **kwargs):
super().__init__(**kwargs)
self.index = index
self.test = test
self.results = InterProcessResult(results_queue)
self.results_queue = results_queue
self.task_done = task_done_notifier
def run(self) -> None:
""" Launches the test and notifies of the result """
try:
self.test(self.results)
except (PicklingError, TypeError) as exc: # PicklingError is in Python 3.4, TypeError in Python 3.5
self.results_queue.put((TestState.serialization_failure, self.index, exc))
finally:
self.task_done.release()
def __init__(self, stream=None, descriptions=True, verbosity=1, failfast=False, buffer=False, resultclass=None,
warnings=None, *, tb_locals=False, process_number=multiprocessing.cpu_count(),
result_collector_class=None):
if stream is None:
stream = sys.stderr
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
self.tb_locals = tb_locals
self.warnings = warnings
self.process_number = process_number
if resultclass is not None:
if isinstance(resultclass, collections.Iterable):
self.resultclass = resultclass
else:
self.resultclass = (resultclass,)
if result_collector_class is not None:
self.result_collector_class = result_collector_class
# noinspection PyPep8Naming
def _makeResult(self):
""" instantiates the result class reporters """
return [reporter(self.stream, self.descriptions, self.verbosity) for reporter in self.resultclass]
@staticmethod
def module_can_run_parallel(test_module: unittest.TestSuite) -> bool:
"""
Checks if a given module of tests can be run in parallel or not
:param test_module: the module to run
:return: True if the module can be run on parallel, False otherwise
"""
for test_class in test_module:
# if the test is already failed, we just don't filter it
# and let the test runner deal with it later.
if hasattr(unittest.loader, '_FailedTest'): # import failure in python 3.4.5+
# noinspection PyProtectedMember
if isinstance(test_class, unittest.loader._FailedTest):
continue
if not isinstance(test_class, collections.Iterable): # likely an import failure in python 3.4.4-
# before python 3.4.5, test import failures were not serializable.
# We are unable to be sure that this is a module import failure, but it very likely is
# if this is the case, we'll just run this locally and see
raise TestClassNotIterable()
for test_case in test_class:
return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)
@staticmethod
def collect_tests(self, tests):
"""
split all tests into chunks to be executed on multiple processes
:param tests: tests that need to be run
:return: list of tests suites, test that need to be run locally
"""
test_suites = []
local_test_suites = unittest.TestSuite()
for test_module in tests:
try:
can_run_parallel = self.module_can_run_parallel(test_module)
except TestClassNotIterable:
local_test_suites.addTest(test_module)
continue
else:
if not can_run_parallel:
test_suites.append(test_module)
continue
for test_class in test_module:
if not self.class_can_run_parallel(test_class):
test_suites.append(test_class)
continue
for _test in test_class:
test_suite = unittest.TestSuite()
test_suite.addTest(_test)
test_suites.append(test_suite)
return test_suites, local_test_suites
def print_summary(self, result, time_taken):
"""
Prints the test summary, how many tests failed, how long it took, etc
:param result: result class to use to print summary
:param time_taken: the time all tests took to run
"""
if hasattr(result, "separator2"):
self.stream.writeln(result.separator2)
self.stream.writeln("Ran {number_of_tests} test{s} in {time:.3f}s\n".format(
number_of_tests=result.testsRun, s="s" if result.testsRun != 1 else "", time=time_taken
))
info = []
if not result.wasSuccessful():
self.stream.write("FAILED")
if result.failures:
info.append("failures={}".format(len(result.failures)))
if result.errors:
info.append("errors={}".format(len(result.errors)))
else:
self.stream.write("OK")
if result.skipped:
info.append("skipped={}".format(len(result.skipped)))
if result.expectedFailures:
info.append("expected failures={}".format(len(result.expectedFailures)))
if result.unexpectedSuccesses:
info.append("unexpected successes={}".format(len(result.unexpectedSuccesses)))
if info:
self.stream.writeln(" ({})".format(", ".join(info)))
else:
self.stream.write("\n")
def run(self, test: unittest.TestSuite):
"""
Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
Will then wait for the result and return them
:param test: the TestSuite to run
:return: a summary of the test run
"""
start_time = time.time()
process = []
resource_manager = multiprocessing.Manager()
results_queue = resource_manager.Queue()
tasks_running = resource_manager.BoundedSemaphore(self.process_number)
test_suites, local_test_suites = self.collect_tests(test)
results_collector = ResultCollector(
self.stream, self.descriptions, self.verbosity,
result_queue=results_queue, test_results=self._makeResult(),
tests=test_suites
)
results_collector.start()
for index, suite in enumerate(test_suites):
tasks_running.acquire()
x = self.Process(index, suite, results_queue, tasks_running)
x.start()
process.append(x)
local_test_suites.run(results_collector)
for i in process:
i.join()
results_queue.join()
results_collector.end_collection()
results_collector.join()
results_collector.printErrors()
self.print_summary(results_collector, time.time() - start_time)
return results_collector
|
BenjaminSchubert/NitPycker
|
nitpycker/runner.py
|
ParallelRunner.print_summary
|
python
|
def print_summary(self, result, time_taken):
if hasattr(result, "separator2"):
self.stream.writeln(result.separator2)
self.stream.writeln("Ran {number_of_tests} test{s} in {time:.3f}s\n".format(
number_of_tests=result.testsRun, s="s" if result.testsRun != 1 else "", time=time_taken
))
info = []
if not result.wasSuccessful():
self.stream.write("FAILED")
if result.failures:
info.append("failures={}".format(len(result.failures)))
if result.errors:
info.append("errors={}".format(len(result.errors)))
else:
self.stream.write("OK")
if result.skipped:
info.append("skipped={}".format(len(result.skipped)))
if result.expectedFailures:
info.append("expected failures={}".format(len(result.expectedFailures)))
if result.unexpectedSuccesses:
info.append("unexpected successes={}".format(len(result.unexpectedSuccesses)))
if info:
self.stream.writeln(" ({})".format(", ".join(info)))
else:
self.stream.write("\n")
|
Prints the test summary, how many tests failed, how long it took, etc
:param result: result class to use to print summary
:param time_taken: the time all tests took to run
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/runner.py#L182-L217
| null |
class ParallelRunner:
"""
A parallel test runner for unittest
:param stream: stream to use for tests reporting
:param descriptions: whether to display descriptions or not
:param verbosity: verbosity of the test result reporters
:param failfast: stops the test on the first error or failure
:param buffer: whether to buffer output of the test or not. On buffering, this
will discard output for passing tests and display normally on failing tests
:param resultclass: single or list of result class to use
:param warnings: warning filter that should be used while running the tests
:param tb_locals: if true, local variables will be shown in tracebacks
:param process_number: number of processes to use for running the tests
"""
# TODO implement buffering
# TODO implement failfast
# TODO implement warnings
# TODO verify tb_locals works
resultclass = (TextTestResult,)
result_collector_class = ResultCollector
class Process(multiprocessing.Process):
"""
A simple test runner for a TestSuite.
:param index: index to find the test
:param test: the unittest.TestSuite to run
:param results_queue: a queue where to put the results once done
:param manager: the plugin manager to be called before and after the run
:param task_done_notifier: semaphore to acquire to notify from end of task
:param kwargs: additional arguments to pass to the process
"""
def __init__(self, index: int, test: unittest.TestSuite, results_queue: queue.Queue,
task_done_notifier: threading.Semaphore, **kwargs):
super().__init__(**kwargs)
self.index = index
self.test = test
self.results = InterProcessResult(results_queue)
self.results_queue = results_queue
self.task_done = task_done_notifier
def run(self) -> None:
""" Launches the test and notifies of the result """
try:
self.test(self.results)
except (PicklingError, TypeError) as exc: # PicklingError is in Python 3.4, TypeError in Python 3.5
self.results_queue.put((TestState.serialization_failure, self.index, exc))
finally:
self.task_done.release()
def __init__(self, stream=None, descriptions=True, verbosity=1, failfast=False, buffer=False, resultclass=None,
warnings=None, *, tb_locals=False, process_number=multiprocessing.cpu_count(),
result_collector_class=None):
if stream is None:
stream = sys.stderr
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
self.tb_locals = tb_locals
self.warnings = warnings
self.process_number = process_number
if resultclass is not None:
if isinstance(resultclass, collections.Iterable):
self.resultclass = resultclass
else:
self.resultclass = (resultclass,)
if result_collector_class is not None:
self.result_collector_class = result_collector_class
# noinspection PyPep8Naming
def _makeResult(self):
""" instantiates the result class reporters """
return [reporter(self.stream, self.descriptions, self.verbosity) for reporter in self.resultclass]
@staticmethod
def module_can_run_parallel(test_module: unittest.TestSuite) -> bool:
"""
Checks if a given module of tests can be run in parallel or not
:param test_module: the module to run
:return: True if the module can be run on parallel, False otherwise
"""
for test_class in test_module:
# if the test is already failed, we just don't filter it
# and let the test runner deal with it later.
if hasattr(unittest.loader, '_FailedTest'): # import failure in python 3.4.5+
# noinspection PyProtectedMember
if isinstance(test_class, unittest.loader._FailedTest):
continue
if not isinstance(test_class, collections.Iterable): # likely an import failure in python 3.4.4-
# before python 3.4.5, test import failures were not serializable.
# We are unable to be sure that this is a module import failure, but it very likely is
# if this is the case, we'll just run this locally and see
raise TestClassNotIterable()
for test_case in test_class:
return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)
@staticmethod
def class_can_run_parallel(test_class: unittest.TestSuite) -> bool:
"""
Checks if a given class of tests can be run in parallel or not
:param test_class: the class to run
:return: True if te class can be run in parallel, False otherwise
"""
for test_case in test_class:
return not getattr(test_case, "__no_parallel__", False)
def collect_tests(self, tests):
"""
split all tests into chunks to be executed on multiple processes
:param tests: tests that need to be run
:return: list of tests suites, test that need to be run locally
"""
test_suites = []
local_test_suites = unittest.TestSuite()
for test_module in tests:
try:
can_run_parallel = self.module_can_run_parallel(test_module)
except TestClassNotIterable:
local_test_suites.addTest(test_module)
continue
else:
if not can_run_parallel:
test_suites.append(test_module)
continue
for test_class in test_module:
if not self.class_can_run_parallel(test_class):
test_suites.append(test_class)
continue
for _test in test_class:
test_suite = unittest.TestSuite()
test_suite.addTest(_test)
test_suites.append(test_suite)
return test_suites, local_test_suites
def run(self, test: unittest.TestSuite):
"""
Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
Will then wait for the result and return them
:param test: the TestSuite to run
:return: a summary of the test run
"""
start_time = time.time()
process = []
resource_manager = multiprocessing.Manager()
results_queue = resource_manager.Queue()
tasks_running = resource_manager.BoundedSemaphore(self.process_number)
test_suites, local_test_suites = self.collect_tests(test)
results_collector = ResultCollector(
self.stream, self.descriptions, self.verbosity,
result_queue=results_queue, test_results=self._makeResult(),
tests=test_suites
)
results_collector.start()
for index, suite in enumerate(test_suites):
tasks_running.acquire()
x = self.Process(index, suite, results_queue, tasks_running)
x.start()
process.append(x)
local_test_suites.run(results_collector)
for i in process:
i.join()
results_queue.join()
results_collector.end_collection()
results_collector.join()
results_collector.printErrors()
self.print_summary(results_collector, time.time() - start_time)
return results_collector
|
BenjaminSchubert/NitPycker
|
nitpycker/runner.py
|
ParallelRunner.run
|
python
|
def run(self, test: unittest.TestSuite):
start_time = time.time()
process = []
resource_manager = multiprocessing.Manager()
results_queue = resource_manager.Queue()
tasks_running = resource_manager.BoundedSemaphore(self.process_number)
test_suites, local_test_suites = self.collect_tests(test)
results_collector = ResultCollector(
self.stream, self.descriptions, self.verbosity,
result_queue=results_queue, test_results=self._makeResult(),
tests=test_suites
)
results_collector.start()
for index, suite in enumerate(test_suites):
tasks_running.acquire()
x = self.Process(index, suite, results_queue, tasks_running)
x.start()
process.append(x)
local_test_suites.run(results_collector)
for i in process:
i.join()
results_queue.join()
results_collector.end_collection()
results_collector.join()
results_collector.printErrors()
self.print_summary(results_collector, time.time() - start_time)
return results_collector
|
Given a TestSuite, will create one process per test case whenever possible and run them concurrently.
Will then wait for the result and return them
:param test: the TestSuite to run
:return: a summary of the test run
|
train
|
https://github.com/BenjaminSchubert/NitPycker/blob/3ac2b3bf06f1d704b4853167a967311b0465a76f/nitpycker/runner.py#L219-L261
|
[
"def end_collection(self) -> None:\n \"\"\" Tells the thread that is it time to end \"\"\"\n self.cleanup = True\n",
"def printErrors(self):\n \"\"\"\n print test report\n \"\"\"\n self._call_test_results('printErrors')\n",
"def _makeResult(self):\n \"\"\" instantiates the result class reporters \"\"\"\n return [reporter(self.stream, self.descriptions, self.verbosity) for reporter in self.resultclass]\n",
"def collect_tests(self, tests):\n \"\"\"\n split all tests into chunks to be executed on multiple processes\n\n :param tests: tests that need to be run\n :return: list of tests suites, test that need to be run locally\n \"\"\"\n\n test_suites = []\n local_test_suites = unittest.TestSuite()\n\n for test_module in tests:\n try:\n can_run_parallel = self.module_can_run_parallel(test_module)\n except TestClassNotIterable:\n local_test_suites.addTest(test_module)\n continue\n else:\n if not can_run_parallel:\n test_suites.append(test_module)\n continue\n\n for test_class in test_module:\n if not self.class_can_run_parallel(test_class):\n test_suites.append(test_class)\n continue\n\n for _test in test_class:\n test_suite = unittest.TestSuite()\n test_suite.addTest(_test)\n test_suites.append(test_suite)\n\n return test_suites, local_test_suites\n",
"def print_summary(self, result, time_taken):\n \"\"\"\n Prints the test summary, how many tests failed, how long it took, etc\n\n :param result: result class to use to print summary\n :param time_taken: the time all tests took to run\n \"\"\"\n if hasattr(result, \"separator2\"):\n self.stream.writeln(result.separator2)\n\n self.stream.writeln(\"Ran {number_of_tests} test{s} in {time:.3f}s\\n\".format(\n number_of_tests=result.testsRun, s=\"s\" if result.testsRun != 1 else \"\", time=time_taken\n ))\n\n info = []\n if not result.wasSuccessful():\n self.stream.write(\"FAILED\")\n\n if result.failures:\n info.append(\"failures={}\".format(len(result.failures)))\n if result.errors:\n info.append(\"errors={}\".format(len(result.errors)))\n else:\n self.stream.write(\"OK\")\n\n if result.skipped:\n info.append(\"skipped={}\".format(len(result.skipped)))\n if result.expectedFailures:\n info.append(\"expected failures={}\".format(len(result.expectedFailures)))\n if result.unexpectedSuccesses:\n info.append(\"unexpected successes={}\".format(len(result.unexpectedSuccesses)))\n\n if info:\n self.stream.writeln(\" ({})\".format(\", \".join(info)))\n else:\n self.stream.write(\"\\n\")\n"
] |
class ParallelRunner:
"""
A parallel test runner for unittest
:param stream: stream to use for tests reporting
:param descriptions: whether to display descriptions or not
:param verbosity: verbosity of the test result reporters
:param failfast: stops the test on the first error or failure
:param buffer: whether to buffer output of the test or not. On buffering, this
will discard output for passing tests and display normally on failing tests
:param resultclass: single or list of result class to use
:param warnings: warning filter that should be used while running the tests
:param tb_locals: if true, local variables will be shown in tracebacks
:param process_number: number of processes to use for running the tests
"""
# TODO implement buffering
# TODO implement failfast
# TODO implement warnings
# TODO verify tb_locals works
resultclass = (TextTestResult,)
result_collector_class = ResultCollector
class Process(multiprocessing.Process):
"""
A simple test runner for a TestSuite.
:param index: index to find the test
:param test: the unittest.TestSuite to run
:param results_queue: a queue where to put the results once done
:param manager: the plugin manager to be called before and after the run
:param task_done_notifier: semaphore to acquire to notify from end of task
:param kwargs: additional arguments to pass to the process
"""
def __init__(self, index: int, test: unittest.TestSuite, results_queue: queue.Queue,
task_done_notifier: threading.Semaphore, **kwargs):
super().__init__(**kwargs)
self.index = index
self.test = test
self.results = InterProcessResult(results_queue)
self.results_queue = results_queue
self.task_done = task_done_notifier
def run(self) -> None:
""" Launches the test and notifies of the result """
try:
self.test(self.results)
except (PicklingError, TypeError) as exc: # PicklingError is in Python 3.4, TypeError in Python 3.5
self.results_queue.put((TestState.serialization_failure, self.index, exc))
finally:
self.task_done.release()
def __init__(self, stream=None, descriptions=True, verbosity=1, failfast=False, buffer=False, resultclass=None,
warnings=None, *, tb_locals=False, process_number=multiprocessing.cpu_count(),
result_collector_class=None):
if stream is None:
stream = sys.stderr
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
self.tb_locals = tb_locals
self.warnings = warnings
self.process_number = process_number
if resultclass is not None:
if isinstance(resultclass, collections.Iterable):
self.resultclass = resultclass
else:
self.resultclass = (resultclass,)
if result_collector_class is not None:
self.result_collector_class = result_collector_class
# noinspection PyPep8Naming
def _makeResult(self):
""" instantiates the result class reporters """
return [reporter(self.stream, self.descriptions, self.verbosity) for reporter in self.resultclass]
@staticmethod
def module_can_run_parallel(test_module: unittest.TestSuite) -> bool:
"""
Checks if a given module of tests can be run in parallel or not
:param test_module: the module to run
:return: True if the module can be run on parallel, False otherwise
"""
for test_class in test_module:
# if the test is already failed, we just don't filter it
# and let the test runner deal with it later.
if hasattr(unittest.loader, '_FailedTest'): # import failure in python 3.4.5+
# noinspection PyProtectedMember
if isinstance(test_class, unittest.loader._FailedTest):
continue
if not isinstance(test_class, collections.Iterable): # likely an import failure in python 3.4.4-
# before python 3.4.5, test import failures were not serializable.
# We are unable to be sure that this is a module import failure, but it very likely is
# if this is the case, we'll just run this locally and see
raise TestClassNotIterable()
for test_case in test_class:
return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)
@staticmethod
def class_can_run_parallel(test_class: unittest.TestSuite) -> bool:
"""
Checks if a given class of tests can be run in parallel or not
:param test_class: the class to run
:return: True if te class can be run in parallel, False otherwise
"""
for test_case in test_class:
return not getattr(test_case, "__no_parallel__", False)
def collect_tests(self, tests):
"""
split all tests into chunks to be executed on multiple processes
:param tests: tests that need to be run
:return: list of tests suites, test that need to be run locally
"""
test_suites = []
local_test_suites = unittest.TestSuite()
for test_module in tests:
try:
can_run_parallel = self.module_can_run_parallel(test_module)
except TestClassNotIterable:
local_test_suites.addTest(test_module)
continue
else:
if not can_run_parallel:
test_suites.append(test_module)
continue
for test_class in test_module:
if not self.class_can_run_parallel(test_class):
test_suites.append(test_class)
continue
for _test in test_class:
test_suite = unittest.TestSuite()
test_suite.addTest(_test)
test_suites.append(test_suite)
return test_suites, local_test_suites
def print_summary(self, result, time_taken):
"""
Prints the test summary, how many tests failed, how long it took, etc
:param result: result class to use to print summary
:param time_taken: the time all tests took to run
"""
if hasattr(result, "separator2"):
self.stream.writeln(result.separator2)
self.stream.writeln("Ran {number_of_tests} test{s} in {time:.3f}s\n".format(
number_of_tests=result.testsRun, s="s" if result.testsRun != 1 else "", time=time_taken
))
info = []
if not result.wasSuccessful():
self.stream.write("FAILED")
if result.failures:
info.append("failures={}".format(len(result.failures)))
if result.errors:
info.append("errors={}".format(len(result.errors)))
else:
self.stream.write("OK")
if result.skipped:
info.append("skipped={}".format(len(result.skipped)))
if result.expectedFailures:
info.append("expected failures={}".format(len(result.expectedFailures)))
if result.unexpectedSuccesses:
info.append("unexpected successes={}".format(len(result.unexpectedSuccesses)))
if info:
self.stream.writeln(" ({})".format(", ".join(info)))
else:
self.stream.write("\n")
|
shaypal5/utilitime
|
utilitime/datetime/datetime.py
|
utc_offset_by_timezone
|
python
|
def utc_offset_by_timezone(timezone_name):
return int(pytz.timezone(timezone_name).utcoffset(
utc_time()).total_seconds()/SECONDS_IN_HOUR)
|
Returns the UTC offset of the given timezone in hours.
Arguments
---------
timezone_name: str
A string with a name of a timezone.
Returns
-------
int
The UTC offset of the given timezone, in hours.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/datetime/datetime.py#L29-L43
|
[
"def utc_time():\n \"\"\"Returns the current server time as a datetime object.\n\n This assumes all servers run in UTC time.\n \"\"\"\n return datetime.utcnow()\n"
] |
"""Datetime-related utility functions."""
from datetime import datetime, timezone
import pytz
from decore import lazy_property
from ..constants import (
SECONDS_IN_HOUR,
)
# === datetime-related functions ===
@lazy_property
def epoch_datetime():
"""Returns the epoch as a datetime.datetime object."""
return datetime.utcfromtimestamp(0)
def utc_time():
"""Returns the current server time as a datetime object.
This assumes all servers run in UTC time.
"""
return datetime.utcnow()
def localize_datetime(datetime_obj, timezone_name):
"""Localizes the given UTC-aligned datetime by the given timezone.
Arguments
---------
datetime_obj : datetime.datetime
A datetime object decipting a specific point in time, aligned by UTC.
timezone_name: str
A string with a name of a timezone.
Returns
-------
datetime.datetime
An datetime object aligned by the given timezone.
"""
return datetime_obj.replace(tzinfo=pytz.utc).astimezone(
pytz.timezone(timezone_name))
def datetime_to_dateint(datetime_obj):
"""Converts the given datetime object to the corresponding dateint.
Arguments
---------
datetime_obj : datetime.datetime
A datetime object decipting a specific point in time.
Returns
-------
int
An integer represeting the day, month and year of the given point in
time. For example, 3:32 AM on December 3rd 2015 will be converted to
the integer 20151203.
"""
return datetime_obj.year * 10000 + datetime_obj.month * 100 \
+ datetime_obj.day
def local_datetime_to_timestamp(datetime_obj):
"""Converts the given localized naive datetime object to a UTC timestamp.
Arguments
---------
datetime_obj : datetime.datetime
A naive (not timezone-aware) datetime object decipting a specific
point in time in the local machine timezone.
Returns
-------
int
The UTC timestamp corresponding to the given datetime object.
"""
return int(datetime_obj.timestamp())
def utc_datetime_to_timestamp(datetime_obj):
"""Converts the given naive UTC-aligned datetime object to a UTC timestamp.
Arguments
---------
datetime_obj : datetime.datetime
A naive (not timezone-aware) datetime object decipting a specific
point in time in UTC time.
Returns
-------
int
The UTC timestamp corresponding to the given datetime object.
"""
return int(datetime_obj.replace(tzinfo=timezone.utc).timestamp())
|
shaypal5/utilitime
|
utilitime/datetime/datetime.py
|
localize_datetime
|
python
|
def localize_datetime(datetime_obj, timezone_name):
return datetime_obj.replace(tzinfo=pytz.utc).astimezone(
pytz.timezone(timezone_name))
|
Localizes the given UTC-aligned datetime by the given timezone.
Arguments
---------
datetime_obj : datetime.datetime
A datetime object decipting a specific point in time, aligned by UTC.
timezone_name: str
A string with a name of a timezone.
Returns
-------
datetime.datetime
An datetime object aligned by the given timezone.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/datetime/datetime.py#L46-L62
| null |
"""Datetime-related utility functions."""
from datetime import datetime, timezone
import pytz
from decore import lazy_property
from ..constants import (
SECONDS_IN_HOUR,
)
# === datetime-related functions ===
@lazy_property
def epoch_datetime():
"""Returns the epoch as a datetime.datetime object."""
return datetime.utcfromtimestamp(0)
def utc_time():
"""Returns the current server time as a datetime object.
This assumes all servers run in UTC time.
"""
return datetime.utcnow()
def utc_offset_by_timezone(timezone_name):
"""Returns the UTC offset of the given timezone in hours.
Arguments
---------
timezone_name: str
A string with a name of a timezone.
Returns
-------
int
The UTC offset of the given timezone, in hours.
"""
return int(pytz.timezone(timezone_name).utcoffset(
utc_time()).total_seconds()/SECONDS_IN_HOUR)
def datetime_to_dateint(datetime_obj):
"""Converts the given datetime object to the corresponding dateint.
Arguments
---------
datetime_obj : datetime.datetime
A datetime object decipting a specific point in time.
Returns
-------
int
An integer represeting the day, month and year of the given point in
time. For example, 3:32 AM on December 3rd 2015 will be converted to
the integer 20151203.
"""
return datetime_obj.year * 10000 + datetime_obj.month * 100 \
+ datetime_obj.day
def local_datetime_to_timestamp(datetime_obj):
"""Converts the given localized naive datetime object to a UTC timestamp.
Arguments
---------
datetime_obj : datetime.datetime
A naive (not timezone-aware) datetime object decipting a specific
point in time in the local machine timezone.
Returns
-------
int
The UTC timestamp corresponding to the given datetime object.
"""
return int(datetime_obj.timestamp())
def utc_datetime_to_timestamp(datetime_obj):
"""Converts the given naive UTC-aligned datetime object to a UTC timestamp.
Arguments
---------
datetime_obj : datetime.datetime
A naive (not timezone-aware) datetime object decipting a specific
point in time in UTC time.
Returns
-------
int
The UTC timestamp corresponding to the given datetime object.
"""
return int(datetime_obj.replace(tzinfo=timezone.utc).timestamp())
|
shaypal5/utilitime
|
utilitime/timestamp/timestamp.py
|
timestamp_to_local_time
|
python
|
def timestamp_to_local_time(timestamp, timezone_name):
# first convert timestamp to UTC
utc_time = datetime.utcfromtimestamp(float(timestamp))
delo = Delorean(utc_time, timezone='UTC')
# shift d according to input timezone
localized_d = delo.shift(timezone_name)
return localized_d
|
Convert epoch timestamp to a localized Delorean datetime object.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
Returns
-------
delorean.Delorean
A localized Delorean datetime object.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/timestamp/timestamp.py#L12-L32
| null |
"""Timestamp-related utility functions."""
from datetime import datetime
import calendar
import pytz
from delorean import Delorean
from ..datetime import datetime_to_dateint
def timestamp_to_local_time_str(
timestamp, timezone_name, fmt="yyyy-MM-dd HH:mm:ss"):
"""Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string.
"""
localized_d = timestamp_to_local_time(timestamp, timezone_name)
localized_datetime_str = localized_d.format_datetime(fmt)
return localized_datetime_str
def get_timestamp(timezone_name, year, month, day, hour=0, minute=0):
    """Return the epoch timestamp of the given date/time in the given timezone."""
    # Localize the naive datetime to the named timezone, then read the
    # corresponding UTC time tuple back as an epoch timestamp.
    naive_dt = datetime(year, month, day, hour, minute)
    localized_dt = pytz.timezone(timezone_name).localize(naive_dt)
    return calendar.timegm(localized_dt.utctimetuple())
def timestamp_to_datetime(timestamp):
    """Converts a UTC timestamp to a naive UTC-aligned datetime object.

    Arguments
    ---------
    timestamp : int
        A UTC timestamp.

    Returns
    -------
    datetime.datetime
        A naive (timezone-unaware) UTC-aligned datetime object corresponding
        to the given timestamp.
    """
    # datetime.utcfromtimestamp() is deprecated since Python 3.12; build an
    # aware UTC datetime instead and drop the tzinfo to preserve the naive
    # return value that callers (e.g. timestamp_to_dateint) expect.
    from datetime import timezone  # local import: module top only imports datetime
    return datetime.fromtimestamp(timestamp, tz=timezone.utc).replace(tzinfo=None)
def tz_aware_dt_from_timestamp_and_tz(timestamp, timezone_name):
    """Create a timezone-aware datetime object from the given timestamp and
    timezone."""
    # fromtimestamp attaches the provided tzinfo, so the result is aware
    # rather than naive.
    return datetime.fromtimestamp(timestamp, tz=timezone_name)
def timestamp_to_dateint(timestamp):
    """Convert a UTC timestamp to the dateint of the corresponding day.

    Arguments
    ---------
    timestamp : int
        A UTC timestamp.

    Returns
    -------
    int
        An integer depicting the calendar day - e.g. 20161225 -
        corresponding to the given timestamp.
    """
    utc_dt = timestamp_to_datetime(timestamp)
    return datetime_to_dateint(utc_dt)
_LEAP_YEAR_SINCE_EPOCH = [1972, 1976, 1980, 1984, 1988, 1992, 1996, 2000, 2004,
2008, 2012, 2016]
_AVG_SEC_IN_YEAR = 365 * 24 * 60 * 60 + 5 * 60 * 60 + 48 * 60 + 45
# from ..constants import (SECONDS_IN_COMMON_YEAR, SECONDS_IN_LEAP_YEAR)
def _efficient_timestamp_to_dateint():
    """Placeholder for an arithmetic-only timestamp-to-dateint conversion.

    Intended to avoid datetime construction by using the
    _LEAP_YEAR_SINCE_EPOCH and _AVG_SEC_IN_YEAR constants defined above.
    Not implemented yet.
    """
    #todo: use above constants
    pass
|
shaypal5/utilitime
|
utilitime/timestamp/timestamp.py
|
timestamp_to_local_time_str
|
python
|
def timestamp_to_local_time_str(
        timestamp, timezone_name, fmt="yyyy-MM-dd HH:mm:ss"):
    """Convert an epoch timestamp to a localized datetime string.

    Arguments
    ---------
    timestamp : int
        The epoch timestamp to convert.
    timezone_name : datetime.timezone
        The timezone of the desired local time.
    fmt : str
        The format of the output string.

    Returns
    -------
    str
        The localized datetime string.
    """
    # Localize first, then render with Delorean's format_datetime.
    local_dt = timestamp_to_local_time(timestamp, timezone_name)
    return local_dt.format_datetime(fmt)
|
Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/timestamp/timestamp.py#L35-L55
|
[
"def timestamp_to_local_time(timestamp, timezone_name):\n \"\"\"Convert epoch timestamp to a localized Delorean datetime object.\n\n Arguments\n ---------\n timestamp : int\n The timestamp to convert.\n timezone_name : datetime.timezone\n The timezone of the desired local time.\n\n Returns\n -------\n delorean.Delorean\n A localized Delorean datetime object.\n \"\"\"\n # first convert timestamp to UTC\n utc_time = datetime.utcfromtimestamp(float(timestamp))\n delo = Delorean(utc_time, timezone='UTC')\n # shift d according to input timezone\n localized_d = delo.shift(timezone_name)\n return localized_d\n"
] |
"""Timestamp-related utility functions."""
from datetime import datetime
import calendar
import pytz
from delorean import Delorean
from ..datetime import datetime_to_dateint
def timestamp_to_local_time(timestamp, timezone_name):
"""Convert epoch timestamp to a localized Delorean datetime object.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
Returns
-------
delorean.Delorean
A localized Delorean datetime object.
"""
# first convert timestamp to UTC
utc_time = datetime.utcfromtimestamp(float(timestamp))
delo = Delorean(utc_time, timezone='UTC')
# shift d according to input timezone
localized_d = delo.shift(timezone_name)
return localized_d
def get_timestamp(timezone_name, year, month, day, hour=0, minute=0):
"""Epoch timestamp from timezone, year, month, day, hour and minute."""
tz = pytz.timezone(timezone_name)
tz_datetime = tz.localize(datetime(year, month, day, hour, minute))
timestamp = calendar.timegm(tz_datetime.utctimetuple())
return timestamp
def timestamp_to_datetime(timestamp):
"""Converts a UTC timestamp to a UTC-aligned datetime object.
Arguments
---------
timestamp : int
A UTC timestamp.
Returns
-------
datetime.datetime
A UTC-aligned datetime object corresponding to the given timestamp.
"""
return datetime.utcfromtimestamp(timestamp)
def tz_aware_dt_from_timestamp_and_tz(timestamp, timezone_name):
"""Creates a timezone-aware datetime object from given timestamp and
timezone."""
return datetime.fromtimestamp(timestamp, timezone_name)
def timestamp_to_dateint(timestamp):
"""Converts a UTC timestamp to a dateint of the corresponding day.
Arguments
---------
timestamp : int
A UTC timestamp.
Returns
-------
int
An integer object decipting the calendaric day - e.g. 20161225 -
corresponding to the given timestamp.
"""
return datetime_to_dateint(timestamp_to_datetime(timestamp))
_LEAP_YEAR_SINCE_EPOCH = [1972, 1976, 1980, 1984, 1988, 1992, 1996, 2000, 2004,
2008, 2012, 2016]
_AVG_SEC_IN_YEAR = 365 * 24 * 60 * 60 + 5 * 60 * 60 + 48 * 60 + 45
# from ..constants import (SECONDS_IN_COMMON_YEAR, SECONDS_IN_LEAP_YEAR)
def _efficient_timestamp_to_dateint():
#todo: use above constants
pass
|
shaypal5/utilitime
|
utilitime/timestamp/timestamp.py
|
get_timestamp
|
python
|
def get_timestamp(timezone_name, year, month, day, hour=0, minute=0):
tz = pytz.timezone(timezone_name)
tz_datetime = tz.localize(datetime(year, month, day, hour, minute))
timestamp = calendar.timegm(tz_datetime.utctimetuple())
return timestamp
|
Epoch timestamp from timezone, year, month, day, hour and minute.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/timestamp/timestamp.py#L58-L63
| null |
"""Timestamp-related utility functions."""
from datetime import datetime
import calendar
import pytz
from delorean import Delorean
from ..datetime import datetime_to_dateint
def timestamp_to_local_time(timestamp, timezone_name):
"""Convert epoch timestamp to a localized Delorean datetime object.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
Returns
-------
delorean.Delorean
A localized Delorean datetime object.
"""
# first convert timestamp to UTC
utc_time = datetime.utcfromtimestamp(float(timestamp))
delo = Delorean(utc_time, timezone='UTC')
# shift d according to input timezone
localized_d = delo.shift(timezone_name)
return localized_d
def timestamp_to_local_time_str(
timestamp, timezone_name, fmt="yyyy-MM-dd HH:mm:ss"):
"""Convert epoch timestamp to a localized datetime string.
Arguments
---------
timestamp : int
The timestamp to convert.
timezone_name : datetime.timezone
The timezone of the desired local time.
fmt : str
The format of the output string.
Returns
-------
str
The localized datetime string.
"""
localized_d = timestamp_to_local_time(timestamp, timezone_name)
localized_datetime_str = localized_d.format_datetime(fmt)
return localized_datetime_str
def timestamp_to_datetime(timestamp):
"""Converts a UTC timestamp to a UTC-aligned datetime object.
Arguments
---------
timestamp : int
A UTC timestamp.
Returns
-------
datetime.datetime
A UTC-aligned datetime object corresponding to the given timestamp.
"""
return datetime.utcfromtimestamp(timestamp)
def tz_aware_dt_from_timestamp_and_tz(timestamp, timezone_name):
"""Creates a timezone-aware datetime object from given timestamp and
timezone."""
return datetime.fromtimestamp(timestamp, timezone_name)
def timestamp_to_dateint(timestamp):
"""Converts a UTC timestamp to a dateint of the corresponding day.
Arguments
---------
timestamp : int
A UTC timestamp.
Returns
-------
int
An integer object decipting the calendaric day - e.g. 20161225 -
corresponding to the given timestamp.
"""
return datetime_to_dateint(timestamp_to_datetime(timestamp))
_LEAP_YEAR_SINCE_EPOCH = [1972, 1976, 1980, 1984, 1988, 1992, 1996, 2000, 2004,
2008, 2012, 2016]
_AVG_SEC_IN_YEAR = 365 * 24 * 60 * 60 + 5 * 60 * 60 + 48 * 60 + 45
# from ..constants import (SECONDS_IN_COMMON_YEAR, SECONDS_IN_LEAP_YEAR)
def _efficient_timestamp_to_dateint():
#todo: use above constants
pass
|
shaypal5/utilitime
|
utilitime/time/time.py
|
decompose_seconds_in_day
|
python
|
def decompose_seconds_in_day(seconds):
    """Decomposes seconds in day into hour, minute and second components.

    Arguments
    ---------
    seconds : int
        A time of day by the number of seconds passed since midnight.
        Values of one day or more (up to two days) wrap into the next day.

    Returns
    -------
    hour : int
        The hour component of the given time of day.
    minute : int
        The minute component of the given time of day.
    second : int
        The second component of the given time of day.

    Raises
    ------
    ValueError
        If ``seconds`` is negative.
    """
    # Use >= (not >) so that exactly SECONDS_IN_DAY wraps to 00:00:00;
    # with >, the hour came out as 24, which datetime.time rejects in
    # callers such as seconds_in_day_to_time.
    if seconds >= SECONDS_IN_DAY:
        seconds = seconds - SECONDS_IN_DAY
    if seconds < 0:
        raise ValueError("seconds param must be non-negative!")
    hour = int(seconds / 3600)
    leftover = seconds - hour * 3600
    minute = int(leftover / 60)
    second = leftover - minute * 60
    return hour, minute, second
|
Decomposes seconds in day into hour, minute and second components.
Arguments
---------
seconds : int
A time of day by the number of seconds passed since midnight.
Returns
-------
hour : int
The hour component of the given time of day.
minute : int
The minute component of the given time of day.
second : int
The second component of the given time of day.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/time/time.py#L10-L35
| null |
"""dateime.time-related utility functions."""
from datetime import time
from ..constants import (
SECONDS_IN_DAY,
)
def seconds_in_day_to_time(seconds):
"""Decomposes atime of day into hour, minute and seconds components.
Arguments
---------
seconds : int
A time of day by the number of seconds passed since midnight.
Returns
-------
datetime.time
The corresponding time of day as a datetime.time object.
Example
-------
>>> seconds_in_day_to_time(23430)
datetime.time(6, 30, 30)
"""
try:
return time(*decompose_seconds_in_day(seconds))
except ValueError:
print("Seconds = {}".format(seconds))
print("H = {}, M={}, S={}".format(*decompose_seconds_in_day(seconds)))
raise
def minutes_in_day_to_time(minutes):
"""Decomposes atime of day into hour, minute and seconds components.
Arguments
---------
minutes : int
A time of day by the number of minutes passed since midnight.
Returns
-------
datetime.time
The corresponding time of day as a datetime.time object.
Example
-------
>>> minutes_in_day_to_time(390)
datetime.time(6, 30, 00)
"""
return seconds_in_day_to_time(minutes*60)
|
shaypal5/utilitime
|
utilitime/time/time.py
|
seconds_in_day_to_time
|
python
|
def seconds_in_day_to_time(seconds):
    """Convert a seconds-since-midnight value to a datetime.time object.

    Arguments
    ---------
    seconds : int
        A time of day by the number of seconds passed since midnight.

    Returns
    -------
    datetime.time
        The corresponding time of day as a datetime.time object.

    Example
    -------
    >>> seconds_in_day_to_time(23430)
    datetime.time(6, 30, 30)
    """
    try:
        hour, minute, second = decompose_seconds_in_day(seconds)
        return time(hour, minute, second)
    except ValueError:
        # Dump the offending input and its decomposition before re-raising.
        print("Seconds = {}".format(seconds))
        print("H = {}, M={}, S={}".format(*decompose_seconds_in_day(seconds)))
        raise
|
Decomposes a time of day into hour, minute and second components.
Arguments
---------
seconds : int
A time of day by the number of seconds passed since midnight.
Returns
-------
datetime.time
The corresponding time of day as a datetime.time object.
Example
-------
>>> seconds_in_day_to_time(23430)
datetime.time(6, 30, 30)
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/time/time.py#L38-L61
|
[
"def decompose_seconds_in_day(seconds):\n \"\"\"Decomposes seconds in day into hour, minute and second components.\n\n Arguments\n ---------\n seconds : int\n A time of day by the number of seconds passed since midnight.\n\n Returns\n -------\n hour : int\n The hour component of the given time of day.\n minut : int\n The minute component of the given time of day.\n second : int\n The second component of the given time of day.\n \"\"\"\n if seconds > SECONDS_IN_DAY:\n seconds = seconds - SECONDS_IN_DAY\n if seconds < 0:\n raise ValueError(\"seconds param must be non-negative!\")\n hour = int(seconds / 3600)\n leftover = seconds - hour * 3600\n minute = int(leftover / 60)\n second = leftover - minute * 60\n return hour, minute, second\n"
] |
"""dateime.time-related utility functions."""
from datetime import time
from ..constants import (
SECONDS_IN_DAY,
)
def decompose_seconds_in_day(seconds):
"""Decomposes seconds in day into hour, minute and second components.
Arguments
---------
seconds : int
A time of day by the number of seconds passed since midnight.
Returns
-------
hour : int
The hour component of the given time of day.
minut : int
The minute component of the given time of day.
second : int
The second component of the given time of day.
"""
if seconds > SECONDS_IN_DAY:
seconds = seconds - SECONDS_IN_DAY
if seconds < 0:
raise ValueError("seconds param must be non-negative!")
hour = int(seconds / 3600)
leftover = seconds - hour * 3600
minute = int(leftover / 60)
second = leftover - minute * 60
return hour, minute, second
def minutes_in_day_to_time(minutes):
"""Decomposes atime of day into hour, minute and seconds components.
Arguments
---------
minutes : int
A time of day by the number of minutes passed since midnight.
Returns
-------
datetime.time
The corresponding time of day as a datetime.time object.
Example
-------
>>> minutes_in_day_to_time(390)
datetime.time(6, 30, 00)
"""
return seconds_in_day_to_time(minutes*60)
|
shaypal5/utilitime
|
utilitime/dateint/dateint.py
|
decompose_dateint
|
python
|
def decompose_dateint(dateint):
    """Split a dateint such as 20161225 into (year, month, day) components."""
    # The decimal digits encode YYYYMMDD, so two divmods peel off the parts.
    year, monthday = divmod(dateint, 10000)
    month, day = divmod(monthday, 100)
    return year, month, day
|
Decomposes the given dateint into its year, month and day components.
Arguments
---------
dateint : int
An integer object depicting a specific calendar day; e.g. 20161225.
Returns
-------
year : int
The year component of the given dateint.
month : int
The month component of the given dateint.
day : int
The day component of the given dateint.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L16-L37
| null |
"""Datetime-related utility functions."""
from datetime import datetime, timedelta, date
import math
from ..timestamp import get_timestamp
from ..datetime import (
utc_time,
datetime_to_dateint,
)
from ..constants import (
WEEKDAYS,
)
def dateint_to_date(dateint):
"""Converts the given integer to a datetime.date object.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.date
The corresponding date object.
Example
-------
>>> dateint_to_date(20170223)
datetime.date(2017, 2, 23)
"""
return date(*decompose_dateint(dateint))
def tz_aware_dateint_to_timestamp(dateint, timezone_name):
"""Returns the epoch timestamp for the given timezone and dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
timezone_name : str
The name of the timezone.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the given timezone.
"""
return get_timestamp(timezone_name, *decompose_dateint(dateint))
def dateint_to_timestamp(dateint):
"""Converts the given dateint to a timestamp, using the local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the local timezone.
"""
return int(dateint_to_datetime(dateint).timestamp())
def dateint_to_utc_timestamp(dateint):
"""Converts the given dateint to the corresponding UTC timestamp.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The UTC timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...).
"""
return tz_aware_dateint_to_timestamp(dateint, 'UTC')
def dateint_to_datetime(dateint):
"""Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
"""
if len(str(dateint)) != 8:
raise ValueError(
'Dateints must have exactly 8 digits; the first four representing '
'the year, the next two the months, and the last two the days.')
year, month, day = decompose_dateint(dateint)
return datetime(year=year, month=month, day=day)
def dateint_to_weekday(dateint, first_day='Monday'):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
int
The weekday of the given dateint, when first day of the week = 0,
last day of the week = 6.
Example
-------
>>> dateint_to_weekday(20170213)
0
>>> dateint_to_weekday(20170212)
6
>>> dateint_to_weekday(20170214)
1
>>> dateint_to_weekday(20170212, 'Sunday)
0
>>> dateint_to_weekday(20170214, 'Sunday')
2
"""
weekday_ix = dateint_to_datetime(dateint).weekday()
return (weekday_ix - WEEKDAYS.index(first_day)) % 7
def dateint_to_weekday_name(dateint):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
str
The weekday name of the given dateint.
Example
-------
>>> dateint_to_weekday_name(20170213)
'Monday'
>>> dateint_to_weekday_name(20170212)
'Sunday'
>>> dateint_to_weekday_name(20170214)
'Tuesday'
"""
return dateint_to_datetime(dateint).strftime("%A")
def shift_dateint(dateint, day_shift):
"""Shifts the given dateint by the given amount of days.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
days : int
The number of days to shift the given dateint by. A negative number
shifts the dateint backwards.
Returns
-------
int
A dateint corresponding to the given date shifted by the given amount
of days.
Example
-------
>>> shift_dateint(20170228, 1)
20170301
>>> shift_dateint(20170301, -1)
20170228
>>> shift_dateint(20170220, 5)
20170225
"""
dtime = dateint_to_datetime(dateint)
delta = timedelta(days=abs(day_shift))
if day_shift > 0:
dtime = dtime + delta
else:
dtime = dtime - delta
return datetime_to_dateint(dtime)
def dateint_range(first_dateint, last_dateint):
"""Returns all dateints in the given dateint range.
Arguments
---------
first_dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
last_dateint : int
An integer object decipting a specific calendaric day; e.g. 20170108.
Returns
-------
iterable
An iterable of ints representing all days in the given dateint range.
Example
-------
>>> dateint_range(20170228, 20170301)
[20170228, 20170301]
>>> dateint_range(20170225, 20170301)
[20170225, 20170226, 20170227, 20170228, 20170301]
"""
first_datetime = dateint_to_datetime(first_dateint)
last_datetime = dateint_to_datetime(last_dateint)
delta = last_datetime - first_datetime
delta_in_hours = math.ceil(delta.total_seconds() / 3600)
delta_in_days = math.ceil(delta_in_hours / 24) + 1
dateint_set = set()
for delta_i in range(0, delta_in_days * 24, 24):
datetime_i = first_datetime + timedelta(hours=delta_i)
dateint_i = datetime_to_dateint(datetime_i)
if dateint_i <= last_dateint:
dateint_set.add(dateint_i)
return sorted(dateint_set)
def today_int():
"""Returns the dateint for today."""
return datetime_to_dateint(utc_time())
def dateint_week_by_dateint(dateint, first_day='Monday'):
"""Return a dateint range of the week the given dateint belongs to.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
iterable
An iterable of dateint representing all days of the week the given
dateint belongs to.
"""
weekday_ix = dateint_to_weekday(dateint, first_day)
first_day_dateint = shift_dateint(dateint, -weekday_ix)
last_day_dateint = shift_dateint(first_day_dateint, 6)
return dateint_range(first_day_dateint, last_day_dateint)
def dateint_difference(dateint1, dateint2):
"""Return the difference between two dateints in days.
Arguments
---------
dateint1 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
dateint2 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The difference between the two given dateints in days.
"""
dt1 = dateint_to_datetime(dateint1)
dt2 = dateint_to_datetime(dateint2)
delta = dt1 - dt2
return abs(delta.days)
|
shaypal5/utilitime
|
utilitime/dateint/dateint.py
|
dateint_to_datetime
|
python
|
def dateint_to_datetime(dateint):
    """Convert a dateint (e.g. 20161225) to a naive datetime at midnight of
    that day, in the local timezone."""
    digits = str(dateint)
    if len(digits) != 8:
        raise ValueError(
            'Dateints must have exactly 8 digits; the first four representing '
            'the year, the next two the months, and the last two the days.')
    year, month, day = decompose_dateint(dateint)
    return datetime(year=year, month=month, day=day)
|
Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L114-L133
|
[
"def decompose_dateint(dateint):\n \"\"\"Decomposes the given dateint into its year, month and day components.\n\n Arguments\n ---------\n dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n\n Returns\n -------\n year : int\n The year component of the given dateint.\n month : int\n The month component of the given dateint.\n day : int\n The day component of the given dateint.\n \"\"\"\n year = int(dateint / 10000)\n leftover = dateint - year * 10000\n month = int(leftover / 100)\n day = leftover - month * 100\n return year, month, day\n"
] |
"""Datetime-related utility functions."""
from datetime import datetime, timedelta, date
import math
from ..timestamp import get_timestamp
from ..datetime import (
utc_time,
datetime_to_dateint,
)
from ..constants import (
WEEKDAYS,
)
def decompose_dateint(dateint):
"""Decomposes the given dateint into its year, month and day components.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
year : int
The year component of the given dateint.
month : int
The month component of the given dateint.
day : int
The day component of the given dateint.
"""
year = int(dateint / 10000)
leftover = dateint - year * 10000
month = int(leftover / 100)
day = leftover - month * 100
return year, month, day
def dateint_to_date(dateint):
"""Converts the given integer to a datetime.date object.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.date
The corresponding date object.
Example
-------
>>> dateint_to_date(20170223)
datetime.date(2017, 2, 23)
"""
return date(*decompose_dateint(dateint))
def tz_aware_dateint_to_timestamp(dateint, timezone_name):
"""Returns the epoch timestamp for the given timezone and dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
timezone_name : str
The name of the timezone.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the given timezone.
"""
return get_timestamp(timezone_name, *decompose_dateint(dateint))
def dateint_to_timestamp(dateint):
"""Converts the given dateint to a timestamp, using the local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the local timezone.
"""
return int(dateint_to_datetime(dateint).timestamp())
def dateint_to_utc_timestamp(dateint):
"""Converts the given dateint to the corresponding UTC timestamp.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The UTC timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...).
"""
return tz_aware_dateint_to_timestamp(dateint, 'UTC')
def dateint_to_weekday(dateint, first_day='Monday'):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
int
The weekday of the given dateint, when first day of the week = 0,
last day of the week = 6.
Example
-------
>>> dateint_to_weekday(20170213)
0
>>> dateint_to_weekday(20170212)
6
>>> dateint_to_weekday(20170214)
1
>>> dateint_to_weekday(20170212, 'Sunday)
0
>>> dateint_to_weekday(20170214, 'Sunday')
2
"""
weekday_ix = dateint_to_datetime(dateint).weekday()
return (weekday_ix - WEEKDAYS.index(first_day)) % 7
def dateint_to_weekday_name(dateint):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
str
The weekday name of the given dateint.
Example
-------
>>> dateint_to_weekday_name(20170213)
'Monday'
>>> dateint_to_weekday_name(20170212)
'Sunday'
>>> dateint_to_weekday_name(20170214)
'Tuesday'
"""
return dateint_to_datetime(dateint).strftime("%A")
def shift_dateint(dateint, day_shift):
"""Shifts the given dateint by the given amount of days.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
days : int
The number of days to shift the given dateint by. A negative number
shifts the dateint backwards.
Returns
-------
int
A dateint corresponding to the given date shifted by the given amount
of days.
Example
-------
>>> shift_dateint(20170228, 1)
20170301
>>> shift_dateint(20170301, -1)
20170228
>>> shift_dateint(20170220, 5)
20170225
"""
dtime = dateint_to_datetime(dateint)
delta = timedelta(days=abs(day_shift))
if day_shift > 0:
dtime = dtime + delta
else:
dtime = dtime - delta
return datetime_to_dateint(dtime)
def dateint_range(first_dateint, last_dateint):
"""Returns all dateints in the given dateint range.
Arguments
---------
first_dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
last_dateint : int
An integer object decipting a specific calendaric day; e.g. 20170108.
Returns
-------
iterable
An iterable of ints representing all days in the given dateint range.
Example
-------
>>> dateint_range(20170228, 20170301)
[20170228, 20170301]
>>> dateint_range(20170225, 20170301)
[20170225, 20170226, 20170227, 20170228, 20170301]
"""
first_datetime = dateint_to_datetime(first_dateint)
last_datetime = dateint_to_datetime(last_dateint)
delta = last_datetime - first_datetime
delta_in_hours = math.ceil(delta.total_seconds() / 3600)
delta_in_days = math.ceil(delta_in_hours / 24) + 1
dateint_set = set()
for delta_i in range(0, delta_in_days * 24, 24):
datetime_i = first_datetime + timedelta(hours=delta_i)
dateint_i = datetime_to_dateint(datetime_i)
if dateint_i <= last_dateint:
dateint_set.add(dateint_i)
return sorted(dateint_set)
def today_int():
"""Returns the dateint for today."""
return datetime_to_dateint(utc_time())
def dateint_week_by_dateint(dateint, first_day='Monday'):
"""Return a dateint range of the week the given dateint belongs to.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
iterable
An iterable of dateint representing all days of the week the given
dateint belongs to.
"""
weekday_ix = dateint_to_weekday(dateint, first_day)
first_day_dateint = shift_dateint(dateint, -weekday_ix)
last_day_dateint = shift_dateint(first_day_dateint, 6)
return dateint_range(first_day_dateint, last_day_dateint)
def dateint_difference(dateint1, dateint2):
"""Return the difference between two dateints in days.
Arguments
---------
dateint1 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
dateint2 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The difference between the two given dateints in days.
"""
dt1 = dateint_to_datetime(dateint1)
dt2 = dateint_to_datetime(dateint2)
delta = dt1 - dt2
return abs(delta.days)
|
shaypal5/utilitime
|
utilitime/dateint/dateint.py
|
dateint_to_weekday
|
python
|
def dateint_to_weekday(dateint, first_day='Monday'):
    """Return the weekday index of the given dateint, where the week is taken
    to start at `first_day` (index 0) and end six days later (index 6)."""
    first_day_offset = WEEKDAYS.index(first_day)
    return (dateint_to_datetime(dateint).weekday() - first_day_offset) % 7
|
Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
int
The weekday of the given dateint, when first day of the week = 0,
last day of the week = 6.
Example
-------
>>> dateint_to_weekday(20170213)
0
>>> dateint_to_weekday(20170212)
6
>>> dateint_to_weekday(20170214)
1
>>> dateint_to_weekday(20170212, 'Sunday')
0
>>> dateint_to_weekday(20170214, 'Sunday')
2
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L136-L166
|
[
"def dateint_to_datetime(dateint):\n \"\"\"Converts the given dateint to a datetime object, in local timezone.\n\n Arguments\n ---------\n dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n\n Returns\n -------\n datetime.datetime\n A timezone-unaware datetime object representing the start of the given\n day (so at 0 hours, 0 minutes, etc...) in the local timezone.\n \"\"\"\n if len(str(dateint)) != 8:\n raise ValueError(\n 'Dateints must have exactly 8 digits; the first four representing '\n 'the year, the next two the months, and the last two the days.')\n year, month, day = decompose_dateint(dateint)\n return datetime(year=year, month=month, day=day)\n"
] |
"""Datetime-related utility functions."""
from datetime import datetime, timedelta, date
import math
from ..timestamp import get_timestamp
from ..datetime import (
utc_time,
datetime_to_dateint,
)
from ..constants import (
WEEKDAYS,
)
def decompose_dateint(dateint):
"""Decomposes the given dateint into its year, month and day components.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
year : int
The year component of the given dateint.
month : int
The month component of the given dateint.
day : int
The day component of the given dateint.
"""
year = int(dateint / 10000)
leftover = dateint - year * 10000
month = int(leftover / 100)
day = leftover - month * 100
return year, month, day
def dateint_to_date(dateint):
"""Converts the given integer to a datetime.date object.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.date
The corresponding date object.
Example
-------
>>> dateint_to_date(20170223)
datetime.date(2017, 2, 23)
"""
return date(*decompose_dateint(dateint))
def tz_aware_dateint_to_timestamp(dateint, timezone_name):
"""Returns the epoch timestamp for the given timezone and dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
timezone_name : str
The name of the timezone.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the given timezone.
"""
return get_timestamp(timezone_name, *decompose_dateint(dateint))
def dateint_to_timestamp(dateint):
"""Converts the given dateint to a timestamp, using the local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the local timezone.
"""
return int(dateint_to_datetime(dateint).timestamp())
def dateint_to_utc_timestamp(dateint):
"""Converts the given dateint to the corresponding UTC timestamp.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The UTC timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...).
"""
return tz_aware_dateint_to_timestamp(dateint, 'UTC')
def dateint_to_datetime(dateint):
"""Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
"""
if len(str(dateint)) != 8:
raise ValueError(
'Dateints must have exactly 8 digits; the first four representing '
'the year, the next two the months, and the last two the days.')
year, month, day = decompose_dateint(dateint)
return datetime(year=year, month=month, day=day)
def dateint_to_weekday_name(dateint):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
str
The weekday name of the given dateint.
Example
-------
>>> dateint_to_weekday_name(20170213)
'Monday'
>>> dateint_to_weekday_name(20170212)
'Sunday'
>>> dateint_to_weekday_name(20170214)
'Tuesday'
"""
return dateint_to_datetime(dateint).strftime("%A")
def shift_dateint(dateint, day_shift):
"""Shifts the given dateint by the given amount of days.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
days : int
The number of days to shift the given dateint by. A negative number
shifts the dateint backwards.
Returns
-------
int
A dateint corresponding to the given date shifted by the given amount
of days.
Example
-------
>>> shift_dateint(20170228, 1)
20170301
>>> shift_dateint(20170301, -1)
20170228
>>> shift_dateint(20170220, 5)
20170225
"""
dtime = dateint_to_datetime(dateint)
delta = timedelta(days=abs(day_shift))
if day_shift > 0:
dtime = dtime + delta
else:
dtime = dtime - delta
return datetime_to_dateint(dtime)
def dateint_range(first_dateint, last_dateint):
"""Returns all dateints in the given dateint range.
Arguments
---------
first_dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
last_dateint : int
An integer object decipting a specific calendaric day; e.g. 20170108.
Returns
-------
iterable
An iterable of ints representing all days in the given dateint range.
Example
-------
>>> dateint_range(20170228, 20170301)
[20170228, 20170301]
>>> dateint_range(20170225, 20170301)
[20170225, 20170226, 20170227, 20170228, 20170301]
"""
first_datetime = dateint_to_datetime(first_dateint)
last_datetime = dateint_to_datetime(last_dateint)
delta = last_datetime - first_datetime
delta_in_hours = math.ceil(delta.total_seconds() / 3600)
delta_in_days = math.ceil(delta_in_hours / 24) + 1
dateint_set = set()
for delta_i in range(0, delta_in_days * 24, 24):
datetime_i = first_datetime + timedelta(hours=delta_i)
dateint_i = datetime_to_dateint(datetime_i)
if dateint_i <= last_dateint:
dateint_set.add(dateint_i)
return sorted(dateint_set)
def today_int():
"""Returns the dateint for today."""
return datetime_to_dateint(utc_time())
def dateint_week_by_dateint(dateint, first_day='Monday'):
"""Return a dateint range of the week the given dateint belongs to.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
iterable
An iterable of dateint representing all days of the week the given
dateint belongs to.
"""
weekday_ix = dateint_to_weekday(dateint, first_day)
first_day_dateint = shift_dateint(dateint, -weekday_ix)
last_day_dateint = shift_dateint(first_day_dateint, 6)
return dateint_range(first_day_dateint, last_day_dateint)
def dateint_difference(dateint1, dateint2):
"""Return the difference between two dateints in days.
Arguments
---------
dateint1 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
dateint2 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The difference between the two given dateints in days.
"""
dt1 = dateint_to_datetime(dateint1)
dt2 = dateint_to_datetime(dateint2)
delta = dt1 - dt2
return abs(delta.days)
|
shaypal5/utilitime
|
utilitime/dateint/dateint.py
|
shift_dateint
|
python
|
def shift_dateint(dateint, day_shift):
dtime = dateint_to_datetime(dateint)
delta = timedelta(days=abs(day_shift))
if day_shift > 0:
dtime = dtime + delta
else:
dtime = dtime - delta
return datetime_to_dateint(dtime)
|
Shifts the given dateint by the given amount of days.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
days : int
The number of days to shift the given dateint by. A negative number
shifts the dateint backwards.
Returns
-------
int
A dateint corresponding to the given date shifted by the given amount
of days.
Example
-------
>>> shift_dateint(20170228, 1)
20170301
>>> shift_dateint(20170301, -1)
20170228
>>> shift_dateint(20170220, 5)
20170225
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L194-L226
|
[
"def datetime_to_dateint(datetime_obj):\n \"\"\"Converts the given datetime object to the corresponding dateint.\n\n Arguments\n ---------\n datetime_obj : datetime.datetime\n A datetime object decipting a specific point in time.\n\n Returns\n -------\n int\n An integer represeting the day, month and year of the given point in\n time. For example, 3:32 AM on December 3rd 2015 will be converted to\n the integer 20151203.\n \"\"\"\n return datetime_obj.year * 10000 + datetime_obj.month * 100 \\\n + datetime_obj.day\n",
"def dateint_to_datetime(dateint):\n \"\"\"Converts the given dateint to a datetime object, in local timezone.\n\n Arguments\n ---------\n dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n\n Returns\n -------\n datetime.datetime\n A timezone-unaware datetime object representing the start of the given\n day (so at 0 hours, 0 minutes, etc...) in the local timezone.\n \"\"\"\n if len(str(dateint)) != 8:\n raise ValueError(\n 'Dateints must have exactly 8 digits; the first four representing '\n 'the year, the next two the months, and the last two the days.')\n year, month, day = decompose_dateint(dateint)\n return datetime(year=year, month=month, day=day)\n"
] |
"""Datetime-related utility functions."""
from datetime import datetime, timedelta, date
import math
from ..timestamp import get_timestamp
from ..datetime import (
utc_time,
datetime_to_dateint,
)
from ..constants import (
WEEKDAYS,
)
def decompose_dateint(dateint):
    """Split a dateint into its year, month and day components.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    year : int
        The year component of the given dateint.
    month : int
        The month component of the given dateint.
    day : int
        The day component of the given dateint.
    """
    # The last four digits encode MMDD, the leading digits encode the year.
    year, month_and_day = divmod(dateint, 10000)
    month, day = divmod(month_and_day, 100)
    return year, month, day
def dateint_to_date(dateint):
    """Convert the given dateint to a datetime.date object.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    datetime.date
        The corresponding date object.

    Example
    -------
    >>> dateint_to_date(20170223)
    datetime.date(2017, 2, 23)
    """
    year, month, day = decompose_dateint(dateint)
    return date(year, month, day)
def tz_aware_dateint_to_timestamp(dateint, timezone_name):
    """Return the epoch timestamp of day start for a dateint in a timezone.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.
    timezone_name : str
        The name of the timezone.

    Returns
    -------
    int
        The timestamp corresponding to the start of the given day (at 0
        hours, 0 minutes, etc...) in the given timezone.
    """
    year, month, day = decompose_dateint(dateint)
    return get_timestamp(timezone_name, year, month, day)
def dateint_to_timestamp(dateint):
    """Convert the given dateint to a timestamp in the local timezone.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    int
        The timestamp corresponding to the start of the given day (at 0
        hours, 0 minutes, etc...) in the local timezone.
    """
    day_start = dateint_to_datetime(dateint)
    return int(day_start.timestamp())
def dateint_to_utc_timestamp(dateint):
    """Convert the given dateint to the corresponding UTC timestamp.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    int
        The UTC timestamp corresponding to the start of the given day (at 0
        hours, 0 minutes, etc...).
    """
    # Delegate to the timezone-aware variant with the UTC zone name.
    return tz_aware_dateint_to_timestamp(dateint, 'UTC')
def dateint_to_datetime(dateint):
    """Convert the given dateint to a naive datetime object.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    datetime.datetime
        A timezone-unaware datetime object representing the start of the
        given day (at 0 hours, 0 minutes, etc...) in the local timezone.

    Raises
    ------
    ValueError
        If the given integer does not have exactly 8 digits.
    """
    if len(str(dateint)) == 8:
        year, month, day = decompose_dateint(dateint)
        return datetime(year=year, month=month, day=day)
    raise ValueError(
        'Dateints must have exactly 8 digits; the first four representing '
        'the year, the next two the months, and the last two the days.')
def dateint_to_weekday(dateint, first_day='Monday'):
    """Returns the weekday index of the given dateint.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.
    first_day : str, default 'Monday'
        The name of the first day of the week; must appear in ``WEEKDAYS``.

    Returns
    -------
    int
        The weekday of the given dateint, where the first day of the week
        is 0 and the last day of the week is 6.

    Example
    -------
    >>> dateint_to_weekday(20170213)
    0
    >>> dateint_to_weekday(20170212)
    6
    >>> dateint_to_weekday(20170214)
    1
    >>> dateint_to_weekday(20170212, 'Sunday')
    0
    >>> dateint_to_weekday(20170214, 'Sunday')
    2
    """
    # datetime.weekday() is Monday-based; rotate so first_day maps to 0.
    weekday_ix = dateint_to_datetime(dateint).weekday()
    return (weekday_ix - WEEKDAYS.index(first_day)) % 7
def dateint_to_weekday_name(dateint):
    """Return the weekday name of the given dateint.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    str
        The weekday name of the given dateint.

    Example
    -------
    >>> dateint_to_weekday_name(20170213)
    'Monday'
    >>> dateint_to_weekday_name(20170212)
    'Sunday'
    >>> dateint_to_weekday_name(20170214)
    'Tuesday'
    """
    as_datetime = dateint_to_datetime(dateint)
    # %A formats the full locale weekday name.
    return as_datetime.strftime("%A")
def dateint_range(first_dateint, last_dateint):
    """Return all dateints in the given inclusive dateint range.

    Arguments
    ---------
    first_dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.
    last_dateint : int
        An integer depicting a specific calendaric day; e.g. 20170108.

    Returns
    -------
    list of int
        An ascending list of ints representing all days in the given
        inclusive dateint range. Empty if first_dateint > last_dateint.

    Example
    -------
    >>> dateint_range(20170228, 20170301)
    [20170228, 20170301]
    >>> dateint_range(20170225, 20170301)
    [20170225, 20170226, 20170227, 20170228, 20170301]
    """
    # dateint_to_datetime returns naive datetimes, so stepping a whole day
    # at a time is safe (no DST jumps are possible); this replaces the
    # previous hour-granularity walk plus set deduplication with a simple
    # day-by-day accumulation that yields the same sorted, unique result.
    current = dateint_to_datetime(first_dateint)
    last = dateint_to_datetime(last_dateint)
    one_day = timedelta(days=1)
    dateints = []
    while current <= last:
        dateints.append(datetime_to_dateint(current))
        current += one_day
    return dateints
def today_int():
    """Returns the dateint for today, per the time given by ``utc_time``."""
    return datetime_to_dateint(utc_time())
def dateint_week_by_dateint(dateint, first_day='Monday'):
    """Return a dateint range of the week the given dateint belongs to.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.
    first_day : str, default 'Monday'
        The first day of the week.

    Returns
    -------
    iterable
        An iterable of dateints representing all days of the week the
        given dateint belongs to.
    """
    # Walk back to the start of the week, then span seven days forward.
    offset_into_week = dateint_to_weekday(dateint, first_day)
    week_start = shift_dateint(dateint, -offset_into_week)
    week_end = shift_dateint(week_start, 6)
    return dateint_range(week_start, week_end)
def dateint_difference(dateint1, dateint2):
    """Return the absolute difference between two dateints in days.

    Arguments
    ---------
    dateint1 : int
        An integer depicting a specific calendaric day; e.g. 20161225.
    dateint2 : int
        An integer depicting a specific calendaric day; e.g. 20161225.

    Returns
    -------
    int
        The difference between the two given dateints in days.
    """
    delta = dateint_to_datetime(dateint1) - dateint_to_datetime(dateint2)
    return abs(delta.days)
|
shaypal5/utilitime
|
utilitime/dateint/dateint.py
|
dateint_range
|
python
|
def dateint_range(first_dateint, last_dateint):
first_datetime = dateint_to_datetime(first_dateint)
last_datetime = dateint_to_datetime(last_dateint)
delta = last_datetime - first_datetime
delta_in_hours = math.ceil(delta.total_seconds() / 3600)
delta_in_days = math.ceil(delta_in_hours / 24) + 1
dateint_set = set()
for delta_i in range(0, delta_in_days * 24, 24):
datetime_i = first_datetime + timedelta(hours=delta_i)
dateint_i = datetime_to_dateint(datetime_i)
if dateint_i <= last_dateint:
dateint_set.add(dateint_i)
return sorted(dateint_set)
|
Returns all dateints in the given dateint range.
Arguments
---------
first_dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
last_dateint : int
An integer object decipting a specific calendaric day; e.g. 20170108.
Returns
-------
iterable
An iterable of ints representing all days in the given dateint range.
Example
-------
>>> dateint_range(20170228, 20170301)
[20170228, 20170301]
>>> dateint_range(20170225, 20170301)
[20170225, 20170226, 20170227, 20170228, 20170301]
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L229-L262
|
[
"def datetime_to_dateint(datetime_obj):\n \"\"\"Converts the given datetime object to the corresponding dateint.\n\n Arguments\n ---------\n datetime_obj : datetime.datetime\n A datetime object decipting a specific point in time.\n\n Returns\n -------\n int\n An integer represeting the day, month and year of the given point in\n time. For example, 3:32 AM on December 3rd 2015 will be converted to\n the integer 20151203.\n \"\"\"\n return datetime_obj.year * 10000 + datetime_obj.month * 100 \\\n + datetime_obj.day\n",
"def dateint_to_datetime(dateint):\n \"\"\"Converts the given dateint to a datetime object, in local timezone.\n\n Arguments\n ---------\n dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n\n Returns\n -------\n datetime.datetime\n A timezone-unaware datetime object representing the start of the given\n day (so at 0 hours, 0 minutes, etc...) in the local timezone.\n \"\"\"\n if len(str(dateint)) != 8:\n raise ValueError(\n 'Dateints must have exactly 8 digits; the first four representing '\n 'the year, the next two the months, and the last two the days.')\n year, month, day = decompose_dateint(dateint)\n return datetime(year=year, month=month, day=day)\n"
] |
"""Datetime-related utility functions."""
from datetime import datetime, timedelta, date
import math
from ..timestamp import get_timestamp
from ..datetime import (
utc_time,
datetime_to_dateint,
)
from ..constants import (
WEEKDAYS,
)
def decompose_dateint(dateint):
"""Decomposes the given dateint into its year, month and day components.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
year : int
The year component of the given dateint.
month : int
The month component of the given dateint.
day : int
The day component of the given dateint.
"""
year = int(dateint / 10000)
leftover = dateint - year * 10000
month = int(leftover / 100)
day = leftover - month * 100
return year, month, day
def dateint_to_date(dateint):
"""Converts the given integer to a datetime.date object.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.date
The corresponding date object.
Example
-------
>>> dateint_to_date(20170223)
datetime.date(2017, 2, 23)
"""
return date(*decompose_dateint(dateint))
def tz_aware_dateint_to_timestamp(dateint, timezone_name):
"""Returns the epoch timestamp for the given timezone and dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
timezone_name : str
The name of the timezone.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the given timezone.
"""
return get_timestamp(timezone_name, *decompose_dateint(dateint))
def dateint_to_timestamp(dateint):
"""Converts the given dateint to a timestamp, using the local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the local timezone.
"""
return int(dateint_to_datetime(dateint).timestamp())
def dateint_to_utc_timestamp(dateint):
"""Converts the given dateint to the corresponding UTC timestamp.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The UTC timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...).
"""
return tz_aware_dateint_to_timestamp(dateint, 'UTC')
def dateint_to_datetime(dateint):
"""Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
"""
if len(str(dateint)) != 8:
raise ValueError(
'Dateints must have exactly 8 digits; the first four representing '
'the year, the next two the months, and the last two the days.')
year, month, day = decompose_dateint(dateint)
return datetime(year=year, month=month, day=day)
def dateint_to_weekday(dateint, first_day='Monday'):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
int
The weekday of the given dateint, when first day of the week = 0,
last day of the week = 6.
Example
-------
>>> dateint_to_weekday(20170213)
0
>>> dateint_to_weekday(20170212)
6
>>> dateint_to_weekday(20170214)
1
>>> dateint_to_weekday(20170212, 'Sunday)
0
>>> dateint_to_weekday(20170214, 'Sunday')
2
"""
weekday_ix = dateint_to_datetime(dateint).weekday()
return (weekday_ix - WEEKDAYS.index(first_day)) % 7
def dateint_to_weekday_name(dateint):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
str
The weekday name of the given dateint.
Example
-------
>>> dateint_to_weekday_name(20170213)
'Monday'
>>> dateint_to_weekday_name(20170212)
'Sunday'
>>> dateint_to_weekday_name(20170214)
'Tuesday'
"""
return dateint_to_datetime(dateint).strftime("%A")
def shift_dateint(dateint, day_shift):
    """Shift the given dateint by the given number of days.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendaric day; e.g. 20161225.
    day_shift : int
        The number of days to shift the given dateint by. A negative number
        shifts the dateint backwards.

    Returns
    -------
    int
        A dateint corresponding to the given date shifted by the given
        number of days.

    Example
    -------
    >>> shift_dateint(20170228, 1)
    20170301
    >>> shift_dateint(20170301, -1)
    20170228
    >>> shift_dateint(20170220, 5)
    20170225
    """
    # timedelta handles negative day counts natively, so the previous
    # abs()-plus-branch construction was redundant; also fixes the docstring,
    # which documented the second parameter under the wrong name ("days").
    shifted = dateint_to_datetime(dateint) + timedelta(days=day_shift)
    return datetime_to_dateint(shifted)
def today_int():
"""Returns the dateint for today."""
return datetime_to_dateint(utc_time())
def dateint_week_by_dateint(dateint, first_day='Monday'):
"""Return a dateint range of the week the given dateint belongs to.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
iterable
An iterable of dateint representing all days of the week the given
dateint belongs to.
"""
weekday_ix = dateint_to_weekday(dateint, first_day)
first_day_dateint = shift_dateint(dateint, -weekday_ix)
last_day_dateint = shift_dateint(first_day_dateint, 6)
return dateint_range(first_day_dateint, last_day_dateint)
def dateint_difference(dateint1, dateint2):
"""Return the difference between two dateints in days.
Arguments
---------
dateint1 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
dateint2 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The difference between the two given dateints in days.
"""
dt1 = dateint_to_datetime(dateint1)
dt2 = dateint_to_datetime(dateint2)
delta = dt1 - dt2
return abs(delta.days)
|
shaypal5/utilitime
|
utilitime/dateint/dateint.py
|
dateint_week_by_dateint
|
python
|
def dateint_week_by_dateint(dateint, first_day='Monday'):
weekday_ix = dateint_to_weekday(dateint, first_day)
first_day_dateint = shift_dateint(dateint, -weekday_ix)
last_day_dateint = shift_dateint(first_day_dateint, 6)
return dateint_range(first_day_dateint, last_day_dateint)
|
Return a dateint range of the week the given dateint belongs to.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
iterable
An iterable of dateint representing all days of the week the given
dateint belongs to.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L270-L289
|
[
"def dateint_to_weekday(dateint, first_day='Monday'):\n \"\"\"Returns the weekday of the given dateint.\n\n Arguments\n ---------\n dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n first_day : str, default 'Monday'\n The first day of the week.\n\n Returns\n -------\n int\n The weekday of the given dateint, when first day of the week = 0,\n last day of the week = 6.\n\n Example\n -------\n >>> dateint_to_weekday(20170213)\n 0\n >>> dateint_to_weekday(20170212)\n 6\n >>> dateint_to_weekday(20170214)\n 1\n >>> dateint_to_weekday(20170212, 'Sunday)\n 0\n >>> dateint_to_weekday(20170214, 'Sunday')\n 2\n \"\"\"\n weekday_ix = dateint_to_datetime(dateint).weekday()\n return (weekday_ix - WEEKDAYS.index(first_day)) % 7\n",
"def shift_dateint(dateint, day_shift):\n \"\"\"Shifts the given dateint by the given amount of days.\n\n Arguments\n ---------\n dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n days : int\n The number of days to shift the given dateint by. A negative number\n shifts the dateint backwards.\n\n Returns\n -------\n int\n A dateint corresponding to the given date shifted by the given amount\n of days.\n\n Example\n -------\n >>> shift_dateint(20170228, 1)\n 20170301\n >>> shift_dateint(20170301, -1)\n 20170228\n >>> shift_dateint(20170220, 5)\n 20170225\n \"\"\"\n dtime = dateint_to_datetime(dateint)\n delta = timedelta(days=abs(day_shift))\n if day_shift > 0:\n dtime = dtime + delta\n else:\n dtime = dtime - delta\n return datetime_to_dateint(dtime)\n",
"def dateint_range(first_dateint, last_dateint):\n \"\"\"Returns all dateints in the given dateint range.\n\n Arguments\n ---------\n first_dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n last_dateint : int\n An integer object decipting a specific calendaric day; e.g. 20170108.\n\n Returns\n -------\n iterable\n An iterable of ints representing all days in the given dateint range.\n\n Example\n -------\n >>> dateint_range(20170228, 20170301)\n [20170228, 20170301]\n >>> dateint_range(20170225, 20170301)\n [20170225, 20170226, 20170227, 20170228, 20170301]\n \"\"\"\n first_datetime = dateint_to_datetime(first_dateint)\n last_datetime = dateint_to_datetime(last_dateint)\n delta = last_datetime - first_datetime\n delta_in_hours = math.ceil(delta.total_seconds() / 3600)\n delta_in_days = math.ceil(delta_in_hours / 24) + 1\n dateint_set = set()\n for delta_i in range(0, delta_in_days * 24, 24):\n datetime_i = first_datetime + timedelta(hours=delta_i)\n dateint_i = datetime_to_dateint(datetime_i)\n if dateint_i <= last_dateint:\n dateint_set.add(dateint_i)\n return sorted(dateint_set)\n"
] |
"""Datetime-related utility functions."""
from datetime import datetime, timedelta, date
import math
from ..timestamp import get_timestamp
from ..datetime import (
utc_time,
datetime_to_dateint,
)
from ..constants import (
WEEKDAYS,
)
def decompose_dateint(dateint):
"""Decomposes the given dateint into its year, month and day components.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
year : int
The year component of the given dateint.
month : int
The month component of the given dateint.
day : int
The day component of the given dateint.
"""
year = int(dateint / 10000)
leftover = dateint - year * 10000
month = int(leftover / 100)
day = leftover - month * 100
return year, month, day
def dateint_to_date(dateint):
"""Converts the given integer to a datetime.date object.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.date
The corresponding date object.
Example
-------
>>> dateint_to_date(20170223)
datetime.date(2017, 2, 23)
"""
return date(*decompose_dateint(dateint))
def tz_aware_dateint_to_timestamp(dateint, timezone_name):
"""Returns the epoch timestamp for the given timezone and dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
timezone_name : str
The name of the timezone.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the given timezone.
"""
return get_timestamp(timezone_name, *decompose_dateint(dateint))
def dateint_to_timestamp(dateint):
"""Converts the given dateint to a timestamp, using the local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...) at the local timezone.
"""
return int(dateint_to_datetime(dateint).timestamp())
def dateint_to_utc_timestamp(dateint):
"""Converts the given dateint to the corresponding UTC timestamp.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The UTC timestamp corresponding to the start of the given day (so at 0
hours, 0 minutes, etc...).
"""
return tz_aware_dateint_to_timestamp(dateint, 'UTC')
def dateint_to_datetime(dateint):
"""Converts the given dateint to a datetime object, in local timezone.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
datetime.datetime
A timezone-unaware datetime object representing the start of the given
day (so at 0 hours, 0 minutes, etc...) in the local timezone.
"""
if len(str(dateint)) != 8:
raise ValueError(
'Dateints must have exactly 8 digits; the first four representing '
'the year, the next two the months, and the last two the days.')
year, month, day = decompose_dateint(dateint)
return datetime(year=year, month=month, day=day)
def dateint_to_weekday(dateint, first_day='Monday'):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
first_day : str, default 'Monday'
The first day of the week.
Returns
-------
int
The weekday of the given dateint, when first day of the week = 0,
last day of the week = 6.
Example
-------
>>> dateint_to_weekday(20170213)
0
>>> dateint_to_weekday(20170212)
6
>>> dateint_to_weekday(20170214)
1
>>> dateint_to_weekday(20170212, 'Sunday)
0
>>> dateint_to_weekday(20170214, 'Sunday')
2
"""
weekday_ix = dateint_to_datetime(dateint).weekday()
return (weekday_ix - WEEKDAYS.index(first_day)) % 7
def dateint_to_weekday_name(dateint):
"""Returns the weekday of the given dateint.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
str
The weekday name of the given dateint.
Example
-------
>>> dateint_to_weekday_name(20170213)
'Monday'
>>> dateint_to_weekday_name(20170212)
'Sunday'
>>> dateint_to_weekday_name(20170214)
'Tuesday'
"""
return dateint_to_datetime(dateint).strftime("%A")
def shift_dateint(dateint, day_shift):
"""Shifts the given dateint by the given amount of days.
Arguments
---------
dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
days : int
The number of days to shift the given dateint by. A negative number
shifts the dateint backwards.
Returns
-------
int
A dateint corresponding to the given date shifted by the given amount
of days.
Example
-------
>>> shift_dateint(20170228, 1)
20170301
>>> shift_dateint(20170301, -1)
20170228
>>> shift_dateint(20170220, 5)
20170225
"""
dtime = dateint_to_datetime(dateint)
delta = timedelta(days=abs(day_shift))
if day_shift > 0:
dtime = dtime + delta
else:
dtime = dtime - delta
return datetime_to_dateint(dtime)
def dateint_range(first_dateint, last_dateint):
"""Returns all dateints in the given dateint range.
Arguments
---------
first_dateint : int
An integer object decipting a specific calendaric day; e.g. 20161225.
last_dateint : int
An integer object decipting a specific calendaric day; e.g. 20170108.
Returns
-------
iterable
An iterable of ints representing all days in the given dateint range.
Example
-------
>>> dateint_range(20170228, 20170301)
[20170228, 20170301]
>>> dateint_range(20170225, 20170301)
[20170225, 20170226, 20170227, 20170228, 20170301]
"""
first_datetime = dateint_to_datetime(first_dateint)
last_datetime = dateint_to_datetime(last_dateint)
delta = last_datetime - first_datetime
delta_in_hours = math.ceil(delta.total_seconds() / 3600)
delta_in_days = math.ceil(delta_in_hours / 24) + 1
dateint_set = set()
for delta_i in range(0, delta_in_days * 24, 24):
datetime_i = first_datetime + timedelta(hours=delta_i)
dateint_i = datetime_to_dateint(datetime_i)
if dateint_i <= last_dateint:
dateint_set.add(dateint_i)
return sorted(dateint_set)
def today_int():
"""Returns the dateint for today."""
return datetime_to_dateint(utc_time())
def dateint_difference(dateint1, dateint2):
"""Return the difference between two dateints in days.
Arguments
---------
dateint1 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
dateint2 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The difference between the two given dateints in days.
"""
dt1 = dateint_to_datetime(dateint1)
dt2 = dateint_to_datetime(dateint2)
delta = dt1 - dt2
return abs(delta.days)
|
shaypal5/utilitime
|
utilitime/dateint/dateint.py
|
dateint_difference
|
python
|
def dateint_difference(dateint1, dateint2):
dt1 = dateint_to_datetime(dateint1)
dt2 = dateint_to_datetime(dateint2)
delta = dt1 - dt2
return abs(delta.days)
|
Return the difference between two dateints in days.
Arguments
---------
dateint1 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
dateint2 : int
An integer object decipting a specific calendaric day; e.g. 20161225.
Returns
-------
int
The difference between the two given dateints in days.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/dateint/dateint.py#L292-L310
|
[
"def dateint_to_datetime(dateint):\n \"\"\"Converts the given dateint to a datetime object, in local timezone.\n\n Arguments\n ---------\n dateint : int\n An integer object decipting a specific calendaric day; e.g. 20161225.\n\n Returns\n -------\n datetime.datetime\n A timezone-unaware datetime object representing the start of the given\n day (so at 0 hours, 0 minutes, etc...) in the local timezone.\n \"\"\"\n if len(str(dateint)) != 8:\n raise ValueError(\n 'Dateints must have exactly 8 digits; the first four representing '\n 'the year, the next two the months, and the last two the days.')\n year, month, day = decompose_dateint(dateint)\n return datetime(year=year, month=month, day=day)\n"
] |
"""Datetime-related utility functions."""
from datetime import datetime, timedelta, date
import math
from ..timestamp import get_timestamp
from ..datetime import (
utc_time,
datetime_to_dateint,
)
from ..constants import (
WEEKDAYS,
)
def decompose_dateint(dateint):
    """Split the given dateint into its year, month and day components.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.

    Returns
    -------
    year : int
        The year component of the given dateint.
    month : int
        The month component of the given dateint.
    day : int
        The day component of the given dateint.
    """
    # A dateint is YYYYMMDD, so two divmod passes peel off the components.
    year, month_day = divmod(dateint, 10000)
    month, day = divmod(month_day, 100)
    return year, month, day
def dateint_to_date(dateint):
    """Convert the given dateint to a datetime.date object.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.

    Returns
    -------
    datetime.date
        The corresponding date object.

    Example
    -------
    >>> dateint_to_date(20170223)
    datetime.date(2017, 2, 23)
    """
    year, month, day = decompose_dateint(dateint)
    return date(year, month, day)
def tz_aware_dateint_to_timestamp(dateint, timezone_name):
    """Returns the epoch timestamp for the given timezone and dateint.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.
    timezone_name : str
        The name of the timezone.

    Returns
    -------
    int
        The timestamp corresponding to the start of the given day (so at 0
        hours, 0 minutes, etc...) at the given timezone.
    """
    # get_timestamp is a project helper (utilitime.timestamp); presumably it
    # takes (timezone_name, year, month, day) -- confirm against its signature.
    return get_timestamp(timezone_name, *decompose_dateint(dateint))
def dateint_to_timestamp(dateint):
    """Convert the given dateint to a timestamp, using the local timezone.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.

    Returns
    -------
    int
        The timestamp corresponding to the start of the given day (so at 0
        hours, 0 minutes, etc...) at the local timezone.
    """
    local_midnight = dateint_to_datetime(dateint)
    return int(local_midnight.timestamp())
def dateint_to_utc_timestamp(dateint):
    """Converts the given dateint to the corresponding UTC timestamp.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.

    Returns
    -------
    int
        The UTC timestamp corresponding to the start of the given day (so at 0
        hours, 0 minutes, etc...).
    """
    # Delegates to the timezone-aware conversion with the fixed 'UTC' zone.
    return tz_aware_dateint_to_timestamp(dateint, 'UTC')
def dateint_to_datetime(dateint):
    """Convert the given dateint to a datetime object, in local timezone.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.

    Returns
    -------
    datetime.datetime
        A timezone-unaware datetime object representing the start of the given
        day (so at 0 hours, 0 minutes, etc...) in the local timezone.
    """
    # Reject anything that is not an 8-digit YYYYMMDD integer up front.
    digit_count = len(str(dateint))
    if digit_count != 8:
        raise ValueError(
            'Dateints must have exactly 8 digits; the first four representing '
            'the year, the next two the months, and the last two the days.')
    components = decompose_dateint(dateint)
    return datetime(*components)
def dateint_to_weekday(dateint, first_day='Monday'):
    """Return the weekday index of the given dateint.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.
    first_day : str, default 'Monday'
        The first day of the week.

    Returns
    -------
    int
        The weekday of the given dateint, when first day of the week = 0,
        last day of the week = 6.

    Example
    -------
    >>> dateint_to_weekday(20170213)
    0
    >>> dateint_to_weekday(20170212)
    6
    >>> dateint_to_weekday(20170214)
    1
    >>> dateint_to_weekday(20170212, 'Sunday')
    0
    >>> dateint_to_weekday(20170214, 'Sunday')
    2
    """
    # datetime.weekday() is Monday-based; rotate by the requested first day.
    monday_based = dateint_to_datetime(dateint).weekday()
    rotation = WEEKDAYS.index(first_day)
    return (monday_based - rotation) % 7
def dateint_to_weekday_name(dateint):
    """Return the weekday name of the given dateint.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.

    Returns
    -------
    str
        The weekday name of the given dateint.

    Example
    -------
    >>> dateint_to_weekday_name(20170213)
    'Monday'
    >>> dateint_to_weekday_name(20170212)
    'Sunday'
    >>> dateint_to_weekday_name(20170214)
    'Tuesday'
    """
    as_datetime = dateint_to_datetime(dateint)
    return as_datetime.strftime("%A")
def shift_dateint(dateint, day_shift):
    """Shift the given dateint by the given amount of days.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.
    day_shift : int
        The number of days to shift the given dateint by. A negative number
        shifts the dateint backwards.

    Returns
    -------
    int
        A dateint corresponding to the given date shifted by the given amount
        of days.

    Example
    -------
    >>> shift_dateint(20170228, 1)
    20170301
    >>> shift_dateint(20170301, -1)
    20170228
    >>> shift_dateint(20170220, 5)
    20170225
    """
    # timedelta accepts negative day counts directly, so no sign branching is
    # needed: adding a negative delta shifts the date backwards.
    shifted = dateint_to_datetime(dateint) + timedelta(days=day_shift)
    return datetime_to_dateint(shifted)
def dateint_range(first_dateint, last_dateint):
    """Return all dateints in the given (inclusive) dateint range.

    Arguments
    ---------
    first_dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.
    last_dateint : int
        An integer depicting a specific calendar day; e.g. 20170108.

    Returns
    -------
    list
        A sorted list of ints representing all days in the given range.

    Example
    -------
    >>> dateint_range(20170228, 20170301)
    [20170228, 20170301]
    >>> dateint_range(20170225, 20170301)
    [20170225, 20170226, 20170227, 20170228, 20170301]
    """
    # Step one calendar day at a time between the two (naive) datetimes.
    # This replaces the previous hour-granularity stepping plus set/sort,
    # which produced the same output with extra float math and allocations.
    first_datetime = dateint_to_datetime(first_dateint)
    last_datetime = dateint_to_datetime(last_dateint)
    one_day = timedelta(days=1)
    dateints = []
    current = first_datetime
    while current <= last_datetime:
        dateints.append(datetime_to_dateint(current))
        current += one_day
    return dateints
def today_int():
    """Return the dateint of the current day."""
    now = utc_time()
    return datetime_to_dateint(now)
def dateint_week_by_dateint(dateint, first_day='Monday'):
    """Return a dateint range of the week the given dateint belongs to.

    Arguments
    ---------
    dateint : int
        An integer depicting a specific calendar day; e.g. 20161225.
    first_day : str, default 'Monday'
        The first day of the week.

    Returns
    -------
    iterable
        An iterable of dateints representing all days of the week the given
        dateint belongs to.
    """
    days_since_week_start = dateint_to_weekday(dateint, first_day)
    week_start = shift_dateint(dateint, -days_since_week_start)
    week_end = shift_dateint(week_start, 6)
    return dateint_range(week_start, week_end)
|
shaypal5/utilitime
|
utilitime/time_interval.py
|
TimeInterval.from_timedelta
|
python
|
def from_timedelta(cls, datetime_obj, duration):
if duration.total_seconds() > 0:
return TimeInterval(datetime_obj, datetime_obj + duration)
else:
return TimeInterval(datetime_obj + duration, datetime_obj)
|
Create a new TimeInterval object from a start point and a duration.
If duration is positive, datetime_obj is the start of the interval;
if duration is negative, datetime_obj is the end of the interval.
Parameters
----------
datetime_obj : datetime.datetime
duration : datetime.timedelta
Returns
-------
neutils.time.TimeInterval
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/time_interval.py#L19-L37
| null |
class TimeInterval:
    """A class that represents a time interval, based on a start and end point.
    """

    def __init__(self, start, end):
        """Create a new TimeInterval object from a start and end point.

        Parameters
        ----------
        start, end : datetime.datetime
        """
        # Intervals must be well ordered; a zero-length interval
        # (start == end) is allowed.
        assert start <= end
        self.start = start
        self.end = end

    # NOTE(review): this decorator appears to be a leftover from an extracted
    # alternate constructor (`from_timedelta`); as written it would turn
    # __repr__ into a classmethod -- confirm against the full source file.
    @classmethod
    def __repr__(self):
        # Unambiguous form, e.g. "TimeInterval(datetime(...), datetime(...))".
        return "{}({!r}, {!r})".format(
            type(self).__name__, self.start, self.end)

    def __str__(self):
        # Human-readable form: "<start> -> <end>".
        return "{} -> {}".format(self.start, self.end)

    def __contains__(self, datetime_obj):
        """Check if a certain datetime object is in the interval, requiring
        that it is between the start and end points (inclusive).

        Parameters
        ----------
        datetime_obj: datetime.datetime

        Returns
        -------
        bool
        """
        return self.start <= datetime_obj <= self.end
|
shaypal5/utilitime
|
utilitime/weekday/weekday.py
|
next_weekday
|
python
|
def next_weekday(weekday):
    """Return the name of the weekday that follows the given weekday name."""
    position = WEEKDAYS.index(weekday)
    # Modulo wraps the last weekday back around to the first.
    return WEEKDAYS[(position + 1) % len(WEEKDAYS)]
|
Returns the name of the weekday after the given weekday name.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/weekday/weekday.py#L12-L17
| null |
"""Weekday-related utility functions."""
from decore import lazy_property
from ..constants import (
WEEKDAYS,
)
# === weekday-related functions ===
def prev_weekday(weekday):
    """Return the name of the weekday preceding the given weekday name."""
    position = WEEKDAYS.index(weekday)
    # Modulo wraps the first weekday back around to the last.
    return WEEKDAYS[(position - 1) % len(WEEKDAYS)]
@lazy_property
def _lower_weekdays():
    # Lowercased weekday names, computed once; used for case-insensitive
    # lookups of a user-supplied first_day argument.
    return [day.lower() for day in WEEKDAYS]


@lazy_property
def _double_weekdays():
    # The weekday cycle repeated twice, so any 5- or 7-day window starting at
    # an arbitrary weekday can be taken as a single contiguous slice.
    return WEEKDAYS + WEEKDAYS
def workdays(first_day=None):
    """Return a list of the five workday names.

    Arguments
    ---------
    first_day : str, default None
        The first day of the five-day work week. If not given, 'Monday' is
        used.

    Returns
    -------
    list
        A list of workday names.
    """
    chosen = first_day if first_day is not None else 'Monday'
    start = _lower_weekdays().index(chosen.lower())
    # Slice five consecutive days out of the doubled weekday cycle.
    return _double_weekdays()[start:start + 5]
def weekdays(first_day=None):
    """Return a list of the seven weekday names.

    Arguments
    ---------
    first_day : str, default None
        The first day of the week. If not given, 'Monday' is used.

    Returns
    -------
    list
        A list of weekday names.
    """
    chosen = first_day if first_day is not None else 'Monday'
    start = _lower_weekdays().index(chosen.lower())
    # Slice seven consecutive days out of the doubled weekday cycle.
    return _double_weekdays()[start:start + 7]
|
shaypal5/utilitime
|
utilitime/weekday/weekday.py
|
prev_weekday
|
python
|
def prev_weekday(weekday):
ix = WEEKDAYS.index(weekday)
if ix == 0:
return WEEKDAYS[len(WEEKDAYS)-1]
return WEEKDAYS[ix-1]
|
Returns the name of the weekday before the given weekday name.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/weekday/weekday.py#L20-L25
| null |
"""Weekday-related utility functions."""
from decore import lazy_property
from ..constants import (
WEEKDAYS,
)
# === weekday-related functions ===
def next_weekday(weekday):
"""Returns the name of the weekday after the given weekday name."""
ix = WEEKDAYS.index(weekday)
if ix == len(WEEKDAYS)-1:
return WEEKDAYS[0]
return WEEKDAYS[ix+1]
@lazy_property
def _lower_weekdays():
return [day.lower() for day in WEEKDAYS]
@lazy_property
def _double_weekdays():
return WEEKDAYS + WEEKDAYS
def workdays(first_day=None):
"""Returns a list of workday names.
Arguments
---------
first_day : str, default None
The first day of the five-day work week. If not given, 'Monday' is
used.
Returns
-------
list
A list of workday names.
"""
if first_day is None:
first_day = 'Monday'
ix = _lower_weekdays().index(first_day.lower())
return _double_weekdays()[ix:ix+5]
def weekdays(first_day=None):
"""Returns a list of weekday names.
Arguments
---------
first_day : str, default None
The first day of the week. If not given, 'Monday' is used.
Returns
-------
list
A list of weekday names.
"""
if first_day is None:
first_day = 'Monday'
ix = _lower_weekdays().index(first_day.lower())
return _double_weekdays()[ix:ix+7]
|
shaypal5/utilitime
|
utilitime/weekday/weekday.py
|
workdays
|
python
|
def workdays(first_day=None):
if first_day is None:
first_day = 'Monday'
ix = _lower_weekdays().index(first_day.lower())
return _double_weekdays()[ix:ix+5]
|
Returns a list of workday names.
Arguments
---------
first_day : str, default None
The first day of the five-day work week. If not given, 'Monday' is
used.
Returns
-------
list
A list of workday names.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/weekday/weekday.py#L38-L55
| null |
"""Weekday-related utility functions."""
from decore import lazy_property
from ..constants import (
WEEKDAYS,
)
# === weekday-related functions ===
def next_weekday(weekday):
"""Returns the name of the weekday after the given weekday name."""
ix = WEEKDAYS.index(weekday)
if ix == len(WEEKDAYS)-1:
return WEEKDAYS[0]
return WEEKDAYS[ix+1]
def prev_weekday(weekday):
"""Returns the name of the weekday before the given weekday name."""
ix = WEEKDAYS.index(weekday)
if ix == 0:
return WEEKDAYS[len(WEEKDAYS)-1]
return WEEKDAYS[ix-1]
@lazy_property
def _lower_weekdays():
return [day.lower() for day in WEEKDAYS]
@lazy_property
def _double_weekdays():
return WEEKDAYS + WEEKDAYS
def weekdays(first_day=None):
"""Returns a list of weekday names.
Arguments
---------
first_day : str, default None
The first day of the week. If not given, 'Monday' is used.
Returns
-------
list
A list of weekday names.
"""
if first_day is None:
first_day = 'Monday'
ix = _lower_weekdays().index(first_day.lower())
return _double_weekdays()[ix:ix+7]
|
shaypal5/utilitime
|
utilitime/weekday/weekday.py
|
weekdays
|
python
|
def weekdays(first_day=None):
if first_day is None:
first_day = 'Monday'
ix = _lower_weekdays().index(first_day.lower())
return _double_weekdays()[ix:ix+7]
|
Returns a list of weekday names.
Arguments
---------
first_day : str, default None
The first day of the week. If not given, 'Monday' is used.
Returns
-------
list
A list of weekday names.
|
train
|
https://github.com/shaypal5/utilitime/blob/554ca05fa83c2dbf5d6cf9c9cfa6b03ee6cdb609/utilitime/weekday/weekday.py#L58-L74
| null |
"""Weekday-related utility functions."""
from decore import lazy_property
from ..constants import (
WEEKDAYS,
)
# === weekday-related functions ===
def next_weekday(weekday):
"""Returns the name of the weekday after the given weekday name."""
ix = WEEKDAYS.index(weekday)
if ix == len(WEEKDAYS)-1:
return WEEKDAYS[0]
return WEEKDAYS[ix+1]
def prev_weekday(weekday):
"""Returns the name of the weekday before the given weekday name."""
ix = WEEKDAYS.index(weekday)
if ix == 0:
return WEEKDAYS[len(WEEKDAYS)-1]
return WEEKDAYS[ix-1]
@lazy_property
def _lower_weekdays():
return [day.lower() for day in WEEKDAYS]
@lazy_property
def _double_weekdays():
return WEEKDAYS + WEEKDAYS
def workdays(first_day=None):
"""Returns a list of workday names.
Arguments
---------
first_day : str, default None
The first day of the five-day work week. If not given, 'Monday' is
used.
Returns
-------
list
A list of workday names.
"""
if first_day is None:
first_day = 'Monday'
ix = _lower_weekdays().index(first_day.lower())
return _double_weekdays()[ix:ix+5]
|
thespacedoctor/transientNamer
|
transientNamer/search.py
|
search.sources
|
python
|
def sources(
self):
sourceResultsList = []
sourceResultsList[:] = [dict(l) for l in self.sourceResultsList]
return sourceResultsList
|
*The results of the search returned as a python list of dictionaries*
**Usage:**
.. code-block:: python
sources = tns.sources
|
train
|
https://github.com/thespacedoctor/transientNamer/blob/39be410c84275ed4669632f5df67e728d66a318f/transientNamer/search.py#L152-L164
| null |
class search():
"""
*The worker class for the transient namer search module*
**Key Arguments:**
- ``log`` -- logger
- ``settings`` -- the settings dictionary
- ``ra`` -- RA of the location being checked
- ``dec`` -- DEC of the location being searched
- ``radiusArcsec`` - the radius of the conesearch to perform against the TNS
- ``name`` -- name of the object to search the TNS for
- ``discInLastDays`` -- search the TNS for transient discovered in the last X days
- ``comments`` -- print the comments from the TNS, note these can be long making table outputs somewhat unreadable. Default *False*
**Usage:**
To initiate a search object to search the TNS via an object name (either TNS or survey names accepted):
.. code-block:: python
from transientNamer import search
tns = search(
log=log,
name="Gaia16bbi"
)
or for a conesearch use something similar to:
.. code-block:: python
from transientNamer import search
tns = search(
log=log,
ra="06:50:36.74",
dec="+31:06:44.7",
radiusArcsec=5
)
Note the search method can accept coordinates in sexagesimal or decimal defree formats.
To list all new objects discovered in the last three weeks, then use:
.. code-block:: python
from transientNamer import search
tns = search(
log=log,
discInLastDays=21
)
"""
# Initialisation
def __init__(
self,
log,
ra="",
dec="",
radiusArcsec="",
name="",
discInLastDays="",
settings=False,
comments=False
):
self.log = log
log.debug("instansiating a new 'search' object")
self.settings = settings
self.ra = ra
self.dec = dec
self.radiusArcsec = radiusArcsec
self.comments = comments
self.name = name
self.internal_name = ""
self.discInLastDays = discInLastDays
self.page = 0
self.batchSize = 1000
# CREATE THE TIME-RANGE WINDOW TO SEARCH TNS
if not discInLastDays:
self.start = ""
self.end = ""
else:
discInLastDays = int(discInLastDays)
td = timedelta(days=1)
end = datetime.now() + td
self.end = end.strftime("%Y-%m-%d")
td = timedelta(days=discInLastDays)
start = datetime.now() - td
self.start = start.strftime("%Y-%m-%d")
# DETERMINE IF WE HAVE A TNS OR INTERAL SURVEY NAME
if self.name:
matchObject = re.match(r'^((SN|AT) ?)?(\d{4}\w{1,6})', self.name)
if matchObject:
self.name = matchObject.group(3)
else:
self.internal_name = self.name
self.name = ""
# DO THE SEARCH OF THE TNS AND COMPILE THE RESULTS INTO SEPARATE RESULT
# SETS
self.sourceResultsList, self.photResultsList, self.specResultsList, self.relatedFilesResultsList = self._query_tns()
self.sourceResults = list_of_dictionaries(
log=log,
listOfDictionaries=self.sourceResultsList
)
self.photResults = list_of_dictionaries(
log=log,
listOfDictionaries=self.photResultsList
)
self.specResults = list_of_dictionaries(
log=log,
listOfDictionaries=self.specResultsList
)
self.relatedFilesResults = list_of_dictionaries(
log=log,
listOfDictionaries=self.relatedFilesResultsList
)
return None
@property
@property
def spectra(
self):
"""*The associated source spectral data*
**Usage:**
.. code-block:: python
sourceSpectra = tns.spectra
"""
specResultsList = []
specResultsList[:] = [dict(l) for l in self.specResultsList]
return specResultsList
@property
def files(
self):
"""*The associated source files*
**Usage:**
.. code-block:: python
sourceFiles = tns.files
"""
relatedFilesResultsList = []
relatedFilesResultsList[:] = [dict(l)
for l in self.relatedFilesResultsList]
return relatedFilesResultsList
@property
def photometry(
self):
"""*The associated source photometry*
**Usage:**
.. code-block:: python
sourcePhotometry = tns.photometry
"""
photResultsList = []
photResultsList[:] = [dict(l) for l in self.photResultsList]
return photResultsList
@property
def url(
self):
"""*The generated URL used for searching of the TNS*
**Usage:**
.. code-block:: python
searchURL = tns.url
"""
return self._searchURL
def csv(
self,
dirPath=None):
"""*Render the results in csv format*
**Key Arguments:**
- ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
**Return:**
- `csvSources` -- the top-level transient data
- `csvPhot` -- all photometry associated with the transients
- `csvSpec` -- all spectral data associated with the transients
- `csvFiles` -- all files associated with the matched transients found on the tns
**Usage:**
To render the results in csv format:
.. code-block:: python
csvSources, csvPhot, csvSpec, csvFiles = tns.csv()
print csvSources
.. code-block:: text
TNSId,TNSName,discoveryName,discSurvey,raSex,decSex,raDeg,decDeg,transRedshift,specType,discMag,discMagFilter,discDate,objectUrl,hostName,hostRedshift,separationArcsec,separationNorthArcsec,separationEastArcsec
2016asf,SN2016asf,ASASSN-16cs,ASAS-SN,06:50:36.73,+31:06:45.36,102.6530,31.1126,0.021,SN Ia,17.1,V-Johnson,2016-03-06 08:09:36,http://wis-tns.weizmann.ac.il/object/2016asf,KUG 0647+311,,0.66,0.65,-0.13
You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`.
.. code-block:: python
tns.csv("~/tns")
.. image:: https://i.imgur.com/BwwqMBg.png
:width: 800px
:alt: csv output
"""
if dirPath:
p = self._file_prefix()
csvSources = self.sourceResults.csv(
filepath=dirPath + "/" + p + "sources.csv")
csvPhot = self.photResults.csv(
filepath=dirPath + "/" + p + "phot.csv")
csvSpec = self.specResults.csv(
filepath=dirPath + "/" + p + "spec.csv")
csvFiles = self.relatedFilesResults.csv(
filepath=dirPath + "/" + p + "relatedFiles.csv")
else:
csvSources = self.sourceResults.csv()
csvPhot = self.photResults.csv()
csvSpec = self.specResults.csv()
csvFiles = self.relatedFilesResults.csv()
return csvSources, csvPhot, csvSpec, csvFiles
def json(
self,
dirPath=None):
"""*Render the results in json format*
**Key Arguments:**
- ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
**Return:**
- `jsonSources` -- the top-level transient data
- `jsonPhot` -- all photometry associated with the transients
- `jsonSpec` -- all spectral data associated with the transients
- `jsonFiles` -- all files associated with the matched transients found on the tns
**Usage:**
To render the results in json format:
.. code-block:: python
jsonSources, jsonPhot, jsonSpec, jsonFiles = tns.json()
print jsonSources
.. code-block:: text
[
{
"TNSId": "2016asf",
"TNSName": "SN2016asf",
"decDeg": 31.1126,
"decSex": "+31:06:45.36",
"discDate": "2016-03-06 08:09:36",
"discMag": "17.1",
"discMagFilter": "V-Johnson",
"discSurvey": "ASAS-SN",
"discoveryName": "ASASSN-16cs",
"hostName": "KUG 0647+311",
"hostRedshift": null,
"objectUrl": "http://wis-tns.weizmann.ac.il/object/2016asf",
"raDeg": 102.65304166666667,
"raSex": "06:50:36.73",
"separationArcsec": "0.66",
"separationEastArcsec": "-0.13",
"separationNorthArcsec": "0.65",
"specType": "SN Ia",
"transRedshift": "0.021"
}
]
You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`.
.. code-block:: python
tns.json("~/tns")
.. image:: https://i.imgur.com/wAHqARI.png
:width: 800px
:alt: json output
"""
if dirPath:
p = self._file_prefix()
jsonSources = self.sourceResults.json(
filepath=dirPath + "/" + p + "sources.json")
jsonPhot = self.photResults.json(
filepath=dirPath + "/" + p + "phot.json")
jsonSpec = self.specResults.json(
filepath=dirPath + "/" + p + "spec.json")
jsonFiles = self.relatedFilesResults.json(
filepath=dirPath + "/" + p + "relatedFiles.json")
else:
jsonSources = self.sourceResults.json()
jsonPhot = self.photResults.json()
jsonSpec = self.specResults.json()
jsonFiles = self.relatedFilesResults.json()
return jsonSources, jsonPhot, jsonSpec, jsonFiles
def yaml(
self,
dirPath=None):
"""*Render the results in yaml format*
**Key Arguments:**
- ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
**Return:**
- `yamlSources` -- the top-level transient data
- `yamlPhot` -- all photometry associated with the transients
- `yamlSpec` -- all spectral data associated with the transients
- `yamlFiles` -- all files associated with the matched transients found on the tns
**Usage:**
To render the results in yaml format:
.. code-block:: python
yamlSources, yamlPhot, yamlSpec, yamlFiles = tns.yaml()
print yamlSources
.. code-block:: text
- TNSId: 2016asf
TNSName: SN2016asf
decDeg: 31.1126
decSex: '+31:06:45.36'
discDate: '2016-03-06 08:09:36'
discMag: '17.1'
discMagFilter: V-Johnson
discSurvey: ASAS-SN
discoveryName: ASASSN-16cs
hostName: KUG 0647+311
hostRedshift: null
objectUrl: http://wis-tns.weizmann.ac.il/object/2016asf
raDeg: 102.65304166666667
raSex: '06:50:36.73'
separationArcsec: '0.66'
separationEastArcsec: '-0.13'
separationNorthArcsec: '0.65'
specType: SN Ia
transRedshift: '0.021'
You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`.
.. code-block:: python
tns.yaml("~/tns")
.. image:: https://i.imgur.com/ZpJIC6p.png
:width: 800px
:alt: yaml output
"""
if dirPath:
p = self._file_prefix()
yamlSources = self.sourceResults.yaml(
filepath=dirPath + "/" + p + "sources.yaml")
yamlPhot = self.photResults.yaml(
filepath=dirPath + "/" + p + "phot.yaml")
yamlSpec = self.specResults.yaml(
filepath=dirPath + "/" + p + "spec.yaml")
yamlFiles = self.relatedFilesResults.yaml(
filepath=dirPath + "/" + p + "relatedFiles.yaml")
else:
yamlSources = self.sourceResults.yaml()
yamlPhot = self.photResults.yaml()
yamlSpec = self.specResults.yaml()
yamlFiles = self.relatedFilesResults.yaml()
return yamlSources, yamlPhot, yamlSpec, yamlFiles
def markdown(
self,
dirPath=None):
"""*Render the results in markdown format*
**Key Arguments:**
- ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
**Return:**
- `markdownSources` -- the top-level transient data
- `markdownPhot` -- all photometry associated with the transients
- `markdownSpec` -- all spectral data associated with the transients
- `markdownFiles` -- all files associated with the matched transients found on the tns
**Usage:**
To render the results in markdown table format:
.. code-block:: python
markdownSources, markdownPhot, markdownSpec, markdownFiles = tns.markdown()
print markdownSources
.. code-block:: text
| TNSId | TNSName | discoveryName | discSurvey | raSex | decSex | raDeg | decDeg | transRedshift | specType | discMag | discMagFilter | discDate | objectUrl | hostName | hostRedshift | separationArcsec | separationNorthArcsec | separationEastArcsec |
|:---------|:-----------|:---------------|:------------|:-------------|:--------------|:----------|:---------|:---------------|:----------|:---------|:---------------|:---------------------|:----------------------------------------------|:--------------|:--------------|:------------------|:-----------------------|:----------------------|
| 2016asf | SN2016asf | ASASSN-16cs | ASAS-SN | 06:50:36.73 | +31:06:45.36 | 102.6530 | 31.1126 | 0.021 | SN Ia | 17.1 | V-Johnson | 2016-03-06 08:09:36 | http://wis-tns.weizmann.ac.il/object/2016asf | KUG 0647+311 | | 0.66 | 0.65 | -0.13 |
You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`.
.. code-block:: python
tns.markdown("~/tns")
.. image:: https://i.imgur.com/AYLBQoJ.png
:width: 800px
:alt: markdown output
"""
if dirPath:
p = self._file_prefix()
markdownSources = self.sourceResults.markdown(
filepath=dirPath + "/" + p + "sources.md")
markdownPhot = self.photResults.markdown(
filepath=dirPath + "/" + p + "phot.md")
markdownSpec = self.specResults.markdown(
filepath=dirPath + "/" + p + "spec.md")
markdownFiles = self.relatedFilesResults.markdown(
filepath=dirPath + "/" + p + "relatedFiles.md")
else:
markdownSources = self.sourceResults.markdown()
markdownPhot = self.photResults.markdown()
markdownSpec = self.specResults.markdown()
markdownFiles = self.relatedFilesResults.markdown()
return markdownSources, markdownPhot, markdownSpec, markdownFiles
def table(
self,
dirPath=None):
"""*Render the results as an ascii table*
**Key Arguments:**
- ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
**Return:**
- `tableSources` -- the top-level transient data
- `tablePhot` -- all photometry associated with the transients
- `tableSpec` -- all spectral data associated with the transients
- `tableFiles` -- all files associated with the matched transients found on the tns
**Usage:**
To render the results in ascii table format:
.. code-block:: python
tableSources, tablePhot, tableSpec, tableFiles = tns.table()
print tableSources
.. code-block:: text
+----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+
| TNSId | TNSName | discoveryName | discSurvey | raSex | decSex | raDeg | decDeg | transRedshift | specType | discMag | discMagFilter | discDate | objectUrl | hostName | hostRedshift | separationArcsec | separationNorthArcsec | separationEastArcsec |
+----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+
| 2016asf | SN2016asf | ASASSN-16cs | ASAS-SN | 06:50:36.73 | +31:06:45.36 | 102.6530 | 31.1126 | 0.021 | SN Ia | 17.1 | V-Johnson | 2016-03-06 08:09:36 | http://wis-tns.weizmann.ac.il/object/2016asf | KUG 0647+311 | | 0.66 | 0.65 | -0.13 |
+----------+------------+----------------+-------------+--------------+---------------+-----------+----------+----------------+-----------+----------+----------------+----------------------+-----------------------------------------------+---------------+---------------+-------------------+------------------------+-----------------------+
You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`.
.. code-block:: python
tns.table("~/tns")
.. image:: https://i.imgur.com/m09M0ho.png
:width: 800px
:alt: ascii files
"""
if dirPath:
p = self._file_prefix()
tableSources = self.sourceResults.table(
filepath=dirPath + "/" + p + "sources.ascii")
tablePhot = self.photResults.table(
filepath=dirPath + "/" + p + "phot.ascii")
tableSpec = self.specResults.table(
filepath=dirPath + "/" + p + "spec.ascii")
tableFiles = self.relatedFilesResults.table(
filepath=dirPath + "/" + p + "relatedFiles.ascii")
else:
tableSources = self.sourceResults.table()
tablePhot = self.photResults.table()
tableSpec = self.specResults.table()
tableFiles = self.relatedFilesResults.table()
return tableSources, tablePhot, tableSpec, tableFiles
def mysql(
        self,
        tableNamePrefix="TNS",
        dirPath=None):
    """*Render the results as MySQL INSERT statements*

    **Key Arguments:**
        - ``tableNamePrefix`` -- the prefix for the database table names to assign the insert statements to. Default *TNS*.
        - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*

    **Return:**
        - ``mysqlSources`` -- the top-level transient data
        - ``mysqlPhot`` -- all photometry associated with the transients
        - ``mysqlSpec`` -- all spectral data associated with the transients
        - ``mysqlFiles`` -- all files associated with the matched transients found on the tns

    **Usage:**

    .. code-block:: python

        mysqlSources, mysqlPhot, mysqlSpec, mysqlFiles = tns.mysql("TNS")

    When ``dirPath`` is given, the four flavours of data (sources,
    photometry, spectra and files) are written to separate .sql files, each
    preceded by a matching CREATE TABLE statement so the inserts can be run
    against a fresh database. All data can be associated with its transient
    source via the transient's unique `TNSId`.
    """
    if dirPath:
        # WRITING TO FILE -- EACH RESULT SET GETS ITS OWN .sql FILE WITH A
        # CREATE TABLE STATEMENT PREPENDED
        p = self._file_prefix()
        # NOTE: the `%(tableNamePrefix)s` placeholders below are filled in
        # from locals() immediately after each template
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_sources` (
    `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
    `TNSId` varchar(20) NOT NULL,
    `TNSName` varchar(20) DEFAULT NULL,
    `dateCreated` datetime DEFAULT NULL,
    `decDeg` double DEFAULT NULL,
    `decSex` varchar(45) DEFAULT NULL,
    `discDate` datetime DEFAULT NULL,
    `discMag` double DEFAULT NULL,
    `discMagFilter` varchar(45) DEFAULT NULL,
    `discSurvey` varchar(100) DEFAULT NULL,
    `discoveryName` varchar(100) DEFAULT NULL,
    `objectUrl` varchar(200) DEFAULT NULL,
    `raDeg` double DEFAULT NULL,
    `raSex` varchar(45) DEFAULT NULL,
    `specType` varchar(100) DEFAULT NULL,
    `transRedshift` double DEFAULT NULL,
    `updated` tinyint(4) DEFAULT '0',
    `dateLastModified` datetime DEFAULT NULL,
    `hostName` VARCHAR(100) NULL DEFAULT NULL,
    `hostRedshift` DOUBLE NULL DEFAULT NULL,
    `survey` VARCHAR(100) NULL DEFAULT NULL,
    PRIMARY KEY (`primaryId`),
    UNIQUE KEY `tnsid` (`TNSId`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlSources = self.sourceResults.mysql(
            tableNamePrefix + "_sources", filepath=dirPath + "/" + p + "sources.sql", createStatement=createStatement)
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_photometry` (
    `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
    `TNSId` varchar(20) NOT NULL,
    `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
    `exptime` double DEFAULT NULL,
    `filter` varchar(100) DEFAULT NULL,
    `limitingMag` tinyint(4) DEFAULT NULL,
    `mag` double DEFAULT NULL,
    `magErr` double DEFAULT NULL,
    `magUnit` varchar(100) DEFAULT NULL,
    `objectName` varchar(100) DEFAULT NULL,
    `obsdate` datetime DEFAULT NULL,
    `reportAddedDate` datetime DEFAULT NULL,
    `suggestedType` varchar(100) DEFAULT NULL,
    `survey` varchar(100) DEFAULT NULL,
    `telescope` varchar(100) DEFAULT NULL,
    `updated` tinyint(4) DEFAULT '0',
    `dateLastModified` datetime DEFAULT NULL,
    `remarks` VARCHAR(800) NULL DEFAULT NULL,
    `sourceComment` VARCHAR(800) NULL DEFAULT NULL,
    PRIMARY KEY (`primaryId`),
    UNIQUE KEY `tnsid_survey_obsdate` (`TNSId`,`survey`,`obsdate`),
    UNIQUE INDEX `u_tnsid_survey_obsdate` (`TNSId` ASC, `survey` ASC, `obsdate` ASC),
    UNIQUE INDEX `u_tnsid_obsdate_objname` (`TNSId` ASC, `obsdate` ASC, `objectName` ASC)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlPhot = self.photResults.mysql(
            tableNamePrefix + "_photometry", filepath=dirPath + "/" + p + "phot.sql", createStatement=createStatement)
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_spectra` (
    `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
    `TNSId` varchar(45) NOT NULL,
    `TNSuser` varchar(45) DEFAULT NULL,
    `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
    `exptime` double DEFAULT NULL,
    `obsdate` datetime DEFAULT NULL,
    `reportAddedDate` datetime DEFAULT NULL,
    `specType` varchar(100) DEFAULT NULL,
    `survey` varchar(100) DEFAULT NULL,
    `telescope` varchar(100) DEFAULT NULL,
    `transRedshift` double DEFAULT NULL,
    `updated` tinyint(4) DEFAULT '0',
    `dateLastModified` datetime DEFAULT NULL,
    `remarks` VARCHAR(800) NULL DEFAULT NULL,
    `sourceComment` VARCHAR(800) NULL DEFAULT NULL,
    PRIMARY KEY (`primaryId`),
    UNIQUE KEY `u_tnsid_survey_obsdate` (`TNSId`,`survey`,`obsdate`),
    UNIQUE KEY `u_id_user_obsdate` (`TNSId`,`TNSuser`,`obsdate`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlSpec = self.specResults.mysql(
            tableNamePrefix + "_spectra", filepath=dirPath + "/" + p + "spec.sql", createStatement=createStatement)
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_files` (
    `primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
    `TNSId` varchar(100) NOT NULL,
    `dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
    `dateObs` datetime DEFAULT NULL,
    `filename` varchar(200) DEFAULT NULL,
    `spec1phot2` tinyint(4) DEFAULT NULL,
    `url` varchar(800) DEFAULT NULL,
    `updated` tinyint(4) DEFAULT '0',
    `dateLastModified` datetime DEFAULT NULL,
    `comment` VARCHAR(800) NULL DEFAULT NULL,
    PRIMARY KEY (`primaryId`),
    UNIQUE KEY `tnsid_url` (`TNSId`,`url`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlFiles = self.relatedFilesResults.mysql(
            tableNamePrefix + "_files", filepath=dirPath + "/" + p + "relatedFiles.sql", createStatement=createStatement)
    else:
        # NO dirPath -- JUST RENDER THE INSERT STATEMENTS AS STRINGS
        mysqlSources = self.sourceResults.mysql(
            tableNamePrefix + "_sources")
        mysqlPhot = self.photResults.mysql(tableNamePrefix + "_photometry")
        mysqlSpec = self.specResults.mysql(tableNamePrefix + "_spectra")
        mysqlFiles = self.relatedFilesResults.mysql(
            tableNamePrefix + "_files")
    return mysqlSources, mysqlPhot, mysqlSpec, mysqlFiles
def _query_tns(self):
    """*Determine how to query the TNS, send the query and parse the results*

    Pages through the TNS search results ``self.batchSize`` rows at a time
    and parses every returned row into four flat result tables.

    **Return:**
        - ``sourceTable`` -- top-level discovery data, one dict per transient (sorted by separation for conesearches)
        - ``photoTable`` -- all photometry associated with the matched transients
        - ``specTable`` -- all spectra associated with the matched transients
        - ``relatedFilesTable`` -- all files associated with the matched transients

    Returns ``None`` if the TNS responds with a non-200 HTTP status code.
    """
    self.log.info('starting the ``get`` method')

    sourceTable = []
    photoTable = []
    specTable = []
    relatedFilesTable = []

    # THIS stop IS TO KEEP TRACK OF THE TNS PAGINATION IF MANY RESULT PAGES
    # ARE RETURNED
    stop = False
    sourceCount = 0
    while not stop:
        status_code, content, self._searchURL = self._get_tns_search_results()
        if status_code != 200:
            # TYPO FIX in the original log message ('cound'/'reuslts')
            self.log.error(
                'could not get the search results from the TNS, HTML error code %(status_code)s ' % locals())
            return None
        if "No results found" in content:
            # BUG FIX: use print() call syntax -- the py2-only print
            # statement is a syntax error on python 3
            print("No results found")
            return sourceTable, photoTable, specTable, relatedFilesTable

        # A PARTIAL PAGE MEANS THIS IS THE FINAL PAGE OF RESULTS
        if self._parse_transient_rows(content, True) < self.batchSize:
            stop = True
        else:
            self.page += 1
            thisPage = self.page
            print("Downloaded %(thisPage)s page(s) from the TNS. %(sourceCount)s transients parsed so far." % locals())
        sourceCount += self.batchSize
        print("\t" + self._searchURL)
        # BE NICE TO THE TNS -- THROTTLE PAGE REQUESTS
        timesleep.sleep(1)

        # PARSE ALL ROWS RETURNED ON THIS PAGE
        for transientRow in self._parse_transient_rows(content):
            # TOP LEVEL DISCOVERY CONTENT
            sourceContent = transientRow.group()
            discInfo, TNSId = self._parse_discovery_information(
                sourceContent)
            sourceTable.append(discInfo)
            # PHOTOMETRY
            phot, relatedFiles = self._parse_photometry_data(
                sourceContent, TNSId)
            photoTable += phot
            relatedFilesTable += relatedFiles
            # SPECTRA
            spec, relatedFiles = self._parse_spectral_data(
                sourceContent, TNSId)
            specTable += spec
            relatedFilesTable += relatedFiles

    # SORT BY SEPARATION FROM THE SEARCH COORDINATES -- BEST-EFFORT, AS THE
    # separation KEYS ONLY EXIST FOR COORDINATE SEARCHES (bare `except:`
    # narrowed to the errors itemgetter/sorted can actually raise here)
    try:
        sourceTable = sorted(sourceTable, key=itemgetter(
            'separationArcsec'), reverse=False)
    except (KeyError, TypeError):
        pass

    self.log.info('completed the ``get`` method')
    return sourceTable, photoTable, specTable, relatedFilesTable
def _get_tns_search_results(
        self):
    """*Send the search query to the TNS and return the raw HTTP response*

    **Return:**
        - ``status_code`` -- the HTTP status code of the response
        - ``content`` -- the raw body of the response
        - ``url`` -- the fully-resolved search URL (useful for debugging)

    **Raises:**
        - ``requests.exceptions.RequestException`` -- if the HTTP request cannot be completed
    """
    self.log.info('starting the ``_get_tns_search_results`` method')
    try:
        response = requests.get(
            url="http://wis-tns.weizmann.ac.il/search",
            params={
                "page": self.page,
                "ra": self.ra,
                "decl": self.dec,
                "radius": self.radiusArcsec,
                "name": self.name,
                "internal_name": self.internal_name,
                "date_start[date]": self.start,
                "date_end[date]": self.end,
                "num_page": self.batchSize,
                "display[redshift]": "1",
                "display[hostname]": "1",
                "display[host_redshift]": "1",
                "display[source_group_name]": "1",
                "display[internal_name]": "1",
                "display[spectra_count]": "1",
                "display[discoverymag]": "1",
                "display[discmagfilter]": "1",
                "display[discoverydate]": "1",
                "display[discoverer]": "1",
                "display[sources]": "1",
                "display[bibcode]": "1",
            },
        )
    except requests.exceptions.RequestException:
        # BUG FIX: previously the exception was only printed and execution
        # fell through to `response.status_code` below, raising a confusing
        # UnboundLocalError. Report and re-raise instead.
        print('HTTP Request failed')
        raise
    self.log.info('completed the ``_get_tns_search_results`` method')
    return response.status_code, response.content, response.url
def _file_prefix(
        self):
    """*Generate a file prefix based on the type of search, for saving files to disk*

    **Return:**
        - ``prefix`` -- the file prefix
    """
    self.log.info('starting the ``_file_prefix`` method')
    if self.ra:
        # CONESEARCH -- TIMESTAMP THE FILES
        now = datetime.now()
        prefix = now.strftime("%Y%m%dt%H%M%S%f_tns_conesearch_")
    elif self.name:
        prefix = self.name + "_tns_conesearch_"
    elif self.internal_name:
        prefix = self.internal_name + "_tns_conesearch_"
    elif self.discInLastDays:
        discInLastDays = str(self.discInLastDays)
        now = datetime.now()
        prefix = now.strftime(
            discInLastDays + "d_since_%Y%m%d_tns_conesearch_")
    else:
        # BUG FIX: previously `prefix` was left unbound when no search
        # criterion was set, raising an UnboundLocalError here -- fall back
        # to a generic timestamped prefix instead
        now = datetime.now()
        prefix = now.strftime("%Y%m%dt%H%M%S%f_tns_search_")
    self.log.info('completed the ``_file_prefix`` method')
    return prefix
def _parse_transient_rows(
        self,
        content,
        count=False):
    """*Extract the individual transient rows from a TNS results page*

    **Key Arguments:**
        - ``content`` -- the content from the TNS results page.
        - ``count`` -- when True, return only the number of rows found

    **Return:**
        - ``transientRows`` -- an iterator of regex matches (or an int row-count when ``count`` is True)
    """
    self.log.info('starting the ``_parse_transient_rows`` method')

    # EACH ROW CONTAINS AN '/object/' ANCHOR; A ROW ENDS WHERE THE NEXT ONE
    # STARTS, OR AT THE END-OF-CONTENT MARKER
    rowPattern = r"""\n([^\n]*?<a href="/object/.*?)(?=\n[^\n]*?<a href="/object/|<\!\-\- /\.section, /#content \-\->)"""

    if count:
        # ONLY THE NUMBER OF SOURCE BLOCKS IS NEEDED
        return len(re.findall(rowPattern, content, flags=re.S))

    # ONE MATCH PER SOURCE BLOCK
    rowIterator = re.finditer(rowPattern, content, flags=re.S)

    self.log.info('completed the ``_parse_transient_rows`` method')
    return rowIterator
def _parse_discovery_information(
        self,
        content):
    """*Parse discovery information from one row on the TNS results page*

    **Key Arguments:**
        - ``content`` -- a table row from the TNS results page.

    **Return:**
        - ``discoveryData`` -- an ordered dictionary of the discovery data for the first matched transient
        - ``TNSId`` -- the unique TNS id for the transient
    """
    self.log.info('starting the ``_parse_discovery_information`` method')

    # ASTROCALC UNIT CONVERTER OBJECT -- SEXAGESIMAL -> DECIMAL DEGREES
    converter = unit_conversion(
        log=self.log
    )

    matches = re.finditer(
        r"""<tr class="row-.*?"><td class="cell-id">(?P<tnsId>\d*?)</td><td class="cell-name"><a href="(?P<objectUrl>.*?)">(?P<TNSName>.*?)</a></td><td class="cell-.*?<td class="cell-ra">(?P<raSex>.*?)</td><td class="cell-decl">(?P<decSex>.*?)</td><td class="cell-ot_name">(?P<specType>.*?)</td><td class="cell-redshift">(?P<transRedshift>.*?)</td><td class="cell-hostname">(?P<hostName>.*?)</td><td class="cell-host_redshift">(?P<hostRedshift>.*?)</td><td class="cell-source_group_name">(?P<discSurvey>.*?)</td>.*?<td class="cell-internal_name">(<a.*?>)?(?P<discoveryName>.*?)(</a>)?</td>.*?<td class="cell-discoverymag">(?P<discMag>.*?)</td><td class="cell-disc_filter_name">(?P<discMagFilter>.*?)</td><td class="cell-discoverydate">(?P<discDate>.*?)</td><td class="cell-discoverer">(?P<sender>.*?)</td>.*?</tr>""",
        content,
        flags=0  # re.S
    )

    discoveryData = []
    for match in matches:
        row = match.groupdict()

        # TRIM WHITESPACE AND NULL OUT EMPTY VALUES
        # (`items` instead of py2-only `iteritems`; only values are changed
        # during iteration, which is safe)
        for k, v in row.items():
            row[k] = v.strip()
            if len(v) == 0:
                row[k] = None

        # BUG FIX: regex captures are strings, so the original `== 0` test
        # could never be true -- a zero redshift on the TNS means 'unknown'
        # and is nulled here
        if row["transRedshift"] in (0, "0"):
            row["transRedshift"] = None

        # PREFIX BARE YEAR-STYLE NAMES (e.g. '2016asf' -> 'SN2016asf')
        if row["TNSName"][0] in ["1", "2"]:
            row["TNSName"] = "SN" + row["TNSName"]
        row["objectUrl"] = "http://wis-tns.weizmann.ac.il" + \
            row["objectUrl"]

        # CONVERT COORDINATES TO DECIMAL DEGREES
        row["raDeg"] = converter.ra_sexegesimal_to_decimal(
            ra=row["raSex"]
        )
        row["decDeg"] = converter.dec_sexegesimal_to_decimal(
            dec=row["decSex"]
        )

        # IF THIS IS A COORDINATE SEARCH, ADD SEPARATION FROM
        # ORIGINAL QUERY COORDINATES
        if self.ra:
            # CALCULATE SEPARATION IN ARCSEC
            from astrocalc.coords import separations
            calculator = separations(
                log=self.log,
                ra1=self.ra,
                dec1=self.dec,
                ra2=row["raDeg"],
                dec2=row["decDeg"],
            )
            angularSeparation, north, east = calculator.get()
            row["separationArcsec"] = angularSeparation
            row["separationNorthArcsec"] = north
            row["separationEastArcsec"] = east

        # FALL BACK TO THE REPORT SENDER WHEN NO DISCOVERY SURVEY IS GIVEN
        if not row["discSurvey"]:
            row["survey"] = row["sender"]
        del row["sender"]
        del row["tnsId"]

        # BUILD THE UNIQUE TNSId FROM THE NAME
        row["TNSName"] = row["TNSName"].replace(" ", "")
        row["TNSId"] = row["TNSName"].replace(
            "SN", "").replace("AT", "")
        TNSId = row["TNSId"]

        # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
        orow = collections.OrderedDict()
        keyOrder = ["TNSId", "TNSName", "discoveryName", "discSurvey", "raSex", "decSex", "raDeg", "decDeg",
                    "transRedshift", "specType", "discMag", "discMagFilter", "discDate", "objectUrl", "hostName", "hostRedshift", "separationArcsec", "separationNorthArcsec", "separationEastArcsec"]
        for k, v in row.items():
            if k not in keyOrder:
                keyOrder.append(k)
        for k in keyOrder:
            try:
                orow[k] = row[k]
            except KeyError:
                # e.g. the separation keys are absent for non-coordinate
                # searches -- log and continue
                self.log.info(
                    "`%(k)s` not found in the source data for %(TNSId)s" % locals())
        # BUG FIX: append the ordered copy (`orow`) -- previously the
        # unordered `row` was appended and the ordering work was discarded
        discoveryData.append(orow)

    self.log.info('completed the ``_parse_discovery_information`` method')
    # ONLY THE FIRST MATCHED ROW IS RETURNED (raises IndexError if the regex
    # matched nothing -- callers only pass rows containing an object link)
    return discoveryData[0], TNSId
def _parse_photometry_data(
        self,
        content,
        TNSId):
    """*Parse photometry data from a row in the TNS results content*

    **Key Arguments:**
        - ``content`` -- a table row from the TNS results page
        - ``TNSId`` -- the TNS id of the transient

    **Return:**
        - ``photData`` -- a list of dictionaries of the photometry data
        - ``relatedFilesTable`` -- a list of dictionaries of transient photometry related files
    """
    self.log.info('starting the ``_parse_photometry_data`` method')

    photData = []
    relatedFilesTable = []

    # AT REPORT BLOCK -- everything from the 'AT reportings' header up to
    # the 'Classification reportings' header (or the end of the content)
    ATBlock = re.search(
        r"""<tr class=[^\n]*?AT reportings.*?(?=<tr class=[^\n]*?Classification reportings|$)""",
        content,
        flags=re.S  # re.S
    )

    if ATBlock:
        ATBlock = ATBlock.group()
        # ONE MATCH PER INDIVIDUAL AT REPORT WITHIN THE BLOCK
        reports = re.finditer(
            r"""<tr class="row-[^"]*"><td class="cell-id">.*?</table>""",
            ATBlock,
            flags=re.S  # re.S
        )
        # FILES ASSOCIATED WITH THE WHOLE AT BLOCK
        relatedFiles = self._parse_related_files(ATBlock)

        for r in reports:
            # REPORT-LEVEL (HEADER) METADATA -- shared by every photometry
            # point within this report
            header = re.search(
                r"""<tr class="row[^"]*".*?time_received">(?P<reportAddedDate>[^<]*).*?user_name">(?P<sender>[^<]*).*?reporter_name">(?P<reporters>[^<]*).*?source_group_name">(?P<surveyGroup>[^<]*).*?ra">(?P<ra>[^<]*).*?decl">(?P<dec>[^<]*).*?discovery_date">(?P<obsDate>[^<]*).*?flux">(?P<mag>[^<]*).*?filter_name">(?P<magFilter>[^<]*).*?related_files">(?P<relatedFiles>[^<]*).*?type_name">(?P<suggestedType>[^<]*).*?hostname">(?P<hostName>[^<]*).*?host_redshift">(?P<hostRedshift>[^<]*).*?internal_name">(?P<objectName>[^<]*).*?groups">(?P<survey>[^<]*).*?remarks">(?P<sourceComment>[^<]*)""",
                r.group(),
                flags=0  # re.S
            )
            try:
                header = header.groupdict()
            except:
                # HEADER REGEX FAILED TO MATCH (header is None) -- dump the
                # report content for debugging. NOTE(review): execution
                # continues and will raise on the next line -- confirm
                # whether a `continue` was intended here.
                print r.group()
            header["TNSId"] = TNSId

            # DROP REPORT-LEVEL VALUES NOT NEEDED IN THE PHOTOMETRY TABLE
            del header["reporters"]
            del header["surveyGroup"]
            del header["hostName"]
            del header["hostRedshift"]
            del header["mag"]
            del header["magFilter"]
            del header["obsDate"]
            del header["ra"]
            del header["dec"]

            if not self.comments:
                del header['sourceComment']
            else:
                # FLATTEN THE MULTI-LINE COMMENT TO A SINGLE LINE, SWAP
                # DOUBLE QUOTES FOR SINGLE AND CLIP TO 750 CHARACTERS
                theseComments = header[
                    "sourceComment"].split("\n")
                header["sourceComment"] = ""
                for c in theseComments:
                    header["sourceComment"] += " " + c.strip()
                header["sourceComment"] = header[
                    "sourceComment"].strip().replace('"', "'")[0:750]

            # INDIVIDUAL PHOTOMETRY POINTS WITHIN THIS REPORT
            phot = re.finditer(
                r"""<tr class="row\-[^"]*".*?obsdate">(?P<obsdate>[^<]*).*?flux">(?P<mag>[^<]*).*?fluxerr">(?P<magErr>[^<]*).*?limflux">(?P<limitingMag>[^<]*).*?unit_name">(?P<magUnit>[^<]*).*?filter_name">(?P<filter>[^<]*).*?tel_inst">(?P<telescope>[^<]*).*?exptime">(?P<exptime>[^<]*).*?observer">(?P<observer>[^<]*).*?-remarks">(?P<remarks>[^<]*)""",
                r.group(),
                flags=0  # re.S
            )
            filesAppended = False
            for p in phot:
                p = p.groupdict()
                del p["observer"]
                # A LIMITING FLUX WITH NO MAG IS A NON-DETECTION -- RECORD
                # THE LIMIT AS THE MAG AND FLAG IT (limitingMag: 1=limit,
                # 0=detection)
                if p["limitingMag"] and not p["mag"]:
                    p["mag"] = p["limitingMag"]
                    p["limitingMag"] = 1
                    p["remarks"] = p["remarks"].replace(
                        "[Last non detection]", "")
                else:
                    p["limitingMag"] = 0
                if not self.comments:
                    del p["remarks"]
                # MERGE THE REPORT-LEVEL METADATA INTO THE POINT
                p.update(header)

                # RECORD THE REPORT'S RELATED FILES ONCE, AGAINST THE FIRST
                # POINT THAT FLAGS THEM
                if p["relatedFiles"] and filesAppended == False:
                    filesAppended = True
                    for f in relatedFiles:
                        # ORDER THE DICTIONARY FOR THIS ROW OF
                        # RESULTS
                        thisFile = collections.OrderedDict()
                        thisFile["TNSId"] = TNSId
                        thisFile["filename"] = f[
                            "filepath"].split("/")[-1]
                        thisFile["url"] = f["filepath"]
                        if self.comments:
                            thisFile["comment"] = f[
                                "fileComment"].replace("\n", " ").strip().replace('"', "'")[0:750]
                        thisFile["dateObs"] = p["obsdate"]
                        # spec1phot2: 2 == photometry-related file
                        thisFile["spec1phot2"] = 2
                        relatedFilesTable.append(thisFile)

                # FALL BACK TO THE REPORT SENDER WHEN NO SURVEY OR OBJECT
                # NAME IS GIVEN
                if not p["survey"] and not p["objectName"]:
                    p["survey"] = p["sender"]
                del p["relatedFiles"]
                del p["sender"]

                # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
                orow = collections.OrderedDict()
                keyOrder = ["TNSId", "survey", "obsdate", "filter", "limitingMag", "mag", "magErr",
                            "magUnit", "suggestedType", "telescope", "exptime", "reportAddedDate"]
                for k, v in p.iteritems():
                    if k not in keyOrder:
                        keyOrder.append(k)
                for k in keyOrder:
                    try:
                        orow[k] = p[k]
                    except:
                        # KEY ABSENT FOR THIS ROW -- LOG AND CONTINUE
                        self.log.info(
                            "`%(k)s` not found in the source data for %(TNSId)s" % locals())
                        pass
                photData.append(orow)

    self.log.info('completed the ``_parse_photometry_data`` method')
    return photData, relatedFilesTable
def _parse_related_files(
        self,
        content):
    """*Extract related-file URLs and their comments from TNS page content*

    **Key Arguments:**
        - ``content`` -- the content to parse.

    **Return:**
        - ``relatedFiles`` -- a list of dictionaries (``filepath``/``fileComment``) of transient related files
    """
    self.log.info('starting the ``_parse_related_files`` method')

    # ONE MATCH PER FILENAME CELL -- CAPTURE THE FILE URL AND ITS REMARKS
    fileMatches = re.finditer(
        r"""<td class="cell-filename">.*?href="(?P<filepath>[^"]*).*?remarks">(?P<fileComment>[^<]*)""",
        content,
        flags=0  # re.S
    )
    relatedFiles = [m.groupdict() for m in fileMatches]

    self.log.info('completed the ``_parse_related_files`` method')
    return relatedFiles
def _parse_spectral_data(
        self,
        content,
        TNSId):
    """*Parse spectra data from a row in the TNS results content*

    **Key Arguments:**
        - ``content`` -- a table row from the TNS results page
        - ``TNSId`` -- the TNS id of the transient

    **Return:**
        - ``specData`` -- a list of dictionaries of the spectral data
        - ``relatedFilesTable`` -- a list of dictionaries of transient spectrum related files
    """
    self.log.info('starting the ``_parse_spectral_data`` method')

    specData = []
    relatedFilesTable = []

    # CLASSIFICATION BLOCK -- everything from the 'Classification
    # reportings' header to the end of the content
    classBlock = re.search(
        r"""<tr class=[^\n]*?Classification reportings.*$""",
        content,
        flags=re.S  # re.S
    )

    if classBlock:
        classBlock = classBlock.group()
        # ONE MATCH PER INDIVIDUAL CLASSIFICATION REPORT
        reports = re.finditer(
            r"""<tr class="row-[^"]*"><td class="cell-id">.*?</tbody>\s*</table>\s*</div></td> </tr>\s*</tbody>\s*</table>\s*</div></td> </tr>""",
            classBlock,
            flags=re.S  #
        )
        # FILES ASSOCIATED WITH THE WHOLE CLASSIFICATION BLOCK
        relatedFiles = self._parse_related_files(classBlock)

        for r in reports:
            # REPORT-LEVEL (HEADER) METADATA -- shared by every spectrum
            # within this report
            header = re.search(
                r"""<tr class="row.*?time_received">(?P<reportAddedDate>[^<]*).*?user_name">(?P<TNSuser>[^<]*).*?classifier_name">(?P<reporters>[^<]*).*?source_group_name">(?P<survey>[^<]*).*?-type">(?P<specType>[^<]*).*?-redshift">(?P<transRedshift>[^<]*).*?-related_files">(?P<relatedFiles>[^<]*).*?-groups">(?P<surveyGroup>[^<]*).*?-remarks">(?P<sourceComment>[^<]*)</td>""",
                r.group(),
                flags=re.S  # re.S
            )
            if not header:
                # HEADER REGEX DID NOT MATCH -- SKIP THIS REPORT
                continue
            header = header.groupdict()
            header["TNSId"] = TNSId
            # DROP REPORT-LEVEL VALUES NOT NEEDED IN THE SPECTRA TABLE
            del header["reporters"]
            del header["surveyGroup"]
            del header["survey"]

            if not self.comments:
                del header['sourceComment']
            else:
                # FLATTEN THE MULTI-LINE COMMENT TO A SINGLE LINE, SWAP
                # DOUBLE QUOTES FOR SINGLE AND CLIP TO 750 CHARACTERS
                theseComments = header[
                    "sourceComment"].split("\n")
                header["sourceComment"] = ""
                for c in theseComments:
                    header["sourceComment"] += " " + c.strip()
                header["sourceComment"] = header[
                    "sourceComment"].strip().replace('"', "'")[0:750]

            # INDIVIDUAL SPECTRA WITHIN THIS REPORT (the ascii/fits file
            # anchors are optional capture groups)
            spec = re.finditer(
                r"""<tr class="class-results-.*?-obsdate">(?P<obsdate>[^<]*).*?-tel_inst">(?P<telescope>[^<]*).*?-exptime">(?P<exptime>[^<]*).*?-observer">(?P<sender>[^<]*).*?-reducer">(?P<reducer>[^<]*).*?-source_group_name">(?P<survey>[^<]*).*?-asciifile">(.*?<a href="(?P<filepath>[^"]*)".*?</a>)?.*?-fitsfile">(.*?<a href="(?P<fitsFilepath>[^"]*)".*?</a>)?.*?-groups">(?P<surveyGroup>[^<]*).*?-remarks">(?P<remarks>[^<]*)""",
                r.group(),
                flags=0  # re.S
            )
            filesAppended = False
            for s in spec:
                s = s.groupdict()
                # DROP SPECTRUM-LEVEL VALUES NOT NEEDED IN THE TABLE
                del s["sender"]
                del s["surveyGroup"]
                del s["reducer"]
                if not self.comments:
                    del s["remarks"]
                else:
                    # SINGLE-QUOTE AND CLIP THE REMARKS TO 750 CHARACTERS
                    s["remarks"] = s["remarks"].replace('"', "'")[0:750]
                # MERGE THE REPORT-LEVEL METADATA INTO THE SPECTRUM ROW
                s.update(header)

                # RECORD THE REPORT'S RELATED FILES ONCE, AGAINST THE FIRST
                # SPECTRUM THAT FLAGS THEM
                if s["relatedFiles"] and filesAppended == False:
                    filesAppended = True
                    for f in relatedFiles:
                        # ORDER THE DICTIONARY FOR THIS ROW OF
                        # RESULTS
                        thisFile = collections.OrderedDict()
                        thisFile["TNSId"] = TNSId
                        thisFile["filename"] = f[
                            "filepath"].split("/")[-1]
                        thisFile["url"] = f["filepath"]
                        if self.comments:
                            thisFile["comment"] = f[
                                "fileComment"].replace("\n", " ").strip()
                        thisFile["dateObs"] = s["obsdate"]
                        # spec1phot2: 1 == spectrum-related file
                        thisFile["spec1phot2"] = 1
                        relatedFilesTable.append(thisFile)

                # RECORD THE ASCII AND FITS FILES ATTACHED TO THIS SPECTRUM
                for ffile in [s["filepath"], s["fitsFilepath"]]:
                    if ffile:
                        # ORDER THE DICTIONARY FOR THIS ROW OF
                        # RESULTS
                        thisFile = collections.OrderedDict()
                        thisFile["TNSId"] = TNSId
                        thisFile["filename"] = ffile.split(
                            "/")[-1]
                        thisFile["url"] = ffile
                        if self.comments:
                            thisFile["comment"] = ""
                        thisFile["dateObs"] = s["obsdate"]
                        thisFile["spec1phot2"] = 1
                        relatedFilesTable.append(thisFile)
                del s["filepath"]
                del s["fitsFilepath"]
                del s["relatedFiles"]

                # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
                orow = collections.OrderedDict()
                keyOrder = ["TNSId", "survey", "obsdate", "specType", "transRedshift",
                            "telescope", "exptime", "reportAddedDate", "TNSuser"]
                for k, v in s.iteritems():
                    if k not in keyOrder:
                        keyOrder.append(k)
                for k in keyOrder:
                    try:
                        orow[k] = s[k]
                    except:
                        # KEY ABSENT FOR THIS ROW -- LOG AND CONTINUE
                        self.log.info(
                            "`%(k)s` not found in the source data for %(TNSId)s" % locals())
                        pass
                specData.append(orow)

    self.log.info('completed the ``_parse_spectral_data`` method')
    return specData, relatedFilesTable
|
thespacedoctor/transientNamer
|
transientNamer/search.py
|
search.spectra
|
python
|
def spectra(
        self):
    """*The associated source spectral data*

    Returns plain-dict copies of the spectral result rows so callers
    cannot mutate the internal result list.
    """
    return [dict(row) for row in self.specResultsList]
|
*The associated source spectral data*
**Usage:**
.. code-block:: python
sourceSpectra = tns.spectra
|
train
|
https://github.com/thespacedoctor/transientNamer/blob/39be410c84275ed4669632f5df67e728d66a318f/transientNamer/search.py#L167-L179
| null |
class search():
"""
*The worker class for the transient namer search module*
**Key Arguments:**
- ``log`` -- logger
- ``settings`` -- the settings dictionary
- ``ra`` -- RA of the location being checked
- ``dec`` -- DEC of the location being searched
- ``radiusArcsec`` - the radius of the conesearch to perform against the TNS
- ``name`` -- name of the object to search the TNS for
- ``discInLastDays`` -- search the TNS for transient discovered in the last X days
- ``comments`` -- print the comments from the TNS, note these can be long making table outputs somewhat unreadable. Default *False*
**Usage:**
To initiate a search object to search the TNS via an object name (either TNS or survey names accepted):
.. code-block:: python
from transientNamer import search
tns = search(
log=log,
name="Gaia16bbi"
)
or for a conesearch use something similar to:
.. code-block:: python
from transientNamer import search
tns = search(
log=log,
ra="06:50:36.74",
dec="+31:06:44.7",
radiusArcsec=5
)
Note the search method can accept coordinates in sexagesimal or decimal degree formats.
To list all new objects discovered in the last three weeks, then use:
.. code-block:: python
from transientNamer import search
tns = search(
log=log,
discInLastDays=21
)
"""
# Initialisation
def __init__(
        self,
        log,
        ra="",
        dec="",
        radiusArcsec="",
        name="",
        discInLastDays="",
        settings=False,
        comments=False
):
    """*Store the search criteria, query the TNS and wrap the four result sets*"""
    self.log = log
    log.debug("instansiating a new 'search' object")
    self.settings = settings
    self.ra = ra
    self.dec = dec
    self.radiusArcsec = radiusArcsec
    self.comments = comments
    self.name = name
    self.internal_name = ""
    self.discInLastDays = discInLastDays
    self.page = 0
    self.batchSize = 1000

    # BUILD THE DISCOVERY-DATE WINDOW TO SEARCH THE TNS OVER (TOMORROW BACK
    # TO `discInLastDays` DAYS AGO); EMPTY STRINGS MEAN NO DATE CONSTRAINT
    if not discInLastDays:
        self.start = ""
        self.end = ""
    else:
        windowDays = int(discInLastDays)
        self.end = (datetime.now() + timedelta(days=1)).strftime("%Y-%m-%d")
        self.start = (
            datetime.now() - timedelta(days=windowDays)).strftime("%Y-%m-%d")

    # A TNS NAME LOOKS LIKE '2016asf', OPTIONALLY PREFIXED WITH 'SN'/'AT';
    # ANYTHING ELSE IS TREATED AS A SURVEY-INTERNAL NAME
    if self.name:
        tnsNameMatch = re.match(r'^((SN|AT) ?)?(\d{4}\w{1,6})', self.name)
        if tnsNameMatch:
            self.name = tnsNameMatch.group(3)
        else:
            self.internal_name = self.name
            self.name = ""

    # RUN THE TNS SEARCH AND WRAP EACH RESULT SET FOR RENDERING
    (self.sourceResultsList, self.photResultsList,
     self.specResultsList, self.relatedFilesResultsList) = self._query_tns()
    self.sourceResults = list_of_dictionaries(
        log=log,
        listOfDictionaries=self.sourceResultsList
    )
    self.photResults = list_of_dictionaries(
        log=log,
        listOfDictionaries=self.photResultsList
    )
    self.specResults = list_of_dictionaries(
        log=log,
        listOfDictionaries=self.specResultsList
    )
    self.relatedFilesResults = list_of_dictionaries(
        log=log,
        listOfDictionaries=self.relatedFilesResultsList
    )

    return None
@property
def sources(
self):
"""*The results of the search returned as a python list of dictionaries*
**Usage:**
.. code-block:: python
sources = tns.sources
"""
sourceResultsList = []
sourceResultsList[:] = [dict(l) for l in self.sourceResultsList]
return sourceResultsList
@property
@property
def files(
self):
"""*The associated source files*
**Usage:**
.. code-block:: python
sourceFiles = tns.files
"""
relatedFilesResultsList = []
relatedFilesResultsList[:] = [dict(l)
for l in self.relatedFilesResultsList]
return relatedFilesResultsList
@property
def photometry(
self):
"""*The associated source photometry*
**Usage:**
.. code-block:: python
sourcePhotometry = tns.photometry
"""
photResultsList = []
photResultsList[:] = [dict(l) for l in self.photResultsList]
return photResultsList
@property
def url(
self):
"""*The generated URL used for searching of the TNS*
**Usage:**
.. code-block:: python
searchURL = tns.url
"""
return self._searchURL
def csv(
self,
dirPath=None):
"""*Render the results in csv format*
**Key Arguments:**
- ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
**Return:**
- `csvSources` -- the top-level transient data
- `csvPhot` -- all photometry associated with the transients
- `csvSpec` -- all spectral data associated with the transients
- `csvFiles` -- all files associated with the matched transients found on the tns
**Usage:**
To render the results in csv format:
.. code-block:: python
csvSources, csvPhot, csvSpec, csvFiles = tns.csv()
print csvSources
.. code-block:: text
TNSId,TNSName,discoveryName,discSurvey,raSex,decSex,raDeg,decDeg,transRedshift,specType,discMag,discMagFilter,discDate,objectUrl,hostName,hostRedshift,separationArcsec,separationNorthArcsec,separationEastArcsec
2016asf,SN2016asf,ASASSN-16cs,ASAS-SN,06:50:36.73,+31:06:45.36,102.6530,31.1126,0.021,SN Ia,17.1,V-Johnson,2016-03-06 08:09:36,http://wis-tns.weizmann.ac.il/object/2016asf,KUG 0647+311,,0.66,0.65,-0.13
You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`.
.. code-block:: python
tns.csv("~/tns")
.. image:: https://i.imgur.com/BwwqMBg.png
:width: 800px
:alt: csv output
"""
if dirPath:
p = self._file_prefix()
csvSources = self.sourceResults.csv(
filepath=dirPath + "/" + p + "sources.csv")
csvPhot = self.photResults.csv(
filepath=dirPath + "/" + p + "phot.csv")
csvSpec = self.specResults.csv(
filepath=dirPath + "/" + p + "spec.csv")
csvFiles = self.relatedFilesResults.csv(
filepath=dirPath + "/" + p + "relatedFiles.csv")
else:
csvSources = self.sourceResults.csv()
csvPhot = self.photResults.csv()
csvSpec = self.specResults.csv()
csvFiles = self.relatedFilesResults.csv()
return csvSources, csvPhot, csvSpec, csvFiles
def json(
self,
dirPath=None):
"""*Render the results in json format*
**Key Arguments:**
- ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*
**Return:**
- `jsonSources` -- the top-level transient data
- `jsonPhot` -- all photometry associated with the transients
- `jsonSpec` -- all spectral data associated with the transients
- `jsonFiles` -- all files associated with the matched transients found on the tns
**Usage:**
To render the results in json format:
.. code-block:: python
jsonSources, jsonPhot, jsonSpec, jsonFiles = tns.json()
print jsonSources
.. code-block:: text
[
{
"TNSId": "2016asf",
"TNSName": "SN2016asf",
"decDeg": 31.1126,
"decSex": "+31:06:45.36",
"discDate": "2016-03-06 08:09:36",
"discMag": "17.1",
"discMagFilter": "V-Johnson",
"discSurvey": "ASAS-SN",
"discoveryName": "ASASSN-16cs",
"hostName": "KUG 0647+311",
"hostRedshift": null,
"objectUrl": "http://wis-tns.weizmann.ac.il/object/2016asf",
"raDeg": 102.65304166666667,
"raSex": "06:50:36.73",
"separationArcsec": "0.66",
"separationEastArcsec": "-0.13",
"separationNorthArcsec": "0.65",
"specType": "SN Ia",
"transRedshift": "0.021"
}
]
You can save the results to file by passing in a directory path within which to save the files to. The four flavours of data (sources, photometry, spectra and files) are saved to separate files but all data can be assoicated with its transient source using the transient's unique `TNSId`.
.. code-block:: python
tns.json("~/tns")
.. image:: https://i.imgur.com/wAHqARI.png
:width: 800px
:alt: json output
"""
if dirPath:
p = self._file_prefix()
jsonSources = self.sourceResults.json(
filepath=dirPath + "/" + p + "sources.json")
jsonPhot = self.photResults.json(
filepath=dirPath + "/" + p + "phot.json")
jsonSpec = self.specResults.json(
filepath=dirPath + "/" + p + "spec.json")
jsonFiles = self.relatedFilesResults.json(
filepath=dirPath + "/" + p + "relatedFiles.json")
else:
jsonSources = self.sourceResults.json()
jsonPhot = self.photResults.json()
jsonSpec = self.specResults.json()
jsonFiles = self.relatedFilesResults.json()
return jsonSources, jsonPhot, jsonSpec, jsonFiles
def yaml(
        self,
        dirPath=None):
    """*Render the results in yaml format*

    **Key Arguments:**
        - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*

    **Return:**
        - `yamlSources` -- the top-level transient data
        - `yamlPhot` -- all photometry associated with the transients
        - `yamlSpec` -- all spectral data associated with the transients
        - `yamlFiles` -- all files associated with the matched transients found on the tns

    **Usage:**

        To render the results in yaml format:

        .. code-block:: python

            yamlSources, yamlPhot, yamlSpec, yamlFiles = tns.yaml()

        Pass a directory path via ``dirPath`` to also write the four
        result sets to separate yaml files in that directory. Rows in
        the separate files can be cross-matched against their transient
        source via the transient's unique `TNSId`.
    """
    # THE FOUR RESULT SETS AND THE FILE SUFFIX EACH IS SAVED UNDER
    resultSets = [
        (self.sourceResults, "sources.yaml"),
        (self.photResults, "phot.yaml"),
        (self.specResults, "spec.yaml"),
        (self.relatedFilesResults, "relatedFiles.yaml")
    ]
    if dirPath:
        # ALSO WRITE EACH RESULT SET TO ITS OWN FILE ON DISK
        prefix = self._file_prefix()
        rendered = [rs.yaml(filepath=dirPath + "/" + prefix + suffix)
                    for rs, suffix in resultSets]
    else:
        rendered = [rs.yaml() for rs, suffix in resultSets]
    yamlSources, yamlPhot, yamlSpec, yamlFiles = rendered
    return yamlSources, yamlPhot, yamlSpec, yamlFiles
def markdown(
        self,
        dirPath=None):
    """*Render the results in markdown table format*

    **Key Arguments:**
        - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*

    **Return:**
        - `markdownSources` -- the top-level transient data
        - `markdownPhot` -- all photometry associated with the transients
        - `markdownSpec` -- all spectral data associated with the transients
        - `markdownFiles` -- all files associated with the matched transients found on the tns

    **Usage:**

        To render the results as markdown tables:

        .. code-block:: python

            markdownSources, markdownPhot, markdownSpec, markdownFiles = tns.markdown()

        Pass a directory path via ``dirPath`` to also write the four
        result sets to separate markdown files in that directory. Rows
        in the separate files can be cross-matched against their
        transient source via the transient's unique `TNSId`.
    """
    # THE FOUR RESULT SETS AND THE FILE SUFFIX EACH IS SAVED UNDER
    resultSets = [
        (self.sourceResults, "sources.md"),
        (self.photResults, "phot.md"),
        (self.specResults, "spec.md"),
        (self.relatedFilesResults, "relatedFiles.md")
    ]
    if dirPath:
        # ALSO WRITE EACH RESULT SET TO ITS OWN FILE ON DISK
        prefix = self._file_prefix()
        rendered = [rs.markdown(filepath=dirPath + "/" + prefix + suffix)
                    for rs, suffix in resultSets]
    else:
        rendered = [rs.markdown() for rs, suffix in resultSets]
    markdownSources, markdownPhot, markdownSpec, markdownFiles = rendered
    return markdownSources, markdownPhot, markdownSpec, markdownFiles
def table(
        self,
        dirPath=None):
    """*Render the results as ascii tables*

    **Key Arguments:**
        - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*

    **Return:**
        - `tableSources` -- the top-level transient data
        - `tablePhot` -- all photometry associated with the transients
        - `tableSpec` -- all spectral data associated with the transients
        - `tableFiles` -- all files associated with the matched transients found on the tns

    **Usage:**

        To render the results as ascii tables:

        .. code-block:: python

            tableSources, tablePhot, tableSpec, tableFiles = tns.table()

        Pass a directory path via ``dirPath`` to also write the four
        result sets to separate ascii files in that directory. Rows in
        the separate files can be cross-matched against their transient
        source via the transient's unique `TNSId`.
    """
    # THE FOUR RESULT SETS AND THE FILE SUFFIX EACH IS SAVED UNDER
    resultSets = [
        (self.sourceResults, "sources.ascii"),
        (self.photResults, "phot.ascii"),
        (self.specResults, "spec.ascii"),
        (self.relatedFilesResults, "relatedFiles.ascii")
    ]
    if dirPath:
        # ALSO WRITE EACH RESULT SET TO ITS OWN FILE ON DISK
        prefix = self._file_prefix()
        rendered = [rs.table(filepath=dirPath + "/" + prefix + suffix)
                    for rs, suffix in resultSets]
    else:
        rendered = [rs.table() for rs, suffix in resultSets]
    tableSources, tablePhot, tableSpec, tableFiles = rendered
    return tableSources, tablePhot, tableSpec, tableFiles
def mysql(
        self,
        tableNamePrefix="TNS",
        dirPath=None):
    """*Render the results as MySQL Insert statements*

    **Key Arguments:**
        - ``tableNamePrefix`` -- the prefix for the database table names to assign the insert statements to. Default *TNS*.
        - ``dirPath`` -- the path to the directory to save the rendered results to. Default *None*

    **Return:**
        - `mysqlSources` -- the top-level transient data
        - `mysqlPhot` -- all photometry associated with the transients
        - `mysqlSpec` -- all spectral data associated with the transients
        - `mysqlFiles` -- all files associated with the matched transients found on the tns

    **Usage:**

        To render the results as mysql insert statements:

        .. code-block:: python

            mysqlSources, mysqlPhot, mysqlSpec, mysqlFiles = tns.mysql("TNS")

        Pass a directory path via ``dirPath`` to also write the four sets
        of insert statements to separate ``.sql`` files in that directory
        (each prefixed with a matching ``CREATE TABLE`` statement so the
        files can be replayed against an empty database). Rows in the
        separate files can be cross-matched against their transient
        source via the transient's unique `TNSId`.
    """
    if dirPath:
        # WRITING TO FILE - BUILD A CREATE STATEMENT FOR EACH TABLE SO THE
        # SQL FILES ARE SELF-CONTAINED. THE `%(tableNamePrefix)s`
        # PLACEHOLDERS ARE FILLED VIA `% locals()`.
        p = self._file_prefix()
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_sources` (
`primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
`TNSId` varchar(20) NOT NULL,
`TNSName` varchar(20) DEFAULT NULL,
`dateCreated` datetime DEFAULT NULL,
`decDeg` double DEFAULT NULL,
`decSex` varchar(45) DEFAULT NULL,
`discDate` datetime DEFAULT NULL,
`discMag` double DEFAULT NULL,
`discMagFilter` varchar(45) DEFAULT NULL,
`discSurvey` varchar(100) DEFAULT NULL,
`discoveryName` varchar(100) DEFAULT NULL,
`objectUrl` varchar(200) DEFAULT NULL,
`raDeg` double DEFAULT NULL,
`raSex` varchar(45) DEFAULT NULL,
`specType` varchar(100) DEFAULT NULL,
`transRedshift` double DEFAULT NULL,
`updated` tinyint(4) DEFAULT '0',
`dateLastModified` datetime DEFAULT NULL,
`hostName` VARCHAR(100) NULL DEFAULT NULL,
`hostRedshift` DOUBLE NULL DEFAULT NULL,
`survey` VARCHAR(100) NULL DEFAULT NULL,
PRIMARY KEY (`primaryId`),
UNIQUE KEY `tnsid` (`TNSId`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlSources = self.sourceResults.mysql(
            tableNamePrefix + "_sources", filepath=dirPath + "/" + p + "sources.sql", createStatement=createStatement)
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_photometry` (
`primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
`TNSId` varchar(20) NOT NULL,
`dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
`exptime` double DEFAULT NULL,
`filter` varchar(100) DEFAULT NULL,
`limitingMag` tinyint(4) DEFAULT NULL,
`mag` double DEFAULT NULL,
`magErr` double DEFAULT NULL,
`magUnit` varchar(100) DEFAULT NULL,
`objectName` varchar(100) DEFAULT NULL,
`obsdate` datetime DEFAULT NULL,
`reportAddedDate` datetime DEFAULT NULL,
`suggestedType` varchar(100) DEFAULT NULL,
`survey` varchar(100) DEFAULT NULL,
`telescope` varchar(100) DEFAULT NULL,
`updated` tinyint(4) DEFAULT '0',
`dateLastModified` datetime DEFAULT NULL,
`remarks` VARCHAR(800) NULL DEFAULT NULL,
`sourceComment` VARCHAR(800) NULL DEFAULT NULL,
PRIMARY KEY (`primaryId`),
UNIQUE KEY `tnsid_survey_obsdate` (`TNSId`,`survey`,`obsdate`),
UNIQUE INDEX `u_tnsid_survey_obsdate` (`TNSId` ASC, `survey` ASC, `obsdate` ASC),
UNIQUE INDEX `u_tnsid_obsdate_objname` (`TNSId` ASC, `obsdate` ASC, `objectName` ASC)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlPhot = self.photResults.mysql(
            tableNamePrefix + "_photometry", filepath=dirPath + "/" + p + "phot.sql", createStatement=createStatement)
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_spectra` (
`primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
`TNSId` varchar(45) NOT NULL,
`TNSuser` varchar(45) DEFAULT NULL,
`dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
`exptime` double DEFAULT NULL,
`obsdate` datetime DEFAULT NULL,
`reportAddedDate` datetime DEFAULT NULL,
`specType` varchar(100) DEFAULT NULL,
`survey` varchar(100) DEFAULT NULL,
`telescope` varchar(100) DEFAULT NULL,
`transRedshift` double DEFAULT NULL,
`updated` tinyint(4) DEFAULT '0',
`dateLastModified` datetime DEFAULT NULL,
`remarks` VARCHAR(800) NULL DEFAULT NULL,
`sourceComment` VARCHAR(800) NULL DEFAULT NULL,
PRIMARY KEY (`primaryId`),
UNIQUE KEY `u_tnsid_survey_obsdate` (`TNSId`,`survey`,`obsdate`),
UNIQUE KEY `u_id_user_obsdate` (`TNSId`,`TNSuser`,`obsdate`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlSpec = self.specResults.mysql(
            tableNamePrefix + "_spectra", filepath=dirPath + "/" + p + "spec.sql", createStatement=createStatement)
        createStatement = """
CREATE TABLE `%(tableNamePrefix)s_files` (
`primaryId` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'An internal counter',
`TNSId` varchar(100) NOT NULL,
`dateCreated` datetime DEFAULT CURRENT_TIMESTAMP,
`dateObs` datetime DEFAULT NULL,
`filename` varchar(200) DEFAULT NULL,
`spec1phot2` tinyint(4) DEFAULT NULL,
`url` varchar(800) DEFAULT NULL,
`updated` tinyint(4) DEFAULT '0',
`dateLastModified` datetime DEFAULT NULL,
`comment` VARCHAR(800) NULL DEFAULT NULL,
PRIMARY KEY (`primaryId`),
UNIQUE KEY `tnsid_url` (`TNSId`,`url`)
) ENGINE=MyISAM AUTO_INCREMENT=0 DEFAULT CHARSET=latin1;
""" % locals()
        mysqlFiles = self.relatedFilesResults.mysql(
            tableNamePrefix + "_files", filepath=dirPath + "/" + p + "relatedFiles.sql", createStatement=createStatement)
    else:
        # IN-MEMORY ONLY - NO CREATE STATEMENTS NEEDED
        mysqlSources = self.sourceResults.mysql(
            tableNamePrefix + "_sources")
        mysqlPhot = self.photResults.mysql(tableNamePrefix + "_photometry")
        mysqlSpec = self.specResults.mysql(tableNamePrefix + "_spectra")
        mysqlFiles = self.relatedFilesResults.mysql(
            tableNamePrefix + "_files")
    return mysqlSources, mysqlPhot, mysqlSpec, mysqlFiles
def _query_tns(self):
"""
*determine how to query the TNS, send query and parse the results*
**Return:**
- ``results`` -- a list of dictionaries (one dictionary for each result set returned from the TNS)
"""
self.log.info('starting the ``get`` method')
sourceTable = []
photoTable = []
specTable = []
relatedFilesTable = []
# THIS stop IS TO KEEP TRACK OF THE TNS PAGINATION IF MANY RESULT PAGES
# ARE RETURNED
stop = False
sourceCount = 0
while not stop:
status_code, content, self._searchURL = self._get_tns_search_results()
if status_code != 200:
self.log.error(
'cound not get the search reuslts from the TNS, HTML error code %(status_code)s ' % locals())
return None
if "No results found" in content:
print "No results found"
return sourceTable, photoTable, specTable, relatedFilesTable
if self._parse_transient_rows(content, True) < self.batchSize:
stop = True
else:
self.page += 1
thisPage = self.page
print "Downloaded %(thisPage)s page(s) from the TNS. %(sourceCount)s transients parsed so far." % locals()
sourceCount += self.batchSize
print "\t" + self._searchURL
timesleep.sleep(1)
# PARSE ALL ROWS RETURNED
for transientRow in self._parse_transient_rows(content):
# TOP LEVEL DISCOVERY CONTENT
sourceContent = transientRow.group()
discInfo, TNSId = self._parse_discovery_information(
sourceContent)
sourceTable.append(discInfo)
# PHOTOMETERY
phot, relatedFiles = self._parse_photometry_data(
sourceContent, TNSId)
photoTable += phot
relatedFilesTable += relatedFiles
# SPECTRA
spec, relatedFiles = self._parse_spectral_data(
sourceContent, TNSId)
specTable += spec
relatedFilesTable += relatedFiles
# SORT BY SEPARATION FROM THE SEARCH COORDINATES
try:
sourceTable = sorted(sourceTable, key=itemgetter(
'separationArcsec'), reverse=False)
except:
pass
self.log.info('completed the ``get`` method')
return sourceTable, photoTable, specTable, relatedFilesTable
def _get_tns_search_results(
        self):
    """
    *query the tns and return the response*

    **Return:**
        - ``status_code`` -- the HTTP status code of the response
        - ``content`` -- the HTML content of the results page
        - ``url`` -- the fully-resolved search URL that was queried

    **Raises:**
        - ``requests.exceptions.RequestException`` -- if the HTTP request
          itself fails (connection error, timeout, too many redirects ...)
    """
    self.log.info('starting the ``_get_tns_search_results`` method')
    try:
        response = requests.get(
            url="http://wis-tns.weizmann.ac.il/search",
            params={
                "page": self.page,
                "ra": self.ra,
                "decl": self.dec,
                "radius": self.radiusArcsec,
                "name": self.name,
                "internal_name": self.internal_name,
                "date_start[date]": self.start,
                "date_end[date]": self.end,
                "num_page": self.batchSize,
                "display[redshift]": "1",
                "display[hostname]": "1",
                "display[host_redshift]": "1",
                "display[source_group_name]": "1",
                "display[internal_name]": "1",
                "display[spectra_count]": "1",
                "display[discoverymag]": "1",
                "display[discmagfilter]": "1",
                "display[discoverydate]": "1",
                "display[discoverer]": "1",
                "display[sources]": "1",
                "display[bibcode]": "1",
            },
        )
    except requests.exceptions.RequestException:
        # BUG FIX: previously this branch only printed a warning and then
        # fell through to reference the unbound `response` variable,
        # raising a confusing NameError. Log and re-raise the original,
        # informative exception instead.
        self.log.error('HTTP Request failed')
        raise
    self.log.info('completed the ``_get_tns_search_results`` method')
    return response.status_code, response.content, response.url
def _file_prefix(
self):
"""*Generate a file prefix based on the type of search for saving files to disk*
**Return:**
- ``prefix`` -- the file prefix
"""
self.log.info('starting the ``_file_prefix`` method')
if self.ra:
now = datetime.now()
prefix = now.strftime("%Y%m%dt%H%M%S%f_tns_conesearch_")
elif self.name:
prefix = self.name + "_tns_conesearch_"
elif self.internal_name:
prefix = self.internal_name + "_tns_conesearch_"
elif self.discInLastDays:
discInLastDays = str(self.discInLastDays)
now = datetime.now()
prefix = now.strftime(
discInLastDays + "d_since_%Y%m%d_tns_conesearch_")
self.log.info('completed the ``_file_prefix`` method')
return prefix
def _parse_transient_rows(
self,
content,
count=False):
"""* parse transient rows from the TNS result page content*
**Key Arguments:**
- ``content`` -- the content from the TNS results page.
- ``count`` -- return only the number of rows
**Return:**
- ``transientRows``
"""
self.log.info('starting the ``_parse_transient_rows`` method')
regexForRow = r"""\n([^\n]*?<a href="/object/.*?)(?=\n[^\n]*?<a href="/object/|<\!\-\- /\.section, /#content \-\->)"""
if count:
# A SINGLE SOURCE BLOCK
matchedSources = re.findall(
regexForRow,
content,
flags=re.S # re.S
)
return len(matchedSources)
# A SINGLE SOURCE BLOCK
matchedSources = re.finditer(
regexForRow,
content,
flags=re.S # re.S
)
self.log.info('completed the ``_parse_transient_rows`` method')
return matchedSources
def _parse_discovery_information(
        self,
        content):
    """* parse discovery information from one row on the TNS results page*

    **Key Arguments:**
        - ``content`` -- a table row from the TNS results page.

    **Return:**
        - ``discoveryData`` -- ordered dictionary of results (first matched row only)
        - ``TNSId`` -- the unique TNS id for the transient
    """
    self.log.info('starting the ``_parse_discovery_information`` method')
    # ASTROCALC UNIT CONVERTER OBJECT
    converter = unit_conversion(
        log=self.log
    )
    # CAPTURE THE DISCOVERY CELLS OF THE ROW INTO NAMED GROUPS
    matches = re.finditer(
        r"""<tr class="row-.*?"><td class="cell-id">(?P<tnsId>\d*?)</td><td class="cell-name"><a href="(?P<objectUrl>.*?)">(?P<TNSName>.*?)</a></td><td class="cell-.*?<td class="cell-ra">(?P<raSex>.*?)</td><td class="cell-decl">(?P<decSex>.*?)</td><td class="cell-ot_name">(?P<specType>.*?)</td><td class="cell-redshift">(?P<transRedshift>.*?)</td><td class="cell-hostname">(?P<hostName>.*?)</td><td class="cell-host_redshift">(?P<hostRedshift>.*?)</td><td class="cell-source_group_name">(?P<discSurvey>.*?)</td>.*?<td class="cell-internal_name">(<a.*?>)?(?P<discoveryName>.*?)(</a>)?</td>.*?<td class="cell-discoverymag">(?P<discMag>.*?)</td><td class="cell-disc_filter_name">(?P<discMagFilter>.*?)</td><td class="cell-discoverydate">(?P<discDate>.*?)</td><td class="cell-discoverer">(?P<sender>.*?)</td>.*?</tr>""",
        content,
        flags=0  # re.S
    )
    discoveryData = []
    for match in matches:
        row = match.groupdict()
        # TIDY THE RAW CAPTURES - STRIP WHITESPACE AND NULL EMPTY VALUES
        for k, v in row.iteritems():
            row[k] = v.strip()
            if len(v) == 0:
                row[k] = None
        # A ZERO REDSHIFT MEANS 'UNKNOWN' ON THE TNS - NULL IT.
        # BUG FIX: the captured values are strings, so the original
        # `== 0` comparison could never be true; also match "0".
        if row["transRedshift"] in (0, "0"):
            row["transRedshift"] = None
        # NAMES STARTING WITH A YEAR DIGIT GET THE `SN` PREFIX
        if row["TNSName"][0] in ["1", "2"]:
            row["TNSName"] = "SN" + row["TNSName"]
        row["objectUrl"] = "http://wis-tns.weizmann.ac.il" + \
            row["objectUrl"]
        # CONVERT COORDINATES TO DECIMAL DEGREES
        row["raDeg"] = converter.ra_sexegesimal_to_decimal(
            ra=row["raSex"]
        )
        row["decDeg"] = converter.dec_sexegesimal_to_decimal(
            dec=row["decSex"]
        )
        # IF THIS IS A COORDINATE SEARCH, ADD SEPARATION FROM
        # ORIGINAL QUERY COORDINATES
        if self.ra:
            # CALCULATE SEPARATION IN ARCSEC
            from astrocalc.coords import separations
            calculator = separations(
                log=self.log,
                ra1=self.ra,
                dec1=self.dec,
                ra2=row["raDeg"],
                dec2=row["decDeg"],
            )
            angularSeparation, north, east = calculator.get()
            row["separationArcsec"] = angularSeparation
            row["separationNorthArcsec"] = north
            row["separationEastArcsec"] = east
        # FALL BACK TO THE SENDER WHEN NO DISCOVERY SURVEY IS GIVEN
        if not row["discSurvey"]:
            row["survey"] = row["sender"]
        del row["sender"]
        del row["tnsId"]
        row["TNSName"] = row["TNSName"].replace(" ", "")
        row["TNSId"] = row["TNSName"].replace(
            "SN", "").replace("AT", "")
        TNSId = row["TNSId"]
        # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
        orow = collections.OrderedDict()
        keyOrder = ["TNSId", "TNSName", "discoveryName", "discSurvey", "raSex", "decSex", "raDeg", "decDeg",
                    "transRedshift", "specType", "discMag", "discMagFilter", "discDate", "objectUrl", "hostName", "hostRedshift", "separationArcsec", "separationNorthArcsec", "separationEastArcsec"]
        for k, v in row.iteritems():
            if k not in keyOrder:
                keyOrder.append(k)
        for k in keyOrder:
            try:
                orow[k] = row[k]
            except KeyError:
                self.log.info(
                    "`%(k)s` not found in the source data for %(TNSId)s" % locals())
                pass
        # BUG FIX: append the ordered dictionary (was `row`, which threw
        # away the ordering work above - the other parsers append `orow`)
        discoveryData.append(orow)
    self.log.info('completed the ``_parse_discovery_information`` method')
    # NOTE(review): only the first matched row is returned; if `content`
    # contains no matching row this raises IndexError -- confirm callers
    # always pass a single valid transient row
    return discoveryData[0], TNSId
def _parse_photometry_data(
        self,
        content,
        TNSId):
    """*parse photometry data from a row in the tns results content*

    **Key Arguments:**
        - ``content`` -- a table row from the TNS results page
        - ``TNSId`` -- the tns id of the transient

    **Return:**
        - ``photData`` -- a list of ordered dictionaries of the photometry data
        - ``relatedFilesTable`` -- a list of ordered dictionaries of transient photometry related files
    """
    self.log.info('starting the ``_parse_photometry_data`` method')
    photData = []
    relatedFilesTable = []
    # AT REPORT BLOCK - EVERYTHING UP TO THE CLASSIFICATION REPORTS (OR
    # THE END OF THE CONTENT)
    ATBlock = re.search(
        r"""<tr class=[^\n]*?AT reportings.*?(?=<tr class=[^\n]*?Classification reportings|$)""",
        content,
        flags=re.S  # re.S
    )
    if ATBlock:
        ATBlock = ATBlock.group()
        # ONE MATCH PER AT REPORT WITHIN THE BLOCK
        reports = re.finditer(
            r"""<tr class="row-[^"]*"><td class="cell-id">.*?</table>""",
            ATBlock,
            flags=re.S  # re.S
        )
        relatedFiles = self._parse_related_files(ATBlock)
        for r in reports:
            # REPORT-LEVEL (HEADER) METADATA SHARED BY EVERY PHOTOMETRY
            # POINT IN THIS REPORT
            header = re.search(
                r"""<tr class="row[^"]*".*?time_received">(?P<reportAddedDate>[^<]*).*?user_name">(?P<sender>[^<]*).*?reporter_name">(?P<reporters>[^<]*).*?source_group_name">(?P<surveyGroup>[^<]*).*?ra">(?P<ra>[^<]*).*?decl">(?P<dec>[^<]*).*?discovery_date">(?P<obsDate>[^<]*).*?flux">(?P<mag>[^<]*).*?filter_name">(?P<magFilter>[^<]*).*?related_files">(?P<relatedFiles>[^<]*).*?type_name">(?P<suggestedType>[^<]*).*?hostname">(?P<hostName>[^<]*).*?host_redshift">(?P<hostRedshift>[^<]*).*?internal_name">(?P<objectName>[^<]*).*?groups">(?P<survey>[^<]*).*?remarks">(?P<sourceComment>[^<]*)""",
                r.group(),
                flags=0  # re.S
            )
            if not header:
                # BUG FIX: the original wrapped `header.groupdict()` in a
                # bare try/except that printed the report and carried on
                # with `header = None`, crashing on the next line. Skip
                # unparseable reports instead (consistent with
                # `_parse_spectral_data`).
                continue
            header = header.groupdict()
            header["TNSId"] = TNSId
            # DROP HEADER FIELDS THAT DUPLICATE SOURCE-LEVEL DATA
            del header["reporters"]
            del header["surveyGroup"]
            del header["hostName"]
            del header["hostRedshift"]
            del header["mag"]
            del header["magFilter"]
            del header["obsDate"]
            del header["ra"]
            del header["dec"]
            if not self.comments:
                del header['sourceComment']
            else:
                # FLATTEN MULTI-LINE COMMENTS AND CAP THEIR LENGTH
                theseComments = header[
                    "sourceComment"].split("\n")
                header["sourceComment"] = ""
                for c in theseComments:
                    header["sourceComment"] += " " + c.strip()
                header["sourceComment"] = header[
                    "sourceComment"].strip().replace('"', "'")[0:750]
            # INDIVIDUAL PHOTOMETRY POINTS WITHIN THIS REPORT
            phot = re.finditer(
                r"""<tr class="row\-[^"]*".*?obsdate">(?P<obsdate>[^<]*).*?flux">(?P<mag>[^<]*).*?fluxerr">(?P<magErr>[^<]*).*?limflux">(?P<limitingMag>[^<]*).*?unit_name">(?P<magUnit>[^<]*).*?filter_name">(?P<filter>[^<]*).*?tel_inst">(?P<telescope>[^<]*).*?exptime">(?P<exptime>[^<]*).*?observer">(?P<observer>[^<]*).*?-remarks">(?P<remarks>[^<]*)""",
                r.group(),
                flags=0  # re.S
            )
            # ONLY ATTACH THE REPORT'S RELATED FILES TO THE FIRST
            # PHOTOMETRY POINT THAT REFERENCES THEM
            filesAppended = False
            for p in phot:
                p = p.groupdict()
                del p["observer"]
                if p["limitingMag"] and not p["mag"]:
                    # A NON-DETECTION - RECORD THE LIMIT AS THE MAG
                    p["mag"] = p["limitingMag"]
                    p["limitingMag"] = 1
                    p["remarks"] = p["remarks"].replace(
                        "[Last non detection]", "")
                else:
                    p["limitingMag"] = 0
                if not self.comments:
                    del p["remarks"]
                p.update(header)
                if p["relatedFiles"] and filesAppended == False:
                    filesAppended = True
                    for f in relatedFiles:
                        # ORDER THE DICTIONARY FOR THIS ROW OF
                        # RESULTS
                        thisFile = collections.OrderedDict()
                        thisFile["TNSId"] = TNSId
                        thisFile["filename"] = f[
                            "filepath"].split("/")[-1]
                        thisFile["url"] = f["filepath"]
                        if self.comments:
                            thisFile["comment"] = f[
                                "fileComment"].replace("\n", " ").strip().replace('"', "'")[0:750]
                        thisFile["dateObs"] = p["obsdate"]
                        thisFile["spec1phot2"] = 2
                        relatedFilesTable.append(thisFile)
                # FALL BACK TO THE SENDER WHEN NO SURVEY/OBJECT NAME GIVEN
                if not p["survey"] and not p["objectName"]:
                    p["survey"] = p["sender"]
                del p["relatedFiles"]
                del p["sender"]
                # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
                orow = collections.OrderedDict()
                keyOrder = ["TNSId", "survey", "obsdate", "filter", "limitingMag", "mag", "magErr",
                            "magUnit", "suggestedType", "telescope", "exptime", "reportAddedDate"]
                for k, v in p.iteritems():
                    if k not in keyOrder:
                        keyOrder.append(k)
                for k in keyOrder:
                    try:
                        orow[k] = p[k]
                    except KeyError:
                        self.log.info(
                            "`%(k)s` not found in the source data for %(TNSId)s" % locals())
                        pass
                photData.append(orow)
    self.log.info('completed the ``_parse_photometry_data`` method')
    return photData, relatedFilesTable
def _parse_related_files(
self,
content):
"""*parse the contents for related files URLs and comments*
**Key Arguments:**
- ``content`` -- the content to parse.
**Return:**
- ``relatedFiles`` -- a list of dictionaries of transient related files
"""
self.log.info('starting the ``_parse_related_files`` method')
relatedFilesList = re.finditer(
r"""<td class="cell-filename">.*?href="(?P<filepath>[^"]*).*?remarks">(?P<fileComment>[^<]*)""",
content,
flags=0 # re.S
)
relatedFiles = []
for f in relatedFilesList:
f = f.groupdict()
relatedFiles.append(f)
self.log.info('completed the ``_parse_related_files`` method')
return relatedFiles
def _parse_spectral_data(
        self,
        content,
        TNSId):
    """*parse spectra data from a row in the tns results content*

    **Key Arguments:**
        - ``content`` -- a table row from the TNS results page
        - ``TNSId`` -- the tns id of the transient

    **Return:**
        - ``specData`` -- a list of ordered dictionaries of the spectral data
        - ``relatedFilesTable`` -- a list of ordered dictionaries of transient spectrum related files
    """
    self.log.info('starting the ``_parse_spectral_data`` method')
    specData = []
    relatedFilesTable = []
    # CLASSIFICATION BLOCK - EVERYTHING FROM THE CLASSIFICATION REPORTS
    # MARKER TO THE END OF THE CONTENT
    classBlock = re.search(
        r"""<tr class=[^\n]*?Classification reportings.*$""",
        content,
        flags=re.S  # re.S
    )
    if classBlock:
        classBlock = classBlock.group()
        # ONE MATCH PER CLASSIFICATION REPORT WITHIN THE BLOCK
        reports = re.finditer(
            r"""<tr class="row-[^"]*"><td class="cell-id">.*?</tbody>\s*</table>\s*</div></td> </tr>\s*</tbody>\s*</table>\s*</div></td> </tr>""",
            classBlock,
            flags=re.S  #
        )
        relatedFiles = self._parse_related_files(classBlock)
        for r in reports:
            # REPORT-LEVEL (HEADER) METADATA SHARED BY EVERY SPECTRUM IN
            # THIS REPORT
            header = re.search(
                r"""<tr class="row.*?time_received">(?P<reportAddedDate>[^<]*).*?user_name">(?P<TNSuser>[^<]*).*?classifier_name">(?P<reporters>[^<]*).*?source_group_name">(?P<survey>[^<]*).*?-type">(?P<specType>[^<]*).*?-redshift">(?P<transRedshift>[^<]*).*?-related_files">(?P<relatedFiles>[^<]*).*?-groups">(?P<surveyGroup>[^<]*).*?-remarks">(?P<sourceComment>[^<]*)</td>""",
                r.group(),
                flags=re.S  # re.S
            )
            # SKIP REPORTS THAT DO NOT MATCH THE EXPECTED LAYOUT
            if not header:
                continue
            header = header.groupdict()
            header["TNSId"] = TNSId
            # DROP HEADER FIELDS THAT DUPLICATE SOURCE-LEVEL DATA
            del header["reporters"]
            del header["surveyGroup"]
            del header["survey"]
            if not self.comments:
                del header['sourceComment']
            else:
                # FLATTEN MULTI-LINE COMMENTS AND CAP THEIR LENGTH
                theseComments = header[
                    "sourceComment"].split("\n")
                header["sourceComment"] = ""
                for c in theseComments:
                    header["sourceComment"] += " " + c.strip()
                header["sourceComment"] = header[
                    "sourceComment"].strip().replace('"', "'")[0:750]
            # INDIVIDUAL SPECTRA WITHIN THIS REPORT (INCLUDING OPTIONAL
            # ASCII AND FITS FILE LINKS)
            spec = re.finditer(
                r"""<tr class="class-results-.*?-obsdate">(?P<obsdate>[^<]*).*?-tel_inst">(?P<telescope>[^<]*).*?-exptime">(?P<exptime>[^<]*).*?-observer">(?P<sender>[^<]*).*?-reducer">(?P<reducer>[^<]*).*?-source_group_name">(?P<survey>[^<]*).*?-asciifile">(.*?<a href="(?P<filepath>[^"]*)".*?</a>)?.*?-fitsfile">(.*?<a href="(?P<fitsFilepath>[^"]*)".*?</a>)?.*?-groups">(?P<surveyGroup>[^<]*).*?-remarks">(?P<remarks>[^<]*)""",
                r.group(),
                flags=0  # re.S
            )
            # ONLY ATTACH THE REPORT'S RELATED FILES TO THE FIRST SPECTRUM
            # THAT REFERENCES THEM
            filesAppended = False
            for s in spec:
                s = s.groupdict()
                del s["sender"]
                del s["surveyGroup"]
                del s["reducer"]
                if not self.comments:
                    del s["remarks"]
                else:
                    s["remarks"] = s["remarks"].replace('"', "'")[0:750]
                s.update(header)
                if s["relatedFiles"] and filesAppended == False:
                    filesAppended = True
                    for f in relatedFiles:
                        # ORDER THE DICTIONARY FOR THIS ROW OF
                        # RESULTS
                        thisFile = collections.OrderedDict()
                        thisFile["TNSId"] = TNSId
                        thisFile["filename"] = f[
                            "filepath"].split("/")[-1]
                        thisFile["url"] = f["filepath"]
                        if self.comments:
                            thisFile["comment"] = f[
                                "fileComment"].replace("\n", " ").strip()
                        thisFile["dateObs"] = s["obsdate"]
                        thisFile["spec1phot2"] = 1
                        relatedFilesTable.append(thisFile)
                # ALSO RECORD THE SPECTRUM'S OWN ASCII/FITS FILE LINKS
                for ffile in [s["filepath"], s["fitsFilepath"]]:
                    if ffile:
                        # ORDER THE DICTIONARY FOR THIS ROW OF
                        # RESULTS
                        thisFile = collections.OrderedDict()
                        thisFile["TNSId"] = TNSId
                        thisFile["filename"] = ffile.split(
                            "/")[-1]
                        thisFile["url"] = ffile
                        if self.comments:
                            thisFile["comment"] = ""
                        thisFile["dateObs"] = s["obsdate"]
                        thisFile["spec1phot2"] = 1
                        relatedFilesTable.append(thisFile)
                del s["filepath"]
                del s["fitsFilepath"]
                del s["relatedFiles"]
                # ORDER THE DICTIONARY FOR THIS ROW OF RESULTS
                orow = collections.OrderedDict()
                keyOrder = ["TNSId", "survey", "obsdate", "specType", "transRedshift",
                            "telescope", "exptime", "reportAddedDate", "TNSuser"]
                for k, v in s.iteritems():
                    if k not in keyOrder:
                        keyOrder.append(k)
                for k in keyOrder:
                    try:
                        orow[k] = s[k]
                    except:
                        self.log.info(
                            "`%(k)s` not found in the source data for %(TNSId)s" % locals())
                        pass
                specData.append(orow)
    self.log.info('completed the ``_parse_spectral_data`` method')
    return specData, relatedFilesTable
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.