repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
hobson/pug-ann | pug/ann/example.py | oneday_weather_forecast | python | def oneday_weather_forecast(
location='Portland, OR',
inputs=('Min Temperature', 'Mean Temperature', 'Max Temperature', 'Max Humidity', 'Mean Humidity', 'Min Humidity', 'Max Sea Level Pressure', 'Mean Sea Level Pressure', 'Min Sea Level Pressure', 'Wind Direction'),
outputs=('Min Temperature', 'Mean Temperature', 'Max Temperature', 'Max Humidity'),
date=None,
epochs=200,
delays=(1, 2, 3, 4),
num_years=4,
use_cache=False,
verbosity=1,
):
date = make_date(date or datetime.datetime.now().date())
num_years = int(num_years or 10)
years = range(date.year - num_years, date.year + 1)
df = weather.daily(location, years=years, use_cache=use_cache, verbosity=verbosity).sort()
# because up-to-date weather history was cached above, can use that cache, regardless of use_cache kwarg
trainer, df = train_weather_predictor(
location,
years=years,
delays=delays,
inputs=inputs,
outputs=outputs,
epochs=epochs,
verbosity=verbosity,
use_cache=True,
)
nn = trainer.module
forecast = {'trainer': trainer}
yesterday = dict(zip(outputs, nn.activate(trainer.ds['input'][-2])))
forecast['yesterday'] = update_dict(yesterday, {'date': df.index[-2].date()})
today = dict(zip(outputs, nn.activate(trainer.ds['input'][-1])))
forecast['today'] = update_dict(today, {'date': df.index[-1].date()})
ds = util.input_dataset_from_dataframe(df[-max(delays):], delays=delays, inputs=inputs, normalize=False, verbosity=0)
tomorrow = dict(zip(outputs, nn.activate(ds['input'][-1])))
forecast['tomorrow'] = update_dict(tomorrow, {'date': (df.index[-1] + datetime.timedelta(1)).date()})
return forecast | Provide a weather forecast for tomorrow based on historical weather at that location | train | https://github.com/hobson/pug-ann/blob/8a4d7103a744d15b4a737fc0f9a84c823973e0ec/pug/ann/example.py#L82-L122 | [
"def train_weather_predictor(\n location='Portland, OR',\n years=range(2013, 2016,),\n delays=(1, 2, 3),\n inputs=('Min Temperature', 'Max Temperature', 'Min Sea Level Pressure', u'Max Sea Level Pressure', 'WindDirDegrees',),\n outputs=(u'Max TemperatureF',),\n N_hidden=6,\n epochs=30,\n use_cache=False,\n verbosity=2,\n ):\n \"\"\"Train a neural nerual net to predict the weather for tomorrow based on past weather.\n\n Builds a linear single hidden layer neural net (multi-dimensional nonlinear regression).\n The dataset is a basic SupervisedDataSet rather than a SequentialDataSet, so the training set\n and the test set are sampled randomly. This means that historical data for one sample (the delayed\n input vector) will likely be used as the target for other samples.\n\n Uses CSVs scraped from wunderground (without an api key) to get daily weather for the years indicated.\n\n Arguments:\n location (str): City and state in standard US postal service format: \"City, ST\"\n alternatively an airport code like \"PDX or LAX\"\n delays (list of int): sample delays to use for the input tapped delay line.\n Positive and negative values are treated the same as sample counts into the past.\n default: [1, 2, 3], in z-transform notation: z^-1 + z^-2 + z^-3\n years (int or list of int): list of 4-digit years to download weather from wunderground\n inputs (list of int or list of str): column indices or labels for the inputs\n outputs (list of int or list of str): column indices or labels for the outputs\n\n Returns:\n 3-tuple: tuple(dataset, list of means, list of stds)\n means and stds allow normalization of new inputs and denormalization of the outputs\n\n \"\"\"\n df = weather.daily(location, years=years, use_cache=use_cache, verbosity=verbosity).sort()\n ds = util.dataset_from_dataframe(df, normalize=False, delays=delays, inputs=inputs, outputs=outputs, verbosity=verbosity)\n nn = util.ann_from_ds(ds, N_hidden=N_hidden, verbosity=verbosity)\n trainer = util.build_trainer(nn, 
ds=ds, verbosity=verbosity)\n trainer.trainEpochs(epochs)\n\n columns = []\n for delay in delays:\n columns += [inp + \"[-{}]\".format(delay) for inp in inputs]\n columns += list(outputs)\n\n columns += ['Predicted {}'.format(outp) for outp in outputs]\n table = [list(i) + list(t) + list(trainer.module.activate(i)) for i, t in zip(trainer.ds['input'], trainer.ds['target'])]\n df = pd.DataFrame(table, columns=columns, index=df.index[max(delays):])\n\n #comparison = df[[] + list(outputs)]\n return trainer, df\n",
"def daily(location='Fresno, CA', years=1, use_cache=True, verbosity=1):\n \"\"\"Retrieve weather for the indicated airport code or 'City, ST' string.\n\n >>> df = daily('Camas, WA', verbosity=-1)\n >>> 365 <= len(df) <= 365 * 2 + 1\n True\n\n Sacramento data has gaps (airport KMCC):\n 8/21/2013 is missing from 2013.\n Whole months are missing from 2014.\n >>> df = daily('Sacramento, CA', years=[2013], verbosity=-1)\n >>> 364 <= len(df) <= 365\n True\n >>> df.columns\n Index([u'PST', u'Max TemperatureF', u'Mean TemperatureF', u'Min TemperatureF', u'Max Dew PointF', u'MeanDew PointF', u'Min DewpointF', ...\n \"\"\"\n this_year = datetime.date.today().year\n if isinstance(years, (int, float)):\n # current (incomplete) year doesn't count in total number of years\n # so 0 would return this calendar year's weather data\n years = np.arange(0, int(years) + 1)\n years = sorted(years)\n if not all(1900 <= yr <= this_year for yr in years):\n years = np.array([abs(yr) if (1900 <= abs(yr) <= this_year) else (this_year - abs(int(yr))) for yr in years])[::-1]\n\n airport_code = airport(location, default=location)\n\n # refresh the cache each time the start or end year changes\n cache_path = 'daily-{}-{}-{}.csv'.format(airport_code, years[0], years[-1])\n cache_path = os.path.join(CACHE_PATH, cache_path)\n if use_cache:\n try:\n return pd.DataFrame.from_csv(cache_path)\n except:\n pass\n\n df = pd.DataFrame()\n for year in years:\n url = ('http://www.wunderground.com/history/airport/{airport}/{yearstart}/1/1/' +\n 'CustomHistory.html?dayend=31&monthend=12&yearend={yearend}' +\n '&req_city=&req_state=&req_statename=&reqdb.zip=&reqdb.magic=&reqdb.wmo=&MR=1&format=1').format(\n airport=airport_code,\n yearstart=year,\n yearend=year\n )\n if verbosity > 1:\n print('GETing *.CSV using \"{0}\"'.format(url))\n buf = urllib.urlopen(url).read()\n if verbosity > 0:\n N = buf.count('\\n')\n M = (buf.count(',') + N) / float(N)\n print('Retrieved CSV for airport code \"{}\" with appox. 
{} lines and {} columns = {} cells.'.format(\n airport_code, N, int(round(M)), int(round(M)) * N))\n if verbosity > 2:\n print(buf)\n table = util.read_csv(buf, format='header+values-list', numbers=True)\n # # clean up the last column (if it contains <br> tags)\n table = [util.strip_br(row) if len(row) > 1 else row for row in table]\n # numcols = max(len(row) for row in table)\n # table = [row for row in table if len(row) == numcols]\n columns = table.pop(0)\n tzs = [s for s in columns if (s[1:] in ['ST', 'DT'] and s[0] in 'PMCE')]\n dates = [float('nan')] * len(table)\n for i, row in enumerate(table):\n for j, value in enumerate(row):\n if not value and value is not None:\n value = 0\n continue\n if columns[j] in tzs:\n table[i][j] = util.make_tz_aware(value, tz=columns[j])\n if isinstance(table[i][j], datetime.datetime):\n dates[i] = table[i][j]\n continue\n try:\n table[i][j] = float(value)\n if not (table[i][j] % 1):\n table[i][j] = int(table[i][j])\n except:\n pass\n df0 = pd.DataFrame(table, columns=columns, index=dates)\n df = df.append(df0)\n\n if verbosity > 1:\n print(df)\n\n try:\n df.to_csv(cache_path)\n except:\n if verbosity > 0 and use_cache:\n from traceback import print_exc\n print_exc()\n warnings.warn('Unable to write weather data to cache file at {}'.format(cache_path))\n\n return df\n",
"def input_dataset_from_dataframe(df, delays=(1, 2, 3), inputs=(1, 2, -1), outputs=None, normalize=True, verbosity=1):\n \"\"\" Build a dataset with an empty output/target vector\n\n Identical to `dataset_from_dataframe`, except that default values for 2 arguments:\n outputs: None\n \"\"\"\n return dataset_from_dataframe(df=df, delays=delays, inputs=inputs, outputs=outputs,\n normalize=normalize, verbosity=verbosity)\n"
] | """Example pybrain network training to predict the weather
Installation:
pip install pug-ann
Examples:
In the future DataSets should have an attribute `columns` or `df` to facilitate converting back to dataframes
>>> trainer, df = train_weather_predictor('San Francisco, CA', epochs=2, inputs=['Max TemperatureF'], outputs=['Max TemperatureF'], years=range(2013,2015), delays=(1,), use_cache=True, verbosity=0)
>>> all(trainer.module.activate(trainer.ds['input'][0]) == trainer.module.activate(trainer.ds['input'][1]))
False
>>> trainer.trainEpochs(5)
Make sure NN hasn't saturated (as it might for a sigmoid hidden layer)
>>> all(trainer.module.activate(trainer.ds['input'][0]) == trainer.module.activate(trainer.ds['input'][1]))
False
"""
import datetime
from pug.ann.data import weather
from pug.ann import util
from pug.nlp.util import make_date, update_dict
from matplotlib import pyplot as plt
import pandas as pd
def train_weather_predictor(
location='Portland, OR',
years=range(2013, 2016,),
delays=(1, 2, 3),
inputs=('Min Temperature', 'Max Temperature', 'Min Sea Level Pressure', u'Max Sea Level Pressure', 'WindDirDegrees',),
outputs=(u'Max TemperatureF',),
N_hidden=6,
epochs=30,
use_cache=False,
verbosity=2,
):
"""Train a neural nerual net to predict the weather for tomorrow based on past weather.
Builds a linear single hidden layer neural net (multi-dimensional nonlinear regression).
The dataset is a basic SupervisedDataSet rather than a SequentialDataSet, so the training set
and the test set are sampled randomly. This means that historical data for one sample (the delayed
input vector) will likely be used as the target for other samples.
Uses CSVs scraped from wunderground (without an api key) to get daily weather for the years indicated.
Arguments:
location (str): City and state in standard US postal service format: "City, ST"
alternatively an airport code like "PDX or LAX"
delays (list of int): sample delays to use for the input tapped delay line.
Positive and negative values are treated the same as sample counts into the past.
default: [1, 2, 3], in z-transform notation: z^-1 + z^-2 + z^-3
years (int or list of int): list of 4-digit years to download weather from wunderground
inputs (list of int or list of str): column indices or labels for the inputs
outputs (list of int or list of str): column indices or labels for the outputs
Returns:
3-tuple: tuple(dataset, list of means, list of stds)
means and stds allow normalization of new inputs and denormalization of the outputs
"""
df = weather.daily(location, years=years, use_cache=use_cache, verbosity=verbosity).sort()
ds = util.dataset_from_dataframe(df, normalize=False, delays=delays, inputs=inputs, outputs=outputs, verbosity=verbosity)
nn = util.ann_from_ds(ds, N_hidden=N_hidden, verbosity=verbosity)
trainer = util.build_trainer(nn, ds=ds, verbosity=verbosity)
trainer.trainEpochs(epochs)
columns = []
for delay in delays:
columns += [inp + "[-{}]".format(delay) for inp in inputs]
columns += list(outputs)
columns += ['Predicted {}'.format(outp) for outp in outputs]
table = [list(i) + list(t) + list(trainer.module.activate(i)) for i, t in zip(trainer.ds['input'], trainer.ds['target'])]
df = pd.DataFrame(table, columns=columns, index=df.index[max(delays):])
#comparison = df[[] + list(outputs)]
return trainer, df
def thermostat(
location='Camas, WA',
days=100,
capacity=1000,
max_eval=1000,
):
""" Control the thermostat on an AirCon system with finite thermal energy capacity (chiller)
Useful for controlling a chiller (something that can cool down overnight and heat up during the
hottest part of the day (in order to cool the building).
Builds a linear single-layer neural net (multi-dimensional regression).
The dataset is a basic SupervisedDataSet rather than a SequentialDataSet, so there may be
"accuracy left on the table" or even "cheating" during training, because training and test
set are selected randomly so historical data for one sample is used as target (furture data)
for other samples.
Uses CSVs scraped from wunderground (no api key required) to get daily weather for the years indicated.
Arguments:
location (str): City and state in standard US postal service format: "City, ST" or an airport code like "PDX"
days (int): Number of days of weather data to download from wunderground
delays (list of int): sample delays to use for the input tapped delay line.
Positive and negative values are treated the same as sample counts into the past.
default: [1, 2, 3], in z-transform notation: z^-1 + z^-2 + z^-3
years (int or list of int): list of 4-digit years to download weather from wunderground
inputs (list of int or list of str): column indices or labels for the inputs
outputs (list of int or list of str): column indices or labels for the outputs
Returns:
3-tuple: tuple(dataset, list of means, list of stds)
means and stds allow normalization of new inputs and denormalization of the outputs
"""
pass
def explore_maze():
# simplified version of the reinforcement learning tutorial example
structure = [
list('!!!!!!!!!!'),
list('! ! ! ! !'),
list('! !! ! ! !'),
list('! ! !'),
list('! !!!!!! !'),
list('! ! ! !'),
list('! ! !!!! !'),
list('! !'),
list('! !!!!! !'),
list('! ! !'),
list('!!!!!!!!!!'),
]
structure = np.array([[ord(c)-ord(' ') for c in row] for row in structure])
shape = np.array(structure.shape)
environment = Maze(structure, tuple(shape - 2))
controller = ActionValueTable(shape.prod(), 4)
controller.initialize(1.)
learner = Q()
agent = LearningAgent(controller, learner)
task = MDPMazeTask(environment)
experiment = Experiment(task, agent)
for i in range(30):
experiment.doInteractions(30)
agent.learn()
agent.reset()
controller.params.reshape(shape.prod(), 4).max(1).reshape(*shape)
# (0, 0) is upper left and (0, N) is upper right, so flip matrix upside down to match NESW action order
greedy_policy = np.argmax(controller.params.reshape(shape.prod(), 4), 1)
greedy_policy = np.flipud(np.array(list('NESW'))[greedy_policy].reshape(shape))
maze = np.flipud(np.array(list(' #'))[structure])
print('Maze map:')
print('\n'.join(''.join(row) for row in maze))
print('Greedy policy:')
print('\n'.join(''.join(row) for row in greedy_policy))
assert '\n'.join(''.join(row) for row in greedy_policy) == 'NNNNN\nNSNNN\nNSNNN\nNEENN\nNNNNN'
#################################################################
## An online (reinforcement) learning example based on the
## cart pole-balancing example in pybrian
## WIP to perform optimal control of Building HVAC system
## with limited electrical or thermal energy resource that is recharged every day
from pybrain.rl.environments import EpisodicTask
from pybrain.rl.environments.cartpole import CartPoleEnvironment
from pybrain.rl.environments.cartpole.nonmarkovpole import NonMarkovPoleEnvironment
class BalanceTask(EpisodicTask):
""" The task of balancing some pole(s) on a cart """
def __init__(self, env=None, maxsteps=1000, desiredValue=0, location='Portland, OR'):
"""
:key env: (optional) an instance of a CartPoleEnvironment (or a subclass thereof)
:key maxsteps: maximal number of steps (default: 1000)
"""
self.location = location
self.airport_code = weather.airport(location)
self.desiredValue = desiredValue
if env is None:
env = CartPoleEnvironment()
EpisodicTask.__init__(self, env)
self.N = maxsteps
self.t = 0
# scale position and angle, don't scale velocities (unknown maximum)
self.sensor_limits = [(-3, 3)]
for i in range(1, self.outdim):
if isinstance(self.env, NonMarkovPoleEnvironment) and i % 2 == 0:
self.sensor_limits.append(None)
else:
self.sensor_limits.append((-np.pi, np.pi))
# self.sensor_limits = [None] * 4
# actor between -10 and 10 Newton
self.actor_limits = [(-50, 50)]
def reset(self):
EpisodicTask.reset(self)
self.day = weather.daily(date='random')
self.t = 0
def performAction(self, action):
self.t += 1
EpisodicTask.performAction(self, action)
def isFinished(self):
if max(list(map(abs, self.env.getPoleAngles()))) > 0.7:
# pole has fallen
return True
elif abs(self.env.getCartPosition()) > 2.4:
# cart is out of it's border conditions
return True
elif self.t >= self.N:
# maximal timesteps
return True
return False
def getReward(self):
angles = list(map(abs, self.env.getPoleAngles()))
s = abs(self.env.getCartPosition())
reward = 0
if min(angles) < 0.05 and abs(s) < 0.05:
reward = 0
elif max(angles) > 0.7 or abs(s) > 2.4:
reward = -2 * (self.N - self.t)
else:
reward = -1
return reward
def setMaxLength(self, n):
self.N = n
from pybrain.tools.shortcuts import buildNetwork, NetworkError
from pybrain.optimization.hillclimber import HillClimber
import time
import numpy as np
def run_competition(builders=[], task=BalanceTask(), Optimizer=HillClimber, rounds=3, max_eval=20, N_hidden=3, verbosity=0):
""" pybrain buildNetwork builds a subtly different network structhan build_ann... so compete them!
Arguments:
task (Task): task to compete at
Optimizer (class): pybrain.Optimizer class to instantiate for each competitor
rounds (int): number of times to run the competition
max_eval (int): number of objective function evaluations that the optimizer is allowed
in each round
N_hidden (int): number of hidden nodes in each network being competed
The functional difference that I can see is that:
buildNetwork connects the bias to the output
build_ann does not
The api differences are:
build_ann allows heterogeneous layer types but the output layer is always linear
buildNetwork allows specification of the output layer type
"""
results = []
builders = list(builders) + [buildNetwork, util.build_ann]
for r in range(rounds):
heat = []
# FIXME: shuffle the order of the builders to keep things fair
# (like switching sides of the tennis court)
for builder in builders:
try:
competitor = builder(task.outdim, N_hidden, task.indim, verbosity=verbosity)
except NetworkError:
competitor = builder(task.outdim, N_hidden, task.indim)
# TODO: verify that a full reset is actually happening
task.reset()
optimizer = Optimizer(task, competitor, maxEvaluations=max_eval)
t0 = time.time()
nn, nn_best = optimizer.learn()
t1 = time.time()
heat += [(nn_best, t1-t0, nn)]
results += [tuple(heat)]
if verbosity >= 0:
print([competitor_scores[:2] for competitor_scores in heat])
# # alternatively:
# agent = ( pybrain.rl.agents.OptimizationAgent(net, HillClimber())
# or
# pybrain.rl.agents.LearningAgent(net, pybrain.rl.learners.ENAC()) )
# exp = pybrain.rl.experiments.EpisodicExperiment(task, agent).doEpisodes(100)
means = [[np.array([r[i][j] for r in results]).mean() for i in range(len(results[0]))] for j in range(2)]
if verbosity > -1:
print('Mean Performance:')
print(means)
perfi, speedi = np.argmax(means[0]), np.argmin(means[1])
print('And the winner for performance is ... Algorithm #{} (0-offset array index [{}])'.format(perfi+1, perfi))
print('And the winner for speed is ... Algorithm #{} (0-offset array index [{}])'.format(speedi+1, speedi))
return results, means
try:
# this will fail on latest master branch of pybrain as well as latest pypi release of pybrain
from pybrain.rl.environments.mazes import Maze, MDPMazeTask
from pybrain.rl.learners.valuebased import ActionValueTable
from pybrain.rl.agents import LearningAgent
from pybrain.rl.learners import Q # , SARSA # (State-Action-Reward-State-Action)
from pybrain.rl.experiments import Experiment
# from pybrain.rl.environments import Task
import pylab
def maze():
# import sys, time
pylab.gray()
pylab.ion()
# The goal appears to be in the upper right
structure = [
'!!!!!!!!!!',
'! ! ! ! !',
'! !! ! ! !',
'! ! !',
'! !!!!!! !',
'! ! ! !',
'! ! !!!! !',
'! !',
'! !!!!! !',
'! ! !',
'!!!!!!!!!!',
]
structure = np.array([[ord(c)-ord(' ') for c in row] for row in structure])
shape = np.array(structure.shape)
environment = Maze(structure, tuple(shape - 2))
controller = ActionValueTable(shape.prod(), 4)
controller.initialize(1.)
learner = Q()
agent = LearningAgent(controller, learner)
task = MDPMazeTask(environment)
experiment = Experiment(task, agent)
for i in range(100):
experiment.doInteractions(100)
agent.learn()
agent.reset()
# 4 actions, 81 locations/states (9x9 grid)
# max(1) gives/plots the biggest objective function value for that square
pylab.pcolor(controller.params.reshape(81, 4).max(1).reshape(9, 9))
pylab.draw()
# (0, 0) is upper left and (0, N) is upper right, so flip matrix upside down to match NESW action order
greedy_policy = np.argmax(controller.params.reshape(shape.prod(), 4), 1)
greedy_policy = np.flipud(np.array(list('NESW'))[greedy_policy].reshape(shape))
maze = np.flipud(np.array(list(' #'))[structure])
print('Maze map:')
print('\n'.join(''.join(row) for row in maze))
print('Greedy policy:')
print('\n'.join(''.join(row) for row in greedy_policy))
# pylab.show()
except ImportError:
pass
if __name__ == '__main__':
import sys
try:
explore_maze()
except:
from traceback import format_exc
sys.exit(format_exc())
print(run_competition(verbosity=0))
sys.exit(0)
|
hobson/pug-ann | pug/ann/example.py | run_competition | python | def run_competition(builders=[], task=BalanceTask(), Optimizer=HillClimber, rounds=3, max_eval=20, N_hidden=3, verbosity=0):
results = []
builders = list(builders) + [buildNetwork, util.build_ann]
for r in range(rounds):
heat = []
# FIXME: shuffle the order of the builders to keep things fair
# (like switching sides of the tennis court)
for builder in builders:
try:
competitor = builder(task.outdim, N_hidden, task.indim, verbosity=verbosity)
except NetworkError:
competitor = builder(task.outdim, N_hidden, task.indim)
# TODO: verify that a full reset is actually happening
task.reset()
optimizer = Optimizer(task, competitor, maxEvaluations=max_eval)
t0 = time.time()
nn, nn_best = optimizer.learn()
t1 = time.time()
heat += [(nn_best, t1-t0, nn)]
results += [tuple(heat)]
if verbosity >= 0:
print([competitor_scores[:2] for competitor_scores in heat])
# # alternatively:
# agent = ( pybrain.rl.agents.OptimizationAgent(net, HillClimber())
# or
# pybrain.rl.agents.LearningAgent(net, pybrain.rl.learners.ENAC()) )
# exp = pybrain.rl.experiments.EpisodicExperiment(task, agent).doEpisodes(100)
means = [[np.array([r[i][j] for r in results]).mean() for i in range(len(results[0]))] for j in range(2)]
if verbosity > -1:
print('Mean Performance:')
print(means)
perfi, speedi = np.argmax(means[0]), np.argmin(means[1])
print('And the winner for performance is ... Algorithm #{} (0-offset array index [{}])'.format(perfi+1, perfi))
print('And the winner for speed is ... Algorithm #{} (0-offset array index [{}])'.format(speedi+1, speedi))
return results, means | pybrain buildNetwork builds a subtly different network structhan build_ann... so compete them!
Arguments:
task (Task): task to compete at
Optimizer (class): pybrain.Optimizer class to instantiate for each competitor
rounds (int): number of times to run the competition
max_eval (int): number of objective function evaluations that the optimizer is allowed
in each round
N_hidden (int): number of hidden nodes in each network being competed
The functional difference that I can see is that:
buildNetwork connects the bias to the output
build_ann does not
The api differences are:
build_ann allows heterogeneous layer types but the output layer is always linear
buildNetwork allows specification of the output layer type | train | https://github.com/hobson/pug-ann/blob/8a4d7103a744d15b4a737fc0f9a84c823973e0ec/pug/ann/example.py#L286-L347 | null | """Example pybrain network training to predict the weather
Installation:
pip install pug-ann
Examples:
In the future DataSets should have an attribute `columns` or `df` to facilitate converting back to dataframes
>>> trainer, df = train_weather_predictor('San Francisco, CA', epochs=2, inputs=['Max TemperatureF'], outputs=['Max TemperatureF'], years=range(2013,2015), delays=(1,), use_cache=True, verbosity=0)
>>> all(trainer.module.activate(trainer.ds['input'][0]) == trainer.module.activate(trainer.ds['input'][1]))
False
>>> trainer.trainEpochs(5)
Make sure NN hasn't saturated (as it might for a sigmoid hidden layer)
>>> all(trainer.module.activate(trainer.ds['input'][0]) == trainer.module.activate(trainer.ds['input'][1]))
False
"""
import datetime
from pug.ann.data import weather
from pug.ann import util
from pug.nlp.util import make_date, update_dict
from matplotlib import pyplot as plt
import pandas as pd
def train_weather_predictor(
location='Portland, OR',
years=range(2013, 2016,),
delays=(1, 2, 3),
inputs=('Min Temperature', 'Max Temperature', 'Min Sea Level Pressure', u'Max Sea Level Pressure', 'WindDirDegrees',),
outputs=(u'Max TemperatureF',),
N_hidden=6,
epochs=30,
use_cache=False,
verbosity=2,
):
"""Train a neural nerual net to predict the weather for tomorrow based on past weather.
Builds a linear single hidden layer neural net (multi-dimensional nonlinear regression).
The dataset is a basic SupervisedDataSet rather than a SequentialDataSet, so the training set
and the test set are sampled randomly. This means that historical data for one sample (the delayed
input vector) will likely be used as the target for other samples.
Uses CSVs scraped from wunderground (without an api key) to get daily weather for the years indicated.
Arguments:
location (str): City and state in standard US postal service format: "City, ST"
alternatively an airport code like "PDX or LAX"
delays (list of int): sample delays to use for the input tapped delay line.
Positive and negative values are treated the same as sample counts into the past.
default: [1, 2, 3], in z-transform notation: z^-1 + z^-2 + z^-3
years (int or list of int): list of 4-digit years to download weather from wunderground
inputs (list of int or list of str): column indices or labels for the inputs
outputs (list of int or list of str): column indices or labels for the outputs
Returns:
3-tuple: tuple(dataset, list of means, list of stds)
means and stds allow normalization of new inputs and denormalization of the outputs
"""
df = weather.daily(location, years=years, use_cache=use_cache, verbosity=verbosity).sort()
ds = util.dataset_from_dataframe(df, normalize=False, delays=delays, inputs=inputs, outputs=outputs, verbosity=verbosity)
nn = util.ann_from_ds(ds, N_hidden=N_hidden, verbosity=verbosity)
trainer = util.build_trainer(nn, ds=ds, verbosity=verbosity)
trainer.trainEpochs(epochs)
columns = []
for delay in delays:
columns += [inp + "[-{}]".format(delay) for inp in inputs]
columns += list(outputs)
columns += ['Predicted {}'.format(outp) for outp in outputs]
table = [list(i) + list(t) + list(trainer.module.activate(i)) for i, t in zip(trainer.ds['input'], trainer.ds['target'])]
df = pd.DataFrame(table, columns=columns, index=df.index[max(delays):])
#comparison = df[[] + list(outputs)]
return trainer, df
def oneday_weather_forecast(
location='Portland, OR',
inputs=('Min Temperature', 'Mean Temperature', 'Max Temperature', 'Max Humidity', 'Mean Humidity', 'Min Humidity', 'Max Sea Level Pressure', 'Mean Sea Level Pressure', 'Min Sea Level Pressure', 'Wind Direction'),
outputs=('Min Temperature', 'Mean Temperature', 'Max Temperature', 'Max Humidity'),
date=None,
epochs=200,
delays=(1, 2, 3, 4),
num_years=4,
use_cache=False,
verbosity=1,
):
""" Provide a weather forecast for tomorrow based on historical weather at that location """
date = make_date(date or datetime.datetime.now().date())
num_years = int(num_years or 10)
years = range(date.year - num_years, date.year + 1)
df = weather.daily(location, years=years, use_cache=use_cache, verbosity=verbosity).sort()
# because up-to-date weather history was cached above, can use that cache, regardless of use_cache kwarg
trainer, df = train_weather_predictor(
location,
years=years,
delays=delays,
inputs=inputs,
outputs=outputs,
epochs=epochs,
verbosity=verbosity,
use_cache=True,
)
nn = trainer.module
forecast = {'trainer': trainer}
yesterday = dict(zip(outputs, nn.activate(trainer.ds['input'][-2])))
forecast['yesterday'] = update_dict(yesterday, {'date': df.index[-2].date()})
today = dict(zip(outputs, nn.activate(trainer.ds['input'][-1])))
forecast['today'] = update_dict(today, {'date': df.index[-1].date()})
ds = util.input_dataset_from_dataframe(df[-max(delays):], delays=delays, inputs=inputs, normalize=False, verbosity=0)
tomorrow = dict(zip(outputs, nn.activate(ds['input'][-1])))
forecast['tomorrow'] = update_dict(tomorrow, {'date': (df.index[-1] + datetime.timedelta(1)).date()})
return forecast
def thermostat(
location='Camas, WA',
days=100,
capacity=1000,
max_eval=1000,
):
""" Control the thermostat on an AirCon system with finite thermal energy capacity (chiller)
Useful for controlling a chiller (something that can cool down overnight and heat up during the
hottest part of the day (in order to cool the building).
Builds a linear single-layer neural net (multi-dimensional regression).
The dataset is a basic SupervisedDataSet rather than a SequentialDataSet, so there may be
"accuracy left on the table" or even "cheating" during training, because training and test
set are selected randomly so historical data for one sample is used as target (furture data)
for other samples.
Uses CSVs scraped from wunderground (no api key required) to get daily weather for the years indicated.
Arguments:
location (str): City and state in standard US postal service format: "City, ST" or an airport code like "PDX"
days (int): Number of days of weather data to download from wunderground
delays (list of int): sample delays to use for the input tapped delay line.
Positive and negative values are treated the same as sample counts into the past.
default: [1, 2, 3], in z-transform notation: z^-1 + z^-2 + z^-3
years (int or list of int): list of 4-digit years to download weather from wunderground
inputs (list of int or list of str): column indices or labels for the inputs
outputs (list of int or list of str): column indices or labels for the outputs
Returns:
3-tuple: tuple(dataset, list of means, list of stds)
means and stds allow normalization of new inputs and denormalization of the outputs
"""
pass
def explore_maze():
# simplified version of the reinforcement learning tutorial example
structure = [
list('!!!!!!!!!!'),
list('! ! ! ! !'),
list('! !! ! ! !'),
list('! ! !'),
list('! !!!!!! !'),
list('! ! ! !'),
list('! ! !!!! !'),
list('! !'),
list('! !!!!! !'),
list('! ! !'),
list('!!!!!!!!!!'),
]
structure = np.array([[ord(c)-ord(' ') for c in row] for row in structure])
shape = np.array(structure.shape)
environment = Maze(structure, tuple(shape - 2))
controller = ActionValueTable(shape.prod(), 4)
controller.initialize(1.)
learner = Q()
agent = LearningAgent(controller, learner)
task = MDPMazeTask(environment)
experiment = Experiment(task, agent)
for i in range(30):
experiment.doInteractions(30)
agent.learn()
agent.reset()
controller.params.reshape(shape.prod(), 4).max(1).reshape(*shape)
# (0, 0) is upper left and (0, N) is upper right, so flip matrix upside down to match NESW action order
greedy_policy = np.argmax(controller.params.reshape(shape.prod(), 4), 1)
greedy_policy = np.flipud(np.array(list('NESW'))[greedy_policy].reshape(shape))
maze = np.flipud(np.array(list(' #'))[structure])
print('Maze map:')
print('\n'.join(''.join(row) for row in maze))
print('Greedy policy:')
print('\n'.join(''.join(row) for row in greedy_policy))
assert '\n'.join(''.join(row) for row in greedy_policy) == 'NNNNN\nNSNNN\nNSNNN\nNEENN\nNNNNN'
#################################################################
## An online (reinforcement) learning example based on the
## cart pole-balancing example in pybrian
## WIP to perform optimal control of Building HVAC system
## with limited electrical or thermal energy resource that is recharged every day
from pybrain.rl.environments import EpisodicTask
from pybrain.rl.environments.cartpole import CartPoleEnvironment
from pybrain.rl.environments.cartpole.nonmarkovpole import NonMarkovPoleEnvironment
class BalanceTask(EpisodicTask):
    """ The task of balancing some pole(s) on a cart """
    def __init__(self, env=None, maxsteps=1000, desiredValue=0, location='Portland, OR'):
        """
        :key env: (optional) an instance of a CartPoleEnvironment (or a subclass thereof)
        :key maxsteps: maximal number of steps (default: 1000)
        """
        self.location = location
        self.airport_code = weather.airport(location)
        self.desiredValue = desiredValue
        EpisodicTask.__init__(self, env if env is not None else CartPoleEnvironment())
        self.N = maxsteps
        self.t = 0
        # Scale position to [-3, 3] and angles to [-pi, pi]; velocity sensors of a
        # non-Markov environment get no limits (unknown maximum).
        nonmarkov = isinstance(self.env, NonMarkovPoleEnvironment)
        self.sensor_limits = [(-3, 3)] + [
            None if nonmarkov and i % 2 == 0 else (-np.pi, np.pi)
            for i in range(1, self.outdim)
        ]
        # self.sensor_limits = [None] * 4
        # actor between -50 and 50 Newton
        self.actor_limits = [(-50, 50)]
    def reset(self):
        """Begin a new episode with a randomly drawn day of weather history."""
        EpisodicTask.reset(self)
        self.day = weather.daily(date='random')
        self.t = 0
    def performAction(self, action):
        """Count the step and forward the action to the environment."""
        self.t += 1
        EpisodicTask.performAction(self, action)
    def isFinished(self):
        """Episode ends when a pole falls, the cart leaves its track, or time runs out."""
        if max(abs(a) for a in self.env.getPoleAngles()) > 0.7:
            return True  # pole has fallen
        if abs(self.env.getCartPosition()) > 2.4:
            return True  # cart is out of its border conditions
        return self.t >= self.N  # maximal timesteps reached
    def getReward(self):
        """Zero while balanced near center, large penalty on failure, -1 otherwise."""
        tilt = [abs(a) for a in self.env.getPoleAngles()]
        offset = abs(self.env.getCartPosition())
        if min(tilt) < 0.05 and offset < 0.05:
            return 0
        if max(tilt) > 0.7 or offset > 2.4:
            # Penalize early failure by the number of steps left in the episode.
            return -2 * (self.N - self.t)
        return -1
    def setMaxLength(self, n):
        """Override the maximal number of steps per episode."""
        self.N = n
from pybrain.tools.shortcuts import buildNetwork, NetworkError
from pybrain.optimization.hillclimber import HillClimber
import time
import numpy as np
# Optional maze demo: these RL modules were removed/renamed in newer pybrain
# releases, so the whole demo is skipped when they cannot be imported.
try:
    # this will fail on latest master branch of pybrain as well as latest pypi release of pybrain
    from pybrain.rl.environments.mazes import Maze, MDPMazeTask
    from pybrain.rl.learners.valuebased import ActionValueTable
    from pybrain.rl.agents import LearningAgent
    from pybrain.rl.learners import Q  # , SARSA # (State-Action-Reward-State-Action)
    from pybrain.rl.experiments import Experiment
    # from pybrain.rl.environments import Task
    import pylab

    def maze():
        """Train a Q-learning agent on a walled grid maze, plot the state
        values with pylab, and print the maze map and greedy NESW policy."""
        # import sys, time
        pylab.gray()
        pylab.ion()
        # The goal appears to be in the upper right
        structure = [
            '!!!!!!!!!!',
            '! ! ! ! !',
            '! !! ! ! !',
            '! ! !',
            '! !!!!!! !',
            '! ! ! !',
            '! ! !!!! !',
            '! !',
            '! !!!!! !',
            '! ! !',
            '!!!!!!!!!!',
        ]
        # '!' -> nonzero wall value, ' ' -> 0 free cell
        structure = np.array([[ord(c)-ord(' ') for c in row] for row in structure])
        shape = np.array(structure.shape)
        environment = Maze(structure, tuple(shape - 2))
        # one row per state, one column per action
        controller = ActionValueTable(shape.prod(), 4)
        controller.initialize(1.)
        learner = Q()
        agent = LearningAgent(controller, learner)
        task = MDPMazeTask(environment)
        experiment = Experiment(task, agent)
        for i in range(100):
            experiment.doInteractions(100)
            agent.learn()
            agent.reset()
        # 4 actions, 81 locations/states (9x9 grid)
        # max(1) gives/plots the biggest objective function value for that square
        pylab.pcolor(controller.params.reshape(81, 4).max(1).reshape(9, 9))
        pylab.draw()
        # (0, 0) is upper left and (0, N) is upper right, so flip matrix upside down to match NESW action order
        greedy_policy = np.argmax(controller.params.reshape(shape.prod(), 4), 1)
        greedy_policy = np.flipud(np.array(list('NESW'))[greedy_policy].reshape(shape))
        maze = np.flipud(np.array(list(' #'))[structure])
        print('Maze map:')
        print('\n'.join(''.join(row) for row in maze))
        print('Greedy policy:')
        print('\n'.join(''.join(row) for row in greedy_policy))
        # pylab.show()
except ImportError:
    pass
if __name__ == '__main__':
    import sys
    try:
        explore_maze()
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt;
    # narrow to Exception so Ctrl-C and explicit exits still propagate.
    except Exception:
        from traceback import format_exc
        # Exit nonzero with the full traceback on stderr.
        sys.exit(format_exc())
    print(run_competition(verbosity=0))
    sys.exit(0)
|
cemsbr/yala | yala/config.py | Config._set_linters | python | def _set_linters(self):
if 'linters' in self._config:
self.user_linters = list(self._parse_cfg_linters())
self.linters = {linter: self._all_linters[linter]
for linter in self.user_linters}
else:
self.linters = self._all_linters | Use user linters or all available when not specified. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/config.py#L37-L44 | [
"def _parse_cfg_linters(self):\n \"\"\"Return valid linter names found in config files.\"\"\"\n user_value = self._config.get('linters', '')\n # For each line of \"linters\" value, use comma as separator\n for line in user_value.splitlines():\n yield from self._parse_linters_line(line)\n"
] | class Config:
"""Deal with default and user configuration.
Internal use only. If you are implementing your own linter, use
``self._config``.
"""
_config = None
_CFG_FILE = 'setup.cfg'
#: str: Section of the config file.
_CFG_SECTION = 'yala'
def __init__(self, all_linters):
"""Read default and user config files.
Args:
all_linters (dict): Names and classes of all available linters.
"""
self._all_linters = all_linters
default_cfg = self._read_default_file()
user_cfg = self._read_user_files()
self._config = self._merge(default_cfg, user_cfg)
self.user_linters = [] # chosen by the user
self.linters = {} # chosen by the user or all of them
self._set_linters()
def print_config(self):
"""Print all yala configurations, including default and user's."""
linters = self.user_linters or list(self.linters)
print('linters:', ', '.join(linters))
for key, value in self._config.items():
if key != 'linters':
print('{}: {}'.format(key, value))
def get_linter_classes(self):
"""Return linters to be executed."""
return (self._all_linters[linter] for linter in self.linters)
def _parse_cfg_linters(self):
"""Return valid linter names found in config files."""
user_value = self._config.get('linters', '')
# For each line of "linters" value, use comma as separator
for line in user_value.splitlines():
yield from self._parse_linters_line(line)
def _parse_linters_line(self, line):
linters = (linter for linter in re.split(r'\s*,\s*', line))
for linter in linters:
if linter in self._all_linters:
yield linter
elif linter:
LOG.warning('%s is not a valid linter', linter)
def get_linter_config(self, name):
"""Return linter options without linter name prefix."""
prefix = name + ' '
return {k[len(prefix):]: v
for k, v in self._config.items()
if k.startswith(prefix)}
@classmethod
def _read_default_file(cls):
yala_dir = Path(__file__).parent
default_file = yala_dir / cls._CFG_FILE
config = ConfigParser()
config.read(str(default_file))
return config
@classmethod
def _read_user_files(cls):
work_dir = Path.cwd()
user_files = [work_dir / cls._CFG_FILE]
# From current dir's file to root's file
user_files += [parent / cls._CFG_FILE for parent in work_dir.parents]
user_cfg = ConfigParser()
# Reverse order so parent folder's file is overridden.
for user_file in reversed(user_files):
if user_file.is_file():
LOG.info('Reading %s', user_file)
user_cfg.read(str(user_file))
return user_cfg
@classmethod
def _merge(cls, default, user):
"""Append user options to default options. Return yala section."""
section = cls._CFG_SECTION
merged = default[section]
if section not in user:
return merged
user = user[section]
for key, value in user.items():
if key in merged:
merged[key] += ' ' + value
else:
merged[key] = value
return merged
|
cemsbr/yala | yala/config.py | Config.print_config | python | def print_config(self):
linters = self.user_linters or list(self.linters)
print('linters:', ', '.join(linters))
for key, value in self._config.items():
if key != 'linters':
print('{}: {}'.format(key, value)) | Print all yala configurations, including default and user's. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/config.py#L46-L52 | null | class Config:
"""Deal with default and user configuration.
Internal use only. If you are implementing your own linter, use
``self._config``.
"""
_config = None
_CFG_FILE = 'setup.cfg'
#: str: Section of the config file.
_CFG_SECTION = 'yala'
def __init__(self, all_linters):
"""Read default and user config files.
Args:
all_linters (dict): Names and classes of all available linters.
"""
self._all_linters = all_linters
default_cfg = self._read_default_file()
user_cfg = self._read_user_files()
self._config = self._merge(default_cfg, user_cfg)
self.user_linters = [] # chosen by the user
self.linters = {} # chosen by the user or all of them
self._set_linters()
def _set_linters(self):
"""Use user linters or all available when not specified."""
if 'linters' in self._config:
self.user_linters = list(self._parse_cfg_linters())
self.linters = {linter: self._all_linters[linter]
for linter in self.user_linters}
else:
self.linters = self._all_linters
def get_linter_classes(self):
"""Return linters to be executed."""
return (self._all_linters[linter] for linter in self.linters)
def _parse_cfg_linters(self):
"""Return valid linter names found in config files."""
user_value = self._config.get('linters', '')
# For each line of "linters" value, use comma as separator
for line in user_value.splitlines():
yield from self._parse_linters_line(line)
def _parse_linters_line(self, line):
linters = (linter for linter in re.split(r'\s*,\s*', line))
for linter in linters:
if linter in self._all_linters:
yield linter
elif linter:
LOG.warning('%s is not a valid linter', linter)
def get_linter_config(self, name):
"""Return linter options without linter name prefix."""
prefix = name + ' '
return {k[len(prefix):]: v
for k, v in self._config.items()
if k.startswith(prefix)}
@classmethod
def _read_default_file(cls):
yala_dir = Path(__file__).parent
default_file = yala_dir / cls._CFG_FILE
config = ConfigParser()
config.read(str(default_file))
return config
@classmethod
def _read_user_files(cls):
work_dir = Path.cwd()
user_files = [work_dir / cls._CFG_FILE]
# From current dir's file to root's file
user_files += [parent / cls._CFG_FILE for parent in work_dir.parents]
user_cfg = ConfigParser()
# Reverse order so parent folder's file is overridden.
for user_file in reversed(user_files):
if user_file.is_file():
LOG.info('Reading %s', user_file)
user_cfg.read(str(user_file))
return user_cfg
@classmethod
def _merge(cls, default, user):
"""Append user options to default options. Return yala section."""
section = cls._CFG_SECTION
merged = default[section]
if section not in user:
return merged
user = user[section]
for key, value in user.items():
if key in merged:
merged[key] += ' ' + value
else:
merged[key] = value
return merged
|
cemsbr/yala | yala/config.py | Config._parse_cfg_linters | python | def _parse_cfg_linters(self):
user_value = self._config.get('linters', '')
# For each line of "linters" value, use comma as separator
for line in user_value.splitlines():
yield from self._parse_linters_line(line) | Return valid linter names found in config files. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/config.py#L58-L63 | [
"def _parse_linters_line(self, line):\n linters = (linter for linter in re.split(r'\\s*,\\s*', line))\n for linter in linters:\n if linter in self._all_linters:\n yield linter\n elif linter:\n LOG.warning('%s is not a valid linter', linter)\n"
] | class Config:
"""Deal with default and user configuration.
Internal use only. If you are implementing your own linter, use
``self._config``.
"""
_config = None
_CFG_FILE = 'setup.cfg'
#: str: Section of the config file.
_CFG_SECTION = 'yala'
def __init__(self, all_linters):
"""Read default and user config files.
Args:
all_linters (dict): Names and classes of all available linters.
"""
self._all_linters = all_linters
default_cfg = self._read_default_file()
user_cfg = self._read_user_files()
self._config = self._merge(default_cfg, user_cfg)
self.user_linters = [] # chosen by the user
self.linters = {} # chosen by the user or all of them
self._set_linters()
def _set_linters(self):
"""Use user linters or all available when not specified."""
if 'linters' in self._config:
self.user_linters = list(self._parse_cfg_linters())
self.linters = {linter: self._all_linters[linter]
for linter in self.user_linters}
else:
self.linters = self._all_linters
def print_config(self):
"""Print all yala configurations, including default and user's."""
linters = self.user_linters or list(self.linters)
print('linters:', ', '.join(linters))
for key, value in self._config.items():
if key != 'linters':
print('{}: {}'.format(key, value))
def get_linter_classes(self):
"""Return linters to be executed."""
return (self._all_linters[linter] for linter in self.linters)
def _parse_linters_line(self, line):
linters = (linter for linter in re.split(r'\s*,\s*', line))
for linter in linters:
if linter in self._all_linters:
yield linter
elif linter:
LOG.warning('%s is not a valid linter', linter)
def get_linter_config(self, name):
"""Return linter options without linter name prefix."""
prefix = name + ' '
return {k[len(prefix):]: v
for k, v in self._config.items()
if k.startswith(prefix)}
@classmethod
def _read_default_file(cls):
yala_dir = Path(__file__).parent
default_file = yala_dir / cls._CFG_FILE
config = ConfigParser()
config.read(str(default_file))
return config
@classmethod
def _read_user_files(cls):
work_dir = Path.cwd()
user_files = [work_dir / cls._CFG_FILE]
# From current dir's file to root's file
user_files += [parent / cls._CFG_FILE for parent in work_dir.parents]
user_cfg = ConfigParser()
# Reverse order so parent folder's file is overridden.
for user_file in reversed(user_files):
if user_file.is_file():
LOG.info('Reading %s', user_file)
user_cfg.read(str(user_file))
return user_cfg
@classmethod
def _merge(cls, default, user):
"""Append user options to default options. Return yala section."""
section = cls._CFG_SECTION
merged = default[section]
if section not in user:
return merged
user = user[section]
for key, value in user.items():
if key in merged:
merged[key] += ' ' + value
else:
merged[key] = value
return merged
|
cemsbr/yala | yala/config.py | Config.get_linter_config | python | def get_linter_config(self, name):
prefix = name + ' '
return {k[len(prefix):]: v
for k, v in self._config.items()
if k.startswith(prefix)} | Return linter options without linter name prefix. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/config.py#L73-L78 | null | class Config:
"""Deal with default and user configuration.
Internal use only. If you are implementing your own linter, use
``self._config``.
"""
_config = None
_CFG_FILE = 'setup.cfg'
#: str: Section of the config file.
_CFG_SECTION = 'yala'
def __init__(self, all_linters):
"""Read default and user config files.
Args:
all_linters (dict): Names and classes of all available linters.
"""
self._all_linters = all_linters
default_cfg = self._read_default_file()
user_cfg = self._read_user_files()
self._config = self._merge(default_cfg, user_cfg)
self.user_linters = [] # chosen by the user
self.linters = {} # chosen by the user or all of them
self._set_linters()
def _set_linters(self):
"""Use user linters or all available when not specified."""
if 'linters' in self._config:
self.user_linters = list(self._parse_cfg_linters())
self.linters = {linter: self._all_linters[linter]
for linter in self.user_linters}
else:
self.linters = self._all_linters
def print_config(self):
"""Print all yala configurations, including default and user's."""
linters = self.user_linters or list(self.linters)
print('linters:', ', '.join(linters))
for key, value in self._config.items():
if key != 'linters':
print('{}: {}'.format(key, value))
def get_linter_classes(self):
"""Return linters to be executed."""
return (self._all_linters[linter] for linter in self.linters)
def _parse_cfg_linters(self):
"""Return valid linter names found in config files."""
user_value = self._config.get('linters', '')
# For each line of "linters" value, use comma as separator
for line in user_value.splitlines():
yield from self._parse_linters_line(line)
def _parse_linters_line(self, line):
linters = (linter for linter in re.split(r'\s*,\s*', line))
for linter in linters:
if linter in self._all_linters:
yield linter
elif linter:
LOG.warning('%s is not a valid linter', linter)
@classmethod
def _read_default_file(cls):
yala_dir = Path(__file__).parent
default_file = yala_dir / cls._CFG_FILE
config = ConfigParser()
config.read(str(default_file))
return config
@classmethod
def _read_user_files(cls):
work_dir = Path.cwd()
user_files = [work_dir / cls._CFG_FILE]
# From current dir's file to root's file
user_files += [parent / cls._CFG_FILE for parent in work_dir.parents]
user_cfg = ConfigParser()
# Reverse order so parent folder's file is overridden.
for user_file in reversed(user_files):
if user_file.is_file():
LOG.info('Reading %s', user_file)
user_cfg.read(str(user_file))
return user_cfg
@classmethod
def _merge(cls, default, user):
"""Append user options to default options. Return yala section."""
section = cls._CFG_SECTION
merged = default[section]
if section not in user:
return merged
user = user[section]
for key, value in user.items():
if key in merged:
merged[key] += ' ' + value
else:
merged[key] = value
return merged
|
cemsbr/yala | yala/config.py | Config._merge | python | def _merge(cls, default, user):
section = cls._CFG_SECTION
merged = default[section]
if section not in user:
return merged
user = user[section]
for key, value in user.items():
if key in merged:
merged[key] += ' ' + value
else:
merged[key] = value
return merged | Append user options to default options. Return yala section. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/config.py#L103-L117 | null | class Config:
"""Deal with default and user configuration.
Internal use only. If you are implementing your own linter, use
``self._config``.
"""
_config = None
_CFG_FILE = 'setup.cfg'
#: str: Section of the config file.
_CFG_SECTION = 'yala'
def __init__(self, all_linters):
"""Read default and user config files.
Args:
all_linters (dict): Names and classes of all available linters.
"""
self._all_linters = all_linters
default_cfg = self._read_default_file()
user_cfg = self._read_user_files()
self._config = self._merge(default_cfg, user_cfg)
self.user_linters = [] # chosen by the user
self.linters = {} # chosen by the user or all of them
self._set_linters()
def _set_linters(self):
"""Use user linters or all available when not specified."""
if 'linters' in self._config:
self.user_linters = list(self._parse_cfg_linters())
self.linters = {linter: self._all_linters[linter]
for linter in self.user_linters}
else:
self.linters = self._all_linters
def print_config(self):
"""Print all yala configurations, including default and user's."""
linters = self.user_linters or list(self.linters)
print('linters:', ', '.join(linters))
for key, value in self._config.items():
if key != 'linters':
print('{}: {}'.format(key, value))
def get_linter_classes(self):
"""Return linters to be executed."""
return (self._all_linters[linter] for linter in self.linters)
def _parse_cfg_linters(self):
"""Return valid linter names found in config files."""
user_value = self._config.get('linters', '')
# For each line of "linters" value, use comma as separator
for line in user_value.splitlines():
yield from self._parse_linters_line(line)
def _parse_linters_line(self, line):
linters = (linter for linter in re.split(r'\s*,\s*', line))
for linter in linters:
if linter in self._all_linters:
yield linter
elif linter:
LOG.warning('%s is not a valid linter', linter)
def get_linter_config(self, name):
"""Return linter options without linter name prefix."""
prefix = name + ' '
return {k[len(prefix):]: v
for k, v in self._config.items()
if k.startswith(prefix)}
@classmethod
def _read_default_file(cls):
yala_dir = Path(__file__).parent
default_file = yala_dir / cls._CFG_FILE
config = ConfigParser()
config.read(str(default_file))
return config
@classmethod
def _read_user_files(cls):
work_dir = Path.cwd()
user_files = [work_dir / cls._CFG_FILE]
# From current dir's file to root's file
user_files += [parent / cls._CFG_FILE for parent in work_dir.parents]
user_cfg = ConfigParser()
# Reverse order so parent folder's file is overridden.
for user_file in reversed(user_files):
if user_file.is_file():
LOG.info('Reading %s', user_file)
user_cfg.read(str(user_file))
return user_cfg
@classmethod
|
cemsbr/yala | yala/main.py | LinterRunner.get_results | python | def get_results(self):
try:
stdout, stderr = self._lint()
# Can't return a generator from a subprocess
return list(stdout), stderr or []
except FileNotFoundError as exception:
# Error if the linter was not found but was chosen by the user
if self._linter.name in self.config.user_linters:
error_msg = 'Could not find {}. Did you install it? ' \
'Got exception: {}'.format(self._linter.name, exception)
return [[], [error_msg]]
# If the linter was not chosen by the user, do nothing
return [[], []] | Run the linter, parse, and return result list.
If a linter specified by the user is not found, return an error message
as result. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/main.py#L48-L65 | [
"def _lint(self):\n \"\"\"Run linter in a subprocess.\"\"\"\n command = self._get_command()\n process = subprocess.run(command, stdout=subprocess.PIPE, # nosec\n stderr=subprocess.PIPE)\n LOG.info('Finished %s', ' '.join(command))\n stdout, stderr = self._get_output_lines(process)\n return self._linter.parse(stdout), self._parse_stderr(stderr)\n"
] | class LinterRunner:
"""Run linter and process results."""
config = None
targets = []
def __init__(self, linter_class):
"""Set linter class and its configuration."""
linter_class.config = self.config.get_linter_config(linter_class.name)
self._linter = linter_class()
@classmethod
def run(cls, linter_class):
"""Run a linter and return the results."""
runner = cls(linter_class)
return runner.get_results()
def _get_command(self):
"""Return command with options and targets, ready for execution."""
targets = ' '.join(self.targets)
cmd_str = self._linter.command_with_options + ' ' + targets
cmd_shlex = shlex.split(cmd_str)
return list(cmd_shlex)
def _lint(self):
"""Run linter in a subprocess."""
command = self._get_command()
process = subprocess.run(command, stdout=subprocess.PIPE, # nosec
stderr=subprocess.PIPE)
LOG.info('Finished %s', ' '.join(command))
stdout, stderr = self._get_output_lines(process)
return self._linter.parse(stdout), self._parse_stderr(stderr)
@staticmethod
def _get_output_lines(process):
return [(line for line in output.decode('utf-8').split('\n') if line)
for output in (process.stdout, process.stderr)]
def _parse_stderr(self, lines):
return ['[{}] {}'.format(self._linter.name, line) for line in lines]
|
cemsbr/yala | yala/main.py | LinterRunner._get_command | python | def _get_command(self):
targets = ' '.join(self.targets)
cmd_str = self._linter.command_with_options + ' ' + targets
cmd_shlex = shlex.split(cmd_str)
return list(cmd_shlex) | Return command with options and targets, ready for execution. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/main.py#L67-L72 | null | class LinterRunner:
"""Run linter and process results."""
config = None
targets = []
def __init__(self, linter_class):
"""Set linter class and its configuration."""
linter_class.config = self.config.get_linter_config(linter_class.name)
self._linter = linter_class()
@classmethod
def run(cls, linter_class):
"""Run a linter and return the results."""
runner = cls(linter_class)
return runner.get_results()
def get_results(self):
"""Run the linter, parse, and return result list.
If a linter specified by the user is not found, return an error message
as result.
"""
try:
stdout, stderr = self._lint()
# Can't return a generator from a subprocess
return list(stdout), stderr or []
except FileNotFoundError as exception:
# Error if the linter was not found but was chosen by the user
if self._linter.name in self.config.user_linters:
error_msg = 'Could not find {}. Did you install it? ' \
'Got exception: {}'.format(self._linter.name, exception)
return [[], [error_msg]]
# If the linter was not chosen by the user, do nothing
return [[], []]
def _lint(self):
"""Run linter in a subprocess."""
command = self._get_command()
process = subprocess.run(command, stdout=subprocess.PIPE, # nosec
stderr=subprocess.PIPE)
LOG.info('Finished %s', ' '.join(command))
stdout, stderr = self._get_output_lines(process)
return self._linter.parse(stdout), self._parse_stderr(stderr)
@staticmethod
def _get_output_lines(process):
return [(line for line in output.decode('utf-8').split('\n') if line)
for output in (process.stdout, process.stderr)]
def _parse_stderr(self, lines):
return ['[{}] {}'.format(self._linter.name, line) for line in lines]
|
cemsbr/yala | yala/main.py | LinterRunner._lint | python | def _lint(self):
command = self._get_command()
process = subprocess.run(command, stdout=subprocess.PIPE, # nosec
stderr=subprocess.PIPE)
LOG.info('Finished %s', ' '.join(command))
stdout, stderr = self._get_output_lines(process)
return self._linter.parse(stdout), self._parse_stderr(stderr) | Run linter in a subprocess. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/main.py#L74-L81 | [
"def _get_command(self):\n \"\"\"Return command with options and targets, ready for execution.\"\"\"\n targets = ' '.join(self.targets)\n cmd_str = self._linter.command_with_options + ' ' + targets\n cmd_shlex = shlex.split(cmd_str)\n return list(cmd_shlex)\n",
"def _get_output_lines(process):\n return [(line for line in output.decode('utf-8').split('\\n') if line)\n for output in (process.stdout, process.stderr)]\n",
"def _parse_stderr(self, lines):\n return ['[{}] {}'.format(self._linter.name, line) for line in lines]\n"
] | class LinterRunner:
"""Run linter and process results."""
config = None
targets = []
def __init__(self, linter_class):
"""Set linter class and its configuration."""
linter_class.config = self.config.get_linter_config(linter_class.name)
self._linter = linter_class()
@classmethod
def run(cls, linter_class):
"""Run a linter and return the results."""
runner = cls(linter_class)
return runner.get_results()
def get_results(self):
"""Run the linter, parse, and return result list.
If a linter specified by the user is not found, return an error message
as result.
"""
try:
stdout, stderr = self._lint()
# Can't return a generator from a subprocess
return list(stdout), stderr or []
except FileNotFoundError as exception:
# Error if the linter was not found but was chosen by the user
if self._linter.name in self.config.user_linters:
error_msg = 'Could not find {}. Did you install it? ' \
'Got exception: {}'.format(self._linter.name, exception)
return [[], [error_msg]]
# If the linter was not chosen by the user, do nothing
return [[], []]
def _get_command(self):
"""Return command with options and targets, ready for execution."""
targets = ' '.join(self.targets)
cmd_str = self._linter.command_with_options + ' ' + targets
cmd_shlex = shlex.split(cmd_str)
return list(cmd_shlex)
@staticmethod
def _get_output_lines(process):
return [(line for line in output.decode('utf-8').split('\n') if line)
for output in (process.stdout, process.stderr)]
def _parse_stderr(self, lines):
return ['[{}] {}'.format(self._linter.name, line) for line in lines]
|
cemsbr/yala | yala/main.py | Main.lint | python | def lint(self, targets):
LinterRunner.targets = targets
linters = self._config.get_linter_classes()
with Pool() as pool:
out_err_none = pool.map(LinterRunner.run, linters)
out_err = [item for item in out_err_none if item is not None]
stdout, stderr = zip(*out_err)
return sorted(chain.from_iterable(stdout)), chain.from_iterable(stderr) | Run linters in parallel and sort all results.
Args:
targets (list): List of files and folders to lint. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/main.py#L109-L121 | null | class Main:
"""Parse all linters and aggregate results."""
# We only need the ``run`` method.
# pylint: disable=too-few-public-methods
def __init__(self, config=None, all_linters=None):
"""Initialize the only Config object and assign it to other classes.
Args:
config (Config): Config object.
all_linters (dict): Names and classes of all available linters.
"""
self._classes = all_linters or LINTERS
self._config = config or Config(self._classes)
LinterRunner.config = self._config
def run_from_cli(self, args):
"""Read arguments, run and print results.
Args:
args (dict): Arguments parsed by docopt.
"""
if args['--dump-config']:
self._config.print_config()
else:
stdout, stderr = self.lint(args['<path>'])
self.print_results(stdout, stderr)
@classmethod
def print_results(cls, stdout, stderr):
"""Print linter results and exits with an error if there's any."""
for line in stderr:
print(line, file=sys.stderr)
if stdout:
if stderr: # blank line to separate stdout from stderr
print(file=sys.stderr)
cls._print_stdout(stdout)
else:
print(':) No issues found.')
@staticmethod
def _print_stdout(stdout):
for line in stdout:
print(line)
issue = 'issues' if len(stdout) > 1 else 'issue'
sys.exit('\n:( {} {} found.'.format(len(stdout), issue))
|
cemsbr/yala | yala/main.py | Main.run_from_cli | python | def run_from_cli(self, args):
if args['--dump-config']:
self._config.print_config()
else:
stdout, stderr = self.lint(args['<path>'])
self.print_results(stdout, stderr) | Read arguments, run and print results.
Args:
args (dict): Arguments parsed by docopt. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/main.py#L123-L133 | [
"def lint(self, targets):\n \"\"\"Run linters in parallel and sort all results.\n\n Args:\n targets (list): List of files and folders to lint.\n \"\"\"\n LinterRunner.targets = targets\n linters = self._config.get_linter_classes()\n with Pool() as pool:\n out_err_none = pool.map(LinterRunner.run, linters)\n out_err = [item for item in out_err_none if item is not None]\n stdout, stderr = zip(*out_err)\n return sorted(chain.from_iterable(stdout)), chain.from_iterable(stderr)\n",
"def print_results(cls, stdout, stderr):\n \"\"\"Print linter results and exits with an error if there's any.\"\"\"\n for line in stderr:\n print(line, file=sys.stderr)\n if stdout:\n if stderr: # blank line to separate stdout from stderr\n print(file=sys.stderr)\n cls._print_stdout(stdout)\n else:\n print(':) No issues found.')\n"
] | class Main:
"""Parse all linters and aggregate results."""
# We only need the ``run`` method.
# pylint: disable=too-few-public-methods
def __init__(self, config=None, all_linters=None):
"""Initialize the only Config object and assign it to other classes.
Args:
config (Config): Config object.
all_linters (dict): Names and classes of all available linters.
"""
self._classes = all_linters or LINTERS
self._config = config or Config(self._classes)
LinterRunner.config = self._config
def lint(self, targets):
"""Run linters in parallel and sort all results.
Args:
targets (list): List of files and folders to lint.
"""
LinterRunner.targets = targets
linters = self._config.get_linter_classes()
with Pool() as pool:
out_err_none = pool.map(LinterRunner.run, linters)
out_err = [item for item in out_err_none if item is not None]
stdout, stderr = zip(*out_err)
return sorted(chain.from_iterable(stdout)), chain.from_iterable(stderr)
@classmethod
def print_results(cls, stdout, stderr):
"""Print linter results and exits with an error if there's any."""
for line in stderr:
print(line, file=sys.stderr)
if stdout:
if stderr: # blank line to separate stdout from stderr
print(file=sys.stderr)
cls._print_stdout(stdout)
else:
print(':) No issues found.')
@staticmethod
def _print_stdout(stdout):
for line in stdout:
print(line)
issue = 'issues' if len(stdout) > 1 else 'issue'
sys.exit('\n:( {} {} found.'.format(len(stdout), issue))
|
def print_results(cls, stdout, stderr):
    """Print linter results and exits with an error if there's any."""
    # Linter error output is forwarded to our own stderr first.
    for error_line in stderr:
        print(error_line, file=sys.stderr)
    if not stdout:
        print(':) No issues found.')
        return
    if stderr:  # blank line to separate stdout from stderr
        print(file=sys.stderr)
    cls._print_stdout(stdout)
"def _print_stdout(stdout):\n for line in stdout:\n print(line)\n issue = 'issues' if len(stdout) > 1 else 'issue'\n sys.exit('\\n:( {} {} found.'.format(len(stdout), issue))\n"
] | class Main:
"""Parse all linters and aggregate results."""
# We only need the ``run`` method.
# pylint: disable=too-few-public-methods
def __init__(self, config=None, all_linters=None):
"""Initialize the only Config object and assign it to other classes.
Args:
config (Config): Config object.
all_linters (dict): Names and classes of all available linters.
"""
self._classes = all_linters or LINTERS
self._config = config or Config(self._classes)
LinterRunner.config = self._config
def lint(self, targets):
"""Run linters in parallel and sort all results.
Args:
targets (list): List of files and folders to lint.
"""
LinterRunner.targets = targets
linters = self._config.get_linter_classes()
with Pool() as pool:
out_err_none = pool.map(LinterRunner.run, linters)
out_err = [item for item in out_err_none if item is not None]
stdout, stderr = zip(*out_err)
return sorted(chain.from_iterable(stdout)), chain.from_iterable(stderr)
def run_from_cli(self, args):
"""Read arguments, run and print results.
Args:
args (dict): Arguments parsed by docopt.
"""
if args['--dump-config']:
self._config.print_config()
else:
stdout, stderr = self.lint(args['<path>'])
self.print_results(stdout, stderr)
@classmethod
@staticmethod
def _print_stdout(stdout):
for line in stdout:
print(line)
issue = 'issues' if len(stdout) > 1 else 'issue'
sys.exit('\n:( {} {} found.'.format(len(stdout), issue))
|
cemsbr/yala | yala/base.py | LinterOutput._cmp_key | python | def _cmp_key(self, obj=None):
if not obj:
obj = self
line_nr = int(obj.line_nr) if obj.line_nr else 0
col = int(obj.col) if obj.col else 0
return (obj.path, line_nr, col, obj.msg) | Comparison key for sorting results from all linters.
The sort should group files and lines from different linters to make it
easier for refactoring. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/base.py#L41-L51 | null | class LinterOutput:
"""A one-line linter result. It can be sorted and printed as string."""
# We only override magic methods.
# pylint: disable=too-few-public-methods
def __init__(self, linter_name, path, msg, line_nr=None, col=None):
"""Optionally set all attributes.
Args:
path (str): Relative file path.
line (int): Line number.
msg (str): Explanation of what is wrong.
col (int): Column where the problem begins.
"""
# Set all attributes in the constructor for convenience.
# pylint: disable=too-many-arguments
if line_nr:
line_nr = int(line_nr)
if col:
col = int(col)
self._linter_name = linter_name
self.path = path
self.line_nr = line_nr
self.msg = msg
self.col = col
def __str__(self):
"""Output shown to the user."""
return '{}|{}:{}|{} [{}]'.format(self.path, self.line_nr, self.col,
self.msg, self._linter_name)
def __lt__(self, other):
"""Use ``_cmp_key`` to compare two lines."""
if isinstance(other, type(self)):
return self._cmp_key() < self._cmp_key(other)
return super().__lt__(other)
|
def command_with_options(self):
    """Add arguments from config to :attr:`command`."""
    # Guard clause: no extra args configured, the bare command is enough.
    if 'args' not in self.config:
        return self.command
    return ' '.join((self.command, self.config['args']))
"""Linter implementations should inherit from this class."""
# Most methods are for child class only, not public.
#: dict: Configuration for a specific linter
config = None
@property
@classmethod
@abstractmethod
def name(cls):
"""Name of this linter. Recommended to be the same as its command."""
pass # pragma: no cover
@property
def command(self):
"""Command to execute. Defaults to :attr:`name`.
The options in config files are appended in
:meth:`command_with_options`.
"""
return self.name
@property
@abstractmethod
def parse(self, lines):
"""Parse linter output and return results.
Args:
lines (iterable): Output lines.
Returns:
iterable of Result: Linter results.
"""
pass # pragma: no cover
def _get_relative_path(self, full_path):
"""Return the relative path from current path."""
try:
rel_path = Path(full_path).relative_to(Path().absolute())
except ValueError:
LOG.error("%s: Couldn't find relative path of '%s' from '%s'.",
self.name, full_path, Path().absolute())
return full_path
return str(rel_path)
def _parse_by_pattern(self, lines, pattern):
"""Match pattern line by line and return Results.
Use ``_create_output_from_match`` to convert pattern match groups to
Result instances.
Args:
lines (iterable): Output lines to be parsed.
pattern: Compiled pattern to match against lines.
result_fn (function): Receive results of one match and return a
Result.
Return:
generator: Result instances.
"""
for line in lines:
match = pattern.match(line)
if match:
params = match.groupdict()
if not params:
params = match.groups()
yield self._create_output_from_match(params)
def _create_output_from_match(self, match_result):
"""Create Result instance from pattern match results.
Args:
match: Pattern match.
"""
if isinstance(match_result, dict):
return LinterOutput(self.name, **match_result)
return LinterOutput(self.name, *match_result)
|
cemsbr/yala | yala/base.py | Linter._get_relative_path | python | def _get_relative_path(self, full_path):
try:
rel_path = Path(full_path).relative_to(Path().absolute())
except ValueError:
LOG.error("%s: Couldn't find relative path of '%s' from '%s'.",
self.name, full_path, Path().absolute())
return full_path
return str(rel_path) | Return the relative path from current path. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/base.py#L104-L112 | null | class Linter(metaclass=ABCMeta):
"""Linter implementations should inherit from this class."""
# Most methods are for child class only, not public.
#: dict: Configuration for a specific linter
config = None
@property
@classmethod
@abstractmethod
def name(cls):
"""Name of this linter. Recommended to be the same as its command."""
pass # pragma: no cover
@property
def command(self):
"""Command to execute. Defaults to :attr:`name`.
The options in config files are appended in
:meth:`command_with_options`.
"""
return self.name
@property
def command_with_options(self):
"""Add arguments from config to :attr:`command`."""
if 'args' in self.config:
return ' '.join((self.command, self.config['args']))
return self.command
@abstractmethod
def parse(self, lines):
"""Parse linter output and return results.
Args:
lines (iterable): Output lines.
Returns:
iterable of Result: Linter results.
"""
pass # pragma: no cover
def _parse_by_pattern(self, lines, pattern):
"""Match pattern line by line and return Results.
Use ``_create_output_from_match`` to convert pattern match groups to
Result instances.
Args:
lines (iterable): Output lines to be parsed.
pattern: Compiled pattern to match against lines.
result_fn (function): Receive results of one match and return a
Result.
Return:
generator: Result instances.
"""
for line in lines:
match = pattern.match(line)
if match:
params = match.groupdict()
if not params:
params = match.groups()
yield self._create_output_from_match(params)
def _create_output_from_match(self, match_result):
"""Create Result instance from pattern match results.
Args:
match: Pattern match.
"""
if isinstance(match_result, dict):
return LinterOutput(self.name, **match_result)
return LinterOutput(self.name, *match_result)
|
cemsbr/yala | yala/base.py | Linter._parse_by_pattern | python | def _parse_by_pattern(self, lines, pattern):
for line in lines:
match = pattern.match(line)
if match:
params = match.groupdict()
if not params:
params = match.groups()
yield self._create_output_from_match(params) | Match pattern line by line and return Results.
Use ``_create_output_from_match`` to convert pattern match groups to
Result instances.
Args:
lines (iterable): Output lines to be parsed.
pattern: Compiled pattern to match against lines.
result_fn (function): Receive results of one match and return a
Result.
Return:
generator: Result instances. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/base.py#L114-L135 | [
"def _create_output_from_match(self, match_result):\n \"\"\"Create Result instance from pattern match results.\n\n Args:\n match: Pattern match.\n \"\"\"\n if isinstance(match_result, dict):\n return LinterOutput(self.name, **match_result)\n return LinterOutput(self.name, *match_result)\n",
"def _create_output_from_match(self, match_result):\n \"\"\"As isort outputs full path, we change it to relative path.\"\"\"\n full_path = match_result['full_path']\n path = self._get_relative_path(full_path)\n return LinterOutput(self.name, path, match_result['msg'])\n"
] | class Linter(metaclass=ABCMeta):
"""Linter implementations should inherit from this class."""
# Most methods are for child class only, not public.
#: dict: Configuration for a specific linter
config = None
@property
@classmethod
@abstractmethod
def name(cls):
"""Name of this linter. Recommended to be the same as its command."""
pass # pragma: no cover
@property
def command(self):
"""Command to execute. Defaults to :attr:`name`.
The options in config files are appended in
:meth:`command_with_options`.
"""
return self.name
@property
def command_with_options(self):
"""Add arguments from config to :attr:`command`."""
if 'args' in self.config:
return ' '.join((self.command, self.config['args']))
return self.command
@abstractmethod
def parse(self, lines):
"""Parse linter output and return results.
Args:
lines (iterable): Output lines.
Returns:
iterable of Result: Linter results.
"""
pass # pragma: no cover
def _get_relative_path(self, full_path):
"""Return the relative path from current path."""
try:
rel_path = Path(full_path).relative_to(Path().absolute())
except ValueError:
LOG.error("%s: Couldn't find relative path of '%s' from '%s'.",
self.name, full_path, Path().absolute())
return full_path
return str(rel_path)
def _create_output_from_match(self, match_result):
"""Create Result instance from pattern match results.
Args:
match: Pattern match.
"""
if isinstance(match_result, dict):
return LinterOutput(self.name, **match_result)
return LinterOutput(self.name, *match_result)
|
def _create_output_from_match(self, match_result):
    """Create Result instance from pattern match results.

    Args:
        match: Pattern match.
    """
    # Dicts carry named groups; anything else is a positional tuple.
    if isinstance(match_result, dict):
        positional, named = (), match_result
    else:
        positional, named = match_result, {}
    return LinterOutput(self.name, *positional, **named)
Args:
match: Pattern match. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/base.py#L137-L145 | null | class Linter(metaclass=ABCMeta):
"""Linter implementations should inherit from this class."""
# Most methods are for child class only, not public.
#: dict: Configuration for a specific linter
config = None
@property
@classmethod
@abstractmethod
def name(cls):
"""Name of this linter. Recommended to be the same as its command."""
pass # pragma: no cover
@property
def command(self):
"""Command to execute. Defaults to :attr:`name`.
The options in config files are appended in
:meth:`command_with_options`.
"""
return self.name
@property
def command_with_options(self):
"""Add arguments from config to :attr:`command`."""
if 'args' in self.config:
return ' '.join((self.command, self.config['args']))
return self.command
@abstractmethod
def parse(self, lines):
"""Parse linter output and return results.
Args:
lines (iterable): Output lines.
Returns:
iterable of Result: Linter results.
"""
pass # pragma: no cover
def _get_relative_path(self, full_path):
"""Return the relative path from current path."""
try:
rel_path = Path(full_path).relative_to(Path().absolute())
except ValueError:
LOG.error("%s: Couldn't find relative path of '%s' from '%s'.",
self.name, full_path, Path().absolute())
return full_path
return str(rel_path)
def _parse_by_pattern(self, lines, pattern):
"""Match pattern line by line and return Results.
Use ``_create_output_from_match`` to convert pattern match groups to
Result instances.
Args:
lines (iterable): Output lines to be parsed.
pattern: Compiled pattern to match against lines.
result_fn (function): Receive results of one match and return a
Result.
Return:
generator: Result instances.
"""
for line in lines:
match = pattern.match(line)
if match:
params = match.groupdict()
if not params:
params = match.groups()
yield self._create_output_from_match(params)
|
def _create_output_from_match(self, match_result):
    """As isort outputs full path, we change it to relative path."""
    relative = self._get_relative_path(match_result['full_path'])
    return LinterOutput(self.name, relative, match_result['msg'])
"""Isort parser."""
name = 'isort'
def parse(self, lines):
"""Get full path and message from each output line."""
# E.g. "ERROR: /my/path/main.py Imports are incorrectly sorted."
pattern = re.compile(r'''
^.+?
:\ (?P<full_path>.+?)
\ (?P<msg>.+)$''', re.VERBOSE)
return self._parse_by_pattern(lines, pattern)
|
def parse(self, lines):
    """Get :class:`base.Result` parameters using regex.

    There are 2 lines for each pydocstyle result:

    1. Filename and line number;
    2. Message for the problem found.
    """
    location_pattern = re.compile(r'^(.+?):(\d+)')
    message_pattern = re.compile(r'^\s+(.+)$')
    path = line_nr = None
    for index, line in enumerate(lines):
        if index % 2 == 0:
            # Even lines: "<file>:<line number>".
            path, line_nr = location_pattern.match(line).groups()
        else:
            # Odd lines: indented message about the location just read.
            msg = message_pattern.match(line).group(1)
            yield LinterOutput(self.name, path, msg, line_nr)
There are 2 lines for each pydocstyle result:
1. Filename and line number;
2. Message for the problem found. | train | https://github.com/cemsbr/yala/blob/efceb044cb3de8d1c12140087ae9d5f8269bfbf9/yala/linters.py#L64-L78 | null | class Pydocstyle(Linter):
"""Pydocstyle parser."""
name = 'pydocstyle'
|
def parse(self, lines):
    """Get :class:`base.Result` parameters using regex."""
    # re.VERBOSE ignores the layout whitespace inside the pattern.
    output_pattern = re.compile(r"""^(?P<path>.+?)
                    :(?P<msg>.+)
                    :(?P<line_nr>\d+?)
                    :(?P<col>\d+?)$""", re.VERBOSE)
    return self._parse_by_pattern(lines, output_pattern)
"def _parse_by_pattern(self, lines, pattern):\n \"\"\"Match pattern line by line and return Results.\n\n Use ``_create_output_from_match`` to convert pattern match groups to\n Result instances.\n\n Args:\n lines (iterable): Output lines to be parsed.\n pattern: Compiled pattern to match against lines.\n result_fn (function): Receive results of one match and return a\n Result.\n\n Return:\n generator: Result instances.\n \"\"\"\n for line in lines:\n match = pattern.match(line)\n if match:\n params = match.groupdict()\n if not params:\n params = match.groups()\n yield self._create_output_from_match(params)\n"
] | class Pylint(Linter):
"""Pylint parser."""
name = 'pylint'
|
def calcul_ratios_calage(year_data, year_calage, data_bdf, data_cn):
    '''
    Compute the calibration ratio (BdF masses against national accounts for
    the data year) and the ageing ratio between the two national-accounts
    years, from national-accounts masses and BdF consumption masses.
    '''
    # Join both mass tables on the COICOP group index.
    masses = data_cn.merge(
        data_bdf, left_index = True, right_index = True
    )
    masses.rename(columns = {0: 'conso_bdf{}'.format(year_data)}, inplace = True)
    ageing_column = 'ratio_cn{}_cn{}'.format(year_data, year_calage)
    if year_calage == year_data:
        # Same year: no ageing to apply.
        masses[ageing_column] = 1
    else:
        masses[ageing_column] = (
            masses['consoCN_COICOP_{}'.format(year_calage)]
            / masses['consoCN_COICOP_{}'.format(year_data)]
        )
    # 1e6 rescales the CN masses to BdF units (CN appears to be expressed
    # in millions — confirm against the source spreadsheet).
    masses['ratio_bdf{}_cn{}'.format(year_data, year_data)] = (
        1e6 * masses['consoCN_COICOP_{}'.format(year_data)]
        / masses['conso_bdf{}'.format(year_data)]
    )
    return masses
à partir des masses de comptabilité nationale et des masses de consommation de bdf. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/calage_bdf_cn.py#L91-L110 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Ce script comprend plusieurs fonctions, dont le but est de caler les données Budget des Familles sur des agrégats.
# L'agrégation des données BdF rendant des totaux différents de ceux de la comptabilité nationale, ces calages sont
# importants pour restaurer les bonnes quantités. Plusieurs méthodes sont proposées.
# Import de modules généraux
from __future__ import division
import logging
import os
import pkg_resources
import pandas
from pandas import concat
# Import de modules spécifiques à Openfisca
from openfisca_france_indirect_taxation.build_survey_data.utils import ident_men_dtype
log = logging.getLogger(__name__)
def calage_viellissement_depenses(year_data, year_calage, depenses, masses):
depenses_calees = pandas.DataFrame()
coicop_list = set(poste_coicop for poste_coicop in depenses.columns if poste_coicop[:5] == 'poste')
for column in coicop_list:
coicop = column.replace('poste_coicop_', '')
if coicop[:1] != '1' and coicop[:1] != '9':
grosposte = int(coicop[:1])
else:
if len(coicop) == 3:
grosposte = int(coicop[:1])
elif len(coicop) == 5:
grosposte = int(coicop[:2])
elif coicop in ['1151', '1181', '1411', '9122', '9151', '9211', '9341']:
grosposte = int(coicop[:1])
elif coicop[:2] == '99' or coicop[:2] == '13':
grosposte = 99
else:
grosposte = int(coicop[:2])
# RAPPEL : 12 postes CN et COICOP
# 01 Produits alimentaires et boissons non alcoolisées
# 02 Boissons alcoolisées et tabac
# 03 Articles d'habillement et chaussures
# 04 Logement, eau, gaz, électricité et autres combustibles
# 05 Meubles, articles de ménage et entretien courant de l'habitation
# 06 Santé
# 07 Transports
# 08 Communication
# 09 Loisir et culture
# 10 Education
# 11 Hotels, cafés, restaurants
# 12 Biens et services divers
if grosposte != 99:
grosposte = 'coicop12_{}'.format(grosposte)
ratio_bdf_cn = masses.at[grosposte, 'ratio_bdf{}_cn{}'.format(year_data, year_data)]
ratio_cn_cn = masses.at[grosposte, 'ratio_cn{}_cn{}'.format(year_data, year_calage)]
depenses_calees[column] = depenses[column] * ratio_bdf_cn * ratio_cn_cn
log.info(u'''
Pour le grosposte {}, le ratio de calage de la base bdf {} sur la cn est {},
le ratio de calage sur la cn pour l\'annee {} est {}'''.format(
grosposte, year_data, ratio_bdf_cn, year_calage, ratio_cn_cn))
return depenses_calees
def get_bdf_data_frames(depenses, year_data = None):
assert year_data is not None
'''
Récupère les dépenses de budget des familles et les agrège par poste
(en tenant compte des poids respectifs des ménages)
'''
depenses_by_grosposte = pandas.DataFrame()
for grosposte in range(1, 13):
if depenses_by_grosposte is None:
depenses_by_grosposte = depenses['coicop12_{}'.format(grosposte)]
else:
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['coicop12_{}'.format(grosposte)]], axis = 1)
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['pondmen']], axis = 1)
grospostes_list = set(depenses_by_grosposte.columns)
grospostes_list.remove('pondmen')
dict_bdf_weighted_sum_by_grosposte = {}
for grosposte in grospostes_list:
depenses_by_grosposte['{}pond'.format(grosposte)] = (
depenses_by_grosposte[grosposte] * depenses_by_grosposte['pondmen']
)
dict_bdf_weighted_sum_by_grosposte[grosposte] = depenses_by_grosposte['{}pond'.format(grosposte)].sum()
df_bdf_weighted_sum_by_grosposte = pandas.DataFrame(
pandas.Series(
data = dict_bdf_weighted_sum_by_grosposte,
index = dict_bdf_weighted_sum_by_grosposte.keys()
)
)
return df_bdf_weighted_sum_by_grosposte
def get_cn_data_frames(year_data = None, year_calage = None):
assert year_data is not None
if year_calage is None:
year_calage = year_data
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
parametres_fiscalite_file_path = os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'legislation',
'Parametres fiscalite indirecte.xls'
)
masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = "consommation_CN")
if year_data != year_calage:
masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', year_data, year_calage]]
else:
masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', year_data]]
masses_cn_12postes_data_frame['code_unicode'] = masses_cn_12postes_data_frame.Code.astype(unicode)
masses_cn_12postes_data_frame['len_code'] = masses_cn_12postes_data_frame['code_unicode'].apply(lambda x: len(x))
# On ne garde que les 12 postes sur lesquels on cale:
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['len_code'] == 6]
masses_cn_12postes_data_frame['code'] = masses_cn_12postes_data_frame.Code.astype(int)
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame.drop(['len_code', 'code_unicode', 'Code'], 1)
if year_calage != year_data:
masses_cn_12postes_data_frame.rename(
columns = {
year_data: 'consoCN_COICOP_{}'.format(year_data),
year_calage: 'consoCN_COICOP_{}'.format(year_calage),
'code': 'poste'
},
inplace = True,
)
else:
masses_cn_12postes_data_frame.rename(
columns = {
year_data: 'consoCN_COICOP_{}'.format(year_data),
'code': 'poste'
},
inplace = True,
)
masses_cn_12postes_data_frame['poste'] = masses_cn_12postes_data_frame['poste'].astype(str)
for element in masses_cn_12postes_data_frame['poste']:
masses_cn_12postes_data_frame['poste'] = \
masses_cn_12postes_data_frame['poste'].replace(element, 'coicop12_{}'.format(element))
masses_cn_12postes_data_frame.set_index('poste', inplace = True)
return masses_cn_12postes_data_frame
def build_depenses_calees(depenses, year_calage, year_data):
# Masses de calage provenant de la comptabilité nationale
masses_cn_12postes_data_frame = get_cn_data_frames(year_data = year_data, year_calage = year_calage)
# Enquête agrégée au niveau des gros postes de COICOP (12)
df_bdf_weighted_sum_by_grosposte = get_bdf_data_frames(depenses = depenses, year_data = year_data)
# Calcul des ratios de calage :
masses = calcul_ratios_calage(
year_data,
year_calage,
data_bdf = df_bdf_weighted_sum_by_grosposte,
data_cn = masses_cn_12postes_data_frame
)
# Application des ratios de calage
depenses.index = depenses.index.astype(ident_men_dtype)
assert depenses.index.dtype == 'object', "depenses index is not an object"
depenses_calees = calage_viellissement_depenses(year_data, year_calage, depenses, masses)
return depenses_calees
def build_revenus_cales(revenus, year_calage, year_data):
# Masses de calage provenant de la comptabilité nationale
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
parametres_fiscalite_file_path = os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'legislation',
'Parametres fiscalite indirecte.xls',
)
masses_cn_revenus_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = "revenus_CN")
masses_cn_revenus_data_frame.rename(
columns = {
'annee': 'year',
'Revenu disponible brut': 'rev_disponible_cn',
'Loyers imputes': 'loyer_imput_cn'
},
inplace = True
)
masses_cn_revenus_data_frame = masses_cn_revenus_data_frame[masses_cn_revenus_data_frame.year == year_calage]
revenus = revenus[['pondmen'] + ['loyer_impute'] + ['rev_disponible'] + ['rev_disp_loyerimput']]
weighted_sum_revenus = (revenus.pondmen * revenus.rev_disponible).sum()
revenus.loyer_impute = revenus.loyer_impute.astype(float)
weighted_sum_loyer_impute = (revenus.pondmen * revenus.loyer_impute).sum()
rev_disponible_cn = masses_cn_revenus_data_frame.rev_disponible_cn.sum()
loyer_imput_cn = masses_cn_revenus_data_frame.loyer_imput_cn.sum()
revenus_cales = revenus.copy()
# Calcul des ratios de calage :
revenus_cales['ratio_revenus'] = (rev_disponible_cn * 1e9 - loyer_imput_cn * 1e9) / weighted_sum_revenus
revenus_cales['ratio_loyer_impute'] = loyer_imput_cn * 1e9 / weighted_sum_loyer_impute
# Application des ratios de calage
revenus_cales.rev_disponible = revenus.rev_disponible * revenus_cales['ratio_revenus']
revenus_cales.loyer_impute = revenus_cales.loyer_impute * revenus_cales['ratio_loyer_impute']
revenus_cales.rev_disp_loyerimput = revenus_cales.rev_disponible + revenus_cales.loyer_impute
return revenus_cales
def build_df_calee_on_grospostes(dataframe, year_calage = None, year_data = None):
assert year_data is not None
if year_calage is None:
year_calage = year_data
depenses_calees = build_depenses_calees(dataframe, year_calage, year_data)
revenus_cales = build_revenus_cales(dataframe, year_calage, year_data)
var_list = [variable for variable in dataframe.columns if variable[:5] != 'poste' and variable != 'loyer_impute' and
variable != 'rev_disponible' and variable != 'rev_disp_loyerimput' and variable != 'pondmen']
autres_variables = dataframe[var_list]
dataframe_calee = concat([depenses_calees, revenus_cales, autres_variables], axis = 1)
return dataframe_calee
def build_df_calee_on_ticpe(dataframe, year_calage = None, year_data = None):
assert year_data is not None
if year_calage is None:
year_calage = year_data
dataframe_calee = build_df_calee_on_grospostes(dataframe, year_calage, year_data)
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
parametres_fiscalite_file_path = os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'legislation',
'Parametres fiscalite indirecte.xls'
)
masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = "consommation_CN")
if year_data != year_calage:
masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', year_data, year_calage]]
else:
masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', year_data]]
masses_ticpe_cn = int(
masses_cn_12postes_data_frame[year_calage][masses_cn_12postes_data_frame['Code'] == ' 07.2.2'].values
)
masses_ticpe_bdf = (dataframe['poste_coicop_722'] * dataframe['pondmen']).sum() / 1e6
ratio_ticpe = masses_ticpe_cn / masses_ticpe_bdf
dataframe['poste_coicop_722'] = dataframe['poste_coicop_722'] * ratio_ticpe
dataframe_calee['poste_coicop_722'] = dataframe['poste_coicop_722']
return dataframe_calee
|
def get_bdf_data_frames(depenses, year_data = None):
    """Aggregate Budget des Familles expenses by the 12 COICOP groups.

    Each household's expense per ``coicop12_*`` group is weighted by its
    survey weight (``pondmen``) before summing.

    Args:
        depenses: DataFrame with ``coicop12_1`` .. ``coicop12_12`` expense
            columns and a ``pondmen`` weight column.
        year_data: Survey year; required (kept for interface compatibility).

    Returns:
        DataFrame with one row per ``coicop12_*`` group and a single
        column (named ``0``) holding the weighted expense totals.
    """
    assert year_data is not None
    weights = depenses['pondmen']
    # The original built intermediate '*pond' columns and carried a dead
    # `is None` branch (the accumulator was initialised to an empty
    # DataFrame, never None). Computing each weighted sum directly gives
    # the same totals, with a deterministic row order instead of set order.
    weighted_sum_by_grosposte = {
        'coicop12_{}'.format(grosposte):
            (depenses['coicop12_{}'.format(grosposte)] * weights).sum()
        for grosposte in range(1, 13)
    }
    return pandas.DataFrame(pandas.Series(weighted_sum_by_grosposte))
(en tenant compte des poids respectifs des ménages) | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/calage_bdf_cn.py#L113-L141 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Ce script comprend plusieurs fonctions, dont le but est de caler les données Budget des Familles sur des agrégats.
# L'agrégation des données BdF rendant des totaux différents de ceux de la comptabilité nationale, ces calages sont
# importants pour restaurer les bonnes quantités. Plusieurs méthodes sont proposées.
# Import de modules généraux
from __future__ import division
import logging
import os
import pkg_resources
import pandas
from pandas import concat
# Import de modules spécifiques à Openfisca
from openfisca_france_indirect_taxation.build_survey_data.utils import ident_men_dtype
log = logging.getLogger(__name__)
def calage_viellissement_depenses(year_data, year_calage, depenses, masses):
    """Scale (and age) every COICOP expenditure column of ``depenses``.

    For each ``poste_coicop_*`` column the matching broad group (``grosposte``,
    one of the 12 national-accounts COICOP groups) is derived from the COICOP
    code, then the column is multiplied by the two ratios read from ``masses``:
    ``ratio_bdf{year_data}_cn{year_data}`` (survey -> national accounts) and
    ``ratio_cn{year_data}_cn{year_calage}`` (ageing to the calibration year).

    Columns mapped to the pseudo-group 99 (codes starting with '99' or '13')
    are skipped, so they are absent from the returned DataFrame.
    """
    depenses_calees = pandas.DataFrame()
    # Only the expenditure columns ('poste_coicop_...') are calibrated.
    coicop_list = set(poste_coicop for poste_coicop in depenses.columns if poste_coicop[:5] == 'poste')
    for column in coicop_list:
        coicop = column.replace('poste_coicop_', '')
        # Map the detailed COICOP code onto one of the 12 broad groups.
        if coicop[:1] != '1' and coicop[:1] != '9':
            grosposte = int(coicop[:1])
        else:
            # Codes starting with '1' or '9' are ambiguous (groups 1, 9, 10,
            # 11, 12, 13, 99 all start with those digits) and need the extra
            # disambiguation below; the order of the tests matters.
            if len(coicop) == 3:
                grosposte = int(coicop[:1])
            elif len(coicop) == 5:
                grosposte = int(coicop[:2])
            elif coicop in ['1151', '1181', '1411', '9122', '9151', '9211', '9341']:
                # Explicit exceptions whose leading digit is the group number.
                grosposte = int(coicop[:1])
            elif coicop[:2] == '99' or coicop[:2] == '13':
                # Pseudo-group 99: excluded from the calibration.
                grosposte = 99
            else:
                grosposte = int(coicop[:2])
        # Reminder: the 12 national-accounts / COICOP groups are
        # 01 Food and non-alcoholic beverages
        # 02 Alcoholic beverages and tobacco
        # 03 Clothing and footwear
        # 04 Housing, water, gas, electricity and other fuels
        # 05 Furnishings, household equipment and routine maintenance
        # 06 Health
        # 07 Transport
        # 08 Communication
        # 09 Recreation and culture
        # 10 Education
        # 11 Hotels, cafes and restaurants
        # 12 Miscellaneous goods and services
        if grosposte != 99:
            grosposte = 'coicop12_{}'.format(grosposte)
            ratio_bdf_cn = masses.at[grosposte, 'ratio_bdf{}_cn{}'.format(year_data, year_data)]
            ratio_cn_cn = masses.at[grosposte, 'ratio_cn{}_cn{}'.format(year_data, year_calage)]
            depenses_calees[column] = depenses[column] * ratio_bdf_cn * ratio_cn_cn
            log.info(u'''
Pour le grosposte {}, le ratio de calage de la base bdf {} sur la cn est {},
le ratio de calage sur la cn pour l\'annee {} est {}'''.format(
                grosposte, year_data, ratio_bdf_cn, year_calage, ratio_cn_cn))
    return depenses_calees
def calcul_ratios_calage(year_data, year_calage, data_bdf, data_cn):
    '''
    Compute the calibration ratio (survey -> national accounts for the data
    year) and the ageing ratio (data year -> calibration year) from the
    national-accounts masses (``data_cn``) and the aggregated survey
    consumption (``data_bdf``, a one-column frame whose column is named 0).
    '''
    bdf_column = 'conso_bdf{}'.format(year_data)
    cn_data_column = 'consoCN_COICOP_{}'.format(year_data)
    ageing_column = 'ratio_cn{}_cn{}'.format(year_data, year_calage)
    # Align both sources on their (grosposte) index.
    merged = data_cn.merge(data_bdf, left_index = True, right_index = True)
    merged.rename(columns = {0: bdf_column}, inplace = True)
    if year_calage != year_data:
        # Ageing: national-accounts growth between the two years.
        merged[ageing_column] = (
            merged['consoCN_COICOP_{}'.format(year_calage)] / merged[cn_data_column]
            )
    else:
        # Same year: no ageing.
        merged[ageing_column] = 1
    # National accounts are in millions of euros, the survey in euros.
    merged['ratio_bdf{}_cn{}'.format(year_data, year_data)] = (
        1e6 * merged[cn_data_column] / merged[bdf_column]
        )
    return merged
def get_cn_data_frames(year_data = None, year_calage = None):
    """Load the national-accounts consumption masses for the 12 COICOP groups.

    Reads the 'consommation_CN' sheet of the indirect-taxation parameter
    workbook shipped with the package and returns a DataFrame indexed by
    'coicop12_<n>' with one column per requested year
    ('consoCN_COICOP_<year>'). ``year_calage`` defaults to ``year_data``
    (no ageing).
    """
    assert year_data is not None
    if year_calage is None:
        year_calage = year_data
    # Locate the parameter workbook inside the installed package.
    default_config_files_directory = os.path.join(
        pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
    parametres_fiscalite_file_path = os.path.join(
        default_config_files_directory,
        'openfisca_france_indirect_taxation',
        'assets',
        'legislation',
        'Parametres fiscalite indirecte.xls'
        )
    # NOTE(review): `sheetname` is the legacy pandas keyword (renamed
    # `sheet_name` in later pandas) — assumes the pandas version pinned here.
    masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = "consommation_CN")
    if year_data != year_calage:
        masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', year_data, year_calage]]
    else:
        masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', year_data]]
    # Python 2 codebase: `unicode` is the builtin text type here.
    masses_cn_12postes_data_frame['code_unicode'] = masses_cn_12postes_data_frame.Code.astype(unicode)
    masses_cn_12postes_data_frame['len_code'] = masses_cn_12postes_data_frame['code_unicode'].apply(lambda x: len(x))
    # Keep only the 12 broad groups used for calibration (6-character codes).
    masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['len_code'] == 6]
    masses_cn_12postes_data_frame['code'] = masses_cn_12postes_data_frame.Code.astype(int)
    masses_cn_12postes_data_frame = masses_cn_12postes_data_frame.drop(['len_code', 'code_unicode', 'Code'], 1)
    if year_calage != year_data:
        masses_cn_12postes_data_frame.rename(
            columns = {
                year_data: 'consoCN_COICOP_{}'.format(year_data),
                year_calage: 'consoCN_COICOP_{}'.format(year_calage),
                'code': 'poste'
                },
            inplace = True,
            )
    else:
        masses_cn_12postes_data_frame.rename(
            columns = {
                year_data: 'consoCN_COICOP_{}'.format(year_data),
                'code': 'poste'
                },
            inplace = True,
            )
    # Prefix each group number with 'coicop12_' (value-by-value exact replace;
    # iterates over the original Series snapshot, so each original value is
    # rewritten exactly once).
    masses_cn_12postes_data_frame['poste'] = masses_cn_12postes_data_frame['poste'].astype(str)
    for element in masses_cn_12postes_data_frame['poste']:
        masses_cn_12postes_data_frame['poste'] = \
            masses_cn_12postes_data_frame['poste'].replace(element, 'coicop12_{}'.format(element))
    masses_cn_12postes_data_frame.set_index('poste', inplace = True)
    return masses_cn_12postes_data_frame
def build_depenses_calees(depenses, year_calage, year_data):
    """Return the survey expenditures calibrated on national accounts.

    Combines the national-accounts masses, the survey aggregates by broad
    COICOP group and the resulting calibration/ageing ratios, then applies
    those ratios to every expenditure column. The index of ``depenses`` is
    coerced to the household-identifier dtype in place.
    """
    # Calibration targets from the national accounts.
    cn_masses = get_cn_data_frames(year_data = year_data, year_calage = year_calage)
    # Survey aggregated over the 12 broad COICOP groups (household weights included).
    bdf_aggregates = get_bdf_data_frames(depenses = depenses, year_data = year_data)
    # Calibration ratios.
    masses = calcul_ratios_calage(
        year_data,
        year_calage,
        data_bdf = bdf_aggregates,
        data_cn = cn_masses,
        )
    # Harmonise the household-identifier dtype before applying the ratios.
    depenses.index = depenses.index.astype(ident_men_dtype)
    assert depenses.index.dtype == 'object', "depenses index is not an object"
    # Apply the calibration ratios.
    return calage_viellissement_depenses(year_data, year_calage, depenses, masses)
def build_revenus_cales(revenus, year_calage, year_data):
    """Calibrate household income aggregates on the national accounts.

    Disposable income and imputed rents are each scaled so that their
    weighted survey totals match the national-accounts masses (expressed in
    billions of euros) for ``year_calage``. ``year_data`` is kept for
    signature consistency with the other calibration helpers but is unused.
    """
    # National-accounts income masses from the package workbook.
    package_location = pkg_resources.get_distribution('openfisca_france_indirect_taxation').location
    workbook_path = os.path.join(
        package_location,
        'openfisca_france_indirect_taxation',
        'assets',
        'legislation',
        'Parametres fiscalite indirecte.xls',
        )
    cn_revenus = pandas.read_excel(workbook_path, sheetname = "revenus_CN")
    cn_revenus.rename(
        columns = {
            'annee': 'year',
            'Revenu disponible brut': 'rev_disponible_cn',
            'Loyers imputes': 'loyer_imput_cn',
            },
        inplace = True,
        )
    cn_revenus = cn_revenus[cn_revenus.year == year_calage]
    # Restrict the survey to the income columns being calibrated.
    revenus = revenus[['pondmen', 'loyer_impute', 'rev_disponible', 'rev_disp_loyerimput']]
    weighted_sum_revenus = (revenus['pondmen'] * revenus['rev_disponible']).sum()
    revenus['loyer_impute'] = revenus['loyer_impute'].astype(float)
    weighted_sum_loyer_impute = (revenus['pondmen'] * revenus['loyer_impute']).sum()
    rev_disponible_cn = cn_revenus.rev_disponible_cn.sum()
    loyer_imput_cn = cn_revenus.loyer_imput_cn.sum()
    revenus_cales = revenus.copy()
    # Calibration ratios (national accounts are in billions of euros).
    revenus_cales['ratio_revenus'] = (rev_disponible_cn * 1e9 - loyer_imput_cn * 1e9) / weighted_sum_revenus
    revenus_cales['ratio_loyer_impute'] = loyer_imput_cn * 1e9 / weighted_sum_loyer_impute
    # Apply the ratios.
    revenus_cales['rev_disponible'] = revenus['rev_disponible'] * revenus_cales['ratio_revenus']
    revenus_cales['loyer_impute'] = revenus_cales['loyer_impute'] * revenus_cales['ratio_loyer_impute']
    revenus_cales['rev_disp_loyerimput'] = revenus_cales['rev_disponible'] + revenus_cales['loyer_impute']
    return revenus_cales
def build_df_calee_on_grospostes(dataframe, year_calage = None, year_data = None):
    """Return the survey calibrated on the 12 broad COICOP groups.

    Expenditure columns are rescaled by ``build_depenses_calees``, income
    columns by ``build_revenus_cales``; every other column is passed through
    unchanged. ``year_calage`` defaults to ``year_data``.
    """
    assert year_data is not None
    if year_calage is None:
        year_calage = year_data
    depenses_calees = build_depenses_calees(dataframe, year_calage, year_data)
    revenus_cales = build_revenus_cales(dataframe, year_calage, year_data)
    # Columns handled by neither calibration step are kept as-is.
    handled = ('loyer_impute', 'rev_disponible', 'rev_disp_loyerimput', 'pondmen')
    passthrough_columns = [
        variable for variable in dataframe.columns
        if variable[:5] != 'poste' and variable not in handled
        ]
    return concat([depenses_calees, revenus_cales, dataframe[passthrough_columns]], axis = 1)
def build_df_calee_on_ticpe(dataframe, year_calage = None, year_data = None):
    """Calibrate the survey on broad groups, then re-calibrate fuel spending.

    After the 12-group calibration, the fuel expenditure column
    ('poste_coicop_722', which drives the TICPE simulation) is rescaled on
    its own national-accounts mass (code ' 07.2.2'). Note that the input
    ``dataframe``'s 'poste_coicop_722' column is rescaled in place as well.
    """
    assert year_data is not None
    if year_calage is None:
        year_calage = year_data
    dataframe_calee = build_df_calee_on_grospostes(dataframe, year_calage, year_data)
    # National-accounts consumption masses from the package workbook.
    package_location = pkg_resources.get_distribution('openfisca_france_indirect_taxation').location
    workbook_path = os.path.join(
        package_location,
        'openfisca_france_indirect_taxation',
        'assets',
        'legislation',
        'Parametres fiscalite indirecte.xls'
        )
    masses_cn = pandas.read_excel(workbook_path, sheetname = "consommation_CN")
    columns = ['Code', year_data] if year_data == year_calage else ['Code', year_data, year_calage]
    masses_cn = masses_cn.loc[:, columns]
    # Fuel mass in the national accounts (millions of euros).
    masses_ticpe_cn = int(
        masses_cn[year_calage][masses_cn['Code'] == ' 07.2.2'].values
        )
    # Weighted survey total for the same item, converted to millions.
    masses_ticpe_bdf = (dataframe['poste_coicop_722'] * dataframe['pondmen']).sum() / 1e6
    ratio_ticpe = masses_ticpe_cn / masses_ticpe_bdf
    dataframe['poste_coicop_722'] = dataframe['poste_coicop_722'] * ratio_ticpe
    dataframe_calee['poste_coicop_722'] = dataframe['poste_coicop_722']
    return dataframe_calee
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/step_1_2_imputations_loyers_proprietaires.py | build_imputation_loyers_proprietaires | python | def build_imputation_loyers_proprietaires(temporary_store = None, year = None):
    # Build the imputed-rent table per household and merge it into the
    # COICOP expenditure table stored in the temporary store.
    assert temporary_store is not None
    assert year is not None
    # Load data
    bdf_survey_collection = SurveyCollection.load(collection = 'budget_des_familles',
        config_files_directory = config_files_directory)
    survey = bdf_survey_collection.get_survey('budget_des_familles_{}'.format(year))
    if year == 1995:
        imput00 = survey.get_values(table = "socioscm")
        # Keep only records whose quality is certified:
        # exdep == 1: household expenditure data are properly filled in
        # exrev == 1: household income data are properly filled in
        imput00 = imput00[(imput00.exdep == 1) & (imput00.exrev == 1)]
        # NOTE(review): the filter above is applied twice in the original
        # code; the second application is a harmless no-op.
        imput00 = imput00[(imput00.exdep == 1) & (imput00.exrev == 1)]
        kept_variables = ['mena', 'stalog', 'surfhab', 'confort1', 'confort2', 'confort3', 'confort4',
            'ancons', 'sitlog', 'nbphab', 'rg', 'cc']
        imput00 = imput00[kept_variables]
        imput00.rename(columns = {'mena': 'ident_men'}, inplace = True)
        # TODO: continue variable cleaning
        var_to_filnas = ['surfhab']
        for var_to_filna in var_to_filnas:
            imput00[var_to_filna] = imput00[var_to_filna].fillna(0)
        var_to_ints = ['sitlog', 'confort1', 'stalog', 'surfhab', 'ident_men', 'ancons', 'nbphab']
        for var_to_int in var_to_ints:
            imput00[var_to_int] = imput00[var_to_int].astype(int)
        depenses = temporary_store['depenses_{}'.format(year)]
        depenses.reset_index(inplace = True)
        # '04110' is the actual-rent expenditure item (COICOP 04.1.1).
        depenses_small = depenses[['ident_men', '04110', 'pondmen']].copy()
        depenses_small.ident_men = depenses_small.ident_men.astype('int')
        imput00 = depenses_small.merge(imput00, on = 'ident_men').set_index('ident_men')
        imput00.rename(columns = {'04110': 'loyer_reel'}, inplace = True)
        # Indicator: the rent is observed and the occupant is a tenant
        # (stalog in {3, 4}).
        imput00['observe'] = (imput00.loyer_reel > 0) & (imput00.stalog.isin([3, 4]))
        imput00['maison_appart'] = imput00.sitlog == 1
        # Dwelling-surface category (1 to 8), built from nested thresholds.
        imput00['catsurf'] = (
            1 +
            (imput00.surfhab > 15) +
            (imput00.surfhab > 30) +
            (imput00.surfhab > 40) +
            (imput00.surfhab > 60) +
            (imput00.surfhab > 80) +
            (imput00.surfhab > 100) +
            (imput00.surfhab > 150)
            )
        assert imput00.catsurf.isin(range(1, 9)).all()
        # TODO: check what is being done here, in particular the value
        # catsurf == 2 omitted from the original Stata code.
        # NOTE(review): each of the four statements below rebinds the same
        # `maison` attribute, so only the last assignment survives; attribute
        # assignment also does not create a DataFrame column — confirm intent.
        imput00.maison = 1 - ((imput00.cc == 5) & (imput00.catsurf == 1) & (imput00.maison_appart == 1))
        imput00.maison = 1 - ((imput00.cc == 5) & (imput00.catsurf == 3) & (imput00.maison_appart == 1))
        imput00.maison = 1 - ((imput00.cc == 5) & (imput00.catsurf == 8) & (imput00.maison_appart == 1))
        imput00.maison = 1 - ((imput00.cc == 4) & (imput00.catsurf == 1) & (imput00.maison_appart == 1))
        try:
            # Prefer the hotdeck imputation results shipped in the configured
            # assets directory.
            parser = SafeConfigParser()
            config_local_ini = os.path.join(config_files_directory, 'config_local.ini')
            config_ini = os.path.join(config_files_directory, 'config.ini')
            parser.read([config_ini, config_local_ini])
            directory_path = os.path.normpath(
                parser.get("openfisca_france_indirect_taxation", "assets")
                )
            hotdeck = pandas.read_stata(os.path.join(directory_path, 'hotdeck_result.dta'))
        except:
            # Fall back on the survey-provided hotdeck table.
            hotdeck = survey.get_values(table = 'hotdeck_result')
        imput00.reset_index(inplace = True)
        hotdeck.ident_men = hotdeck.ident_men.astype('int')
        imput00 = imput00.merge(hotdeck, on = 'ident_men')
        # Households with an observed rent get no imputed rent.
        imput00.loyer_impute[imput00.observe] = 0
        imput00.reset_index(inplace = True)
        loyers_imputes = imput00[['ident_men', 'loyer_impute']].copy()
        assert loyers_imputes.loyer_impute.notnull().all()
        loyers_imputes.rename(columns = dict(loyer_impute = '0411'), inplace = True)
    # For BdF 2000 and 2005 the imputed rents computed by INSEE are used.
    if year == 2000:
        # Keep the imputed rents (available in the household table).
        loyers_imputes = survey.get_values(table = "menage", variables = ['ident', 'rev81'])
        loyers_imputes.rename(
            columns = {
                'ident': 'ident_men',
                'rev81': 'poste_coicop_421',
                },
            inplace = True,
            )
    if year == 2005:
        # Keep the imputed rents (available in the household table).
        loyers_imputes = survey.get_values(table = "menage")
        kept_variables = ['ident_men', 'rev801_d']
        loyers_imputes = loyers_imputes[kept_variables]
        loyers_imputes.rename(columns = {'rev801_d': 'poste_coicop_421'}, inplace = True)
    if year == 2011:
        # Table-name case differs across survey deliveries; try both.
        try:
            loyers_imputes = survey.get_values(table = "MENAGE")
        except:
            loyers_imputes = survey.get_values(table = "menage")
        kept_variables = ['ident_me', 'rev801']
        loyers_imputes = loyers_imputes[kept_variables]
        loyers_imputes.rename(columns = {'rev801': 'poste_coicop_421', 'ident_me': 'ident_men'},
            inplace = True)
    # Join with the expenditure table indexed by COICOP code.
    loyers_imputes.set_index('ident_men', inplace = True)
    temporary_store['loyers_imputes_{}'.format(year)] = loyers_imputes
    depenses = temporary_store['depenses_{}'.format(year)]
    depenses.index = depenses.index.astype('int64')
    loyers_imputes.index = loyers_imputes.index.astype('int64')
    assert set(depenses.index) == set(loyers_imputes.index)
    assert len(set(depenses.columns).intersection(set(loyers_imputes.columns))) == 0
    depenses = depenses.merge(loyers_imputes, left_index = True, right_index = True)
    # ****************************************************************************************************************
    # Step 0-1-3: save the homogenised expenditure tables in the right place
    # ****************************************************************************************************************
    # Save in temporary store
temporary_store['depenses_bdf_{}'.format(year)] = depenses | Build menage consumption by categorie fiscale dataframe | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/step_1_2_imputations_loyers_proprietaires.py#L50-L178 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import os
from ConfigParser import SafeConfigParser
import logging
import pandas
from openfisca_survey_manager.temporary import temporary_store_decorator
from openfisca_survey_manager import default_config_files_directory as config_files_directory
from openfisca_survey_manager.survey_collections import SurveyCollection
log = logging.getLogger(__name__)
# **************************************************************************************************************************
# * Etape n° 0-1-2 : IMPUTATION DE LOYERS POUR LES MENAGES PROPRIETAIRES
# **************************************************************************************************************************
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'indirect_taxation_tmp')
if __name__ == '__main__':
    # Ad-hoc entry point: run the 1995 rent-imputation step and log its duration.
    import sys
    import time
    logging.basicConfig(level = logging.INFO, stream = sys.stdout)
    # NOTE(review): time.clock() is Python 2 only (removed in Python 3.8).
    deb = time.clock()
    year = 1995
    build_imputation_loyers_proprietaires(year = year)
    log.info("step 0_1_2_build_imputation_loyers_proprietaires duration is {}".format(time.clock() - deb))
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/step_4_homogeneisation_revenus_menages.py | build_homogeneisation_revenus_menages | python | def build_homogeneisation_revenus_menages(temporary_store = None, year = None):
    # Homogenise household income data across survey vintages and build a
    # proxy of household disposable income, stored as 'revenus_<year>'.
    assert temporary_store is not None
    assert year is not None
    # Load data
    bdf_survey_collection = SurveyCollection.load(
        collection = 'budget_des_familles', config_files_directory = config_files_directory)
    survey = bdf_survey_collection.get_survey('budget_des_familles_{}'.format(year))
    # ******************************************************************************************************************
    # ********************************* HOMOGENISATION OF HOUSEHOLD INCOME DATA ***************************************
    # ******************************* COMPUTATION OF A PROXY OF DISPOSABLE INCOME *************************************
    # ******************************************************************************************************************
    #
    # The 1995 survey distinguishes housing tax from property tax; their
    # relative shares are computed here and reused for the 2000 and 2005
    # surveys.
    if year == 1995:
        menrev = survey.get_values(
            table = "menrev",
            variables = [
                'revtot', 'ir', 'irbis', 'imphab', 'impfon', 'revaid', 'revsal', 'revind', 'revsec', 'revret',
                'revcho', 'revfam', 'revlog', 'revinv', 'revrmi', 'revpat', 'mena', 'ponderr'
                ],
            )
        menage = survey.get_values(
            table = "socioscm",
            variables = ['exdep', 'exrev', 'mena']
            )
        # NOTE(review): set_index is not in place here (no inplace=True) and
        # the result is discarded, so the merge below joins on the original
        # positional indexes — confirm this is intended.
        menage.set_index('mena')
        menrev = menrev.merge(menage, left_index = True, right_index = True)
        # Keep only records whose quality is certified:
        # exdep == 1: household expenditure data are properly filled in
        # exrev == 1: household income data are properly filled in
        menrev = menrev[(menrev.exdep == 1) & (menrev.exrev == 1)]
        menrev['foncier_hab'] = menrev.imphab + menrev.impfon
        menrev['part_IMPHAB'] = menrev.imphab / menrev.foncier_hab
        menrev['part_IMPFON'] = menrev.impfon / menrev.foncier_hab
        # Social transfers: pensions, unemployment, family, housing,
        # invalidity and RMI benefits.
        menrev['revsoc'] = (
            menrev.revret + menrev.revcho + menrev.revfam + menrev.revlog + menrev.revinv + menrev.revrmi
            )
        for variable in ['revcho', 'revfam', 'revinv', 'revlog', 'revret', 'revrmi']:
            del menrev[variable]
        menrev['revact'] = menrev['revsal'] + menrev['revind'] + menrev['revsec']
        menrev.rename(
            columns = dict(
                revpat = "revpat",
                impfon = "impfon",
                imphab = "imphab",
                revaid = "somme_obl_recue",
                ),
            inplace = True
            )
        menrev['impot_revenu'] = menrev['ir'] + menrev['irbis']
        rev_disp = survey.get_values(
            table = "menrev",
            variables = ['revtot', 'revret', 'revcho', 'revfam', 'revlog', 'revinv', 'revrmi', 'imphab', 'impfon',
                'revaid', 'revsal', 'revind', 'revsec', 'revpat', 'mena', 'ponderr', 'ir', 'irbis'],
            )
        rev_disp.set_index('mena', inplace=True)
        menage2 = survey.get_values(
            table = "socioscm",
            variables = ['exdep', 'exrev', 'mena']
            )
        menage2.set_index('mena', inplace = True)
        rev_disp = menage2.merge(rev_disp, left_index = True, right_index = True)
        rev_disp = rev_disp[(rev_disp.exrev == 1) & (rev_disp.exdep == 1)]
        rev_disp['revsoc'] = (
            rev_disp['revret'] + rev_disp['revcho'] + rev_disp['revfam'] + rev_disp['revlog'] + rev_disp['revinv'] +
            rev_disp['revrmi']
            )
        rev_disp['impot_revenu'] = rev_disp['ir'] + rev_disp['irbis']
        rev_disp.rename(
            columns = dict(
                revaid = 'somme_obl_recue',
                ),
            inplace = True
            )
        rev_disp.somme_obl_recue = rev_disp.somme_obl_recue.fillna(0)
        rev_disp['revact'] = rev_disp['revsal'] + rev_disp['revind'] + rev_disp['revsec']
        rev_disp['revtot'] = rev_disp['revact'] + rev_disp['revpat'] + rev_disp['revsoc'] + rev_disp['somme_obl_recue']
        # NOTE(review): duplicated assignment, identical to two lines above (no-op).
        rev_disp['revact'] = rev_disp['revsal'] + rev_disp['revind'] + rev_disp['revsec']
        rev_disp.rename(
            columns = dict(
                ponderr = "pondmen",
                mena = "ident_men",
                revind = "act_indpt",
                revsal = "salaires",
                revsec = "autres_rev",
                ),
            inplace = True
            )
        # Variables absent from the 1995 survey are filled with the string '0'.
        rev_disp['autoverses'] = '0'
        rev_disp['somme_libre_recue'] = '0'
        rev_disp['autres_ress'] = '0'
        #
        # Disposable income is computed from revtot by subtracting housing tax
        # and income tax (plus, possibly, CSG and CRDS). revtot is the sum of
        # labour income, social transfers, property income and assistance.
        #
        rev_disp['rev_disponible'] = rev_disp.revtot - rev_disp.impot_revenu - rev_disp.imphab
        loyers_imputes = temporary_store['depenses_bdf_{}'.format(year)]
        loyers_imputes.rename(
            columns = {"0411": "loyer_impute"},
            inplace = True,
            )
        rev_dispbis = loyers_imputes.merge(rev_disp, left_index = True, right_index = True)
        # NOTE(review): imputed rent is *subtracted* here, whereas the 2000,
        # 2005 and 2011 branches *add* it — confirm which convention is intended.
        rev_disp['rev_disp_loyerimput'] = rev_disp['rev_disponible'] - rev_dispbis['loyer_impute']
        # Convert from francs to euros.
        for var in ['somme_obl_recue', 'act_indpt', 'revpat', 'salaires', 'autres_rev', 'rev_disponible', 'impfon',
            'imphab', 'revsoc', 'revact', 'impot_revenu', 'revtot', 'rev_disp_loyerimput']:
            rev_disp[var] = rev_disp[var] / 6.55957  # CONVERSION EN EUROS
        temporary_store["revenus_{}".format(year)] = rev_disp
    elif year == 2000:
        # TODO: rather fetch the variables coming from the expenditure table (in temporary_store)
        rev_disp = survey.get_values(
            table = "consomen",
            variables = ['c13141', 'c13111', 'c13121', 'c13131', 'pondmen', 'ident'],
            )
        menage = survey.get_values(
            table = "menage",
            variables = ['ident', 'revtot', 'revact', 'revsoc', 'revpat', 'rev70', 'rev71', 'revt_d', 'pondmen',
                'rev10', 'rev11', 'rev20', 'rev21'],
            ).sort_values(by = ['ident'])
        menage.index = menage.index.astype(ident_men_dtype)
        rev_disp.index = rev_disp.index.astype(ident_men_dtype)
        revenus = menage.join(rev_disp, how = "outer", rsuffix = "rev_disp")
        revenus.fillna(0, inplace = True)
        revenus.rename(
            columns = dict(
                c13111 = "impot_res_ppal",
                c13141 = "impot_revenu",
                c13121 = "impot_autres_res",
                rev70 = "somme_obl_recue",
                rev71 = "somme_libre_recue",
                revt_d = "autres_ress",
                ident = "ident_men",
                rev10 = "act_indpt",
                rev11 = "autoverses",
                rev20 = "salaires",
                rev21 = "autres_rev",
                ),
            inplace = True
            )
        var_to_ints = ['pondmen', 'impot_autres_res', 'impot_res_ppal', 'pondmenrev_disp', 'c13131']
        for var_to_int in var_to_ints:
            revenus.loc[revenus[var_to_int].isnull(), var_to_int] = 0
            revenus[var_to_int] = revenus[var_to_int].astype(int)
        # The 0.65 / 0.35 weights come from the 1995 survey, which separates
        # housing tax (on average 65% of local taxes) from property tax (35%).
        revenus['imphab'] = 0.65 * (revenus.impot_res_ppal + revenus.impot_autres_res)
        revenus['impfon'] = 0.35 * (revenus.impot_res_ppal + revenus.impot_autres_res)
        loyers_imputes = temporary_store["depenses_bdf_{}".format(year)]
        variables = ["poste_coicop_421"]
        loyers_imputes = loyers_imputes[variables]
        loyers_imputes.rename(
            columns = {"poste_coicop_421": "loyer_impute"},
            inplace = True,
            )
        temporary_store["loyers_imputes_{}".format(year)] = loyers_imputes
        loyers_imputes.index = loyers_imputes.index.astype(ident_men_dtype)
        revenus.set_index('ident_men', inplace = True)
        revenus.index = revenus.index.astype(ident_men_dtype)
        assert set(revenus.index) == set(loyers_imputes.index), 'revenus and loyers_imputes indexes are not equal'
        revenus = revenus.merge(loyers_imputes, left_index = True, right_index = True)
        # Disposable income, floored at zero.
        revenus['rev_disponible'] = revenus.revtot - revenus.impot_revenu - revenus.imphab
        revenus['rev_disponible'] = revenus['rev_disponible'] * (revenus['rev_disponible'] >= 0)
        revenus['rev_disp_loyerimput'] = revenus.rev_disponible + revenus.loyer_impute
        var_to_ints = ['loyer_impute']
        for var_to_int in var_to_ints:
            revenus[var_to_int] = revenus[var_to_int].astype(int)
        temporary_store["revenus_{}".format(year)] = revenus
    elif year == 2005:
        c05d = survey.get_values(
            table = "c05d",
            variables = ['c13111', 'c13121', 'c13141', 'pondmen', 'ident_men'],
            )
        rev_disp = c05d.sort_values(by = ['ident_men'])
        del c05d
        menage = survey.get_values(
            table = "menage",
            variables = ['ident_men', 'revtot', 'revact', 'revsoc', 'revpat', 'rev700_d', 'rev701_d',
                'rev999_d', 'rev100_d', 'rev101_d', 'rev200_d', 'rev201_d'],
            ).sort_values(by = ['ident_men'])
        rev_disp.set_index('ident_men', inplace = True)
        menage.set_index('ident_men', inplace = True)
        menage.index = menage.index.astype('str')
        rev_disp.index = rev_disp.index.astype('str')
        assert menage.index.dtype == rev_disp.index.dtype, 'menage ({}) and revdisp ({}) dtypes differs'.format(
            menage.index.dtype, rev_disp.index.dtype)
        revenus = pandas.concat([menage, rev_disp], axis = 1)
        assert len(menage.index) == len(revenus.index)
        revenus.rename(
            columns = dict(
                rev100_d = "act_indpt",
                rev101_d = "autoverses",
                rev200_d = "salaires",
                rev201_d = "autres_rev",
                rev700_d = "somme_obl_recue",
                rev701_d = "somme_libre_recue",
                rev999_d = "autres_ress",
                c13111 = "impot_res_ppal",
                c13141 = "impot_revenu",
                c13121 = "impot_autres_res",
                ),
            inplace = True
            )
        # The 0.65 / 0.35 weights come from the 1995 BdF survey, which
        # distinguishes housing tax from property tax: housing tax is on
        # average 65% of local taxes and property tax 35%. These rates are
        # applied to the 2000 and 2005 surveys.
        revenus['imphab'] = 0.65 * (revenus.impot_res_ppal + revenus.impot_autres_res)
        revenus['impfon'] = 0.35 * (revenus.impot_res_ppal + revenus.impot_autres_res)
        del revenus['impot_autres_res']
        del revenus['impot_res_ppal']
        # Compute disposable income with and without imputed rent.
        loyers_imputes = temporary_store["depenses_bdf_{}".format(year)]
        variables = ["poste_coicop_421"]
        loyers_imputes = loyers_imputes[variables]
        loyers_imputes.rename(
            columns = {"poste_coicop_421": "loyer_impute"},
            inplace = True,
            )
        temporary_store["loyers_imputes_{}".format(year)] = loyers_imputes
        loyers_imputes.index = loyers_imputes.index.astype('str')
        assert revenus.index.dtype == loyers_imputes.index.dtype
        assert set(revenus.index) == set(loyers_imputes.index), '''revenus and loyers_imputes indexes are not equal.
In revenus and not in loyers_imputes:
{}
In loyers_imputes and not in revenus:
{}
'''.format(set(revenus.index) - set(loyers_imputes.index), set(loyers_imputes.index) - set(revenus.index))
        revenus = revenus.merge(loyers_imputes, left_index = True, right_index = True)
        revenus['rev_disponible'] = revenus.revtot - revenus.impot_revenu - revenus.imphab
        revenus['rev_disponible'] = revenus['rev_disponible'] * (revenus['rev_disponible'] >= 0)
        revenus['rev_disp_loyerimput'] = revenus.rev_disponible + revenus.loyer_impute
        temporary_store["revenus_{}".format(year)] = revenus
    elif year == 2011:
        # Table-name case differs across survey deliveries; try both.
        try:
            c05 = survey.get_values(
                table = "C05",
                variables = ['c13111', 'c13121', 'c13141', 'pondmen', 'ident_me'],
                )
            rev_disp = c05.sort_values(by = ['ident_me'])
        except:
            c05 = survey.get_values(
                table = "c05",
                variables = ['c13111', 'c13121', 'c13141', 'pondmen', 'ident_me'],
                )
            rev_disp = c05.sort_values(by = ['ident_me'])
        del c05
        try:
            menage = survey.get_values(
                table = "MENAGE",
                variables = ['ident_me', 'revtot', 'revact', 'revsoc', 'revpat', 'rev700', 'rev701', 'rev999',
                    'revindep', 'salaires'],
                ).sort_values(by = ['ident_me'])
        except:
            menage = survey.get_values(
                table = "menage",
                variables = ['ident_me', 'revtot', 'revact', 'revsoc', 'revpat', 'rev700', 'rev701', 'rev999',
                    'revindep', 'salaires'],
                ).sort_values(by = ['ident_me'])
        rev_disp.index = rev_disp.index.astype(ident_men_dtype)
        menage.index = menage.index.astype(ident_men_dtype)
        rev_disp.set_index('ident_me', inplace = True)
        menage.set_index('ident_me', inplace = True)
        revenus = pandas.concat([menage, rev_disp], axis = 1)
        menage.index.name = 'ident_men'
        revenus.index.name = 'ident_men'
        revenus.rename(
            columns = dict(
                revindep = "act_indpt",
                # TODO: find these commented-out income variables in bdf 2011
                # rev101_d = "autoverses",
                salaires = "salaires",
                # rev201_d = "autres_rev",
                rev700 = "somme_obl_recue",
                rev701 = "somme_libre_recue",
                rev999 = "autres_ress",
                c13111 = "impot_res_ppal",
                c13141 = "impot_revenu",
                c13121 = "impot_autres_res",
                ),
            inplace = True
            )
        # Same 0.65 / 0.35 housing/property tax split as above (from BdF 1995).
        revenus['imphab'] = 0.65 * (revenus.impot_res_ppal + revenus.impot_autres_res)
        revenus['impfon'] = 0.35 * (revenus.impot_res_ppal + revenus.impot_autres_res)
        del revenus['impot_autres_res']
        del revenus['impot_res_ppal']
        loyers_imputes = temporary_store["depenses_bdf_{}".format(year)]
        variables = ["poste_coicop_421"]
        loyers_imputes = loyers_imputes[variables]
        loyers_imputes.rename(
            columns = {"poste_coicop_421": "loyer_impute"},
            inplace = True,
            )
        temporary_store["loyers_imputes_{}".format(year)] = loyers_imputes
        revenus = revenus.merge(loyers_imputes, left_index = True, right_index = True)
        # Disposable income, floored at zero.
        revenus['rev_disponible'] = revenus.revtot - revenus.impot_revenu - revenus.imphab
        revenus['rev_disponible'] = revenus['rev_disponible'] * (revenus['rev_disponible'] >= 0)
        revenus['rev_disp_loyerimput'] = revenus.rev_disponible + revenus.loyer_impute
temporary_store["revenus_{}".format(year)] = revenus | Build menage consumption by categorie fiscale dataframe | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/step_4_homogeneisation_revenus_menages.py#L46-L387 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import logging
import pandas
from openfisca_survey_manager.temporary import temporary_store_decorator
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_survey_manager import default_config_files_directory as config_files_directory
from openfisca_france_indirect_taxation.build_survey_data.utils \
import ident_men_dtype
log = logging.getLogger(__name__)
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'indirect_taxation_tmp')
if __name__ == '__main__':
    # Ad-hoc entry point: run the 2011 income homogenisation step and log its duration.
    import sys
    import time
    logging.basicConfig(level = logging.INFO, stream = sys.stdout)
    # NOTE(review): time.clock() is Python 2 only (removed in Python 3.8).
    deb = time.clock()
    year = 2011
    build_homogeneisation_revenus_menages(year = year)
    log.info("step_4_homogeneisation_revenus_menages duration is {}".format(time.clock() - deb))
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/step_2_homogeneisation_vehicules.py | build_homogeneisation_vehicules | python | def build_homogeneisation_vehicules(temporary_store = None, year = None):
assert temporary_store is not None
assert year is not None
# Load data
bdf_survey_collection = SurveyCollection.load(
collection = 'budget_des_familles', config_files_directory = config_files_directory)
survey = bdf_survey_collection.get_survey('budget_des_familles_{}'.format(year))
if year == 1995:
vehicule = None
# L'enquête BdF 1995 ne contient pas d'information sur le type de carburant utilisé par les véhicules.
if year == 2000:
vehicule = survey.get_values(table = "depmen")
kept_variables = ['ident', 'carbu01', 'carbu02']
vehicule = vehicule[kept_variables]
vehicule.rename(columns = {'ident': 'ident_men'}, inplace = True)
vehicule.rename(columns = {'carbu01': 'carbu1'}, inplace = True)
vehicule.rename(columns = {'carbu02': 'carbu2'}, inplace = True)
vehicule["veh_tot"] = 1
vehicule["veh_essence"] = 1 * (vehicule['carbu1'] == 1) + 1 * (vehicule['carbu2'] == 1)
vehicule["veh_diesel"] = 1 * (vehicule['carbu1'] == 2) + 1 * (vehicule['carbu2'] == 2)
vehicule.index = vehicule.index.astype(ident_men_dtype)
if year == 2005:
vehicule = survey.get_values(table = "automobile")
kept_variables = ['ident_men', 'carbu']
vehicule = vehicule[kept_variables]
vehicule["veh_tot"] = 1
vehicule["veh_essence"] = (vehicule['carbu'] == 1)
vehicule["veh_diesel"] = (vehicule['carbu'] == 2)
if year == 2011:
try:
vehicule = survey.get_values(table = "AUTOMOBILE")
except:
vehicule = survey.get_values(table = "automobile")
kept_variables = ['ident_me', 'carbu']
vehicule = vehicule[kept_variables]
vehicule.rename(columns = {'ident_me': 'ident_men'}, inplace = True)
vehicule["veh_tot"] = 1
vehicule["veh_essence"] = (vehicule['carbu'] == 1)
vehicule["veh_diesel"] = (vehicule['carbu'] == 2)
# Compute the number of cars by category and save
if year != 1995:
vehicule = vehicule.groupby(by = 'ident_men')["veh_tot", "veh_essence", "veh_diesel"].sum()
vehicule["pourcentage_vehicule_essence"] = 0
vehicule.pourcentage_vehicule_essence.loc[vehicule.veh_tot != 0] = vehicule.veh_essence / vehicule.veh_tot
# Save in temporary store
temporary_store['automobile_{}'.format(year)] = vehicule | Compute vehicule numbers by type | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/step_2_homogeneisation_vehicules.py#L55-L109 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import logging
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_survey_manager.temporary import temporary_store_decorator
from openfisca_survey_manager import default_config_files_directory as config_files_directory
from openfisca_france_indirect_taxation.build_survey_data.utils \
import ident_men_dtype
log = logging.getLogger(__name__)
# **************************************************************************************************************************
# * Etape n° 0-2 : HOMOGENEISATION DES DONNEES SUR LES VEHICULES
# **************************************************************************************************************************
# **************************************************************************************************************************
#
#
# DONNEES SUR LES TYPES DE CARBURANTS
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'indirect_taxation_tmp')
if __name__ == '__main__':
import sys
import time
logging.basicConfig(level = logging.INFO, stream = sys.stdout)
deb = time.clock()
year = 2005
build_homogeneisation_vehicules(year = year)
log.info("step 0_2_homogeneisation_vehicules duration is {}".format(time.clock() - deb))
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/model/base.py | droit_d_accise | python | def droit_d_accise(depense, droit_cn, consommation_cn, taux_plein_tva):
return depense * ((1 + taux_plein_tva) * droit_cn) / (consommation_cn - (1 + taux_plein_tva) * droit_cn) | Calcule le montant de droit d'accise sur un volume de dépense payé pour le poste adéquat. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/model/base.py#L54-L58 | null | # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.columns import AgeCol, DateCol, FloatCol, IntCol, EnumCol
from openfisca_core.enumerations import Enum
from openfisca_core.formulas import dated_function, DatedVariable, Variable
from openfisca_survey_manager.statshelpers import mark_weighted_percentiles, weighted_quantiles
from ..entities import Individus, Menages
__all__ = [
'AgeCol',
'DateCol',
'DatedVariable',
'dated_function',
'Enum',
'EnumCol',
'FloatCol',
'Individus',
'IntCol',
'mark_weighted_percentiles',
'Menages',
'droit_d_accise',
'tax_from_expense_including_tax',
'Variable',
'weighted_quantiles',
]
def taux_implicite(accise, tva, prix_ttc):
    """Implicit tax rate on fuels.

    Given the price identity pttc = pht * (1 + ti) * (1 + tva), solve for
    the implicit rate ti from the excise duty (accise), the VAT rate (tva)
    and the price including all taxes (prix_ttc).
    """
    # Excise duty grossed up by VAT (VAT applies on top of the excise).
    accise_ttc = accise * (1 + tva)
    return accise_ttc / (prix_ttc - accise_ttc)
def tax_from_expense_including_tax(expense = None, tax_rate = None):
    """Compute the tax amount from an expense that includes the tax.

    If Dttc = (1 + t) * Dht, this returns t * Dht, i.e. the tax share of
    the tax-inclusive expense.
    """
    # Gross-up factor turning a pre-tax amount into a tax-inclusive one.
    gross_up = 1 + tax_rate
    return expense * tax_rate / gross_up
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/step_3_homogeneisation_caracteristiques_menages.py | build_homogeneisation_caracteristiques_sociales | python | def build_homogeneisation_caracteristiques_sociales(temporary_store = None, year = None):
u"""Homogénéisation des caractéristiques sociales des ménages """
assert temporary_store is not None
assert year is not None
# Load data
bdf_survey_collection = SurveyCollection.load(
collection = 'budget_des_familles', config_files_directory = config_files_directory)
survey = bdf_survey_collection.get_survey('budget_des_familles_{}'.format(year))
# ******************************************************************************************************************
# * Etape n° 0-3 : HOMOGENEISATION DES CARACTERISTIQUES SOCIALES DES MENAGES
# ******************************************************************************************************************
# ******************************************************************************************************************
if year == 1995:
kept_variables = ['exdep', 'exrev', 'mena', 'v', 'ponderrd', 'nbpers', 'nbenf', 'typmen1', 'cohabpr', 'sexepr',
'agepr', 'agecj', 'matripr', 'occuppr', 'occupcj', 'nbact', 'sitlog', 'stalog', 'mena', 'nm14a', 'typmen1']
menage = survey.get_values(
table = "socioscm",
variables = kept_variables,
)
# cette étape permet de ne garder que les données dont on est sûr de la qualité et de la véracité
# exdep = 1 si les données sont bien remplies pour les dépenses du ménage
# exrev = 1 si les données sont bien remplies pour les revenus du ménage
menage = menage[(menage.exdep == 1) & (menage.exrev == 1)]
menage.rename(
columns = {
'v': 'vag',
'mena': 'ident_men',
'ponderrd': 'pondmen',
'nbpers': 'npers',
'nm14a': 'nenfants',
'nbenf': 'nenfhors',
'nbact': 'nactifs',
'cohabpr': 'couplepr',
'matripr': 'etamatri',
'typmen1': 'typmen'
},
inplace = True,
)
# la variable vag est utilisée dans les modèles QAIDS et AIDS comme variable temporelle afin d'attibuer
# le bon prix mensuel
menage.agecj = menage.agecj.fillna(0)
menage.nenfhors = menage.nenfhors.fillna(0)
menage.vag = menage.vag.astype('int')
menage['nadultes'] = menage['npers'] - menage['nenfants']
menage['ocde10'] = 1 + 0.5 * numpy.maximum(0, menage['nadultes'] - 1) + 0.3 * menage['nenfants']
# harmonisation des types de ménage sur la nomenclature 2010
menage['typmen_'] = menage['typmen']
menage.typmen[menage.typmen_ == 1] = 1
menage.typmen[menage.typmen_ == 2] = 3
menage.typmen[menage.typmen_ == 3] = 4
menage.typmen[menage.typmen_ == 4] = 4
menage.typmen[menage.typmen_ == 5] = 4
menage.typmen[menage.typmen_ == 6] = 2
menage.typmen[menage.typmen_ == 7] = 5
del menage['typmen_']
var_to_ints = ['couplepr', 'etamatri']
for var_to_int in var_to_ints:
menage[var_to_int] = menage[var_to_int].astype(int)
# Methode :
# 1. on nettoite les variables (i.e. changement de nom de format)
# 2. Reformatage des variables (réattribution des catégories pour quelles soient identiques
# pour les différentes années)
menage["situacj"] = 0
menage.situacj[menage.occupcj == 1] = 1
menage.situacj[menage.occupcj == 3] = 3
menage.situacj[menage.occupcj == 2] = 4
menage.situacj[menage.occupcj == 5] = 5
menage.situacj[menage.occupcj == 6] = 5
menage.situacj[menage.occupcj == 7] = 6
menage.situacj[menage.occupcj == 8] = 7
menage.situacj[menage.occupcj == 4] = 8
menage["situapr"] = 0
menage.situapr[menage.occuppr == 1] = 1
menage.situapr[menage.occuppr == 3] = 3
menage.situapr[menage.occuppr == 2] = 4
menage.situapr[menage.occuppr == 5] = 5
menage.situapr[menage.occuppr == 6] = 5
menage.situapr[menage.occuppr == 7] = 6
menage.situapr[menage.occuppr == 8] = 7
menage.situapr[menage.occuppr == 4] = 8
menage["typlog"] = 0
menage.typlog[menage.sitlog == 1] = 1
menage.typlog[menage.sitlog != 1] = 2
menage['stalog'] = menage['stalog'].astype(int)
individus = survey.get_values(
table = "individu",
)
variables = ['mena', 'v']
individus.rename(
columns = {'mena': 'identmen'},
inplace = True,
)
menage.set_index('ident_men', inplace = True)
if year == 2000:
menage = survey.get_values(
table = "menage",
variables = [
'ident', 'pondmen', 'nbact', 'nbenf1', 'nbpers', 'ocde10', 'sitlog', 'stalog', 'strate',
'typmen1', 'zeat', 'stalog', 'vag', 'sexepr', 'sexecj', 'agecj', 'napr', 'nacj', 'cs2pr',
'cs2cj', 'diegpr', 'dieppr', 'diespr', 'diegcj', 'diepcj', 'diescj', 'hod_nb', 'cohabpr',
'occupapr', 'occupacj', 'occupbpr', 'occupbcj', 'occupcpr', 'occupccj', 'typmen1'
]
)
menage.rename(
columns = {
'cohabpr': 'couplepr',
'hod_nb': 'nenfhors',
'ident': 'ident_men',
'nbact': 'nactifs',
'nbenf1': 'nenfants',
'nbpers': 'npers',
'rev81': 'poste_coicop_421',
'typmen1': 'typmen'
},
inplace = True,
)
menage.ocde10 = menage.ocde10 / 10
# on met un numéro à chaque vague pour pouvoir faire un meilleur suivi des évolutions temporelles
# pour le modèle de demande
menage.agecj = menage.agecj.fillna(0)
assert menage.notnull().all().all(), 'The following variables contains NaN values: {}'.format(
list(menage.isnull().any()[menage.isnull().any()].index))
menage['vag_'] = menage['vag']
menage.vag.loc[menage.vag_ == 1] = 9
menage.vag.loc[menage.vag_ == 2] = 10
menage.vag.loc[menage.vag_ == 3] = 11
menage.vag.loc[menage.vag_ == 4] = 12
menage.vag.loc[menage.vag_ == 5] = 13
menage.vag.loc[menage.vag_ == 6] = 14
menage.vag.loc[menage.vag_ == 7] = 15
menage.vag.loc[menage.vag_ == 8] = 16
del menage['vag_']
# harmonisation des types de ménage sur la nomenclature 2010
menage['typmen_'] = menage['typmen']
menage.typmen.loc[menage.typmen_ == 1] = 1
menage.typmen.loc[menage.typmen_ == 2] = 3
menage.typmen.loc[menage.typmen_ == 3] = 4
menage.typmen.loc[menage.typmen_ == 4] = 4
menage.typmen.loc[menage.typmen_ == 5] = 4
menage.typmen.loc[menage.typmen_ == 6] = 2
menage.typmen.loc[menage.typmen_ == 7] = 5
del menage['typmen_']
menage.couplepr = menage.couplepr.astype('int')
menage["nadultes"] = menage['npers'] - menage['nenfants']
menage.typmen = menage.typmen.astype('int')
# occupa : 1 si la personne travaille, 2 sinon. occupb : 1 si elle travaille effectivement, 2 si congé de
# longue durée (négligé ici). occupc : de 2 à 8 selon le statut si ne travaille pas (étudiant, retraité, etc.)
menage["situacj"] = 0
menage.situacj.loc[menage.occupacj == 1] = 1
menage.situacj.loc[menage.occupccj == 3] = 3
menage.situacj.loc[menage.occupccj == 2] = 4
menage.situacj.loc[menage.occupccj == 5] = 5
menage.situacj.loc[menage.occupccj == 6] = 5
menage.situacj.loc[menage.occupccj == 7] = 6
menage.situacj.loc[menage.occupccj == 8] = 7
menage.situacj.loc[menage.occupccj == 4] = 8
menage["situapr"] = 0
menage.situapr.loc[menage.occupapr == 1] = 1
menage.situapr.loc[menage.occupcpr == 3] = 3
menage.situapr.loc[menage.occupcpr == 2] = 4
menage.situapr.loc[menage.occupcpr == 5] = 5
menage.situapr.loc[menage.occupcpr == 6] = 5
menage.situapr.loc[menage.occupcpr == 7] = 6
menage.situapr.loc[menage.occupcpr == 8] = 7
menage.situapr.loc[menage.occupcpr == 4] = 8
menage["natiocj"] = 0
menage["natiopr"] = 0
menage.natiocj.loc[menage.nacj == 1] = 1
menage.natiocj.loc[menage.nacj == 2] = 1
menage.natiocj.loc[menage.nacj == 3] = 2
menage.natiopr.loc[menage.napr == 1] = 1
menage.natiopr.loc[menage.napr == 2] = 1
menage.natiopr.loc[menage.napr == 3] = 2
menage["typlog"] = 0
menage.typlog.loc[menage.sitlog == 1] = 1
menage.typlog.loc[menage.sitlog != 1] = 2
# Homogénéisation des diplômes, choix d'équivalence entre les diplômes
menage["dip14pr"] = 999999
menage.dip14pr.loc[menage.diegpr == 0] = 71
menage.dip14pr.loc[menage.diegpr == 2] = 70
menage.dip14pr.loc[menage.diegpr == 15] = 60
menage.dip14pr.loc[menage.diegpr == 18] = 60
menage.dip14pr.loc[menage.diegpr == 16] = 41
menage.dip14pr.loc[menage.diegpr == 17] = 41
menage.dip14pr.loc[menage.diegpr == 19] = 41
menage.dip14pr.loc[menage.dieppr == 23] = 50
menage.dip14pr.loc[menage.dieppr == 25] = 50
menage.dip14pr.loc[menage.dieppr == 27] = 50
menage.dip14pr.loc[menage.dieppr == 29] = 50
menage.dip14pr.loc[menage.dieppr == 34] = 43
menage.dip14pr.loc[menage.dieppr == 32] = 42
menage.dip14pr.loc[menage.dieppr == 36] = 42
menage.dip14pr.loc[menage.diespr == 41] = 30
menage.dip14pr.loc[menage.diespr == 42] = 31
menage.dip14pr.loc[menage.diespr == 43] = 31
menage.dip14pr.loc[menage.diespr == 44] = 33
menage.dip14pr.loc[menage.diespr == 46] = 20
menage.dip14pr.loc[menage.diespr == 48] = 12
menage.dip14pr.loc[menage.diespr == 47] = 10
menage.set_index('ident_men', inplace = True)
# Recodage des catégories zeat
menage.zeat.loc[menage.zeat == 7] = 6
menage.zeat.loc[menage.zeat == 8] = 7
menage.zeat.loc[menage.zeat == 9] = 8
assert menage.zeat.isin(range(1, 9)).all()
individus = survey.get_values(
table = "individus",
variables = ['ident', 'matri', 'lien', 'anais']
)
individus = individus.loc[individus.lien == 1].copy()
individus.rename(
columns = {'ident': 'ident_men', 'matri': 'etamatri'},
inplace = True,
)
variables_to_destring = ['anais']
for variable_to_destring in variables_to_destring:
individus[variable_to_destring] = individus[variable_to_destring].astype('int').copy()
individus['agepr'] = year - individus.anais
individus.set_index('ident_men', inplace = True)
assert menage.notnull().all().all(), 'The following variables contains NaN values: {}'.format(
list(menage.isnull().any()[menage.isnull().any()].index))
menage = menage.merge(individus, left_index = True, right_index = True)
if year == 2005:
menage = survey.get_values(table = "menage")
# données socio-démographiques
socio_demo_variables = ['agpr', 'agcj', 'couplepr', 'decuc', 'ident_men', 'nactifs', 'nenfants', 'nenfhors',
'npers', 'ocde10', 'pondmen', 'sexecj', 'sexepr', 'typmen5', 'vag', 'zeat', 'cs24pr']
socio_demo_variables += [column for column in menage.columns if column.startswith('dip14')]
socio_demo_variables += [column for column in menage.columns if column.startswith('natio7')]
# activité professionnelle
activite_prof_variables = ['situacj', 'situapr']
activite_prof_variables += [column for column in menage.columns if column.startswith('cs42')]
# logement
logement_variables = ['htl', 'strate']
menage = menage[socio_demo_variables + activite_prof_variables + logement_variables]
menage.rename(
columns = {
# "agpr": "agepr",
"agcj": "agecj",
"typmen5": "typmen",
"cs24pr": "cs_pr"
},
inplace = True,
)
del menage['agpr']
menage['nadultes'] = menage.npers - menage.nenfants
for person in ['pr', 'cj']:
menage['natio' + person] = (menage['natio7' + person] > 2) # TODO: changer de convention ?
del menage['natio7' + person]
menage.agecj = menage.agecj.fillna(0)
menage.nenfhors = menage.nenfhors.fillna(0)
var_to_ints = ['ocde10', 'decuc', 'nactifs', 'nenfants', 'npers', 'pondmen', 'nadultes']
assert menage.notnull().all().all(), 'The following variables contains NaN values: {}'.format(
list(menage.isnull().any()[menage.isnull().any()].index))
menage.couplepr = menage.couplepr > 2 # TODO: changer de convention ?
menage.ocde10 = menage.ocde10 / 10
menage.set_index('ident_men', inplace = True)
# on met un numéro à chaque vague pour pouvoir faire un meilleur suivi des évolutions temporelles
# pour le modèle de demande
menage['vag_'] = menage['vag']
menage.vag.loc[menage.vag_ == 1] = 17
menage.vag.loc[menage.vag_ == 2] = 18
menage.vag.loc[menage.vag_ == 3] = 19
menage.vag.loc[menage.vag_ == 4] = 20
menage.vag.loc[menage.vag_ == 5] = 21
menage.vag.loc[menage.vag_ == 6] = 22
del menage['vag_']
# Recodage des catégories zeat
menage.zeat.loc[menage.zeat == 7] = 6
menage.zeat.loc[menage.zeat == 8] = 7
menage.zeat.loc[menage.zeat == 9] = 8
assert menage.zeat.isin(range(1, 9)).all()
stalog = survey.get_values(table = "depmen", variables = ['ident_men', 'stalog'])
stalog['stalog'] = stalog.stalog.astype('int').copy()
stalog['new_stalog'] = 0
stalog.loc[stalog.stalog == 2, 'new_stalog'] = 1
stalog.loc[stalog.stalog == 1, 'new_stalog'] = 2
stalog.loc[stalog.stalog == 4, 'new_stalog'] = 3
stalog.loc[stalog.stalog == 5, 'new_stalog'] = 4
stalog.loc[stalog.stalog.isin([3, 6]), 'new_stalog'] = 5
stalog.stalog = stalog.new_stalog.copy()
del stalog['new_stalog']
assert stalog.stalog.isin(range(1, 6)).all()
stalog.set_index('ident_men', inplace = True)
menage = menage.merge(stalog, left_index = True, right_index = True)
menage['typlog'] = 2
menage.loc[menage.htl.isin(['1', '5']), 'typlog'] = 1
assert menage.typlog.isin([1, 2]).all()
del menage['htl']
individus = survey.get_values(table = 'individu')
# Il y a un problème sur l'année de naissance,
# donc on le recalcule avec l'année de naissance et la vague d'enquête
individus['agepr'] = year - individus.anais
individus.loc[individus.vag == 6, ['agepr']] = year + 1 - individus.anais
individus = individus[individus.lienpref == 00].copy()
kept_variables = ['ident_men', 'etamatri', 'agepr']
individus = individus[kept_variables].copy()
individus.etamatri.loc[individus.etamatri == 0] = 1
individus['etamatri'] = individus['etamatri'].astype('int') # MBJ TODO: define as a catagory ?
individus.set_index('ident_men', inplace = True)
menage = menage.merge(individus, left_index = True, right_index = True)
individus = survey.get_values(
table = 'individu',
variables = ['ident_men', 'ident_ind', 'age', 'anais', 'vag', 'lienpref'],
)
# Il y a un problème sur l'année de naissance,
# donc on le recalcule avec l'année de naissance et la vague d'enquête
individus['age'] = year - individus.anais
individus.loc[individus.vag == 6, ['age']] = year + 1 - individus.anais
# Garder toutes les personnes du ménage qui ne sont pas la personne de référence et le conjoint
individus = individus[(individus.lienpref != 00) & (individus.lienpref != 01)].copy()
individus.sort_values(by = ['ident_men', 'ident_ind'], inplace = True)
# Inspired by http://stackoverflow.com/questions/17228215/enumerate-each-row-for-each-group-in-a-dataframe
def add_col_numero(data_frame):
    """Number the rows of a per-household group sequentially from 3.

    Adds a 'numero' column holding 3, 4, ... in place and returns the same
    object (starting at 3 because, per the surrounding code, these are
    household members other than the reference person and partner).
    """
    row_count = len(data_frame)
    data_frame['numero'] = numpy.arange(3, row_count + 3)
    return data_frame
individus = individus.groupby(by = 'ident_men').apply(add_col_numero)
pivoted = individus.pivot(index = 'ident_men', columns = "numero", values = 'age')
pivoted.columns = ["age{}".format(column) for column in pivoted.columns]
menage = menage.merge(pivoted, left_index = True, right_index = True, how = 'outer')
individus = survey.get_values(
table = 'individu',
variables = ['ident_men', 'ident_ind', 'agfinetu', 'lienpref'],
)
individus.set_index('ident_men', inplace = True)
pr = individus.loc[individus.lienpref == 00, 'agfinetu'].copy()
conjoint = individus.loc[individus.lienpref == 01, 'agfinetu'].copy()
conjoint.name = 'agfinetu_cj'
agfinetu_merged = pandas.concat([pr, conjoint], axis = 1)
menage = menage.merge(agfinetu_merged, left_index = True, right_index = True)
temporary_store['donnes_socio_demog_{}'.format(year)] = menage
# label var agepr "Age de la personne de référence au 31/12/${yearrawdata}"
# label var agecj "Age du conjoint de la PR au 31/12/${yearrawdata}"
# label var sexepr "Sexe de la personne de référence"
# label var sexecj "Sexe du conjoint de la PR"
# label var cs42pr "Catégorie socio-professionnelle de la PR"
# label var cs42cj "Catégorie socio-professionnelle du conjoint de la PR"
# label var ocde10 "Nombre d'unités de consommation (échelle OCDE)"
# label var ident_men "Identifiant du ménage"
# label var pondmen "Ponderation du ménage"
# label var npers "Nombre total de personnes dans le ménage"
# label var nadultes "Nombre d'adultes dans le ménage"
# label var nenfants "Nombre d'enfants dans le ménage"
# label var nenfhors "Nombre d'enfants vivant hors domicile"
# label var nactifs "Nombre d'actifs dans le ménage"
# label var couplepr "Vie en couple de la personne de référence"
# label define typmen5 1 "Personne seule" 2 "Famille monoparentale" 3 "Couple sans enfant"
# 4 "Couple avec enfants" 5 "Autre type de ménage (complexe)"
# label values typmen5 typmen5
# label var typmen5 "Type de ménage (5 modalités)"
# label var etamatri "Situation matrimoniale de la personne de référence"
# label define matripr 1 "Célibataire" 2 "Marié(e)" 3 "Veuf(ve)" 4 "Divorcé(e)"
# label values etamatri matripr
# label define occupation 1 "Occupe un emploi" ///
# 2 "Apprenti" ///
# 3 "Etudiant, élève, en formation" ///
# 4 "Chômeur (inscrit ou non à l'ANPE)" ///
# 5 "Retraité, préretraité ou retiré des affaires" ///
# 6 "Au foyer" ///
# 7 "Autre situation (handicapé)" ///
# 8 "Militaire du contingent"
# label values situapr occupation
# label values situacj occupation
# label var situapr "Situation d'activité de la personne de référence"
# label var situacj "Situation d'activité du conjoint de la PR"
# label define diplome 10 "Diplôme de 3ème cycle universitaire, doctorat" ///
# 12 "Diplôme d'ingénieur, grande école" ///
# 20 "Diplôme de 2nd cycle universitaire" ///
# 30 "Diplôme de 1er cycle universitaire" ///
# 31 "BTS, DUT ou équivalent" ///
# 33 "Diplôme des professions sociales et de la santé niveau Bac +2" ///
# 41 "Baccalauréat général, brevet supérieur, capacité en droit" ///
# 42 "Baccalauréat technologique" ///
# 43 "Baccalauréat professionnel" ///
# 44 "Brevet professionnel ou de technicien" ///
# 50 "CAP, BEP ou diplôme de même niveau" ///
# 60 "Brevet des collèges, BEPC" ///
# 70 "Certificat d'études primaires" ///
# 71 "Aucun diplôme"
# label values dip14pr diplome
# label values dip14cj diplome
# label var dip14pr "Diplôme le plus élevé de la PR"
# label var dip14cj "Diplôme le plus élevé du conjoint de la PR"
# label define nationalite 1 "Français, par naissance ou naturalisation" 2 "Etranger"
# label values natiopr nationalite
# label values natiocj nationalite
# label var natiopr "Nationalité de la personne de référence"
# label var natiocj "Nationalité du conjoint de la PR"
# label define logement 1 "Maison" 2 "Appartement"
# label values typlog logement
# label var typlog "Type de logement"
# label define statutlogement 1 "Propriétaire ou copropriétaire" ///
# 2 "Accédant à la propriété (rembourse un prêt)" ///
# 3 "Locataire" ///
# 4 "Sous-locataire" ///
# 5 "Logé gratuitement"
# label values stalog statutlogement
# label var stalog "Statut d'occupation du logement"
# label define viecouple 1 "Vit en couple" 2 "Ne vit pas en couple"
# label values couplepr viecouple
#
# /* Recodage des CSP en 12 et 8 postes à partir de classification de l'INSEE (2003, PCS niveaux 1 et 2) */
# gen cs24pr=00
# replace cs24pr=10 if cs42pr=="11"
# replace cs24pr=10 if cs42pr=="12"
# replace cs24pr=10 if cs42pr=="13"
# replace cs24pr=21 if cs42pr=="21"
# replace cs24pr=22 if cs42pr=="22"
# replace cs24pr=23 if cs42pr=="23"
# replace cs24pr=31 if cs42pr=="31"
# replace cs24pr=32 if cs42pr=="33"
# replace cs24pr=32 if cs42pr=="34"
# replace cs24pr=32 if cs42pr=="35"
# replace cs24pr=36 if cs42pr=="37"
# replace cs24pr=36 if cs42pr=="38"
# replace cs24pr=41 if cs42pr=="42"
# replace cs24pr=41 if cs42pr=="43"
# replace cs24pr=41 if cs42pr=="44"
# replace cs24pr=41 if cs42pr=="45"
# replace cs24pr=46 if cs42pr=="46"
# replace cs24pr=47 if cs42pr=="47"
# replace cs24pr=48 if cs42pr=="48"
# replace cs24pr=51 if cs42pr=="52"
# replace cs24pr=51 if cs42pr=="53"
# replace cs24pr=54 if cs42pr=="54"
# replace cs24pr=55 if cs42pr=="55"
# replace cs24pr=56 if cs42pr=="56"
# replace cs24pr=61 if cs42pr=="62"
# replace cs24pr=61 if cs42pr=="63"
# replace cs24pr=61 if cs42pr=="64"
# replace cs24pr=61 if cs42pr=="65"
# replace cs24pr=66 if cs42pr=="67"
# replace cs24pr=66 if cs42pr=="68"
# replace cs24pr=69 if cs42pr=="69"
# replace cs24pr=71 if cs42pr=="71"
# replace cs24pr=72 if cs42pr=="72"
# replace cs24pr=73 if cs42pr=="74"
# replace cs24pr=73 if cs42pr=="75"
# replace cs24pr=76 if cs42pr=="77"
# replace cs24pr=76 if cs42pr=="78"
# replace cs24pr=81 if cs42pr=="81"
# replace cs24pr=82 if cs42pr=="83"
# replace cs24pr=82 if cs42pr=="84"
# replace cs24pr=82 if cs42pr=="85"
# replace cs24pr=82 if cs42pr=="86"
# replace cs24pr=82 if cs42pr=="**"
# replace cs24pr=82 if cs42pr=="00"
#
menage['cs24pr'] = 0
csp42s_by_csp24 = {
10: ["11", "12", "13"],
21: ["21"],
22: ["22"],
23: ["23"],
31: ["31"],
32: ["32", "33", "34", "35"],
36: ["37", "38"],
41: ["42", "43", "44", "45"],
46: ["46"],
47: ["47"],
48: ["48"],
51: ["52", "53"],
54: ["54"],
55: ["55"],
56: ["56"],
61: ["62", "63", "64", "65"],
66: ["67", "68"],
69: ["69"],
71: ["71"],
72: ["72"],
73: ["74", "75"],
76: ["77", "78"],
81: ["81"],
82: ["83", "84", "85", "86", "**", "00"],
}
for csp24, csp42s in csp42s_by_csp24.items():
menage.loc[menage.cs42pr.isin(csp42s), 'cs24pr'] = csp24
assert menage.cs24pr.isin(csp42s_by_csp24.keys()).all()
menage['cs8pr'] = numpy.floor(menage.cs24pr / 10)
assert menage.cs8pr.isin(range(1, 9)).all()
variables = [
'pondmen', 'npers', 'nenfants', 'nenfhors', 'nadultes', 'nactifs', 'ocde10', 'typmen',
'sexepr', 'agepr', 'etamatri', 'couplepr', 'situapr', 'dip14pr', 'cs42pr', 'cs24pr', 'cs8pr', 'natiopr',
'sexecj', 'agecj', 'situacj', 'dip14cj', 'cs42cj', 'natiocj', 'typlog', 'stalog'
] + ["age{}".format(age) for age in range(3, 14)]
for variable in variables:
assert variable in menage.columns, "{} is not a column of menage data frame".format(variable)
if year == 2011:
variables = [
'agecj',
'agepr',
'coeffuc',
'decuc1',
'ident_me',
'pondmen',
'npers',
'nenfants',
'nactifs',
'sexepr',
'sexecj',
'dip14cj',
'dip14pr',
'typmen5',
'cataeu',
'situapr',
'situacj',
'zeat',
]
try:
menage = survey.get_values(table = "MENAGE", variables = variables)
except:
menage = survey.get_values(table = "menage", variables = variables)
menage.rename(
columns = {
'ident_me': 'ident_men',
'coeffuc': 'ocde10',
'typmen5': 'typmen',
'decuc1': 'decuc',
'cataeu': 'strate'
},
inplace = True,
)
del variables
menage.agecj = menage.agecj.fillna(0)
# Ajout de la variable vag
try:
depmen = survey.get_values(table = "DEPMEN")
except:
depmen = survey.get_values(table = "depmen")
depmen.rename(columns = {'ident_me': 'ident_men'}, inplace = True)
vague = depmen[['vag', 'ident_men']].copy()
stalog = depmen[['stalog', 'ident_men']].copy()
del depmen
menage.set_index('ident_men', inplace = True)
vague.set_index('ident_men', inplace = True)
menage = menage.merge(vague, left_index = True, right_index = True)
# On met un numéro à chaque vague pour pouvoir faire un meilleur suivi des évolutions temporelles pour
# le modèle de demande
menage['vag_'] = menage['vag'].copy()
menage.vag.loc[menage.vag_ == 1] = 23
menage.vag.loc[menage.vag_ == 2] = 24
menage.vag.loc[menage.vag_ == 3] = 25
menage.vag.loc[menage.vag_ == 4] = 26
menage.vag.loc[menage.vag_ == 5] = 27
menage.vag.loc[menage.vag_ == 6] = 28
del menage['vag_']
# Homogénéisation de la variable statut du logement qui prend des valeurs différentes pour 2011
stalog['stalog'] = stalog.stalog.astype('int').copy()
stalog['new_stalog'] = 0
stalog.loc[stalog.stalog == 2, 'new_stalog'] = 1
stalog.loc[stalog.stalog == 1, 'new_stalog'] = 2
stalog.loc[stalog.stalog == 4, 'new_stalog'] = 3
stalog.loc[stalog.stalog == 5, 'new_stalog'] = 4
stalog.loc[stalog.stalog.isin([3, 6]), 'new_stalog'] = 5
stalog.stalog = stalog.new_stalog.copy()
del stalog['new_stalog']
assert stalog.stalog.isin(range(1, 6)).all()
stalog.set_index('ident_men', inplace = True)
menage = menage.merge(stalog, left_index = True, right_index = True)
# Recodage des catégories zeat
menage.loc[menage.zeat == 7, 'zeat'] = 6
menage.zeat.loc[menage.zeat == 8] = 7
menage.zeat.loc[menage.zeat == 9] = 8
assert menage.zeat.isin(range(0, 9)).all()
menage.index.name = 'ident_men'
#
assert menage.index.name == 'ident_men'
menage['role_menage'] = 0
temporary_store['donnes_socio_demog_{}'.format(year)] = menage | u"""Homogénéisation des caractéristiques sociales des ménages | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/step_3_homogeneisation_caracteristiques_menages.py#L44-L666 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import logging
import numpy
import pandas
from openfisca_survey_manager.temporary import temporary_store_decorator
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_survey_manager import default_config_files_directory as config_files_directory
log = logging.getLogger(__name__)
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'indirect_taxation_tmp')
if __name__ == '__main__':
import sys
import time
logging.basicConfig(level = logging.INFO, stream = sys.stdout)
deb = time.clock()
year = 2011
build_homogeneisation_caracteristiques_sociales(year = year)
log.info("step_3_homogeneisation_caracteristiques_sociales {}".format(time.clock() - deb))
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/utils.py | collapsesum | python | def collapsesum(data_frame, by = None, var = None):
'''
Pour une variable, fonction qui calcule la moyenne pondérée au sein de chaque groupe.
'''
assert by is not None
assert var is not None
grouped = data_frame.groupby([by])
return grouped.apply(lambda x: weighted_sum(groupe = x, var =var)) | Pour une variable, fonction qui calcule la moyenne pondérée au sein de chaque groupe. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/utils.py#L30-L37 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
def find_nearest_inferior(years, year):
# years = year_data_list
anterior_years = [
available_year for available_year in years if available_year <= year
]
return max(anterior_years)
# ident_men_dtype = numpy.dtype('O')
ident_men_dtype = 'str'
def weighted_sum(groupe, var):
'''
Fonction qui calcule la moyenne pondérée par groupe d'une variable
'''
data = groupe[var]
weights = groupe['pondmen']
return (data * weights).sum()
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/utils.py | weighted_sum | python | def weighted_sum(groupe, var):
'''
Fonction qui calcule la moyenne pondérée par groupe d'une variable
'''
data = groupe[var]
weights = groupe['pondmen']
return (data * weights).sum() | Fonction qui calcule la moyenne pondérée par groupe d'une variable | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/utils.py#L52-L58 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
def collapsesum(data_frame, by = None, var = None):
'''
Pour une variable, fonction qui calcule la moyenne pondérée au sein de chaque groupe.
'''
assert by is not None
assert var is not None
grouped = data_frame.groupby([by])
return grouped.apply(lambda x: weighted_sum(groupe = x, var =var))
def find_nearest_inferior(years, year):
# years = year_data_list
anterior_years = [
available_year for available_year in years if available_year <= year
]
return max(anterior_years)
# ident_men_dtype = numpy.dtype('O')
ident_men_dtype = 'str'
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/step_1_1_homogeneisation_donnees_depenses.py | build_depenses_homogenisees | python | def build_depenses_homogenisees(temporary_store = None, year = None):
assert temporary_store is not None
assert year is not None
bdf_survey_collection = SurveyCollection.load(
collection = 'budget_des_familles', config_files_directory = config_files_directory
)
survey = bdf_survey_collection.get_survey('budget_des_familles_{}'.format(year))
# Homogénéisation des bases de données de dépenses
if year == 1995:
socioscm = survey.get_values(table = "socioscm")
poids = socioscm[['mena', 'ponderrd', 'exdep', 'exrev']]
# cette étape de ne garder que les données dont on est sûr de la qualité et de la véracité
# exdep = 1 si les données sont bien remplies pour les dépenses du ménage
# exrev = 1 si les données sont bien remplies pour les revenus du ménage
poids = poids[(poids.exdep == 1) & (poids.exrev == 1)]
del poids['exdep'], poids['exrev']
poids.rename(
columns = {
'mena': 'ident_men',
'ponderrd': 'pondmen',
},
inplace = True
)
poids.set_index('ident_men', inplace = True)
conso = survey.get_values(table = "depnom")
conso = conso[["valeur", "montant", "mena", "nomen5"]]
conso = conso.groupby(["mena", "nomen5"]).sum()
conso = conso.reset_index()
conso.rename(
columns = {
'mena': 'ident_men',
'nomen5': 'poste{}'.format(year),
'valeur': 'depense',
'montant': 'depense_avt_imput',
},
inplace = True
)
# Passage à l'euro
conso.depense = conso.depense / 6.55957
conso.depense_avt_imput = conso.depense_avt_imput / 6.55957
conso_small = conso[[u'ident_men', u'poste1995', u'depense']]
conso_unstacked = conso_small.set_index(['ident_men', 'poste1995']).unstack('poste1995')
conso_unstacked = conso_unstacked.fillna(0)
levels = conso_unstacked.columns.levels[1]
labels = conso_unstacked.columns.labels[1]
conso_unstacked.columns = levels[labels]
conso_unstacked.rename(index = {0: 'ident_men'}, inplace = True)
conso = conso_unstacked.merge(poids, left_index = True, right_index = True)
conso = conso.reset_index()
if year == 2000:
conso = survey.get_values(table = "consomen")
conso.rename(
columns = {
'ident': 'ident_men',
'pondmen': 'pondmen',
},
inplace = True,
)
for variable in ['ctotale', 'c99', 'c99999'] + \
["c0{}".format(i) for i in range(1, 10)] + \
["c{}".format(i) for i in range(10, 14)]:
del conso[variable]
if year == 2005:
conso = survey.get_values(table = "c05d")
if year == 2011:
try:
conso = survey.get_values(table = "C05")
except:
conso = survey.get_values(table = "c05")
conso.rename(
columns = {
'ident_me': 'ident_men',
},
inplace = True,
)
del conso['ctot']
# Grouping by coicop
poids = conso[['ident_men', 'pondmen']].copy()
poids.set_index('ident_men', inplace = True)
conso.drop('pondmen', axis = 1, inplace = True)
conso.set_index('ident_men', inplace = True)
matrice_passage_data_frame, selected_parametres_fiscalite_data_frame = get_transfert_data_frames(year)
coicop_poste_bdf = matrice_passage_data_frame[['poste{}'.format(year), 'posteCOICOP']]
coicop_poste_bdf.set_index('poste{}'.format(year), inplace = True)
coicop_by_poste_bdf = coicop_poste_bdf.to_dict()['posteCOICOP']
del coicop_poste_bdf
def reformat_consumption_column_coicop(coicop):
try:
return int(coicop.replace('c', '').lstrip('0'))
except:
return numpy.NaN
# cette étape permet d'harmoniser les df pour 1995 qui ne se présentent pas de la même façon
# que pour les trois autres années
if year == 1995:
coicop_labels = [
normalize_code_coicop(coicop_by_poste_bdf.get(poste_bdf))
for poste_bdf in conso.columns
]
else:
coicop_labels = [
normalize_code_coicop(coicop_by_poste_bdf.get(reformat_consumption_column_coicop(poste_bdf)))
for poste_bdf in conso.columns
]
tuples = zip(coicop_labels, conso.columns)
conso.columns = pandas.MultiIndex.from_tuples(tuples, names=['coicop', 'poste{}'.format(year)])
coicop_data_frame = conso.groupby(level = 0, axis = 1).sum()
depenses = coicop_data_frame.merge(poids, left_index = True, right_index = True)
# Création de gros postes, les 12 postes sur lesquels le calage se fera
def select_gros_postes(coicop):
try:
coicop = unicode(coicop)
except:
coicop = coicop
normalized_coicop = normalize_code_coicop(coicop)
grosposte = normalized_coicop[0:2]
return int(grosposte)
grospostes = [
select_gros_postes(coicop)
for coicop in coicop_data_frame.columns
]
tuples_gros_poste = zip(coicop_data_frame.columns, grospostes)
coicop_data_frame.columns = pandas.MultiIndex.from_tuples(tuples_gros_poste, names=['coicop', 'grosposte'])
depenses_by_grosposte = coicop_data_frame.groupby(level = 1, axis = 1).sum()
depenses_by_grosposte = depenses_by_grosposte.merge(poids, left_index = True, right_index = True)
# TODO : understand why it does not work: depenses.rename(columns = {u'0421': 'poste_coicop_421'}, inplace = True)
produits = [column for column in depenses.columns if column.isdigit()]
for code in produits:
if code[-1:] == '0':
depenses.rename(columns = {code: code[:-1]}, inplace = True)
else:
depenses.rename(columns = {code: code}, inplace = True)
produits = [column for column in depenses.columns if column.isdigit()]
for code in produits:
if code[0:1] == '0':
depenses.rename(columns = {code: code[1:]}, inplace = True)
else:
depenses.rename(columns = {code: code}, inplace = True)
produits = [column for column in depenses.columns if column.isdigit()]
for code in produits:
depenses.rename(columns = {code: 'poste_coicop_' + code}, inplace = True)
temporary_store['depenses_{}'.format(year)] = depenses
depenses_by_grosposte.columns = depenses_by_grosposte.columns.astype(str)
liste_grospostes = [column for column in depenses_by_grosposte.columns if column.isdigit()]
for grosposte in liste_grospostes:
depenses_by_grosposte.rename(columns = {grosposte: 'coicop12_' + grosposte}, inplace = True)
temporary_store['depenses_by_grosposte_{}'.format(year)] = depenses_by_grosposte | Build menage consumption by categorie fiscale dataframe | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/step_1_1_homogeneisation_donnees_depenses.py#L47-L217 | [
"def get_transfert_data_frames(year = None):\n assert year is not None\n matrice_passage_csv_file_path = os.path.join(\n assets_directory,\n 'legislation',\n 'Matrice passage {}-COICOP.csv'.format(year),\n )\n if os.path.exists(matrice_passage_csv_file_path):\n matrice_passage_data_frame = pandas.read_csv(matrice_passage_csv_file_path)\n else:\n matrice_passage_xls_file_path = os.path.join(\n assets_directory,\n 'legislation',\n 'Matrice passage {}-COICOP.xls'.format(year),\n )\n matrice_passage_data_frame = pandas.read_excel(matrice_passage_xls_file_path)\n matrice_passage_data_frame.to_csv(matrice_passage_csv_file_path, encoding = 'utf-8')\n\n if year == 2011:\n matrice_passage_data_frame['poste2011'] = \\\n matrice_passage_data_frame['poste2011'].apply(lambda x: int(x.replace('c', '').lstrip('0')))\n\n selected_parametres_fiscalite_data_frame = get_parametres_fiscalite_data_frame(year = year)\n return matrice_passage_data_frame, selected_parametres_fiscalite_data_frame\n"
] | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import logging
import numpy
import pandas
from openfisca_survey_manager.temporary import temporary_store_decorator
from openfisca_survey_manager import default_config_files_directory as config_files_directory
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_indirect_taxation.utils import get_transfert_data_frames
log = logging.getLogger(__name__)
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'indirect_taxation_tmp')
def normalize_code_coicop(code):
'''Normalize_coicop est function d'harmonisation de la colonne d'entiers posteCOICOP de la table
matrice_passage_data_frame en la transformant en une chaine de 5 caractères afin de pouvoir par la suite agréger les postes
COICOP selon les 12 postes agrégés de la nomenclature de la comptabilité nationale. Chaque poste contient 5 caractères,
les deux premiers (entre 01 et 12) correspondent à ces postes agrégés de la CN.
'''
# TODO: vérifier la formule !!!
try:
code = unicode(code)
except:
code = code
if len(code) == 3:
code_coicop = "0" + code + "0" # "{0}{1}{0}".format(0, code)
elif len(code) == 4:
if not code.startswith("0") and not code.startswith("1") and not code.startswith("45") and not code.startswith("9"):
code_coicop = "0" + code
# 022.. = cigarettes et tabacs => on les range avec l'alcool (021.0)
elif code.startswith("0"):
code_coicop = code + "0"
elif code in ["1151", "1181", "4552", "4522", "4511", "9122", "9151", "9211", "9341", "1411"]:
# 1151 = Margarines et autres graisses végétales
# 1181 = Confiserie
# 04522 = Achat de butane, propane
# 04511 = Facture EDF GDF non dissociables
code_coicop = "0" + code
else:
# 99 = loyer, impots et taxes, cadeaux...
code_coicop = code + "0"
elif len(code) == 5:
if not code.startswith("13") and not code.startswith("44") and not code.startswith("51"):
code_coicop = code
else:
code_coicop = "99000"
else:
log.error("Problematic code {}".format(code))
raise()
return code_coicop
if __name__ == '__main__':
import sys
import time
logging.basicConfig(level = logging.INFO, stream = sys.stdout)
deb = time.clock()
year = 2011
build_depenses_homogenisees(year = year)
log.info("duration is {}".format(time.clock() - deb))
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/build_survey_data/step_1_1_homogeneisation_donnees_depenses.py | normalize_code_coicop | python | def normalize_code_coicop(code):
'''Normalize_coicop est function d'harmonisation de la colonne d'entiers posteCOICOP de la table
matrice_passage_data_frame en la transformant en une chaine de 5 caractères afin de pouvoir par la suite agréger les postes
COICOP selon les 12 postes agrégés de la nomenclature de la comptabilité nationale. Chaque poste contient 5 caractères,
les deux premiers (entre 01 et 12) correspondent à ces postes agrégés de la CN.
'''
# TODO: vérifier la formule !!!
try:
code = unicode(code)
except:
code = code
if len(code) == 3:
code_coicop = "0" + code + "0" # "{0}{1}{0}".format(0, code)
elif len(code) == 4:
if not code.startswith("0") and not code.startswith("1") and not code.startswith("45") and not code.startswith("9"):
code_coicop = "0" + code
# 022.. = cigarettes et tabacs => on les range avec l'alcool (021.0)
elif code.startswith("0"):
code_coicop = code + "0"
elif code in ["1151", "1181", "4552", "4522", "4511", "9122", "9151", "9211", "9341", "1411"]:
# 1151 = Margarines et autres graisses végétales
# 1181 = Confiserie
# 04522 = Achat de butane, propane
# 04511 = Facture EDF GDF non dissociables
code_coicop = "0" + code
else:
# 99 = loyer, impots et taxes, cadeaux...
code_coicop = code + "0"
elif len(code) == 5:
if not code.startswith("13") and not code.startswith("44") and not code.startswith("51"):
code_coicop = code
else:
code_coicop = "99000"
else:
log.error("Problematic code {}".format(code))
raise()
return code_coicop | Normalize_coicop est function d'harmonisation de la colonne d'entiers posteCOICOP de la table
matrice_passage_data_frame en la transformant en une chaine de 5 caractères afin de pouvoir par la suite agréger les postes
COICOP selon les 12 postes agrégés de la nomenclature de la comptabilité nationale. Chaque poste contient 5 caractères,
les deux premiers (entre 01 et 12) correspondent à ces postes agrégés de la CN. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/build_survey_data/step_1_1_homogeneisation_donnees_depenses.py#L220-L258 | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import logging
import numpy
import pandas
from openfisca_survey_manager.temporary import temporary_store_decorator
from openfisca_survey_manager import default_config_files_directory as config_files_directory
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_indirect_taxation.utils import get_transfert_data_frames
log = logging.getLogger(__name__)
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'indirect_taxation_tmp')
def build_depenses_homogenisees(temporary_store = None, year = None):
"""Build menage consumption by categorie fiscale dataframe """
assert temporary_store is not None
assert year is not None
bdf_survey_collection = SurveyCollection.load(
collection = 'budget_des_familles', config_files_directory = config_files_directory
)
survey = bdf_survey_collection.get_survey('budget_des_familles_{}'.format(year))
# Homogénéisation des bases de données de dépenses
if year == 1995:
socioscm = survey.get_values(table = "socioscm")
poids = socioscm[['mena', 'ponderrd', 'exdep', 'exrev']]
# cette étape de ne garder que les données dont on est sûr de la qualité et de la véracité
# exdep = 1 si les données sont bien remplies pour les dépenses du ménage
# exrev = 1 si les données sont bien remplies pour les revenus du ménage
poids = poids[(poids.exdep == 1) & (poids.exrev == 1)]
del poids['exdep'], poids['exrev']
poids.rename(
columns = {
'mena': 'ident_men',
'ponderrd': 'pondmen',
},
inplace = True
)
poids.set_index('ident_men', inplace = True)
conso = survey.get_values(table = "depnom")
conso = conso[["valeur", "montant", "mena", "nomen5"]]
conso = conso.groupby(["mena", "nomen5"]).sum()
conso = conso.reset_index()
conso.rename(
columns = {
'mena': 'ident_men',
'nomen5': 'poste{}'.format(year),
'valeur': 'depense',
'montant': 'depense_avt_imput',
},
inplace = True
)
# Passage à l'euro
conso.depense = conso.depense / 6.55957
conso.depense_avt_imput = conso.depense_avt_imput / 6.55957
conso_small = conso[[u'ident_men', u'poste1995', u'depense']]
conso_unstacked = conso_small.set_index(['ident_men', 'poste1995']).unstack('poste1995')
conso_unstacked = conso_unstacked.fillna(0)
levels = conso_unstacked.columns.levels[1]
labels = conso_unstacked.columns.labels[1]
conso_unstacked.columns = levels[labels]
conso_unstacked.rename(index = {0: 'ident_men'}, inplace = True)
conso = conso_unstacked.merge(poids, left_index = True, right_index = True)
conso = conso.reset_index()
if year == 2000:
conso = survey.get_values(table = "consomen")
conso.rename(
columns = {
'ident': 'ident_men',
'pondmen': 'pondmen',
},
inplace = True,
)
for variable in ['ctotale', 'c99', 'c99999'] + \
["c0{}".format(i) for i in range(1, 10)] + \
["c{}".format(i) for i in range(10, 14)]:
del conso[variable]
if year == 2005:
conso = survey.get_values(table = "c05d")
if year == 2011:
try:
conso = survey.get_values(table = "C05")
except:
conso = survey.get_values(table = "c05")
conso.rename(
columns = {
'ident_me': 'ident_men',
},
inplace = True,
)
del conso['ctot']
# Grouping by coicop
poids = conso[['ident_men', 'pondmen']].copy()
poids.set_index('ident_men', inplace = True)
conso.drop('pondmen', axis = 1, inplace = True)
conso.set_index('ident_men', inplace = True)
matrice_passage_data_frame, selected_parametres_fiscalite_data_frame = get_transfert_data_frames(year)
coicop_poste_bdf = matrice_passage_data_frame[['poste{}'.format(year), 'posteCOICOP']]
coicop_poste_bdf.set_index('poste{}'.format(year), inplace = True)
coicop_by_poste_bdf = coicop_poste_bdf.to_dict()['posteCOICOP']
del coicop_poste_bdf
def reformat_consumption_column_coicop(coicop):
try:
return int(coicop.replace('c', '').lstrip('0'))
except:
return numpy.NaN
# cette étape permet d'harmoniser les df pour 1995 qui ne se présentent pas de la même façon
# que pour les trois autres années
if year == 1995:
coicop_labels = [
normalize_code_coicop(coicop_by_poste_bdf.get(poste_bdf))
for poste_bdf in conso.columns
]
else:
coicop_labels = [
normalize_code_coicop(coicop_by_poste_bdf.get(reformat_consumption_column_coicop(poste_bdf)))
for poste_bdf in conso.columns
]
tuples = zip(coicop_labels, conso.columns)
conso.columns = pandas.MultiIndex.from_tuples(tuples, names=['coicop', 'poste{}'.format(year)])
coicop_data_frame = conso.groupby(level = 0, axis = 1).sum()
depenses = coicop_data_frame.merge(poids, left_index = True, right_index = True)
# Création de gros postes, les 12 postes sur lesquels le calage se fera
def select_gros_postes(coicop):
try:
coicop = unicode(coicop)
except:
coicop = coicop
normalized_coicop = normalize_code_coicop(coicop)
grosposte = normalized_coicop[0:2]
return int(grosposte)
grospostes = [
select_gros_postes(coicop)
for coicop in coicop_data_frame.columns
]
tuples_gros_poste = zip(coicop_data_frame.columns, grospostes)
coicop_data_frame.columns = pandas.MultiIndex.from_tuples(tuples_gros_poste, names=['coicop', 'grosposte'])
depenses_by_grosposte = coicop_data_frame.groupby(level = 1, axis = 1).sum()
depenses_by_grosposte = depenses_by_grosposte.merge(poids, left_index = True, right_index = True)
# TODO : understand why it does not work: depenses.rename(columns = {u'0421': 'poste_coicop_421'}, inplace = True)
produits = [column for column in depenses.columns if column.isdigit()]
for code in produits:
if code[-1:] == '0':
depenses.rename(columns = {code: code[:-1]}, inplace = True)
else:
depenses.rename(columns = {code: code}, inplace = True)
produits = [column for column in depenses.columns if column.isdigit()]
for code in produits:
if code[0:1] == '0':
depenses.rename(columns = {code: code[1:]}, inplace = True)
else:
depenses.rename(columns = {code: code}, inplace = True)
produits = [column for column in depenses.columns if column.isdigit()]
for code in produits:
depenses.rename(columns = {code: 'poste_coicop_' + code}, inplace = True)
temporary_store['depenses_{}'.format(year)] = depenses
depenses_by_grosposte.columns = depenses_by_grosposte.columns.astype(str)
liste_grospostes = [column for column in depenses_by_grosposte.columns if column.isdigit()]
for grosposte in liste_grospostes:
depenses_by_grosposte.rename(columns = {grosposte: 'coicop12_' + grosposte}, inplace = True)
temporary_store['depenses_by_grosposte_{}'.format(year)] = depenses_by_grosposte
if __name__ == '__main__':
import sys
import time
logging.basicConfig(level = logging.INFO, stream = sys.stdout)
deb = time.clock()
year = 2011
build_depenses_homogenisees(year = year)
log.info("duration is {}".format(time.clock() - deb))
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/utils_example.py | simulate | python | def simulate(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
) | Construction de la DataFrame à partir de laquelle sera faite l'analyse des données | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/utils_example.py#L39-L58 | [
"def init_country():\n class TaxBenefitSystem(XmlBasedTaxBenefitSystem):\n entity_class_by_key_plural = {\n entity_class.key_plural: entity_class\n for entity_class in entity_class_by_symbol.itervalues()\n }\n legislation_xml_file_path = os.path.join(\n os.path.dirname(os.path.abspath(param.__file__)),\n 'parameters.xml'\n )\n preprocess_legislation = staticmethod(preprocessing.preprocess_legislation)\n\n def prefill_cache(self):\n # Define categorie_fiscale_* and poste_coicp_* variables\n from .model.consommation import categories_fiscales\n categories_fiscales.preload_categories_fiscales_data_frame()\n from .model.consommation import postes_coicop\n postes_coicop.preload_postes_coicop_data_frame()\n # Reindex columns since preload functions generate new columns.\n self.index_columns()\n\n # Define class attributes after class declaration to avoid \"name is not defined\" exceptions.\n TaxBenefitSystem.Scenario = Scenario\n\n from .model import model # noqa analysis:ignore\n\n return TaxBenefitSystem\n",
"def get_input_data_frame(year):\n openfisca_survey_collection = SurveyCollection.load(collection = \"openfisca_indirect_taxation\")\n openfisca_survey = openfisca_survey_collection.get_survey(\"openfisca_indirect_taxation_data_{}\".format(year))\n input_data_frame = openfisca_survey.get_values(table = \"input\")\n input_data_frame.reset_index(inplace = True)\n return input_data_frame\n"
] | # -*- coding: utf-8 -*-
from __future__ import division
from pandas import DataFrame
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import openfisca_france_indirect_taxation
from openfisca_france_indirect_taxation.surveys import get_input_data_frame
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_indirect_taxation.surveys import SurveyScenario
from openfisca_france_indirect_taxation.examples.calage_bdf_cn import \
build_df_calee_on_grospostes, build_df_calee_on_ticpe
def create_survey_scenario(year = None):
assert year is not None
input_data_frame = get_input_data_frame(year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame,
tax_benefit_system = tax_benefit_system,
year = year,
)
return survey_scenario
def simulate_df_calee_by_grosposte(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
input_data_frame_calee = build_df_calee_on_grospostes(input_data_frame, year, year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame_calee,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def simulate_df_calee_on_ticpe(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
input_data_frame_calee = build_df_calee_on_ticpe(input_data_frame, year, year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame_calee,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def wavg(groupe, var):
'''
Fonction qui calcule la moyenne pondérée par groupe d'une variable
'''
d = groupe[var]
w = groupe['pondmen']
return (d * w).sum() / w.sum()
def collapse(dataframe, groupe, var):
'''
Pour une variable, fonction qui calcule la moyenne pondérée au sein de chaque groupe.
'''
grouped = dataframe.groupby([groupe])
var_weighted_grouped = grouped.apply(lambda x: wavg(groupe = x, var = var))
return var_weighted_grouped
def df_weighted_average_grouped(dataframe, groupe, varlist):
'''
Agrège les résultats de weighted_average_grouped() en une unique dataframe pour la liste de variable 'varlist'.
'''
return DataFrame(
dict([
(var, collapse(dataframe, groupe, var)) for var in varlist
])
)
# To choose color when doing graph, could put a list of colors in argument
def graph_builder_bar(graph):
axes = graph.plot(
kind = 'bar',
stacked = True,
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (1.5, 1.05),
)
return plt.show()
def graph_builder_bar_list(graph, a, b):
axes = graph.plot(
kind = 'bar',
stacked = True,
color = ['#FF0000']
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line_percent(graph, a, b):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line(graph):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (1, 0.25),
)
return plt.show()
def graph_builder_carburants(data_frame, name, legend1, legend2, color1, color2, color3, color4):
axes = data_frame.plot(
color = [color1, color2, color3, color4])
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def graph_builder_carburants_no_color(data_frame, name, legend1, legend2):
axes = data_frame.plot()
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def percent_formatter(x, pos=0):
    """Format the fraction *x* as a whole-number percentage, e.g. 0.25 -> '25%'.

    *pos* is accepted (and ignored) so the function can be used directly
    as a matplotlib ``FuncFormatter`` callback.
    """
    return '{:.0f}%'.format(100 * x)
def save_dataframe_to_graph(dataframe, file_name,
        directory='C:/Users/thomas.douenne/Documents/data/Stats_rapport/'):
    """Dump *dataframe* to ``<directory><file_name>`` as a
    semicolon-separated CSV file.

    Parameters:
        dataframe: pandas DataFrame to export.
        file_name: name of the output file (including extension).
        directory: output folder. Defaults to the historical hard-coded
            user-specific path so existing callers keep writing to the
            same location; pass another folder to make the function
            portable across machines.
    """
    return dataframe.to_csv(directory + file_name, sep=';')
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/utils_example.py | wavg | python | def wavg(groupe, var):
'''
Fonction qui calcule la moyenne pondérée par groupe d'une variable
'''
d = groupe[var]
w = groupe['pondmen']
return (d * w).sum() / w.sum() | Fonction qui calcule la moyenne pondérée par groupe d'une variable | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/utils_example.py#L107-L113 | null | # -*- coding: utf-8 -*-
from __future__ import division
from pandas import DataFrame
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import openfisca_france_indirect_taxation
from openfisca_france_indirect_taxation.surveys import get_input_data_frame
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_indirect_taxation.surveys import SurveyScenario
from openfisca_france_indirect_taxation.examples.calage_bdf_cn import \
build_df_calee_on_grospostes, build_df_calee_on_ticpe
def create_survey_scenario(year = None):
    """Build a SurveyScenario bound to the survey input data for *year*.

    Raises AssertionError when *year* is omitted.
    """
    assert year is not None
    # Instantiate the country tax-benefit system, then attach it to the
    # survey micro-data for the requested year.
    tax_benefit_system = openfisca_france_indirect_taxation.init_country()()
    return SurveyScenario().init_from_data_frame(
        input_data_frame = get_input_data_frame(year),
        tax_benefit_system = tax_benefit_system,
        year = year,
        )
def simulate(simulated_variables, year):
    """Run a simulation for *year* and return the values of
    *simulated_variables* as the columns of a DataFrame.
    """
    tax_benefit_system = openfisca_france_indirect_taxation.init_country()()
    survey_scenario = SurveyScenario().init_from_data_frame(
        input_data_frame = get_input_data_frame(year),
        tax_benefit_system = tax_benefit_system,
        year = year,
        )
    simulation = survey_scenario.new_simulation()
    # One column per requested variable.
    return DataFrame({name: simulation.calculate(name) for name in simulated_variables})
def simulate_df_calee_by_grosposte(simulated_variables, year):
    """Like simulate(), but the survey input data are first calibrated on
    national accounts by large expenditure categories ("grospostes");
    returns the requested variables as a DataFrame.
    """
    calibrated_input = build_df_calee_on_grospostes(get_input_data_frame(year), year, year)
    tax_benefit_system = openfisca_france_indirect_taxation.init_country()()
    survey_scenario = SurveyScenario().init_from_data_frame(
        input_data_frame = calibrated_input,
        tax_benefit_system = tax_benefit_system,
        year = year,
        )
    simulation = survey_scenario.new_simulation()
    # One column per requested variable.
    return DataFrame({name: simulation.calculate(name) for name in simulated_variables})
def simulate_df_calee_on_ticpe(simulated_variables, year):
    """Like simulate(), but the survey input data are first calibrated on
    the TICPE fuel-tax aggregates; returns the requested variables as a
    DataFrame.
    """
    calibrated_input = build_df_calee_on_ticpe(get_input_data_frame(year), year, year)
    tax_benefit_system = openfisca_france_indirect_taxation.init_country()()
    survey_scenario = SurveyScenario().init_from_data_frame(
        input_data_frame = calibrated_input,
        tax_benefit_system = tax_benefit_system,
        year = year,
        )
    simulation = survey_scenario.new_simulation()
    # One column per requested variable.
    return DataFrame({name: simulation.calculate(name) for name in simulated_variables})
def collapse(dataframe, groupe, var):
    """Return the weighted mean of *var* within each modality of *groupe*,
    as a Series indexed by the group values.
    """
    # Delegate the per-group weighted average to wavg().
    return dataframe.groupby([groupe]).apply(lambda g: wavg(groupe=g, var=var))
def df_weighted_average_grouped(dataframe, groupe, varlist):
'''
Agrège les résultats de weighted_average_grouped() en une unique dataframe pour la liste de variable 'varlist'.
'''
return DataFrame(
dict([
(var, collapse(dataframe, groupe, var)) for var in varlist
])
)
# To choose color when doing graph, could put a list of colors in argument
def graph_builder_bar(graph):
axes = graph.plot(
kind = 'bar',
stacked = True,
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (1.5, 1.05),
)
return plt.show()
def graph_builder_bar_list(graph, a, b):
axes = graph.plot(
kind = 'bar',
stacked = True,
color = ['#FF0000']
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line_percent(graph, a, b):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line(graph):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (1, 0.25),
)
return plt.show()
def graph_builder_carburants(data_frame, name, legend1, legend2, color1, color2, color3, color4):
axes = data_frame.plot(
color = [color1, color2, color3, color4])
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def graph_builder_carburants_no_color(data_frame, name, legend1, legend2):
axes = data_frame.plot()
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def percent_formatter(x, pos = 0):
return '%1.0f%%' % (100 * x)
def save_dataframe_to_graph(dataframe, file_name):
return dataframe.to_csv('C:/Users/thomas.douenne/Documents/data/Stats_rapport/' + file_name, sep = ';')
# assets_directory = os.path.join(
# pkg_resources.get_distribution('openfisca_france_indirect_taxation').location
# )
# return dataframe.to_csv(os.path.join(assets_directory, 'openfisca_france_indirect_taxation', 'assets',
# file_name), sep = ';')
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/utils_example.py | collapse | python | def collapse(dataframe, groupe, var):
'''
Pour une variable, fonction qui calcule la moyenne pondérée au sein de chaque groupe.
'''
grouped = dataframe.groupby([groupe])
var_weighted_grouped = grouped.apply(lambda x: wavg(groupe = x, var = var))
return var_weighted_grouped | Pour une variable, fonction qui calcule la moyenne pondérée au sein de chaque groupe. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/utils_example.py#L116-L122 | null | # -*- coding: utf-8 -*-
from __future__ import division
from pandas import DataFrame
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import openfisca_france_indirect_taxation
from openfisca_france_indirect_taxation.surveys import get_input_data_frame
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_indirect_taxation.surveys import SurveyScenario
from openfisca_france_indirect_taxation.examples.calage_bdf_cn import \
build_df_calee_on_grospostes, build_df_calee_on_ticpe
def create_survey_scenario(year = None):
assert year is not None
input_data_frame = get_input_data_frame(year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame,
tax_benefit_system = tax_benefit_system,
year = year,
)
return survey_scenario
def simulate(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def simulate_df_calee_by_grosposte(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
input_data_frame_calee = build_df_calee_on_grospostes(input_data_frame, year, year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame_calee,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def simulate_df_calee_on_ticpe(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
input_data_frame_calee = build_df_calee_on_ticpe(input_data_frame, year, year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame_calee,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def wavg(groupe, var):
    """Return the mean of column *var* of *groupe*, weighted by the
    'pondmen' household-weight column.
    """
    weights = groupe['pondmen']
    return (groupe[var] * weights).sum() / weights.sum()
def df_weighted_average_grouped(dataframe, groupe, varlist):
'''
Agrège les résultats de weighted_average_grouped() en une unique dataframe pour la liste de variable 'varlist'.
'''
return DataFrame(
dict([
(var, collapse(dataframe, groupe, var)) for var in varlist
])
)
# To choose color when doing graph, could put a list of colors in argument
def graph_builder_bar(graph):
axes = graph.plot(
kind = 'bar',
stacked = True,
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (1.5, 1.05),
)
return plt.show()
def graph_builder_bar_list(graph, a, b):
axes = graph.plot(
kind = 'bar',
stacked = True,
color = ['#FF0000']
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line_percent(graph, a, b):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line(graph):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (1, 0.25),
)
return plt.show()
def graph_builder_carburants(data_frame, name, legend1, legend2, color1, color2, color3, color4):
axes = data_frame.plot(
color = [color1, color2, color3, color4])
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def graph_builder_carburants_no_color(data_frame, name, legend1, legend2):
axes = data_frame.plot()
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def percent_formatter(x, pos = 0):
return '%1.0f%%' % (100 * x)
def save_dataframe_to_graph(dataframe, file_name):
return dataframe.to_csv('C:/Users/thomas.douenne/Documents/data/Stats_rapport/' + file_name, sep = ';')
# assets_directory = os.path.join(
# pkg_resources.get_distribution('openfisca_france_indirect_taxation').location
# )
# return dataframe.to_csv(os.path.join(assets_directory, 'openfisca_france_indirect_taxation', 'assets',
# file_name), sep = ';')
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/utils_example.py | df_weighted_average_grouped | python | def df_weighted_average_grouped(dataframe, groupe, varlist):
'''
Agrège les résultats de weighted_average_grouped() en une unique dataframe pour la liste de variable 'varlist'.
'''
return DataFrame(
dict([
(var, collapse(dataframe, groupe, var)) for var in varlist
])
) | Agrège les résultats de weighted_average_grouped() en une unique dataframe pour la liste de variable 'varlist'. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/utils_example.py#L125-L133 | null | # -*- coding: utf-8 -*-
from __future__ import division
from pandas import DataFrame
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import openfisca_france_indirect_taxation
from openfisca_france_indirect_taxation.surveys import get_input_data_frame
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_indirect_taxation.surveys import SurveyScenario
from openfisca_france_indirect_taxation.examples.calage_bdf_cn import \
build_df_calee_on_grospostes, build_df_calee_on_ticpe
def create_survey_scenario(year = None):
assert year is not None
input_data_frame = get_input_data_frame(year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame,
tax_benefit_system = tax_benefit_system,
year = year,
)
return survey_scenario
def simulate(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def simulate_df_calee_by_grosposte(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
input_data_frame_calee = build_df_calee_on_grospostes(input_data_frame, year, year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame_calee,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def simulate_df_calee_on_ticpe(simulated_variables, year):
'''
Construction de la DataFrame à partir de laquelle sera faite l'analyse des données
'''
input_data_frame = get_input_data_frame(year)
input_data_frame_calee = build_df_calee_on_ticpe(input_data_frame, year, year)
TaxBenefitSystem = openfisca_france_indirect_taxation.init_country()
tax_benefit_system = TaxBenefitSystem()
survey_scenario = SurveyScenario().init_from_data_frame(
input_data_frame = input_data_frame_calee,
tax_benefit_system = tax_benefit_system,
year = year,
)
simulation = survey_scenario.new_simulation()
return DataFrame(
dict([
(name, simulation.calculate(name)) for name in simulated_variables
])
)
def wavg(groupe, var):
'''
Fonction qui calcule la moyenne pondérée par groupe d'une variable
'''
d = groupe[var]
w = groupe['pondmen']
return (d * w).sum() / w.sum()
def collapse(dataframe, groupe, var):
'''
Pour une variable, fonction qui calcule la moyenne pondérée au sein de chaque groupe.
'''
grouped = dataframe.groupby([groupe])
var_weighted_grouped = grouped.apply(lambda x: wavg(groupe = x, var = var))
return var_weighted_grouped
# To choose color when doing graph, could put a list of colors in argument
def graph_builder_bar(graph):
axes = graph.plot(
kind = 'bar',
stacked = True,
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (1.5, 1.05),
)
return plt.show()
def graph_builder_bar_list(graph, a, b):
axes = graph.plot(
kind = 'bar',
stacked = True,
color = ['#FF0000']
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line_percent(graph, a, b):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.yaxis.set_major_formatter(ticker.FuncFormatter(percent_formatter))
axes.legend(
bbox_to_anchor = (a, b),
)
return plt.show()
def graph_builder_line(graph):
axes = graph.plot(
)
plt.axhline(0, color = 'k')
axes.legend(
bbox_to_anchor = (1, 0.25),
)
return plt.show()
def graph_builder_carburants(data_frame, name, legend1, legend2, color1, color2, color3, color4):
axes = data_frame.plot(
color = [color1, color2, color3, color4])
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def graph_builder_carburants_no_color(data_frame, name, legend1, legend2):
axes = data_frame.plot()
fig = axes.get_figure()
plt.axhline(0, color = 'k')
# axes.xaxis(data_frame['annee'])
axes.legend(
bbox_to_anchor = (legend1, legend2),
)
return plt.show(), fig.savefig('C:/Users/thomas.douenne/Documents/data/graphs_transports/{}.png'.format(name))
def percent_formatter(x, pos = 0):
return '%1.0f%%' % (100 * x)
def save_dataframe_to_graph(dataframe, file_name):
return dataframe.to_csv('C:/Users/thomas.douenne/Documents/data/Stats_rapport/' + file_name, sep = ';')
# assets_directory = os.path.join(
# pkg_resources.get_distribution('openfisca_france_indirect_taxation').location
# )
# return dataframe.to_csv(os.path.join(assets_directory, 'openfisca_france_indirect_taxation', 'assets',
# file_name), sep = ';')
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/param/preprocessing.py | preprocess_legislation | python | def preprocess_legislation(legislation_json):
'''
Preprocess the legislation parameters to add prices and amounts from national accounts
'''
import os
import pkg_resources
import pandas as pd
# Add fuel prices to the tree
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
prix_annuel_carburants = pd.read_csv(
os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'prix',
'prix_annuel_carburants.csv'
), sep =';'
)
prix_annuel_carburants['Date'] = prix_annuel_carburants['Date'].astype(int)
prix_annuel_carburants = prix_annuel_carburants.set_index('Date')
all_values = {}
prix_carburants = {
"@type": "Node",
"description": "prix des carburants en euros par hectolitre",
"children": {},
}
# For super_95_e10, we need to use the price of super_95 between 2009 and 2012 included,
# because we don't have the data. We use super_95 because it is very close and won't affect the results too much
prix_annuel = prix_annuel_carburants['super_95_e10_ttc']
all_values['super_95_e10_ttc'] = []
for year in range(1990, 2009):
values1 = dict()
values1['start'] = u'{}-01-01'.format(year)
values1['stop'] = u'{}-12-31'.format(year)
values1['value'] = prix_annuel.loc[year] * 100
all_values['super_95_e10_ttc'].append(values1)
prix_annuel = prix_annuel_carburants['super_95_ttc']
for year in range(2009, 2013):
values2 = dict()
values2['start'] = u'{}-01-01'.format(year)
values2['stop'] = u'{}-12-31'.format(year)
values2['value'] = prix_annuel.loc[year] * 100
all_values['super_95_e10_ttc'].append(values2)
prix_annuel = prix_annuel_carburants['super_95_e10_ttc']
for year in range(2013, 2015):
values3 = dict()
values3['start'] = u'{}-01-01'.format(year)
values3['stop'] = u'{}-12-31'.format(year)
values3['value'] = prix_annuel.loc[year] * 100
all_values['super_95_e10_ttc'].append(values3)
prix_carburants['children']['super_95_e10_ttc'] = {
"@type": "Parameter",
"description": 'super_95_e10_ttc'.replace('_', ' '),
"format": "float",
"values": all_values['super_95_e10_ttc']
}
for element in ['diesel_ht', 'diesel_ttc', 'super_95_ht', 'super_95_ttc', 'super_98_ht', 'super_98_ttc',
'super_95_e10_ht', 'gplc_ht', 'gplc_ttc', 'super_plombe_ht', 'super_plombe_ttc']:
assert element in prix_annuel_carburants.columns
prix_annuel = prix_annuel_carburants[element]
all_values[element] = []
for year in range(1990, 2015):
values = dict()
values['start'] = u'{}-01-01'.format(year)
values['stop'] = u'{}-12-31'.format(year)
values['value'] = prix_annuel.loc[year] * 100
all_values[element].append(values)
prix_carburants['children'][element] = {
"@type": "Parameter",
"description": element.replace('_', ' '),
"format": "float",
"values": all_values[element]
}
legislation_json['children']['imposition_indirecte']['children']['prix_carburants'] = prix_carburants
# Add the number of vehicle in circulation to the tree
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
parc_annuel_moyen_vp = pd.read_csv(
os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'quantites',
'parc_annuel_moyen_vp.csv'
), sep =';'
)
parc_annuel_moyen_vp = parc_annuel_moyen_vp.set_index('Unnamed: 0')
values_parc = {}
parc_vp = {
"@type": "Node",
"description": "taille moyenne du parc automobile en France métropolitaine en milliers de véhicules",
"children": {},
}
for element in ['diesel', 'essence']:
taille_parc = parc_annuel_moyen_vp[element]
values_parc[element] = []
for year in range(1990, 2014):
values = dict()
values['start'] = u'{}-01-01'.format(year)
values['stop'] = u'{}-12-31'.format(year)
values['value'] = taille_parc.loc[year]
values_parc[element].append(values)
parc_vp['children'][element] = {
"@type": "Parameter",
"description": "nombre de véhicules particuliers immatriculés en France à motorisation " + element,
"format": "float",
"values": values_parc[element]
}
legislation_json['children']['imposition_indirecte']['children']['parc_vp'] = parc_vp
# Add the total quantity of fuel consumed per year to the tree
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
quantite_carbu_vp_france = pd.read_csv(
os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'quantites',
'quantite_carbu_vp_france.csv'
), sep =';'
)
quantite_carbu_vp_france = quantite_carbu_vp_france.set_index('Unnamed: 0')
values_quantite = {}
quantite_carbu_vp = {
"@type": "Node",
"description": "quantite de carburants consommés en France métropolitaine",
"children": {},
}
for element in ['diesel', 'essence']:
quantite_carburants = quantite_carbu_vp_france[element]
values_quantite[element] = []
for year in range(1990, 2014):
values = dict()
values['start'] = u'{}-01-01'.format(year)
values['stop'] = u'{}-12-31'.format(year)
values['value'] = quantite_carburants.loc[year]
values_quantite[element].append(values)
quantite_carbu_vp['children'][element] = {
"@type": "Parameter",
"description": "consommation totale de " + element + " en France",
"format": "float",
"values": values_quantite[element]
}
legislation_json['children']['imposition_indirecte']['children']['quantite_carbu_vp'] = quantite_carbu_vp
# Add the shares of each type of supercabrurant (SP95, SP98, E10, etc.) among supercarburants
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
part_des_types_de_supercarburants = pd.read_csv(
os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'part_des_types_de_supercarburants.csv'
), sep =';'
)
del part_des_types_de_supercarburants['Source']
part_des_types_de_supercarburants = \
part_des_types_de_supercarburants[part_des_types_de_supercarburants['annee'] > 0].copy()
part_des_types_de_supercarburants['annee'] = part_des_types_de_supercarburants['annee'].astype(int)
part_des_types_de_supercarburants = part_des_types_de_supercarburants.set_index('annee')
# delete share of e_85 because we have no data for its price
# When the sum of all shares is not one, need to multiply each share by the same coefficient
cols = part_des_types_de_supercarburants.columns
for element in cols:
part_des_types_de_supercarburants[element] = (
part_des_types_de_supercarburants[element] /
(part_des_types_de_supercarburants['somme'] - part_des_types_de_supercarburants['sp_e85'])
)
del part_des_types_de_supercarburants['sp_e85']
del part_des_types_de_supercarburants['somme']
cols = part_des_types_de_supercarburants.columns
part_des_types_de_supercarburants['somme'] = 0
for element in cols:
part_des_types_de_supercarburants['somme'] += part_des_types_de_supercarburants[element]
assert (part_des_types_de_supercarburants['somme'] == 1).any(), "The weighting of the shares did not work"
values_part_supercarburants = {}
part_type_supercaburant = {
"@type": "Node",
"description": "part de la consommation totale d'essence de chaque type supercarburant",
"children": {},
}
for element in ['super_plombe', 'sp_95', 'sp_98', 'sp_e10']:
part_par_carburant = part_des_types_de_supercarburants[element]
values_part_supercarburants[element] = []
for year in range(2000, 2015):
values = dict()
values['start'] = u'{}-01-01'.format(year)
values['stop'] = u'{}-12-31'.format(year)
values['value'] = part_par_carburant.loc[year]
values_part_supercarburants[element].append(values)
part_type_supercaburant['children'][element] = {
"@type": "Parameter",
"description": "part de " + element + " dans la consommation totale d'essences",
"format": "float",
"values": values_part_supercarburants[element]
}
legislation_json['children']['imposition_indirecte']['children']['part_type_supercarburants'] = \
part_type_supercaburant
# Add data from comptabilite national about alcohol
alcool_conso_et_vin = {
"@type": "Node",
"description": "alcools",
"children": {},
}
alcool_conso_et_vin['children']['vin'] = {
"@type": "Node",
"description": "Pour calculer le taux de taxation implicite sur le vin",
"children": {
"droit_cn_vin": {
"@type": "Parameter",
"description": u"Masse droit vin, vin mousseux, cidres et poirés selon comptabilité nationale",
"format": "float",
"values": [
{'start': u'1995-01-01', 'stop': u'1995-12-31', 'value': 129},
{'start': u'1996-01-01', 'stop': u'1996-12-31', 'value': 130},
{'start': u'1997-01-01', 'stop': u'1997-12-31', 'value': 129},
{'start': u'1998-01-01', 'stop': u'1998-12-31', 'value': 132},
{'start': u'1999-01-01', 'stop': u'1999-12-31', 'value': 133},
{'start': u'2000-01-01', 'stop': u'2000-12-31', 'value': 127},
{'start': u'2001-01-01', 'stop': u'2001-12-31', 'value': 127},
{'start': u'2002-01-01', 'stop': u'2002-12-31', 'value': 127},
{'start': u'2003-01-01', 'stop': u'2003-12-31', 'value': 127},
{'start': u'2004-01-01', 'stop': u'2004-12-31', 'value': 125},
{'start': u'2005-01-01', 'stop': u'2005-12-31', 'value': 117},
{'start': u'2006-01-01', 'stop': u'2006-12-31', 'value': 119},
{'start': u'2007-01-01', 'stop': u'2007-12-31', 'value': 117},
{'start': u'2008-01-01', 'stop': u'2008-12-31', 'value': 114},
{'start': u'2009-01-01', 'stop': u'2009-12-31', 'value': 117},
{'start': u'2010-01-01', 'stop': u'2010-12-31', 'value': 119},
{'start': u'2011-01-01', 'stop': u'2011-12-31', 'value': 118},
{'start': u'2012-01-01', 'stop': u'2012-12-31', 'value': 120},
{'start': u'2013-01-01', 'stop': u'2013-12-31', 'value': 122},
# {'start': u'2014-01-01', 'stop': u'2014-12-31', 'value': },
],
},
"masse_conso_cn_vin": {
"@type": "Parameter",
"description": u"Masse consommation vin, vin mousseux, cidres et poirés selon comptabilité nationale",
"format": "float",
"values": [
{'start': u'1995-01-01', 'stop': u'1995-12-31', 'value': 7191},
{'start': u'1996-01-01', 'stop': u'1996-12-31', 'value': 7419},
{'start': u'1997-01-01', 'stop': u'1997-12-31', 'value': 7636},
{'start': u'1998-01-01', 'stop': u'1998-12-31', 'value': 8025},
{'start': u'1999-01-01', 'stop': u'1999-12-31', 'value': 8451},
{'start': u'2000-01-01', 'stop': u'2000-12-31', 'value': 8854},
{'start': u'2001-01-01', 'stop': u'2001-12-31', 'value': 9168},
{'start': u'2002-01-01', 'stop': u'2002-12-31', 'value': 9476},
{'start': u'2003-01-01', 'stop': u'2003-12-31', 'value': 9695},
{'start': u'2004-01-01', 'stop': u'2004-12-31', 'value': 9985},
{'start': u'2005-01-01', 'stop': u'2005-12-31', 'value': 9933},
{'start': u'2006-01-01', 'stop': u'2006-12-31', 'value': 10002},
{'start': u'2007-01-01', 'stop': u'2007-12-31', 'value': 10345},
{'start': u'2008-01-01', 'stop': u'2008-12-31', 'value': 10461},
{'start': u'2009-01-01', 'stop': u'2009-12-31', 'value': 10728},
{'start': u'2010-01-01', 'stop': u'2010-12-31', 'value': 11002},
{'start': u'2011-01-01', 'stop': u'2011-12-31', 'value': 11387},
{'start': u'2012-01-01', 'stop': u'2012-12-31', 'value': 11407},
{'start': u'2013-01-01', 'stop': u'2013-12-31', 'value': 11515},
# {'start': u'2014-01-01', 'stop': u'2014-12-31', 'value': },
],
},
},
}
alcool_conso_et_vin['children']['biere'] = {
"@type": "Node",
"description": "Pour calculer le taux de taxation implicite sur la bière",
"children": {
"droit_cn_biere": {
"@type": "Parameter",
"description": "Masse droit biere selon comptabilité nationale",
"format": "float",
"values": [
{'start': u'1995-01-01', 'stop': u'1995-12-31', 'value': 361},
{'start': u'1996-01-01', 'stop': u'1996-12-31', 'value': 366},
{'start': u'1997-01-01', 'stop': u'1997-12-31', 'value': 364},
{'start': u'1998-01-01', 'stop': u'1998-12-31', 'value': 365},
{'start': u'1999-01-01', 'stop': u'1999-12-31', 'value': 380},
{'start': u'2000-01-01', 'stop': u'2000-12-31', 'value': 359},
{'start': u'2001-01-01', 'stop': u'2001-12-31', 'value': 364},
{'start': u'2002-01-01', 'stop': u'2002-12-31', 'value': 361},
{'start': u'2003-01-01', 'stop': u'2003-12-31', 'value': 370},
{'start': u'2004-01-01', 'stop': u'2004-12-31', 'value': 378},
{'start': u'2005-01-01', 'stop': u'2005-12-31', 'value': 364},
{'start': u'2006-01-01', 'stop': u'2006-12-31', 'value': 396},
{'start': u'2007-01-01', 'stop': u'2007-12-31', 'value': 382},
{'start': u'2008-01-01', 'stop': u'2008-12-31', 'value': 375}, {'start': u'2009-01-01', 'stop': u'2009-12-31', 'value': 376},
{'start': u'2010-01-01', 'stop': u'2010-12-31', 'value': 375},
{'start': u'2011-01-01', 'stop': u'2011-12-31', 'value': 393},
{'start': u'2012-01-01', 'stop': u'2012-12-31', 'value': 783},
{'start': u'2013-01-01', 'stop': u'2013-12-31', 'value': 897},
# {'start': u'2014-01-01', 'stop': u'2014-12-31', 'value': },
],
},
"masse_conso_cn_biere": {
"@type": "Parameter",
"description": u"Masse consommation biere selon comptabilité nationale",
"format": "float",
"values": [
{'start': u'1995-01-01', 'stop': u'1995-12-31', 'value': 2111},
{'start': u'1996-01-01', 'stop': u'1996-12-31', 'value': 2144},
{'start': u'1997-01-01', 'stop': u'1997-12-31', 'value': 2186},
{'start': u'1998-01-01', 'stop': u'1998-12-31', 'value': 2291},
{'start': u'1999-01-01', 'stop': u'1999-12-31', 'value': 2334},
{'start': u'2000-01-01', 'stop': u'2000-12-31', 'value': 2290},
{'start': u'2001-01-01', 'stop': u'2001-12-31', 'value': 2327},
{'start': u'2002-01-01', 'stop': u'2002-12-31', 'value': 2405},
{'start': u'2003-01-01', 'stop': u'2003-12-31', 'value': 2554},
{'start': u'2004-01-01', 'stop': u'2004-12-31', 'value': 2484},
{'start': u'2005-01-01', 'stop': u'2005-12-31', 'value': 2466},
{'start': u'2006-01-01', 'stop': u'2006-12-31', 'value': 2486},
{'start': u'2007-01-01', 'stop': u'2007-12-31', 'value': 2458},
{'start': u'2008-01-01', 'stop': u'2008-12-31', 'value': 2287},
{'start': u'2009-01-01', 'stop': u'2009-12-31', 'value': 2375},
{'start': u'2010-01-01', 'stop': u'2010-12-31', 'value': 2461},
{'start': u'2011-01-01', 'stop': u'2011-12-31', 'value': 2769},
{'start': u'2012-01-01', 'stop': u'2012-12-31', 'value': 2868},
{'start': u'2013-01-01', 'stop': u'2013-12-31', 'value': 3321},
# {'start': u'2014-01-01', 'stop': u'2014-12-31', 'value': },
],
},
},
}
alcool_conso_et_vin['children']['alcools_forts'] = {
"@type": "Node",
"description": "Pour calculer le taux de taxation implicite sur alcools forts",
"children": {
"droit_cn_alcools": {
"@type": "Parameter",
"description": "Masse droit alcool selon comptabilité nationale sans droits sur les produits intermediaires et cotisation spéciale alcool fort",
"format": "float",
"values": [
{'start': u'2000-01-01', 'stop': u'2000-12-31', 'value': 1872},
{'start': u'2001-01-01', 'stop': u'2001-12-31', 'value': 1957},
{'start': u'2002-01-01', 'stop': u'2002-12-31', 'value': 1932},
{'start': u'2003-01-01', 'stop': u'2003-12-31', 'value': 1891},
{'start': u'2004-01-01', 'stop': u'2004-12-31', 'value': 1908},
{'start': u'2005-01-01', 'stop': u'2005-12-31', 'value': 1842},
{'start': u'2006-01-01', 'stop': u'2006-12-31', 'value': 1954},
{'start': u'2007-01-01', 'stop': u'2007-12-31', 'value': 1990},
{'start': u'2008-01-01', 'stop': u'2008-12-31', 'value': 2005},
{'start': u'2009-01-01', 'stop': u'2009-12-31', 'value': 2031},
{'start': u'2010-01-01', 'stop': u'2010-12-31', 'value': 2111},
{'start': u'2011-01-01', 'stop': u'2011-12-31', 'value': 2150},
{'start': u'2012-01-01', 'stop': u'2012-12-31', 'value': 2225},
# TODO: Problème pour les alcools forts chiffres différents entre les deux bases excel !
],
},
"droit_cn_alcools_total": {
"@type": "Parameter",
"description": u"Masse droit alcool selon comptabilité nationale avec les differents droits",
"format": "float",
"values": [
{'start': u'1995-01-01', 'stop': u'1995-12-31', 'value': 2337},
{'start': u'1996-01-01', 'stop': u'1996-12-31', 'value': 2350},
{'start': u'1997-01-01', 'stop': u'1997-12-31', 'value': 2366},
{'start': u'1998-01-01', 'stop': u'1998-12-31', 'value': 2369},
{'start': u'1999-01-01', 'stop': u'1999-12-31', 'value': 2385},
{'start': u'2000-01-01', 'stop': u'2000-12-31', 'value': 2416}, {'start': u'2001-01-01', 'stop': u'2001-12-31', 'value': 2514},
{'start': u'2002-01-01', 'stop': u'2002-12-31', 'value': 2503},
{'start': u'2003-01-01', 'stop': u'2003-12-31', 'value': 2453},
{'start': u'2004-01-01', 'stop': u'2004-12-31', 'value': 2409},
{'start': u'2005-01-01', 'stop': u'2005-12-31', 'value': 2352},
{'start': u'2006-01-01', 'stop': u'2006-12-31', 'value': 2477},
{'start': u'2007-01-01', 'stop': u'2007-12-31', 'value': 2516},
{'start': u'2008-01-01', 'stop': u'2008-12-31', 'value': 2528},
{'start': u'2009-01-01', 'stop': u'2009-12-31', 'value': 2629},
{'start': u'2010-01-01', 'stop': u'2010-12-31', 'value': 2734},
{'start': u'2011-01-01', 'stop': u'2011-12-31', 'value': 3078},
{'start': u'2012-01-01', 'stop': u'2012-12-31', 'value': 2718},
{'start': u'2013-01-01', 'stop': u'2013-12-31', 'value': 3022},
# {'start': u'2014-01-01', 'stop': u'2014-12-31', 'value': },
],
},
"masse_conso_cn_alcools": {
"@type": "Parameter",
"description": u"Masse consommation alcool selon comptabilité nationale",
"format": "float",
"values": [
{'start': u'1995-01-01', 'stop': u'1995-12-31', 'value': 4893},
{'start': u'1996-01-01', 'stop': u'1996-12-31', 'value': 5075},
{'start': u'1997-01-01', 'stop': u'1997-12-31', 'value': 5065},
{'start': u'1998-01-01', 'stop': u'1998-12-31', 'value': 5123},
{'start': u'1999-01-01', 'stop': u'1999-12-31', 'value': 5234},
{'start': u'2000-01-01', 'stop': u'2000-12-31', 'value': 5558},
{'start': u'2001-01-01', 'stop': u'2001-12-31', 'value': 5721},
{'start': u'2002-01-01', 'stop': u'2002-12-31', 'value': 5932},
{'start': u'2003-01-01', 'stop': u'2003-12-31', 'value': 5895},
{'start': u'2004-01-01', 'stop': u'2004-12-31', 'value': 5967},
{'start': u'2005-01-01', 'stop': u'2005-12-31', 'value': 5960},
{'start': u'2006-01-01', 'stop': u'2006-12-31', 'value': 6106},
{'start': u'2007-01-01', 'stop': u'2007-12-31', 'value': 6142},
{'start': u'2008-01-01', 'stop': u'2008-12-31', 'value': 6147},
{'start': u'2009-01-01', 'stop': u'2009-12-31', 'value': 6342},
{'start': u'2010-01-01', 'stop': u'2010-12-31', 'value': 6618},
{'start': u'2011-01-01', 'stop': u'2011-12-31', 'value': 6680},
{'start': u'2012-01-01', 'stop': u'2012-12-31', 'value': 6996},
{'start': u'2013-01-01', 'stop': u'2013-12-31', 'value': 7022},
],
},
},
}
legislation_json['children']['imposition_indirecte']['children']['alcool_conso_et_vin'] = alcool_conso_et_vin
# Make the change from francs to euros for excise taxes in ticpe
keys_ticpe = legislation_json['children']['imposition_indirecte']['children']['ticpe']['children'].keys()
for element in keys_ticpe:
get_values = \
legislation_json['children']['imposition_indirecte']['children']['ticpe']['children'][element]['values']
for each_value in get_values:
get_character = '{}'.format(each_value['start'])
year = int(get_character[:4])
if year < 2002:
each_value['value'] = each_value['value'] / 6.55957
else:
each_value['value'] = each_value['value']
return legislation_json | Preprocess the legislation parameters to add prices and amounts from national accounts | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/param/preprocessing.py#L29-L475 | null | # -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core import reforms
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/calage_bdf_cn_bis.py | get_inflators_bdf_to_cn | python | def get_inflators_bdf_to_cn(data_year):
'''
Calcule les ratios de calage (bdf sur cn pour année de données)
à partir des masses de comptabilité nationale et des masses de consommation de bdf.
'''
data_cn = get_cn_aggregates(data_year)
data_bdf = get_bdf_aggregates(data_year)
masses = data_cn.merge(
data_bdf, left_index = True, right_index = True
)
masses.rename(columns = {'bdf_aggregates': 'conso_bdf{}'.format(data_year)}, inplace = True)
return (
masses['consoCN_COICOP_{}'.format(data_year)] / masses['conso_bdf{}'.format(data_year)]
).to_dict() | Calcule les ratios de calage (bdf sur cn pour année de données)
à partir des masses de comptabilité nationale et des masses de consommation de bdf. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/calage_bdf_cn_bis.py#L82-L95 | [
"def get_bdf_aggregates(data_year = None):\n assert data_year is not None\n\n depenses = get_input_data_frame(data_year)\n depenses_by_grosposte = pandas.DataFrame()\n for grosposte in range(1, 13):\n if depenses_by_grosposte is None:\n depenses_by_grosposte = depenses['coicop12_{}'.format(grosposte)]\n else:\n depenses_by_grosposte = concat([depenses_by_grosposte, depenses['coicop12_{}'.format(grosposte)]], axis = 1)\n\n depenses_by_grosposte = concat([depenses_by_grosposte, depenses['pondmen']], axis = 1)\n grospostes = set(depenses_by_grosposte.columns)\n grospostes.remove('pondmen')\n bdf_aggregates_by_grosposte = pandas.DataFrame()\n for grosposte in grospostes:\n bdf_aggregates_by_grosposte.loc[grosposte, 'bdf_aggregates'] = (\n depenses_by_grosposte[grosposte] * depenses_by_grosposte['pondmen']\n ).sum()\n\n return bdf_aggregates_by_grosposte\n",
"def get_cn_aggregates(target_year = None):\n assert target_year is not None\n default_config_files_directory = os.path.join(\n pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)\n parametres_fiscalite_file_path = os.path.join(\n default_config_files_directory,\n 'openfisca_france_indirect_taxation',\n 'assets',\n 'legislation',\n 'Parametres fiscalite indirecte.xls'\n )\n masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = \"consommation_CN\")\n masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', target_year]]\n masses_cn_12postes_data_frame['code_unicode'] = masses_cn_12postes_data_frame.Code.astype(unicode)\n masses_cn_12postes_data_frame['len_code'] = masses_cn_12postes_data_frame['code_unicode'].apply(lambda x: len(x))\n\n masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['len_code'] == 6]\n masses_cn_12postes_data_frame['code'] = masses_cn_12postes_data_frame.Code.astype(int)\n masses_cn_12postes_data_frame = masses_cn_12postes_data_frame.drop(['len_code', 'code_unicode', 'Code'], 1)\n\n masses_cn_12postes_data_frame.rename(\n columns = {\n target_year: 'consoCN_COICOP_{}'.format(target_year),\n 'code': 'poste'\n },\n inplace = True,\n )\n masses_cn_12postes_data_frame['poste'] = masses_cn_12postes_data_frame['poste'].astype(str)\n masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['poste'] != '15']\n for element in masses_cn_12postes_data_frame['poste']:\n masses_cn_12postes_data_frame['poste'] = \\\n masses_cn_12postes_data_frame['poste'].replace(element, 'coicop12_{}'.format(element))\n masses_cn_12postes_data_frame.set_index('poste', inplace = True)\n return masses_cn_12postes_data_frame * 1e6\n"
] | # -*- coding: utf-8 -*-
from __future__ import division
import logging
import os
import pkg_resources
import pandas
from pandas import concat
from openfisca_france_indirect_taxation.examples.utils_example import get_input_data_frame
from openfisca_france_indirect_taxation.build_survey_data.utils import find_nearest_inferior
log = logging.getLogger(__name__)
data_years = [2000, 2005, 2011]
def get_bdf_aggregates(data_year = None):
assert data_year is not None
depenses = get_input_data_frame(data_year)
depenses_by_grosposte = pandas.DataFrame()
for grosposte in range(1, 13):
if depenses_by_grosposte is None:
depenses_by_grosposte = depenses['coicop12_{}'.format(grosposte)]
else:
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['coicop12_{}'.format(grosposte)]], axis = 1)
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['pondmen']], axis = 1)
grospostes = set(depenses_by_grosposte.columns)
grospostes.remove('pondmen')
bdf_aggregates_by_grosposte = pandas.DataFrame()
for grosposte in grospostes:
bdf_aggregates_by_grosposte.loc[grosposte, 'bdf_aggregates'] = (
depenses_by_grosposte[grosposte] * depenses_by_grosposte['pondmen']
).sum()
return bdf_aggregates_by_grosposte
def get_cn_aggregates(target_year = None):
assert target_year is not None
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
parametres_fiscalite_file_path = os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'legislation',
'Parametres fiscalite indirecte.xls'
)
masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = "consommation_CN")
masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', target_year]]
masses_cn_12postes_data_frame['code_unicode'] = masses_cn_12postes_data_frame.Code.astype(unicode)
masses_cn_12postes_data_frame['len_code'] = masses_cn_12postes_data_frame['code_unicode'].apply(lambda x: len(x))
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['len_code'] == 6]
masses_cn_12postes_data_frame['code'] = masses_cn_12postes_data_frame.Code.astype(int)
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame.drop(['len_code', 'code_unicode', 'Code'], 1)
masses_cn_12postes_data_frame.rename(
columns = {
target_year: 'consoCN_COICOP_{}'.format(target_year),
'code': 'poste'
},
inplace = True,
)
masses_cn_12postes_data_frame['poste'] = masses_cn_12postes_data_frame['poste'].astype(str)
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['poste'] != '15']
for element in masses_cn_12postes_data_frame['poste']:
masses_cn_12postes_data_frame['poste'] = \
masses_cn_12postes_data_frame['poste'].replace(element, 'coicop12_{}'.format(element))
masses_cn_12postes_data_frame.set_index('poste', inplace = True)
return masses_cn_12postes_data_frame * 1e6
def get_inflators_cn_to_cn(target_year):
'''
Calcule l'inflateur de vieillissement à partir des masses de comptabilité nationale.
'''
data_year = find_nearest_inferior(data_years, target_year)
data_year_cn_aggregates = get_cn_aggregates(data_year)['consoCN_COICOP_{}'.format(data_year)].to_dict()
target_year_cn_aggregates = get_cn_aggregates(target_year)['consoCN_COICOP_{}'.format(target_year)].to_dict()
return dict(
(key, target_year_cn_aggregates[key] / data_year_cn_aggregates[key])
for key in data_year_cn_aggregates.keys()
)
def get_inflators(target_year):
'''
Fonction qui calcule les ratios de calage (bdf sur cn pour année de données) et de vieillissement
à partir des masses de comptabilité nationale et des masses de consommation de bdf.
'''
data_year = find_nearest_inferior(data_years, target_year)
inflators_bdf_to_cn = get_inflators_bdf_to_cn(data_year)
inflators_cn_to_cn = get_inflators_cn_to_cn(target_year)
ratio_by_variable = dict()
for key in inflators_cn_to_cn.keys():
ratio_by_variable[key] = inflators_bdf_to_cn[key] * inflators_cn_to_cn[key]
return ratio_by_variable
def get_inflators_by_year():
inflators_by_year = dict()
for target_year in range(2000, 2015):
inflators = get_inflators(target_year)
inflators_by_year[target_year] = inflators
return inflators_by_year
def get_aggregates_by_year():
aggregates_by_year = dict()
for target_year in range(2000, 2015):
aggregates = get_cn_aggregates(target_year)['consoCN_COICOP_{}'.format(target_year)].to_dict()
aggregates_by_year[target_year] = aggregates
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/calage_bdf_cn_bis.py | get_inflators_cn_to_cn | python | def get_inflators_cn_to_cn(target_year):
'''
Calcule l'inflateur de vieillissement à partir des masses de comptabilité nationale.
'''
data_year = find_nearest_inferior(data_years, target_year)
data_year_cn_aggregates = get_cn_aggregates(data_year)['consoCN_COICOP_{}'.format(data_year)].to_dict()
target_year_cn_aggregates = get_cn_aggregates(target_year)['consoCN_COICOP_{}'.format(target_year)].to_dict()
return dict(
(key, target_year_cn_aggregates[key] / data_year_cn_aggregates[key])
for key in data_year_cn_aggregates.keys()
) | Calcule l'inflateur de vieillissement à partir des masses de comptabilité nationale. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/calage_bdf_cn_bis.py#L98-L109 | [
"def find_nearest_inferior(years, year):\n # years = year_data_list\n anterior_years = [\n available_year for available_year in years if available_year <= year\n ]\n return max(anterior_years)\n",
"def get_cn_aggregates(target_year = None):\n assert target_year is not None\n default_config_files_directory = os.path.join(\n pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)\n parametres_fiscalite_file_path = os.path.join(\n default_config_files_directory,\n 'openfisca_france_indirect_taxation',\n 'assets',\n 'legislation',\n 'Parametres fiscalite indirecte.xls'\n )\n masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = \"consommation_CN\")\n masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', target_year]]\n masses_cn_12postes_data_frame['code_unicode'] = masses_cn_12postes_data_frame.Code.astype(unicode)\n masses_cn_12postes_data_frame['len_code'] = masses_cn_12postes_data_frame['code_unicode'].apply(lambda x: len(x))\n\n masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['len_code'] == 6]\n masses_cn_12postes_data_frame['code'] = masses_cn_12postes_data_frame.Code.astype(int)\n masses_cn_12postes_data_frame = masses_cn_12postes_data_frame.drop(['len_code', 'code_unicode', 'Code'], 1)\n\n masses_cn_12postes_data_frame.rename(\n columns = {\n target_year: 'consoCN_COICOP_{}'.format(target_year),\n 'code': 'poste'\n },\n inplace = True,\n )\n masses_cn_12postes_data_frame['poste'] = masses_cn_12postes_data_frame['poste'].astype(str)\n masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['poste'] != '15']\n for element in masses_cn_12postes_data_frame['poste']:\n masses_cn_12postes_data_frame['poste'] = \\\n masses_cn_12postes_data_frame['poste'].replace(element, 'coicop12_{}'.format(element))\n masses_cn_12postes_data_frame.set_index('poste', inplace = True)\n return masses_cn_12postes_data_frame * 1e6\n"
] | # -*- coding: utf-8 -*-
from __future__ import division
import logging
import os
import pkg_resources
import pandas
from pandas import concat
from openfisca_france_indirect_taxation.examples.utils_example import get_input_data_frame
from openfisca_france_indirect_taxation.build_survey_data.utils import find_nearest_inferior
log = logging.getLogger(__name__)
data_years = [2000, 2005, 2011]
def get_bdf_aggregates(data_year = None):
assert data_year is not None
depenses = get_input_data_frame(data_year)
depenses_by_grosposte = pandas.DataFrame()
for grosposte in range(1, 13):
if depenses_by_grosposte is None:
depenses_by_grosposte = depenses['coicop12_{}'.format(grosposte)]
else:
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['coicop12_{}'.format(grosposte)]], axis = 1)
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['pondmen']], axis = 1)
grospostes = set(depenses_by_grosposte.columns)
grospostes.remove('pondmen')
bdf_aggregates_by_grosposte = pandas.DataFrame()
for grosposte in grospostes:
bdf_aggregates_by_grosposte.loc[grosposte, 'bdf_aggregates'] = (
depenses_by_grosposte[grosposte] * depenses_by_grosposte['pondmen']
).sum()
return bdf_aggregates_by_grosposte
def get_cn_aggregates(target_year = None):
assert target_year is not None
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
parametres_fiscalite_file_path = os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'legislation',
'Parametres fiscalite indirecte.xls'
)
masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = "consommation_CN")
masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', target_year]]
masses_cn_12postes_data_frame['code_unicode'] = masses_cn_12postes_data_frame.Code.astype(unicode)
masses_cn_12postes_data_frame['len_code'] = masses_cn_12postes_data_frame['code_unicode'].apply(lambda x: len(x))
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['len_code'] == 6]
masses_cn_12postes_data_frame['code'] = masses_cn_12postes_data_frame.Code.astype(int)
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame.drop(['len_code', 'code_unicode', 'Code'], 1)
masses_cn_12postes_data_frame.rename(
columns = {
target_year: 'consoCN_COICOP_{}'.format(target_year),
'code': 'poste'
},
inplace = True,
)
masses_cn_12postes_data_frame['poste'] = masses_cn_12postes_data_frame['poste'].astype(str)
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['poste'] != '15']
for element in masses_cn_12postes_data_frame['poste']:
masses_cn_12postes_data_frame['poste'] = \
masses_cn_12postes_data_frame['poste'].replace(element, 'coicop12_{}'.format(element))
masses_cn_12postes_data_frame.set_index('poste', inplace = True)
return masses_cn_12postes_data_frame * 1e6
def get_inflators_bdf_to_cn(data_year):
'''
Calcule les ratios de calage (bdf sur cn pour année de données)
à partir des masses de comptabilité nationale et des masses de consommation de bdf.
'''
data_cn = get_cn_aggregates(data_year)
data_bdf = get_bdf_aggregates(data_year)
masses = data_cn.merge(
data_bdf, left_index = True, right_index = True
)
masses.rename(columns = {'bdf_aggregates': 'conso_bdf{}'.format(data_year)}, inplace = True)
return (
masses['consoCN_COICOP_{}'.format(data_year)] / masses['conso_bdf{}'.format(data_year)]
).to_dict()
def get_inflators(target_year):
'''
Fonction qui calcule les ratios de calage (bdf sur cn pour année de données) et de vieillissement
à partir des masses de comptabilité nationale et des masses de consommation de bdf.
'''
data_year = find_nearest_inferior(data_years, target_year)
inflators_bdf_to_cn = get_inflators_bdf_to_cn(data_year)
inflators_cn_to_cn = get_inflators_cn_to_cn(target_year)
ratio_by_variable = dict()
for key in inflators_cn_to_cn.keys():
ratio_by_variable[key] = inflators_bdf_to_cn[key] * inflators_cn_to_cn[key]
return ratio_by_variable
def get_inflators_by_year():
inflators_by_year = dict()
for target_year in range(2000, 2015):
inflators = get_inflators(target_year)
inflators_by_year[target_year] = inflators
return inflators_by_year
def get_aggregates_by_year():
aggregates_by_year = dict()
for target_year in range(2000, 2015):
aggregates = get_cn_aggregates(target_year)['consoCN_COICOP_{}'.format(target_year)].to_dict()
aggregates_by_year[target_year] = aggregates
|
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/calage_bdf_cn_bis.py | get_inflators | python | def get_inflators(target_year):
'''
Fonction qui calcule les ratios de calage (bdf sur cn pour année de données) et de vieillissement
à partir des masses de comptabilité nationale et des masses de consommation de bdf.
'''
data_year = find_nearest_inferior(data_years, target_year)
inflators_bdf_to_cn = get_inflators_bdf_to_cn(data_year)
inflators_cn_to_cn = get_inflators_cn_to_cn(target_year)
ratio_by_variable = dict()
for key in inflators_cn_to_cn.keys():
ratio_by_variable[key] = inflators_bdf_to_cn[key] * inflators_cn_to_cn[key]
return ratio_by_variable | Fonction qui calcule les ratios de calage (bdf sur cn pour année de données) et de vieillissement
à partir des masses de comptabilité nationale et des masses de consommation de bdf. | train | https://github.com/openfisca/openfisca-france-indirect-taxation/blob/b4bc7da90a1126ebfc3af2c3ec61de5a2b70bb2e/openfisca_france_indirect_taxation/examples/calage_bdf_cn_bis.py#L112-L125 | [
"def find_nearest_inferior(years, year):\n # years = year_data_list\n anterior_years = [\n available_year for available_year in years if available_year <= year\n ]\n return max(anterior_years)\n",
"def get_inflators_bdf_to_cn(data_year):\n '''\n Calcule les ratios de calage (bdf sur cn pour année de données)\n à partir des masses de comptabilité nationale et des masses de consommation de bdf.\n '''\n data_cn = get_cn_aggregates(data_year)\n data_bdf = get_bdf_aggregates(data_year)\n masses = data_cn.merge(\n data_bdf, left_index = True, right_index = True\n )\n masses.rename(columns = {'bdf_aggregates': 'conso_bdf{}'.format(data_year)}, inplace = True)\n return (\n masses['consoCN_COICOP_{}'.format(data_year)] / masses['conso_bdf{}'.format(data_year)]\n ).to_dict()\n",
"def get_inflators_cn_to_cn(target_year):\n '''\n Calcule l'inflateur de vieillissement à partir des masses de comptabilité nationale.\n '''\n data_year = find_nearest_inferior(data_years, target_year)\n data_year_cn_aggregates = get_cn_aggregates(data_year)['consoCN_COICOP_{}'.format(data_year)].to_dict()\n target_year_cn_aggregates = get_cn_aggregates(target_year)['consoCN_COICOP_{}'.format(target_year)].to_dict()\n\n return dict(\n (key, target_year_cn_aggregates[key] / data_year_cn_aggregates[key])\n for key in data_year_cn_aggregates.keys()\n )\n"
] | # -*- coding: utf-8 -*-
from __future__ import division
import logging
import os
import pkg_resources
import pandas
from pandas import concat
from openfisca_france_indirect_taxation.examples.utils_example import get_input_data_frame
from openfisca_france_indirect_taxation.build_survey_data.utils import find_nearest_inferior
log = logging.getLogger(__name__)
data_years = [2000, 2005, 2011]
def get_bdf_aggregates(data_year = None):
assert data_year is not None
depenses = get_input_data_frame(data_year)
depenses_by_grosposte = pandas.DataFrame()
for grosposte in range(1, 13):
if depenses_by_grosposte is None:
depenses_by_grosposte = depenses['coicop12_{}'.format(grosposte)]
else:
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['coicop12_{}'.format(grosposte)]], axis = 1)
depenses_by_grosposte = concat([depenses_by_grosposte, depenses['pondmen']], axis = 1)
grospostes = set(depenses_by_grosposte.columns)
grospostes.remove('pondmen')
bdf_aggregates_by_grosposte = pandas.DataFrame()
for grosposte in grospostes:
bdf_aggregates_by_grosposte.loc[grosposte, 'bdf_aggregates'] = (
depenses_by_grosposte[grosposte] * depenses_by_grosposte['pondmen']
).sum()
return bdf_aggregates_by_grosposte
def get_cn_aggregates(target_year = None):
assert target_year is not None
default_config_files_directory = os.path.join(
pkg_resources.get_distribution('openfisca_france_indirect_taxation').location)
parametres_fiscalite_file_path = os.path.join(
default_config_files_directory,
'openfisca_france_indirect_taxation',
'assets',
'legislation',
'Parametres fiscalite indirecte.xls'
)
masses_cn_data_frame = pandas.read_excel(parametres_fiscalite_file_path, sheetname = "consommation_CN")
masses_cn_12postes_data_frame = masses_cn_data_frame.loc[:, ['Code', target_year]]
masses_cn_12postes_data_frame['code_unicode'] = masses_cn_12postes_data_frame.Code.astype(unicode)
masses_cn_12postes_data_frame['len_code'] = masses_cn_12postes_data_frame['code_unicode'].apply(lambda x: len(x))
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['len_code'] == 6]
masses_cn_12postes_data_frame['code'] = masses_cn_12postes_data_frame.Code.astype(int)
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame.drop(['len_code', 'code_unicode', 'Code'], 1)
masses_cn_12postes_data_frame.rename(
columns = {
target_year: 'consoCN_COICOP_{}'.format(target_year),
'code': 'poste'
},
inplace = True,
)
masses_cn_12postes_data_frame['poste'] = masses_cn_12postes_data_frame['poste'].astype(str)
masses_cn_12postes_data_frame = masses_cn_12postes_data_frame[masses_cn_12postes_data_frame['poste'] != '15']
for element in masses_cn_12postes_data_frame['poste']:
masses_cn_12postes_data_frame['poste'] = \
masses_cn_12postes_data_frame['poste'].replace(element, 'coicop12_{}'.format(element))
masses_cn_12postes_data_frame.set_index('poste', inplace = True)
return masses_cn_12postes_data_frame * 1e6
def get_inflators_bdf_to_cn(data_year):
'''
Calcule les ratios de calage (bdf sur cn pour année de données)
à partir des masses de comptabilité nationale et des masses de consommation de bdf.
'''
data_cn = get_cn_aggregates(data_year)
data_bdf = get_bdf_aggregates(data_year)
masses = data_cn.merge(
data_bdf, left_index = True, right_index = True
)
masses.rename(columns = {'bdf_aggregates': 'conso_bdf{}'.format(data_year)}, inplace = True)
return (
masses['consoCN_COICOP_{}'.format(data_year)] / masses['conso_bdf{}'.format(data_year)]
).to_dict()
def get_inflators_cn_to_cn(target_year):
'''
Calcule l'inflateur de vieillissement à partir des masses de comptabilité nationale.
'''
data_year = find_nearest_inferior(data_years, target_year)
data_year_cn_aggregates = get_cn_aggregates(data_year)['consoCN_COICOP_{}'.format(data_year)].to_dict()
target_year_cn_aggregates = get_cn_aggregates(target_year)['consoCN_COICOP_{}'.format(target_year)].to_dict()
return dict(
(key, target_year_cn_aggregates[key] / data_year_cn_aggregates[key])
for key in data_year_cn_aggregates.keys()
)
def get_inflators_by_year():
inflators_by_year = dict()
for target_year in range(2000, 2015):
inflators = get_inflators(target_year)
inflators_by_year[target_year] = inflators
return inflators_by_year
def get_aggregates_by_year():
aggregates_by_year = dict()
for target_year in range(2000, 2015):
aggregates = get_cn_aggregates(target_year)['consoCN_COICOP_{}'.format(target_year)].to_dict()
aggregates_by_year[target_year] = aggregates
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound._retrieve_cached_zone_variable | python | def _retrieve_cached_zone_variable(self, zone_id, name):
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable | Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L78-L90 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
    """Parse one raw response line from the device.

    *res* is the bytes line read from the socket. Returns a tuple
    (type_char, value): value is the parsed variable value, or None when
    the payload does not match the response pattern. Any parsed zone or
    source variable is written into the local cache as a side effect.

    Raises:
        CommandException: for 'E' (error) responses, carrying the payload.
    """
    s = str(res, 'utf-8').strip()
    # First character is the response type; the payload follows a
    # one-character separator.
    ty, payload = s[0], s[2:]
    if ty == 'E':
        logger.debug("Device responded with error: %s", payload)
        raise CommandException(payload)
    m = _re_response.match(payload)
    if not m:
        # Not a variable report — return the type with no value.
        return ty, None
    p = m.groupdict()
    if p['source']:
        source_id = int(p['source'])
        self._store_cached_source_variable(
            source_id, p['variable'], p['value'])
    elif p['zone']:
        zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
        self._store_cached_zone_variable(zone_id,
                                         p['variable'],
                                         p['value'])
    return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
    """Background task servicing the socket and the command queue.

    Concurrently waits on (a) the next line from the device and (b) the
    next queued command. Unsolicited lines are parsed into the caches;
    after a command is written, responses are consumed until an 'S'
    (success) line resolves the command's future, or a CommandException
    fails it. Runs until cancelled by close().
    """
    queue_future = ensure_future(
        self._cmd_queue.get(), loop=self._loop)
    net_future = ensure_future(
        reader.readline(), loop=self._loop)
    try:
        logger.debug("Starting IO loop")
        while True:
            done, pending = yield from asyncio.wait(
                [queue_future, net_future],
                return_when=asyncio.FIRST_COMPLETED,
                loop=self._loop)
            if net_future in done:
                response = net_future.result()
                try:
                    # Unsolicited update: cache it; device errors here
                    # belong to no pending command, so ignore them.
                    self._process_response(response)
                except CommandException:
                    pass
                net_future = ensure_future(
                    reader.readline(), loop=self._loop)
            if queue_future in done:
                cmd, future = queue_future.result()
                cmd += '\r'  # commands are CR-terminated on the wire
                writer.write(bytearray(cmd, 'utf-8'))
                yield from writer.drain()
                queue_future = ensure_future(
                    self._cmd_queue.get(), loop=self._loop)
                # Drain responses (caching any variable reports) until
                # this command is acknowledged or rejected.
                while True:
                    response = yield from net_future
                    net_future = ensure_future(
                        reader.readline(), loop=self._loop)
                    try:
                        ty, value = self._process_response(response)
                        if ty == 'S':
                            future.set_result(value)
                            break
                    except CommandException as e:
                        future.set_exception(e)
                        break
        # NOTE(review): unreachable — the `while True` above only exits
        # via an exception; kept byte-for-byte for fidelity.
        logger.debug("IO loop exited")
    except asyncio.CancelledError:
        # Normal shutdown path triggered by close(): release the socket
        # and the outstanding futures, then re-raise the cancellation.
        logger.debug("IO loop cancelled")
        writer.close()
        queue_future.cancel()
        net_future.cancel()
        raise
    except:
        # Broad catch is deliberate at this task boundary: log with
        # traceback and propagate so the task's future carries failure.
        logger.exception("Unhandled exception in IO loop")
        raise
@asyncio.coroutine
def _send_cmd(self, cmd):
    """Queue *cmd* for transmission and wait for the device's response.

    The command string is paired with a Future that _ioloop resolves
    with the value of the 'S' (success) response, or fails with
    CommandException on an 'E' (error) response.
    """
    future = asyncio.Future(loop=self._loop)
    yield from self._cmd_queue.put((cmd, future))
    r = yield from future
    return r
def add_zone_callback(self, callback):
    """
    Registers a callback to be called whenever a zone variable changes.
    The callback will be passed three arguments: the zone_id, the
    variable name (lower-cased) and the variable value.
    """
    self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
    """
    Removes a previously registered zone callback.

    Raises ValueError if *callback* was never registered.
    """
    self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
    """
    Registers a callback to be called whenever a source variable changes.
    The callback will be passed three arguments: the source_id, the
    variable name (lower-cased) and the variable value.
    """
    self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
    """
    Removes a previously registered source callback.

    NOTE(review): *source_id* is unused — callbacks are stored in a
    single flat list — and the signature is asymmetric with
    remove_zone_callback(callback). Kept for caller compatibility.

    Raises ValueError if *callback* was never registered.
    """
    self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
    """
    Connect to the controller and start processing responses.

    Opens a TCP connection to the configured host/port and schedules the
    background _ioloop task that services the command queue and parses
    incoming responses.
    """
    logger.info("Connecting to %s:%s", self._host, self._port)
    reader, writer = yield from asyncio.open_connection(
        self._host, self._port, loop=self._loop)
    self._ioloop_future = ensure_future(
        self._ioloop(reader, writer), loop=self._loop)
    logger.info("Connected")
@asyncio.coroutine
def close(self):
    """
    Disconnect from the controller.

    Cancels the background _ioloop task and waits for it to finish;
    _ioloop closes the stream writer as part of its cancellation
    handling.
    """
    logger.info("Closing connection to %s:%s", self._host, self._port)
    self._ioloop_future.cancel()
    try:
        yield from self._ioloop_future
    except asyncio.CancelledError:
        # Expected: cancellation is the normal shutdown signal.
        pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
    """Add a source to the watchlist.

    Sources on the watchlist push their state changes back to the
    client (see watch_zone). On success the id is recorded in
    ``self._watched_sources`` so unwatch_source() can later remove it.

    Raises CommandException if the device rejects the command.
    """
    source_id = int(source_id)
    r = yield from self._send_cmd(
        "WATCH S[%d] ON" % (source_id, ))
    # Bug fix: was `self._watched_source` (missing trailing 's'), an
    # attribute that does not exist — every successful watch raised
    # AttributeError. The set created in __init__ and consumed by
    # unwatch_source() is `_watched_sources`.
    self._watched_sources.add(source_id)
    return r
@asyncio.coroutine
def unwatch_source(self, source_id):
    """Remove a source from the watchlist."""
    source_id = int(source_id)
    # Raises KeyError if the source was never watched; the id is removed
    # from the local set before the device acknowledges the command.
    self._watched_sources.remove(source_id)
    return (yield from
            self._send_cmd("WATCH S[%d] OFF" % (
                source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound._store_cached_zone_variable | python | def _store_cached_zone_variable(self, zone_id, name, value):
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value) | Stores the current known value of a zone variable into the cache.
Calls any zone callbacks. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L92-L103 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
    """Add a source to the watchlist.

    Sources on the watchlist push their state changes back to the
    client (see watch_zone). On success the id is recorded in
    ``self._watched_sources`` so unwatch_source() can later remove it.

    Raises CommandException if the device rejects the command.
    """
    source_id = int(source_id)
    r = yield from self._send_cmd(
        "WATCH S[%d] ON" % (source_id, ))
    # Bug fix: was `self._watched_source` (missing trailing 's'), an
    # attribute that does not exist — every successful watch raised
    # AttributeError. The set created in __init__ and consumed by
    # unwatch_source() is `_watched_sources`.
    self._watched_sources.add(source_id)
    return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound._retrieve_cached_source_variable | python | def _retrieve_cached_source_variable(self, source_id, name):
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable | Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L105-L117 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
    """Add a source to the watchlist.

    Sources on the watchlist push their state changes back to the
    client (see watch_zone). On success the id is recorded in
    ``self._watched_sources`` so unwatch_source() can later remove it.

    Raises CommandException if the device rejects the command.
    """
    source_id = int(source_id)
    r = yield from self._send_cmd(
        "WATCH S[%d] ON" % (source_id, ))
    # Bug fix: was `self._watched_source` (missing trailing 's'), an
    # attribute that does not exist — every successful watch raised
    # AttributeError. The set created in __init__ and consumed by
    # unwatch_source() is `_watched_sources`.
    self._watched_sources.add(source_id)
    return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound._store_cached_source_variable | python | def _store_cached_source_variable(self, source_id, name, value):
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value) | Stores the current known value of a source variable into the cache.
Calls any source callbacks. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L119-L130 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.connect | python | def connect(self):
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected") | Connect to the controller and start processing responses. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L247-L256 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.close | python | def close(self):
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass | Disconnect from the controller. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L259-L268 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.set_zone_variable | python | def set_zone_variable(self, zone_id, variable, value):
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value)) | Set a zone variable to a new value. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L271-L276 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.get_zone_variable | python | def get_zone_variable(self, zone_id, variable):
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable))) | Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L279-L288 | [
"def _retrieve_cached_zone_variable(self, zone_id, name):\n \"\"\"\n Retrieves the cache state of the named variable for a particular\n zone. If the variable has not been cached then the UncachedVariable\n exception is raised.\n \"\"\"\n try:\n s = self._zone_state[zone_id][name.lower()]\n logger.debug(\"Zone Cache retrieve %s.%s = %s\",\n zone_id.device_str(), name, s)\n return s\n except KeyError:\n raise UncachedVariable\n"
] | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
    def remove_source_callback(self, source_id, callback):
        """
        Removes a previously registered source callback.

        NOTE(review): the source_id parameter is unused -- source callbacks
        are registered globally, not per source. Kept for interface
        compatibility with existing callers.
        """
        self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
    def unwatch_source(self, source_id):
        """ Remove a source from the watchlist. """
        source_id = int(source_id)
        self._watched_sources.remove(source_id)
        return (yield from
                self._send_cmd("WATCH S[%d] OFF" % (
                    source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.get_cached_zone_variable | python | def get_cached_zone_variable(self, zone_id, variable, default=None):
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default | Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L290-L297 | [
"def _retrieve_cached_zone_variable(self, zone_id, name):\n \"\"\"\n Retrieves the cache state of the named variable for a particular\n zone. If the variable has not been cached then the UncachedVariable\n exception is raised.\n \"\"\"\n try:\n s = self._zone_state[zone_id][name.lower()]\n logger.debug(\"Zone Cache retrieve %s.%s = %s\",\n zone_id.device_str(), name, s)\n return s\n except KeyError:\n raise UncachedVariable\n"
] | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
    def remove_source_callback(self, source_id, callback):
        """
        Removes a previously registered source callback.

        NOTE(review): the source_id parameter is unused -- source callbacks
        are registered globally, not per source. Kept for interface
        compatibility with existing callers.
        """
        self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
    def unwatch_source(self, source_id):
        """ Remove a source from the watchlist. """
        source_id = int(source_id)
        self._watched_sources.remove(source_id)
        return (yield from
                self._send_cmd("WATCH S[%d] OFF" % (
                    source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.watch_zone | python | def watch_zone(self, zone_id):
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r | Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L300-L308 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
    def remove_source_callback(self, source_id, callback):
        """
        Removes a previously registered source callback.

        NOTE(review): the source_id parameter is unused -- source callbacks
        are registered globally, not per source. Kept for interface
        compatibility with existing callers.
        """
        self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
    def unwatch_source(self, source_id):
        """ Remove a source from the watchlist. """
        source_id = int(source_id)
        self._watched_sources.remove(source_id)
        return (yield from
                self._send_cmd("WATCH S[%d] OFF" % (
                    source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.unwatch_zone | python | def unwatch_zone(self, zone_id):
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), ))) | Remove a zone from the watchlist. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L311-L315 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
    """Queue *cmd* for transmission and wait for its success response.

    Pairs the command with a Future that _ioloop resolves once the
    controller acknowledges it; returns the response value.
    """
    future = asyncio.Future(loop=self._loop)
    yield from self._cmd_queue.put((cmd, future))
    r = yield from future
    return r
def add_zone_callback(self, callback):
    """Register *callback* to run on every zone variable change.

    The callback is invoked with three arguments: the zone_id, the
    variable name and the new value.
    """
    self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
    """Deregister a callback previously added with add_zone_callback.

    Raises ValueError if the callback was never registered (list.remove
    semantics).
    """
    self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
    """Register *callback* to run on every source variable change.

    The callback is invoked with three arguments: the source_id, the
    variable name and the new value.
    """
    self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
    """
    Removes a previously registered source callback.

    NOTE(review): *source_id* is accepted but unused -- callbacks are
    stored in one global list, not per source. The parameter is kept so
    existing callers do not break; confirm whether per-source dispatch
    was intended.
    """
    self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
    """
    Connect to the controller and start processing responses.

    Opens a TCP connection to the configured host/port and schedules
    the background _ioloop task that services queued commands and
    unsolicited notifications.
    """
    logger.info("Connecting to %s:%s", self._host, self._port)
    reader, writer = yield from asyncio.open_connection(
        self._host, self._port, loop=self._loop)
    self._ioloop_future = ensure_future(
        self._ioloop(reader, writer), loop=self._loop)
    logger.info("Connected")
@asyncio.coroutine
def close(self):
    """
    Disconnect from the controller.

    Cancels the background _ioloop task and waits for it to finish; the
    CancelledError the task re-raises is expected and swallowed here.
    """
    logger.info("Closing connection to %s:%s", self._host, self._port)
    self._ioloop_future.cancel()
    try:
        yield from self._ioloop_future
    except asyncio.CancelledError:
        pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
    """
    Set a zone variable to a new value.

    Issues a SET command for the zone's device path and returns the
    pending send-command result.
    """
    cmd = "SET %s.%s=\"%s\"" % (
        zone_id.device_str(), variable, value)
    return self._send_cmd(cmd)
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
    """Retrieve the current value of a zone variable.

    If the variable is not found in the local cache then the value is
    requested from the controller with a GET command.
    """
    try:
        return self._retrieve_cached_zone_variable(zone_id, variable)
    except UncachedVariable:
        # Cache miss: ask the device directly.
        return (yield from self._send_cmd("GET %s.%s" % (
            zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
    """Return the cached value of a zone variable.

    Falls back to *default* when the variable is not present in the
    cache; never contacts the controller.
    """
    try:
        cached = self._retrieve_cached_zone_variable(zone_id, variable)
    except UncachedVariable:
        return default
    return cached
@asyncio.coroutine
def watch_zone(self, zone_id):
    """Add a zone to the watchlist.

    Zones on the watchlist will push all state changes (and those of
    the source they are currently connected to) back to the client.
    """
    r = yield from self._send_cmd(
        "WATCH %s ON" % (zone_id.device_str(), ))
    # Record the zone only after the controller accepted the command.
    self._watched_zones.add(zone_id)
    return r
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
    """Send an event to a zone.

    Formats "EVENT <zone>!<event> <args...>" (extra args are
    space-joined after str() conversion) and returns the controller's
    response value.
    """
    # Fix: the decorator was applied twice (`@asyncio.coroutine` on two
    # consecutive lines); a single application is sufficient.
    cmd = "EVENT %s!%s %s" % (
        zone_id.device_str(), event_name,
        " ".join(str(x) for x in args))
    return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
    """Return a list of (zone_id, zone_name) tuples.

    Probes controllers 1-7 and zones 1-16 by reading each zone's 'name'
    variable; a CommandException is taken to mean the zone does not
    exist and ends the scan of that controller. Zones with an empty
    name are skipped.

    NOTE(review): ZoneID is constructed positionally as
    ZoneID(zone, controller) here, while _process_response constructs
    it with keyword arguments -- confirm the positional parameter order
    of ZoneID matches.
    """
    zones = []
    for controller in range(1, 8):
        for zone in range(1, 17):
            zone_id = ZoneID(zone, controller)
            try:
                name = yield from self.get_zone_variable(zone_id, 'name')
                if name:
                    zones.append((zone_id, name))
            except CommandException:
                break
    return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
    """Change the value of a source variable.

    *source_id* is coerced to int before being formatted into the SET
    command.
    """
    sid = int(source_id)
    cmd = "SET S[%d].%s=\"%s\"" % (sid, variable, value)
    return self._send_cmd(cmd)
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
    """Get the current value of a source variable.

    If the variable is not in the cache it will be retrieved from the
    controller with a GET command. *source_id* is coerced to int.
    """
    source_id = int(source_id)
    try:
        return self._retrieve_cached_source_variable(
            source_id, variable)
    except UncachedVariable:
        # Cache miss: ask the device directly.
        return (yield from self._send_cmd("GET S[%d].%s" % (
            source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
    """Return the cached value of a source variable.

    Falls back to *default* when the variable is not cached; never
    contacts the controller. *source_id* is coerced to int.
    """
    sid = int(source_id)
    try:
        cached = self._retrieve_cached_source_variable(sid, variable)
    except UncachedVariable:
        return default
    return cached
@asyncio.coroutine
def watch_source(self, source_id):
    """Add a source to the watchlist.

    Watched sources push their state changes back to the client.
    """
    source_id = int(source_id)
    r = yield from self._send_cmd(
        "WATCH S[%d] ON" % (source_id, ))
    # Bug fix: was `self._watched_source` (missing 's'), which raised
    # AttributeError -- __init__ and unwatch_source both use
    # `_watched_sources`.
    self._watched_sources.add(source_id)
    return r
@asyncio.coroutine
def unwatch_source(self, source_id):
    """Remove a source from the watchlist."""
    source_id = int(source_id)
    # Drop local bookkeeping first, then tell the controller; raises
    # KeyError if the source was never watched (set.remove semantics).
    self._watched_sources.remove(source_id)
    return (yield from
            self._send_cmd("WATCH S[%d] OFF" % (
                source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
    """Return a list of (source_id, source_name) tuples.

    Probes sources 1-16 by reading each source's 'name' variable; a
    CommandException is taken to mean the source does not exist and
    ends the scan. Sources with an empty name are skipped.
    """
    sources = []
    for source_id in range(1, 17):
        try:
            name = yield from self.get_source_variable(source_id, 'name')
            if name:
                sources.append((source_id, name))
        except CommandException:
            break
    return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.send_zone_event | python | def send_zone_event(self, zone_id, event_name, *args):
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd)) | Send an event to a zone. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L318-L323 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.enumerate_zones | python | def enumerate_zones(self):
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones | Return a list of (zone_id, zone_name) tuples | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L326-L338 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.set_source_variable | python | def set_source_variable(self, source_id, variable, value):
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value)) | Change the value of a source variable. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L341-L345 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.get_source_variable | python | def get_source_variable(self, source_id, variable):
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable))) | Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L348-L358 | [
"def _retrieve_cached_source_variable(self, source_id, name):\n \"\"\"\n Retrieves the cache state of the named variable for a particular\n source. If the variable has not been cached then the UncachedVariable\n exception is raised.\n \"\"\"\n try:\n s = self._source_state[source_id][name.lower()]\n logger.debug(\"Source Cache retrieve S[%d].%s = %s\",\n source_id, name, s)\n return s\n except KeyError:\n raise UncachedVariable\n"
] | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.get_cached_source_variable | python | def get_cached_source_variable(self, source_id, variable, default=None):
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default | Get the cached value of a source variable. If the variable is not
cached return the default value. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L360-L369 | [
"def _retrieve_cached_source_variable(self, source_id, name):\n \"\"\"\n Retrieves the cache state of the named variable for a particular\n source. If the variable has not been cached then the UncachedVariable\n exception is raised.\n \"\"\"\n try:\n s = self._source_state[source_id][name.lower()]\n logger.debug(\"Source Cache retrieve S[%d].%s = %s\",\n source_id, name, s)\n return s\n except KeyError:\n raise UncachedVariable\n"
] | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
@asyncio.coroutine
def watch_source(self, source_id):
""" Add a souce to the watchlist. """
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.watch_source | python | def watch_source(self, source_id):
source_id = int(source_id)
r = yield from self._send_cmd(
"WATCH S[%d] ON" % (source_id, ))
self._watched_source.add(source_id)
return r | Add a souce to the watchlist. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L372-L378 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
"""
Removes a previously registered zone callback.
"""
self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
"""
Set a zone variable to a new value.
"""
return self._send_cmd("SET %s.%s=\"%s\"" % (
zone_id.device_str(), variable, value))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
""" Change the value of a source variable. """
source_id = int(source_id)
return self._send_cmd("SET S[%d].%s=\"%s\"" % (
source_id, variable, value))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
def enumerate_sources(self):
""" Return a list of (source_id, source_name) tuples """
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.unwatch_source | python | def unwatch_source(self, source_id):
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, ))) | Remove a souce from the watchlist. | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L381-L387 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
    """
    Parse one raw response line (bytes) from the device.

    Returns a ``(type, value)`` tuple where ``type`` is the single
    leading character of the response (e.g. 'S' for success, 'N' for
    notification) and ``value`` is the parsed variable value or None.
    Raises CommandException for 'E' (error) responses.  Recognised
    variable notifications are written into the zone/source caches as
    a side effect.

    NOTE(review): assumes a non-empty line of the form
    "<type> <payload>" -- an empty line would raise IndexError on s[0].
    """
    s = str(res, 'utf-8').strip()
    # Split "<type> <payload>": one type character, one separator.
    ty, payload = s[0], s[2:]
    if ty == 'E':
        logger.debug("Device responded with error: %s", payload)
        raise CommandException(payload)
    m = _re_response.match(payload)
    if not m:
        # Not a variable notification; return the raw type only.
        return ty, None
    p = m.groupdict()
    if p['source']:
        source_id = int(p['source'])
        self._store_cached_source_variable(
            source_id, p['variable'], p['value'])
    elif p['zone']:
        zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
        self._store_cached_zone_variable(zone_id,
                                         p['variable'],
                                         p['value'])
    return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
    """
    Main IO loop: multiplexes outgoing commands from the internal
    queue with incoming response lines from the device socket.

    Runs until cancelled (see `close`).  Unsolicited notification
    lines update the local caches via `_process_response`.  After a
    command is written, response lines are consumed until an 'S'
    (success) line resolves the command's future or a
    CommandException rejects it.
    """
    queue_future = ensure_future(
        self._cmd_queue.get(), loop=self._loop)
    net_future = ensure_future(
        reader.readline(), loop=self._loop)
    try:
        logger.debug("Starting IO loop")
        while True:
            # Wait for either a queued command or an incoming line.
            done, pending = yield from asyncio.wait(
                [queue_future, net_future],
                return_when=asyncio.FIRST_COMPLETED,
                loop=self._loop)
            if net_future in done:
                response = net_future.result()
                try:
                    # Unsolicited line: update caches; command errors
                    # are irrelevant here, so they are ignored.
                    self._process_response(response)
                except CommandException:
                    pass
                net_future = ensure_future(
                    reader.readline(), loop=self._loop)
            if queue_future in done:
                cmd, future = queue_future.result()
                cmd += '\r'  # RIO protocol line terminator
                writer.write(bytearray(cmd, 'utf-8'))
                yield from writer.drain()
                queue_future = ensure_future(
                    self._cmd_queue.get(), loop=self._loop)
                # Read until this command is acknowledged ('S') or
                # rejected; interleaved notification lines still
                # update the caches inside _process_response.
                while True:
                    response = yield from net_future
                    net_future = ensure_future(
                        reader.readline(), loop=self._loop)
                    try:
                        ty, value = self._process_response(response)
                        if ty == 'S':
                            future.set_result(value)
                            break
                    except CommandException as e:
                        future.set_exception(e)
                        break
        logger.debug("IO loop exited")
    except asyncio.CancelledError:
        # Normal shutdown path (close() cancels this task).
        logger.debug("IO loop cancelled")
        writer.close()
        queue_future.cancel()
        net_future.cancel()
        raise
    except:
        logger.exception("Unhandled exception in IO loop")
        raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
    """
    Remove a previously registered source callback.

    NOTE(review): the original docstring said "zone callback", which
    is wrong for this method.  `source_id` is accepted but never
    used; the parameter is kept so existing callers do not break.
    """
    self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
    """
    Set a zone variable to a new value and return the device response.

    Bugfix: the result of `_send_cmd` (itself a coroutine) was
    returned without `yield from`, so callers received an un-awaited
    coroutine object instead of the command's response -- matching
    get_zone_variable, which does delegate correctly.
    """
    return (yield from self._send_cmd("SET %s.%s=\"%s\"" % (
        zone_id.device_str(), variable, value)))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
    """Change the value of a source variable; return the response.

    Bugfix: `_send_cmd` is a coroutine and was returned without
    `yield from`, handing callers an un-awaited coroutine object
    (get_source_variable delegates correctly and is the model here).
    """
    source_id = int(source_id)
    return (yield from self._send_cmd("SET S[%d].%s=\"%s\"" % (
        source_id, variable, value)))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
    """Add a source to the watchlist.

    Watched sources push their state changes back to the client.
    Returns the device's response to the WATCH command.
    """
    source_id = int(source_id)
    r = yield from self._send_cmd(
        "WATCH S[%d] ON" % (source_id, ))
    # Bugfix: the attribute created in __init__ (and used by
    # unwatch_source) is `_watched_sources`; `_watched_source`
    # raised AttributeError at runtime.
    self._watched_sources.add(source_id)
    return r
@asyncio.coroutine
def enumerate_sources(self):
    """Return a list of (source_id, source_name) tuples.

    Probes source ids 1..16 and stops at the first command error.

    Bugfix: a stray duplicated `@asyncio.coroutine` decorator was
    removed -- decorating twice only wraps the coroutine a second
    time for no benefit.
    """
    sources = []
    for source_id in range(1, 17):
        try:
            name = yield from self.get_source_variable(source_id, 'name')
            if name:
                sources.append((source_id, name))
        except CommandException:
            break
    return sources
|
wickerwaka/russound_rio | russound_rio/rio.py | Russound.enumerate_sources | python | def enumerate_sources(self):
sources = []
for source_id in range(1, 17):
try:
name = yield from self.get_source_variable(source_id, 'name')
if name:
sources.append((source_id, name))
except CommandException:
break
return sources | Return a list of (source_id, source_name) tuples | train | https://github.com/wickerwaka/russound_rio/blob/e331985fd1544abec6a1da3637090550d6f93f76/russound_rio/rio.py#L390-L400 | null | class Russound:
"""Manages the RIO connection to a Russound device."""
def __init__(self, loop, host, port=9621):
"""
Initialize the Russound object using the event loop, host and port
provided.
"""
self._loop = loop
self._host = host
self._port = port
self._ioloop_future = None
self._cmd_queue = asyncio.Queue(loop=loop)
self._source_state = {}
self._zone_state = {}
self._watched_zones = set()
self._watched_sources = set()
self._zone_callbacks = []
self._source_callbacks = []
def _retrieve_cached_zone_variable(self, zone_id, name):
"""
Retrieves the cache state of the named variable for a particular
zone. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._zone_state[zone_id][name.lower()]
logger.debug("Zone Cache retrieve %s.%s = %s",
zone_id.device_str(), name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_zone_variable(self, zone_id, name, value):
"""
Stores the current known value of a zone variable into the cache.
Calls any zone callbacks.
"""
zone_state = self._zone_state.setdefault(zone_id, {})
name = name.lower()
zone_state[name] = value
logger.debug("Zone Cache store %s.%s = %s",
zone_id.device_str(), name, value)
for callback in self._zone_callbacks:
callback(zone_id, name, value)
def _retrieve_cached_source_variable(self, source_id, name):
"""
Retrieves the cache state of the named variable for a particular
source. If the variable has not been cached then the UncachedVariable
exception is raised.
"""
try:
s = self._source_state[source_id][name.lower()]
logger.debug("Source Cache retrieve S[%d].%s = %s",
source_id, name, s)
return s
except KeyError:
raise UncachedVariable
def _store_cached_source_variable(self, source_id, name, value):
"""
Stores the current known value of a source variable into the cache.
Calls any source callbacks.
"""
source_state = self._source_state.setdefault(source_id, {})
name = name.lower()
source_state[name] = value
logger.debug("Source Cache store S[%d].%s = %s",
source_id, name, value)
for callback in self._source_callbacks:
callback(source_id, name, value)
def _process_response(self, res):
s = str(res, 'utf-8').strip()
ty, payload = s[0], s[2:]
if ty == 'E':
logger.debug("Device responded with error: %s", payload)
raise CommandException(payload)
m = _re_response.match(payload)
if not m:
return ty, None
p = m.groupdict()
if p['source']:
source_id = int(p['source'])
self._store_cached_source_variable(
source_id, p['variable'], p['value'])
elif p['zone']:
zone_id = ZoneID(controller=p['controller'], zone=p['zone'])
self._store_cached_zone_variable(zone_id,
p['variable'],
p['value'])
return ty, p['value']
@asyncio.coroutine
def _ioloop(self, reader, writer):
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
logger.debug("Starting IO loop")
while True:
done, pending = yield from asyncio.wait(
[queue_future, net_future],
return_when=asyncio.FIRST_COMPLETED,
loop=self._loop)
if net_future in done:
response = net_future.result()
try:
self._process_response(response)
except CommandException:
pass
net_future = ensure_future(
reader.readline(), loop=self._loop)
if queue_future in done:
cmd, future = queue_future.result()
cmd += '\r'
writer.write(bytearray(cmd, 'utf-8'))
yield from writer.drain()
queue_future = ensure_future(
self._cmd_queue.get(), loop=self._loop)
while True:
response = yield from net_future
net_future = ensure_future(
reader.readline(), loop=self._loop)
try:
ty, value = self._process_response(response)
if ty == 'S':
future.set_result(value)
break
except CommandException as e:
future.set_exception(e)
break
logger.debug("IO loop exited")
except asyncio.CancelledError:
logger.debug("IO loop cancelled")
writer.close()
queue_future.cancel()
net_future.cancel()
raise
except:
logger.exception("Unhandled exception in IO loop")
raise
@asyncio.coroutine
def _send_cmd(self, cmd):
future = asyncio.Future(loop=self._loop)
yield from self._cmd_queue.put((cmd, future))
r = yield from future
return r
def add_zone_callback(self, callback):
"""
Registers a callback to be called whenever a zone variable changes.
The callback will be passed three arguments: the zone_id, the variable
name and the variable value.
"""
self._zone_callbacks.append(callback)
def remove_zone_callback(self, callback):
"""
Removes a previously registered zone callback.
"""
self._zone_callbacks.remove(callback)
def add_source_callback(self, callback):
"""
Registers a callback to be called whenever a source variable changes.
The callback will be passed three arguments: the source_id, the
variable name and the variable value.
"""
self._source_callbacks.append(callback)
def remove_source_callback(self, source_id, callback):
    """
    Remove a previously registered source callback.

    NOTE(review): the original docstring said "zone callback", which
    is wrong for this method.  `source_id` is accepted but never
    used; the parameter is kept so existing callers do not break.
    """
    self._source_callbacks.remove(callback)
@asyncio.coroutine
def connect(self):
"""
Connect to the controller and start processing responses.
"""
logger.info("Connecting to %s:%s", self._host, self._port)
reader, writer = yield from asyncio.open_connection(
self._host, self._port, loop=self._loop)
self._ioloop_future = ensure_future(
self._ioloop(reader, writer), loop=self._loop)
logger.info("Connected")
@asyncio.coroutine
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass
@asyncio.coroutine
def set_zone_variable(self, zone_id, variable, value):
    """
    Set a zone variable to a new value and return the device response.

    Bugfix: the result of `_send_cmd` (itself a coroutine) was
    returned without `yield from`, so callers received an un-awaited
    coroutine object instead of the command's response -- matching
    get_zone_variable, which does delegate correctly.
    """
    return (yield from self._send_cmd("SET %s.%s=\"%s\"" % (
        zone_id.device_str(), variable, value)))
@asyncio.coroutine
def get_zone_variable(self, zone_id, variable):
""" Retrieve the current value of a zone variable. If the variable is
not found in the local cache then the value is requested from the
controller. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET %s.%s" % (
zone_id.device_str(), variable)))
def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_zone(self, zone_id):
""" Add a zone to the watchlist.
Zones on the watchlist will push all
state changes (and those of the source they are currently connected to)
back to the client """
r = yield from self._send_cmd(
"WATCH %s ON" % (zone_id.device_str(), ))
self._watched_zones.add(zone_id)
return r
@asyncio.coroutine
def unwatch_zone(self, zone_id):
""" Remove a zone from the watchlist. """
self._watched_zones.remove(zone_id)
return (yield from
self._send_cmd("WATCH %s OFF" % (zone_id.device_str(), )))
@asyncio.coroutine
def send_zone_event(self, zone_id, event_name, *args):
""" Send an event to a zone. """
cmd = "EVENT %s!%s %s" % (
zone_id.device_str(), event_name,
" ".join(str(x) for x in args))
return (yield from self._send_cmd(cmd))
@asyncio.coroutine
def enumerate_zones(self):
""" Return a list of (zone_id, zone_name) tuples """
zones = []
for controller in range(1, 8):
for zone in range(1, 17):
zone_id = ZoneID(zone, controller)
try:
name = yield from self.get_zone_variable(zone_id, 'name')
if name:
zones.append((zone_id, name))
except CommandException:
break
return zones
@asyncio.coroutine
def set_source_variable(self, source_id, variable, value):
    """Change the value of a source variable; return the response.

    Bugfix: `_send_cmd` is a coroutine and was returned without
    `yield from`, handing callers an un-awaited coroutine object
    (get_source_variable delegates correctly and is the model here).
    """
    source_id = int(source_id)
    return (yield from self._send_cmd("SET S[%d].%s=\"%s\"" % (
        source_id, variable, value)))
@asyncio.coroutine
def get_source_variable(self, source_id, variable):
""" Get the current value of a source variable. If the variable is not
in the cache it will be retrieved from the controller. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return (yield from self._send_cmd("GET S[%d].%s" % (
source_id, variable)))
def get_cached_source_variable(self, source_id, variable, default=None):
""" Get the cached value of a source variable. If the variable is not
cached return the default value. """
source_id = int(source_id)
try:
return self._retrieve_cached_source_variable(
source_id, variable)
except UncachedVariable:
return default
@asyncio.coroutine
def watch_source(self, source_id):
    """Add a source to the watchlist.

    Watched sources push their state changes back to the client.
    Returns the device's response to the WATCH command.
    """
    source_id = int(source_id)
    r = yield from self._send_cmd(
        "WATCH S[%d] ON" % (source_id, ))
    # Bugfix: the attribute created in __init__ (and used by
    # unwatch_source) is `_watched_sources`; `_watched_source`
    # raised AttributeError at runtime.
    self._watched_sources.add(source_id)
    return r
@asyncio.coroutine
def unwatch_source(self, source_id):
""" Remove a souce from the watchlist. """
source_id = int(source_id)
self._watched_sources.remove(source_id)
return (yield from
self._send_cmd("WATCH S[%d] OFF" % (
source_id, )))
@asyncio.coroutine
|
cimatosa/progression | progression/decorators.py | ProgressBar._get_callargs | python | def _get_callargs(self, *args, **kwargs):
callargs = getcallargs(self.func, *args, **kwargs)
return callargs | Retrieve all arguments that `self.func` needs and
return a dictionary with call arguments. | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/decorators.py#L130-L136 | null | class ProgressBar(object):
""" A wrapper/decorator with a text-based progress bar.
Methods:
- __init__
- __call__
The idea is to add a status bar for a regular
function just by wrapping the function via
python's decorator syntax.
In order to do so, the function needs to provide some
extra information, namely the current state 'count' and
the final state 'max_count'. Simply expand your function
by these two additional keyword arguments (or other pairs
specified in jobmanager.validCountKwargs) and set their
values during the calculation (see example 1 below). In that
manner the decorated function as well as the not decorated
function can simple be called as one would not care about
any status information.
Alternatively one could explicitly set count and max_count
in the function call, which circumvents the need to change
the value of max_count AFTER instantiation of the progressBar.
Example 1
---------
>>> from jobmanager.decorators import ProgressBar
>>> from jobmanager.decorators.progress import UnsignedIntValue
>>> import time
>>>
>>> @ProgressBar
>>> def my_func_1(arg,
>>> kwarg = "1",
>>> count = UnsignedIntValue(val=0),
>>> max_count = UnsignedIntValue(val=1)):
>>> # max_count should as default always be set to a value > 0
>>> maxval = 100
>>> max_count.value = maxval
>>>
>>> for i in range(maxval):
>>> count.value += 1
>>> time.sleep(0.02)
>>>
>>> return arg+kwarg
>>>
>>> my_func_1("one argument", kwarg=" second argument")
# The progress of my_func is monitored on stdout.
one argument second argument
Example 2
---------
>>> from jobmanager.decorators import ProgressBar
>>> from jobmanager.decorators.progress import UnsignedIntValue
>>> import time
>>>
>>> @ProgressBar
>>> def my_func(c, m):
>>> for i in range(m.value):
>>> c.value = i
>>> time.sleep(0.02)
>>>
>>> c = progress.UnsignedIntValue(val=0)
>>> m = progress.UnsignedIntValue(val=100)
>>> my_func(c, m)
Notes
-----
You can also use this class as a wrapper and tune parameters of the
progress bar.
>>> wrapper = ProgressBar(my_func, interval=.1)
>>> result = wrapper("wrapped function", kwarg=" test")
"""
def __init__(self, func, **kwargs):
    """ Initiates the wrapper object.

    A function can be wrapped by decorating it with
    `ProgressBar` or by instantiating `ProgressBar` and
    subsequently calling it with the arguments for `func`.

    Parameters
    ----------
    func : callable
        The method that is wrapped/decorated. It must accept the
        two keyword-arguments `count` and `max_count` (or `c` and
        `m`). The method `func` increments `count.value` up to
        `max_count.value` (`c.value`, `m.value`).
    **kwargs : dict
        Keyword-arguments for `jobmanager.ProgressBar`.

    Raises
    ------
    ValueError
        If `func` does not accept one of the recognised
        count/max_count keyword pairs.

    Notes
    -----
    `func` must properly set the `.value` properties of the shared
    count objects; this wrapper creates the progress bar around them.
    """
    self.__name__ = func.__name__  # act like the function
    self.__doc__ = func.__doc__    # copy doc string
    self.func = func
    self.kwargs = kwargs
    # Check that func exposes a recognised (count, max_count) pair.
    self.cm = progress.getCountKwargs(func)
    if self.cm is None:
        # Bugfix: `func.func_name` is Python 2 only -- under Python 3
        # it raised AttributeError and masked this informative error.
        raise ValueError(
            "The wrapped function `{}` ".format(func.__name__) +
            "must accept one of the following pairs of "
            "keyword arguments:{}".format(progress.validCountKwargs))
def __call__(self, *args, **kwargs):
    """ Calls `func` - previously defined in `__init__`.

    Runs the wrapped function inside a `progress.ProgressBar`
    context bound to the count/max_count shared values detected at
    decoration time, and returns the wrapped function's result.

    Parameters
    ----------
    *args : list
        Arguments for `func`.
    **kwargs : dict
        Keyword-arguments for `func`.
    """
    # Bind the args and kwds to the argument names of self.func
    callargs = self._get_callargs(*args, **kwargs)
    # self.cm holds the names of the (count, max_count) keyword pair.
    count = callargs[self.cm[0]]
    max_count = callargs[self.cm[1]]
    with progress.ProgressBar(count = count,
                              max_count = max_count,
                              prepend = "{} ".format(self.__name__),
                              **self.kwargs) as pb:
        pb.start()
        # Call with the fully bound arguments so the shared count
        # objects are the same ones the progress bar observes.
        return self.func(**callargs)
|
cimatosa/progression | progression/terminal.py | get_terminal_size | python | def get_terminal_size(defaultw=80):
if hasattr(shutil_get_terminal_size, "__call__"):
return shutil_get_terminal_size()
else:
try:
import fcntl, termios, struct
fd = 0
hw = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
'1234'))
return (hw[1], hw[0])
except:
try:
out = sp.check_output(["tput", "cols"])
width = int(out.decode("utf-8").strip())
return (width, None)
except:
try:
hw = (os.environ['LINES'], os.environ['COLUMNS'])
return (hw[1], hw[0])
except:
return (defaultw, None) | Checks various methods to determine the terminal size
Methods:
- shutil.get_terminal_size (only Python3)
- fcntl.ioctl
- subprocess.check_output
- os.environ
Parameters
----------
defaultw : int
Default width of terminal.
Returns
-------
width, height : int
Width and height of the terminal. If one of them could not be
found, None is return in its place. | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/terminal.py#L40-L82 | null | # -*- coding: utf-8 -*-
from __future__ import division, print_function
import os
import sys
import subprocess as sp
import logging
import platform
if platform.system() == 'Windows':
width_correction = -1
else:
width_correction = 0
try:
from shutil import get_terminal_size as shutil_get_terminal_size
except ImportError:
shutil_get_terminal_size = None
class MultiLineFormatter(logging.Formatter):
    """Log formatter that indents continuation lines of a multi-line
    message so they align under the record's header::

        <HEAD> msg_line1
               msg_line2
               ...
    """
    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        # Everything before the message text is the header; pad each
        # continuation line with an equal amount of whitespace.
        header = formatted.split(record.message)[0]
        padding = '\n' + ' ' * len(header)
        return formatted.replace('\n', padding)
def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
def_handl.setFormatter(fmt) # ... and pads multiline messaged
log = logging.getLogger(__name__) # creates the default log for this module
log.addHandler(def_handl)
def get_terminal_width(default=80, name=None):
    """
    Return the current terminal width in columns, falling back to
    `default` when it cannot be determined.

    `name` is unused; it is kept for backward compatibility.
    """
    try:
        width = get_terminal_size(defaultw=default)[0] + width_correction
    except Exception:
        # Deliberate best-effort: any failure falls back to `default`.
        width = default
    return width
def terminal_reserve(progress_obj, terminal_obj=None, identifier=None):
    """ Registers the terminal (stdout) for printing.

    Useful to prevent multiple processes from writing progress bars
    to stdout.

    One process (server) prints to stdout and a couple of subprocesses
    do not print to the same stdout, because the server has reserved it.
    Of course, the clients have to be nice and check with
    terminal_reserve first if they should (not) print.

    Nothing is locked.

    NOTE(review): `identifier` is normalised below but otherwise
    unused; kept for API compatibility.

    Returns
    -------
    True if reservation was successful (or if we have already reserved this tty),
    False if there already is a reservation from another instance.
    """
    if terminal_obj is None:
        terminal_obj = sys.stdout
    if identifier is None:
        identifier = ''
    if terminal_obj in TERMINAL_RESERVATION:    # terminal was already registered
        log.debug("this terminal %s has already been added to reservation list", terminal_obj)
        if TERMINAL_RESERVATION[terminal_obj] is progress_obj:
            log.debug("we %s have already reserved this terminal %s", progress_obj, terminal_obj)
            return True
        else:
            log.debug("someone else %s has already reserved this terminal %s", TERMINAL_RESERVATION[terminal_obj],
                      terminal_obj)
            return False
    else:                                       # terminal not yet registered
        log.debug("terminal %s was reserved for us %s", terminal_obj, progress_obj)
        TERMINAL_RESERVATION[terminal_obj] = progress_obj
        return True
def terminal_unreserve(progress_obj, terminal_obj=None, verbose=0, identifier=None):
    """ Unregisters the terminal (stdout) for printing.

    An instance (progress_obj) can only unreserve the tty
    (terminal_obj) when it also reserved it; see terminal_reserve
    for more information.

    NOTE(review): `verbose` is unused, and `identifier` is normalised
    below but never used afterwards -- both appear to be leftovers
    kept for API compatibility.

    Returns
    -------
    None
    """
    if terminal_obj is None:
        terminal_obj = sys.stdout
    if identifier is None:
        identifier = ''
    else:
        identifier = identifier + ': '
    po = TERMINAL_RESERVATION.get(terminal_obj)
    if po is None:
        log.debug("terminal %s was not reserved, nothing happens", terminal_obj)
    else:
        if po is progress_obj:
            log.debug("terminal %s now unreserned", terminal_obj)
            del TERMINAL_RESERVATION[terminal_obj]
        else:
            log.debug("you %s can NOT unreserve terminal %s be cause it was reserved by %s", progress_obj, terminal_obj,
                      po)
def len_string_without_ESC(s):
    """Length of `s` with ANSI escape sequences stripped out."""
    stripped = remove_ESC_SEQ_from_string(s)
    return len(stripped)
def remove_ESC_SEQ_from_string(s):
    """
    Return `s` with ANSI escape sequences of the form
    ``ESC [ <digits> <final byte>`` removed.

    Robustness fix: a truncated escape sequence at the very end of
    the string previously raised IndexError while scanning digits;
    the scan is now bounds-checked.  Dead commented-out code removed.
    """
    old_idx = 0
    new_s = ""
    ESC_CHAR_START = "\033["
    while True:
        idx = s.find(ESC_CHAR_START, old_idx)
        if idx == -1:
            break
        # Skip "\033[", any digits, and the single final byte.
        j = 2
        while idx + j < len(s) and s[idx + j] in '0123456789':
            j += 1
        new_s += s[old_idx:idx]
        old_idx = idx + j + 1
    new_s += s[old_idx:]
    return new_s
def _close_kind(stack, which_kind):
stack_tmp = []
s = ""
# close everything until which_kind is found
while True:
kind, start, end = stack.pop()
if kind != which_kind:
s += end
stack_tmp.append((kind, start, end))
else:
break
# close which_kind
s = end
# start everything that was closed before which_kind
for kind, start, end in stack_tmp:
s += start
stack.append((kind, start, end))
return s
def _close_all(stack):
s = ""
for kind, start, end in stack:
s += end
return s
def _open_color(stack, color):
start = '<span style="color:{}">'.format(color)
end = '</span>'
stack.append(('color', start, end))
return start
def _open_bold(stack):
start = '<b>'
end = '</b>'
stack.append(('bold', start, end))
return start
def ESC_SEQ_to_HTML(s):
    """
    Translate ANSI escape sequences in `s` into equivalent HTML.

    Color codes become ``<span style="color:...">``, bold becomes
    ``<b>``; a stack of open tags is kept so that all tags are closed
    before the string ends.  Unrecognised escape sequences are simply
    dropped from the output.

    NOTE(review): assumes every escape sequence is well formed
    (terminated final byte) -- a truncated sequence at the end of `s`
    would raise IndexError in the digit scan below.
    """
    old_idx = 0
    new_s = ""
    ESC_CHAR_START = "\033["
    color_on = False
    bold_on = False
    stack = []  # currently open tags: (kind, start, end) tuples
    while True:
        idx = s.find(ESC_CHAR_START, old_idx)
        if idx == -1:
            break
        # Scan past "\033[" and any digits up to the final byte.
        j = 2
        while s[idx + j] in '0123456789':
            j += 1
        new_s += s[old_idx:idx]
        old_idx = idx + j + 1
        escseq = s[idx:idx+j+1]
        if escseq in ESC_COLOR_TO_HTML:  # set color
            if color_on:
                # Replace the currently open color span first.
                new_s += _close_kind(stack, which_kind = 'color')
            new_s += _open_color(stack, ESC_COLOR_TO_HTML[escseq])
            color_on = True
        elif escseq == ESC_DEFAULT:  # unset color
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
                color_on = False
        elif escseq == ESC_BOLD:
            if not bold_on:
                new_s += _open_bold(stack)
                bold_on = True
        elif escseq == ESC_RESET_BOLD:
            if bold_on:
                new_s += _close_kind(stack, which_kind = 'bold')
                bold_on = False
        elif escseq == ESC_NO_CHAR_ATTR:
            # Full attribute reset: close both color and bold.
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
                color_on = False
            if bold_on:
                new_s += _close_kind(stack, which_kind = 'bold')
                bold_on = False
        else:
            # Unknown escape sequence: drop it silently.
            pass
    new_s += s[old_idx:]
    # Close any tags still open at the end of the string.
    new_s += _close_all(stack)
    return new_s
def ESC_MOVE_LINE_UP(n):
    """ANSI escape sequence moving the cursor up by `n` lines."""
    return "\033[" + str(n) + "A"
def ESC_MOVE_LINE_DOWN(n):
    """ANSI escape sequence moving the cursor down by `n` lines."""
    return "\033[" + str(n) + "B"
ESC_NO_CHAR_ATTR = "\033[0m"
ESC_BOLD = "\033[1m"
ESC_DIM = "\033[2m"
ESC_UNDERLINED = "\033[4m"
ESC_BLINK = "\033[5m"
ESC_INVERTED = "\033[7m"
ESC_HIDDEN = "\033[8m"
ESC_MY_MAGIC_ENDING = ESC_HIDDEN + ESC_NO_CHAR_ATTR
# not widely supported, use '22' instead
# ESC_RESET_BOLD = "\033[21m"
ESC_RESET_DIM = "\033[22m"
ESC_RESET_BOLD = ESC_RESET_DIM
ESC_RESET_UNDERLINED = "\033[24m"
ESC_RESET_BLINK = "\033[25m"
ESC_RESET_INVERTED = "\033[27m"
ESC_RESET_HIDDEN = "\033[28m"
ESC_DEFAULT = "\033[39m"
ESC_BLACK = "\033[30m"
ESC_RED = "\033[31m"
ESC_GREEN = "\033[32m"
ESC_YELLOW = "\033[33m"
ESC_BLUE = "\033[34m"
ESC_MAGENTA = "\033[35m"
ESC_CYAN = "\033[36m"
ESC_LIGHT_GREY = "\033[37m"
ESC_DARK_GREY = "\033[90m"
ESC_LIGHT_RED = "\033[91m"
ESC_LIGHT_GREEN = "\033[92m"
ESC_LIGHT_YELLOW = "\033[93m"
ESC_LIGHT_BLUE = "\033[94m"
ESC_LIGHT_MAGENTA = "\033[95m"
ESC_LIGHT_CYAN = "\033[96m"
ESC_WHITE = "\033[97m"
ESC_COLOR_TO_HTML = {
ESC_BLACK : '#000000',
ESC_RED : '#800000',
ESC_GREEN : '#008000',
ESC_YELLOW : '#808000',
ESC_BLUE : '#000080',
ESC_MAGENTA : '#800080',
ESC_CYAN : '#008080',
ESC_LIGHT_GREY : '#c0c0c0',
ESC_DARK_GREY : '#808080',
ESC_LIGHT_RED : '#ff0000',
ESC_LIGHT_GREEN : '#00ff00',
ESC_LIGHT_YELLOW : '#ffff00',
ESC_LIGHT_BLUE : '#0000ff',
ESC_LIGHT_MAGENTA : '#ff00ff',
ESC_LIGHT_CYAN : '#00ffff',
ESC_WHITE : '#ffffff'}
ESC_SEQ_SET = [ESC_NO_CHAR_ATTR,
ESC_BOLD,
ESC_DIM,
ESC_UNDERLINED,
ESC_BLINK,
ESC_INVERTED,
ESC_HIDDEN,
ESC_RESET_BOLD,
ESC_RESET_DIM,
ESC_RESET_UNDERLINED,
ESC_RESET_BLINK,
ESC_RESET_INVERTED,
ESC_RESET_HIDDEN,
ESC_DEFAULT,
ESC_BLACK,
ESC_RED,
ESC_GREEN,
ESC_YELLOW,
ESC_BLUE,
ESC_MAGENTA,
ESC_CYAN,
ESC_LIGHT_GREY,
ESC_DARK_GREY,
ESC_LIGHT_RED,
ESC_LIGHT_GREEN,
ESC_LIGHT_YELLOW,
ESC_LIGHT_BLUE,
ESC_LIGHT_MAGENTA,
ESC_LIGHT_CYAN,
ESC_WHITE]
# terminal reservation list, see terminal_reserve
TERMINAL_RESERVATION = {}
# these are classes that print progress bars, see terminal_reserve
TERMINAL_PRINT_LOOP_CLASSES = ["ProgressBar", "ProgressBarCounter", "ProgressBarFancy", "ProgressBarCounterFancy"]
|
cimatosa/progression | progression/terminal.py | terminal_reserve | python | def terminal_reserve(progress_obj, terminal_obj=None, identifier=None):
if terminal_obj is None:
terminal_obj = sys.stdout
if identifier is None:
identifier = ''
if terminal_obj in TERMINAL_RESERVATION: # terminal was already registered
log.debug("this terminal %s has already been added to reservation list", terminal_obj)
if TERMINAL_RESERVATION[terminal_obj] is progress_obj:
log.debug("we %s have already reserved this terminal %s", progress_obj, terminal_obj)
return True
else:
log.debug("someone else %s has already reserved this terminal %s", TERMINAL_RESERVATION[terminal_obj],
terminal_obj)
return False
else: # terminal not yet registered
log.debug("terminal %s was reserved for us %s", terminal_obj, progress_obj)
TERMINAL_RESERVATION[terminal_obj] = progress_obj
return True | Registers the terminal (stdout) for printing.
Useful to prevent multiple processes from writing progress bars
to stdout.
One process (server) prints to stdout and a couple of subprocesses
do not print to the same stdout, because the server has reserved it.
Of course, the clients have to be nice and check with
terminal_reserve first if they should (not) print.
Nothing is locked.
Returns
-------
True if reservation was successful (or if we have already reserved this tty),
False if there already is a reservation from another instance. | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/terminal.py#L95-L131 | null | # -*- coding: utf-8 -*-
from __future__ import division, print_function
import os
import sys
import subprocess as sp
import logging
import platform
if platform.system() == 'Windows':
width_correction = -1
else:
width_correction = 0
try:
from shutil import get_terminal_size as shutil_get_terminal_size
except ImportError:
shutil_get_terminal_size = None
class MultiLineFormatter(logging.Formatter):
    """Formatter that indents the continuation lines of a multiline message.

    Every line after the first is padded with as many spaces as the
    formatted header occupies, so the message body lines up vertically:

        <HEAD> msg_line1
               msg_line2
    """
    def format(self, record):
        formatted = super(MultiLineFormatter, self).format(record)
        head_len = len(formatted.split(record.message)[0])
        return formatted.replace('\n', '\n' + ' ' * head_len)
def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
def_handl.setFormatter(fmt) # ... and pads multiline messaged
log = logging.getLogger(__name__) # creates the default log for this module
log.addHandler(def_handl)
def get_terminal_size(defaultw=80):
    """ Checks various methods to determine the terminal size

    Methods:
        - shutil.get_terminal_size (only Python3)
        - fcntl.ioctl (TIOCGWINSZ)
        - subprocess.check_output (tput cols)
        - os.environ (COLUMNS / LINES)

    Parameters
    ----------
    defaultw : int
        Default width of terminal, returned when every method fails.

    Returns
    -------
    width, height : int
        Width and height of the terminal. If one of them could not be
        found, None is returned in its place.
    """
    # probe shutil locally so the function does not depend on the
    # module-level import result (None on Python < 3.3)
    try:
        from shutil import get_terminal_size as _shutil_gts
    except ImportError:
        _shutil_gts = None
    if _shutil_gts is not None:
        return _shutil_gts()
    try:
        import fcntl, termios, struct
        # fd 0 (stdin); ioctl returns rows, cols -> swap to (width, height)
        hw = struct.unpack('hh', fcntl.ioctl(0, termios.TIOCGWINSZ, '1234'))
        return (hw[1], hw[0])
    except Exception:
        pass
    try:
        out = sp.check_output(["tput", "cols"])
        return (int(out.decode("utf-8").strip()), None)
    except Exception:
        pass
    try:
        # BUGFIX: environment variables are strings -- convert to int
        # (the original returned the raw str values)
        return (int(os.environ['COLUMNS']), int(os.environ['LINES']))
    except (KeyError, ValueError):
        return (defaultw, None)
def get_terminal_width(default=80, name=None):
    """Return the terminal width, falling back to *default* on any error.

    The module-level ``width_correction`` (non-zero on Windows) is applied
    to the detected width.  *name* is accepted for interface compatibility
    and ignored.
    """
    try:
        return get_terminal_size(defaultw=default)[0] + width_correction
    except:
        return default
def terminal_unreserve(progress_obj, terminal_obj=None, verbose=0, identifier=None):
    """ Unregisters the terminal (stdout) for printing.

    An instance (progress_obj) can only unreserve the tty (terminal_obj)
    when it also reserved it; see terminal_reserve for more information.

    Parameters
    ----------
    progress_obj : object
        the instance giving up its reservation
    terminal_obj : stream, optional
        the terminal to unreserve (defaults to sys.stdout)
    verbose : int
        deprecated, kept for interface compatibility (unused)
    identifier : str, optional
        prefix kept for interface compatibility (only used for formatting)

    Returns
    -------
    None
    """
    if terminal_obj is None:
        terminal_obj = sys.stdout

    if identifier is None:
        identifier = ''
    else:
        identifier = identifier + ': '

    po = TERMINAL_RESERVATION.get(terminal_obj)
    if po is None:
        log.debug("terminal %s was not reserved, nothing happens", terminal_obj)
    else:
        if po is progress_obj:
            # BUGFIX: fixed typo "unreserned" in the log message
            log.debug("terminal %s now unreserved", terminal_obj)
            del TERMINAL_RESERVATION[terminal_obj]
        else:
            # BUGFIX: fixed typo "be cause" in the log message
            log.debug("you %s can NOT unreserve terminal %s because it was reserved by %s", progress_obj, terminal_obj,
                      po)
def len_string_without_ESC(s):
    """Length of *s* counting only printable characters (ANSI escapes stripped)."""
    stripped = remove_ESC_SEQ_from_string(s)
    return len(stripped)
def remove_ESC_SEQ_from_string(s):
    """Return *s* with all ANSI escape sequences removed.

    Scans for the CSI introducer '\\033[', skips the digit parameters and
    the single final command character, and keeps everything else.

    Parameters
    ----------
    s : str
        string possibly containing ANSI escape sequences

    Returns
    -------
    str
        the input with every escape sequence stripped
    """
    ESC_CHAR_START = "\033["
    n = len(s)
    old_idx = 0
    new_s = ""
    while True:
        idx = s.find(ESC_CHAR_START, old_idx)
        if idx == -1:
            break
        j = 2
        # BUGFIX: guard against running past the end of the string when a
        # truncated escape sequence terminates the input (was an IndexError)
        while idx + j < n and s[idx + j] in '0123456789':
            j += 1
        new_s += s[old_idx:idx]
        old_idx = idx + j + 1
    new_s += s[old_idx:]
    return new_s
def _close_kind(stack, which_kind):
stack_tmp = []
s = ""
# close everything until which_kind is found
while True:
kind, start, end = stack.pop()
if kind != which_kind:
s += end
stack_tmp.append((kind, start, end))
else:
break
# close which_kind
s = end
# start everything that was closed before which_kind
for kind, start, end in stack_tmp:
s += start
stack.append((kind, start, end))
return s
def _close_all(stack):
s = ""
for kind, start, end in stack:
s += end
return s
def _open_color(stack, color):
start = '<span style="color:{}">'.format(color)
end = '</span>'
stack.append(('color', start, end))
return start
def _open_bold(stack):
start = '<b>'
end = '</b>'
stack.append(('bold', start, end))
return start
def ESC_SEQ_to_HTML(s):
    """Translate ANSI color/bold escape sequences in *s* into HTML markup.

    Color sequences become <span style="color:..."> tags, bold becomes
    <b>; the corresponding reset sequences close those tags.  Unknown
    escape sequences are dropped.  Any tags still open at the end of the
    string are closed.
    """
    old_idx = 0
    new_s = ""
    ESC_CHAR_START = "\033["
    color_on = False
    bold_on = False
    stack = []          # currently open tags, see _open_color/_open_bold
    while True:
        idx = s.find(ESC_CHAR_START, old_idx)
        if idx == -1:
            break
        j = 2
        # BUGFIX: guard against truncated escape sequences at end of string
        while idx + j < len(s) and s[idx + j] in '0123456789':
            j += 1
        new_s += s[old_idx:idx]
        old_idx = idx + j + 1
        escseq = s[idx:idx+j+1]
        if escseq in ESC_COLOR_TO_HTML:     # set color
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
            new_s += _open_color(stack, ESC_COLOR_TO_HTML[escseq])
            color_on = True
        elif escseq == ESC_DEFAULT:         # unset color
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
                color_on = False
        elif escseq == ESC_BOLD:
            if not bold_on:
                new_s += _open_bold(stack)
                bold_on = True
        elif escseq == ESC_RESET_BOLD:
            if bold_on:
                new_s += _close_kind(stack, which_kind = 'bold')
                bold_on = False
        elif escseq == ESC_NO_CHAR_ATTR:    # reset everything
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
                color_on = False
            if bold_on:
                new_s += _close_kind(stack, which_kind = 'bold')
                bold_on = False
        else:
            pass                            # unknown sequence: drop silently
    # BUGFIX: this tail was indented inside the while loop, so the function
    # returned after processing the first escape sequence and returned None
    # for strings without any escape sequence.  Moved to function level
    # (matches the other copy of this module in the file).
    new_s += s[old_idx:]
    new_s += _close_all(stack)
    return new_s
def ESC_MOVE_LINE_UP(n):
    """ANSI escape sequence moving the cursor *n* lines up."""
    return "\033[" + str(n) + "A"
def ESC_MOVE_LINE_DOWN(n):
    """ANSI escape sequence moving the cursor *n* lines down."""
    return "\033[" + str(n) + "B"
ESC_NO_CHAR_ATTR = "\033[0m"
ESC_BOLD = "\033[1m"
ESC_DIM = "\033[2m"
ESC_UNDERLINED = "\033[4m"
ESC_BLINK = "\033[5m"
ESC_INVERTED = "\033[7m"
ESC_HIDDEN = "\033[8m"
ESC_MY_MAGIC_ENDING = ESC_HIDDEN + ESC_NO_CHAR_ATTR
# not widely supported, use '22' instead
# ESC_RESET_BOLD = "\033[21m"
ESC_RESET_DIM = "\033[22m"
ESC_RESET_BOLD = ESC_RESET_DIM
ESC_RESET_UNDERLINED = "\033[24m"
ESC_RESET_BLINK = "\033[25m"
ESC_RESET_INVERTED = "\033[27m"
ESC_RESET_HIDDEN = "\033[28m"
ESC_DEFAULT = "\033[39m"
ESC_BLACK = "\033[30m"
ESC_RED = "\033[31m"
ESC_GREEN = "\033[32m"
ESC_YELLOW = "\033[33m"
ESC_BLUE = "\033[34m"
ESC_MAGENTA = "\033[35m"
ESC_CYAN = "\033[36m"
ESC_LIGHT_GREY = "\033[37m"
ESC_DARK_GREY = "\033[90m"
ESC_LIGHT_RED = "\033[91m"
ESC_LIGHT_GREEN = "\033[92m"
ESC_LIGHT_YELLOW = "\033[93m"
ESC_LIGHT_BLUE = "\033[94m"
ESC_LIGHT_MAGENTA = "\033[95m"
ESC_LIGHT_CYAN = "\033[96m"
ESC_WHITE = "\033[97m"
ESC_COLOR_TO_HTML = {
ESC_BLACK : '#000000',
ESC_RED : '#800000',
ESC_GREEN : '#008000',
ESC_YELLOW : '#808000',
ESC_BLUE : '#000080',
ESC_MAGENTA : '#800080',
ESC_CYAN : '#008080',
ESC_LIGHT_GREY : '#c0c0c0',
ESC_DARK_GREY : '#808080',
ESC_LIGHT_RED : '#ff0000',
ESC_LIGHT_GREEN : '#00ff00',
ESC_LIGHT_YELLOW : '#ffff00',
ESC_LIGHT_BLUE : '#0000ff',
ESC_LIGHT_MAGENTA : '#ff00ff',
ESC_LIGHT_CYAN : '#00ffff',
ESC_WHITE : '#ffffff'}
ESC_SEQ_SET = [ESC_NO_CHAR_ATTR,
ESC_BOLD,
ESC_DIM,
ESC_UNDERLINED,
ESC_BLINK,
ESC_INVERTED,
ESC_HIDDEN,
ESC_RESET_BOLD,
ESC_RESET_DIM,
ESC_RESET_UNDERLINED,
ESC_RESET_BLINK,
ESC_RESET_INVERTED,
ESC_RESET_HIDDEN,
ESC_DEFAULT,
ESC_BLACK,
ESC_RED,
ESC_GREEN,
ESC_YELLOW,
ESC_BLUE,
ESC_MAGENTA,
ESC_CYAN,
ESC_LIGHT_GREY,
ESC_DARK_GREY,
ESC_LIGHT_RED,
ESC_LIGHT_GREEN,
ESC_LIGHT_YELLOW,
ESC_LIGHT_BLUE,
ESC_LIGHT_MAGENTA,
ESC_LIGHT_CYAN,
ESC_WHITE]
# terminal reservation list, see terminal_reserve
TERMINAL_RESERVATION = {}
# these are classes that print progress bars, see terminal_reserve
TERMINAL_PRINT_LOOP_CLASSES = ["ProgressBar", "ProgressBarCounter", "ProgressBarFancy", "ProgressBarCounterFancy"]
|
cimatosa/progression | progression/terminal.py | terminal_unreserve | python | def terminal_unreserve(progress_obj, terminal_obj=None, verbose=0, identifier=None):
if terminal_obj is None:
terminal_obj = sys.stdout
if identifier is None:
identifier = ''
else:
identifier = identifier + ': '
po = TERMINAL_RESERVATION.get(terminal_obj)
if po is None:
log.debug("terminal %s was not reserved, nothing happens", terminal_obj)
else:
if po is progress_obj:
log.debug("terminal %s now unreserned", terminal_obj)
del TERMINAL_RESERVATION[terminal_obj]
else:
log.debug("you %s can NOT unreserve terminal %s be cause it was reserved by %s", progress_obj, terminal_obj,
po) | Unregisters the terminal (stdout) for printing.
an instance (progress_obj) can only unreserve the tty (terminal_obj) when it also reserved it
see terminal_reserved for more information
Returns
-------
None | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/terminal.py#L134-L163 | null | # -*- coding: utf-8 -*-
from __future__ import division, print_function
import os
import sys
import subprocess as sp
import logging
import platform
if platform.system() == 'Windows':
width_correction = -1
else:
width_correction = 0
try:
from shutil import get_terminal_size as shutil_get_terminal_size
except ImportError:
shutil_get_terminal_size = None
class MultiLineFormatter(logging.Formatter):
    """Formatter that indents the continuation lines of a multiline message.

    Every line after the first is padded with as many spaces as the
    formatted header occupies, so the message body lines up vertically:

        <HEAD> msg_line1
               msg_line2
    """
    def format(self, record):
        formatted = super(MultiLineFormatter, self).format(record)
        head_len = len(formatted.split(record.message)[0])
        return formatted.replace('\n', '\n' + ' ' * head_len)
def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
def_handl.setFormatter(fmt) # ... and pads multiline messaged
log = logging.getLogger(__name__) # creates the default log for this module
log.addHandler(def_handl)
def get_terminal_size(defaultw=80):
    """ Checks various methods to determine the terminal size

    Methods:
        - shutil.get_terminal_size (only Python3)
        - fcntl.ioctl (TIOCGWINSZ)
        - subprocess.check_output (tput cols)
        - os.environ (COLUMNS / LINES)

    Parameters
    ----------
    defaultw : int
        Default width of terminal, returned when every method fails.

    Returns
    -------
    width, height : int
        Width and height of the terminal. If one of them could not be
        found, None is returned in its place.
    """
    # probe shutil locally so the function does not depend on the
    # module-level import result (None on Python < 3.3)
    try:
        from shutil import get_terminal_size as _shutil_gts
    except ImportError:
        _shutil_gts = None
    if _shutil_gts is not None:
        return _shutil_gts()
    try:
        import fcntl, termios, struct
        # fd 0 (stdin); ioctl returns rows, cols -> swap to (width, height)
        hw = struct.unpack('hh', fcntl.ioctl(0, termios.TIOCGWINSZ, '1234'))
        return (hw[1], hw[0])
    except Exception:
        pass
    try:
        out = sp.check_output(["tput", "cols"])
        return (int(out.decode("utf-8").strip()), None)
    except Exception:
        pass
    try:
        # BUGFIX: environment variables are strings -- convert to int
        # (the original returned the raw str values)
        return (int(os.environ['COLUMNS']), int(os.environ['LINES']))
    except (KeyError, ValueError):
        return (defaultw, None)
def get_terminal_width(default=80, name=None):
    """Return the terminal width, falling back to *default* on any error.

    The module-level ``width_correction`` (non-zero on Windows) is applied
    to the detected width.  *name* is accepted for interface compatibility
    and ignored.
    """
    try:
        return get_terminal_size(defaultw=default)[0] + width_correction
    except:
        return default
def terminal_reserve(progress_obj, terminal_obj=None, identifier=None):
    """ Registers the terminal (stdout) for printing.

    Useful to prevent multiple processes from writing progress bars
    to stdout.

    One process (server) prints to stdout and a couple of subprocesses
    do not print to the same stdout, because the server has reserved it.
    Of course, the clients have to be nice and check with
    terminal_reserve first if they should (not) print.

    Nothing is locked.

    Returns
    -------
    True if reservation was successful (or if we have already reserved this tty),
    False if there already is a reservation from another instance.
    """
    if terminal_obj is None:
        terminal_obj = sys.stdout
    if identifier is None:
        identifier = ''

    if terminal_obj not in TERMINAL_RESERVATION:
        # terminal not yet registered -> take it
        log.debug("terminal %s was reserved for us %s", terminal_obj, progress_obj)
        TERMINAL_RESERVATION[terminal_obj] = progress_obj
        return True

    # terminal was already registered
    log.debug("this terminal %s has already been added to reservation list", terminal_obj)
    owner = TERMINAL_RESERVATION[terminal_obj]
    if owner is progress_obj:
        log.debug("we %s have already reserved this terminal %s", progress_obj, terminal_obj)
        return True
    log.debug("someone else %s has already reserved this terminal %s", owner,
              terminal_obj)
    return False
def len_string_without_ESC(s):
    """Length of *s* counting only printable characters (ANSI escapes stripped)."""
    stripped = remove_ESC_SEQ_from_string(s)
    return len(stripped)
def remove_ESC_SEQ_from_string(s):
    """Return *s* with all ANSI escape sequences removed.

    Scans for the CSI introducer '\\033[', skips the digit parameters and
    the single final command character, and keeps everything else.

    Parameters
    ----------
    s : str
        string possibly containing ANSI escape sequences

    Returns
    -------
    str
        the input with every escape sequence stripped
    """
    ESC_CHAR_START = "\033["
    n = len(s)
    old_idx = 0
    new_s = ""
    while True:
        idx = s.find(ESC_CHAR_START, old_idx)
        if idx == -1:
            break
        j = 2
        # BUGFIX: guard against running past the end of the string when a
        # truncated escape sequence terminates the input (was an IndexError)
        while idx + j < n and s[idx + j] in '0123456789':
            j += 1
        new_s += s[old_idx:idx]
        old_idx = idx + j + 1
    new_s += s[old_idx:]
    return new_s
def _close_kind(stack, which_kind):
stack_tmp = []
s = ""
# close everything until which_kind is found
while True:
kind, start, end = stack.pop()
if kind != which_kind:
s += end
stack_tmp.append((kind, start, end))
else:
break
# close which_kind
s = end
# start everything that was closed before which_kind
for kind, start, end in stack_tmp:
s += start
stack.append((kind, start, end))
return s
def _close_all(stack):
s = ""
for kind, start, end in stack:
s += end
return s
def _open_color(stack, color):
start = '<span style="color:{}">'.format(color)
end = '</span>'
stack.append(('color', start, end))
return start
def _open_bold(stack):
start = '<b>'
end = '</b>'
stack.append(('bold', start, end))
return start
def ESC_SEQ_to_HTML(s):
    """Translate ANSI color/bold escape sequences in *s* into HTML markup.

    Color sequences become <span style="color:..."> tags, bold becomes
    <b>; the corresponding reset sequences close those tags.  Unknown
    escape sequences are dropped.  Any tags still open at the end of the
    string are closed.
    """
    old_idx = 0
    new_s = ""
    ESC_CHAR_START = "\033["
    color_on = False
    bold_on = False
    stack = []          # currently open tags, see _open_color/_open_bold
    while True:
        idx = s.find(ESC_CHAR_START, old_idx)
        if idx == -1:
            break
        j = 2
        # BUGFIX: guard against truncated escape sequences at end of string
        # (the unbounded scan raised IndexError)
        while idx + j < len(s) and s[idx + j] in '0123456789':
            j += 1
        new_s += s[old_idx:idx]
        old_idx = idx + j + 1
        escseq = s[idx:idx+j+1]
        if escseq in ESC_COLOR_TO_HTML:     # set color
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
            new_s += _open_color(stack, ESC_COLOR_TO_HTML[escseq])
            color_on = True
        elif escseq == ESC_DEFAULT:         # unset color
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
                color_on = False
        elif escseq == ESC_BOLD:
            if not bold_on:
                new_s += _open_bold(stack)
                bold_on = True
        elif escseq == ESC_RESET_BOLD:
            if bold_on:
                new_s += _close_kind(stack, which_kind = 'bold')
                bold_on = False
        elif escseq == ESC_NO_CHAR_ATTR:    # reset everything
            if color_on:
                new_s += _close_kind(stack, which_kind = 'color')
                color_on = False
            if bold_on:
                new_s += _close_kind(stack, which_kind = 'bold')
                bold_on = False
        else:
            pass                            # unknown sequence: drop silently
    new_s += s[old_idx:]
    new_s += _close_all(stack)
    return new_s
def ESC_MOVE_LINE_UP(n):
    """ANSI escape sequence moving the cursor *n* lines up."""
    return "\033[" + str(n) + "A"
def ESC_MOVE_LINE_DOWN(n):
    """ANSI escape sequence moving the cursor *n* lines down."""
    return "\033[" + str(n) + "B"
ESC_NO_CHAR_ATTR = "\033[0m"
ESC_BOLD = "\033[1m"
ESC_DIM = "\033[2m"
ESC_UNDERLINED = "\033[4m"
ESC_BLINK = "\033[5m"
ESC_INVERTED = "\033[7m"
ESC_HIDDEN = "\033[8m"
ESC_MY_MAGIC_ENDING = ESC_HIDDEN + ESC_NO_CHAR_ATTR
# not widely supported, use '22' instead
# ESC_RESET_BOLD = "\033[21m"
ESC_RESET_DIM = "\033[22m"
ESC_RESET_BOLD = ESC_RESET_DIM
ESC_RESET_UNDERLINED = "\033[24m"
ESC_RESET_BLINK = "\033[25m"
ESC_RESET_INVERTED = "\033[27m"
ESC_RESET_HIDDEN = "\033[28m"
ESC_DEFAULT = "\033[39m"
ESC_BLACK = "\033[30m"
ESC_RED = "\033[31m"
ESC_GREEN = "\033[32m"
ESC_YELLOW = "\033[33m"
ESC_BLUE = "\033[34m"
ESC_MAGENTA = "\033[35m"
ESC_CYAN = "\033[36m"
ESC_LIGHT_GREY = "\033[37m"
ESC_DARK_GREY = "\033[90m"
ESC_LIGHT_RED = "\033[91m"
ESC_LIGHT_GREEN = "\033[92m"
ESC_LIGHT_YELLOW = "\033[93m"
ESC_LIGHT_BLUE = "\033[94m"
ESC_LIGHT_MAGENTA = "\033[95m"
ESC_LIGHT_CYAN = "\033[96m"
ESC_WHITE = "\033[97m"
ESC_COLOR_TO_HTML = {
ESC_BLACK : '#000000',
ESC_RED : '#800000',
ESC_GREEN : '#008000',
ESC_YELLOW : '#808000',
ESC_BLUE : '#000080',
ESC_MAGENTA : '#800080',
ESC_CYAN : '#008080',
ESC_LIGHT_GREY : '#c0c0c0',
ESC_DARK_GREY : '#808080',
ESC_LIGHT_RED : '#ff0000',
ESC_LIGHT_GREEN : '#00ff00',
ESC_LIGHT_YELLOW : '#ffff00',
ESC_LIGHT_BLUE : '#0000ff',
ESC_LIGHT_MAGENTA : '#ff00ff',
ESC_LIGHT_CYAN : '#00ffff',
ESC_WHITE : '#ffffff'}
ESC_SEQ_SET = [ESC_NO_CHAR_ATTR,
ESC_BOLD,
ESC_DIM,
ESC_UNDERLINED,
ESC_BLINK,
ESC_INVERTED,
ESC_HIDDEN,
ESC_RESET_BOLD,
ESC_RESET_DIM,
ESC_RESET_UNDERLINED,
ESC_RESET_BLINK,
ESC_RESET_INVERTED,
ESC_RESET_HIDDEN,
ESC_DEFAULT,
ESC_BLACK,
ESC_RED,
ESC_GREEN,
ESC_YELLOW,
ESC_BLUE,
ESC_MAGENTA,
ESC_CYAN,
ESC_LIGHT_GREY,
ESC_DARK_GREY,
ESC_LIGHT_RED,
ESC_LIGHT_GREEN,
ESC_LIGHT_YELLOW,
ESC_LIGHT_BLUE,
ESC_LIGHT_MAGENTA,
ESC_LIGHT_CYAN,
ESC_WHITE]
# terminal reservation list, see terminal_reserve
TERMINAL_RESERVATION = {}
# these are classes that print progress bars, see terminal_reserve
TERMINAL_PRINT_LOOP_CLASSES = ["ProgressBar", "ProgressBarCounter", "ProgressBarFancy", "ProgressBarCounterFancy"]
|
cimatosa/progression | progression/progress.py | _loop_wrapper_func | python | def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
logging_level, conn_send, func_running, log_queue):
prefix = get_identifier(name) + ' '
global log
log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
log.setLevel(logging_level)
log.addHandler(QueueHandler(log_queue))
sys.stdout = StdoutPipe(conn_send)
log.debug("enter wrapper_func")
SIG_handler_Loop(sigint, sigterm, log, prefix)
func_running.value = True
error = False
while shared_mem_run.value:
try:
# in pause mode, simply sleep
if shared_mem_pause.value:
quit_loop = False
else:
# if not pause mode -> call func and see what happens
try:
quit_loop = func(*args)
except LoopInterruptError:
raise
except Exception as e:
log.error("error %s occurred in loop calling 'func(*args)'", type(e))
log.info("show traceback.print_exc()\n%s", traceback.format_exc())
error = True
break
if quit_loop is True:
log.debug("loop stooped because func returned True")
break
time.sleep(interval)
except LoopInterruptError:
log.debug("quit wrapper_func due to InterruptedError")
break
func_running.value = False
if error:
sys.exit(-1)
else:
log.debug("wrapper_func terminates gracefully")
# gets rid of the following warnings
# Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
# ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
try:
if mp.get_start_method() == "spawn":
sys.stdin.close()
except AttributeError:
pass | to be executed as a separate process (that's why this functions is declared static) | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L263-L323 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Progression module
------------------
This module provides the (so far) four variants to display progress information:
* :py:class:`.ProgressBar`
This class monitors one or multiple processes showing the total elapsed time (TET), the current speed
estimated from the most recent updated, a colored bar showing the progress and an
estimate for the remaining time, also called time to go (TTG).
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 5.83s [7.2c/s] <span style="color:#00ff00"><b>[=====================> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounter`
If a single process is intended to do several sequential task, the :py:class:`.ProgressBarCounter` class can keep track of the number
of accomplished tasks on top of monitoring the individual task just like :py:class:`.ProgressBar` does.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre><span style="color:#00ff00"><b> [</b><b>TET</b>-5.83s-----[7.2c/s]-<b>TTG</b>-8.05s-></span> 42.0% <b>ETA</b> 20161011_16:52:52 <b>ORT</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
* :py:class:`.ProgressBarFancy`
This class intends to be a replacement for :py:class:`.ProgressBar` with slightly more information and
better handling of small terminal widths.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - 5.83s [7.2c/s] <span style="color:#00ff00"><b>[===========> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounterFancy`
Just as :py:class:`.ProgressBarFancy` this replaces :py:class:`.ProgressBarCounter`.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - <span style="color:#800000"></span><span style="color:#00ff00"><b>[</b><b>E</b>-5.83s-----[7.2c/s]-<b>G</b>-8</span>.05s 42.0% <b>O</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
.. autoclass:: Progress
:members:
:inherited-members:
.. autoclass:: ProgressBar
:members:
.. autoclass:: ProgressBarCounter
:members:
.. autoclass:: ProgressBarFancy
:members:
.. autoclass:: ProgressBarCounterFancy
:members:
.. autofunction:: UnsignedIntValue
.. autofunction:: FloatValue
.. autofunction:: StringValue
"""
from __future__ import division, print_function
import datetime
import io
import logging
from logging.handlers import QueueHandler, QueueListener
import math
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
import os
import sys
import signal
import subprocess as sp
import threading
import time
import traceback
import warnings
from . import terminal
import platform
_IPYTHON = True
try:
import ipywidgets
except ImportError:
_IPYTHON = False
warnings.warn("could not load ipywidgets (IPython HTML output will not work)", category=ImportWarning)
except DeprecationWarning:
pass
try:
from IPython.display import display
except ImportError:
_IPYTHON = False
warnings.warn("could not load IPython (IPython HTML output will not work)", category=ImportWarning)
# Magic conversion from 3 to 2
if sys.version_info[0] == 2:
ProcessLookupError = OSError
inMemoryBuffer = io.BytesIO
old_math_ceil = math.ceil
def my_int_ceil(f):
return int(old_math_ceil(f))
math.ceil = my_int_ceil
_jm_compatible_bytearray = lambda x: x
class TimeoutError(Exception):
pass
elif sys.version_info[0] == 3:
inMemoryBuffer = io.StringIO
_jm_compatible_bytearray = bytearray
class MultiLineFormatter(logging.Formatter):
    """Formatter that indents the continuation lines of a multiline message.

    Every line after the first is padded with as many spaces as the
    formatted header occupies, so the message body lines up vertically:

        <HEAD> msg_line1
               msg_line2
    """
    def format(self, record):
        formatted = super(MultiLineFormatter, self).format(record)
        head_len = len(formatted.split(record.message)[0])
        return formatted.replace('\n', '\n' + ' ' * head_len)
# def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
# def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
# def_handl.setFormatter(fmt) # ... and pads multiline messaged
log = logging.getLogger(__name__) # creates the default log for this module
# log.addHandler(def_handl)
class LoopExceptionError(RuntimeError):
    """Raised by Loop.start/Loop.stop when the subprocess running the loop
    function terminated with a non-zero exitcode."""
    pass
class LoopInterruptError(Exception):
    """Raised inside the loop subprocess to abort it; caught in
    _loop_wrapper_func.  Presumably raised by the SIG_handler_Loop signal
    handlers (defined elsewhere) -- TODO confirm."""
    pass
class StdoutPipe(object):
    """File-like replacement for a stream object such as sys.stdout.

    Instead of writing to a terminal, every chunk passed to :meth:`write`
    is forwarded through the ``send`` method of the connection object
    given at construction time (typically one end of a
    ``multiprocessing.Pipe``).  ``flush`` is a no-op, which is all that is
    needed for the object to serve as a drop-in stdout.
    """
    def __init__(self, conn):
        self.conn = conn

    def flush(self):
        # nothing is buffered locally -- sending happens directly in write()
        pass

    def write(self, b):
        # forward the raw chunk to the other end of the pipe
        self.conn.send(b)
class PipeToPrint(object):
    """Default pipe handler: echo every received chunk straight to stdout."""
    def __call__(self, b):
        print(b, end='')

    def close(self):
        # nothing to release for plain printing
        pass
class PipeFromProgressToIPythonHTMLWidget(object):
    """Pipe handler that renders progress output into an IPython HTML widget.

    Chunks are buffered until one ends with ``terminal.ESC_MY_MAGIC_ENDING``,
    which marks a complete progress frame; the buffered frame is then
    converted from ANSI escape sequences to HTML and pushed into the widget.
    """
    def __init__(self):
        self.htmlWidget = ipywidgets.widgets.HTML()
        display(self.htmlWidget)       # show the widget in the notebook output
        self._buff = ""                # accumulates chunks of the current frame
    def __call__(self, b):
        self._buff += b
        if b.endswith(terminal.ESC_MY_MAGIC_ENDING):
            # frame complete: translate ANSI escapes to HTML and display
            buff = terminal.ESC_SEQ_to_HTML(self._buff)
            self.htmlWidget.value = '<style>.widget-html{font-family:monospace}</style><pre>'+buff+'</pre>'
            self._buff = ""
    def close(self):
        self.htmlWidget.close()
PipeHandler = PipeToPrint
def choose_pipe_handler(kind = 'print', color_theme = None):
    """Select the global PipeHandler class used to display subprocess output.

    Parameters
    ----------
    kind : str
        'print' writes chunks straight to stdout (PipeToPrint);
        'ipythonhtml' renders them into an IPython HTML widget (requires
        IPython and ipywidgets to have been imported successfully).
    color_theme : optional
        forwarded to choose_color_theme; when None a per-kind default
        theme is used.  (choose_color_theme is defined elsewhere in the
        package -- not visible here.)

    Raises
    ------
    ValueError
        for an unknown *kind*.
    """
    global PipeHandler
    if kind == 'print':
        PipeHandler = PipeToPrint
        if color_theme is None:
            choose_color_theme('term_default')
        else:
            choose_color_theme(color_theme)
    elif kind == 'ipythonhtml':
        if _IPYTHON:
            PipeHandler = PipeFromProgressToIPythonHTMLWidget
            if color_theme is None:
                choose_color_theme('ipyt_default')
            else:
                choose_color_theme(color_theme)
        else:
            # ipywidgets/IPython failed to import at module load time
            warnings.warn("can not choose ipythonHTML (IPython and/or ipywidgets were not loaded)")
    else:
        # BUGFIX: the message contained a '{}' placeholder but .format(kind)
        # was never called, so the offending value was not reported
        raise ValueError("unknown kind '{}' for pipe_handler, use one out of ('print', 'ipythonhtml')".format(kind))
def get_terminal_width():
    # Dispatch on the globally selected PipeHandler: real terminal width for
    # plain printing, fixed 80 columns when rendering into an IPython widget.
    if PipeHandler == PipeToPrint:
        return terminal.get_terminal_width()
    elif PipeHandler == PipeFromProgressToIPythonHTMLWidget:
        return 80
    else:
        # only the two handlers above are ever assigned by choose_pipe_handler
        raise NotImplementedError
def get_identifier(name=None, pid=None, bold=True):
    """Build a log identifier of the form 'name_pid' (or 'PID_pid').

    Uses the current process id when *pid* is not given; wraps the result
    in ANSI bold escape codes when *bold* is True.
    """
    if pid is None:
        pid = os.getpid()
    start, stop = (terminal.ESC_BOLD, terminal.ESC_NO_CHAR_ATTR) if bold else ("", "")
    core = "PID_{}".format(pid) if name is None else "{}_{}".format(name, pid)
    return start + core + stop
def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
                       logging_level, conn_send, func_running, log_queue):
    """Loop body executed in the spawned subprocess (see Loop.start).

    Repeatedly calls ``func(*args)`` every *interval* seconds while the
    shared boolean *shared_mem_run* stays True; while *shared_mem_pause*
    is True it only sleeps.  The loop also stops when func returns True,
    or when func raises (the subprocess then exits with code -1).

    stdout of the subprocess is redirected through *conn_send* (one end
    of a multiprocessing.Pipe) and log records are shipped to the parent
    via *log_queue*.  *sigint*/*sigterm* configure the signal handlers
    installed by SIG_handler_Loop (defined elsewhere in the module).
    """
    prefix = get_identifier(name) + ' '
    global log
    # per-process logger that forwards records to the parent via the queue
    log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
    log.setLevel(logging_level)
    log.addHandler(QueueHandler(log_queue))
    # everything printed by func goes through the pipe to the parent
    sys.stdout = StdoutPipe(conn_send)
    log.debug("enter wrapper_func")
    SIG_handler_Loop(sigint, sigterm, log, prefix)
    # signal the parent that the wrapper is up (Loop.start waits on this)
    func_running.value = True
    error = False
    while shared_mem_run.value:
        try:
            # in pause mode, simply sleep
            if shared_mem_pause.value:
                quit_loop = False
            else:
                # if not pause mode -> call func and see what happens
                try:
                    quit_loop = func(*args)
                except LoopInterruptError:
                    raise
                except Exception as e:
                    log.error("error %s occurred in loop calling 'func(*args)'", type(e))
                    log.info("show traceback.print_exc()\n%s", traceback.format_exc())
                    error = True
                    break
            if quit_loop is True:
                # NOTE(review): "stooped" is a typo ("stopped") in this message
                log.debug("loop stooped because func returned True")
                break
            time.sleep(interval)
        except LoopInterruptError:
            log.debug("quit wrapper_func due to InterruptedError")
            break
    func_running.value = False
    if error:
        # non-zero exitcode tells Loop.start/Loop.stop that func failed
        sys.exit(-1)
    else:
        log.debug("wrapper_func terminates gracefully")
    # gets rid of the following warnings
    #    Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
    #    ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
    try:
        if mp.get_start_method() == "spawn":
            sys.stdin.close()
    except AttributeError:
        # mp.get_start_method does not exist on Python 2
        pass
class LoopTimeoutError(TimeoutError):
    """Raised by Loop.start when the loop function does not come up within
    the given timeout."""
    pass
class Loop(object):
"""
class to run a function periodically an seperate process.
In case the called function returns True, the loop will stop.
Otherwise a time interval given by interval will be slept before
another execution is triggered.
The shared memory variable _run (accessible via the class property run)
also determines if the function if executed another time. If set to False
the execution stops.
For safe cleanup (and in order to catch any Errors)
it is advisable to instantiate this class
using 'with' statement as follows:
with Loop(**kwargs) as my_loop:
my_loop.start()
...
this will guarantee you that the spawned loop process is
down when exiting the 'with' scope.
The only circumstance where the process is still running is
when you set auto_kill_on_last_resort to False and answer the
question to send SIGKILL with no.
"""
def __init__(self,
func,
args = (),
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
auto_kill_on_last_resort = False,
raise_error = True):
"""
func [callable] - function to be called periodically
args [tuple] - arguments passed to func when calling
intervall [pos number] - time to "sleep" between each call
verbose - DEPRECATED, only kept for compatibility, use global log.level to
specify verbosity
sigint [string] - signal handler string to set SIGINT behavior (see below)
sigterm [string] - signal handler string to set SIGTERM behavior (see below)
auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
to loop process in case normal stop and SIGTERM failed. If set True, send SIDKILL
without asking.
the signal handler string may be one of the following
ing: ignore the incoming signal
stop: raise InterruptedError which is caught silently.
"""
self._proc = None
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
self.func = func
self.args = args
self.interval = interval
assert self.interval >= 0
self._run = mp.Value('b', False)
self._pause = mp.Value('b', False)
self._func_running = mp.Value('b', False)
self._sigint = sigint
self._sigterm = sigterm
self._auto_kill_on_last_resort = auto_kill_on_last_resort
log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
self._monitor_thread = None
self.pipe_handler = PipeHandler()
self.raise_error = raise_error
def __enter__(self):
return self
def __exit__(self, *exc_args):
if self.is_alive():
log.debug("loop is still running on context exit")
else:
log.debug("loop has stopped on context exit")
self.stop()
def __cleanup(self):
"""
Wait at most twice as long as the given repetition interval
for the _wrapper_function to terminate.
If after that time the _wrapper_function has not terminated,
send SIGTERM to and the process.
Wait at most five times as long as the given repetition interval
for the _wrapper_function to terminate.
If the process still running send SIGKILL automatically if
auto_kill_on_last_resort was set True or ask the
user to confirm sending SIGKILL
"""
# set run to False and wait some time -> see what happens
self._run.value = False
if check_process_termination(proc = self._proc,
timeout = 2*self.interval,
prefix = '',
auto_kill_on_last_resort = self._auto_kill_on_last_resort):
log.debug("cleanup successful")
else:
raise RuntimeError("cleanup FAILED!")
try:
self.conn_send.close()
self._log_queue_listener.stop()
except OSError:
pass
log.debug("wait for monitor thread to join")
self._monitor_thread.join()
log.debug("monitor thread to joined")
self._func_running.value = False
def _monitor_stdout_pipe(self):
while True:
try:
b = self.conn_recv.recv()
self.pipe_handler(b)
except EOFError:
break
def start(self, timeout=None):
"""
uses multiprocess Process to call _wrapper_func in subprocess
"""
if self.is_alive():
log.warning("a process with pid %s is already running", self._proc.pid)
return
self._run.value = True
self._func_running.value = False
name = self.__class__.__name__
self.conn_recv, self.conn_send = mp.Pipe(False)
self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
self._monitor_thread.daemon=True
self._monitor_thread.start()
log.debug("started monitor thread")
self._log_queue = mp.Queue()
self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
self._log_queue_listener.start()
args = (self.func, self.args, self._run, self._pause, self.interval,
self._sigint, self._sigterm, name, log.level, self.conn_send,
self._func_running, self._log_queue)
self._proc = mp.Process(target = _loop_wrapper_func,
args = args)
self._proc.start()
log.info("started a new process with pid %s", self._proc.pid)
log.debug("wait for loop function to come up")
t0 = time.time()
while not self._func_running.value:
if self._proc.exitcode is not None:
exc = self._proc.exitcode
self._proc = None
if exc == 0:
log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
return
else:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
"see log (INFO level) for traceback information")
time.sleep(0.1)
if (timeout is not None) and ((time.time() - t0) > timeout):
err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
log.error(err_msg)
log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
"or an internal error occurred before reaching the function call")
raise LoopTimeoutError(err_msg)
log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))
    def stop(self):
        """
        stops the process triggered by start
        Setting the shared memory boolean run to false, which should prevent
        the loop from repeating. Call __cleanup to make sure the process
        stopped. After that we could trigger start() again.
        """
        if self.is_alive():
            self._proc.terminate()
        if self._proc is not None:
            self.__cleanup()
            if self.raise_error:
                # NOTE(review): only the specific exitcode 255 raises here even
                # though the message speaks of any non-zero exitcode -- confirm
                # that other non-zero exitcodes are intentionally ignored
                if self._proc.exitcode == 255:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
                                             "see log (INFO level) for traceback information")
            # NOTE(review): when the exception above fires, pipe_handler.close()
            # and the _proc reset are skipped -- possible leak, verify
            self.pipe_handler.close()
            self._proc = None
def join(self, timeout):
"""
calls join for the spawned process with given timeout
"""
if self.is_alive():
self._proc.join(timeout)
def is_alive(self):
if self._proc is None:
return False
else:
return self._proc.is_alive()
def is_running(self):
if self.is_alive():
return self._func_running.value
else:
return False
def pause(self):
if self._run.value:
self._pause.value = True
log.debug("process with pid %s paused", self._proc.pid)
def resume(self):
if self._run.value:
self._pause.value = False
log.debug("process with pid %s resumed", self._proc.pid)
def getpid(self):
if self._proc is not None:
return self._proc.pid
else:
return None
def show_stat_base(count_value, max_count_value, prepend, speed, tet, ttg, width, **kwargs):
    """Abstract hook that formats one line of progress information.

    Called periodically for each monitored progress; subclasses overwrite
    it to implement a concrete layout.

    :param count_value: the current counter state
    :param max_count_value: the largest value `count_value` can reach
        (None when unknown)
    :param prepend: additional text put in front of the progress line
    :param speed: the current speed estimate
    :param tet: total elapsed time
    :param ttg: estimated time to go
    :param width: width available for the line; `"auto"` asks the
        implementation to detect the terminal width itself
    :type width: int or "auto"
    :raises NotImplementedError: always -- this base variant must be overwritten
    """
    raise NotImplementedError
def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                width, q, last_speed, prepend, show_stat_function, add_args,
                                i, lock):
    """Compute the current statistics for progress *i* and render them.

    Delegates the numeric work (speed, TET, TTG) to :py:meth:`Progress._calc`
    and the formatting to *show_stat_function*.
    """
    count_value, max_count_value, speed, tet, ttg = Progress._calc(
        count, last_count, start_time, max_count, speed_calc_cycles, q, last_speed, lock)
    return show_stat_function(count_value, max_count_value, prepend,
                              speed, tet, ttg, width, i, **add_args)
def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                      width, q, last_speed, prepend, show_stat_function, len_,
                                      add_args, lock, info_line, no_move_up=False):
    """Render the stat line of every monitored process plus the optional
    info lines, then move the cursor back up so the next cycle overwrites
    the whole block in place.
    """
    for idx in range(len_):
        _show_stat_wrapper_Progress(count[idx], last_count[idx], start_time[idx], max_count[idx],
                                    speed_calc_cycles, width, q[idx], last_speed[idx], prepend[idx],
                                    show_stat_function, add_args, idx, lock[idx])
    lines_printed = len_
    if info_line is not None:
        info_lines = info_line.value.decode('utf-8').split('\n')
        lines_printed += len(info_lines)
        for line in info_lines:
            if width == 'auto':
                width = get_terminal_width()
            if len(line) > width:
                line = line[:width]
            # pad to full width so stale characters get overwritten
            print("{0:<{1}}".format(line, width))
    if no_move_up:
        # keep the cursor below the block (used for the final stats on exit)
        lines_printed = 0
    # ESC_HIDDEN + ESC_NO_CHAR_ATTR acts as a magic marker for the end of one
    # update block when the output is parsed from a stream
    print(terminal.ESC_MOVE_LINE_UP(lines_printed) + terminal.ESC_MY_MAGIC_ENDING, end='')
    sys.stdout.flush()
class Progress(Loop):
    """
    Abstract Progress Class
    The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
    function which calculates progress information from a changing counter value.
    The formatting of these information is done by overwriting the static member
    :py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
    format a single progress bar on a single line only.
    The extension to multiple progresses is done
    automatically based on the formatting of a single line.
    """
    def __init__(self,
                 count,
                 max_count = None,
                 prepend = None,
                 width = 'auto',
                 speed_calc_cycles = 10,
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 info_line = None,
                 show_stat = None):
        """
        :param count: shared variable for holding the current state
            (use :py:func:`UnsignedIntValue` for short hand creation)
        :type count: list/single value of multiprocessing.Value
        :param max_count: shared variable for holding the final state
        :type max_count: None or list/single value of multiprocessing.Value
        :param prepend: string to put in front of each progress output
        :type prepend: None, str or list of str
        :param width: the width to use for the progress line (fixed or automatically determined)
        :type width: int or "auto"
        :param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
            (example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
            the current state and state 4 updates before where the elapsed time will roughly be 4s)
        :param interval: seconds to wait before updating the progress
        :param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
            output level
        :param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigint: "stop" or "ign"
        :param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigterm: "stop" or "ign"
        :param info_line: additional text to show below the progress (use :py:func:`StringValue`
            for short hand creation of shared strings)
        :type info_line: None or multiprocessing.Array of characters
        .. note::
            As `Progress` is derived from :py:class:`Loop` it is highly encouraged to create
            any instance of Progress with a context manager (``with`` statement).
            This ensures that the subprocess showing the progress terminates on context exit.
            Otherwise one has to make sure that at some point the stop() routine is called.
        abstract example::
            with AProgressClass(...) as p:
                p.start()
                # do stuff and modify counter
        """
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        # converts count to list and do type check; a non-iterable count raises
        # TypeError in the for loop which marks the single-progress case
        try:
            for c in count:
                if not isinstance(c, Synchronized):
                    raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = True
        except TypeError:
            if not isinstance(count, Synchronized):
                raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = False
            count = [count]
        self.len = len(count)
        # converts max_count to list and do type check; plain ints are wrapped
        # into shared values so the subprocess sees later modifications
        if max_count is not None:
            if self.is_multi:
                try:
                    for i, m in enumerate(max_count):
                        if not isinstance(m, Synchronized):
                            max_count[i] = UnsignedIntValue(m)
                except TypeError:
                    raise TypeError("'max_count' must be iterable")
            else:
                if not isinstance(max_count, Synchronized):
                    max_count = UnsignedIntValue(max_count)
                max_count = [max_count]
        else:
            max_count = [None] * self.len
        self.start_time = []
        self.speed_calc_cycles = speed_calc_cycles
        self.width = width
        self.q = []
        self.prepend = []
        self.lock = []
        self.last_count = []
        self.last_speed = []
        # per-progress bookkeeping: queue of (count, time) samples, last seen
        # count/speed, a lock guarding the queue and the individual start time
        for i in range(self.len):
            self.q.append(myQueue())  # queue to save the last speed_calc_cycles
                                      # (time, count) information to calculate speed
            #self.q[-1].cancel_join_thread()
            self.last_count.append(UnsignedIntValue())
            self.last_speed.append(FloatValue())
            self.lock.append(mp.Lock())
            self.start_time.append(FloatValue(val=time.time()))
            if prepend is None:
                # no prepend given
                self.prepend.append('')
            else:
                if isinstance(prepend, str):
                    self.prepend.append(prepend)
                else:
                    # assume list of prepend, (needs to be a sequence)
                    self.prepend.append(prepend[i])
        self.max_count = max_count  # list of multiprocessing value type
        self.count = count          # list of multiprocessing value type
        self.interval = interval
        self.verbose = verbose
        self.show_on_exit = False
        self.add_args = {}          # extra kwargs forwarded to show_stat
        self.info_line = info_line
        self.show_stat = show_stat
        # setup loop class with func
        Loop.__init__(self,
                      func = _show_stat_wrapper_multi_Progress,
                      args = (self.count,
                              self.last_count,
                              self.start_time,
                              self.max_count,
                              self.speed_calc_cycles,
                              self.width,
                              self.q,
                              self.last_speed,
                              self.prepend,
                              show_stat,
                              self.len,
                              self.add_args,
                              self.lock,
                              self.info_line),
                      interval = interval,
                      sigint = sigint,
                      sigterm = sigterm,
                      auto_kill_on_last_resort = True)
    def __exit__(self, *exc_args):
        # context-manager exit: make sure the display subprocess terminates
        self.stop()
    @staticmethod
    def _calc(count,
              last_count,
              start_time,
              max_count,
              speed_calc_cycles,
              q,
              last_speed,
              lock):
        """do the pre calculations in order to get TET, speed, TTG
        :param count: shared value holding the current count
        :param last_count: count at the last call, allows to treat the case of no progress
            between sequential calls
        :param start_time: shared value with the time when start was triggered
        :param max_count: shared value with the maximal count, or None
        :param speed_calc_cycles: number of samples kept for the speed estimate
        :param q: queue of (count, time) samples used for the speed estimate
        :param last_speed: shared value caching the last speed estimate
        :param lock: lock protecting the queue against concurrent access
        :returns: tuple (count_value, max_count_value, speed, tet, ttg)
        """
        count_value = count.value
        start_time_value = start_time.value
        current_time = time.time()
        if last_count.value != count_value:
            # some progress happened
            with lock:
                # save current state (count, time) to queue
                q.put((count_value, current_time))
                # get older state from queue (or initial state)
                # to to speed estimation
                if q.qsize() > speed_calc_cycles:
                    old_count_value, old_time = q.get()
                else:
                    old_count_value, old_time = 0, start_time_value
            last_count.value = count_value
            #last_old_count.value = old_count_value
            #last_old_time.value = old_time
            speed = (count_value - old_count_value) / (current_time - old_time)
            last_speed.value = speed
        else:
            # progress has not changed since last call
            # use also old (cached) data from the queue
            #old_count_value, old_time = last_old_count.value, last_old_time.value
            speed = last_speed.value
        if (max_count is None):
            max_count_value = None
        else:
            max_count_value = max_count.value
        tet = (current_time - start_time_value)
        if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
            ttg = None
        else:
            ttg = math.ceil((max_count_value - count_value) / speed)
        return count_value, max_count_value, speed, tet, ttg
    def _reset_all(self):
        """
        reset all progress information
        """
        for i in range(self.len):
            self._reset_i(i)
    def _reset_i(self, i):
        """
        reset i-th progress information
        """
        self.count[i].value=0
        log.debug("reset counter %s", i)
        # drain the sample queue under the lock so the speed estimate restarts
        self.lock[i].acquire()
        for x in range(self.q[i].qsize()):
            self.q[i].get()
        self.lock[i].release()
        self.start_time[i].value = time.time()
    def _show_stat(self):
        """
        convenient functions to call the static show_stat_wrapper_multi with
        the given class members
        """
        _show_stat_wrapper_multi_Progress(self.count,
                                          self.last_count,
                                          self.start_time,
                                          self.max_count,
                                          self.speed_calc_cycles,
                                          self.width,
                                          self.q,
                                          self.last_speed,
                                          self.prepend,
                                          self.show_stat,
                                          self.len,
                                          self.add_args,
                                          self.lock,
                                          self.info_line,
                                          no_move_up=True)
    def reset(self, i = None):
        """resets the progress information
        :param i: tell which progress to reset, if None reset all
        :type i: None, int
        """
        if i is None:
            self._reset_all()
        else:
            self._reset_i(i)
    def start(self):
        """
        start the subprocess which shows the progress periodically
        """
        # before printing any output to stout, we can now check this
        # variable to see if any other ProgressBar has reserved that
        # terminal.
        if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
            if not terminal.terminal_reserve(progress_obj=self):
                log.warning("tty already reserved, NOT starting the progress loop!")
                return
        super(Progress, self).start()
        self.show_on_exit = True
    def stop(self):
        """
        trigger clean up by hand, needs to be done when not using
        context management via 'with' statement
        - will terminate loop process
        - show a last progress -> see the full 100% on exit
        - releases terminal reservation
        """
        super(Progress, self).stop()
        terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
        if self.show_on_exit:
            # a custom pipe handler gets the final stats as a string instead
            # of having them printed to the real stdout
            if not isinstance(self.pipe_handler, PipeToPrint):
                myout = inMemoryBuffer()
                stdout = sys.stdout
                sys.stdout = myout
                self._show_stat()
                self.pipe_handler(myout.getvalue())
                sys.stdout = stdout
            else:
                self._show_stat()
            print()
        self.show_on_exit = False
def show_stat_ProgressBar(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Render the classic 'wget'/'pv'-like progress line for progress *i*."""
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        print("{}{}{} [{}] {}#{} ".format(terminal.ESC_NO_CHAR_ATTR,
                                          COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                          humanize_time(tet), humanize_speed(speed),
                                          terminal.ESC_BOLD + COLTHM['BAR_COL'],
                                          count_value))
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress and show as bar on screen
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{}{}{} [{}] ".format(terminal.ESC_NO_CHAR_ATTR,
                                   COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                   humanize_time(tet),
                                   humanize_speed(speed))
        # visible length (escape sequences excluded) determines the bar width
        l = terminal.len_string_without_ESC(s1 + s3)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        print(s1 + s2 + s3)
class ProgressBar(Progress):
    """
    A progress bar similar to the ones known from 'wget' or 'pv'.
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        super(ProgressBar, self).__init__(*args, show_stat = show_stat_ProgressBar, **kwargs)
def show_stat_ProgressBarCounter(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Render counter statistics (resets so far and reset speed) followed by
    the regular per-task progress for progress *i*."""
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # leading part: overall TET, reset speed and number of finished tasks
    s_c = "{}{}{} [{}] {}#{} - ".format(terminal.ESC_NO_CHAR_ATTR,
                                        COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                        humanize_time(counter_tet),
                                        humanize_speed(counter_speed.value),
                                        COLTHM['BAR_COL'],
                                        str(counter_count.value) + terminal.ESC_DEFAULT)
    if width == 'auto':
        width = get_terminal_width()
    if (max_count_value is None) or (max_count_value == 0):
        # no total known: append absolute count and speed only
        s_c = "{}{} [{}] {}#{} ".format(s_c,
                                        humanize_time(tet),
                                        humanize_speed(speed),
                                        COLTHM['BAR_COL'],
                                        str(count_value) + terminal.ESC_DEFAULT)
    else:
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{} [{}] ".format(humanize_time(tet), humanize_speed(speed))
        # bar width derives from the visible length (escape sequences excluded)
        l = terminal.len_string_without_ESC(s1 + s3 + s_c)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        s_c = s_c + s1 + s2 + s3
    print(s_c)
class ProgressBarCounter(Progress):
    """
    records also the time of each reset and calculates the speed
    of the resets.
    shows the TET since init (not effected by reset)
    the speed of the resets (number of finished processed per time)
    and the number of finished processes
    after that also show a progress of each process
    max_count > 0 and not None -> bar
    max_count == None -> absolute count statistic
    max_count == 0 -> hide process statistic at all
    """
    def __init__(self, speed_calc_cycles_counter=5, **kwargs):
        """
        :param speed_calc_cycles_counter: number of resets used to estimate
            the reset speed (analogous to speed_calc_cycles of Progress)
        """
        Progress.__init__(self, show_stat = show_stat_ProgressBarCounter, **kwargs)
        # per-progress reset bookkeeping: total resets, sample queue, speed
        self.counter_count = []
        self.counter_q = []
        self.counter_speed = []
        for i in range(self.len):
            self.counter_count.append(UnsignedIntValue(val=0))
            self.counter_q.append(myQueue())
            self.counter_speed.append(FloatValue())
        self.counter_speed_calc_cycles = speed_calc_cycles_counter
        self.init_time = time.time()
        # exposed to show_stat via the add_args kwargs mechanism
        self.add_args['counter_count'] = self.counter_count
        self.add_args['counter_speed'] = self.counter_speed
        self.add_args['init_time'] = self.init_time
    def get_counter_count(self, i=0):
        """Return how often progress *i* has been reset so far."""
        return self.counter_count[i].value
    def _reset_i(self, i):
        """Count the reset of progress *i*, update the reset speed estimate
        and then perform the regular Progress reset."""
        c = self.counter_count[i]
        with c.get_lock():
            c.value += 1
            count_value = c.value
        q = self.counter_q[i]
        current_time = time.time()
        q.put((count_value, current_time))
        # keep at most counter_speed_calc_cycles samples for the estimate
        if q.qsize() > self.counter_speed_calc_cycles:
            old_count_value, old_time = q.get()
        else:
            old_count_value, old_time = 0, self.init_time
        speed = (count_value - old_count_value) / (current_time - old_time)
        self.counter_speed[i].value = speed
        Progress._reset_i(self, i)
def get_d(s1, s2, width, lp, lps):
    """Check whether the stat strings *s1* and *s2* fit into *width* columns.

    *lp* and *lps* are the lengths of the prepend text and the percentage
    string, the 2 accounts for the enclosing brackets.  Returns
    ``(s1, s2, left_pad, right_pad)`` when they fit, otherwise None.
    """
    visible = (len(terminal.remove_ESC_SEQ_from_string(s1)) +
               len(terminal.remove_ESC_SEQ_from_string(s2)))
    spare = width - visible - 2 - lp - lps
    if spare < 0:
        return None
    left = spare // 2
    return s1, s2, left, spare - left
def full_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Most verbose layout: TET/speed/TTG on the left, ETA/ORT on the right."""
    left = "TET {} {:>12} TTG {}".format(tet, speed, ttg)
    right = "ETA {} ORT {}".format(eta, ort)
    return get_d(left, right, width, lp, lps)
def full_minor_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Same data as full_stat but with single-letter keywords to save space."""
    left = "E {} {:>12} G {}".format(tet, speed, ttg)
    right = "A {} O {}".format(eta, ort)
    return get_d(left, right, width, lp, lps)
def reduced_1_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Reduced layout: drops the ETA, keeps speed and ORT."""
    left = "E {} {:>12} G {}".format(tet, speed, ttg)
    right = "O {}".format(ort)
    return get_d(left, right, width, lp, lps)
def reduced_2_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Further reduced layout: also drops the speed estimate."""
    left = "E {} G {}".format(tet, ttg)
    right = "O {}".format(ort)
    return get_d(left, right, width, lp, lps)
def reduced_3_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Minimal informative layout: only TET and TTG remain."""
    left = "E {} G {}".format(tet, ttg)
    return get_d(left, '', width, lp, lps)
def reduced_4_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Last-resort layout: no textual statistics at all, just the bar."""
    return get_d('', '', width, lp, lps)
def kw_bold(s, ch_after):
    """Wrap the known stat keywords in bold escape sequences whenever they
    are immediately followed by one of the characters in *ch_after*."""
    keywords = ['TET', 'TTG', 'ETA', 'ORT', 'E', 'G', 'A', 'O']
    for kw in keywords:
        for suffix in ch_after:
            needle = kw + suffix
            replacement = terminal.ESC_BOLD + kw + terminal.ESC_RESET_BOLD + suffix
            s = s.replace(needle, replacement)
    return s
def _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Build the 'fancy' (htop-like) stat string where the textual statistics
    are embedded inside the bar itself; returns the string (does not print)."""
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        stat = "{}{} [{}] {}#{} ".format(COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress
        p = count_value / max_count_value
        if p < 1:
            ps = " {:.1%} ".format(p)
        else:
            ps = " {:.0%} ".format(p)
        if ttg is None:
            eta = '--'
            ort = None
        else:
            eta = datetime.datetime.fromtimestamp(time.time() + ttg).strftime("%Y%m%d_%H:%M:%S")
            ort = tet + ttg
        tet = humanize_time(tet)
        speed = '[' + humanize_speed(speed) + ']'
        ttg = humanize_time(ttg)
        ort = humanize_time(ort)
        repl_ch = '-'
        lp = len(prepend)
        # try the layouts from most to least verbose until one fits the width
        args = p, tet, speed, ttg, eta, ort, repl_ch, width, lp, len(ps)
        res = full_stat(*args)
        if res is None:
            res = full_minor_stat(*args)
            if res is None:
                res = reduced_1_stat(*args)
                if res is None:
                    res = reduced_2_stat(*args)
                    if res is None:
                        res = reduced_3_stat(*args)
                        if res is None:
                            res = reduced_4_stat(*args)
        if res is not None:
            s1, s2, d1, d2 = res
            s = s1 + ' ' * d1 + ps + ' ' * d2 + s2
            # everything left of idx_p is the 'filled' part of the bar
            idx_p = math.ceil( (width-lp-2)*p)
            s_before = s[:idx_p].replace(' ', repl_ch)
            if (len(s_before) > 0) and (s_before[-1] == repl_ch):
                s_before = s_before[:-1] + '>'
            s_after = s[idx_p:]
            s_before = kw_bold(s_before, ch_after=[repl_ch, '>'])
            s_after = kw_bold(s_after, ch_after=[' '])
            stat = (COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT +
                    COLTHM['BAR_COL'] + terminal.ESC_BOLD + '[' + terminal.ESC_RESET_BOLD + s_before + terminal.ESC_DEFAULT +
                    s_after + terminal.ESC_BOLD + COLTHM['BAR_COL'] + ']' + terminal.ESC_NO_CHAR_ATTR)
        else:
            # terminal too narrow for any layout: show the bare percentage
            ps = ps.strip()
            if p == 1:
                ps = ' ' + ps
            stat = prepend + ps
    return stat
def show_stat_ProgressBarFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Render the fancy (htop-like) progress line to stdout."""
    print(_stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs))
class ProgressBarFancy(Progress):
    """
    A progress bar whose coloring indicates the current status,
    similar to the bars known from 'htop'.
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        super(ProgressBarFancy, self).__init__(*args, show_stat = show_stat_ProgressBarFancy, **kwargs)
def show_stat_ProgressBarCounterFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Render the counter statistics followed by the fancy per-task bar for
    progress *i*."""
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # leading part: overall TET, reset speed and number of finished tasks
    s_c = "{}{}{} [{}] {}#{}".format(terminal.ESC_NO_CHAR_ATTR,
                                     COLTHM['PRE_COL']+prepend+terminal.ESC_DEFAULT,
                                     humanize_time(counter_tet),
                                     humanize_speed(counter_speed.value),
                                     COLTHM['BAR_COL'],
                                     str(counter_count.value) + terminal.ESC_DEFAULT)
    if max_count_value is not None:
        if width == 'auto':
            width = get_terminal_width()
        s_c += ' - '
        if max_count_value == 0:
            # max_count == 0 means: hide the per-task statistic, show counts only
            s_c = "{}{} [{}] {}#{} ".format(s_c, humanize_time(tet), humanize_speed(speed),
                                            COLTHM['BAR_COL'], str(count_value)+terminal.ESC_DEFAULT)
        else:
            # remaining width is what _stat may use for the fancy bar
            _width = width - terminal.len_string_without_ESC(s_c)
            s_c += _stat(count_value, max_count_value, '', speed, tet, ttg, _width, i)
    print(s_c)
class ProgressBarCounterFancy(ProgressBarCounter):
    """Counter progress display rendered with the fancy (htop-like) layout."""
    def __init__(self, *args, **kwargs):
        super(ProgressBarCounterFancy, self).__init__(*args, **kwargs)
        # swap in the fancy renderer after the counter setup is done
        self.show_stat = show_stat_ProgressBarCounterFancy
class SIG_handler_Loop(object):
    """class to setup signal handling for the Loop class

    Note: each subprocess receives the default signal handling from it's parent.
    If the signal function from the module signal is evoked within the subprocess
    this default behavior can be overwritten.

    'ign' ignores the signal, 'stop' raises LoopInterruptError which
    terminates the loop before its next repetition.
    """
    def __init__(self, sigint, sigterm, log, prefix):
        """
        :param sigint: 'ign' or 'stop' -- behavior on SIGINT
        :param sigterm: 'ign' or 'stop' -- behavior on SIGTERM
        :param log: logger used to report received signals
        :param prefix: text prefix kept for log messages
        """
        self.set_signal(signal.SIGINT, sigint)
        self.set_signal(signal.SIGTERM, sigterm)
        self.prefix = prefix
        self.log = log
        self.log.info("setup signal handler for loop (SIGINT:%s, SIGTERM:%s)", sigint, sigterm)
    def set_signal(self, sig, handler_str):
        """Install the handler selected by *handler_str* ('ign' or 'stop') for *sig*.

        :raises TypeError: for an unknown handler string
        """
        if handler_str == 'ign':
            signal.signal(sig, self._ignore_signal)
        elif handler_str == 'stop':
            signal.signal(sig, self._stop_on_signal)
        else:
            # BUGFIX: the message used to be passed as a second positional
            # argument to TypeError instead of being %-formatted, so the
            # handler string was never interpolated into the message
            raise TypeError("unknown signal handler string '%s'" % handler_str)
    def _ignore_signal(self, signal, frame):
        self.log.debug("ignore received sig %s", signal_dict[signal])
    def _stop_on_signal(self, signal, frame):
        self.log.info("received sig %s -> raise InterruptedError", signal_dict[signal])
        raise LoopInterruptError()
def FloatValue(val=0.):
    """Create a shared ``multiprocessing.Value`` of C type double ('d'),
    initialised to *val* and protected by a lock."""
    return mp.Value('d', val, lock=True)
def UnsignedIntValue(val=0):
    """Create a shared ``multiprocessing.Value`` of C type unsigned int ('I'),
    initialised to *val* and protected by a lock."""
    return mp.Value('I', val, lock=True)
def StringValue(num_of_bytes):
    """Create a shared ``multiprocessing.Array`` of characters with room for
    *num_of_bytes* bytes (zero-initialised), protected by a lock."""
    buf = _jm_compatible_bytearray(num_of_bytes)
    return mp.Array('c', buf, lock=True)
def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort = False):
    """Try to shut down *proc*, escalating from join over SIGTERM to SIGKILL.

    :param proc: the multiprocessing.Process to terminate
    :param prefix: NOTE(review): not used in the body -- kept for signature
        compatibility, confirm before removing
    :param timeout: seconds granted for the initial graceful join
    :param auto_kill_on_last_resort: when True, send SIGKILL without asking
        the user interactively
    :returns: True when the process stopped, False when the user chose to
        ignore it
    """
    proc.join(timeout)
    if not proc.is_alive():
        log.debug("termination of process (pid %s) within timeout of %s SUCCEEDED!", proc.pid, humanize_time(timeout))
        return True
    # process still runs -> send SIGTERM -> see what happens
    log.warning("termination of process (pid %s) within given timeout of %s FAILED!", proc.pid, humanize_time(timeout))
    proc.terminate()
    new_timeout = 3*timeout
    log.debug("wait for termination (timeout %s)", humanize_time(new_timeout))
    proc.join(new_timeout)
    if not proc.is_alive():
        log.info("termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!", proc.pid, humanize_time(new_timeout))
        return True
    log.warning("termination of process (pid %s) via SIGTERM with timeout of %s FAILED!", proc.pid, humanize_time(new_timeout))
    log.debug("auto_kill_on_last_resort is %s", auto_kill_on_last_resort)
    # interactive escalation loop: 'k' kills, '' retries SIGTERM,
    # 'ignore' gives up on the process
    answer = 'k' if auto_kill_on_last_resort else '_'
    while True:
        log.debug("answer string is %s", answer)
        if answer == 'k':
            log.warning("send SIGKILL to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGKILL)
            time.sleep(0.1)
        else:
            log.info("send SIGTERM to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGTERM)
            time.sleep(0.1)
        if not proc.is_alive():
            log.info("process (pid %s) has stopped running!", proc.pid)
            return True
        else:
            log.warning("process (pid %s) is still running!", proc.pid)
        print("the process (pid {}) seems still running".format(proc.pid))
        try:
            answer = input("press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore")
        except Exception as e:
            # stdin unavailable (e.g. non-interactive run): fall back to SIGKILL
            log.error("could not ask for sending SIGKILL due to {}".format(type(e)))
            log.info(traceback.format_exc())
            log.warning("send SIGKILL now")
            answer = 'k'
        if answer == 'ignore':
            log.warning("ignore process %s", proc.pid)
            return False
        elif answer != 'k':
            answer = ''
def getCountKwargs(func):
    """Return the ["count kwarg", "count_max kwarg"] pair accepted by *func*.

    Valid combinations are defined in `progress.validCountKwargs`.
    Returns None when the function accepts no such keyword pair.
    """
    code = getattr(func, "__code__", None)
    if code is not None:
        # positional + keyword argument names of the function
        accepted = code.co_varnames[:code.co_argcount]
        for pair in validCountKwargs:
            if pair[0] in accepted and pair[1] in accepted:
                return pair
    return None
def humanize_speed(c_per_sec):
    """Convert a speed in counts per second to counts per [s, min, h, d],
    choosing the smallest unit that yields a value greater than zero."""
    units = ['c/s', 'c/min', 'c/h', 'c/d']
    factors = [60, 60, 24]
    value = c_per_sec
    idx = 0
    # scale up until the value reaches 1 or we run out of units
    while 0 < value < 1 and idx < len(factors):
        value *= factors[idx]
        idx += 1
    return "{:.1f}{}".format(value, units[idx])
def humanize_time(secs):
    """Format a duration in seconds: '--' for None, milliseconds below 1s,
    seconds below 10s, otherwise hh:mm:ss."""
    if secs is None:
        return '--'
    if secs < 1:
        return "{:.2f}ms".format(secs * 1000)
    if secs < 10:
        return "{:.2f}s".format(secs)
    minutes, seconds = divmod(secs, 60)
    hours, minutes = divmod(minutes, 60)
    return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
def codecov_subprocess_check():
    """Marker function whose output proves that coverage collection also
    works inside a spawned subprocess."""
    print("this line will be only called from a subprocess")
# alias kept so tests can swap in a different queue implementation
myQueue = mp.Queue
# a mapping from the numeric values of the signals to their names used in the
# standard python module signals
signal_dict = {}
for s in dir(signal):
    if s.startswith('SIG') and s[3] != '_':
        n = getattr(signal, s)
        if n in signal_dict:
            # several names may share one number (e.g. SIGABRT/SIGIOT)
            signal_dict[n] += ('/'+s)
        else:
            signal_dict[n] = s
# color themes: escape sequences for the prepend text and the bar itself,
# plus the number of additional lines to move the cursor up
_colthm_term_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_GREEN, 'ADD_LNS_UP':0}
_colthm_ipyt_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_BLUE, 'ADD_LNS_UP':0}
_colthm_wincmd_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_GREEN, 'ADD_LNS_UP':1}
color_themes = {'term_default': _colthm_term_default,
                'ipyt_default': _colthm_ipyt_default,
                'wincmd_default': _colthm_wincmd_default}
# pick a default theme matching the platform's terminal capabilities
if platform.system() == 'Windows':
    COLTHM = _colthm_wincmd_default
else:
    COLTHM = _colthm_term_default
def choose_color_theme(name):
    """Select the active color theme by *name*; warn when the name is unknown
    (the current theme is then kept)."""
    global COLTHM
    theme = color_themes.get(name)
    if theme is None:
        warnings.warn("no such color theme {}".format(name))
    else:
        COLTHM = theme
# keyword arguments that define counting in wrapped functions
# (each pair is [current-count kwarg, maximum-count kwarg]; see getCountKwargs)
validCountKwargs = [
    [ "count", "count_max"],
    [ "count", "max_count"],
    [ "c", "m"],
    [ "jmc", "jmm"],
]
|
cimatosa/progression | progression/progress.py | show_stat_base | python | def show_stat_base(count_value, max_count_value, prepend, speed, tet, ttg, width, **kwargs):
raise NotImplementedError | A function that formats the progress information
This function will be called periodically for each progress that is monitored.
Overwrite this function in a subclass to implement a specific formating of the progress information
:param count_value: a number holding the current state
:param max_count_value: should be the largest number `count_value` can reach
:param prepend: additional text for each progress
:param speed: the speed estimation
:param tet: the total elapsed time
:param ttg: the time to go
:param width: the width for the progressbar, when set to `"auto"` this function
should try to detect the width available
:type width: int or "auto" | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L574-L590 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Progression module
------------------
This module provides the (so far) four variants to display progress information:
* :py:class:`.ProgressBar`
This class monitors one or multiple processes showing the total elapsed time (TET), the current speed
estimated from the most recent updated, a colored bar showing the progress and an
estimate for the remaining time, also called time to go (TTG).
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 5.83s [7.2c/s] <span style="color:#00ff00"><b>[=====================> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounter`
If a single process is intended to do several sequential task, the :py:class:`.ProgressBarCounter` class can keep track of the number
of accomplished tasks on top of monitoring the individual task just like :py:class:`.ProgressBar` does.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre><span style="color:#00ff00"><b> [</b><b>TET</b>-5.83s-----[7.2c/s]-<b>TTG</b>-8.05s-></span> 42.0% <b>ETA</b> 20161011_16:52:52 <b>ORT</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
* :py:class:`.ProgressBarFancy`
This class intends to be a replacement for :py:class:`.ProgressBar` with slightly more information and
better handling of small terminal widths.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - 5.83s [7.2c/s] <span style="color:#00ff00"><b>[===========> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounterFancy`
Just as :py:class:`.ProgressBarFancy` this replaces :py:class:`.ProgressBarCounter`.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - <span style="color:#800000"></span><span style="color:#00ff00"><b>[</b><b>E</b>-5.83s-----[7.2c/s]-<b>G</b>-8</span>.05s 42.0% <b>O</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
.. autoclass:: Progress
:members:
:inherited-members:
.. autoclass:: ProgressBar
:members:
.. autoclass:: ProgressBarCounter
:members:
.. autoclass:: ProgressBarFancy
:members:
.. autoclass:: ProgressBarCounterFancy
:members:
.. autofunction:: UnsignedIntValue
.. autofunction:: FloatValue
.. autofunction:: StringValue
"""
from __future__ import division, print_function
import datetime
import io
import logging
from logging.handlers import QueueHandler, QueueListener
import math
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
import os
import sys
import signal
import subprocess as sp
import threading
import time
import traceback
import warnings
from . import terminal
import platform
# optional IPython support: both ipywidgets and IPython.display must import,
# otherwise HTML output is disabled and a warning is emitted
_IPYTHON = True
try:
    import ipywidgets
except ImportError:
    _IPYTHON = False
    warnings.warn("could not load ipywidgets (IPython HTML output will not work)", category=ImportWarning)
except DeprecationWarning:
    pass
try:
    from IPython.display import display
except ImportError:
    _IPYTHON = False
    warnings.warn("could not load IPython (IPython HTML output will not work)", category=ImportWarning)
# Magic conversion from 3 to 2
if sys.version_info[0] == 2:
    ProcessLookupError = OSError
    inMemoryBuffer = io.BytesIO
    # NOTE(review): math.ceil is monkey-patched module-wide so it returns an
    # int as on Python 3 -- this affects every user of math in this process
    old_math_ceil = math.ceil
    def my_int_ceil(f):
        return int(old_math_ceil(f))
    math.ceil = my_int_ceil
    _jm_compatible_bytearray = lambda x: x
    class TimeoutError(Exception):
        pass
elif sys.version_info[0] == 3:
    inMemoryBuffer = io.StringIO
    _jm_compatible_bytearray = bytearray
class MultiLineFormatter(logging.Formatter):
    """Logging formatter that indents continuation lines of a multiline
    message so they line up under the first line::

        <HEAD> msg_line1
               msg_line2
               ...
    """
    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        # everything before the first occurrence of the raw message text
        # is the header produced by the format string
        header = formatted.partition(record.message)[0]
        # pad every continuation line with spaces matching the header width
        return formatted.replace('\n', '\n' + ' ' * len(header))
# def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
# def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
# module-wide formatter: pads continuation lines of multiline log messages
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
# def_handl.setFormatter(fmt) # ... and pads multiline messaged
log = logging.getLogger(__name__) # creates the default log for this module
# log.addHandler(def_handl)
class LoopExceptionError(RuntimeError):
    """Raised when the loop's wrapped function terminated with a non-zero exitcode."""
    pass
class LoopInterruptError(Exception):
    """Raised by the signal handler to break out of the loop on SIGINT/SIGTERM."""
    pass
class StdoutPipe(object):
    """Stream-like replacement (e.g. for ``sys.stdout``) which forwards every
    chunk of written data through the ``send`` method of a connection object.

    example usage:

        >>> import sys
        >>> from multiprocessing import Pipe
        >>> from progression import StdoutPipe
        >>> conn_recv, conn_send = Pipe(False)
        >>> sys.stdout = StdoutPipe(conn_send)
        >>> print("hallo welt", end='')  # this is now going through the pipe
        >>> msg = conn_recv.recv()
        >>> sys.stdout = sys.__stdout__
        >>> print(msg)
        hallo welt
        >>> assert msg == "hallo welt"
    """
    def __init__(self, conn):
        self.conn = conn
    def flush(self):
        # nothing is buffered locally, so flushing is a no-op
        pass
    def write(self, b):
        # hand the chunk over to whoever holds the receiving end
        self.conn.send(b)
class PipeToPrint(object):
    """Default pipe handler: echoes every received chunk straight to stdout."""
    def __call__(self, b):
        # no trailing newline so successive chunks concatenate seamlessly
        print(b, end='')
    def close(self):
        """Nothing to release for plain printing."""
        pass
class PipeFromProgressToIPythonHTMLWidget(object):
    """Pipe handler that renders progress output into an IPython HTML widget.

    Incoming chunks are buffered until the magic terminal ending sequence is
    seen; the accumulated ANSI/ESC styled text is then converted to HTML and
    pushed into the widget.  Requires ipywidgets and IPython (see the
    module-level _IPYTHON flag).
    """
    def __init__(self,):
        self.htmlWidget = ipywidgets.widgets.HTML()
        display(self.htmlWidget)
        self._buff = ""
    def __call__(self, b):
        self._buff += b
        # ESC_MY_MAGIC_ENDING marks the end of one complete progress frame
        if b.endswith(terminal.ESC_MY_MAGIC_ENDING):
            buff = terminal.ESC_SEQ_to_HTML(self._buff)
            self.htmlWidget.value = '<style>.widget-html{font-family:monospace}</style><pre>'+buff+'</pre>'
            self._buff = ""
    def close(self):
        self.htmlWidget.close()
# module-level pipe handler class used by Loop; reassigned by choose_pipe_handler()
PipeHandler = PipeToPrint
def choose_pipe_handler(kind = 'print', color_theme = None):
    """Select the module-wide pipe handler (and a matching color theme).

    :param kind: 'print' for plain terminal output, 'ipythonhtml' for an
        IPython HTML widget (requires IPython and ipywidgets to be available)
    :param color_theme: name passed to choose_color_theme; if None a default
        matching the handler kind is used
    :raises ValueError: if ``kind`` is not one of the supported values
    """
    global PipeHandler
    if kind == 'print':
        PipeHandler = PipeToPrint
        if color_theme is None:
            choose_color_theme('term_default')
        else:
            choose_color_theme(color_theme)
    elif kind == 'ipythonhtml':
        if _IPYTHON:
            PipeHandler = PipeFromProgressToIPythonHTMLWidget
            if color_theme is None:
                choose_color_theme('ipyt_default')
            else:
                choose_color_theme(color_theme)
        else:
            warnings.warn("can not choose ipythonHTML (IPython and/or ipywidgets were not loaded)")
    else:
        # BUG FIX: the '{}' placeholder was never filled in -- format in the
        # offending kind so the error message is actually informative
        raise ValueError("unknown kind '{}' for pipe_handler, use one out of ('print', 'ipythonhtml')".format(kind))
def get_terminal_width():
    """Return the line width the current pipe handler should render for."""
    # real terminals report their width; the HTML widget uses a fixed 80 columns
    if PipeHandler is PipeToPrint:
        return terminal.get_terminal_width()
    if PipeHandler is PipeFromProgressToIPythonHTMLWidget:
        return 80
    raise NotImplementedError
def get_identifier(name=None, pid=None, bold=True):
    """Build a short identifier string ``"<name>_<pid>"`` (or ``"PID_<pid>"``
    when no name is given), optionally wrapped in terminal bold escapes.

    :param name: label part; None falls back to the literal "PID"
    :param pid: process id; None uses the current process' pid
    :param bold: wrap the identifier in bold escape sequences
    """
    if pid is None:
        pid = os.getpid()
    if bold:
        esc_on, esc_off = terminal.ESC_BOLD, terminal.ESC_NO_CHAR_ATTR
    else:
        esc_on = esc_off = ""
    label = "PID" if name is None else name
    return "{}{}_{}{}".format(esc_on, label, pid, esc_off)
def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
                       logging_level, conn_send, func_running, log_queue):
    """Body of the subprocess spawned by :class:`Loop`.

    Repeatedly calls ``func(*args)`` every ``interval`` seconds until
    ``shared_mem_run`` becomes False, ``func`` returns True, ``func`` raises,
    or a stop signal arrives (policy set via ``sigint``/``sigterm``).

    :param func: callable executed each cycle
    :param args: positional arguments passed to ``func``
    :param shared_mem_run: shared bool; the loop keeps cycling while True
    :param shared_mem_pause: shared bool; while True, ``func`` is not called
    :param interval: seconds to sleep between cycles
    :param sigint: SIGINT policy string ('stop' or 'ign')
    :param sigterm: SIGTERM policy string ('stop' or 'ign')
    :param name: identifier used for the per-process logger
    :param logging_level: level set on the per-process logger
    :param conn_send: pipe end; this process' stdout is redirected through it
    :param func_running: shared bool flag, set True once the loop is up
    :param log_queue: queue forwarding log records to the parent process
    """
    prefix = get_identifier(name) + ' '
    global log
    # replace the module logger with a per-process child logger that ships
    # its records back to the parent via the queue handler
    log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
    log.setLevel(logging_level)
    log.addHandler(QueueHandler(log_queue))
    # anything printed by func travels through the pipe to the parent's
    # monitor thread instead of the real stdout
    sys.stdout = StdoutPipe(conn_send)
    log.debug("enter wrapper_func")
    SIG_handler_Loop(sigint, sigterm, log, prefix)
    func_running.value = True
    error = False
    while shared_mem_run.value:
        try:
            # in pause mode, simply sleep
            if shared_mem_pause.value:
                quit_loop = False
            else:
                # if not pause mode -> call func and see what happens
                try:
                    quit_loop = func(*args)
                except LoopInterruptError:
                    raise
                except Exception as e:
                    # any other exception terminates the loop with an error
                    log.error("error %s occurred in loop calling 'func(*args)'", type(e))
                    log.info("show traceback.print_exc()\n%s", traceback.format_exc())
                    error = True
                    break
            if quit_loop is True:
                log.debug("loop stooped because func returned True")
                break
            time.sleep(interval)
        except LoopInterruptError:
            # raised by the signal handler (SIGINT/SIGTERM with 'stop' policy)
            log.debug("quit wrapper_func due to InterruptedError")
            break
    func_running.value = False
    if error:
        # non-zero exitcode tells the parent that func raised
        sys.exit(-1)
    else:
        log.debug("wrapper_func terminates gracefully")
    # gets rid of the following warnings
    #   Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
    #   ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
    try:
        if mp.get_start_method() == "spawn":
            sys.stdin.close()
    except AttributeError:
        # mp.get_start_method is not available on older Pythons -- TODO confirm
        pass
class LoopTimeoutError(TimeoutError):
    """Raised by Loop.start when the loop function does not come up in time."""
    pass
class Loop(object):
    """
    class to run a function periodically in a separate process.

    In case the called function returns True, the loop will stop.
    Otherwise a time interval given by interval will be slept before
    another execution is triggered.

    The shared memory variable _run (accessible via the class property run)
    also determines if the function is executed another time. If set to False
    the execution stops.

    For safe cleanup (and in order to catch any Errors)
    it is advisable to instantiate this class
    using 'with' statement as follows:

        with Loop(**kwargs) as my_loop:
            my_loop.start()
            ...

    this will guarantee you that the spawned loop process is
    down when exiting the 'with' scope.

    The only circumstance where the process is still running is
    when you set auto_kill_on_last_resort to False and answer the
    question to send SIGKILL with no.
    """
    def __init__(self,
                 func,
                 args = (),
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 auto_kill_on_last_resort = False,
                 raise_error = True):
        """
        func [callable] - function to be called periodically

        args [tuple] - arguments passed to func when calling

        interval [pos number] - time to "sleep" between each call

        verbose - DEPRECATED, only kept for compatibility, use global log.level to
        specify verbosity

        sigint [string] - signal handler string to set SIGINT behavior (see below)

        sigterm [string] - signal handler string to set SIGTERM behavior (see below)

        auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
        to loop process in case normal stop and SIGTERM failed. If set True, send SIGKILL
        without asking.

        raise_error [bool] - if True, stop() raises LoopExceptionError when the
        subprocess terminated with exitcode 255 (i.e. the loop function raised)

        the signal handler string may be one of the following
            ign: ignore the incoming signal
            stop: raise InterruptedError which is caught silently.
        """
        self._proc = None
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        self.func = func
        self.args = args
        self.interval = interval
        assert self.interval >= 0
        # shared flags so the subprocess can observe state changes
        self._run = mp.Value('b', False)           # keep looping while True
        self._pause = mp.Value('b', False)         # skip func calls while True
        self._func_running = mp.Value('b', False)  # set by the subprocess once up
        self._sigint = sigint
        self._sigterm = sigterm
        self._auto_kill_on_last_resort = auto_kill_on_last_resort
        log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
        self._monitor_thread = None
        # receives the subprocess' redirected stdout (see _monitor_stdout_pipe)
        self.pipe_handler = PipeHandler()
        self.raise_error = raise_error
    def __enter__(self):
        return self
    def __exit__(self, *exc_args):
        if self.is_alive():
            log.debug("loop is still running on context exit")
        else:
            log.debug("loop has stopped on context exit")
        # stop() is safe to call in either case
        self.stop()
    def __cleanup(self):
        """
        Wait at most twice as long as the given repetition interval
        for the _wrapper_function to terminate.

        If after that time the _wrapper_function has not terminated,
        send SIGTERM to the process.

        Wait at most five times as long as the given repetition interval
        for the _wrapper_function to terminate.

        If the process is still running send SIGKILL automatically if
        auto_kill_on_last_resort was set True or ask the
        user to confirm sending SIGKILL
        """
        # set run to False and wait some time -> see what happens
        self._run.value = False
        if check_process_termination(proc = self._proc,
                                     timeout = 2*self.interval,
                                     prefix = '',
                                     auto_kill_on_last_resort = self._auto_kill_on_last_resort):
            log.debug("cleanup successful")
        else:
            raise RuntimeError("cleanup FAILED!")
        try:
            # closing the sending pipe end lets _monitor_stdout_pipe exit
            # via EOFError
            self.conn_send.close()
            self._log_queue_listener.stop()
        except OSError:
            pass
        log.debug("wait for monitor thread to join")
        self._monitor_thread.join()
        log.debug("monitor thread to joined")
        self._func_running.value = False
    def _monitor_stdout_pipe(self):
        # runs as a daemon thread in the parent: everything the subprocess
        # writes to its redirected stdout arrives here and is dispatched to
        # the pipe handler; EOFError means the sending end was closed
        while True:
            try:
                b = self.conn_recv.recv()
                self.pipe_handler(b)
            except EOFError:
                break
    def start(self, timeout=None):
        """
        uses multiprocess Process to call _wrapper_func in subprocess

        :param timeout: seconds to wait for the loop function to come up;
            LoopTimeoutError is raised when exceeded (None waits forever)
        """
        if self.is_alive():
            log.warning("a process with pid %s is already running", self._proc.pid)
            return
        self._run.value = True
        self._func_running.value = False
        name = self.__class__.__name__
        # pipe carrying the subprocess' stdout to the monitor thread
        self.conn_recv, self.conn_send = mp.Pipe(False)
        self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
        self._monitor_thread.daemon=True
        self._monitor_thread.start()
        log.debug("started monitor thread")
        # log records emitted in the subprocess are relayed to this process'
        # handlers via the queue listener
        self._log_queue = mp.Queue()
        self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
        self._log_queue_listener.start()
        args = (self.func, self.args, self._run, self._pause, self.interval,
                self._sigint, self._sigterm, name, log.level, self.conn_send,
                self._func_running, self._log_queue)
        self._proc = mp.Process(target = _loop_wrapper_func,
                                args = args)
        self._proc.start()
        log.info("started a new process with pid %s", self._proc.pid)
        log.debug("wait for loop function to come up")
        t0 = time.time()
        # poll until the subprocess signals that the loop function is running
        while not self._func_running.value:
            if self._proc.exitcode is not None:
                # subprocess already terminated before reporting in
                exc = self._proc.exitcode
                self._proc = None
                if exc == 0:
                    log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
                    return
                else:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
                                             "see log (INFO level) for traceback information")
            time.sleep(0.1)
            if (timeout is not None) and ((time.time() - t0) > timeout):
                err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
                log.error(err_msg)
                log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
                         "or an internal error occurred before reaching the function call")
                raise LoopTimeoutError(err_msg)
        log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))
    def stop(self):
        """
        stops the process triggered by start

        Setting the shared memory boolean run to false, which should prevent
        the loop from repeating. Call __cleanup to make sure the process
        stopped. After that we could trigger start() again.
        """
        if self.is_alive():
            self._proc.terminate()
        if self._proc is not None:
            self.__cleanup()
            if self.raise_error:
                # exitcode 255 corresponds to sys.exit(-1) in the wrapper,
                # i.e. the loop function raised an exception
                if self._proc.exitcode == 255:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
                                             "see log (INFO level) for traceback information")
            self.pipe_handler.close()
            self._proc = None
    def join(self, timeout):
        """
        calls join for the spawned process with given timeout
        """
        if self.is_alive():
            self._proc.join(timeout)
    def is_alive(self):
        # True while the subprocess object exists and has not terminated
        if self._proc is None:
            return False
        else:
            return self._proc.is_alive()
    def is_running(self):
        # True only when the subprocess is alive AND the loop function
        # has reported in (see func_running flag)
        if self.is_alive():
            return self._func_running.value
        else:
            return False
    def pause(self):
        # the loop keeps cycling but skips calling func
        if self._run.value:
            self._pause.value = True
            log.debug("process with pid %s paused", self._proc.pid)
    def resume(self):
        if self._run.value:
            self._pause.value = False
            log.debug("process with pid %s resumed", self._proc.pid)
    def getpid(self):
        """Return the pid of the spawned process (None if not started)."""
        if self._proc is not None:
            return self._proc.pid
        else:
            return None
def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                width, q, last_speed, prepend, show_stat_function, add_args,
                                i, lock):
    """Compute the progress statistics (count, speed, TET, TTG) for a single
    counter via Progress._calc and hand them to the display function."""
    stats = Progress._calc(count, last_count, start_time, max_count,
                           speed_calc_cycles, q, last_speed, lock)
    count_value, max_count_value, speed, tet, ttg = stats
    return show_stat_function(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **add_args)
def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                      width, q, last_speed, prepend, show_stat_function, len_,
                                      add_args, lock, info_line, no_move_up=False):
    """
    call the static method show_stat_wrapper for each process

    Renders one status line per counter plus the optional info line, then
    moves the cursor back up by the number of printed lines so the next call
    overwrites the block in place (unless ``no_move_up`` is set, e.g. for the
    final output on exit).
    """
    # print(ESC_BOLD, end='')
    # sys.stdout.flush()
    for i in range(len_):
        _show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles,
                                    width, q[i], last_speed[i], prepend[i], show_stat_function,
                                    add_args, i, lock[i])
    # n counts the lines printed, so the cursor can be rewound afterwards
    n = len_
    if info_line is not None:
        s = info_line.value.decode('utf-8')
        s = s.split('\n')
        n += len(s)
        for si in s:
            if width == 'auto':
                width = get_terminal_width()
            # truncate the info line to the terminal width
            if len(si) > width:
                si = si[:width]
            print("{0:<{1}}".format(si, width))
    if no_move_up:
        n = 0
    # this is only a hack to find the end
    # of the message in a stream
    # so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending
    print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')
    sys.stdout.flush()
class Progress(Loop):
    """
    Abstract Progress Class

    The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
    function which calculates progress information from a changing counter value.
    The formatting of these information is done by overwriting the static member
    :py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
    format a single progress bar on a single line only. The extension to
    multiple progresses is done automatically based on the formatting of a
    single line.
    """
    def __init__(self,
                 count,
                 max_count = None,
                 prepend = None,
                 width = 'auto',
                 speed_calc_cycles = 10,
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 info_line = None,
                 show_stat = None):
        """
        :param count: shared variable for holding the current state
            (use :py:func:`UnsignedIntValue` for short hand creation)
        :type count: list/single value of multiprocessing.Value
        :param max_count: shared variable for holding the final state
        :type max_count: None or list/single value of multiprocessing.Value
        :param prepend: string to put in front of each progress output
        :type prepend: None, str or list of str
        :param width: the width to use for the progress line (fixed or automatically determined)
        :type width: int or "auto"
        :param speed_calc_cycles: number of updates (cycles) to use for estimating the speed
            (example: ``speed_calc_cycles = 4`` and ``interval = 1`` means that the speed is estimated from
            the current state and state 4 updates before where the elapsed time will roughly be 4s)
        :param interval: seconds to wait before updating the progress
        :param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
            output level
        :param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigint: "stop" or "ign"
        :param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigterm: "stop" or "ign"
        :param info_line: additional text to show below the progress (use :py:func:`StringValue`
            for short hand creation of shared strings)
        :type info_line: None or multiprocessing.Array of characters
        :param show_stat: the display function rendering one progress line

        .. note::
            As `Progress` is derived from :py:class:`Loop` it is highly encouraged to create
            any instance of Progress with a context manager (``with`` statement).
            This ensures that the subprocess showing the progress terminates on context exit.
            Otherwise one has to make sure that at some point the stop() routine is called.

            abstract example::

                with AProgressClass(...) as p:
                    p.start()
                    # do stuff and modify counter
        """
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        # converts count to list and do type check
        try:
            for c in count:
                if not isinstance(c, Synchronized):
                    raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = True
        except TypeError:
            # count is not iterable -> single counter
            if not isinstance(count, Synchronized):
                raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = False
            count = [count]
        self.len = len(count)
        # converts max_count to list and do type check
        if max_count is not None:
            if self.is_multi:
                try:
                    for i, m in enumerate(max_count):
                        if not isinstance(m, Synchronized):
                            # wrap plain numbers in a shared value
                            max_count[i] = UnsignedIntValue(m)
                except TypeError:
                    raise TypeError("'max_count' must be iterable")
            else:
                if not isinstance(max_count, Synchronized):
                    max_count = UnsignedIntValue(max_count)
                max_count = [max_count]
        else:
            max_count = [None] * self.len
        self.start_time = []
        self.speed_calc_cycles = speed_calc_cycles
        self.width = width
        self.q = []
        self.prepend = []
        self.lock = []
        self.last_count = []
        self.last_speed = []
        for i in range(self.len):
            self.q.append(myQueue())  # queue to save the last speed_calc_cycles
                                      # (time, count) information to calculate speed
            #self.q[-1].cancel_join_thread()
            self.last_count.append(UnsignedIntValue())
            self.last_speed.append(FloatValue())
            self.lock.append(mp.Lock())
            self.start_time.append(FloatValue(val=time.time()))
            if prepend is None:
                # no prepend given
                self.prepend.append('')
            else:
                if isinstance(prepend, str):
                    self.prepend.append(prepend)
                else:
                    # assume list of prepend, (needs to be a sequence)
                    self.prepend.append(prepend[i])
        self.max_count = max_count  # list of multiprocessing value type
        self.count = count          # list of multiprocessing value type
        self.interval = interval
        self.verbose = verbose
        self.show_on_exit = False
        # extra keyword arguments handed through to the show_stat function
        self.add_args = {}
        self.info_line = info_line
        self.show_stat = show_stat
        # setup loop class with func
        Loop.__init__(self,
                      func = _show_stat_wrapper_multi_Progress,
                      args = (self.count,
                              self.last_count,
                              self.start_time,
                              self.max_count,
                              self.speed_calc_cycles,
                              self.width,
                              self.q,
                              self.last_speed,
                              self.prepend,
                              show_stat,
                              self.len,
                              self.add_args,
                              self.lock,
                              self.info_line),
                      interval = interval,
                      sigint = sigint,
                      sigterm = sigterm,
                      auto_kill_on_last_resort = True)
    def __exit__(self, *exc_args):
        self.stop()
    @staticmethod
    def _calc(count,
              last_count,
              start_time,
              max_count,
              speed_calc_cycles,
              q,
              last_speed,
              lock):
        """do the pre calculations in order to get TET, speed, TTG

        :param count: shared counter holding the current progress
        :param last_count: count at the last call, allows to treat the case of no progress
            between sequential calls
        :param start_time: shared float, the time when start was triggered
        :param max_count: shared counter holding the maximal value of count (or None)
        :param speed_calc_cycles: number of (count, time) samples kept in q
        :param q: queue of (count, time) samples used to estimate the speed
        :param last_speed: shared float caching the most recent speed estimate
        :param lock: guards access to q
        :return: tuple (count_value, max_count_value, speed, tet, ttg)
        """
        count_value = count.value
        start_time_value = start_time.value
        current_time = time.time()
        if last_count.value != count_value:
            # some progress happened
            with lock:
                # save current state (count, time) to queue
                q.put((count_value, current_time))
                # get older state from queue (or initial state)
                # to do speed estimation
                if q.qsize() > speed_calc_cycles:
                    old_count_value, old_time = q.get()
                else:
                    old_count_value, old_time = 0, start_time_value
            last_count.value = count_value
            #last_old_count.value = old_count_value
            #last_old_time.value = old_time
            speed = (count_value - old_count_value) / (current_time - old_time)
            last_speed.value = speed
        else:
            # progress has not changed since last call
            # use also old (cached) data from the queue
            #old_count_value, old_time = last_old_count.value, last_old_time.value
            speed = last_speed.value
        if (max_count is None):
            max_count_value = None
        else:
            max_count_value = max_count.value
        tet = (current_time - start_time_value)
        # time-to-go is only defined when speed and a maximum are known
        if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
            ttg = None
        else:
            ttg = math.ceil((max_count_value - count_value) / speed)
        return count_value, max_count_value, speed, tet, ttg
    def _reset_all(self):
        """
        reset all progress information
        """
        for i in range(self.len):
            self._reset_i(i)
    def _reset_i(self, i):
        """
        reset i-th progress information
        """
        self.count[i].value=0
        log.debug("reset counter %s", i)
        # drain the speed-estimation queue under the lock
        self.lock[i].acquire()
        for x in range(self.q[i].qsize()):
            self.q[i].get()
        self.lock[i].release()
        self.start_time[i].value = time.time()
    def _show_stat(self):
        """
        convenient functions to call the static show_stat_wrapper_multi with
        the given class members (used for the final output on stop())
        """
        _show_stat_wrapper_multi_Progress(self.count,
                                          self.last_count,
                                          self.start_time,
                                          self.max_count,
                                          self.speed_calc_cycles,
                                          self.width,
                                          self.q,
                                          self.last_speed,
                                          self.prepend,
                                          self.show_stat,
                                          self.len,
                                          self.add_args,
                                          self.lock,
                                          self.info_line,
                                          no_move_up=True)
    def reset(self, i = None):
        """resets the progress information

        :param i: tell which progress to reset, if None reset all
        :type i: None, int
        """
        if i is None:
            self._reset_all()
        else:
            self._reset_i(i)
    def start(self):
        """
        start
        """
        # before printing any output to stout, we can now check this
        # variable to see if any other ProgressBar has reserved that
        # terminal.
        if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
            if not terminal.terminal_reserve(progress_obj=self):
                log.warning("tty already reserved, NOT starting the progress loop!")
                return
        super(Progress, self).start()
        self.show_on_exit = True
    def stop(self):
        """
        trigger clean up by hand, needs to be done when not using
        context management via 'with' statement

        - will terminate loop process
        - show a last progress -> see the full 100% on exit
        - releases terminal reservation
        """
        super(Progress, self).stop()
        terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
        if self.show_on_exit:
            if not isinstance(self.pipe_handler, PipeToPrint):
                # capture the final stat locally and push it through the
                # custom pipe handler (e.g. the IPython HTML widget)
                myout = inMemoryBuffer()
                stdout = sys.stdout
                sys.stdout = myout
                self._show_stat()
                self.pipe_handler(myout.getvalue())
                sys.stdout = stdout
            else:
                self._show_stat()
                print()
        self.show_on_exit = False
def show_stat_ProgressBar(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Render a single wget/pv-like progress line.

    Without a usable maximum only the absolute count and speed are shown;
    otherwise a bar of '=' characters is drawn."""
    if (max_count_value is None) or (max_count_value == 0):
        # only absolute progress and estimated speed
        print("{}{}{} [{}] {}#{} ".format(terminal.ESC_NO_CHAR_ATTR,
                                          COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                          humanize_time(tet), humanize_speed(speed),
                                          terminal.ESC_BOLD + COLTHM['BAR_COL'],
                                          count_value))
        return
    if width == 'auto':
        width = get_terminal_width()
    # relative progress shown as a bar on screen
    suffix = " TTG --" if ttg is None else " TTG {}".format(humanize_time(ttg))
    head = "{}{}{} [{}] ".format(terminal.ESC_NO_CHAR_ATTR,
                                 COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                 humanize_time(tet),
                                 humanize_speed(speed))
    visible = terminal.len_string_without_ESC(head + suffix)
    bar_room = width - visible - 3
    filled = int(bar_room * count_value / max_count_value)
    empty = bar_room - filled
    bar = (COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * filled + ">" +
           " " * empty + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT)
    print(head + bar + suffix)
class ProgressBar(Progress):
    """
    implements a progress bar similar to the one known from 'wget' or 'pv'
    """
    def __init__(self, *args, **kwargs):
        """Forward everything to Progress, fixing the display function to
        :func:`show_stat_ProgressBar`.

        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        super(ProgressBar, self).__init__(*args, show_stat=show_stat_ProgressBar, **kwargs)
def show_stat_ProgressBarCounter(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Display function for :class:`ProgressBarCounter`.

    Prefixes the usual progress output with the reset-counter statistics
    (overall TET, reset speed and number of resets, taken from ``kwargs``).
    For ``max_count_value`` None or 0 the per-process part is an absolute
    count; otherwise a bar of '=' characters is drawn.
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # counter part: overall TET, reset speed and number of finished resets
    s_c = "{}{}{} [{}] {}#{} - ".format(terminal.ESC_NO_CHAR_ATTR,
                                        COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                        humanize_time(counter_tet),
                                        humanize_speed(counter_speed.value),
                                        COLTHM['BAR_COL'],
                                        str(counter_count.value) + terminal.ESC_DEFAULT)
    if width == 'auto':
        width = get_terminal_width()
    if (max_count_value is None) or (max_count_value == 0):
        # no maximum known: append absolute progress and speed only
        s_c = "{}{} [{}] {}#{} ".format(s_c,
                                        humanize_time(tet),
                                        humanize_speed(speed),
                                        COLTHM['BAR_COL'],
                                        str(count_value) + terminal.ESC_DEFAULT)
    else:
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{} [{}] ".format(humanize_time(tet), humanize_speed(speed))
        # room for the bar = total width minus visible text minus brackets/arrow
        l = terminal.len_string_without_ESC(s1 + s3 + s_c)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        s_c = s_c + s1 + s2 + s3
    print(s_c)
class ProgressBarCounter(Progress):
    """
    records also the time of each reset and calculates the speed
    of the resets.

    shows the TET since init (not affected by reset)
    the speed of the resets (number of finished processes per time)
    and the number of finished processes

    after that also show a progress of each process
    max_count > 0 and not None -> bar
    max_count == None -> absolute count statistic
    max_count == 0 -> hide process statistic at all

    NOTE(review): the display function currently prints the absolute count
    for max_count == 0 as well (same branch as None) -- TODO confirm intended.
    """
    def __init__(self, speed_calc_cycles_counter=5, **kwargs):
        # plain counter display; the reset statistics are injected via add_args
        Progress.__init__(self, show_stat = show_stat_ProgressBarCounter, **kwargs)
        self.counter_count = []   # number of resets per sub-counter
        self.counter_q = []       # (count, time) samples for reset-speed estimation
        self.counter_speed = []   # estimated resets per second
        for i in range(self.len):
            self.counter_count.append(UnsignedIntValue(val=0))
            self.counter_q.append(myQueue())
            self.counter_speed.append(FloatValue())
        self.counter_speed_calc_cycles = speed_calc_cycles_counter
        self.init_time = time.time()
        # make the counter statistics available to the show_stat function
        self.add_args['counter_count'] = self.counter_count
        self.add_args['counter_speed'] = self.counter_speed
        self.add_args['init_time'] = self.init_time
    def get_counter_count(self, i=0):
        """Return the number of resets of the i-th counter."""
        return self.counter_count[i].value
    def _reset_i(self, i):
        # count the reset and update the reset-speed estimate before the
        # usual per-counter reset
        c = self.counter_count[i]
        with c.get_lock():
            c.value += 1
            count_value = c.value
        q = self.counter_q[i]
        current_time = time.time()
        q.put((count_value, current_time))
        # compare with a sample counter_speed_calc_cycles resets back
        # (or with the start of the run) to estimate the reset speed
        if q.qsize() > self.counter_speed_calc_cycles:
            old_count_value, old_time = q.get()
        else:
            old_count_value, old_time = 0, self.init_time
        speed = (count_value - old_count_value) / (current_time - old_time)
        self.counter_speed[i].value = speed
        Progress._reset_i(self, i)
def get_d(s1, s2, width, lp, lps):
    """Distribute the free space of a ``width``-character line between the
    stat strings ``s1`` (left) and ``s2`` (right).

    ``lp`` and ``lps`` are the lengths already consumed by the prepend and
    the percent string; two more characters are reserved for the brackets.
    Returns (s1, s2, pad_left, pad_right), or None when the parts do not
    fit, signalling the caller to try a more compact layout.
    """
    free = (width - len(terminal.remove_ESC_SEQ_from_string(s1))
                  - len(terminal.remove_ESC_SEQ_from_string(s2)) - 2 - lp - lps)
    if free < 0:
        return None
    left_pad = free // 2
    return s1, s2, left_pad, free - left_pad
def full_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Widest layout: TET / speed / TTG on the left, ETA / ORT on the right."""
    left = "TET %s %12s TTG %s" % (tet, speed, ttg)
    right = "ETA %s ORT %s" % (eta, ort)
    return get_d(left, right, width, lp, lps)
def full_minor_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Like full_stat but with single-letter keywords (E/G/A/O) to save space."""
    left = "E %s %12s G %s" % (tet, speed, ttg)
    right = "A %s O %s" % (eta, ort)
    return get_d(left, right, width, lp, lps)
def reduced_1_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Drops the ETA: E / speed / G on the left, O on the right."""
    left = "E %s %12s G %s" % (tet, speed, ttg)
    right = "O %s" % ort
    return get_d(left, right, width, lp, lps)
def reduced_2_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Drops the speed as well: E / G on the left, O on the right."""
    left = "E %s G %s" % (tet, ttg)
    right = "O %s" % ort
    return get_d(left, right, width, lp, lps)
def reduced_3_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Only E / G remain; the right-hand side is empty."""
    left = "E %s G %s" % (tet, ttg)
    return get_d(left, '', width, lp, lps)
def reduced_4_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Minimal layout: no textual stats at all."""
    return get_d('', '', width, lp, lps)
def kw_bold(s, ch_after):
    """Wrap the stat keywords (TET, TTG, ETA, ORT and their one-letter forms)
    in bold escape sequences, but only where the keyword is immediately
    followed by one of the characters in ``ch_after``."""
    for keyword in ('TET', 'TTG', 'ETA', 'ORT', 'E', 'G', 'A', 'O'):
        for follower in ch_after:
            emboldened = terminal.ESC_BOLD + keyword + terminal.ESC_RESET_BOLD + follower
            s = s.replace(keyword + follower, emboldened)
    return s
def _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Build the fancy (htop-like) progress string.

    Without a usable maximum only the absolute count and speed are shown.
    Otherwise the textual stats (TET/speed/TTG/ETA/ORT) are laid out via
    full_stat ... reduced_4_stat, picking the first layout that fits into
    ``width``; the part of the line left of the current progress fraction is
    filled with '-' characters so the text itself acts as the bar.
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        stat = "{}{} [{}] {}#{} ".format(COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress
        p = count_value / max_count_value
        if p < 1:
            ps = " {:.1%} ".format(p)
        else:
            ps = " {:.0%} ".format(p)
        if ttg is None:
            eta = '--'
            ort = None
        else:
            # estimated time of arrival and overall run time
            eta = datetime.datetime.fromtimestamp(time.time() + ttg).strftime("%Y%m%d_%H:%M:%S")
            ort = tet + ttg
        tet = humanize_time(tet)
        speed = '[' + humanize_speed(speed) + ']'
        ttg = humanize_time(ttg)
        ort = humanize_time(ort)
        repl_ch = '-'
        lp = len(prepend)
        # try the layouts from most to least verbose until one fits the width
        args = p, tet, speed, ttg, eta, ort, repl_ch, width, lp, len(ps)
        res = full_stat(*args)
        if res is None:
            res = full_minor_stat(*args)
        if res is None:
            res = reduced_1_stat(*args)
        if res is None:
            res = reduced_2_stat(*args)
        if res is None:
            res = reduced_3_stat(*args)
        if res is None:
            res = reduced_4_stat(*args)
        if res is not None:
            s1, s2, d1, d2 = res
            s = s1 + ' ' * d1 + ps + ' ' * d2 + s2
            # fill the spaces left of the progress position with repl_ch and
            # put a '>' head at the boundary
            idx_p = math.ceil( (width-lp-2)*p)
            s_before = s[:idx_p].replace(' ', repl_ch)
            if (len(s_before) > 0) and (s_before[-1] == repl_ch):
                s_before = s_before[:-1] + '>'
            s_after = s[idx_p:]
            # re-embolden keywords on both sides of the progress boundary
            s_before = kw_bold(s_before, ch_after=[repl_ch, '>'])
            s_after = kw_bold(s_after, ch_after=[' '])
            stat = (COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT +
                    COLTHM['BAR_COL'] + terminal.ESC_BOLD + '[' + terminal.ESC_RESET_BOLD + s_before + terminal.ESC_DEFAULT +
                    s_after + terminal.ESC_BOLD + COLTHM['BAR_COL'] + ']' + terminal.ESC_NO_CHAR_ATTR)
        else:
            # not even the minimal layout fits: show the bare percentage
            ps = ps.strip()
            if p == 1:
                ps = ' ' + ps
            stat = prepend + ps
    return stat
def show_stat_ProgressBarFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print the fancy (htop-like) progress line built by :func:`_stat`."""
    print(_stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs))
class ProgressBarFancy(Progress):
    """
    implements a progress bar where the color indicates the current status
    similar to the bars known from 'htop'
    """
    def __init__(self, *args, **kwargs):
        """Forward everything to Progress, fixing the display function to
        :func:`show_stat_ProgressBarFancy`.

        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        super(ProgressBarFancy, self).__init__(*args, show_stat=show_stat_ProgressBarFancy, **kwargs)
def show_stat_ProgressBarCounterFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Display function combining the reset-counter statistics with the fancy
    (htop-like) per-process progress built by :func:`_stat`."""
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # counter part: overall TET, reset speed and number of finished resets
    s_c = "{}{}{} [{}] {}#{}".format(terminal.ESC_NO_CHAR_ATTR,
                                     COLTHM['PRE_COL']+prepend+terminal.ESC_DEFAULT,
                                     humanize_time(counter_tet),
                                     humanize_speed(counter_speed.value),
                                     COLTHM['BAR_COL'],
                                     str(counter_count.value) + terminal.ESC_DEFAULT)
    if max_count_value is not None:
        if width == 'auto':
            width = get_terminal_width()
        s_c += ' - '
        if max_count_value == 0:
            # no per-process maximum: absolute count and speed only
            s_c = "{}{} [{}] {}#{} ".format(s_c, humanize_time(tet), humanize_speed(speed),
                                            COLTHM['BAR_COL'], str(count_value)+terminal.ESC_DEFAULT)
        else:
            # render the fancy bar into the width remaining after the counter part
            _width = width - terminal.len_string_without_ESC(s_c)
            s_c += _stat(count_value, max_count_value, '', speed, tet, ttg, _width, i)
    print(s_c)
class ProgressBarCounterFancy(ProgressBarCounter):
    """ProgressBarCounter variant rendered in the fancy (htop-like) style."""
    def __init__(self, *args, **kwargs):
        ProgressBarCounter.__init__(self, *args, **kwargs)
        # NOTE(review): the parent __init__ already captured
        # show_stat_ProgressBarCounter inside the Loop args tuple, so this
        # override appears to affect only the final _show_stat() call on
        # stop(), not the live subprocess output -- TODO confirm
        self.show_stat = show_stat_ProgressBarCounterFancy
class SIG_handler_Loop(object):
    """class to setup signal handling for the Loop class

    Note: each subprocess receives the default signal handling from its
    parent.  Calling signal.signal within the subprocess overrides that
    default behavior: depending on the policy string, a signal is either
    ignored or translated into LoopInterruptError, which the loop wrapper
    catches to stop the loop.
    """
    def __init__(self, sigint, sigterm, log, prefix):
        """
        :param sigint: policy for SIGINT ('ign' or 'stop')
        :param sigterm: policy for SIGTERM ('ign' or 'stop')
        :param log: logger used to report received signals
        :param prefix: identifier prefix kept for log message context
        """
        self.set_signal(signal.SIGINT, sigint)
        self.set_signal(signal.SIGTERM, sigterm)
        self.prefix = prefix
        self.log = log
        self.log.info("setup signal handler for loop (SIGINT:%s, SIGTERM:%s)", sigint, sigterm)
    def set_signal(self, sig, handler_str):
        """Install the handler selected by ``handler_str`` for signal ``sig``.

        :raises TypeError: for an unknown handler string
        """
        if handler_str == 'ign':
            signal.signal(sig, self._ignore_signal)
        elif handler_str == 'stop':
            signal.signal(sig, self._stop_on_signal)
        else:
            # BUG FIX: the message was previously passed unformatted together
            # with the argument as a second TypeError parameter; format it
            raise TypeError("unknown signal handler string '%s'" % handler_str)
    def _ignore_signal(self, signal, frame):
        # deliberately swallow the signal, only note it in the debug log
        self.log.debug("ignore received sig %s", signal_dict[signal])
    def _stop_on_signal(self, signal, frame):
        # translate the signal into LoopInterruptError which the loop
        # wrapper catches to terminate gracefully
        self.log.info("received sig %s -> raise InterruptedError", signal_dict[signal])
        raise LoopInterruptError()
def FloatValue(val=0.):
    """Create a process-shared double ('d') `multiprocessing.Value` initialized to `val`."""
    shared_float = mp.Value('d', val, lock=True)
    return shared_float
def UnsignedIntValue(val=0):
    """Create a process-shared unsigned int ('I') `multiprocessing.Value` initialized to `val`."""
    shared_uint = mp.Value('I', val, lock=True)
    return shared_uint
def StringValue(num_of_bytes):
    """Create a process-shared character `multiprocessing.Array` of length `num_of_bytes`."""
    # _jm_compatible_bytearray bridges the py2/py3 bytearray difference
    initial = _jm_compatible_bytearray(num_of_bytes)
    return mp.Array('c', initial, lock=True)
def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort = False):
    """Make sure process `proc` terminates, escalating step by step.

    1) join with `timeout`; 2) on failure send SIGTERM (`proc.terminate`) and
    join again with 3*timeout; 3) as a last resort send SIGKILL, either
    automatically (`auto_kill_on_last_resort=True`) or after asking the user
    interactively.

    :returns: True once the process has stopped, False if the user chose to
        ignore the still-running process.

    NOTE(review): `prefix` is unused in the body — kept for interface
    compatibility; confirm before removing.
    """
    proc.join(timeout)
    if not proc.is_alive():
        log.debug("termination of process (pid %s) within timeout of %s SUCCEEDED!", proc.pid, humanize_time(timeout))
        return True
    # process still runs -> send SIGTERM -> see what happens
    log.warning("termination of process (pid %s) within given timeout of %s FAILED!", proc.pid, humanize_time(timeout))
    proc.terminate()
    new_timeout = 3*timeout
    log.debug("wait for termination (timeout %s)", humanize_time(new_timeout))
    proc.join(new_timeout)
    if not proc.is_alive():
        log.info("termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!", proc.pid, humanize_time(new_timeout))
        return True
    log.warning("termination of process (pid %s) via SIGTERM with timeout of %s FAILED!", proc.pid, humanize_time(new_timeout))
    log.debug("auto_kill_on_last_resort is %s", auto_kill_on_last_resort)
    # 'k' -> send SIGKILL immediately; '_' -> try one more SIGTERM first
    answer = 'k' if auto_kill_on_last_resort else '_'
    while True:
        log.debug("answer string is %s", answer)
        if answer == 'k':
            log.warning("send SIGKILL to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGKILL)
            time.sleep(0.1)
        else:
            log.info("send SIGTERM to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGTERM)
            time.sleep(0.1)
        if not proc.is_alive():
            log.info("process (pid %s) has stopped running!", proc.pid)
            return True
        else:
            log.warning("process (pid %s) is still running!", proc.pid)
        # still alive -> ask the user how to proceed
        print("the process (pid {}) seems still running".format(proc.pid))
        try:
            answer = input("press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore")
        except Exception as e:
            # asking failed (e.g. no interactive stdin) -> fall back to SIGKILL
            log.error("could not ask for sending SIGKILL due to {}".format(type(e)))
            log.info(traceback.format_exc())
            log.warning("send SIGKILL now")
            answer = 'k'
        if answer == 'ignore':
            log.warning("ignore process %s", proc.pid)
            return False
        elif answer != 'k':
            answer = ''
def getCountKwargs(func):
    """ Returns a list ["count kwarg", "count_max kwarg"] for a
    given function. Valid combinations are defined in
    `progress.validCountKwargs`.

    Returns None if no keyword arguments are found.
    """
    code = getattr(func, "__code__", None)
    if code is not None:
        # positional argument names of the wrapped function
        arg_names = code.co_varnames[:code.co_argcount]
        for candidate in validCountKwargs:
            if candidate[0] in arg_names and candidate[1] in arg_names:
                return candidate
    # no known count/max-count pair found
    return None
def humanize_speed(c_per_sec):
    """convert a speed in counts per second to counts per [s, min, h, d], choosing the smallest value greater zero.
    """
    unit_names = ['c/s', 'c/min', 'c/h', 'c/d']
    step_factors = [60, 60, 24]
    value = c_per_sec
    unit_idx = 0
    if value > 0:
        # scale up through the units until the value reaches at least one
        # (or no coarser unit is left)
        while value < 1 and unit_idx < len(step_factors):
            value *= step_factors[unit_idx]
            unit_idx += 1
    return "{:.1f}{}".format(value, unit_names[unit_idx])
def humanize_time(secs):
    """convert second in to hh:mm:ss format
    """
    if secs is None:
        return '--'
    if secs < 1:
        # sub-second durations: milliseconds
        return "{:.2f}ms".format(secs * 1000)
    if secs < 10:
        # short durations: fractional seconds
        return "{:.2f}s".format(secs)
    # everything else: hh:mm:ss
    total_minutes, seconds = divmod(secs, 60)
    hours, minutes = divmod(total_minutes, 60)
    return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
def codecov_subprocess_check():
    """Marker function: its output indicates that a code-coverage run entered a subprocess."""
    message = "this line will be only called from a subprocess"
    print(message)
# alias for the queue factory used by Progress (see Progress.__init__)
myQueue = mp.Queue

# a mapping from the numeric values of the signals to their names used in the
# standard python module signals
signal_dict = {}
for s in dir(signal):
    # only real signal names (skips SIG_DFL / SIG_IGN whose 4th char is '_')
    if s.startswith('SIG') and s[3] != '_':
        n = getattr(signal, s)
        if n in signal_dict:
            # some numeric values carry several names -> join them with '/'
            signal_dict[n] += ('/'+s)
        else:
            signal_dict[n] = s

# color themes: escape sequences for the prepend text (PRE_COL), the bar
# (BAR_COL) and the number of additional lines to move up (ADD_LNS_UP)
_colthm_term_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_GREEN, 'ADD_LNS_UP':0}
_colthm_ipyt_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_BLUE, 'ADD_LNS_UP':0}
_colthm_wincmd_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_GREEN, 'ADD_LNS_UP':1}

# registry used by choose_color_theme()
color_themes = {'term_default': _colthm_term_default,
                'ipyt_default': _colthm_ipyt_default,
                'wincmd_default': _colthm_wincmd_default}

# pick a platform dependent default theme
if platform.system() == 'Windows':
    COLTHM = _colthm_wincmd_default
else:
    COLTHM = _colthm_term_default
def choose_color_theme(name):
    """Switch the module wide color theme `COLTHM` to the theme registered
    under `name`; warns (and keeps the current theme) for unknown names."""
    global COLTHM
    if name not in color_themes:
        warnings.warn("no such color theme {}".format(name))
        return
    COLTHM = color_themes[name]
# keyword arguments that define counting in wrapped functions
# each entry is a pair [<current-count kwarg>, <max-count kwarg>],
# matched against a function's signature by getCountKwargs()
validCountKwargs = [
    [ "count", "count_max"],
    [ "count", "max_count"],
    [ "c", "m"],
    [ "jmc", "jmm"],
]
|
cimatosa/progression | progression/progress.py | _show_stat_wrapper_Progress | python | def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
width, q, last_speed, prepend, show_stat_function, add_args,
i, lock):
count_value, max_count_value, speed, tet, ttg, = Progress._calc(count,
last_count,
start_time,
max_count,
speed_calc_cycles,
q,
last_speed,
lock)
return show_stat_function(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **add_args) | calculate | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L592-L606 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Progression module
------------------
This module provides the (so far) four variants to display progress information:
* :py:class:`.ProgressBar`
This class monitors one or multiple processes showing the total elapsed time (TET), the current speed
estimated from the most recent updated, a colored bar showing the progress and an
estimate for the remaining time, also called time to go (TTG).
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 5.83s [7.2c/s] <span style="color:#00ff00"><b>[=====================> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounter`
If a single process is intended to do several sequential task, the :py:class:`.ProgressBarCounter` class can keep track of the number
of accomplished tasks on top of monitoring the individual task just like :py:class:`.ProgressBar` does.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre><span style="color:#00ff00"><b> [</b><b>TET</b>-5.83s-----[7.2c/s]-<b>TTG</b>-8.05s-></span> 42.0% <b>ETA</b> 20161011_16:52:52 <b>ORT</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
* :py:class:`.ProgressBarFancy`
This class intends to be a replacement for :py:class:`.ProgressBar` with slightly more information and
better handling of small terminal widths.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - 5.83s [7.2c/s] <span style="color:#00ff00"><b>[===========> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounterFancy`
Just as :py:class:`.ProgressBarFancy` this replaces :py:class:`.ProgressBarCounter`.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - <span style="color:#800000"></span><span style="color:#00ff00"><b>[</b><b>E</b>-5.83s-----[7.2c/s]-<b>G</b>-8</span>.05s 42.0% <b>O</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
.. autoclass:: Progress
:members:
:inherited-members:
.. autoclass:: ProgressBar
:members:
.. autoclass:: ProgressBarCounter
:members:
.. autoclass:: ProgressBarFancy
:members:
.. autoclass:: ProgressBarCounterFancy
:members:
.. autofunction:: UnsignedIntValue
.. autofunction:: FloatValue
.. autofunction:: StringValue
"""
from __future__ import division, print_function
import datetime
import io
import logging
from logging.handlers import QueueHandler, QueueListener
import math
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
import os
import sys
import signal
import subprocess as sp
import threading
import time
import traceback
import warnings
from . import terminal
import platform
_IPYTHON = True
try:
import ipywidgets
except ImportError:
_IPYTHON = False
warnings.warn("could not load ipywidgets (IPython HTML output will not work)", category=ImportWarning)
except DeprecationWarning:
pass
try:
from IPython.display import display
except ImportError:
_IPYTHON = False
warnings.warn("could not load IPython (IPython HTML output will not work)", category=ImportWarning)
# Magic conversion from 3 to 2
if sys.version_info[0] == 2:
    # Python 2 shims
    ProcessLookupError = OSError       # name does not exist on py2
    inMemoryBuffer = io.BytesIO        # py2 strings are bytes
    old_math_ceil = math.ceil
    def my_int_ceil(f):
        # py2 math.ceil returns a float; wrap it to return an int like py3
        return int(old_math_ceil(f))
    math.ceil = my_int_ceil
    _jm_compatible_bytearray = lambda x: x
    class TimeoutError(Exception):
        pass
elif sys.version_info[0] == 3:
    inMemoryBuffer = io.StringIO
    _jm_compatible_bytearray = bytearray
class MultiLineFormatter(logging.Formatter):
    """Formatter that indents continuation lines of a multiline message so
    they align under the first line, i.e.

        <HEAD> msg_line1
               msg_line2
               ...
    """
    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        # everything before the message itself is the header; pad all
        # following lines with that many spaces
        header_len = len(formatted.split(record.message)[0])
        return formatted.replace('\n', '\n' + ' ' * header_len)
# default handler intentionally disabled: handler setup is left to the application
# def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
# def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
# def_handl.setFormatter(fmt) # ... and pads multiline messaged
log = logging.getLogger(__name__) # creates the default log for this module
# log.addHandler(def_handl)
class LoopExceptionError(RuntimeError):
    """Raised when the wrapped loop function terminated with a non-zero exit code
    (see Loop.start / Loop.stop)."""
    pass
class LoopInterruptError(Exception):
    """Raised inside the loop subprocess by the signal handler
    (SIG_handler_Loop._stop_on_signal) to stop the loop."""
    pass
class StdoutPipe(object):
    """A minimal file-like object which forwards everything written to it
    through the ``send`` method of a connection (e.g. one end of a
    ``multiprocessing.Pipe``).

    Typical usage is to replace ``sys.stdout`` inside a subprocess so that
    the parent process can receive and handle the subprocess' output::

        conn_recv, conn_send = Pipe(False)
        sys.stdout = StdoutPipe(conn_send)
        print("hallo welt", end='')      # travels through the pipe
        msg = conn_recv.recv()           # -> "hallo welt"
    """
    def __init__(self, conn):
        # any object providing a 'send' method works here
        self.conn = conn

    def flush(self):
        # nothing is buffered locally -> nothing to do
        pass

    def write(self, b):
        # forward the written chunk verbatim through the connection
        self.conn.send(b)
class PipeToPrint(object):
    """Default pipe handler: print every received chunk verbatim."""
    def __call__(self, b):
        # no extra newline -- the chunks carry their own line control
        print(b, end='')

    def close(self):
        """No resources to release."""
        pass
class PipeFromProgressToIPythonHTMLWidget(object):
    """Pipe handler that renders progress output in an IPython/Jupyter HTML
    widget instead of printing to a terminal.

    Incoming chunks are buffered until the magic end-of-message escape
    sequence (``terminal.ESC_MY_MAGIC_ENDING``) arrives; the complete message
    is then converted from ANSI escape sequences to HTML and pushed into the
    widget.  Requires ipywidgets/IPython (see the module-level ``_IPYTHON``
    flag).
    """
    def __init__(self):
        self.htmlWidget = ipywidgets.widgets.HTML()
        display(self.htmlWidget)
        self._buff = ""
    def __call__(self, b):
        self._buff += b
        # a message is complete once the magic ending sequence was received
        if b.endswith(terminal.ESC_MY_MAGIC_ENDING):
            buff = terminal.ESC_SEQ_to_HTML(self._buff)
            self.htmlWidget.value = '<style>.widget-html{font-family:monospace}</style><pre>'+buff+'</pre>'
            self._buff = ""
    def close(self):
        self.htmlWidget.close()
# module wide pipe handler class used by Loop instances
# (instantiated in Loop.__init__); swapped at runtime via choose_pipe_handler()
PipeHandler = PipeToPrint
def choose_pipe_handler(kind = 'print', color_theme = None):
    """Select the module wide pipe handler (and a matching color theme).

    :param kind: 'print' for plain terminal output, 'ipythonhtml' for an
        IPython HTML widget (needs ipywidgets/IPython, see ``_IPYTHON``)
    :param color_theme: name of the color theme to activate; if None a
        default matching `kind` is chosen
    :raises ValueError: if `kind` is not one of the supported values
    """
    global PipeHandler
    if kind == 'print':
        PipeHandler = PipeToPrint
        if color_theme is None:
            choose_color_theme('term_default')
        else:
            choose_color_theme(color_theme)
    elif kind == 'ipythonhtml':
        if _IPYTHON:
            PipeHandler = PipeFromProgressToIPythonHTMLWidget
            if color_theme is None:
                choose_color_theme('ipyt_default')
            else:
                choose_color_theme(color_theme)
        else:
            # keep the current handler, ipython output is not available
            warnings.warn("can not choose ipythonHTML (IPython and/or ipywidgets were not loaded)")
    else:
        # bugfix: the original never substituted `kind` into the '{}' placeholder,
        # so the message showed a literal '{}' instead of the offending value
        raise ValueError("unknown kind '{}' for pipe_handler, use one out of ('print', 'ipythonhtml')".format(kind))
def get_terminal_width():
    """Return the output width (in characters) for progress lines, depending
    on the currently selected pipe handler."""
    if PipeHandler is PipeToPrint:
        # real terminal -> query its actual width
        return terminal.get_terminal_width()
    if PipeHandler is PipeFromProgressToIPythonHTMLWidget:
        # fixed width for the HTML widget output
        return 80
    raise NotImplementedError
def get_identifier(name=None, pid=None, bold=True):
    """Build an identifier string 'PID_<pid>' or '<name>_<pid>', optionally
    wrapped in terminal bold escape sequences.

    :param name: optional textual name; if None only the pid is used
    :param pid: process id; defaults to the current process id
    :param bold: wrap the identifier in bold escape sequences
    """
    if pid is None:
        pid = os.getpid()
    # empty strings disable the bold markup entirely
    if bold:
        esc_bold, esc_no_char_attr = terminal.ESC_BOLD, terminal.ESC_NO_CHAR_ATTR
    else:
        esc_bold, esc_no_char_attr = "", ""
    base = "PID_{}".format(pid) if name is None else "{}_{}".format(name, pid)
    return "{}{}{}".format(esc_bold, base, esc_no_char_attr)
def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
                       logging_level, conn_send, func_running, log_queue):
    """
    to be executed as a separate process (that's why this functions is declared static)

    Repeatedly calls ``func(*args)`` while the shared flag `shared_mem_run` is
    set, sleeping `interval` seconds between calls, skipping calls while
    `shared_mem_pause` is set, and stopping early when `func` returns True.
    Stdout is redirected through `conn_send` and log records are forwarded to
    the parent via `log_queue`.  Exits with code -1 when `func` raised.
    """
    prefix = get_identifier(name) + ' '
    global log
    # replace the module logger inside the subprocess by one that forwards
    # all records through the queue to the parent process
    log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
    log.setLevel(logging_level)
    log.addHandler(QueueHandler(log_queue))
    # redirect the subprocess' stdout through the pipe to the parent
    sys.stdout = StdoutPipe(conn_send)
    log.debug("enter wrapper_func")
    # install the requested SIGINT/SIGTERM behavior for this subprocess
    SIG_handler_Loop(sigint, sigterm, log, prefix)
    # signal the parent (Loop.start) that the loop function is up
    func_running.value = True
    error = False
    while shared_mem_run.value:
        try:
            # in pause mode, simply sleep
            if shared_mem_pause.value:
                quit_loop = False
            else:
                # if not pause mode -> call func and see what happens
                try:
                    quit_loop = func(*args)
                except LoopInterruptError:
                    raise
                except Exception as e:
                    log.error("error %s occurred in loop calling 'func(*args)'", type(e))
                    log.info("show traceback.print_exc()\n%s", traceback.format_exc())
                    error = True
                    break
            if quit_loop is True:
                log.debug("loop stooped because func returned True")
                break
            time.sleep(interval)
        except LoopInterruptError:
            # raised by the signal handler -> leave the loop silently
            log.debug("quit wrapper_func due to InterruptedError")
            break
    func_running.value = False
    if error:
        # non-zero exit code tells the parent that func raised
        sys.exit(-1)
    else:
        log.debug("wrapper_func terminates gracefully")
    # gets rid of the following warnings
    #   Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
    #   ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
    try:
        if mp.get_start_method() == "spawn":
            sys.stdin.close()
    except AttributeError:
        # mp.get_start_method does not exist on older pythons
        pass
class LoopTimeoutError(TimeoutError):
    """Raised by Loop.start when the loop function does not come up within the
    given timeout."""
    pass
class Loop(object):
    """
    class to run a function periodically an seperate process.

    In case the called function returns True, the loop will stop.
    Otherwise a time interval given by interval will be slept before
    another execution is triggered.

    The shared memory variable _run (accessible via the class property run)
    also determines if the function if executed another time. If set to False
    the execution stops.

    For safe cleanup (and in order to catch any Errors)
    it is advisable to instantiate this class
    using 'with' statement as follows:

        with Loop(**kwargs) as my_loop:
            my_loop.start()
            ...

    this will guarantee you that the spawned loop process is
    down when exiting the 'with' scope.

    The only circumstance where the process is still running is
    when you set auto_kill_on_last_resort to False and answer the
    question to send SIGKILL with no.
    """
    def __init__(self,
                 func,
                 args = (),
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 auto_kill_on_last_resort = False,
                 raise_error = True):
        """
        func [callable] - function to be called periodically

        args [tuple] - arguments passed to func when calling

        intervall [pos number] - time to "sleep" between each call

        verbose - DEPRECATED, only kept for compatibility, use global log.level to
        specify verbosity

        sigint [string] - signal handler string to set SIGINT behavior (see below)

        sigterm [string] - signal handler string to set SIGTERM behavior (see below)

        auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
        to loop process in case normal stop and SIGTERM failed. If set True, send SIDKILL
        without asking.

        the signal handler string may be one of the following
            ing: ignore the incoming signal
            stop: raise InterruptedError which is caught silently.
        """
        self._proc = None
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        self.func = func
        self.args = args
        self.interval = interval
        assert self.interval >= 0
        # shared flags read by the subprocess (_loop_wrapper_func)
        self._run = mp.Value('b', False)
        self._pause = mp.Value('b', False)
        self._func_running = mp.Value('b', False)
        self._sigint = sigint
        self._sigterm = sigterm
        self._auto_kill_on_last_resort = auto_kill_on_last_resort
        log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
        self._monitor_thread = None
        # receives the subprocess' stdout chunks (see _monitor_stdout_pipe)
        self.pipe_handler = PipeHandler()
        self.raise_error = raise_error

    def __enter__(self):
        return self

    def __exit__(self, *exc_args):
        if self.is_alive():
            log.debug("loop is still running on context exit")
        else:
            log.debug("loop has stopped on context exit")
        self.stop()

    def __cleanup(self):
        """
        Wait at most twice as long as the given repetition interval
        for the _wrapper_function to terminate.

        If after that time the _wrapper_function has not terminated,
        send SIGTERM to and the process.

        Wait at most five times as long as the given repetition interval
        for the _wrapper_function to terminate.

        If the process still running send SIGKILL automatically if
        auto_kill_on_last_resort was set True or ask the
        user to confirm sending SIGKILL
        """
        # set run to False and wait some time -> see what happens
        self._run.value = False
        if check_process_termination(proc = self._proc,
                                     timeout = 2*self.interval,
                                     prefix = '',
                                     auto_kill_on_last_resort = self._auto_kill_on_last_resort):
            log.debug("cleanup successful")
        else:
            raise RuntimeError("cleanup FAILED!")
        try:
            # closing the send end lets the monitor thread see EOFError
            self.conn_send.close()
            self._log_queue_listener.stop()
        except OSError:
            pass
        log.debug("wait for monitor thread to join")
        self._monitor_thread.join()
        log.debug("monitor thread to joined")
        self._func_running.value = False

    def _monitor_stdout_pipe(self):
        # runs in a daemon thread: forward everything the subprocess writes
        # to stdout into the pipe handler until the pipe is closed
        while True:
            try:
                b = self.conn_recv.recv()
                self.pipe_handler(b)
            except EOFError:
                break

    def start(self, timeout=None):
        """
        uses multiprocess Process to call _wrapper_func in subprocess

        :param timeout: maximal time (in seconds) to wait for the loop
            function to come up; raises LoopTimeoutError when exceeded
        """
        if self.is_alive():
            log.warning("a process with pid %s is already running", self._proc.pid)
            return
        self._run.value = True
        self._func_running.value = False
        name = self.__class__.__name__
        # pipe transporting the subprocess' stdout back to this process
        self.conn_recv, self.conn_send = mp.Pipe(False)
        self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
        self._monitor_thread.daemon=True
        self._monitor_thread.start()
        log.debug("started monitor thread")
        # queue + listener forwarding the subprocess' log records to our handlers
        self._log_queue = mp.Queue()
        self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
        self._log_queue_listener.start()
        args = (self.func, self.args, self._run, self._pause, self.interval,
                self._sigint, self._sigterm, name, log.level, self.conn_send,
                self._func_running, self._log_queue)
        self._proc = mp.Process(target = _loop_wrapper_func,
                                args = args)
        self._proc.start()
        log.info("started a new process with pid %s", self._proc.pid)
        log.debug("wait for loop function to come up")
        t0 = time.time()
        # poll until the subprocess signals that func is running,
        # detecting early termination and the optional timeout
        while not self._func_running.value:
            if self._proc.exitcode is not None:
                exc = self._proc.exitcode
                self._proc = None
                if exc == 0:
                    log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
                    return
                else:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
                                             "see log (INFO level) for traceback information")
            time.sleep(0.1)
            if (timeout is not None) and ((time.time() - t0) > timeout):
                err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
                log.error(err_msg)
                log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
                         "or an internal error occurred before reaching the function call")
                raise LoopTimeoutError(err_msg)
        log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))

    def stop(self):
        """
        stops the process triggered by start

        Setting the shared memory boolean run to false, which should prevent
        the loop from repeating. Call __cleanup to make sure the process
        stopped. After that we could trigger start() again.
        """
        if self.is_alive():
            self._proc.terminate()
        if self._proc is not None:
            self.__cleanup()
            if self.raise_error:
                # 255 is the wrapped exitcode of sys.exit(-1) in the subprocess
                if self._proc.exitcode == 255:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
                                             "see log (INFO level) for traceback information")
        self.pipe_handler.close()
        self._proc = None

    def join(self, timeout):
        """
        calls join for the spawned process with given timeout
        """
        if self.is_alive():
            self._proc.join(timeout)

    def is_alive(self):
        # True when the subprocess exists and has not terminated yet
        if self._proc is None:
            return False
        else:
            return self._proc.is_alive()

    def is_running(self):
        # True only when the subprocess is alive AND func is currently looping
        if self.is_alive():
            return self._func_running.value
        else:
            return False

    def pause(self):
        # the subprocess keeps polling but skips calling func while paused
        if self._run.value:
            self._pause.value = True
            log.debug("process with pid %s paused", self._proc.pid)

    def resume(self):
        if self._run.value:
            self._pause.value = False
            log.debug("process with pid %s resumed", self._proc.pid)

    def getpid(self):
        """Return the pid of the spawned process (None when not started)."""
        if self._proc is not None:
            return self._proc.pid
        else:
            return None
def show_stat_base(count_value, max_count_value, prepend, speed, tet, ttg, width, **kwargs):
    """A function that formats the progress information

    This function will be called periodically for each progress that is
    monitored.  Overwrite this function in a subclass to implement a specific
    formatting of the progress information.

    :param count_value: a number holding the current state
    :param max_count_value: should be the largest number `count_value` can reach
    :param prepend: additional text for each progress
    :param speed: the speed estimation
    :param tet: the total elapsed time
    :param ttg: the time to go
    :param width: the width for the progressbar, when set to `"auto"` this
        function should try to detect the width available
    :type width: int or "auto"
    """
    # abstract: concrete formatters (e.g. show_stat_ProgressBar) replace this
    raise NotImplementedError
def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                      width, q, last_speed, prepend, show_stat_function, len_,
                                      add_args, lock, info_line, no_move_up=False):
    """
    call the static method show_stat_wrapper for each process

    Prints one line per monitored process (len_ lines), then the optional
    info_line, and finally moves the cursor back up so the next call
    overwrites the same lines in place.
    """
    # print(ESC_BOLD, end='')
    # sys.stdout.flush()
    for i in range(len_):
        _show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles,
                                    width, q[i], last_speed[i], prepend[i], show_stat_function,
                                    add_args, i, lock[i])
    # n = total number of lines printed, needed to move the cursor back up
    n = len_
    if info_line is not None:
        s = info_line.value.decode('utf-8')
        s = s.split('\n')
        n += len(s)
        for si in s:
            if width == 'auto':
                width = get_terminal_width()
            if len(si) > width:
                # truncate each info line to the available width
                si = si[:width]
            print("{0:<{1}}".format(si, width))
    if no_move_up:
        # used for the final printout on exit: leave the lines in place
        n = 0
    # this is only a hack to find the end
    # of the message in a stream
    # so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending
    print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')
    sys.stdout.flush()
class Progress(Loop):
    """
    Abstract Progress Class

    The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
    function which calculates progress information from a changing counter value.
    The formatting of these information is done by overwriting the static member
    :py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
    format a single progress bar on a single line only.

    The extension to multiple progresses is done
    automatically base on the formatting of a single line.
    """
    def __init__(self,
                 count,
                 max_count = None,
                 prepend = None,
                 width = 'auto',
                 speed_calc_cycles = 10,
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 info_line = None,
                 show_stat = None):
        """
        :param count: shared variable for holding the current state
            (use :py:func:`UnsignedIntValue` for short hand creation)
        :type count: list/single value of multiprocessing.Value
        :param max_count: shared variable for holding the final state
        :type max_count: None or list/single value of multiprocessing.Value
        :param prepend: string to put in front of each progress output
        :type prepend: None, str or list of str
        :param width: the width to use for the progress line (fixed or automatically determined)
        :type width: int or "auto"
        :param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
            (example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
            the current state and state 4 updates before where the elapsed time will roughly be 4s)
        :param interval: seconds to wait before updating the progress
        :param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
            output level
        :param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigint: "stop" or "ign"
        :param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigterm: "stop" or "ign"
        :param info_line: additional text to show below the progress (use :py:func:`StringValue`
            for short hand creation of shared strings)
        :type info_line: None or multiprocessing.Array of characters

        .. note::
            As `Progress` is derived from :py:class:`Loop` it is highly encurraged to create
            any instance of Progress with a context manager (``with`` statement).
            This ensures that the subprocess showing the progress terminats on context exit.
            Otherwise one has to make sure that at some point the stop() routine is called.

            abstract example::

                with AProgressClass(...) as p:
                    p.start()
                    # do stuff and modify counter
        """
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        # converts count to list and do type check
        try:
            for c in count:
                if not isinstance(c, Synchronized):
                    raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = True
        except TypeError:
            # count is not iterable -> single progress
            if not isinstance(count, Synchronized):
                raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = False
            count = [count]
        self.len = len(count)
        # converts max_count to list and do type check
        if max_count is not None:
            if self.is_multi:
                try:
                    for i, m in enumerate(max_count):
                        if not isinstance(m, Synchronized):
                            # plain numbers are wrapped into shared values in place
                            max_count[i] = UnsignedIntValue(m)
                except TypeError:
                    raise TypeError("'max_count' must be iterable")
            else:
                if not isinstance(max_count, Synchronized):
                    max_count = UnsignedIntValue(max_count)
                max_count = [max_count]
        else:
            max_count = [None] * self.len
        # per-progress bookkeeping (one entry per monitored counter)
        self.start_time = []
        self.speed_calc_cycles = speed_calc_cycles
        self.width = width
        self.q = []
        self.prepend = []
        self.lock = []
        self.last_count = []
        self.last_speed = []
        for i in range(self.len):
            self.q.append(myQueue())  # queue to save the last speed_calc_cycles
                                      # (time, count) information to calculate speed
            #self.q[-1].cancel_join_thread()
            self.last_count.append(UnsignedIntValue())
            self.last_speed.append(FloatValue())
            self.lock.append(mp.Lock())
            self.start_time.append(FloatValue(val=time.time()))
            if prepend is None:
                # no prepend given
                self.prepend.append('')
            else:
                if isinstance(prepend, str):
                    self.prepend.append(prepend)
                else:
                    # assume list of prepend, (needs to be a sequence)
                    self.prepend.append(prepend[i])
        self.max_count = max_count  # list of multiprocessing value type
        self.count = count          # list of multiprocessing value type
        self.interval = interval
        self.verbose = verbose
        self.show_on_exit = False
        # extra keyword arguments handed to show_stat (see subclasses)
        self.add_args = {}
        self.info_line = info_line
        self.show_stat = show_stat
        # setup loop class with func
        Loop.__init__(self,
                      func = _show_stat_wrapper_multi_Progress,
                      args = (self.count,
                              self.last_count,
                              self.start_time,
                              self.max_count,
                              self.speed_calc_cycles,
                              self.width,
                              self.q,
                              self.last_speed,
                              self.prepend,
                              show_stat,
                              self.len,
                              self.add_args,
                              self.lock,
                              self.info_line),
                      interval = interval,
                      sigint = sigint,
                      sigterm = sigterm,
                      auto_kill_on_last_resort = True)

    def __exit__(self, *exc_args):
        self.stop()

    @staticmethod
    def _calc(count,
              last_count,
              start_time,
              max_count,
              speed_calc_cycles,
              q,
              last_speed,
              lock):
        """do the pre calculations in order to get TET, speed, TTG

        :param count: count
        :param last_count: count at the last call, allows to treat the case of no progress
            between sequential calls
        :param start_time: the time when start was triggered
        :param max_count: the maximal value count
        :param speed_calc_cycles: number of (count, time) samples kept for the speed estimate
        :param q: queue holding the last speed_calc_cycles (count, time) samples
        :param last_speed: shared float caching the latest speed estimate
        :param lock: guards access to q
        :returns: tuple (count_value, max_count_value, speed, tet, ttg)
        """
        count_value = count.value
        start_time_value = start_time.value
        current_time = time.time()
        if last_count.value != count_value:
            # some progress happened
            with lock:
                # save current state (count, time) to queue
                q.put((count_value, current_time))
                # get older state from queue (or initial state)
                # to to speed estimation
                if q.qsize() > speed_calc_cycles:
                    old_count_value, old_time = q.get()
                else:
                    old_count_value, old_time = 0, start_time_value
            last_count.value = count_value
            #last_old_count.value = old_count_value
            #last_old_time.value = old_time
            speed = (count_value - old_count_value) / (current_time - old_time)
            last_speed.value = speed
        else:
            # progress has not changed since last call
            # use also old (cached) data from the queue
            #old_count_value, old_time = last_old_count.value, last_old_time.value
            speed = last_speed.value
        if (max_count is None):
            max_count_value = None
        else:
            max_count_value = max_count.value
        tet = (current_time - start_time_value)
        if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
            # time-to-go cannot be estimated
            ttg = None
        else:
            ttg = math.ceil((max_count_value - count_value) / speed)
        return count_value, max_count_value, speed, tet, ttg

    def _reset_all(self):
        """
        reset all progress information
        """
        for i in range(self.len):
            self._reset_i(i)

    def _reset_i(self, i):
        """
        reset i-th progress information
        """
        self.count[i].value=0
        log.debug("reset counter %s", i)
        self.lock[i].acquire()
        # drain the speed-estimation queue
        for x in range(self.q[i].qsize()):
            self.q[i].get()
        self.lock[i].release()
        self.start_time[i].value = time.time()

    def _show_stat(self):
        """
        convenient functions to call the static show_stat_wrapper_multi with
        the given class members
        """
        _show_stat_wrapper_multi_Progress(self.count,
                                          self.last_count,
                                          self.start_time,
                                          self.max_count,
                                          self.speed_calc_cycles,
                                          self.width,
                                          self.q,
                                          self.last_speed,
                                          self.prepend,
                                          self.show_stat,
                                          self.len,
                                          self.add_args,
                                          self.lock,
                                          self.info_line,
                                          no_move_up=True)

    def reset(self, i = None):
        """resets the progress informaion

        :param i: tell which progress to reset, if None reset all
        :type i: None, int
        """
        if i is None:
            self._reset_all()
        else:
            self._reset_i(i)

    def start(self):
        """
        start
        """
        # before printing any output to stout, we can now check this
        # variable to see if any other ProgressBar has reserved that
        # terminal.
        if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
            if not terminal.terminal_reserve(progress_obj=self):
                log.warning("tty already reserved, NOT starting the progress loop!")
                return
        super(Progress, self).start()
        self.show_on_exit = True

    def stop(self):
        """
        trigger clean up by hand, needs to be done when not using
        context management via 'with' statement

        - will terminate loop process
        - show a last progress -> see the full 100% on exit
        - releases terminal reservation
        """
        super(Progress, self).stop()
        terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
        if self.show_on_exit:
            if not isinstance(self.pipe_handler, PipeToPrint):
                # render the final state into a buffer and hand it to the pipe handler
                myout = inMemoryBuffer()
                stdout = sys.stdout
                sys.stdout = myout
                self._show_stat()
                self.pipe_handler(myout.getvalue())
                sys.stdout = stdout
            else:
                self._show_stat()
            print()
        self.show_on_exit = False
def show_stat_ProgressBar(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Render one wget/pv-like progress line for process *i*.

    With an unknown or zero maximum count only the absolute counter and the
    speed estimate are printed; otherwise a bar of '=' characters scaled to
    the available width is shown together with the time to go (TTG).
    """
    if (max_count_value is None) or (max_count_value == 0):
        # absolute progress: counter and speed only
        print("{}{}{} [{}] {}#{} ".format(terminal.ESC_NO_CHAR_ATTR,
                                          COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                          humanize_time(tet), humanize_speed(speed),
                                          terminal.ESC_BOLD + COLTHM['BAR_COL'],
                                          count_value))
        return
    if width == 'auto':
        width = get_terminal_width()
    tail = " TTG --" if ttg is None else " TTG {}".format(humanize_time(ttg))
    head = "{}{}{} [{}] ".format(terminal.ESC_NO_CHAR_ATTR,
                                 COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                 humanize_time(tet),
                                 humanize_speed(speed))
    # space for the bar = width minus the visible text minus brackets/arrow
    space = width - terminal.len_string_without_ESC(head + tail) - 3
    filled = int(space * count_value / max_count_value)
    bar = (COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * filled + ">" +
           " " * (space - filled) + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT)
    print(head + bar + tail)
class ProgressBar(Progress):
    """A progress bar similar to the one known from 'wget' or 'pv'.

    Rendering is delegated to :func:`show_stat_ProgressBar`.
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - number of characters used to show the progress bar;
        use 'auto' to determine the width from terminal information -> see _set_width
        """
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBar, **kwargs)
def show_stat_ProgressBarCounter(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print the stat line for ProgressBarCounter: the overall counter
    statistic (time since init, reset speed, number of resets) followed by
    the progress of the current task (absolute count or bar).
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # overall counter statistic, shown in front of the per-task progress
    s_c = "{}{}{} [{}] {}#{} - ".format(terminal.ESC_NO_CHAR_ATTR,
                                        COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                        humanize_time(counter_tet),
                                        humanize_speed(counter_speed.value),
                                        COLTHM['BAR_COL'],
                                        str(counter_count.value) + terminal.ESC_DEFAULT)
    if width == 'auto':
        width = get_terminal_width()
    if (max_count_value is None) or (max_count_value == 0):
        # no maximum known -> absolute count of the current task only
        s_c = "{}{} [{}] {}#{} ".format(s_c,
                                        humanize_time(tet),
                                        humanize_speed(speed),
                                        COLTHM['BAR_COL'],
                                        str(count_value) + terminal.ESC_DEFAULT)
    else:
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{} [{}] ".format(humanize_time(tet), humanize_speed(speed))
        # bar width = remaining columns after the visible text and brackets
        l = terminal.len_string_without_ESC(s1 + s3 + s_c)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        s_c = s_c + s1 + s2 + s3
    print(s_c)
class ProgressBarCounter(Progress):
    """
    Records also the time of each reset and calculates the speed
    of the resets.

    Shows the TET since init (not affected by reset),
    the speed of the resets (number of finished processes per time)
    and the number of finished processes.

    After that also show a progress of each process
        max_count > 0 and not None -> bar
        max_count == None -> absolute count statistic
        max_count == 0 -> hide process statistic at all
    """
    def __init__(self, speed_calc_cycles_counter=5, **kwargs):
        """
        :param speed_calc_cycles_counter: number of reset events kept to
            estimate the reset speed (moving window size)
        """
        Progress.__init__(self, show_stat = show_stat_ProgressBarCounter, **kwargs)
        # per-process shared state: number of resets, queue of
        # (count, time) samples and the resulting speed estimate
        self.counter_count = []
        self.counter_q = []
        self.counter_speed = []
        for i in range(self.len):
            self.counter_count.append(UnsignedIntValue(val=0))
            self.counter_q.append(myQueue())
            self.counter_speed.append(FloatValue())
        self.counter_speed_calc_cycles = speed_calc_cycles_counter
        self.init_time = time.time()
        # expose the counter state to the show_stat function via add_args
        self.add_args['counter_count'] = self.counter_count
        self.add_args['counter_speed'] = self.counter_speed
        self.add_args['init_time'] = self.init_time

    def get_counter_count(self, i=0):
        """Return how often progress *i* has been reset (finished tasks)."""
        return self.counter_count[i].value

    def _reset_i(self, i):
        """Count the reset event, update the reset-speed estimate, then do
        the regular reset of progress *i* (Progress._reset_i)."""
        c = self.counter_count[i]
        with c.get_lock():
            c.value += 1
            count_value = c.value
        q = self.counter_q[i]
        current_time = time.time()
        q.put((count_value, current_time))
        # moving window: compare with the sample counter_speed_calc_cycles
        # resets ago, or with the init state while the window is not full
        if q.qsize() > self.counter_speed_calc_cycles:
            old_count_value, old_time = q.get()
        else:
            old_count_value, old_time = 0, self.init_time
        speed = (count_value - old_count_value) / (current_time - old_time)
        self.counter_speed[i].value = speed
        Progress._reset_i(self, i)
def get_d(s1, s2, width, lp, lps):
    """Compute the left/right padding so s1 and s2 fill the available width.

    Returns (s1, s2, d1, d2) where d1/d2 split the free space, or None
    (implicitly) when the parts do not fit -- callers use None as the
    'try a shorter layout' sentinel.
    """
    visible = (len(terminal.remove_ESC_SEQ_from_string(s1)) +
               len(terminal.remove_ESC_SEQ_from_string(s2)))
    free = width - visible - 2 - lp - lps
    if free >= 0:
        left = free // 2
        return s1, s2, left, free - left
# The following *_stat variants render the "TET/speed/TTG ... ETA/ORT" parts
# with decreasing verbosity.  _stat() tries them in this order until one fits
# the available width (get_d returns None when the parts do not fit).
def full_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Most verbose layout: TET, speed and TTG left; ETA and ORT right."""
    s1 = "TET {} {:>12} TTG {}".format(tet, speed, ttg)
    s2 = "ETA {} ORT {}".format(eta, ort)
    return get_d(s1, s2, width, lp, lps)
def full_minor_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Same content as full_stat with single-letter keywords (E/G/A/O)."""
    s1 = "E {} {:>12} G {}".format(tet, speed, ttg)
    s2 = "A {} O {}".format(eta, ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_1_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Drops the ETA, keeps elapsed time, speed, TTG and ORT."""
    s1 = "E {} {:>12} G {}".format(tet, speed, ttg)
    s2 = "O {}".format(ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_2_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Drops the speed as well."""
    s1 = "E {} G {}".format(tet, ttg)
    s2 = "O {}".format(ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_3_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Elapsed time and TTG only."""
    s1 = "E {} G {}".format(tet, ttg)
    s2 = ''
    return get_d(s1, s2, width, lp, lps)
def reduced_4_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Empty layout -- bar and percentage only."""
    s1 = ''
    s2 = ''
    return get_d(s1, s2, width, lp, lps)
def kw_bold(s, ch_after):
    """Embolden the stat keywords (TET, TTG, ...) in *s* when they are
    immediately followed by one of the characters in *ch_after*."""
    keywords = ('TET', 'TTG', 'ETA', 'ORT', 'E', 'G', 'A', 'O')
    for keyword in keywords:
        for suffix in ch_after:
            s = s.replace(keyword + suffix,
                          terminal.ESC_BOLD + keyword + terminal.ESC_RESET_BOLD + suffix)
    return s
def _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Build (without printing) the fancy, htop-like stat string.

    Chooses the most verbose *_stat layout that fits into *width* and colors
    the part left of the current progress position as the filled bar.
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        stat = "{}{} [{}] {}#{} ".format(COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress
        p = count_value / max_count_value
        if p < 1:
            ps = " {:.1%} ".format(p)
        else:
            ps = " {:.0%} ".format(p)
        if ttg is None:
            eta = '--'
            ort = None
        else:
            # ETA: wall-clock estimated time of arrival; ORT: overall run time
            eta = datetime.datetime.fromtimestamp(time.time() + ttg).strftime("%Y%m%d_%H:%M:%S")
            ort = tet + ttg
        tet = humanize_time(tet)
        speed = '[' + humanize_speed(speed) + ']'
        ttg = humanize_time(ttg)
        ort = humanize_time(ort)
        repl_ch = '-'
        lp = len(prepend)
        args = p, tet, speed, ttg, eta, ort, repl_ch, width, lp, len(ps)
        # degrade layouts from most to least verbose until one fits
        res = full_stat(*args)
        if res is None:
            res = full_minor_stat(*args)
            if res is None:
                res = reduced_1_stat(*args)
                if res is None:
                    res = reduced_2_stat(*args)
                    if res is None:
                        res = reduced_3_stat(*args)
                        if res is None:
                            res = reduced_4_stat(*args)
        if res is not None:
            s1, s2, d1, d2 = res
            s = s1 + ' ' * d1 + ps + ' ' * d2 + s2
            # idx_p: column up to which the bar is considered 'filled'
            idx_p = math.ceil( (width-lp-2)*p)
            s_before = s[:idx_p].replace(' ', repl_ch)
            if (len(s_before) > 0) and (s_before[-1] == repl_ch):
                s_before = s_before[:-1] + '>'
            s_after = s[idx_p:]
            s_before = kw_bold(s_before, ch_after=[repl_ch, '>'])
            s_after = kw_bold(s_after, ch_after=[' '])
            stat = (COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT +
                    COLTHM['BAR_COL'] + terminal.ESC_BOLD + '[' + terminal.ESC_RESET_BOLD + s_before + terminal.ESC_DEFAULT +
                    s_after + terminal.ESC_BOLD + COLTHM['BAR_COL'] + ']' + terminal.ESC_NO_CHAR_ATTR)
        else:
            # not even the empty layout fits -> show the percentage only
            ps = ps.strip()
            if p == 1:
                ps = ' ' + ps
            stat = prepend + ps
    return stat
def show_stat_ProgressBarFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print the fancy (htop-like) stat line for process *i*."""
    print(_stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs))
class ProgressBarFancy(Progress):
    """A progress bar whose coloring indicates the current status,
    similar to the bars known from 'htop'.

    Rendering is delegated to :func:`show_stat_ProgressBarFancy`.
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - number of characters used to show the progress bar;
        use 'auto' to determine the width from terminal information -> see _set_width
        """
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBarFancy, **kwargs)
def show_stat_ProgressBarCounterFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print the counter statistic followed by the fancy per-task stat.

    As implemented here: max_count_value None -> counter statistic only,
    == 0 -> counter plus absolute task count, otherwise the fancy bar
    rendered by _stat into the remaining width.
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # overall counter statistic (time since init, reset speed, reset count)
    s_c = "{}{}{} [{}] {}#{}".format(terminal.ESC_NO_CHAR_ATTR,
                                     COLTHM['PRE_COL']+prepend+terminal.ESC_DEFAULT,
                                     humanize_time(counter_tet),
                                     humanize_speed(counter_speed.value),
                                     COLTHM['BAR_COL'],
                                     str(counter_count.value) + terminal.ESC_DEFAULT)
    if max_count_value is not None:
        if width == 'auto':
            width = get_terminal_width()
        s_c += ' - '
        if max_count_value == 0:
            s_c = "{}{} [{}] {}#{} ".format(s_c, humanize_time(tet), humanize_speed(speed),
                                            COLTHM['BAR_COL'], str(count_value)+terminal.ESC_DEFAULT)
        else:
            # render the fancy bar into the columns not used by the counter part
            _width = width - terminal.len_string_without_ESC(s_c)
            s_c += _stat(count_value, max_count_value, '', speed, tet, ttg, _width, i)
    print(s_c)
class ProgressBarCounterFancy(ProgressBarCounter):
    """ProgressBarCounter variant rendered with the fancy (htop-like) stat line."""
    def __init__(self, *args, **kwargs):
        ProgressBarCounter.__init__(self, *args, **kwargs)
        # override the show_stat function installed by the parent constructor
        self.show_stat = show_stat_ProgressBarCounterFancy
class SIG_handler_Loop(object):
    """class to setup signal handling for the Loop class

    Note: each subprocess receives the default signal handling from its parent.
    If the signal function from the module signal is invoked within the subprocess
    this default behavior can be overwritten.

    The init function receives a shared memory boolean object which will be set
    false in case of signal detection. Since the Loop class will check the state
    of this boolean object before each repetition, the loop will stop when
    a signal was received.
    """
    def __init__(self, sigint, sigterm, log, prefix):
        """
        :param sigint: handler spec for SIGINT, 'ign' or 'stop'
        :param sigterm: handler spec for SIGTERM, 'ign' or 'stop'
        :param log: logger used for reporting
        :param prefix: identifying prefix used in log messages
        """
        self.set_signal(signal.SIGINT, sigint)
        self.set_signal(signal.SIGTERM, sigterm)
        self.prefix = prefix
        self.log = log
        self.log.info("setup signal handler for loop (SIGINT:%s, SIGTERM:%s)", sigint, sigterm)

    def set_signal(self, sig, handler_str):
        """Install the handler described by *handler_str* ('ign' or 'stop') for *sig*."""
        if handler_str == 'ign':
            signal.signal(sig, self._ignore_signal)
        elif handler_str == 'stop':
            signal.signal(sig, self._stop_on_signal)
        else:
            # bug fix: the message was never %-formatted (the argument was
            # passed to TypeError as a second arg) and 'handler' was misspelled
            raise TypeError("unknown signal handler string '%s'" % handler_str)

    def _ignore_signal(self, signal, frame):
        self.log.debug("ignore received sig %s", signal_dict[signal])

    def _stop_on_signal(self, signal, frame):
        self.log.info("received sig %s -> raise InterruptedError", signal_dict[signal])
        raise LoopInterruptError()
def FloatValue(val=0.):
    """Create a process-shared float (`multiprocessing.Value`, C double)
    initialised to *val* and protected by a lock."""
    return mp.Value('d', val, lock=True)
def UnsignedIntValue(val=0):
    """Create a process-shared unsigned int (`multiprocessing.Value`, C
    unsigned int) initialised to *val* and protected by a lock."""
    return mp.Value('I', val, lock=True)
def StringValue(num_of_bytes):
    """returns a `multiprocessing.Array` of type `character` and length `num_of_bytes`"""
    # _jm_compatible_bytearray is the identity on Python 2 and `bytearray` on
    # Python 3, yielding a zero-initialised buffer of the requested size
    return mp.Array('c', _jm_compatible_bytearray(num_of_bytes), lock=True)
def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort = False):
    """Try to end *proc* gracefully, escalating step by step.

    1) join(timeout); 2) terminate() (SIGTERM) and join(3*timeout);
    3) SIGKILL, either automatically (auto_kill_on_last_resort=True) or
    after asking the user interactively.

    :param proc: the process object to shut down (multiprocessing.Process-like)
    :param prefix: identifier prefix -- currently unused here, kept for API symmetry
    :param timeout: seconds to wait for a normal join
    :param auto_kill_on_last_resort: send SIGKILL without asking if True
    :return: True when the process stopped, False when the user chose 'ignore'
    """
    proc.join(timeout)
    if not proc.is_alive():
        log.debug("termination of process (pid %s) within timeout of %s SUCCEEDED!", proc.pid, humanize_time(timeout))
        return True
    # process still runs -> send SIGTERM -> see what happens
    log.warning("termination of process (pid %s) within given timeout of %s FAILED!", proc.pid, humanize_time(timeout))
    proc.terminate()
    new_timeout = 3*timeout
    log.debug("wait for termination (timeout %s)", humanize_time(new_timeout))
    proc.join(new_timeout)
    if not proc.is_alive():
        log.info("termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!", proc.pid, humanize_time(new_timeout))
        return True
    log.warning("termination of process (pid %s) via SIGTERM with timeout of %s FAILED!", proc.pid, humanize_time(new_timeout))
    log.debug("auto_kill_on_last_resort is %s", auto_kill_on_last_resort)
    # 'k' means SIGKILL on the next round; any other answer means SIGTERM
    answer = 'k' if auto_kill_on_last_resort else '_'
    while True:
        log.debug("answer string is %s", answer)
        if answer == 'k':
            log.warning("send SIGKILL to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGKILL)
            time.sleep(0.1)
        else:
            log.info("send SIGTERM to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGTERM)
            time.sleep(0.1)
        if not proc.is_alive():
            log.info("process (pid %s) has stopped running!", proc.pid)
            return True
        else:
            log.warning("process (pid %s) is still running!", proc.pid)
            print("the process (pid {}) seems still running".format(proc.pid))
            try:
                answer = input("press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore")
            except Exception as e:
                # no interactive stdin available -> fall back to SIGKILL
                log.error("could not ask for sending SIGKILL due to {}".format(type(e)))
                log.info(traceback.format_exc())
                log.warning("send SIGKILL now")
                answer = 'k'
            if answer == 'ignore':
                log.warning("ignore process %s", proc.pid)
                return False
            elif answer != 'k':
                answer = ''
def getCountKwargs(func):
    """Return the [count, max_count] argument-name pair accepted by *func*.

    The recognised name pairs are defined in `progress.validCountKwargs`.
    Returns None when *func* declares no such pair (or has no inspectable
    __code__ attribute).
    """
    if not hasattr(func, "__code__"):
        return None
    code = func.__code__
    named_args = code.co_varnames[:code.co_argcount]
    for pair in validCountKwargs:
        if pair[0] in named_args and pair[1] in named_args:
            return pair
    return None
def humanize_speed(c_per_sec):
    """Convert a speed in counts per second to counts per [s, min, h, d].

    The time base is scaled up until the numeric value reaches at least
    one, i.e. the smallest value greater than zero is chosen.
    """
    conversion = [60, 60, 24]            # s -> min -> h -> d
    unit_names = ['c/s', 'c/min', 'c/h', 'c/d']
    value = c_per_sec
    idx = 0
    if value > 0:
        while value < 1 and idx < len(conversion):
            value *= conversion[idx]
            idx += 1
    return "{:.1f}{}".format(value, unit_names[idx])
def humanize_time(secs):
    """Render *secs* as ms, s or hh:mm:ss depending on magnitude; '--' for None."""
    if secs is None:
        return '--'
    if secs < 1:
        return "{:.2f}ms".format(secs * 1000)
    if secs < 10:
        return "{:.2f}s".format(secs)
    mins, secs = divmod(secs, 60)
    hours, mins = divmod(mins, 60)
    return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(mins), int(secs))
def codecov_subprocess_check():
    """Marker function; only ever executed from a spawned subprocess (coverage check)."""
    print("this line will be only called from a subprocess")
# alias used wherever a queue for progress samples is created
myQueue = mp.Queue
# a mapping from the numeric values of the signals to their names used in the
# standard python module signals
signal_dict = {}
for s in dir(signal):
    if s.startswith('SIG') and s[3] != '_':
        n = getattr(signal, s)
        if n in signal_dict:
            # several names may map to the same number (e.g. SIGABRT/SIGIOT)
            signal_dict[n] += ('/'+s)
        else:
            signal_dict[n] = s
# color themes: PRE_COL colors the prepend text, BAR_COL the bar itself;
# ADD_LNS_UP presumably adds extra move-up lines -- used elsewhere, TODO confirm
_colthm_term_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_GREEN, 'ADD_LNS_UP':0}
_colthm_ipyt_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_BLUE, 'ADD_LNS_UP':0}
_colthm_wincmd_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_GREEN, 'ADD_LNS_UP':1}
color_themes = {'term_default': _colthm_term_default,
                'ipyt_default': _colthm_ipyt_default,
                'wincmd_default': _colthm_wincmd_default}
# pick a platform-appropriate default theme
if platform.system() == 'Windows':
    COLTHM = _colthm_wincmd_default
else:
    COLTHM = _colthm_term_default
def choose_color_theme(name):
    """Select the module-wide color theme *name* from `color_themes`;
    warn (and keep the current theme) when the name is unknown."""
    global COLTHM
    try:
        COLTHM = color_themes[name]
    except KeyError:
        warnings.warn("no such color theme {}".format(name))
# keyword arguments that define counting in wrapped functions;
# each pair is (current-count argument name, maximum-count argument name),
# matched against function signatures by getCountKwargs()
validCountKwargs = [
    [ "count", "count_max"],
    [ "count", "max_count"],
    [ "c", "m"],
    [ "jmc", "jmm"],
]
|
cimatosa/progression | progression/progress.py | _show_stat_wrapper_multi_Progress | python | def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
width, q, last_speed, prepend, show_stat_function, len_,
add_args, lock, info_line, no_move_up=False):
# print(ESC_BOLD, end='')
# sys.stdout.flush()
for i in range(len_):
_show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles,
width, q[i], last_speed[i], prepend[i], show_stat_function,
add_args, i, lock[i])
n = len_
if info_line is not None:
s = info_line.value.decode('utf-8')
s = s.split('\n')
n += len(s)
for si in s:
if width == 'auto':
width = get_terminal_width()
if len(si) > width:
si = si[:width]
print("{0:<{1}}".format(si, width))
if no_move_up:
n = 0
# this is only a hack to find the end
# of the message in a stream
# so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending
print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')
sys.stdout.flush() | call the static method show_stat_wrapper for each process | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L608-L638 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Progression module
------------------
This module provides the (so far) four variants to display progress information:
* :py:class:`.ProgressBar`
This class monitors one or multiple processes showing the total elapsed time (TET), the current speed
    estimated from the most recent updates, a colored bar showing the progress and an
estimate for the remaining time, also called time to go (TTG).
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 5.83s [7.2c/s] <span style="color:#00ff00"><b>[=====================> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounter`
    If a single process is intended to do several sequential tasks, the :py:class:`.ProgressBarCounter` class can keep track of the number
of accomplished tasks on top of monitoring the individual task just like :py:class:`.ProgressBar` does.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre><span style="color:#00ff00"><b> [</b><b>TET</b>-5.83s-----[7.2c/s]-<b>TTG</b>-8.05s-></span> 42.0% <b>ETA</b> 20161011_16:52:52 <b>ORT</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
* :py:class:`.ProgressBarFancy`
This class intends to be a replacement for :py:class:`.ProgressBar` with slightly more information and
better handling of small terminal widths.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - 5.83s [7.2c/s] <span style="color:#00ff00"><b>[===========> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounterFancy`
Just as :py:class:`.ProgressBarFancy` this replaces :py:class:`.ProgressBarCounter`.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - <span style="color:#800000"></span><span style="color:#00ff00"><b>[</b><b>E</b>-5.83s-----[7.2c/s]-<b>G</b>-8</span>.05s 42.0% <b>O</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
.. autoclass:: Progress
:members:
:inherited-members:
.. autoclass:: ProgressBar
:members:
.. autoclass:: ProgressBarCounter
:members:
.. autoclass:: ProgressBarFancy
:members:
.. autoclass:: ProgressBarCounterFancy
:members:
.. autofunction:: UnsignedIntValue
.. autofunction:: FloatValue
.. autofunction:: StringValue
"""
from __future__ import division, print_function
import datetime
import io
import logging
from logging.handlers import QueueHandler, QueueListener
import math
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
import os
import sys
import signal
import subprocess as sp
import threading
import time
import traceback
import warnings
from . import terminal
import platform
_IPYTHON = True
try:
import ipywidgets
except ImportError:
_IPYTHON = False
warnings.warn("could not load ipywidgets (IPython HTML output will not work)", category=ImportWarning)
except DeprecationWarning:
pass
try:
from IPython.display import display
except ImportError:
_IPYTHON = False
warnings.warn("could not load IPython (IPython HTML output will not work)", category=ImportWarning)
# Magic conversion from 3 to 2
if sys.version_info[0] == 2:
    # Python 2: provide the Python-3 names used throughout this module
    ProcessLookupError = OSError
    inMemoryBuffer = io.BytesIO
    old_math_ceil = math.ceil
    def my_int_ceil(f):
        # py2 math.ceil returns float; patch it to return int as in py3
        return int(old_math_ceil(f))
    math.ceil = my_int_ceil
    _jm_compatible_bytearray = lambda x: x
    class TimeoutError(Exception):
        pass
elif sys.version_info[0] == 3:
    inMemoryBuffer = io.StringIO
    _jm_compatible_bytearray = bytearray
class MultiLineFormatter(logging.Formatter):
    """Formatter that indents continuation lines of multiline messages.

    Every line after the first is padded with spaces so it aligns with the
    start of the message, i.e. right after the log header:

        <HEAD> msg_line1
               msg_line2
               ...
    """
    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        # everything before the raw message is the header; pad each newline
        # with that many spaces so continuation lines line up
        header_width = len(formatted.split(record.message)[0])
        return formatted.replace('\n', '\n' + ' ' * header_width)
# default stderr handler kept for reference but disabled -- the module only
# configures a formatter and a named logger, leaving handlers to the user
# def_handl = logging.StreamHandler(stream = sys.stderr)  # the default handler simply uses stderr
# def_handl.setLevel(logging.DEBUG)                       # ... listens to all messages
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
# def_handl.setFormatter(fmt)                             # ... and pads multiline messages
log = logging.getLogger(__name__)                         # creates the default log for this module
# log.addHandler(def_handl)
class LoopExceptionError(RuntimeError):
    """Raised when the function executed by a Loop terminated with an error."""


class LoopInterruptError(Exception):
    """Raised inside the loop subprocess to abort on SIGINT/SIGTERM ('stop' handler)."""
class StdoutPipe(object):
    """File-like replacement for a stream such as sys.stdout that forwards
    all incoming data with the send method of a connection.

    Typical use: in a subprocess set ``sys.stdout = StdoutPipe(conn_send)``
    so the parent end of a multiprocessing Pipe receives everything the
    subprocess prints, chunk by chunk.
    """
    def __init__(self, conn):
        self.conn = conn

    def flush(self):
        # nothing is buffered locally, but file-like objects need flush()
        pass

    def write(self, b):
        self.conn.send(b)
class PipeToPrint(object):
    """Default pipe handler: echoes each incoming chunk to stdout as-is."""
    def __call__(self, b):
        print(b, end='')

    def close(self):
        # nothing to release for plain printing
        pass
class PipeFromProgressToIPythonHTMLWidget(object):
    """Pipe handler that renders progress output into an IPython HTML widget.

    Incoming chunks are buffered; when a chunk ends with the magic terminal
    ending sequence, the accumulated ANSI escape sequences are converted to
    HTML and pushed into the widget, then the buffer is cleared.
    """
    def __init__(self):
        self.htmlWidget = ipywidgets.widgets.HTML()
        display(self.htmlWidget)
        self._buff = ""

    def __call__(self, b):
        self._buff += b
        # a complete frame is delimited by the magic ending sequence
        if b.endswith(terminal.ESC_MY_MAGIC_ENDING):
            buff = terminal.ESC_SEQ_to_HTML(self._buff)
            self.htmlWidget.value = '<style>.widget-html{font-family:monospace}</style><pre>'+buff+'</pre>'
            self._buff = ""

    def close(self):
        self.htmlWidget.close()
# module-wide pipe handler class for progress output; switched via choose_pipe_handler()
PipeHandler = PipeToPrint
def choose_pipe_handler(kind = 'print', color_theme = None):
    """Select how progress output is delivered and pick a matching theme.

    :param kind: 'print' for plain terminal output, 'ipythonhtml' for an
        IPython HTML widget (requires IPython/ipywidgets to be importable)
    :param color_theme: explicit theme name; a kind-specific default is
        used when None
    :raises ValueError: for an unknown *kind*
    """
    global PipeHandler
    if kind == 'print':
        PipeHandler = PipeToPrint
        if color_theme is None:
            choose_color_theme('term_default')
        else:
            choose_color_theme(color_theme)
    elif kind == 'ipythonhtml':
        if _IPYTHON:
            PipeHandler = PipeFromProgressToIPythonHTMLWidget
            if color_theme is None:
                choose_color_theme('ipyt_default')
            else:
                choose_color_theme(color_theme)
        else:
            warnings.warn("can not choose ipythonHTML (IPython and/or ipywidgets were not loaded)")
    else:
        # bug fix: the '{}' placeholder was never filled in -- .format(kind)
        # was missing, so the message always showed the literal braces
        raise ValueError("unknown kind '{}' for pipe_handler, use one out of ('print', 'ipythonhtml')".format(kind))
def get_terminal_width():
    # the usable width depends on the active pipe handler: probe the real
    # terminal for plain printing, use a fixed 80 columns for the HTML widget
    if PipeHandler == PipeToPrint:
        return terminal.get_terminal_width()
    elif PipeHandler == PipeFromProgressToIPythonHTMLWidget:
        return 80
    else:
        raise NotImplementedError
def get_identifier(name=None, pid=None, bold=True):
    """Build an identifier string "<name>_<pid>" ("PID_<pid>" without a name).

    :param name: optional label in front of the pid
    :param pid: process id to show; defaults to os.getpid()
    :param bold: wrap the identifier in terminal bold escape sequences
    """
    if pid is None:
        pid = os.getpid()
    esc_bold, esc_reset = (terminal.ESC_BOLD, terminal.ESC_NO_CHAR_ATTR) if bold else ("", "")
    label = "PID" if name is None else name
    return "{}{}_{}{}".format(esc_bold, label, pid, esc_reset)
def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
                       logging_level, conn_send, func_running, log_queue):
    """
    Body of the loop subprocess: calls func(*args) every *interval* seconds
    until it returns True, an error occurs or shared_mem_run is unset.

    Runs as a separate process; stdout is redirected through *conn_send* and
    log records are forwarded to the parent via *log_queue*.  The signal
    handling specified by *sigint*/*sigterm* ('ign' or 'stop') is installed
    before the loop starts.  Exits with code -1 when func raised.
    """
    prefix = get_identifier(name) + ' '
    global log
    log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
    log.setLevel(logging_level)
    log.addHandler(QueueHandler(log_queue))
    # forward everything printed in this subprocess to the parent
    sys.stdout = StdoutPipe(conn_send)
    log.debug("enter wrapper_func")
    SIG_handler_Loop(sigint, sigterm, log, prefix)
    # tell the parent that the loop function is up
    func_running.value = True
    error = False
    while shared_mem_run.value:
        try:
            # in pause mode, simply sleep
            if shared_mem_pause.value:
                quit_loop = False
            else:
                # if not pause mode -> call func and see what happens
                try:
                    quit_loop = func(*args)
                except LoopInterruptError:
                    raise
                except Exception as e:
                    log.error("error %s occurred in loop calling 'func(*args)'", type(e))
                    log.info("show traceback.print_exc()\n%s", traceback.format_exc())
                    error = True
                    break
            if quit_loop is True:
                log.debug("loop stooped because func returned True")
                break
            time.sleep(interval)
        except LoopInterruptError:
            # raised by the 'stop' signal handler
            log.debug("quit wrapper_func due to InterruptedError")
            break
    func_running.value = False
    if error:
        sys.exit(-1)
    else:
        log.debug("wrapper_func terminates gracefully")
    # gets rid of the following warnings
    #   Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
    #   ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
    try:
        if mp.get_start_method() == "spawn":
            sys.stdin.close()
    except AttributeError:
        # mp.get_start_method does not exist on older Pythons
        pass
class LoopTimeoutError(TimeoutError):
    """Raised by Loop.start() when the loop function does not come up in time."""
class Loop(object):
    """
    class to run a function periodically in a separate process.

    In case the called function returns True, the loop will stop.
    Otherwise a time interval given by interval will be slept before
    another execution is triggered.

    The shared memory variable _run (accessible via the class property run)
    also determines if the function is executed another time. If set to False
    the execution stops.

    For safe cleanup (and in order to catch any Errors)
    it is advisable to instantiate this class
    using 'with' statement as follows:

        with Loop(**kwargs) as my_loop:
            my_loop.start()
            ...

    this will guarantee you that the spawned loop process is
    down when exiting the 'with' scope.

    The only circumstance where the process is still running is
    when you set auto_kill_on_last_resort to False and answer the
    question to send SIGKILL with no.
    """
    def __init__(self,
                 func,
                 args = (),
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 auto_kill_on_last_resort = False,
                 raise_error = True):
        """
        func [callable] - function to be called periodically

        args [tuple] - arguments passed to func when calling

        interval [pos number] - time to "sleep" between each call

        verbose - DEPRECATED, only kept for compatibility, use global log.level to
        specify verbosity

        sigint [string] - signal handler string to set SIGINT behavior (see below)

        sigterm [string] - signal handler string to set SIGTERM behavior (see below)

        auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
        to loop process in case normal stop and SIGTERM failed. If set True, send SIGKILL
        without asking.

        raise_error [bool] - raise LoopExceptionError from stop() when the loop
        subprocess exited with a non-zero code

        the signal handler string may be one of the following
            ign: ignore the incoming signal
            stop: raise InterruptedError which is caught silently.
        """
        self._proc = None
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        self.func = func
        self.args = args
        self.interval = interval
        assert self.interval >= 0
        # shared flags observed by the loop subprocess
        self._run = mp.Value('b', False)
        self._pause = mp.Value('b', False)
        self._func_running = mp.Value('b', False)
        self._sigint = sigint
        self._sigterm = sigterm
        self._auto_kill_on_last_resort = auto_kill_on_last_resort
        log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
        self._monitor_thread = None
        self.pipe_handler = PipeHandler()
        self.raise_error = raise_error

    def __enter__(self):
        return self

    def __exit__(self, *exc_args):
        if self.is_alive():
            log.debug("loop is still running on context exit")
        else:
            log.debug("loop has stopped on context exit")
        self.stop()

    def __cleanup(self):
        """
        Wait at most twice as long as the given repetition interval
        for the _wrapper_function to terminate.

        If after that time the _wrapper_function has not terminated,
        send SIGTERM to the process.

        Wait at most five times as long as the given repetition interval
        for the _wrapper_function to terminate.

        If the process is still running, send SIGKILL automatically if
        auto_kill_on_last_resort was set True or ask the
        user to confirm sending SIGKILL
        """
        # set run to False and wait some time -> see what happens
        self._run.value = False
        if check_process_termination(proc                     = self._proc,
                                     timeout                  = 2*self.interval,
                                     prefix                   = '',
                                     auto_kill_on_last_resort = self._auto_kill_on_last_resort):
            log.debug("cleanup successful")
        else:
            raise RuntimeError("cleanup FAILED!")
        # shut down the stdout pipe and the log forwarding
        try:
            self.conn_send.close()
            self._log_queue_listener.stop()
        except OSError:
            pass
        log.debug("wait for monitor thread to join")
        self._monitor_thread.join()
        log.debug("monitor thread to joined")
        self._func_running.value = False

    def _monitor_stdout_pipe(self):
        # daemon thread: forward everything the subprocess prints (received
        # through the pipe) to the configured pipe handler until EOF
        while True:
            try:
                b = self.conn_recv.recv()
                self.pipe_handler(b)
            except EOFError:
                break

    def start(self, timeout=None):
        """
        uses multiprocess Process to call _wrapper_func in subprocess

        :param timeout: seconds to wait for the loop function to come up;
            raises LoopTimeoutError when exceeded
        """
        if self.is_alive():
            log.warning("a process with pid %s is already running", self._proc.pid)
            return
        self._run.value = True
        self._func_running.value = False
        name = self.__class__.__name__
        # pipe carrying the subprocess' stdout to this process
        self.conn_recv, self.conn_send = mp.Pipe(False)
        self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
        self._monitor_thread.daemon=True
        self._monitor_thread.start()
        log.debug("started monitor thread")
        # forward log records from the subprocess to this process' handlers
        self._log_queue = mp.Queue()
        self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
        self._log_queue_listener.start()
        args = (self.func, self.args, self._run, self._pause, self.interval,
                self._sigint, self._sigterm, name, log.level, self.conn_send,
                self._func_running, self._log_queue)
        self._proc = mp.Process(target = _loop_wrapper_func,
                                args   = args)
        self._proc.start()
        log.info("started a new process with pid %s", self._proc.pid)
        log.debug("wait for loop function to come up")
        t0 = time.time()
        # poll until the subprocess sets func_running (or fails / times out)
        while not self._func_running.value:
            if self._proc.exitcode is not None:
                exc = self._proc.exitcode
                self._proc = None
                if exc == 0:
                    log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
                    return
                else:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
                                             "see log (INFO level) for traceback information")
            time.sleep(0.1)
            if (timeout is not None) and ((time.time() - t0) > timeout):
                err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
                log.error(err_msg)
                log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
                         "or an internal error occurred before reaching the function call")
                raise LoopTimeoutError(err_msg)
        log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))

    def stop(self):
        """
        stops the process triggered by start

        Setting the shared memory boolean run to false, which should prevent
        the loop from repeating. Call __cleanup to make sure the process
        stopped. After that we could trigger start() again.
        """
        if self.is_alive():
            self._proc.terminate()
        if self._proc is not None:
            self.__cleanup()
            if self.raise_error:
                # 255 is the unsigned representation of exit(-1) from the wrapper
                if self._proc.exitcode == 255:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
                                             "see log (INFO level) for traceback information")
            self.pipe_handler.close()
            self._proc = None

    def join(self, timeout):
        """
        calls join for the spawned process with given timeout
        """
        if self.is_alive():
            self._proc.join(timeout)

    def is_alive(self):
        # True while the subprocess object exists and has not terminated
        if self._proc is None:
            return False
        else:
            return self._proc.is_alive()

    def is_running(self):
        # True only while the wrapped function loop is actually executing
        if self.is_alive():
            return self._func_running.value
        else:
            return False

    def pause(self):
        # suspend calling func (the subprocess keeps sleeping each interval)
        if self._run.value:
            self._pause.value = True
            log.debug("process with pid %s paused", self._proc.pid)

    def resume(self):
        if self._run.value:
            self._pause.value = False
            log.debug("process with pid %s resumed", self._proc.pid)

    def getpid(self):
        """Return the pid of the loop subprocess, or None when not started."""
        if self._proc is not None:
            return self._proc.pid
        else:
            return None
def show_stat_base(count_value, max_count_value, prepend, speed, tet, ttg, width, **kwargs):
    """Hook that formats one line of progress information.

    Called periodically for each monitored progress; concrete
    implementations must override it -- this base version only signals
    that no formatting was provided.

    :param count_value: a number holding the current state
    :param max_count_value: should be the largest number `count_value` can reach
    :param prepend: additional text for each progress
    :param speed: the speed estimation
    :param tet: the total elapsed time
    :param ttg: the time to go
    :param width: the width for the progressbar; when ``"auto"`` the
        implementation should try to detect the available width
    :type width: int or "auto"
    """
    raise NotImplementedError
def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                width, q, last_speed, prepend, show_stat_function, add_args,
                                i, lock):
    """Compute the progress figures for one counter and hand them to the formatter.

    Delegates the TET/speed/TTG calculation to ``Progress._calc`` and then
    calls ``show_stat_function`` (one of the show_stat_* variants) with the
    results plus the extra keyword arguments in ``add_args``.
    """
    # note the deliberate trailing comma: _calc returns a 5-tuple
    count_value, max_count_value, speed, tet, ttg, = Progress._calc(count,
                                                                    last_count,
                                                                    start_time,
                                                                    max_count,
                                                                    speed_calc_cycles,
                                                                    q,
                                                                    last_speed,
                                                                    lock)
    return show_stat_function(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **add_args)
def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                      width, q, last_speed, prepend, show_stat_function, len_,
                                      add_args, lock, info_line, no_move_up=False):
    """Print one formatted status line per monitored counter, plus an optional info line.

    After printing, the cursor is moved back up ``n`` lines (unless
    ``no_move_up``) so the next invocation overwrites the same region of the
    terminal; the magic ending sequence marks the end of one update in the
    output stream.
    """
    # print(ESC_BOLD, end='')
    # sys.stdout.flush()
    for i in range(len_):
        _show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles,
                                    width, q[i], last_speed[i], prepend[i], show_stat_function,
                                    add_args, i, lock[i])
    n = len_
    if info_line is not None:
        # info_line is a shared character array -> decode and print line by
        # line, truncated to the terminal width
        s = info_line.value.decode('utf-8')
        s = s.split('\n')
        n += len(s)
        for si in s:
            if width == 'auto':
                width = get_terminal_width()
            if len(si) > width:
                si = si[:width]
            print("{0:<{1}}".format(si, width))
    if no_move_up:
        n = 0
        # this is only a hack to find the end
        # of the message in a stream
        # so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending
    print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')
    sys.stdout.flush()
class Progress(Loop):
    """
    Abstract Progress Class

    The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
    function which calculates progress information from a changing counter value.
    The formatting of this information is done by overwriting the static member
    :py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
    format a single progress bar on a single line only.
    The extension to multiple progresses is done
    automatically based on the formatting of a single line.
    """
    def __init__(self,
                 count,
                 max_count=None,
                 prepend=None,
                 width='auto',
                 speed_calc_cycles=10,
                 interval=1,
                 verbose=None,
                 sigint='stop',
                 sigterm='stop',
                 info_line=None,
                 show_stat=None):
        """
        :param count: shared variable for holding the current state
            (use :py:func:`UnsignedIntValue` for short hand creation)
        :type count: list/single value of multiprocessing.Value
        :param max_count: shared variable for holding the final state
        :type max_count: None or list/single value of multiprocessing.Value
        :param prepend: string to put in front of each progress output
        :type prepend: None, str or list of str
        :param width: the width to use for the progress line (fixed or automatically determined)
        :type width: int or "auto"
        :param speed_calc_cycles: number of updates (cycles) to use for estimating the speed
            (example: ``speed_calc_cycles = 4`` and ``interval = 1`` means that the speed is estimated from
            the current state and the state 4 updates before, where the elapsed time will roughly be 4s)
        :param interval: seconds to wait before updating the progress
        :param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
            output level
        :param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigint: "stop" or "ign"
        :param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigterm: "stop" or "ign"
        :param info_line: additional text to show below the progress (use :py:func:`StringValue`
            for short hand creation of shared strings)
        :type info_line: None or multiprocessing.Array of characters

        .. note::
            As `Progress` is derived from :py:class:`Loop` it is highly encouraged to create
            any instance of Progress with a context manager (``with`` statement).
            This ensures that the subprocess showing the progress terminates on context exit.
            Otherwise one has to make sure that at some point the stop() routine is called.

            abstract example::

                with AProgressClass(...) as p:
                    p.start()
                    # do stuff and modify counter
        """
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)

        # converts count to list and do type check;
        # iterating a non-iterable raises TypeError -> single-counter case
        try:
            for c in count:
                if not isinstance(c, Synchronized):
                    raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = True
        except TypeError:
            if not isinstance(count, Synchronized):
                raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = False
            count = [count]

        self.len = len(count)

        # converts max_count to list and do type check; plain numbers are
        # wrapped into shared unsigned-int values
        if max_count is not None:
            if self.is_multi:
                try:
                    for i, m in enumerate(max_count):
                        if not isinstance(m, Synchronized):
                            max_count[i] = UnsignedIntValue(m)
                except TypeError:
                    raise TypeError("'max_count' must be iterable")
            else:
                if not isinstance(max_count, Synchronized):
                    max_count = UnsignedIntValue(max_count)
                max_count = [max_count]
        else:
            max_count = [None] * self.len

        # per-counter shared state used by _calc in the subprocess
        self.start_time = []
        self.speed_calc_cycles = speed_calc_cycles
        self.width = width
        self.q = []
        self.prepend = []
        self.lock = []
        self.last_count = []
        self.last_speed = []
        for i in range(self.len):
            self.q.append(myQueue())  # queue to save the last speed_calc_cycles
                                      # (time, count) information to calculate speed
            #self.q[-1].cancel_join_thread()
            self.last_count.append(UnsignedIntValue())
            self.last_speed.append(FloatValue())
            self.lock.append(mp.Lock())
            self.start_time.append(FloatValue(val=time.time()))
            if prepend is None:
                # no prepend given
                self.prepend.append('')
            else:
                if isinstance(prepend, str):
                    self.prepend.append(prepend)
                else:
                    # assume list of prepend, (needs to be a sequence)
                    self.prepend.append(prepend[i])

        self.max_count = max_count  # list of multiprocessing value type
        self.count = count          # list of multiprocessing value type
        self.interval = interval
        self.verbose = verbose
        self.show_on_exit = False
        self.add_args = {}
        self.info_line = info_line
        self.show_stat = show_stat

        # setup loop class with func; note that the *current* show_stat is
        # captured in the args tuple here
        Loop.__init__(self,
                      func=_show_stat_wrapper_multi_Progress,
                      args=(self.count,
                            self.last_count,
                            self.start_time,
                            self.max_count,
                            self.speed_calc_cycles,
                            self.width,
                            self.q,
                            self.last_speed,
                            self.prepend,
                            show_stat,
                            self.len,
                            self.add_args,
                            self.lock,
                            self.info_line),
                      interval=interval,
                      sigint=sigint,
                      sigterm=sigterm,
                      auto_kill_on_last_resort=True)
def __exit__(self, *exc_args):
    # context-manager exit: always stop the monitoring subprocess
    self.stop()
@staticmethod
def _calc(count,
          last_count,
          start_time,
          max_count,
          speed_calc_cycles,
          q,
          last_speed,
          lock):
    """do the pre calculations in order to get TET, speed, TTG

    :param count: shared counter holding the current state
    :param last_count: count at the last call, allows to treat the case of no progress
        between sequential calls
    :param start_time: shared float, the time when start was triggered
    :param max_count: shared counter holding the maximal value of count (or None)
    :param speed_calc_cycles: number of (time, count) samples kept for the speed estimate
    :param q: queue holding the most recent (count, time) samples
    :param last_speed: shared float caching the last speed estimate
    :param lock: guards the queue and the cached values
    :returns: tuple ``(count_value, max_count_value, speed, tet, ttg)``;
        ``ttg`` is None when it cannot be estimated
    """
    count_value = count.value
    start_time_value = start_time.value
    current_time = time.time()
    if last_count.value != count_value:
        # some progress happened
        with lock:
            # save current state (count, time) to queue
            q.put((count_value, current_time))

            # get older state from queue (or initial state)
            # to do speed estimation
            if q.qsize() > speed_calc_cycles:
                old_count_value, old_time = q.get()
            else:
                old_count_value, old_time = 0, start_time_value

            last_count.value = count_value
            #last_old_count.value = old_count_value
            #last_old_time.value = old_time
            speed = (count_value - old_count_value) / (current_time - old_time)
            last_speed.value = speed
    else:
        # progress has not changed since last call
        # use also old (cached) data from the queue
        #old_count_value, old_time = last_old_count.value, last_old_time.value
        speed = last_speed.value

    if (max_count is None):
        max_count_value = None
    else:
        max_count_value = max_count.value

    tet = (current_time - start_time_value)
    if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
        ttg = None
    else:
        ttg = math.ceil((max_count_value - count_value) / speed)

    return count_value, max_count_value, speed, tet, ttg
def _reset_all(self):
"""
reset all progress information
"""
for i in range(self.len):
self._reset_i(i)
def _reset_i(self, i):
    """Reset the i-th progress information: zero the counter, drain the
    sample queue (under the lock) and restart the clock."""
    self.count[i].value=0
    log.debug("reset counter %s", i)
    self.lock[i].acquire()
    # drain the (count, time) sample queue so the speed estimate restarts
    for x in range(self.q[i].qsize()):
        self.q[i].get()
    self.lock[i].release()
    self.start_time[i].value = time.time()
def _show_stat(self):
    """
    convenient function to call the static show_stat_wrapper_multi with
    the given class members (used for the final display on stop)
    """
    _show_stat_wrapper_multi_Progress(self.count,
                                      self.last_count,
                                      self.start_time,
                                      self.max_count,
                                      self.speed_calc_cycles,
                                      self.width,
                                      self.q,
                                      self.last_speed,
                                      self.prepend,
                                      self.show_stat,
                                      self.len,
                                      self.add_args,
                                      self.lock,
                                      self.info_line,
                                      no_move_up=True)
def reset(self, i=None):
    """Reset the progress information.

    :param i: index of the progress to reset; all progresses when ``None``
    :type i: None, int
    """
    if i is not None:
        self._reset_i(i)
    else:
        self._reset_all()
def start(self):
    """Start the progress subprocess, reserving the terminal first.

    If another progress of a printing class already reserved the tty,
    nothing is started (a warning is logged instead).
    """
    # before printing any output to stdout, we can now check this
    # variable to see if any other ProgressBar has reserved that
    # terminal.
    if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
        if not terminal.terminal_reserve(progress_obj=self):
            log.warning("tty already reserved, NOT starting the progress loop!")
            return
    super(Progress, self).start()
    # remember to render a final statistic when stop() is called
    self.show_on_exit = True
def stop(self):
    """
    trigger clean up by hand, needs to be done when not using
    context management via 'with' statement

    - will terminate loop process
    - show a last progress -> see the full 100% on exit
    - releases terminal reservation
    """
    super(Progress, self).stop()
    terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)

    if self.show_on_exit:
        if not isinstance(self.pipe_handler, PipeToPrint):
            # non-print handler (e.g. IPython HTML): capture the final
            # statistic from stdout and feed it through the pipe handler
            myout = inMemoryBuffer()
            stdout = sys.stdout
            sys.stdout = myout
            self._show_stat()
            self.pipe_handler(myout.getvalue())
            sys.stdout = stdout
        else:
            self._show_stat()
            print()
        self.show_on_exit = False
def show_stat_ProgressBar(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Format one line in the classic wget/pv bar style.

    Without a usable max_count only an absolute counter plus speed is shown;
    otherwise a '[===> ]' bar with TET, speed and TTG is printed.
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        print("{}{}{} [{}] {}#{} ".format(terminal.ESC_NO_CHAR_ATTR,
                                          COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                          humanize_time(tet), humanize_speed(speed),
                                          terminal.ESC_BOLD + COLTHM['BAR_COL'],
                                          count_value))
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress and show as bar on screen
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{}{}{} [{}] ".format(terminal.ESC_NO_CHAR_ATTR,
                                   COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                   humanize_time(tet),
                                   humanize_speed(speed))
        # bar gets whatever width remains after the textual parts
        l = terminal.len_string_without_ESC(s1 + s3)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        print(s1 + s2 + s3)
class ProgressBar(Progress):
    """
    implements a progress bar similar to the one known from 'wget' or 'pv'
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        # binds the wget-style formatter; all other arguments are passed on
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBar, **kwargs)
        # self._PRE_PREPEND = terminal.ESC_NO_CHAR_ATTR + ESC_RED
        # self._POST_PREPEND = ESC_BOLD + ESC_GREEN
def show_stat_ProgressBarCounter(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Format the counter prefix (resets done so far + reset speed) followed by
    the per-task statistic in the classic bar style.

    Expects ``counter_count``, ``counter_speed`` and ``init_time`` in kwargs
    (provided via ``Progress.add_args``).
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']

    s_c = "{}{}{} [{}] {}#{} - ".format(terminal.ESC_NO_CHAR_ATTR,
                                        COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                        humanize_time(counter_tet),
                                        humanize_speed(counter_speed.value),
                                        COLTHM['BAR_COL'],
                                        str(counter_count.value) + terminal.ESC_DEFAULT)
    if width == 'auto':
        width = get_terminal_width()
    if (max_count_value is None) or (max_count_value == 0):
        # no maximum known -> absolute counter and speed only
        s_c = "{}{} [{}] {}#{} ".format(s_c,
                                        humanize_time(tet),
                                        humanize_speed(speed),
                                        COLTHM['BAR_COL'],
                                        str(count_value) + terminal.ESC_DEFAULT)
    else:
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{} [{}] ".format(humanize_time(tet), humanize_speed(speed))
        # bar fills whatever width the textual parts leave over
        l = terminal.len_string_without_ESC(s1 + s3 + s_c)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        s_c = s_c + s1 + s2 + s3
    print(s_c)
class ProgressBarCounter(Progress):
    """
    records also the time of each reset and calculates the speed
    of the resets.

    shows the TET since init (not effected by reset)
    the speed of the resets (number of finished processed per time)
    and the number of finished processes

    after that also show a progress of each process
    max_count > 0 and not None -> bar
    max_count == None -> absolute count statistic
    max_count == 0 -> hide process statistic at all
    """
    def __init__(self, speed_calc_cycles_counter=5, **kwargs):
        # NOTE: the counter-specific state is attached via add_args so it
        # reaches the show_stat function inside the loop subprocess
        Progress.__init__(self, show_stat=show_stat_ProgressBarCounter, **kwargs)

        self.counter_count = []          # number of resets per progress
        self.counter_q = []              # (count, time) samples for reset speed
        self.counter_speed = []          # cached reset-speed per progress
        for i in range(self.len):
            self.counter_count.append(UnsignedIntValue(val=0))
            self.counter_q.append(myQueue())
            self.counter_speed.append(FloatValue())

        self.counter_speed_calc_cycles = speed_calc_cycles_counter
        self.init_time = time.time()

        self.add_args['counter_count'] = self.counter_count
        self.add_args['counter_speed'] = self.counter_speed
        self.add_args['init_time'] = self.init_time

    def get_counter_count(self, i=0):
        """Return how often progress ``i`` has been reset (tasks finished)."""
        return self.counter_count[i].value

    def _reset_i(self, i):
        # a reset means one task finished: bump the counter, record a
        # (count, time) sample and update the reset-speed estimate
        c = self.counter_count[i]
        with c.get_lock():
            c.value += 1
            count_value = c.value
        q = self.counter_q[i]

        current_time = time.time()
        q.put((count_value, current_time))

        if q.qsize() > self.counter_speed_calc_cycles:
            old_count_value, old_time = q.get()
        else:
            old_count_value, old_time = 0, self.init_time

        speed = (count_value - old_count_value) / (current_time - old_time)
        self.counter_speed[i].value = speed

        Progress._reset_i(self, i)
def get_d(s1, s2, width, lp, lps):
    # space left once prepend (lp), the percent string (lps), the two
    # bracket characters and both info strings (ESC sequences excluded)
    # are accounted for
    d = width - len(terminal.remove_ESC_SEQ_from_string(s1)) - len(terminal.remove_ESC_SEQ_from_string(s2)) - 2 - lp - lps
    if d >= 0:
        d1 = d // 2
        d2 = d - d1
        return s1, s2, d1, d2
    # implicitly returns None when the line does not fit -> the caller then
    # falls back to a more compact layout (see _stat)
def full_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # widest layout: TET/speed/TTG left, ETA/ORT right; None if it does not fit
    s1 = "TET {} {:>12} TTG {}".format(tet, speed, ttg)
    s2 = "ETA {} ORT {}".format(eta, ort)
    return get_d(s1, s2, width, lp, lps)
def full_minor_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # same content as full_stat but with single-letter keywords
    s1 = "E {} {:>12} G {}".format(tet, speed, ttg)
    s2 = "A {} O {}".format(eta, ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_1_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # drops the ETA, keeps elapsed/speed/to-go and overall runtime
    s1 = "E {} {:>12} G {}".format(tet, speed, ttg)
    s2 = "O {}".format(ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_2_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # additionally drops the speed
    s1 = "E {} G {}".format(tet, ttg)
    s2 = "O {}".format(ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_3_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # elapsed and to-go only
    s1 = "E {} G {}".format(tet, ttg)
    s2 = ''
    return get_d(s1, s2, width, lp, lps)
def reduced_4_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # last resort: no textual information at all, only the bar itself
    s1 = ''
    s2 = ''
    return get_d(s1, s2, width, lp, lps)
def kw_bold(s, ch_after):
    """Wrap the known statistic keywords in bold ESC sequences whenever they
    are directly followed by one of the characters in ``ch_after``."""
    keywords = ['TET', 'TTG', 'ETA', 'ORT', 'E', 'G', 'A', 'O']
    for keyword in keywords:
        for suffix in ch_after:
            bold_kw = terminal.ESC_BOLD + keyword + terminal.ESC_RESET_BOLD
            s = s.replace(keyword + suffix, bold_kw + suffix)
    return s
def _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Build the htop-like fancy statistic string (without printing it).

    Tries the layout helpers from the most verbose (full_stat) down to the
    most compact (reduced_4_stat) until one fits into ``width``; the part of
    the line left of the progress position is rendered 'filled'.
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        stat = "{}{} [{}] {}#{} ".format(COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress
        p = count_value / max_count_value
        if p < 1:
            ps = " {:.1%} ".format(p)
        else:
            ps = " {:.0%} ".format(p)

        if ttg is None:
            eta = '--'
            ort = None
        else:
            eta = datetime.datetime.fromtimestamp(time.time() + ttg).strftime("%Y%m%d_%H:%M:%S")
            ort = tet + ttg

        tet = humanize_time(tet)
        speed = '[' + humanize_speed(speed) + ']'
        ttg = humanize_time(ttg)
        ort = humanize_time(ort)
        repl_ch = '-'
        lp = len(prepend)

        # fall through the layouts until one fits the width
        args = p, tet, speed, ttg, eta, ort, repl_ch, width, lp, len(ps)
        res = full_stat(*args)
        if res is None:
            res = full_minor_stat(*args)
        if res is None:
            res = reduced_1_stat(*args)
        if res is None:
            res = reduced_2_stat(*args)
        if res is None:
            res = reduced_3_stat(*args)
        if res is None:
            res = reduced_4_stat(*args)

        if res is not None:
            s1, s2, d1, d2 = res
            s = s1 + ' ' * d1 + ps + ' ' * d2 + s2
            # index of the progress position within the line
            idx_p = math.ceil( (width-lp-2)*p)
            s_before = s[:idx_p].replace(' ', repl_ch)
            if (len(s_before) > 0) and (s_before[-1] == repl_ch):
                s_before = s_before[:-1] + '>'
            s_after = s[idx_p:]
            s_before = kw_bold(s_before, ch_after=[repl_ch, '>'])
            s_after = kw_bold(s_after, ch_after=[' '])
            stat = (COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT +
                    COLTHM['BAR_COL'] + terminal.ESC_BOLD + '[' + terminal.ESC_RESET_BOLD + s_before + terminal.ESC_DEFAULT +
                    s_after + terminal.ESC_BOLD + COLTHM['BAR_COL'] + ']' + terminal.ESC_NO_CHAR_ATTR)
        else:
            # even the bare bar does not fit -> show prepend and percent only
            ps = ps.strip()
            if p == 1:
                ps = ' ' + ps
            stat = prepend + ps
    return stat
def show_stat_ProgressBarFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    # build the fancy statistic string and print it as one line
    stat = _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs)
    print(stat)
class ProgressBarFancy(Progress):
    """
    implements a progress bar where the color indicates the current status
    similar to the bars known from 'htop'
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        # binds the htop-style formatter; all other arguments are passed on
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBarFancy, **kwargs)
def show_stat_ProgressBarCounterFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Counter prefix (resets + reset speed) followed by the fancy statistic.

    max_count semantics: None -> absolute statistic, 0 -> counter only,
    otherwise a fancy bar in the remaining width.
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']

    s_c = "{}{}{} [{}] {}#{}".format(terminal.ESC_NO_CHAR_ATTR,
                                     COLTHM['PRE_COL']+prepend+terminal.ESC_DEFAULT,
                                     humanize_time(counter_tet),
                                     humanize_speed(counter_speed.value),
                                     COLTHM['BAR_COL'],
                                     str(counter_count.value) + terminal.ESC_DEFAULT)

    if max_count_value is not None:
        if width == 'auto':
            width = get_terminal_width()
        s_c += ' - '
        if max_count_value == 0:
            s_c = "{}{} [{}] {}#{} ".format(s_c, humanize_time(tet), humanize_speed(speed),
                                            COLTHM['BAR_COL'], str(count_value)+terminal.ESC_DEFAULT)
        else:
            # fancy bar rendered into whatever width the prefix leaves over
            _width = width - terminal.len_string_without_ESC(s_c)
            s_c += _stat(count_value, max_count_value, '', speed, tet, ttg, _width, i)
    print(s_c)
class ProgressBarCounterFancy(ProgressBarCounter):
    # combines the reset counter of ProgressBarCounter with the fancy bar
    def __init__(self, *args, **kwargs):
        ProgressBarCounter.__init__(self, *args, **kwargs)
        # NOTE(review): assigning show_stat after __init__ updates the
        # formatter used by _show_stat (final display), but the Loop args
        # tuple built in Progress.__init__ already captured
        # show_stat_ProgressBarCounter -- verify which formatter the live
        # subprocess actually uses.
        self.show_stat = show_stat_ProgressBarCounterFancy
class SIG_handler_Loop(object):
    """class to setup signal handling for the Loop class

    Note: each subprocess receives the default signal handling from it's parent.
    If the signal function from the module signal is evoked within the subprocess
    this default behavior can be overwritten.

    The handlers installed here either ignore the signal ('ign') or raise
    LoopInterruptError ('stop'), which terminates the loop subprocess.
    """
    def __init__(self, sigint, sigterm, log, prefix):
        """
        :param sigint: handler keyword for SIGINT, 'ign' or 'stop'
        :param sigterm: handler keyword for SIGTERM, 'ign' or 'stop'
        :param log: logger used by the handlers
        :param prefix: identifier string kept for log messages
        """
        self.set_signal(signal.SIGINT, sigint)
        self.set_signal(signal.SIGTERM, sigterm)
        self.prefix = prefix
        self.log = log
        self.log.info("setup signal handler for loop (SIGINT:%s, SIGTERM:%s)", sigint, sigterm)

    def set_signal(self, sig, handler_str):
        """Install the handler selected by ``handler_str`` for signal ``sig``.

        :raises TypeError: for any handler string other than 'ign' or 'stop'
        """
        if handler_str == 'ign':
            signal.signal(sig, self._ignore_signal)
        elif handler_str == 'stop':
            signal.signal(sig, self._stop_on_signal)
        else:
            # bugfix: the message was previously never formatted because
            # handler_str was passed as a second TypeError argument instead
            # of being interpolated (also fixes the 'hander' typo)
            raise TypeError("unknown signal handler string '%s'" % handler_str)

    def _ignore_signal(self, signal, frame):
        # deliberately do nothing, just log the event
        self.log.debug("ignore received sig %s", signal_dict[signal])

    def _stop_on_signal(self, signal, frame):
        self.log.info("received sig %s -> raise InterruptedError", signal_dict[signal])
        raise LoopInterruptError()
def FloatValue(val=0.):
    """Create a shared ``multiprocessing.Value`` of C type double, initialised to ``val``."""
    shared = mp.Value('d', val, lock=True)
    return shared
def UnsignedIntValue(val=0):
    """Create a shared ``multiprocessing.Value`` of C type unsigned int, initialised to ``val``."""
    shared = mp.Value('I', val, lock=True)
    return shared
def StringValue(num_of_bytes):
    """returns a `multiprocessing.Array` of type `character` and length `num_of_bytes`"""
    # _jm_compatible_bytearray is the identity on py2 and ``bytearray`` on
    # py3 (see the version compatibility block at the top of the module)
    return mp.Array('c', _jm_compatible_bytearray(num_of_bytes), lock=True)
def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort=False):
    """Escalating shutdown of ``proc``: join, then SIGTERM, then (interactively
    or automatically) SIGKILL.

    :param proc: the multiprocessing process to terminate
    :param prefix: identifier string (kept for log context)
    :param timeout: seconds to wait for a plain join; SIGTERM gets 3x this
    :param auto_kill_on_last_resort: when True send SIGKILL without asking
    :returns: True when the process stopped, False when the user chose to
        ignore it
    """
    proc.join(timeout)
    if not proc.is_alive():
        log.debug("termination of process (pid %s) within timeout of %s SUCCEEDED!", proc.pid, humanize_time(timeout))
        return True

    # process still runs -> send SIGTERM -> see what happens
    log.warning("termination of process (pid %s) within given timeout of %s FAILED!", proc.pid, humanize_time(timeout))
    proc.terminate()
    new_timeout = 3*timeout
    log.debug("wait for termination (timeout %s)", humanize_time(new_timeout))
    proc.join(new_timeout)
    if not proc.is_alive():
        log.info("termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!", proc.pid, humanize_time(new_timeout))
        return True

    log.warning("termination of process (pid %s) via SIGTERM with timeout of %s FAILED!", proc.pid, humanize_time(new_timeout))
    log.debug("auto_kill_on_last_resort is %s", auto_kill_on_last_resort)
    # 'k' triggers SIGKILL, anything else another SIGTERM, then ask the user
    answer = 'k' if auto_kill_on_last_resort else '_'
    while True:
        log.debug("answer string is %s", answer)
        if answer == 'k':
            log.warning("send SIGKILL to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGKILL)
            time.sleep(0.1)
        else:
            log.info("send SIGTERM to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGTERM)
            time.sleep(0.1)

        if not proc.is_alive():
            log.info("process (pid %s) has stopped running!", proc.pid)
            return True
        else:
            log.warning("process (pid %s) is still running!", proc.pid)
            print("the process (pid {}) seems still running".format(proc.pid))
            try:
                answer = input("press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore")
            except Exception as e:
                # no interactive stdin available -> escalate to SIGKILL
                log.error("could not ask for sending SIGKILL due to {}".format(type(e)))
                log.info(traceback.format_exc())
                log.warning("send SIGKILL now")
                answer = 'k'

            if answer == 'ignore':
                log.warning("ignore process %s", proc.pid)
                return False
            elif answer != 'k':
                answer = ''
def getCountKwargs(func):
    """Return the ``["count kwarg", "count_max kwarg"]`` pair for ``func``.

    Valid combinations are defined in `progress.validCountKwargs`.
    Returns None if no matching keyword arguments are found (or ``func``
    has no inspectable code object).
    """
    if not hasattr(func, "__code__"):
        return None
    code = func.__code__
    arg_names = code.co_varnames[:code.co_argcount]
    for pair in validCountKwargs:
        if pair[0] in arg_names and pair[1] in arg_names:
            return pair
    return None
def humanize_speed(c_per_sec):
    """Convert a speed in counts per second to counts per [s, min, h, d],
    choosing the smallest unit that yields a value of at least one."""
    factors = (60, 60, 24)
    units = ('c/s', 'c/min', 'c/h', 'c/d')
    speed = c_per_sec
    idx = 0
    if speed > 0:
        while speed < 1 and idx < len(factors):
            speed *= factors[idx]
            idx += 1
    return "{:.1f}{}".format(speed, units[idx])
def humanize_time(secs):
    """Render a duration in seconds as '--' (None), milliseconds (< 1s),
    seconds (< 10s) or hh:mm:ss otherwise."""
    if secs is None:
        return '--'
    if secs < 1:
        return "{:.2f}ms".format(secs * 1000)
    if secs < 10:
        return "{:.2f}s".format(secs)
    minutes, seconds = divmod(secs, 60)
    hours, minutes = divmod(minutes, 60)
    return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
def codecov_subprocess_check():
    # helper that only prints a marker line; judging by the message it is
    # meant to be invoked from a spawned subprocess (presumably to verify
    # that coverage is collected there as well -- TODO confirm)
    print("this line will be only called from a subprocess")
# alias so the queue implementation used throughout the module can be
# swapped in one place
myQueue = mp.Queue

# a mapping from the numeric values of the signals to their names used in the
# standard python module signals
signal_dict = {}
for s in dir(signal):
    if s.startswith('SIG') and s[3] != '_':
        n = getattr(signal, s)
        if n in signal_dict:
            # several names may map to the same number (e.g. SIGABRT/SIGIOT)
            signal_dict[n] += ('/'+s)
        else:
            signal_dict[n] = s
# predefined color themes: prepend color, bar color and the number of
# additional lines the cursor has to move up (Windows cmd needs one extra)
_colthm_term_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_GREEN, 'ADD_LNS_UP':0}
_colthm_ipyt_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_BLUE, 'ADD_LNS_UP':0}
_colthm_wincmd_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_GREEN, 'ADD_LNS_UP':1}

color_themes = {'term_default': _colthm_term_default,
                'ipyt_default': _colthm_ipyt_default,
                'wincmd_default': _colthm_wincmd_default}

# pick the platform-appropriate default theme (see choose_color_theme)
if platform.system() == 'Windows':
    COLTHM = _colthm_wincmd_default
else:
    COLTHM = _colthm_term_default
def choose_color_theme(name):
    """Select one of the predefined color themes by name; warns (and keeps
    the current theme) when the name is unknown."""
    global COLTHM
    try:
        COLTHM = color_themes[name]
    except KeyError:
        warnings.warn("no such color theme {}".format(name))
# keyword arguments that define counting in wrapped functions;
# each pair is (name of the counter kwarg, name of the max-count kwarg)
# and is matched against function signatures by getCountKwargs
validCountKwargs = [
    [ "count", "count_max"],
    [ "count", "max_count"],
    [ "c", "m"],
    [ "jmc", "jmm"],
]
|
cimatosa/progression | progression/progress.py | getCountKwargs | python | def getCountKwargs(func):
# Get all arguments of the function
if hasattr(func, "__code__"):
func_args = func.__code__.co_varnames[:func.__code__.co_argcount]
for pair in validCountKwargs:
if ( pair[0] in func_args and pair[1] in func_args ):
return pair
# else
return None | Returns a list ["count kwarg", "count_max kwarg"] for a
given function. Valid combinations are defined in
`progress.validCountKwargs`.
Returns None if no keyword arguments are found. | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L1378-L1392 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Progression module
------------------
This module provides the (so far) four variants to display progress information:
* :py:class:`.ProgressBar`
This class monitors one or multiple processes showing the total elapsed time (TET), the current speed estimated from the most recent updates, a colored bar showing the progress and an
estimated from the most recent updated, a colored bar showing the progress and an
estimate for the remaining time, also called time to go (TTG).
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 5.83s [7.2c/s] <span style="color:#00ff00"><b>[=====================> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounter`
If a single process is intended to do several sequential task, the :py:class:`.ProgressBarCounter` class can keep track of the number
of accomplished tasks on top of monitoring the individual task just like :py:class:`.ProgressBar` does.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre><span style="color:#00ff00"><b> [</b><b>TET</b>-5.83s-----[7.2c/s]-<b>TTG</b>-8.05s-></span> 42.0% <b>ETA</b> 20161011_16:52:52 <b>ORT</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
* :py:class:`.ProgressBarFancy`
This class intends to be a replacement for :py:class:`.ProgressBar` with slightly more information and
better handling of small terminal widths.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - 5.83s [7.2c/s] <span style="color:#00ff00"><b>[===========> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounterFancy`
Just as :py:class:`.ProgressBarFancy` this replaces :py:class:`.ProgressBarCounter`.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - <span style="color:#800000"></span><span style="color:#00ff00"><b>[</b><b>E</b>-5.83s-----[7.2c/s]-<b>G</b>-8</span>.05s 42.0% <b>O</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
.. autoclass:: Progress
:members:
:inherited-members:
.. autoclass:: ProgressBar
:members:
.. autoclass:: ProgressBarCounter
:members:
.. autoclass:: ProgressBarFancy
:members:
.. autoclass:: ProgressBarCounterFancy
:members:
.. autofunction:: UnsignedIntValue
.. autofunction:: FloatValue
.. autofunction:: StringValue
"""
from __future__ import division, print_function
import datetime
import io
import logging
from logging.handlers import QueueHandler, QueueListener
import math
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
import os
import sys
import signal
import subprocess as sp
import threading
import time
import traceback
import warnings
from . import terminal
import platform
# optional IPython support: both ipywidgets and IPython.display must import
# for HTML output; otherwise _IPYTHON stays False and only terminal output
# is available
_IPYTHON = True
try:
    import ipywidgets
except ImportError:
    _IPYTHON = False
    warnings.warn("could not load ipywidgets (IPython HTML output will not work)", category=ImportWarning)
except DeprecationWarning:
    # tolerate ipywidgets raising a DeprecationWarning on import
    pass
try:
    from IPython.display import display
except ImportError:
    _IPYTHON = False
    warnings.warn("could not load IPython (IPython HTML output will not work)", category=ImportWarning)
# Magic conversion from 3 to 2: provide py3 names/behaviors on py2
if sys.version_info[0] == 2:
    ProcessLookupError = OSError
    inMemoryBuffer = io.BytesIO
    # py2 math.ceil returns float -> wrap it so it returns int like py3
    old_math_ceil = math.ceil
    def my_int_ceil(f):
        return int(old_math_ceil(f))
    math.ceil = my_int_ceil
    # py2 mp.Array('c', ...) accepts an int length directly
    _jm_compatible_bytearray = lambda x: x
    class TimeoutError(Exception):
        pass
elif sys.version_info[0] == 3:
    inMemoryBuffer = io.StringIO
    _jm_compatible_bytearray = bytearray
class MultiLineFormatter(logging.Formatter):
    """Formatter that pads continuation lines of a multiline message.

    The header (everything the format string places in front of the
    message) is measured and every following line of the message is
    indented by the same number of spaces, so the lines align::

        <HEAD> msg_line1
               msg_line2
               ...
    """
    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        header_len = len(formatted.split(record.message)[0])
        return formatted.replace('\n', '\n' + ' ' * header_len)
# the default stream handler is deliberately disabled; only the multiline
# formatter and the module logger are set up
# def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
# def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
# def_handl.setFormatter(fmt) # ... and pads multiline messaged
log = logging.getLogger(__name__) # creates the default log for this module
# log.addHandler(def_handl)
class LoopExceptionError(RuntimeError):
    """Raised when the wrapped loop function terminated with a non-zero exitcode."""
    pass
class LoopInterruptError(Exception):
    """Raised by the signal handlers to interrupt the loop subprocess."""
    pass
class StdoutPipe(object):
    """File-like replacement for stream objects such as ``sys.stdout`` that
    forwards all incoming data over a connection.

    Every string handed to :py:meth:`write` is passed to the ``send``
    method of the given connection (e.g. one end of a
    ``multiprocessing.Pipe``); :py:meth:`flush` is a no-op and exists only
    to satisfy the stream interface.

    example usage:

        >>> import sys
        >>> from multiprocessing import Pipe
        >>> from progression import StdoutPipe
        >>> conn_recv, conn_send = Pipe(False)
        >>> sys.stdout = StdoutPipe(conn_send)
        >>> print("hallo welt", end='')   # now going through the pipe
        >>> msg = conn_recv.recv()
        >>> sys.stdout = sys.__stdout__
        >>> print(msg)
        hallo welt
        >>> assert msg == "hallo welt"
    """
    def __init__(self, conn):
        self.conn = conn

    def flush(self):
        pass

    def write(self, b):
        self.conn.send(b)
class PipeToPrint(object):
    """Default pipe handler: write incoming progress data straight to stdout."""
    def __call__(self, b):
        sys.stdout.write(str(b))

    def close(self):
        # nothing to release for plain printing
        pass
class PipeFromProgressToIPythonHTMLWidget(object):
    """Pipe handler that renders the progress output in an IPython HTML widget.

    Incoming data is buffered until a chunk ends with the magic escape
    sequence that marks a complete progress frame; the frame is then
    converted from terminal escape sequences to HTML and displayed.
    """
    def __init__(self):
        self.htmlWidget = ipywidgets.widgets.HTML()
        display(self.htmlWidget)
        # accumulates raw data until a full frame has arrived
        self._buff = ""
    def __call__(self, b):
        self._buff += b
        # ESC_MY_MAGIC_ENDING terminates one complete progress frame
        if b.endswith(terminal.ESC_MY_MAGIC_ENDING):
            buff = terminal.ESC_SEQ_to_HTML(self._buff)
            self.htmlWidget.value = '<style>.widget-html{font-family:monospace}</style><pre>'+buff+'</pre>'
            self._buff = ""
    def close(self):
        self.htmlWidget.close()
# module-level hook: the class used to handle progress output produced by the
# monitor subprocess; exchanged at runtime via choose_pipe_handler()
PipeHandler = PipeToPrint
def choose_pipe_handler(kind='print', color_theme=None):
    """Select the global handler used to display progress output.

    :param kind: ``'print'`` writes to stdout, ``'ipythonhtml'`` renders the
        progress in an IPython HTML widget (requires IPython and ipywidgets)
    :param color_theme: name of the color theme to apply; when None a default
        matching the chosen handler is used
    :raises ValueError: if *kind* is not one of the supported handler kinds
    """
    global PipeHandler
    if kind == 'print':
        PipeHandler = PipeToPrint
        if color_theme is None:
            choose_color_theme('term_default')
        else:
            choose_color_theme(color_theme)
    elif kind == 'ipythonhtml':
        if _IPYTHON:
            PipeHandler = PipeFromProgressToIPythonHTMLWidget
            if color_theme is None:
                choose_color_theme('ipyt_default')
            else:
                choose_color_theme(color_theme)
        else:
            warnings.warn("can not choose ipythonHTML (IPython and/or ipywidgets were not loaded)")
    else:
        # bug fix: the original never interpolated 'kind' into the message,
        # so the error showed a literal '{}' instead of the offending value
        raise ValueError("unknown kind '{}' for pipe_handler, use one out of ('print', 'ipythonhtml')".format(kind))
def get_terminal_width():
    """Return the line width available to the currently selected pipe handler.

    For plain terminal printing the real terminal width is queried;
    for the IPython HTML widget a fixed width of 80 characters is used.

    :raises NotImplementedError: for an unknown PipeHandler class
    """
    if PipeHandler == PipeToPrint:
        return terminal.get_terminal_width()
    elif PipeHandler == PipeFromProgressToIPythonHTMLWidget:
        return 80
    else:
        raise NotImplementedError
def get_identifier(name=None, pid=None, bold=True):
    """Build a process identifier string of the form ``name_pid`` (or ``PID_pid``).

    When *pid* is None the id of the current process is used.  With *bold*
    enabled the identifier is wrapped in terminal escape sequences for bold text.
    """
    if pid is None:
        pid = os.getpid()
    if bold:
        pre, post = terminal.ESC_BOLD, terminal.ESC_NO_CHAR_ATTR
    else:
        pre = post = ""
    base = "PID" if name is None else name
    return "{}{}_{}{}".format(pre, base, pid, post)
def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
                       logging_level, conn_send, func_running, log_queue):
    """
    to be executed as a separate process (that's why this functions is declared static)

    Repeatedly calls ``func(*args)`` every ``interval`` seconds as long as the
    shared boolean ``shared_mem_run`` is set.  While ``shared_mem_pause`` is set
    the function is not called and the loop merely sleeps.  The loop ends early
    when ``func`` returns True, raises, or a 'stop' signal is received.
    stdout of the subprocess is redirected through ``conn_send``; log records
    are shipped to the parent via ``log_queue``.
    """
    prefix = get_identifier(name) + ' '
    global log
    # child-specific logger which ships its records to the parent via the queue
    log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
    log.setLevel(logging_level)
    log.addHandler(QueueHandler(log_queue))
    # forward every stdout write of this subprocess to the parent
    sys.stdout = StdoutPipe(conn_send)
    log.debug("enter wrapper_func")
    # install the requested SIGINT/SIGTERM behavior in this subprocess
    SIG_handler_Loop(sigint, sigterm, log, prefix)
    func_running.value = True
    error = False
    while shared_mem_run.value:
        try:
            # in pause mode, simply sleep
            if shared_mem_pause.value:
                quit_loop = False
            else:
                # if not pause mode -> call func and see what happens
                try:
                    quit_loop = func(*args)
                except LoopInterruptError:
                    raise
                except Exception as e:
                    log.error("error %s occurred in loop calling 'func(*args)'", type(e))
                    log.info("show traceback.print_exc()\n%s", traceback.format_exc())
                    error = True
                    break
            if quit_loop is True:
                log.debug("loop stooped because func returned True")
                break
            time.sleep(interval)
        except LoopInterruptError:
            log.debug("quit wrapper_func due to InterruptedError")
            break
    func_running.value = False
    if error:
        # non-zero exit code tells the parent that func raised
        sys.exit(-1)
    else:
        log.debug("wrapper_func terminates gracefully")
    # gets rid of the following warnings
    #   Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
    #   ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
    try:
        if mp.get_start_method() == "spawn":
            sys.stdin.close()
    except AttributeError:
        # Python 2: mp.get_start_method does not exist
        pass
class LoopTimeoutError(TimeoutError):
    """raised by Loop.start when the loop function does not come up within the given timeout"""
    pass
class Loop(object):
    """
    class to run a function periodically in a separate process.

    In case the called function returns True, the loop will stop.
    Otherwise a time interval given by interval will be slept before
    another execution is triggered.

    The shared memory variable _run (accessible via the class property run)
    also determines if the function is executed another time. If set to False
    the execution stops.

    For safe cleanup (and in order to catch any Errors)
    it is advisable to instantiate this class
    using 'with' statement as follows:

        with Loop(**kwargs) as my_loop:
            my_loop.start()
            ...

    this will guarantee you that the spawned loop process is
    down when exiting the 'with' scope.

    The only circumstance where the process is still running is
    when you set auto_kill_on_last_resort to False and answer the
    question to send SIGKILL with no.
    """
    def __init__(self,
                 func,
                 args=(),
                 interval=1,
                 verbose=None,
                 sigint='stop',
                 sigterm='stop',
                 auto_kill_on_last_resort=False,
                 raise_error=True):
        """
        func [callable] - function to be called periodically

        args [tuple] - arguments passed to func when calling

        intervall [pos number] - time to "sleep" between each call

        verbose - DEPRECATED, only kept for compatibility, use global log.level to
        specify verbosity

        sigint [string] - signal handler string to set SIGINT behavior (see below)

        sigterm [string] - signal handler string to set SIGTERM behavior (see below)

        auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
        to loop process in case normal stop and SIGTERM failed. If set True, send SIGKILL
        without asking.

        the signal handler string may be one of the following
            ing: ignore the incoming signal
            stop: raise InterruptedError which is caught silently.
        """
        self._proc = None
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        self.func = func
        self.args = args
        self.interval = interval
        assert self.interval >= 0
        # shared flags read by the subprocess each cycle
        self._run = mp.Value('b', False)
        self._pause = mp.Value('b', False)
        # set True by the subprocess once func is actually being called
        self._func_running = mp.Value('b', False)
        self._sigint = sigint
        self._sigterm = sigterm
        self._auto_kill_on_last_resort = auto_kill_on_last_resort
        log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
        self._monitor_thread = None
        # instance of the module-level PipeHandler class selected via choose_pipe_handler
        self.pipe_handler = PipeHandler()
        self.raise_error = raise_error

    def __enter__(self):
        return self

    def __exit__(self, *exc_args):
        if self.is_alive():
            log.debug("loop is still running on context exit")
        else:
            log.debug("loop has stopped on context exit")
        self.stop()

    def __cleanup(self):
        """
        Wait at most twice as long as the given repetition interval
        for the _wrapper_function to terminate.

        If after that time the _wrapper_function has not terminated,
        send SIGTERM to and the process.

        Wait at most five times as long as the given repetition interval
        for the _wrapper_function to terminate.

        If the process still running send SIGKILL automatically if
        auto_kill_on_last_resort was set True or ask the
        user to confirm sending SIGKILL
        """
        # set run to False and wait some time -> see what happens
        self._run.value = False
        if check_process_termination(proc=self._proc,
                                     timeout=2*self.interval,
                                     prefix='',
                                     auto_kill_on_last_resort=self._auto_kill_on_last_resort):
            log.debug("cleanup successful")
        else:
            raise RuntimeError("cleanup FAILED!")
        try:
            # closing the send end makes the monitor thread's recv raise EOFError
            self.conn_send.close()
            self._log_queue_listener.stop()
        except OSError:
            pass
        log.debug("wait for monitor thread to join")
        self._monitor_thread.join()
        log.debug("monitor thread to joined")
        self._func_running.value = False

    def _monitor_stdout_pipe(self):
        # runs in a daemon thread of the parent: forward everything the
        # subprocess writes to stdout into the pipe handler
        while True:
            try:
                b = self.conn_recv.recv()
                self.pipe_handler(b)
            except EOFError:
                break

    def start(self, timeout=None):
        """
        uses multiprocess Process to call _wrapper_func in subprocess
        """
        if self.is_alive():
            log.warning("a process with pid %s is already running", self._proc.pid)
            return
        self._run.value = True
        self._func_running.value = False
        name = self.__class__.__name__
        # pipe carrying the subprocess' stdout back to the parent
        self.conn_recv, self.conn_send = mp.Pipe(False)
        self._monitor_thread = threading.Thread(target=self._monitor_stdout_pipe)
        self._monitor_thread.daemon = True
        self._monitor_thread.start()
        log.debug("started monitor thread")
        # queue + listener forward log records from the subprocess to this
        # module's log handlers
        self._log_queue = mp.Queue()
        self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
        self._log_queue_listener.start()
        args = (self.func, self.args, self._run, self._pause, self.interval,
                self._sigint, self._sigterm, name, log.level, self.conn_send,
                self._func_running, self._log_queue)
        self._proc = mp.Process(target=_loop_wrapper_func,
                                args=args)
        self._proc.start()
        log.info("started a new process with pid %s", self._proc.pid)
        log.debug("wait for loop function to come up")
        t0 = time.time()
        # poll until the subprocess signals that func is being called,
        # the subprocess dies, or the timeout expires
        while not self._func_running.value:
            if self._proc.exitcode is not None:
                exc = self._proc.exitcode
                self._proc = None
                if exc == 0:
                    log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
                    return
                else:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
                                             "see log (INFO level) for traceback information")
            time.sleep(0.1)
            if (timeout is not None) and ((time.time() - t0) > timeout):
                err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
                log.error(err_msg)
                log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
                         "or an internal error occurred before reaching the function call")
                raise LoopTimeoutError(err_msg)
        log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))

    def stop(self):
        """
        stops the process triggered by start

        Setting the shared memory boolean run to false, which should prevent
        the loop from repeating. Call __cleanup to make sure the process
        stopped. After that we could trigger start() again.
        """
        if self.is_alive():
            self._proc.terminate()
        if self._proc is not None:
            self.__cleanup()
            if self.raise_error:
                # 255 is the exitcode produced by sys.exit(-1) in the wrapper,
                # i.e. func raised an exception
                if self._proc.exitcode == 255:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
                                             "see log (INFO level) for traceback information")
        self.pipe_handler.close()
        self._proc = None

    def join(self, timeout):
        """
        calls join for the spawned process with given timeout
        """
        if self.is_alive():
            self._proc.join(timeout)

    def is_alive(self):
        # True while the spawned process exists and has not terminated
        if self._proc is None:
            return False
        else:
            return self._proc.is_alive()

    def is_running(self):
        # True only when the subprocess is alive AND func has come up
        if self.is_alive():
            return self._func_running.value
        else:
            return False

    def pause(self):
        # suspend calling func; the subprocess keeps sleeping in its loop
        if self._run.value:
            self._pause.value = True
            log.debug("process with pid %s paused", self._proc.pid)

    def resume(self):
        if self._run.value:
            self._pause.value = False
            log.debug("process with pid %s resumed", self._proc.pid)

    def getpid(self):
        # pid of the spawned process, or None when not running
        if self._proc is not None:
            return self._proc.pid
        else:
            return None
def show_stat_base(count_value, max_count_value, prepend, speed, tet, ttg, width, **kwargs):
    """A function that formats the progress information

    This function will be called periodically for each progress that is monitored.
    Overwrite this function in a subclass to implement a specific formating of the progress information

    :param count_value: a number holding the current state
    :param max_count_value: should be the largest number `count_value` can reach
    :param prepend: additional text for each progress
    :param speed: the speed estimation
    :param tet: the total elapsed time
    :param ttg: the time to go
    :param width: the width for the progressbar, when set to `"auto"` this function
        should try to detect the width available
    :type width: int or "auto"
    :raises NotImplementedError: always; concrete formatters must be supplied
    """
    raise NotImplementedError
def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                width, q, last_speed, prepend, show_stat_function, add_args,
                                i, lock):
    """Compute the progress quantities for one counter and hand them to the
    display function *show_stat_function*."""
    stats = Progress._calc(count, last_count, start_time, max_count,
                           speed_calc_cycles, q, last_speed, lock)
    count_value, max_count_value, speed, tet, ttg = stats
    return show_stat_function(count_value, max_count_value, prepend, speed,
                              tet, ttg, width, i, **add_args)
def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                      width, q, last_speed, prepend, show_stat_function, len_,
                                      add_args, lock, info_line, no_move_up=False):
    """
    call the static method show_stat_wrapper for each process

    After printing the ``len_`` progress lines (plus any info lines) the
    cursor is moved back up so the next invocation overwrites them in place,
    unless *no_move_up* is set (used for the final print on exit).
    """
    # print(ESC_BOLD, end='')
    # sys.stdout.flush()
    for i in range(len_):
        _show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles,
                                    width, q[i], last_speed[i], prepend[i], show_stat_function,
                                    add_args, i, lock[i])
    n = len_
    if info_line is not None:
        s = info_line.value.decode('utf-8')
        s = s.split('\n')
        n += len(s)
        for si in s:
            if width == 'auto':
                width = get_terminal_width()
            # truncate info lines that would wrap and pad short ones to full width
            if len(si) > width:
                si = si[:width]
            print("{0:<{1}}".format(si, width))
    if no_move_up:
        n = 0
    # this is only a hack to find the end
    # of the message in a stream
    # so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending
    print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')
    sys.stdout.flush()
class Progress(Loop):
    """
    Abstract Progress Class

    The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
    function which calculates progress information from a changing counter value.
    The formatting of these information is done by overwriting the static member
    :py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
    format a single progress bar on a single line only.

    The extension to multiple progresses is done
    automatically base on the formatting of a single line.
    """
    def __init__(self,
                 count,
                 max_count=None,
                 prepend=None,
                 width='auto',
                 speed_calc_cycles=10,
                 interval=1,
                 verbose=None,
                 sigint='stop',
                 sigterm='stop',
                 info_line=None,
                 show_stat=None):
        """
        :param count: shared variable for holding the current state
            (use :py:func:`UnsignedIntValue` for short hand creation)
        :type count: list/single value of multiprocessing.Value
        :param max_count: shared variable for holding the final state
        :type max_count: None or list/single value of multiprocessing.Value
        :param prepend: string to put in front of each progress output
        :type prepend: None, str or list of str
        :param width: the width to use for the progress line (fixed or automatically determined)
        :type width: int or "auto"
        :param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
            (example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
            the current state and state 4 updates before where the elapsed time will roughly be 4s)
        :param interval: seconds to wait before updating the progress
        :param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
            output level
        :param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigint: "stop" or "ign"
        :param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigterm: "stop" or "ign"
        :param info_line: additional text to show below the progress (use :py:func:`StringValue`
            for short hand creation of shared strings)
        :type info_line: None or multiprocessing.Array of characters

        .. note::
            As `Progress` is derived from :py:class:`Loop` it is highly encurraged to create
            any instance of Progress with a context manager (``with`` statement).
            This ensures that the subprocess showing the progress terminats on context exit.
            Otherwise one has to make sure that at some point the stop() routine is called.

            abstract example::

                with AProgressClass(...) as p:
                    p.start()
                    # do stuff and modify counter
        """
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        # converts count to list and do type check
        try:
            for c in count:
                if not isinstance(c, Synchronized):
                    raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = True
        except TypeError:
            # count is not iterable -> single progress
            if not isinstance(count, Synchronized):
                raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = False
            count = [count]
        self.len = len(count)
        # converts max_count to list and do type check
        if max_count is not None:
            if self.is_multi:
                try:
                    # plain numbers are wrapped into shared values in place
                    for i, m in enumerate(max_count):
                        if not isinstance(m, Synchronized):
                            max_count[i] = UnsignedIntValue(m)
                except TypeError:
                    raise TypeError("'max_count' must be iterable")
            else:
                if not isinstance(max_count, Synchronized):
                    max_count = UnsignedIntValue(max_count)
                max_count = [max_count]
        else:
            max_count = [None] * self.len
        self.start_time = []
        self.speed_calc_cycles = speed_calc_cycles
        self.width = width
        self.q = []
        self.prepend = []
        self.lock = []
        self.last_count = []
        self.last_speed = []
        # per-counter bookkeeping, one entry each per monitored progress
        for i in range(self.len):
            self.q.append(myQueue())  # queue to save the last speed_calc_cycles
                                      # (time, count) information to calculate speed
            #self.q[-1].cancel_join_thread()
            self.last_count.append(UnsignedIntValue())
            self.last_speed.append(FloatValue())
            self.lock.append(mp.Lock())
            self.start_time.append(FloatValue(val=time.time()))
            if prepend is None:
                # no prepend given
                self.prepend.append('')
            else:
                if isinstance(prepend, str):
                    self.prepend.append(prepend)
                else:
                    # assume list of prepend, (needs to be a sequence)
                    self.prepend.append(prepend[i])
        self.max_count = max_count  # list of multiprocessing value type
        self.count = count          # list of multiprocessing value type
        self.interval = interval
        self.verbose = verbose
        self.show_on_exit = False
        # extra keyword arguments handed through to show_stat
        self.add_args = {}
        self.info_line = info_line
        self.show_stat = show_stat
        # setup loop class with func
        Loop.__init__(self,
                      func=_show_stat_wrapper_multi_Progress,
                      args=(self.count,
                            self.last_count,
                            self.start_time,
                            self.max_count,
                            self.speed_calc_cycles,
                            self.width,
                            self.q,
                            self.last_speed,
                            self.prepend,
                            show_stat,
                            self.len,
                            self.add_args,
                            self.lock,
                            self.info_line),
                      interval=interval,
                      sigint=sigint,
                      sigterm=sigterm,
                      auto_kill_on_last_resort=True)

    def __exit__(self, *exc_args):
        self.stop()

    @staticmethod
    def _calc(count,
              last_count,
              start_time,
              max_count,
              speed_calc_cycles,
              q,
              last_speed,
              lock):
        """do the pre calculations in order to get TET, speed, TTG

        :param count: count
        :param last_count: count at the last call, allows to treat the case of no progress
            between sequential calls
        :param start_time: the time when start was triggered
        :param max_count: the maximal value count can reach (shared value or None)
        :param speed_calc_cycles: number of (count, time) samples kept for speed estimation
        :param q: queue holding the recent (count, time) samples
        :param last_speed: shared float caching the last speed estimate
        :param lock: lock protecting access to q
        """
        count_value = count.value
        start_time_value = start_time.value
        current_time = time.time()
        if last_count.value != count_value:
            # some progress happened
            with lock:
                # save current state (count, time) to queue
                q.put((count_value, current_time))
                # get older state from queue (or initial state)
                # to to speed estimation
                if q.qsize() > speed_calc_cycles:
                    old_count_value, old_time = q.get()
                else:
                    old_count_value, old_time = 0, start_time_value
            last_count.value = count_value
            #last_old_count.value = old_count_value
            #last_old_time.value = old_time
            speed = (count_value - old_count_value) / (current_time - old_time)
            last_speed.value = speed
        else:
            # progress has not changed since last call
            # use also old (cached) data from the queue
            #old_count_value, old_time = last_old_count.value, last_old_time.value
            speed = last_speed.value
        if (max_count is None):
            max_count_value = None
        else:
            max_count_value = max_count.value
        tet = (current_time - start_time_value)
        if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
            ttg = None
        else:
            ttg = math.ceil((max_count_value - count_value) / speed)
        return count_value, max_count_value, speed, tet, ttg

    def _reset_all(self):
        """
        reset all progress information
        """
        for i in range(self.len):
            self._reset_i(i)

    def _reset_i(self, i):
        """
        reset i-th progress information
        """
        self.count[i].value = 0
        log.debug("reset counter %s", i)
        # drain the speed-estimation queue under the lock
        self.lock[i].acquire()
        for x in range(self.q[i].qsize()):
            self.q[i].get()
        self.lock[i].release()
        self.start_time[i].value = time.time()

    def _show_stat(self):
        """
        convenient functions to call the static show_stat_wrapper_multi with
        the given class members
        """
        _show_stat_wrapper_multi_Progress(self.count,
                                          self.last_count,
                                          self.start_time,
                                          self.max_count,
                                          self.speed_calc_cycles,
                                          self.width,
                                          self.q,
                                          self.last_speed,
                                          self.prepend,
                                          self.show_stat,
                                          self.len,
                                          self.add_args,
                                          self.lock,
                                          self.info_line,
                                          no_move_up=True)

    def reset(self, i=None):
        """resets the progress informaion

        :param i: tell which progress to reset, if None reset all
        :type i: None, int
        """
        if i is None:
            self._reset_all()
        else:
            self._reset_i(i)

    def start(self):
        """
        start
        """
        # before printing any output to stout, we can now check this
        # variable to see if any other ProgressBar has reserved that
        # terminal.
        if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
            if not terminal.terminal_reserve(progress_obj=self):
                log.warning("tty already reserved, NOT starting the progress loop!")
                return
        super(Progress, self).start()
        self.show_on_exit = True

    def stop(self):
        """
        trigger clean up by hand, needs to be done when not using
        context management via 'with' statement

        - will terminate loop process
        - show a last progress -> see the full 100% on exit
        - releases terminal reservation
        """
        super(Progress, self).stop()
        terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
        if self.show_on_exit:
            # print the final state; route through the pipe handler when it is
            # not the plain stdout printer (e.g. the IPython widget)
            if not isinstance(self.pipe_handler, PipeToPrint):
                myout = inMemoryBuffer()
                stdout = sys.stdout
                sys.stdout = myout
                self._show_stat()
                self.pipe_handler(myout.getvalue())
                sys.stdout = stdout
            else:
                self._show_stat()
            print()
            self.show_on_exit = False
def show_stat_ProgressBar(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """show_stat implementation for :py:class:`ProgressBar`: a wget/pv style bar.

    Without a usable max count only the absolute count and speed are shown;
    otherwise a bar of '=' characters is drawn that fills the line width.
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        print("{}{}{} [{}] {}#{} ".format(terminal.ESC_NO_CHAR_ATTR,
                                          COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                          humanize_time(tet), humanize_speed(speed),
                                          terminal.ESC_BOLD + COLTHM['BAR_COL'],
                                          count_value))
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress and show as bar on screen
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{}{}{} [{}] ".format(terminal.ESC_NO_CHAR_ATTR,
                                   COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                   humanize_time(tet),
                                   humanize_speed(speed))
        # remaining width for the bar, escape sequences excluded
        l = terminal.len_string_without_ESC(s1 + s3)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        print(s1 + s2 + s3)
class ProgressBar(Progress):
    """
    implements a progress bar similar to the one known from 'wget' or 'pv'
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        # fixes the show_stat formatter; all other arguments go to Progress
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBar, **kwargs)
        # self._PRE_PREPEND = terminal.ESC_NO_CHAR_ATTR + ESC_RED
        # self._POST_PREPEND = ESC_BOLD + ESC_GREEN
def show_stat_ProgressBarCounter(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """show_stat implementation for :py:class:`ProgressBarCounter`.

    Prefixes the per-cycle progress with the overall counter statistics
    (total elapsed time, reset rate and number of completed resets) taken
    from the extra kwargs supplied by ProgressBarCounter.
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # counter part: TET since init, reset speed and number of resets
    s_c = "{}{}{} [{}] {}#{} - ".format(terminal.ESC_NO_CHAR_ATTR,
                                        COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                        humanize_time(counter_tet),
                                        humanize_speed(counter_speed.value),
                                        COLTHM['BAR_COL'],
                                        str(counter_count.value) + terminal.ESC_DEFAULT)
    if width == 'auto':
        width = get_terminal_width()
    if (max_count_value is None) or (max_count_value == 0):
        # no max count -> append absolute per-cycle progress only
        s_c = "{}{} [{}] {}#{} ".format(s_c,
                                        humanize_time(tet),
                                        humanize_speed(speed),
                                        COLTHM['BAR_COL'],
                                        str(count_value) + terminal.ESC_DEFAULT)
    else:
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{} [{}] ".format(humanize_time(tet), humanize_speed(speed))
        # remaining width for the bar, escape sequences excluded
        l = terminal.len_string_without_ESC(s1 + s3 + s_c)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        s_c = s_c + s1 + s2 + s3
    print(s_c)
class ProgressBarCounter(Progress):
    """
    records also the time of each reset and calculates the speed
    of the resets.

    shows the TET since init (not effected by reset)
    the speed of the resets (number of finished processed per time)
    and the number of finished processes

    after that also show a progress of each process
        max_count > 0 and not None -> bar
        max_count == None -> absolute count statistic
        max_count == 0 -> hide process statistic at all
    """
    def __init__(self, speed_calc_cycles_counter=5, **kwargs):
        Progress.__init__(self, show_stat=show_stat_ProgressBarCounter, **kwargs)
        # per-progress reset bookkeeping, one entry each
        self.counter_count = []   # number of resets (completed cycles)
        self.counter_q = []       # recent (count, time) samples for reset speed
        self.counter_speed = []   # cached reset-speed estimate
        for i in range(self.len):
            self.counter_count.append(UnsignedIntValue(val=0))
            self.counter_q.append(myQueue())
            self.counter_speed.append(FloatValue())
        self.counter_speed_calc_cycles = speed_calc_cycles_counter
        self.init_time = time.time()
        # hand the counter statistics through to show_stat via add_args
        self.add_args['counter_count'] = self.counter_count
        self.add_args['counter_speed'] = self.counter_speed
        self.add_args['init_time'] = self.init_time

    def get_counter_count(self, i=0):
        # number of completed resets of the i-th progress
        return self.counter_count[i].value

    def _reset_i(self, i):
        # count this reset and update the reset-speed estimate before
        # delegating the actual progress reset to Progress._reset_i
        c = self.counter_count[i]
        with c.get_lock():
            c.value += 1
            count_value = c.value
        q = self.counter_q[i]
        current_time = time.time()
        q.put((count_value, current_time))
        if q.qsize() > self.counter_speed_calc_cycles:
            old_count_value, old_time = q.get()
        else:
            old_count_value, old_time = 0, self.init_time
        speed = (count_value - old_count_value) / (current_time - old_time)
        self.counter_speed[i].value = speed
        Progress._reset_i(self, i)
def get_d(s1, s2, width, lp, lps):
    """Split the free space between the stat strings *s1* and *s2*.

    Returns ``(s1, s2, d1, d2)`` where d1/d2 are the left/right padding
    widths, or None (implicitly) when the two strings do not fit into
    *width* minus prepend length *lp*, percent-string length *lps* and
    the two bracket characters.
    """
    visible = (len(terminal.remove_ESC_SEQ_from_string(s1)) +
               len(terminal.remove_ESC_SEQ_from_string(s2)))
    space = width - visible - 2 - lp - lps
    if space >= 0:
        left = space // 2
        return s1, s2, left, space - left
def full_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # most verbose stat layout: TET/speed/TTG left, ETA/ORT right;
    # returns None (via get_d) when it does not fit into width
    s1 = "TET {} {:>12} TTG {}".format(tet, speed, ttg)
    s2 = "ETA {} ORT {}".format(eta, ort)
    return get_d(s1, s2, width, lp, lps)
def full_minor_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # same information as full_stat but with single-letter keywords
    s1 = "E {} {:>12} G {}".format(tet, speed, ttg)
    s2 = "A {} O {}".format(eta, ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_1_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # first fallback: drop the ETA, keep elapsed/speed/to-go and overall runtime
    s1 = "E {} {:>12} G {}".format(tet, speed, ttg)
    s2 = "O {}".format(ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_2_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # second fallback: additionally drop the speed
    s1 = "E {} G {}".format(tet, ttg)
    s2 = "O {}".format(ort)
    return get_d(s1, s2, width, lp, lps)
def reduced_3_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # third fallback: only elapsed and to-go remain
    s1 = "E {} G {}".format(tet, ttg)
    s2 = ''
    return get_d(s1, s2, width, lp, lps)
def reduced_4_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    # last fallback: no textual stats at all, bar only
    s1 = ''
    s2 = ''
    return get_d(s1, s2, width, lp, lps)
def kw_bold(s, ch_after):
    """Wrap the known stat keywords in bold escape sequences.

    A keyword is only emboldened when immediately followed by one of the
    characters in *ch_after*, so keywords inside other words are left alone.
    """
    for keyword in ('TET', 'TTG', 'ETA', 'ORT', 'E', 'G', 'A', 'O'):
        for suffix in ch_after:
            plain = keyword + suffix
            fancy = terminal.ESC_BOLD + keyword + terminal.ESC_RESET_BOLD + suffix
            s = s.replace(plain, fancy)
    return s
def _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Build the 'fancy' (htop-like) progress line as a string.

    Tries increasingly compact stat layouts (full_stat ... reduced_4_stat)
    until one fits into *width*; the relative progress is indicated by
    replacing spaces with '-' up to the current position.
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        stat = "{}{} [{}] {}#{} ".format(COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress
        p = count_value / max_count_value
        if p < 1:
            ps = " {:.1%} ".format(p)
        else:
            ps = " {:.0%} ".format(p)
        if ttg is None:
            eta = '--'
            ort = None
        else:
            eta = datetime.datetime.fromtimestamp(time.time() + ttg).strftime("%Y%m%d_%H:%M:%S")
            ort = tet + ttg
        tet = humanize_time(tet)
        speed = '[' + humanize_speed(speed) + ']'
        ttg = humanize_time(ttg)
        ort = humanize_time(ort)
        repl_ch = '-'
        lp = len(prepend)
        args = p, tet, speed, ttg, eta, ort, repl_ch, width, lp, len(ps)
        # try layouts from most to least verbose until one fits
        res = full_stat(*args)
        if res is None:
            res = full_minor_stat(*args)
        if res is None:
            res = reduced_1_stat(*args)
        if res is None:
            res = reduced_2_stat(*args)
        if res is None:
            res = reduced_3_stat(*args)
        if res is None:
            res = reduced_4_stat(*args)
        if res is not None:
            s1, s2, d1, d2 = res
            s = s1 + ' ' * d1 + ps + ' ' * d2 + s2
            # index up to which the line counts as 'done'
            idx_p = math.ceil((width - lp - 2) * p)
            s_before = s[:idx_p].replace(' ', repl_ch)
            if (len(s_before) > 0) and (s_before[-1] == repl_ch):
                s_before = s_before[:-1] + '>'
            s_after = s[idx_p:]
            s_before = kw_bold(s_before, ch_after=[repl_ch, '>'])
            s_after = kw_bold(s_after, ch_after=[' '])
            stat = (COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT +
                    COLTHM['BAR_COL'] + terminal.ESC_BOLD + '[' + terminal.ESC_RESET_BOLD + s_before + terminal.ESC_DEFAULT +
                    s_after + terminal.ESC_BOLD + COLTHM['BAR_COL'] + ']' + terminal.ESC_NO_CHAR_ATTR)
        else:
            # nothing fits -> show only the percentage
            ps = ps.strip()
            if p == 1:
                ps = ' ' + ps
            stat = prepend + ps
    return stat
def show_stat_ProgressBarFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print the fancy (htop-style) progress line built by :py:func:`_stat`."""
    print(_stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs))
class ProgressBarFancy(Progress):
    """
    implements a progress bar where the color indicates the current status
    similar to the bars known from 'htop'
    """
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        # fixes the show_stat formatter; all other arguments go to Progress
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBarFancy, **kwargs)
def show_stat_ProgressBarCounterFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """show_stat implementation for :py:class:`ProgressBarCounterFancy`.

    Prefixes the fancy per-cycle bar from :py:func:`_stat` with the overall
    counter statistics supplied by ProgressBarCounter via kwargs.
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    # counter part: TET since init, reset speed and number of resets
    s_c = "{}{}{} [{}] {}#{}".format(terminal.ESC_NO_CHAR_ATTR,
                                     COLTHM['PRE_COL']+prepend+terminal.ESC_DEFAULT,
                                     humanize_time(counter_tet),
                                     humanize_speed(counter_speed.value),
                                     COLTHM['BAR_COL'],
                                     str(counter_count.value) + terminal.ESC_DEFAULT)
    if max_count_value is not None:
        if width == 'auto':
            width = get_terminal_width()
        s_c += ' - '
        if max_count_value == 0:
            # max_count of 0 -> show only the absolute per-cycle progress
            s_c = "{}{} [{}] {}#{} ".format(s_c, humanize_time(tet), humanize_speed(speed),
                                            COLTHM['BAR_COL'], str(count_value)+terminal.ESC_DEFAULT)
        else:
            # remaining width is handed to _stat for the fancy bar
            _width = width - terminal.len_string_without_ESC(s_c)
            s_c += _stat(count_value, max_count_value, '', speed, tet, ttg, _width, i)
    print(s_c)
class ProgressBarCounterFancy(ProgressBarCounter):
    """ProgressBarCounter variant using the fancy (htop-style) bar rendering."""
    def __init__(self, *args, **kwargs):
        ProgressBarCounter.__init__(self, *args, **kwargs)
        # swap in the fancy formatter after the base class set its own
        self.show_stat = show_stat_ProgressBarCounterFancy
class SIG_handler_Loop(object):
    """class to setup signal handling for the Loop class

    Note: each subprocess receives the default signal handling from it's parent.
    If the signal function from the module signal is evoked within the subprocess
    this default behavior can be overwritten.

    The init function receives a shared memory boolean object which will be set
    false in case of signal detection. Since the Loop class will check the state
    of this boolean object before each repetition, the loop will stop when
    a signal was received.
    """
    def __init__(self, sigint, sigterm, log, prefix):
        """
        :param sigint: handler string ('ign' or 'stop') for SIGINT
        :param sigterm: handler string ('ign' or 'stop') for SIGTERM
        :param log: logger used to report handler setup and received signals
        :param prefix: identifier prefix kept for log messages
        """
        self.set_signal(signal.SIGINT, sigint)
        self.set_signal(signal.SIGTERM, sigterm)
        self.prefix = prefix
        self.log = log
        self.log.info("setup signal handler for loop (SIGINT:%s, SIGTERM:%s)", sigint, sigterm)

    def set_signal(self, sig, handler_str):
        """Install the handler selected by *handler_str* for signal *sig*.

        :raises TypeError: for an unknown handler string
        """
        if handler_str == 'ign':
            signal.signal(sig, self._ignore_signal)
        elif handler_str == 'stop':
            signal.signal(sig, self._stop_on_signal)
        else:
            # bug fix: the original passed handler_str as a second positional
            # argument to TypeError instead of interpolating it, so the '%s'
            # placeholder was never filled in (also fixed 'hander' typo)
            raise TypeError("unknown signal handler string '%s'" % handler_str)

    def _ignore_signal(self, signal, frame):
        # 'ign' behavior: just log and carry on
        self.log.debug("ignore received sig %s", signal_dict[signal])

    def _stop_on_signal(self, signal, frame):
        # 'stop' behavior: raise so the loop wrapper terminates gracefully
        self.log.info("received sig %s -> raise InterruptedError", signal_dict[signal])
        raise LoopInterruptError()
def FloatValue(val=0.):
    """returns a `multiprocessing.Value` of type `float` ('d') with initial value `val` and an associated lock"""
    return mp.Value('d', val, lock=True)
def UnsignedIntValue(val=0):
    """returns a `multiprocessing.Value` of type `unsigned int` ('I') with initial value `val` and an associated lock"""
    return mp.Value('I', val, lock=True)
def StringValue(num_of_bytes):
    """returns a `multiprocessing.Array` of type `character` and length `num_of_bytes`

    (on Python 3 the length is converted to a zero-filled bytearray, on
    Python 2 it is passed through as-is -- see _jm_compatible_bytearray)
    """
    return mp.Array('c', _jm_compatible_bytearray(num_of_bytes), lock=True)
def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort = False):
    """Make sure the process `proc` stops, escalating gently.

    First join with `timeout` (graceful stop assumed to be signaled elsewhere),
    then send SIGTERM and join with 3*timeout.  If the process is still alive,
    either send SIGKILL right away (auto_kill_on_last_resort=True) or ask the
    user interactively whether to retry SIGTERM, send SIGKILL, or give up.

    :param proc: the multiprocessing.Process to shut down
    :param prefix: identifier prefix (not used in the log messages here)
    :param timeout: seconds for the initial graceful join
    :param auto_kill_on_last_resort: if True, send SIGKILL without asking
    :return: True if the process stopped, False if the user chose 'ignore'
    """
    proc.join(timeout)
    if not proc.is_alive():
        log.debug("termination of process (pid %s) within timeout of %s SUCCEEDED!", proc.pid, humanize_time(timeout))
        return True

    # process still runs -> send SIGTERM -> see what happens
    log.warning("termination of process (pid %s) within given timeout of %s FAILED!", proc.pid, humanize_time(timeout))
    proc.terminate()
    new_timeout = 3*timeout
    log.debug("wait for termination (timeout %s)", humanize_time(new_timeout))
    proc.join(new_timeout)
    if not proc.is_alive():
        log.info("termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!", proc.pid, humanize_time(new_timeout))
        return True

    log.warning("termination of process (pid %s) via SIGTERM with timeout of %s FAILED!", proc.pid, humanize_time(new_timeout))
    log.debug("auto_kill_on_last_resort is %s", auto_kill_on_last_resort)
    # 'k' triggers SIGKILL below; any other placeholder means "try SIGTERM again"
    answer = 'k' if auto_kill_on_last_resort else '_'
    while True:
        log.debug("answer string is %s", answer)
        if answer == 'k':
            log.warning("send SIGKILL to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGKILL)
            time.sleep(0.1)
        else:
            log.info("send SIGTERM to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGTERM)
            time.sleep(0.1)

        if not proc.is_alive():
            log.info("process (pid %s) has stopped running!", proc.pid)
            return True
        else:
            log.warning("process (pid %s) is still running!", proc.pid)

        print("the process (pid {}) seems still running".format(proc.pid))
        try:
            answer = input("press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore")
        except Exception as e:
            # interactive query failed (e.g. no usable stdin) -> fall back to SIGKILL
            log.error("could not ask for sending SIGKILL due to {}".format(type(e)))
            log.info(traceback.format_exc())
            log.warning("send SIGKILL now")
            answer = 'k'

        if answer == 'ignore':
            log.warning("ignore process %s", proc.pid)
            return False
        elif answer != 'k':
            # empty string keeps re-sending SIGTERM on the next iteration
            answer = ''
def getCountKwargs(func):
    """Return the ["count kwarg", "count_max kwarg"] pair accepted by `func`.

    Valid combinations are defined in `progress.validCountKwargs`.
    Returns None if no matching pair of keyword arguments is found.
    """
    code = getattr(func, "__code__", None)
    if code is None:
        return None
    # only named (positional-or-keyword) parameters of the function
    arg_names = code.co_varnames[:code.co_argcount]
    matches = (pair for pair in validCountKwargs
               if pair[0] in arg_names and pair[1] in arg_names)
    return next(matches, None)
def humanize_speed(c_per_sec):
    """convert a speed in counts per second to counts per [s, min, h, d],
    choosing the smallest unit whose value is at least one (zero stays in c/s).
    """
    factors = (60, 60, 24)
    unit_names = ('c/s', 'c/min', 'c/h', 'c/d')
    value = c_per_sec
    idx = 0
    if value > 0:
        # scale up until the value reaches 1 or we run out of units
        while value < 1 and idx < len(factors):
            value *= factors[idx]
            idx += 1
    return "{:.1f}{}".format(value, unit_names[idx])
def humanize_time(secs):
    """convert seconds into a human readable string

    None -> '--', sub-second -> milliseconds, under ten seconds -> seconds
    with two decimals, otherwise hh:mm:ss.
    """
    if secs is None:
        return '--'
    if secs < 1:
        return "{:.2f}ms".format(secs * 1000)
    if secs < 10:
        return "{:.2f}s".format(secs)
    minutes, seconds = divmod(secs, 60)
    hours, minutes = divmod(minutes, 60)
    return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
def codecov_subprocess_check():
    # NOTE(review): appears to exist only so coverage tooling can verify that
    # code executed inside a subprocess is recorded -- confirm against the test suite
    print("this line will be only called from a subprocess")
# factory for the per-counter queues (see Progress.__init__)
myQueue = mp.Queue

# a mapping from the numeric values of the signals to their names used in the
# standard python module signals
signal_dict = {}
for s in dir(signal):
    if s.startswith('SIG') and s[3] != '_':
        n = getattr(signal, s)
        if n in signal_dict:
            # several names may share one signal number; join them with '/'
            signal_dict[n] += ('/'+s)
        else:
            signal_dict[n] = s

# color themes for the progress output
#   PRE_COL    - escape sequence coloring the prepend text
#   BAR_COL    - escape sequence coloring the bar itself
#   ADD_LNS_UP - extra line offset (usage not visible in this chunk -- confirm)
_colthm_term_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_GREEN, 'ADD_LNS_UP':0}
_colthm_ipyt_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_BLUE, 'ADD_LNS_UP':0}
_colthm_wincmd_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_GREEN, 'ADD_LNS_UP':1}

color_themes = {'term_default': _colthm_term_default,
                'ipyt_default': _colthm_ipyt_default,
                'wincmd_default': _colthm_wincmd_default}

# pick a platform-appropriate default theme (changed via choose_color_theme)
if platform.system() == 'Windows':
    COLTHM = _colthm_wincmd_default
else:
    COLTHM = _colthm_term_default
def choose_color_theme(name):
    """Select the module-wide color theme by `name`.

    Unknown names emit a warning and leave the current theme untouched.
    """
    global COLTHM
    try:
        COLTHM = color_themes[name]
    except KeyError:
        warnings.warn("no such color theme {}".format(name))
# keyword arguments that define counting in wrapped functions;
# each entry is a pair [<counter kwarg>, <maximum kwarg>] recognized by getCountKwargs
validCountKwargs = [
    [ "count", "count_max"],
    [ "count", "max_count"],
    [ "c", "m"],
    [ "jmc", "jmm"],
]
|
cimatosa/progression | progression/progress.py | humanize_speed | python | def humanize_speed(c_per_sec):
scales = [60, 60, 24]
units = ['c/s', 'c/min', 'c/h', 'c/d']
speed = c_per_sec
i = 0
if speed > 0:
while (speed < 1) and (i < len(scales)):
speed *= scales[i]
i += 1
return "{:.1f}{}".format(speed, units[i]) | convert a speed in counts per second to counts per [s, min, h, d], choosing the smallest value greater zero. | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L1394-L1406 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Progression module
------------------
This module provides the (so far) four variants to display progress information:
* :py:class:`.ProgressBar`
This class monitors one or multiple processes showing the total elapsed time (TET), the current speed
estimated from the most recent updated, a colored bar showing the progress and an
estimate for the remaining time, also called time to go (TTG).
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 5.83s [7.2c/s] <span style="color:#00ff00"><b>[=====================> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounter`
If a single process is intended to do several sequential task, the :py:class:`.ProgressBarCounter` class can keep track of the number
of accomplished tasks on top of monitoring the individual task just like :py:class:`.ProgressBar` does.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre><span style="color:#00ff00"><b> [</b><b>TET</b>-5.83s-----[7.2c/s]-<b>TTG</b>-8.05s-></span> 42.0% <b>ETA</b> 20161011_16:52:52 <b>ORT</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
* :py:class:`.ProgressBarFancy`
This class intends to be a replacement for :py:class:`.ProgressBar` with slightly more information and
better handling of small terminal widths.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - 5.83s [7.2c/s] <span style="color:#00ff00"><b>[===========> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounterFancy`
Just as :py:class:`.ProgressBarFancy` this replaces :py:class:`.ProgressBarCounter`.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - <span style="color:#800000"></span><span style="color:#00ff00"><b>[</b><b>E</b>-5.83s-----[7.2c/s]-<b>G</b>-8</span>.05s 42.0% <b>O</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
.. autoclass:: Progress
:members:
:inherited-members:
.. autoclass:: ProgressBar
:members:
.. autoclass:: ProgressBarCounter
:members:
.. autoclass:: ProgressBarFancy
:members:
.. autoclass:: ProgressBarCounterFancy
:members:
.. autofunction:: UnsignedIntValue
.. autofunction:: FloatValue
.. autofunction:: StringValue
"""
from __future__ import division, print_function
import datetime
import io
import logging
from logging.handlers import QueueHandler, QueueListener
import math
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
import os
import sys
import signal
import subprocess as sp
import threading
import time
import traceback
import warnings
from . import terminal
import platform
_IPYTHON = True
try:
import ipywidgets
except ImportError:
_IPYTHON = False
warnings.warn("could not load ipywidgets (IPython HTML output will not work)", category=ImportWarning)
except DeprecationWarning:
pass
try:
from IPython.display import display
except ImportError:
_IPYTHON = False
warnings.warn("could not load IPython (IPython HTML output will not work)", category=ImportWarning)
# Magic conversion from 3 to 2
if sys.version_info[0] == 2:
    ProcessLookupError = OSError        # ProcessLookupError does not exist in py2
    inMemoryBuffer = io.BytesIO         # py2 strings are bytes
    old_math_ceil = math.ceil

    def my_int_ceil(f):
        # py2 math.ceil returns a float; wrap so it returns int like py3
        return int(old_math_ceil(f))

    math.ceil = my_int_ceil
    _jm_compatible_bytearray = lambda x: x

    class TimeoutError(Exception):
        pass
elif sys.version_info[0] == 3:
    inMemoryBuffer = io.StringIO
    _jm_compatible_bytearray = bytearray
class MultiLineFormatter(logging.Formatter):
    """Formatter that indents continuation lines of a multiline message so
    they line up under the first line, i.e.

        <HEAD> msg_line1
               msg_line2
               ...
    """
    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        # everything in front of the message acts as the alignment header
        head = formatted.split(record.message)[0]
        return formatted.replace('\n', '\n' + ' ' * len(head))
# def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
# def_handl.setLevel(logging.DEBUG)                      # ... listens to all messages
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
# def_handl.setFormatter(fmt)                            # ... and pads multiline messages
log = logging.getLogger(__name__)                        # creates the default log for this module
# log.addHandler(def_handl)
# NOTE: no handler is attached here; Loop.start forwards subprocess records
# through a QueueListener built from whatever handlers the user adds to `log`
class LoopExceptionError(RuntimeError):
    """raised when the function running inside a Loop subprocess exited with a non-zero exitcode"""
    pass
class LoopInterruptError(Exception):
    """raised inside the Loop subprocess by the 'stop' signal handler to leave the loop"""
    pass
class StdoutPipe(object):
    """A file-like replacement for stream objects such as stdout which
    forwards every chunk through the ``send`` method of a connection.

    example usage:

        >>> import sys
        >>> from multiprocessing import Pipe
        >>> from progression import StdoutPipe
        >>> conn_recv, conn_send = Pipe(False)
        >>> sys.stdout = StdoutPipe(conn_send)
        >>> print("hallo welt", end='')  # this is no going through the pipe
        >>> msg = conn_recv.recv()
        >>> sys.stdout = sys.__stdout__
        >>> print(msg)
        hallo welt
        >>> assert msg == "hallo welt"
    """
    def __init__(self, conn):
        self.conn = conn

    def flush(self):
        # nothing is buffered locally, so there is nothing to flush
        pass

    def write(self, b):
        self.conn.send(b)
class PipeToPrint(object):
    """Default pipe handler: echo every received chunk to stdout unchanged."""
    def __call__(self, b):
        print(b, end='')

    def close(self):
        # no resources held
        pass
class PipeFromProgressToIPythonHTMLWidget(object):
    """pipe handler that buffers progress output and renders each complete
    frame (terminated by the magic escape ending) into an IPython HTML widget"""
    def __init__(self):
        self.htmlWidget = ipywidgets.widgets.HTML()
        display(self.htmlWidget)
        self._buff = ""   # accumulates chunks until a full frame arrived

    def __call__(self, b):
        self._buff += b
        # the magic ending marks a complete progress frame -> convert and show it
        if b.endswith(terminal.ESC_MY_MAGIC_ENDING):
            buff = terminal.ESC_SEQ_to_HTML(self._buff)
            self.htmlWidget.value = '<style>.widget-html{font-family:monospace}</style><pre>'+buff+'</pre>'
            self._buff = ""

    def close(self):
        self.htmlWidget.close()
PipeHandler = PipeToPrint  # module-wide pipe handler class, changed via choose_pipe_handler
def choose_pipe_handler(kind = 'print', color_theme = None):
    """Select how progress output is delivered.

    :param kind: 'print' writes to stdout, 'ipythonhtml' renders into an
        IPython HTML widget (requires IPython and ipywidgets to be importable)
    :param color_theme: name of a theme from `color_themes`; when None a
        default matching the output kind is chosen
    :raises ValueError: if `kind` is not one of 'print' / 'ipythonhtml'
    """
    global PipeHandler
    if kind == 'print':
        PipeHandler = PipeToPrint
        choose_color_theme('term_default' if color_theme is None else color_theme)
    elif kind == 'ipythonhtml':
        if _IPYTHON:
            PipeHandler = PipeFromProgressToIPythonHTMLWidget
            choose_color_theme('ipyt_default' if color_theme is None else color_theme)
        else:
            warnings.warn("can not choose ipythonHTML (IPython and/or ipywidgets were not loaded)")
    else:
        # fix: the original message contained a '{}' placeholder but never
        # called .format, so the offending kind was never shown
        raise ValueError("unknown kind '{}' for pipe_handler, use one out of ('print', 'ipythonhtml')".format(kind))
def get_terminal_width():
    """Return the line width the progress output should be formatted to."""
    if PipeHandler == PipeToPrint:
        # real terminal -> ask the OS
        return terminal.get_terminal_width()
    if PipeHandler == PipeFromProgressToIPythonHTMLWidget:
        # HTML widget rendering uses a fixed width
        return 80
    raise NotImplementedError
def get_identifier(name=None, pid=None, bold=True):
    """Build an identifier string "<name>_<pid>" (or "PID_<pid>" when no name
    is given), optionally wrapped in terminal bold escape sequences."""
    if pid is None:
        pid = os.getpid()
    start, stop = "", ""
    if bold:
        start, stop = terminal.ESC_BOLD, terminal.ESC_NO_CHAR_ATTR
    label = "PID" if name is None else name
    return "{}{}_{}{}".format(start, label, pid, stop)
def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
                       logging_level, conn_send, func_running, log_queue):
    """
    to be executed as a separate process (that's why this function is module level)

    Repeatedly calls func(*args) every `interval` seconds while the shared
    boolean `shared_mem_run` is True; `shared_mem_pause` suspends the calls.
    All prints and log records are forwarded to the parent process through
    `conn_send` and `log_queue` respectively.
    """
    prefix = get_identifier(name) + ' '
    global log
    log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
    log.setLevel(logging_level)
    log.addHandler(QueueHandler(log_queue))   # forward log records to the parent process
    sys.stdout = StdoutPipe(conn_send)        # redirect all prints through the pipe to the parent
    log.debug("enter wrapper_func")
    SIG_handler_Loop(sigint, sigterm, log, prefix)
    func_running.value = True
    error = False
    while shared_mem_run.value:
        try:
            # in pause mode, simply sleep
            if shared_mem_pause.value:
                quit_loop = False
            else:
                # if not pause mode -> call func and see what happens
                try:
                    quit_loop = func(*args)
                except LoopInterruptError:
                    raise
                except Exception as e:
                    log.error("error %s occurred in loop calling 'func(*args)'", type(e))
                    log.info("show traceback.print_exc()\n%s", traceback.format_exc())
                    error = True
                    break

            if quit_loop is True:
                log.debug("loop stooped because func returned True")
                break

            time.sleep(interval)
        except LoopInterruptError:
            log.debug("quit wrapper_func due to InterruptedError")
            break

    func_running.value = False
    if error:
        # non-zero exitcode tells the parent (Loop.start/stop) that func raised
        sys.exit(-1)
    else:
        log.debug("wrapper_func terminates gracefully")

    # gets rid of the following warnings
    #   Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
    #   ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
    try:
        if mp.get_start_method() == "spawn":
            sys.stdin.close()
    except AttributeError:
        # presumably mp.get_start_method is missing on older pythons -- confirm
        pass
class LoopTimeoutError(TimeoutError):
    """raised by Loop.start when the loop function does not come up within the given timeout"""
    pass
class Loop(object):
    """
    class to run a function periodically in a separate process.

    In case the called function returns True, the loop will stop.
    Otherwise a time interval given by interval will be slept before
    another execution is triggered.

    The shared memory variable _run (accessible via the class property run)
    also determines if the function is executed another time. If set to False
    the execution stops.

    For safe cleanup (and in order to catch any Errors)
    it is advisable to instantiate this class
    using 'with' statement as follows:

        with Loop(**kwargs) as my_loop:
            my_loop.start()
            ...

    this will guarantee you that the spawned loop process is
    down when exiting the 'with' scope.

    The only circumstance where the process is still running is
    when you set auto_kill_on_last_resort to False and answer the
    question to send SIGKILL with no.
    """
    def __init__(self,
                 func,
                 args = (),
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 auto_kill_on_last_resort = False,
                 raise_error = True):
        """
        func [callable] - function to be called periodically

        args [tuple] - arguments passed to func when calling

        interval [pos number] - time to "sleep" between each call

        verbose - DEPRECATED, only kept for compatibility, use global log.level to
        specify verbosity

        sigint [string] - signal handler string to set SIGINT behavior (see below)

        sigterm [string] - signal handler string to set SIGTERM behavior (see below)

        auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
        to loop process in case normal stop and SIGTERM failed. If set True, send SIGKILL
        without asking.

        the signal handler string may be one of the following

            ign: ignore the incoming signal
            stop: raise InterruptedError which is caught silently.
        """
        self._proc = None
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)

        self.func = func
        self.args = args
        self.interval = interval
        assert self.interval >= 0
        # shared flags so the subprocess sees state changes made by the parent
        self._run = mp.Value('b', False)
        self._pause = mp.Value('b', False)
        self._func_running = mp.Value('b', False)
        self._sigint = sigint
        self._sigterm = sigterm
        self._auto_kill_on_last_resort = auto_kill_on_last_resort
        log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)

        self._monitor_thread = None
        self.pipe_handler = PipeHandler()
        self.raise_error = raise_error

    def __enter__(self):
        return self

    def __exit__(self, *exc_args):
        if self.is_alive():
            log.debug("loop is still running on context exit")
        else:
            log.debug("loop has stopped on context exit")
        self.stop()

    def __cleanup(self):
        """
        Wait at most twice as long as the given repetition interval
        for the _wrapper_function to terminate.

        If after that time the _wrapper_function has not terminated,
        send SIGTERM to the process.

        Wait at most three times as long as the given repetition interval
        for the _wrapper_function to terminate.

        If the process is still running send SIGKILL automatically if
        auto_kill_on_last_resort was set True or ask the
        user to confirm sending SIGKILL
        """
        # set run to False and wait some time -> see what happens
        self._run.value = False
        if check_process_termination(proc = self._proc,
                                     timeout = 2*self.interval,
                                     prefix = '',
                                     auto_kill_on_last_resort = self._auto_kill_on_last_resort):
            log.debug("cleanup successful")
        else:
            raise RuntimeError("cleanup FAILED!")
        try:
            self.conn_send.close()
            self._log_queue_listener.stop()
        except OSError:
            pass
        log.debug("wait for monitor thread to join")
        self._monitor_thread.join()
        log.debug("monitor thread to joined")
        self._func_running.value = False

    def _monitor_stdout_pipe(self):
        # runs in a daemon thread: forward everything the subprocess prints
        # to the configured pipe handler until the pipe is closed
        while True:
            try:
                b = self.conn_recv.recv()
                self.pipe_handler(b)
            except EOFError:
                break

    def start(self, timeout=None):
        """
        uses multiprocess Process to call _wrapper_func in subprocess
        """
        if self.is_alive():
            log.warning("a process with pid %s is already running", self._proc.pid)
            return

        self._run.value = True
        self._func_running.value = False
        name = self.__class__.__name__

        self.conn_recv, self.conn_send = mp.Pipe(False)
        self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
        self._monitor_thread.daemon=True
        self._monitor_thread.start()
        log.debug("started monitor thread")

        self._log_queue = mp.Queue()
        self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
        self._log_queue_listener.start()

        args = (self.func, self.args, self._run, self._pause, self.interval,
                self._sigint, self._sigterm, name, log.level, self.conn_send,
                self._func_running, self._log_queue)
        self._proc = mp.Process(target = _loop_wrapper_func,
                                args = args)
        self._proc.start()
        log.info("started a new process with pid %s", self._proc.pid)
        log.debug("wait for loop function to come up")
        t0 = time.time()
        # block until the wrapper function signaled that it is running,
        # detecting early termination and the optional startup timeout
        while not self._func_running.value:
            if self._proc.exitcode is not None:
                exc = self._proc.exitcode
                self._proc = None
                if exc == 0:
                    log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
                    return
                else:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
                                             "see log (INFO level) for traceback information")
            time.sleep(0.1)
            if (timeout is not None) and ((time.time() - t0) > timeout):
                err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
                log.error(err_msg)
                log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
                         "or an internal error occurred before reaching the function call")
                raise LoopTimeoutError(err_msg)
        log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))

    def stop(self):
        """
        stops the process triggered by start

        Setting the shared memory boolean run to false, which should prevent
        the loop from repeating. Call __cleanup to make sure the process
        stopped. After that we could trigger start() again.
        """
        if self.is_alive():
            self._proc.terminate()

        if self._proc is not None:
            self.__cleanup()
            if self.raise_error:
                # exitcode 255 corresponds to sys.exit(-1) in the wrapper func
                if self._proc.exitcode == 255:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
                                             "see log (INFO level) for traceback information")
        self.pipe_handler.close()
        self._proc = None

    def join(self, timeout):
        """
        calls join for the spawned process with given timeout
        """
        if self.is_alive():
            self._proc.join(timeout)

    def is_alive(self):
        # True while the spawned process exists and has not terminated
        if self._proc is None:
            return False
        else:
            return self._proc.is_alive()

    def is_running(self):
        # True only while the wrapped function loop is actually executing
        if self.is_alive():
            return self._func_running.value
        else:
            return False

    def pause(self):
        # suspend calls to func (the subprocess keeps sleeping)
        if self._run.value:
            self._pause.value = True
            log.debug("process with pid %s paused", self._proc.pid)

    def resume(self):
        # resume calls to func after pause()
        if self._run.value:
            self._pause.value = False
            log.debug("process with pid %s resumed", self._proc.pid)

    def getpid(self):
        # pid of the spawned process, or None if not started
        if self._proc is not None:
            return self._proc.pid
        else:
            return None
def show_stat_base(count_value, max_count_value, prepend, speed, tet, ttg, width, **kwargs):
    """Abstract hook that formats one line of progress information.

    Called periodically for each monitored progress; subclasses / concrete
    render functions implement the actual formatting.

    :param count_value: a number holding the current state
    :param max_count_value: should be the largest number `count_value` can reach
    :param prepend: additional text for each progress
    :param speed: the speed estimation
    :param tet: the total elapsed time
    :param ttg: the time to go
    :param width: the width for the progressbar; `"auto"` means the
        implementation should detect the available width itself
    :type width: int or "auto"
    """
    raise NotImplementedError
def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                width, q, last_speed, prepend, show_stat_function, add_args,
                                i, lock):
    """Compute the statistics for one counter and hand them to `show_stat_function`."""
    stats = Progress._calc(count,
                           last_count,
                           start_time,
                           max_count,
                           speed_calc_cycles,
                           q,
                           last_speed,
                           lock)
    count_value, max_count_value, speed, tet, ttg = stats
    return show_stat_function(count_value, max_count_value, prepend,
                              speed, tet, ttg, width, i, **add_args)
def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                      width, q, last_speed, prepend, show_stat_function, len_,
                                      add_args, lock, info_line, no_move_up=False):
    """
    call the static method show_stat_wrapper for each process, print the
    optional info lines below, then move the cursor back up so the next
    update overwrites this frame
    """
    # print(ESC_BOLD, end='')
    # sys.stdout.flush()
    for i in range(len_):
        _show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles,
                                    width, q[i], last_speed[i], prepend[i], show_stat_function,
                                    add_args, i, lock[i])
    n = len_
    if info_line is not None:
        s = info_line.value.decode('utf-8')
        s = s.split('\n')
        n += len(s)
        for si in s:
            if width == 'auto':
                # NOTE: rebinds the local 'width'; the detected terminal width
                # is reused for all remaining info lines
                width = get_terminal_width()
            if len(si) > width:
                si = si[:width]
            print("{0:<{1}}".format(si, width))
    if no_move_up:
        n = 0
    # this is only a hack to find the end
    # of the message in a stream
    # so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending
    print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')
    sys.stdout.flush()
class Progress(Loop):
    """
    Abstract Progress Class

    The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
    function which calculates progress information from a changing counter value.

    The formatting of these information is done by overwriting the static member
    :py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
    format a single progress bar on a single line only.

    The extension to multiple progresses is done
    automatically based on the formatting of a single line.
    """
    def __init__(self,
                 count,
                 max_count = None,
                 prepend = None,
                 width = 'auto',
                 speed_calc_cycles = 10,
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 info_line = None,
                 show_stat = None):
        """
        :param count: shared variable for holding the current state
            (use :py:func:`UnsignedIntValue` for short hand creation)
        :type count: list/single value of multiprocessing.Value
        :param max_count: shared variable for holding the final state
        :type max_count: None or list/single value of multiprocessing.Value
        :param prepend: string to put in front of each progress output
        :type prepend: None, str or list of str
        :param width: the width to use for the progress line (fixed or automatically determined)
        :type width: int or "auto"
        :param speed_calc_cycles: number of updates (cycles) to use for estimating the speed
            (example: ``speed_calc_cycles = 4`` and ``interval = 1`` means that the speed is
            estimated from the current state and the state 4 updates before, where the elapsed
            time will roughly be 4s)
        :param interval: seconds to wait before updating the progress
        :param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control
            the output level
        :param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigint: "stop" or "ign"
        :param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigterm: "stop" or "ign"
        :param info_line: additional text to show below the progress (use
            :py:func:`StringValue` for short hand creation of shared strings)
        :type info_line: None or multiprocessing.Array of characters

        .. note::
            As `Progress` is derived from :py:class:`Loop` it is highly encouraged to create
            any instance of Progress with a context manager (``with`` statement).
            This ensures that the subprocess showing the progress terminates on context exit.
            Otherwise one has to make sure that at some point the stop() routine is called.

        abstract example::

            with AProgressClass(...) as p:
                p.start()
                # do stuff and modify counter
        """
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)

        # converts count to list and do type check
        try:
            for c in count:
                if not isinstance(c, Synchronized):
                    raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = True
        except TypeError:
            if not isinstance(count, Synchronized):
                raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = False
            count = [count]

        self.len = len(count)

        # converts max_count to list and do type check
        if max_count is not None:
            if self.is_multi:
                try:
                    for i, m in enumerate(max_count):
                        if not isinstance(m, Synchronized):
                            # plain numbers are wrapped into shared values
                            max_count[i] = UnsignedIntValue(m)
                except TypeError:
                    raise TypeError("'max_count' must be iterable")
            else:
                if not isinstance(max_count, Synchronized):
                    max_count = UnsignedIntValue(max_count)
                max_count = [max_count]
        else:
            max_count = [None] * self.len

        self.start_time = []
        self.speed_calc_cycles = speed_calc_cycles
        self.width = width
        self.q = []
        self.prepend = []
        self.lock = []
        self.last_count = []
        self.last_speed = []
        for i in range(self.len):
            self.q.append(myQueue())  # queue to save the last speed_calc_cycles
                                      # (time, count) information to calculate speed
            #self.q[-1].cancel_join_thread()
            self.last_count.append(UnsignedIntValue())
            self.last_speed.append(FloatValue())
            self.lock.append(mp.Lock())
            self.start_time.append(FloatValue(val=time.time()))
            if prepend is None:
                # no prepend given
                self.prepend.append('')
            else:
                if isinstance(prepend, str):
                    self.prepend.append(prepend)
                else:
                    # assume list of prepend, (needs to be a sequence)
                    self.prepend.append(prepend[i])

        self.max_count = max_count  # list of multiprocessing value type
        self.count = count          # list of multiprocessing value type

        self.interval = interval
        self.verbose = verbose

        self.show_on_exit = False
        self.add_args = {}          # extra kwargs forwarded to show_stat

        self.info_line = info_line
        self.show_stat = show_stat

        # setup loop class with func
        Loop.__init__(self,
                      func = _show_stat_wrapper_multi_Progress,
                      args = (self.count,
                              self.last_count,
                              self.start_time,
                              self.max_count,
                              self.speed_calc_cycles,
                              self.width,
                              self.q,
                              self.last_speed,
                              self.prepend,
                              show_stat,
                              self.len,
                              self.add_args,
                              self.lock,
                              self.info_line),
                      interval = interval,
                      sigint = sigint,
                      sigterm = sigterm,
                      auto_kill_on_last_resort = True)

    def __exit__(self, *exc_args):
        self.stop()

    @staticmethod
    def _calc(count,
              last_count,
              start_time,
              max_count,
              speed_calc_cycles,
              q,
              last_speed,
              lock):
        """do the pre calculations in order to get TET, speed, TTG

        :param count: count
        :param last_count: count at the last call, allows to treat the case of no progress
            between sequential calls
        :param start_time: the time when start was triggered
        :param max_count: the maximal value count can reach (shared value or None)
        :param speed_calc_cycles: number of (count, time) samples kept for speed estimation
        :param q: per-counter queue holding the recent (count, time) samples
        :param last_speed: shared float caching the most recent speed estimate
        :param lock: guards access to the queue
        """
        count_value = count.value
        start_time_value = start_time.value
        current_time = time.time()

        if last_count.value != count_value:
            # some progress happened
            with lock:
                # save current state (count, time) to queue
                q.put((count_value, current_time))

                # get older state from queue (or initial state)
                # to do speed estimation
                if q.qsize() > speed_calc_cycles:
                    old_count_value, old_time = q.get()
                else:
                    old_count_value, old_time = 0, start_time_value

            last_count.value = count_value
            #last_old_count.value = old_count_value
            #last_old_time.value = old_time
            speed = (count_value - old_count_value) / (current_time - old_time)
            last_speed.value = speed
        else:
            # progress has not changed since last call
            # use also old (cached) data from the queue
            #old_count_value, old_time = last_old_count.value, last_old_time.value
            speed = last_speed.value

        if (max_count is None):
            max_count_value = None
        else:
            max_count_value = max_count.value

        tet = (current_time - start_time_value)

        if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
            ttg = None
        else:
            ttg = math.ceil((max_count_value - count_value) / speed)

        return count_value, max_count_value, speed, tet, ttg

    def _reset_all(self):
        """
        reset all progress information
        """
        for i in range(self.len):
            self._reset_i(i)

    def _reset_i(self, i):
        """
        reset i-th progress information
        """
        self.count[i].value=0
        log.debug("reset counter %s", i)
        self.lock[i].acquire()
        # drain the speed-estimation queue so old samples do not distort the speed
        for x in range(self.q[i].qsize()):
            self.q[i].get()
        self.lock[i].release()
        self.start_time[i].value = time.time()

    def _show_stat(self):
        """
        convenient function to call the static show_stat_wrapper_multi with
        the given class members
        """
        _show_stat_wrapper_multi_Progress(self.count,
                                          self.last_count,
                                          self.start_time,
                                          self.max_count,
                                          self.speed_calc_cycles,
                                          self.width,
                                          self.q,
                                          self.last_speed,
                                          self.prepend,
                                          self.show_stat,
                                          self.len,
                                          self.add_args,
                                          self.lock,
                                          self.info_line,
                                          no_move_up=True)

    def reset(self, i = None):
        """resets the progress information

        :param i: tell which progress to reset, if None reset all
        :type i: None, int
        """
        if i is None:
            self._reset_all()
        else:
            self._reset_i(i)

    def start(self):
        """
        start
        """
        # before printing any output to stdout, we can now check this
        # variable to see if any other ProgressBar has reserved that
        # terminal.
        if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
            if not terminal.terminal_reserve(progress_obj=self):
                log.warning("tty already reserved, NOT starting the progress loop!")
                return

        super(Progress, self).start()
        self.show_on_exit = True

    def stop(self):
        """
        trigger clean up by hand, needs to be done when not using
        context management via 'with' statement

        - will terminate loop process
        - show a last progress -> see the full 100% on exit
        - releases terminal reservation
        """
        super(Progress, self).stop()
        terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)

        if self.show_on_exit:
            if not isinstance(self.pipe_handler, PipeToPrint):
                # render the final frame through the pipe handler instead of stdout
                myout = inMemoryBuffer()
                stdout = sys.stdout
                sys.stdout = myout
                self._show_stat()
                self.pipe_handler(myout.getvalue())
                sys.stdout = stdout
            else:
                self._show_stat()
                print()
        self.show_on_exit = False
def show_stat_ProgressBar(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    # render function used by ProgressBar (interface described by show_stat_base)
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        print("{}{}{} [{}] {}#{} ".format(terminal.ESC_NO_CHAR_ATTR,
                                          COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                          humanize_time(tet), humanize_speed(speed),
                                          terminal.ESC_BOLD + COLTHM['BAR_COL'],
                                          count_value))
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress and show as bar on screen
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{}{}{} [{}] ".format(terminal.ESC_NO_CHAR_ATTR,
                                   COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                   humanize_time(tet),
                                   humanize_speed(speed))
        # remaining width is filled by the bar: [====>   ]
        l = terminal.len_string_without_ESC(s1 + s3)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        print(s1 + s2 + s3)
class ProgressBar(Progress):
    """A 'wget'/'pv'-like progress bar (rendered by :py:func:`show_stat_ProgressBar`)."""
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - number of characters used for the bar;
        'auto' queries the terminal for its current width -> see _set_width
        """
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBar, **kwargs)
def show_stat_ProgressBarCounter(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Render one line combining the per-counter statistics (number of
    finished runs and their speed) with the progress of the current run."""
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    line = "{}{}{} [{}] {}#{} - ".format(terminal.ESC_NO_CHAR_ATTR,
                                         COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(counter_tet),
                                         humanize_speed(counter_speed.value),
                                         COLTHM['BAR_COL'],
                                         str(counter_count.value) + terminal.ESC_DEFAULT)
    if width == 'auto':
        width = get_terminal_width()
    if max_count_value is None or max_count_value == 0:
        # no total for the current run -> absolute count + speed
        line = "{}{} [{}] {}#{} ".format(line,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        tail = " TTG --" if ttg is None else " TTG {}".format(humanize_time(ttg))
        head = "{} [{}] ".format(humanize_time(tet), humanize_speed(speed))
        room = width - terminal.len_string_without_ESC(head + tail + line) - 3
        filled = int(room * count_value / max_count_value)
        bar = (COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * filled + ">" +
               " " * (room - filled) + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT)
        line = line + head + bar + tail
    print(line)
class ProgressBarCounter(Progress):
    """
    records also the time of each reset and calculates the speed
    of the resets.
    shows the TET since init (not affected by reset)
    the speed of the resets (number of finished processes per time)
    and the number of finished processes
    after that also show a progress of each process
    max_count > 0 and not None -> bar
    max_count == None -> absolute count statistic
    max_count == 0 -> hide process statistic at all
    """
    def __init__(self, speed_calc_cycles_counter=5, **kwargs):
        # speed_calc_cycles_counter: number of recent resets used to
        # estimate the reset speed
        Progress.__init__(self, show_stat = show_stat_ProgressBarCounter, **kwargs)
        # one counter / timestamp-queue / speed slot per monitored process
        self.counter_count = []
        self.counter_q = []
        self.counter_speed = []
        for i in range(self.len):
            self.counter_count.append(UnsignedIntValue(val=0))
            self.counter_q.append(myQueue())
            self.counter_speed.append(FloatValue())
        self.counter_speed_calc_cycles = speed_calc_cycles_counter
        self.init_time = time.time()
        # expose the counter state to the show_stat function via add_args
        self.add_args['counter_count'] = self.counter_count
        self.add_args['counter_speed'] = self.counter_speed
        self.add_args['init_time'] = self.init_time
    def get_counter_count(self, i=0):
        # current number of finished runs of process i
        return self.counter_count[i].value
    def _reset_i(self, i):
        # increment the reset counter for process i under its lock
        c = self.counter_count[i]
        with c.get_lock():
            c.value += 1
            count_value = c.value
        q = self.counter_q[i]
        current_time = time.time()
        q.put((count_value, current_time))
        # estimate the reset speed over the last counter_speed_calc_cycles
        # resets; fall back to the overall rate since init_time
        if q.qsize() > self.counter_speed_calc_cycles:
            old_count_value, old_time = q.get()
        else:
            old_count_value, old_time = 0, self.init_time
        speed = (count_value - old_count_value) / (current_time - old_time)
        self.counter_speed[i].value = speed
        Progress._reset_i(self, i)
def get_d(s1, s2, width, lp, lps):
    """Return ``(s1, s2, d1, d2)`` where d1/d2 split the free space between
    the two stat strings, or None when they do not fit into *width*
    (accounting for the prepend length *lp*, percent-string length *lps*
    and the two bar brackets)."""
    visible = (len(terminal.remove_ESC_SEQ_from_string(s1)) +
               len(terminal.remove_ESC_SEQ_from_string(s2)))
    free = width - visible - 2 - lp - lps
    if free < 0:
        return None
    left = free // 2
    return s1, s2, left, free - left
def full_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Most verbose layout: TET/speed/TTG left, ETA/ORT right."""
    left = "TET {} {:>12} TTG {}".format(tet, speed, ttg)
    right = "ETA {} ORT {}".format(eta, ort)
    return get_d(left, right, width, lp, lps)
def full_minor_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Like full_stat but with the keywords abbreviated to one letter."""
    left = "E {} {:>12} G {}".format(tet, speed, ttg)
    right = "A {} O {}".format(eta, ort)
    return get_d(left, right, width, lp, lps)
def reduced_1_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Compact layout: drop the ETA, keep TET/speed/TTG and ORT."""
    left = "E {} {:>12} G {}".format(tet, speed, ttg)
    right = "O {}".format(ort)
    return get_d(left, right, width, lp, lps)
def reduced_2_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """More compact layout: additionally drop the speed."""
    left = "E {} G {}".format(tet, ttg)
    right = "O {}".format(ort)
    return get_d(left, right, width, lp, lps)
def reduced_3_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Minimal informative layout: only TET and TTG."""
    left = "E {} G {}".format(tet, ttg)
    return get_d(left, '', width, lp, lps)
def reduced_4_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Last resort layout: no textual information at all, just the bar."""
    return get_d('', '', width, lp, lps)
def kw_bold(s, ch_after):
    """Wrap the known stat keywords (when followed by one of the characters
    in *ch_after*) in bold ESC sequences."""
    for keyword in ('TET', 'TTG', 'ETA', 'ORT', 'E', 'G', 'A', 'O'):
        for suffix in ch_after:
            s = s.replace(keyword + suffix,
                          terminal.ESC_BOLD + keyword + terminal.ESC_RESET_BOLD + suffix)
    return s
def _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    # Build (but do not print) the fancy htop-like one-line statistics string.
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        stat = "{}{} [{}] {}#{} ".format(COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress
        p = count_value / max_count_value
        if p < 1:
            ps = " {:.1%} ".format(p)
        else:
            ps = " {:.0%} ".format(p)
        if ttg is None:
            eta = '--'
            ort = None
        else:
            # ETA as an absolute timestamp, ORT = overall running time
            eta = datetime.datetime.fromtimestamp(time.time() + ttg).strftime("%Y%m%d_%H:%M:%S")
            ort = tet + ttg
        tet = humanize_time(tet)
        speed = '[' + humanize_speed(speed) + ']'
        ttg = humanize_time(ttg)
        ort = humanize_time(ort)
        repl_ch = '-'
        lp = len(prepend)
        # try successively more compact layouts until one fits into width
        args = p, tet, speed, ttg, eta, ort, repl_ch, width, lp, len(ps)
        res = full_stat(*args)
        if res is None:
            res = full_minor_stat(*args)
        if res is None:
            res = reduced_1_stat(*args)
        if res is None:
            res = reduced_2_stat(*args)
        if res is None:
            res = reduced_3_stat(*args)
        if res is None:
            res = reduced_4_stat(*args)
        if res is not None:
            s1, s2, d1, d2 = res
            s = s1 + ' ' * d1 + ps + ' ' * d2 + s2
            # fill the 'done' part of the line with repl_ch and mark its tip with '>'
            idx_p = math.ceil( (width-lp-2)*p)
            s_before = s[:idx_p].replace(' ', repl_ch)
            if (len(s_before) > 0) and (s_before[-1] == repl_ch):
                s_before = s_before[:-1] + '>'
            s_after = s[idx_p:]
            # keep the keywords readable in both halves of the bar
            s_before = kw_bold(s_before, ch_after=[repl_ch, '>'])
            s_after = kw_bold(s_after, ch_after=[' '])
            stat = (COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT +
                    COLTHM['BAR_COL'] + terminal.ESC_BOLD + '[' + terminal.ESC_RESET_BOLD + s_before + terminal.ESC_DEFAULT +
                    s_after + terminal.ESC_BOLD + COLTHM['BAR_COL'] + ']' + terminal.ESC_NO_CHAR_ATTR)
        else:
            # even the most compact layout does not fit -> show percentage only
            ps = ps.strip()
            if p == 1:
                ps = ' ' + ps
            stat = prepend + ps
    return stat
def show_stat_ProgressBarFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Format the fancy stat line via :py:func:`_stat` and print it."""
    print(_stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs))
class ProgressBarFancy(Progress):
    """Progress bar whose coloring indicates the current status, similar to
    the bars known from 'htop'."""
    def __init__(self, *args, **kwargs):
        """
        width [int/'auto'] - number of characters used for the bar;
        'auto' queries the terminal for its current width -> see _set_width
        """
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBarFancy, **kwargs)
def show_stat_ProgressBarCounterFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Fancy variant of the counter display: the counter statistics followed
    by the htop-like bar produced by :py:func:`_stat`."""
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    line = "{}{}{} [{}] {}#{}".format(terminal.ESC_NO_CHAR_ATTR,
                                      COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                      humanize_time(counter_tet),
                                      humanize_speed(counter_speed.value),
                                      COLTHM['BAR_COL'],
                                      str(counter_count.value) + terminal.ESC_DEFAULT)
    if max_count_value is not None:
        if width == 'auto':
            width = get_terminal_width()
        line += ' - '
        if max_count_value == 0:
            # no total -> absolute count + speed for the current run
            line = "{}{} [{}] {}#{} ".format(line, humanize_time(tet), humanize_speed(speed),
                                             COLTHM['BAR_COL'], str(count_value) + terminal.ESC_DEFAULT)
        else:
            remaining = width - terminal.len_string_without_ESC(line)
            line += _stat(count_value, max_count_value, '', speed, tet, ttg, remaining, i)
    print(line)
class ProgressBarCounterFancy(ProgressBarCounter):
    """:py:class:`ProgressBarCounter` rendered with the fancy htop-like bar."""
    def __init__(self, *args, **kwargs):
        ProgressBarCounter.__init__(self, *args, **kwargs)
        # swap in the fancy renderer after the base class set the plain one
        self.show_stat = show_stat_ProgressBarCounterFancy
class SIG_handler_Loop(object):
    """Set up signal handling for the Loop class.

    Note: each subprocess receives the default signal handling from its parent.
    If the signal function from the module signal is evoked within the subprocess
    this default behavior can be overwritten.

    The init function receives a shared memory boolean object which will be set
    false in case of signal detection. Since the Loop class will check the state
    of this boolean object before each repetition, the loop will stop when
    a signal was received.
    """
    def __init__(self, sigint, sigterm, log, prefix):
        """
        :param sigint:  behavior for SIGINT, one of 'ign' or 'stop'
        :param sigterm: behavior for SIGTERM, one of 'ign' or 'stop'
        :param log:     logger used for reporting
        :param prefix:  identifier string used in log messages
        """
        self.set_signal(signal.SIGINT, sigint)
        self.set_signal(signal.SIGTERM, sigterm)
        self.prefix = prefix
        self.log = log
        self.log.info("setup signal handler for loop (SIGINT:%s, SIGTERM:%s)", sigint, sigterm)
    def set_signal(self, sig, handler_str):
        """Install the handler named by *handler_str* ('ign' or 'stop') for *sig*.

        :raises TypeError: if *handler_str* is not a known handler name
        """
        if handler_str == 'ign':
            signal.signal(sig, self._ignore_signal)
        elif handler_str == 'stop':
            signal.signal(sig, self._stop_on_signal)
        else:
            # BUGFIX: the message was previously never interpolated (logging-style
            # '%s' args were passed to the exception constructor) and 'handler'
            # was misspelled as 'hander'
            raise TypeError("unknown signal handler string '{}'".format(handler_str))
    def _ignore_signal(self, signal, frame):
        # deliberately swallow the signal, only log that it arrived
        self.log.debug("ignore received sig %s", signal_dict[signal])
    def _stop_on_signal(self, signal, frame):
        # translate the OS signal into an exception the loop catches
        self.log.info("received sig %s -> raise InterruptedError", signal_dict[signal])
        raise LoopInterruptError()
def FloatValue(val=0.):
    """Shorthand for a locked shared-memory ``multiprocessing.Value`` of C
    type double, initialized to *val*."""
    shared = mp.Value('d', val, lock=True)
    return shared
def UnsignedIntValue(val=0):
    """Shorthand for a locked shared-memory ``multiprocessing.Value`` of C
    type unsigned int, initialized to *val*."""
    shared = mp.Value('I', val, lock=True)
    return shared
def StringValue(num_of_bytes):
    """Shorthand for a locked shared-memory ``multiprocessing.Array`` of
    characters with room for *num_of_bytes* bytes."""
    buf = _jm_compatible_bytearray(num_of_bytes)
    return mp.Array('c', buf, lock=True)
def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort = False):
    """Try to end *proc* gracefully, escalating to SIGTERM and finally SIGKILL.

    Returns True once the process has stopped, False if the user chose to
    ignore a process that could not be terminated.
    """
    proc.join(timeout)
    if not proc.is_alive():
        log.debug("termination of process (pid %s) within timeout of %s SUCCEEDED!", proc.pid, humanize_time(timeout))
        return True
    # process still runs -> send SIGTERM -> see what happens
    log.warning("termination of process (pid %s) within given timeout of %s FAILED!", proc.pid, humanize_time(timeout))
    proc.terminate()
    new_timeout = 3*timeout
    log.debug("wait for termination (timeout %s)", humanize_time(new_timeout))
    proc.join(new_timeout)
    if not proc.is_alive():
        log.info("termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!", proc.pid, humanize_time(new_timeout))
        return True
    log.warning("termination of process (pid %s) via SIGTERM with timeout of %s FAILED!", proc.pid, humanize_time(new_timeout))
    log.debug("auto_kill_on_last_resort is %s", auto_kill_on_last_resort)
    # last resort: SIGKILL -- either automatically or after asking the user
    answer = 'k' if auto_kill_on_last_resort else '_'
    while True:
        log.debug("answer string is %s", answer)
        if answer == 'k':
            log.warning("send SIGKILL to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGKILL)
            time.sleep(0.1)
        else:
            log.info("send SIGTERM to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGTERM)
            time.sleep(0.1)
        if not proc.is_alive():
            log.info("process (pid %s) has stopped running!", proc.pid)
            return True
        else:
            log.warning("process (pid %s) is still running!", proc.pid)
        print("the process (pid {}) seems still running".format(proc.pid))
        try:
            answer = input("press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore")
        except Exception as e:
            # e.g. no interactive stdin available -> fall back to SIGKILL
            log.error("could not ask for sending SIGKILL due to {}".format(type(e)))
            log.info(traceback.format_exc())
            log.warning("send SIGKILL now")
            answer = 'k'
        if answer == 'ignore':
            log.warning("ignore process %s", proc.pid)
            return False
        elif answer != 'k':
            # any other input means: try SIGTERM once more
            answer = ''
def getCountKwargs(func):
    """Return the ``[count kwarg, count_max kwarg]`` pair (as listed in
    :py:data:`validCountKwargs`) that *func* accepts, or None if no such
    pair of keyword arguments is found."""
    if not hasattr(func, "__code__"):
        return None
    code = func.__code__
    func_args = code.co_varnames[:code.co_argcount]
    for pair in validCountKwargs:
        if pair[0] in func_args and pair[1] in func_args:
            return pair
    return None
def humanize_speed(c_per_sec):
    """Convert a speed in counts per second to the most readable unit out of
    [c/s, c/min, c/h, c/d], i.e. the smallest unit yielding a value >= 1."""
    scales = (60, 60, 24)
    units = ('c/s', 'c/min', 'c/h', 'c/d')
    value = c_per_sec
    idx = 0
    while 0 < value < 1 and idx < len(scales):
        value *= scales[idx]
        idx += 1
    return "{:.1f}{}".format(value, units[idx])
def humanize_time(secs):
    """Format a duration given in seconds: milliseconds below one second,
    seconds below ten seconds, and hh:mm:ss otherwise; None maps to '--'."""
    if secs is None:
        return '--'
    if secs < 1:
        return "{:.2f}ms".format(secs * 1000)
    if secs < 10:
        return "{:.2f}s".format(secs)
    minutes, seconds = divmod(secs, 60)
    hours, minutes = divmod(minutes, 60)
    return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(minutes), int(seconds))
def codecov_subprocess_check():
    """Marker function whose execution proves that code ran in a subprocess
    (used for coverage checks)."""
    message = "this line will be only called from a subprocess"
    print(message)
# queue factory used throughout the module
myQueue = mp.Queue
# a mapping from the numeric values of the signals to their names used in the
# standard python module signals
signal_dict = {}
for s in dir(signal):
    if s.startswith('SIG') and s[3] != '_':
        n = getattr(signal, s)
        if n in signal_dict:
            # several names may share one number (e.g. SIGCHLD/SIGCLD)
            signal_dict[n] += ('/'+s)
        else:
            signal_dict[n] = s
# color themes: ESC sequences for the prepend text and the bar itself,
# plus the number of additional lines to move the cursor up afterwards
_colthm_term_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_GREEN, 'ADD_LNS_UP':0}
_colthm_ipyt_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_BLUE, 'ADD_LNS_UP':0}
_colthm_wincmd_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_GREEN, 'ADD_LNS_UP':1}
color_themes = {'term_default': _colthm_term_default,
                'ipyt_default': _colthm_ipyt_default,
                'wincmd_default': _colthm_wincmd_default}
# pick a sensible default theme for the current platform
if platform.system() == 'Windows':
    COLTHM = _colthm_wincmd_default
else:
    COLTHM = _colthm_term_default
def choose_color_theme(name):
    """Select one of the predefined color themes by *name*; warns (and keeps
    the current theme) when the name is unknown."""
    global COLTHM
    try:
        COLTHM = color_themes[name]
    except KeyError:
        warnings.warn("no such color theme {}".format(name))
# keyword arguments that define counting in wrapped functions
# (each pair is [current-count kwarg, maximum-count kwarg],
# checked in order by getCountKwargs)
validCountKwargs = [
    [ "count", "count_max"],
    [ "count", "max_count"],
    [ "c", "m"],
    [ "jmc", "jmm"],
]
|
cimatosa/progression | progression/progress.py | humanize_time | python | def humanize_time(secs):
if secs is None:
return '--'
if secs < 1:
return "{:.2f}ms".format(secs*1000)
elif secs < 10:
return "{:.2f}s".format(secs)
else:
mins, secs = divmod(secs, 60)
hours, mins = divmod(mins, 60)
return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(mins), int(secs)) | convert second in to hh:mm:ss format | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L1409-L1422 | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Progression module
------------------
This module provides the (so far) four variants to display progress information:
* :py:class:`.ProgressBar`
This class monitors one or multiple processes showing the total elapsed time (TET), the current speed
estimated from the most recent updated, a colored bar showing the progress and an
estimate for the remaining time, also called time to go (TTG).
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 5.83s [7.2c/s] <span style="color:#00ff00"><b>[=====================> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounter`
If a single process is intended to do several sequential tasks, the :py:class:`.ProgressBarCounter` class can keep track of the number
of accomplished tasks on top of monitoring the individual task just like :py:class:`.ProgressBar` does.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre><span style="color:#00ff00"><b> [</b><b>TET</b>-5.83s-----[7.2c/s]-<b>TTG</b>-8.05s-></span> 42.0% <b>ETA</b> 20161011_16:52:52 <b>ORT</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
* :py:class:`.ProgressBarFancy`
This class intends to be a replacement for :py:class:`.ProgressBar` with slightly more information and
better handling of small terminal widths.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - 5.83s [7.2c/s] <span style="color:#00ff00"><b>[===========> ]</b></span> TTG 8.05s</pre>
</div>
* :py:class:`.ProgressBarCounterFancy`
Just as :py:class:`.ProgressBarFancy` this replaces :py:class:`.ProgressBarCounter`.
.. raw:: html
<div class="widget-html">
<style>.widget-html{font-family:monospace;
color: #c0c0c0;
background-color:black}</style>
<pre> 00:00:35 [1.4c/min] <span style="color:#00ff00">#3</span> - <span style="color:#800000"></span><span style="color:#00ff00"><b>[</b><b>E</b>-5.83s-----[7.2c/s]-<b>G</b>-8</span>.05s 42.0% <b>O</b> 00:00:13<b><span style="color:#00ff00">]</span></b></pre>
</div>
.. autoclass:: Progress
:members:
:inherited-members:
.. autoclass:: ProgressBar
:members:
.. autoclass:: ProgressBarCounter
:members:
.. autoclass:: ProgressBarFancy
:members:
.. autoclass:: ProgressBarCounterFancy
:members:
.. autofunction:: UnsignedIntValue
.. autofunction:: FloatValue
.. autofunction:: StringValue
"""
from __future__ import division, print_function
import datetime
import io
import logging
from logging.handlers import QueueHandler, QueueListener
import math
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
import os
import sys
import signal
import subprocess as sp
import threading
import time
import traceback
import warnings
from . import terminal
import platform
# optional IPython support: disabled (with a warning) if the imports fail
_IPYTHON = True
try:
    import ipywidgets
except ImportError:
    _IPYTHON = False
    warnings.warn("could not load ipywidgets (IPython HTML output will not work)", category=ImportWarning)
except DeprecationWarning:
    pass
try:
    from IPython.display import display
except ImportError:
    _IPYTHON = False
    warnings.warn("could not load IPython (IPython HTML output will not work)", category=ImportWarning)
# Magic conversion from 3 to 2
if sys.version_info[0] == 2:
    ProcessLookupError = OSError
    inMemoryBuffer = io.BytesIO
    # python2 math.ceil returns a float -> wrap it to return int like python3
    old_math_ceil = math.ceil
    def my_int_ceil(f):
        return int(old_math_ceil(f))
    math.ceil = my_int_ceil
    _jm_compatible_bytearray = lambda x: x
    class TimeoutError(Exception):
        pass
elif sys.version_info[0] == 3:
    inMemoryBuffer = io.StringIO
    _jm_compatible_bytearray = bytearray
class MultiLineFormatter(logging.Formatter):
    """Formatter that indents continuation lines of a multiline message so
    they align under the first line, i.e.::

        <HEAD> msg_line1
               msg_line2
               ...
    """
    def format(self, record):
        formatted = logging.Formatter.format(self, record)
        # everything before the message is the header whose width we pad to
        header = formatted.split(record.message)[0]
        return formatted.replace('\n', '\n' + ' ' * len(header))
# def_handl = logging.StreamHandler(stream = sys.stderr) # the default handler simply uses stderr
# def_handl.setLevel(logging.DEBUG) # ... listens to all messaged
fmt = MultiLineFormatter('%(asctime)s %(name)s %(levelname)s : %(message)s')
# def_handl.setFormatter(fmt) # ... and pads multiline messaged
# note: no handler is attached by default -- the host application configures one
log = logging.getLogger(__name__) # creates the default log for this module
# log.addHandler(def_handl)
class LoopExceptionError(RuntimeError):
    """Raised when the loop's wrapped function terminated with a non-zero exitcode."""
    pass
class LoopInterruptError(Exception):
    """Raised inside the loop subprocess when a 'stop' signal (SIGINT/SIGTERM) is received."""
    pass
class StdoutPipe(object):
    """File-like replacement for a stream such as ``sys.stdout`` that
    forwards everything written to it through the ``send`` method of a
    connection (e.g. one end of a ``multiprocessing.Pipe``).

    Assign an instance to ``sys.stdout`` in a subprocess to ship its prints
    to the parent, which reads them with ``conn_recv.recv()``.
    """
    def __init__(self, conn):
        self.conn = conn
    def flush(self):
        """Nothing to flush -- every write is sent immediately."""
        pass
    def write(self, b):
        self.conn.send(b)
class PipeToPrint(object):
    """Default pipe handler: echoes every received chunk straight to stdout."""
    def __call__(self, b):
        print(b, end='')
    def close(self):
        """Nothing to release for plain printing."""
        pass
class PipeFromProgressToIPythonHTMLWidget(object):
    """Pipe handler that renders the progress stream into an IPython HTML widget.

    Chunks are buffered until the magic ending sequence is seen; then the
    accumulated ESC sequences are converted to HTML and shown in the widget.
    """
    def __init__(self):
        self.htmlWidget = ipywidgets.widgets.HTML()
        display(self.htmlWidget)
        # accumulates chunks until a full frame has arrived
        self._buff = ""
    def __call__(self, b):
        self._buff += b
        # a complete frame ends with the magic sequence -> render and reset
        if b.endswith(terminal.ESC_MY_MAGIC_ENDING):
            buff = terminal.ESC_SEQ_to_HTML(self._buff)
            self.htmlWidget.value = '<style>.widget-html{font-family:monospace}</style><pre>'+buff+'</pre>'
            self._buff = ""
    def close(self):
        self.htmlWidget.close()
# module-wide pipe handler class, swapped by choose_pipe_handler()
PipeHandler = PipeToPrint
def choose_pipe_handler(kind = 'print', color_theme = None):
    """Select how progress output is delivered.

    :param kind: 'print' for plain terminal output, 'ipythonhtml' for an
                 IPython HTML widget (requires IPython/ipywidgets)
    :param color_theme: name of a color theme (see ``color_themes``);
                        None picks the default matching *kind*
    :raises ValueError: if *kind* is unknown
    """
    global PipeHandler
    if kind == 'print':
        PipeHandler = PipeToPrint
        choose_color_theme('term_default' if color_theme is None else color_theme)
    elif kind == 'ipythonhtml':
        if _IPYTHON:
            PipeHandler = PipeFromProgressToIPythonHTMLWidget
            choose_color_theme('ipyt_default' if color_theme is None else color_theme)
        else:
            warnings.warn("can not choose ipythonHTML (IPython and/or ipywidgets were not loaded)")
    else:
        # BUGFIX: the placeholder was never filled in (the .format call was missing)
        raise ValueError("unknown kind '{}' for pipe_handler, use one out of ('print', 'ipythonhtml')".format(kind))
def get_terminal_width():
    """Width used for rendering: the real terminal width when printing,
    a fixed 80 columns for the IPython HTML widget."""
    if PipeHandler is PipeToPrint:
        return terminal.get_terminal_width()
    if PipeHandler is PipeFromProgressToIPythonHTMLWidget:
        return 80
    raise NotImplementedError
def get_identifier(name=None, pid=None, bold=True):
    """Return an identifier string ``<name>_<pid>`` (or ``PID_<pid>`` when no
    name is given), optionally wrapped in bold ESC sequences.

    *pid* defaults to the current process id.
    """
    if pid is None:
        pid = os.getpid()
    if bold:
        start, end = terminal.ESC_BOLD, terminal.ESC_NO_CHAR_ATTR
    else:
        start = end = ""
    base = "PID_{}".format(pid) if name is None else "{}_{}".format(name, pid)
    return start + base + end
def _loop_wrapper_func(func, args, shared_mem_run, shared_mem_pause, interval, sigint, sigterm, name,
                       logging_level, conn_send, func_running, log_queue):
    """
    to be executed as a separate process (that's why this functions is declared static)
    """
    prefix = get_identifier(name) + ' '
    global log
    # the child process gets its own logger which forwards all records to
    # the parent process via log_queue
    log = logging.getLogger(__name__+".log_{}".format(get_identifier(name, bold=False)))
    log.setLevel(logging_level)
    log.addHandler(QueueHandler(log_queue))
    # redirect all prints of the child through the pipe to the parent
    sys.stdout = StdoutPipe(conn_send)
    log.debug("enter wrapper_func")
    SIG_handler_Loop(sigint, sigterm, log, prefix)
    func_running.value = True
    error = False
    while shared_mem_run.value:
        try:
            # in pause mode, simply sleep
            if shared_mem_pause.value:
                quit_loop = False
            else:
                # if not pause mode -> call func and see what happens
                try:
                    quit_loop = func(*args)
                except LoopInterruptError:
                    raise
                except Exception as e:
                    log.error("error %s occurred in loop calling 'func(*args)'", type(e))
                    log.info("show traceback.print_exc()\n%s", traceback.format_exc())
                    error = True
                    break
            if quit_loop is True:
                log.debug("loop stooped because func returned True")
                break
            time.sleep(interval)
        except LoopInterruptError:
            # raised by the signal handler on SIGINT/SIGTERM ('stop' mode)
            log.debug("quit wrapper_func due to InterruptedError")
            break
    func_running.value = False
    if error:
        # non-zero exitcode signals the parent that func raised
        sys.exit(-1)
    else:
        log.debug("wrapper_func terminates gracefully")
    # gets rid of the following warnings
    #   Exception ignored in: <_io.FileIO name='/dev/null' mode='rb'>
    #   ResourceWarning: unclosed file <_io.TextIOWrapper name='/dev/null' mode='r' encoding='UTF-8'>
    try:
        if mp.get_start_method() == "spawn":
            sys.stdin.close()
    except AttributeError:
        pass
class LoopTimeoutError(TimeoutError):
    """Raised when the loop function does not come up within the given timeout."""
    pass
class Loop(object):
    """
    class to run a function periodically in a separate process.

    In case the called function returns True, the loop will stop.
    Otherwise a time interval given by interval will be slept before
    another execution is triggered.

    The shared memory variable _run (accessible via the class property run)
    also determines if the function if executed another time. If set to False
    the execution stops.

    For safe cleanup (and in order to catch any Errors)
    it is advisable to instantiate this class
    using 'with' statement as follows:

        with Loop(**kwargs) as my_loop:
            my_loop.start()
            ...

    this will guarantee you that the spawned loop process is
    down when exiting the 'with' scope.

    The only circumstance where the process is still running is
    when you set auto_kill_on_last_resort to False and answer the
    question to send SIGKILL with no.
    """
    def __init__(self,
                 func,
                 args = (),
                 interval = 1,
                 verbose = None,
                 sigint = 'stop',
                 sigterm = 'stop',
                 auto_kill_on_last_resort = False,
                 raise_error = True):
        """
        func [callable] - function to be called periodically

        args [tuple] - arguments passed to func when calling

        interval [pos number] - time to "sleep" between each call

        verbose - DEPRECATED, only kept for compatibility, use global log.level to
        specify verbosity

        sigint [string] - signal handler string to set SIGINT behavior (see below)

        sigterm [string] - signal handler string to set SIGTERM behavior (see below)

        auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
        to loop process in case normal stop and SIGTERM failed. If set True, send SIGKILL
        without asking.

        the signal handler string may be one of the following
            ign: ignore the incoming signal
            stop: raise InterruptedError which is caught silently.
        """
        self._proc = None
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)
        self.func = func
        self.args = args
        self.interval = interval
        assert self.interval >= 0
        # shared flags readable from the subprocess
        self._run = mp.Value('b', False)
        self._pause = mp.Value('b', False)
        self._func_running = mp.Value('b', False)
        self._sigint = sigint
        self._sigterm = sigterm
        self._auto_kill_on_last_resort = auto_kill_on_last_resort
        log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
        self._monitor_thread = None
        self.pipe_handler = PipeHandler()
        self.raise_error = raise_error
    def __enter__(self):
        return self
    def __exit__(self, *exc_args):
        if self.is_alive():
            log.debug("loop is still running on context exit")
        else:
            log.debug("loop has stopped on context exit")
        self.stop()
    def __cleanup(self):
        """
        Wait at most twice as long as the given repetition interval
        for the _wrapper_function to terminate.

        If after that time the _wrapper_function has not terminated,
        send SIGTERM to and the process.

        Wait at most five times as long as the given repetition interval
        for the _wrapper_function to terminate.

        If the process still running send SIGKILL automatically if
        auto_kill_on_last_resort was set True or ask the
        user to confirm sending SIGKILL
        """
        # set run to False and wait some time -> see what happens
        self._run.value = False
        if check_process_termination(proc = self._proc,
                                     timeout = 2*self.interval,
                                     prefix = '',
                                     auto_kill_on_last_resort = self._auto_kill_on_last_resort):
            log.debug("cleanup successful")
        else:
            raise RuntimeError("cleanup FAILED!")
        # closing the send end makes the monitor thread's recv raise EOFError
        try:
            self.conn_send.close()
            self._log_queue_listener.stop()
        except OSError:
            pass
        log.debug("wait for monitor thread to join")
        self._monitor_thread.join()
        log.debug("monitor thread to joined")
        self._func_running.value = False
    def _monitor_stdout_pipe(self):
        # runs in a daemon thread: forward everything the subprocess prints
        # to the configured pipe handler until the pipe is closed
        while True:
            try:
                b = self.conn_recv.recv()
                self.pipe_handler(b)
            except EOFError:
                break
    def start(self, timeout=None):
        """
        uses multiprocess Process to call _wrapper_func in subprocess
        """
        if self.is_alive():
            log.warning("a process with pid %s is already running", self._proc.pid)
            return
        self._run.value = True
        self._func_running.value = False
        name = self.__class__.__name__
        self.conn_recv, self.conn_send = mp.Pipe(False)
        self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
        self._monitor_thread.daemon=True
        self._monitor_thread.start()
        log.debug("started monitor thread")
        # forward log records of the subprocess to this process' handlers
        self._log_queue = mp.Queue()
        self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
        self._log_queue_listener.start()
        args = (self.func, self.args, self._run, self._pause, self.interval,
                self._sigint, self._sigterm, name, log.level, self.conn_send,
                self._func_running, self._log_queue)
        self._proc = mp.Process(target = _loop_wrapper_func,
                                args = args)
        self._proc.start()
        log.info("started a new process with pid %s", self._proc.pid)
        log.debug("wait for loop function to come up")
        t0 = time.time()
        # block until the subprocess signals that func is actually running,
        # failing early on subprocess exit or on timeout
        while not self._func_running.value:
            if self._proc.exitcode is not None:
                exc = self._proc.exitcode
                self._proc = None
                if exc == 0:
                    log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
                    return
                else:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
                                             "see log (INFO level) for traceback information")
            time.sleep(0.1)
            if (timeout is not None) and ((time.time() - t0) > timeout):
                err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
                log.error(err_msg)
                log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
                         "or an internal error occurred before reaching the function call")
                raise LoopTimeoutError(err_msg)
        log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))
    def stop(self):
        """
        stops the process triggered by start

        Setting the shared memory boolean run to false, which should prevent
        the loop from repeating. Call __cleanup to make sure the process
        stopped. After that we could trigger start() again.
        """
        if self.is_alive():
            self._proc.terminate()
        if self._proc is not None:
            self.__cleanup()
            if self.raise_error:
                # 255 corresponds to sys.exit(-1) in the wrapper func
                if self._proc.exitcode == 255:
                    raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
                                             "see log (INFO level) for traceback information")
            self.pipe_handler.close()
            self._proc = None
    def join(self, timeout):
        """
        calls join for the spawned process with given timeout
        """
        if self.is_alive():
            self._proc.join(timeout)
    def is_alive(self):
        if self._proc is None:
            return False
        else:
            return self._proc.is_alive()
    def is_running(self):
        # True only while the wrapped function is actually being looped
        if self.is_alive():
            return self._func_running.value
        else:
            return False
    def pause(self):
        if self._run.value:
            self._pause.value = True
            log.debug("process with pid %s paused", self._proc.pid)
    def resume(self):
        if self._run.value:
            self._pause.value = False
            log.debug("process with pid %s resumed", self._proc.pid)
    def getpid(self):
        if self._proc is not None:
            return self._proc.pid
        else:
            return None
def show_stat_base(count_value, max_count_value, prepend, speed, tet, ttg, width, **kwargs):
    """Abstract hook that formats the progress information.

    Called periodically for each monitored progress; subclasses override it
    to implement a specific formatting.

    :param count_value: a number holding the current state
    :param max_count_value: should be the largest number `count_value` can reach
    :param prepend: additional text for each progress
    :param speed: the speed estimation
    :param tet: the total elapsed time
    :param ttg: the time to go
    :param width: the width for the progressbar; when set to `"auto"` the
                  implementation should try to detect the available width
    :type width: int or "auto"
    """
    raise NotImplementedError
def _show_stat_wrapper_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                width, q, last_speed, prepend, show_stat_function, add_args,
                                i, lock):
    """Compute the current progress values via ``Progress._calc`` and hand
    them to *show_stat_function* for rendering."""
    count_value, max_count_value, speed, tet, ttg = Progress._calc(
        count, last_count, start_time, max_count, speed_calc_cycles, q, last_speed, lock)
    return show_stat_function(count_value, max_count_value, prepend, speed, tet, ttg,
                              width, i, **add_args)
def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles,
                                      width, q, last_speed, prepend, show_stat_function, len_,
                                      add_args, lock, info_line, no_move_up=False):
    """
    call the static method show_stat_wrapper for each process

    Prints one formatted line per monitored counter, then the optional
    ``info_line`` (truncated to the terminal width), and finally moves the
    cursor back up by the number of printed lines so the next cycle
    overwrites them in place.
    """
    # print(ESC_BOLD, end='')
    # sys.stdout.flush()
    for i in range(len_):
        _show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles,
                                    width, q[i], last_speed[i], prepend[i], show_stat_function,
                                    add_args, i, lock[i])
    # n counts how many lines were printed, so the cursor can be moved back up
    n = len_
    if info_line is not None:
        s = info_line.value.decode('utf-8')
        s = s.split('\n')
        n += len(s)
        for si in s:
            if width == 'auto':
                width = get_terminal_width()
            if len(si) > width:
                # truncate so the info line never wraps (which would break the move-up count)
                si = si[:width]
            print("{0:<{1}}".format(si, width))
    if no_move_up:
        # keep the output in place (used for the final print on exit)
        n = 0
    # this is only a hack to find the end
    # of the message in a stream
    # so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending
    print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')
    sys.stdout.flush()
class Progress(Loop):
    """
    Abstract Progress Class

    The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
    function which calculates progress information from a changing counter value.
    The formatting of this information is done by overwriting the static member
    :py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
    format a single progress bar on a single line only.
    The extension to multiple progresses is done
    automatically based on the formatting of a single line.
    """
    def __init__(self,
                 count,
                 max_count=None,
                 prepend=None,
                 width='auto',
                 speed_calc_cycles=10,
                 interval=1,
                 verbose=None,
                 sigint='stop',
                 sigterm='stop',
                 info_line=None,
                 show_stat=None):
        """
        :param count: shared variable for holding the current state
            (use :py:func:`UnsignedIntValue` for short hand creation)
        :type count: list/single value of multiprocessing.Value
        :param max_count: shared variable for holding the final state
        :type max_count: None or list/single value of multiprocessing.Value
        :param prepend: string to put in front of each progress output
        :type prepend: None, str or list of str
        :param width: the width to use for the progress line (fixed or automatically determined)
        :type width: int or "auto"
        :param speed_calc_cycles: number of updates (cycles) to use for estimating the speed
            (example: ``speed_calc_cycles = 4`` and ``interval = 1`` means that the speed is estimated from
            the current state and the state 4 updates before, where the elapsed time will roughly be 4s)
        :param interval: seconds to wait before updating the progress
        :param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
            output level
        :param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigint: "stop" or "ign"
        :param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
            ``SystemExit`` whereas ``"ign"`` ignores the signal)
        :type sigterm: "stop" or "ign"
        :param info_line: additional text to show below the progress (use :py:func:`StringValue`
            for short hand creation of shared strings)
        :type info_line: None or multiprocessing.Array of characters

        .. note::
            As `Progress` is derived from :py:class:`Loop` it is highly encouraged to create
            any instance of Progress with a context manager (``with`` statement).
            This ensures that the subprocess showing the progress terminates on context exit.
            Otherwise one has to make sure that at some point the stop() routine is called.

        abstract example::

            with AProgressClass(...) as p:
                p.start()
                # do stuff and modify counter
        """
        if verbose is not None:
            log.warning("verbose is deprecated, only allowed for compatibility")
            warnings.warn("verbose is deprecated", DeprecationWarning)

        # converts count to list and do type check; iterating a single
        # (non-iterable) Value raises TypeError -> single-counter mode
        try:
            for c in count:
                if not isinstance(c, Synchronized):
                    raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = True
        except TypeError:
            if not isinstance(count, Synchronized):
                raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
            self.is_multi = False
            count = [count]

        self.len = len(count)

        # converts max_count to list and do type check; plain numbers are
        # wrapped into shared UnsignedIntValue instances
        if max_count is not None:
            if self.is_multi:
                try:
                    for i, m in enumerate(max_count):
                        if not isinstance(m, Synchronized):
                            max_count[i] = UnsignedIntValue(m)
                except TypeError:
                    raise TypeError("'max_count' must be iterable")
            else:
                if not isinstance(max_count, Synchronized):
                    max_count = UnsignedIntValue(max_count)
                max_count = [max_count]
        else:
            max_count = [None] * self.len

        # per-counter bookkeeping, one entry per monitored counter
        self.start_time = []
        self.speed_calc_cycles = speed_calc_cycles
        self.width = width
        self.q = []
        self.prepend = []
        self.lock = []
        self.last_count = []
        self.last_speed = []
        for i in range(self.len):
            # queue to save the last speed_calc_cycles
            # (time, count) information to calculate speed
            self.q.append(myQueue())
            #self.q[-1].cancel_join_thread()
            self.last_count.append(UnsignedIntValue())
            self.last_speed.append(FloatValue())
            self.lock.append(mp.Lock())
            self.start_time.append(FloatValue(val=time.time()))
            if prepend is None:
                # no prepend given
                self.prepend.append('')
            else:
                if isinstance(prepend, str):
                    self.prepend.append(prepend)
                else:
                    # assume list of prepend, (needs to be a sequence)
                    self.prepend.append(prepend[i])

        self.max_count = max_count  # list of multiprocessing value type
        self.count = count          # list of multiprocessing value type
        self.interval = interval
        self.verbose = verbose
        self.show_on_exit = False
        self.add_args = {}          # extra kwargs passed through to show_stat
        self.info_line = info_line
        self.show_stat = show_stat

        # setup loop class with func
        Loop.__init__(self,
                      func=_show_stat_wrapper_multi_Progress,
                      args=(self.count,
                            self.last_count,
                            self.start_time,
                            self.max_count,
                            self.speed_calc_cycles,
                            self.width,
                            self.q,
                            self.last_speed,
                            self.prepend,
                            show_stat,
                            self.len,
                            self.add_args,
                            self.lock,
                            self.info_line),
                      interval=interval,
                      sigint=sigint,
                      sigterm=sigterm,
                      auto_kill_on_last_resort=True)

    def __exit__(self, *exc_args):
        # context exit triggers the full cleanup (stop loop, release terminal)
        self.stop()

    @staticmethod
    def _calc(count,
              last_count,
              start_time,
              max_count,
              speed_calc_cycles,
              q,
              last_speed,
              lock):
        """do the pre calculations in order to get TET, speed, TTG

        :param count: count
        :param last_count: count at the last call, allows to treat the case of no progress
            between sequential calls
        :param start_time: the time when start was triggered
        :param max_count: the maximal value count can reach (or None)
        :param speed_calc_cycles: number of (count, time) samples kept for the speed estimate
        :param q: queue holding the last ``speed_calc_cycles`` (count, time) samples
        :param last_speed: shared float caching the last speed estimate
        :param lock: guards access to ``q``
        :return: tuple (count_value, max_count_value, speed, tet, ttg)
        """
        count_value = count.value
        start_time_value = start_time.value
        current_time = time.time()
        if last_count.value != count_value:
            # some progress happened
            with lock:
                # save current state (count, time) to queue
                q.put((count_value, current_time))
                # get older state from queue (or initial state)
                # to do the speed estimation
                if q.qsize() > speed_calc_cycles:
                    old_count_value, old_time = q.get()
                else:
                    old_count_value, old_time = 0, start_time_value
            last_count.value = count_value
            #last_old_count.value = old_count_value
            #last_old_time.value = old_time
            speed = (count_value - old_count_value) / (current_time - old_time)
            last_speed.value = speed
        else:
            # progress has not changed since last call
            # use also old (cached) data from the queue
            #old_count_value, old_time = last_old_count.value, last_old_time.value
            speed = last_speed.value
        if (max_count is None):
            max_count_value = None
        else:
            max_count_value = max_count.value
        tet = (current_time - start_time_value)
        if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
            # time-to-go is undefined without progress or a known maximum
            ttg = None
        else:
            ttg = math.ceil((max_count_value - count_value) / speed)
        return count_value, max_count_value, speed, tet, ttg

    def _reset_all(self):
        """
        reset all progress information
        """
        for i in range(self.len):
            self._reset_i(i)

    def _reset_i(self, i):
        """
        reset i-th progress information
        """
        self.count[i].value = 0
        log.debug("reset counter %s", i)
        self.lock[i].acquire()
        # drain the speed-estimation queue
        for x in range(self.q[i].qsize()):
            self.q[i].get()
        self.lock[i].release()
        self.start_time[i].value = time.time()

    def _show_stat(self):
        """
        convenient function to call the static show_stat_wrapper_multi with
        the given class members
        """
        _show_stat_wrapper_multi_Progress(self.count,
                                          self.last_count,
                                          self.start_time,
                                          self.max_count,
                                          self.speed_calc_cycles,
                                          self.width,
                                          self.q,
                                          self.last_speed,
                                          self.prepend,
                                          self.show_stat,
                                          self.len,
                                          self.add_args,
                                          self.lock,
                                          self.info_line,
                                          no_move_up=True)

    def reset(self, i=None):
        """resets the progress information

        :param i: tell which progress to reset, if None reset all
        :type i: None, int
        """
        if i is None:
            self._reset_all()
        else:
            self._reset_i(i)

    def start(self):
        """
        Start the progress loop subprocess (unless the terminal is already
        reserved by another progress instance).
        """
        # before printing any output to stdout, we can now check this
        # variable to see if any other ProgressBar has reserved that
        # terminal.
        if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
            if not terminal.terminal_reserve(progress_obj=self):
                log.warning("tty already reserved, NOT starting the progress loop!")
                return
        super(Progress, self).start()
        self.show_on_exit = True

    def stop(self):
        """
        trigger clean up by hand, needs to be done when not using
        context management via 'with' statement

        - will terminate loop process
        - show a last progress -> see the full 100% on exit
        - releases terminal reservation
        """
        super(Progress, self).stop()
        terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
        if self.show_on_exit:
            if not isinstance(self.pipe_handler, PipeToPrint):
                # redirect the final stat through the custom pipe handler
                myout = inMemoryBuffer()
                stdout = sys.stdout
                sys.stdout = myout
                self._show_stat()
                self.pipe_handler(myout.getvalue())
                sys.stdout = stdout
            else:
                self._show_stat()
            print()
        self.show_on_exit = False
def show_stat_ProgressBar(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print one 'wget'-style progress line (used by :py:class:`ProgressBar`).

    Without a usable ``max_count_value`` only the absolute count and the speed
    are shown; otherwise a bar ``[===>   ]`` filling the remaining width is drawn.
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        print("{}{}{} [{}] {}#{} ".format(terminal.ESC_NO_CHAR_ATTR,
                                          COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                          humanize_time(tet), humanize_speed(speed),
                                          terminal.ESC_BOLD + COLTHM['BAR_COL'],
                                          count_value))
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress and show as bar on screen
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{}{}{} [{}] ".format(terminal.ESC_NO_CHAR_ATTR,
                                   COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                   humanize_time(tet),
                                   humanize_speed(speed))
        # visible length (escape sequences stripped) determines the bar width
        l = terminal.len_string_without_ESC(s1 + s3)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        print(s1 + s2 + s3)
class ProgressBar(Progress):
    """
    A concrete :py:class:`Progress` rendering a classic bar in the style
    known from 'wget' or 'pv'.
    """
    def __init__(self, *args, **kwargs):
        """
        Accepts every argument of :py:class:`Progress`; the ``show_stat``
        callback is fixed to :py:func:`show_stat_ProgressBar`.

        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBar, **kwargs)
def show_stat_ProgressBarCounter(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print one progress line for :py:class:`ProgressBarCounter`.

    The line starts with the overall counter statistics (time since init,
    reset speed, number of resets taken from ``kwargs``), followed by either
    an absolute count (no usable max) or a regular bar for the current cycle.
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    s_c = "{}{}{} [{}] {}#{} - ".format(terminal.ESC_NO_CHAR_ATTR,
                                        COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                        humanize_time(counter_tet),
                                        humanize_speed(counter_speed.value),
                                        COLTHM['BAR_COL'],
                                        str(counter_count.value) + terminal.ESC_DEFAULT)
    if width == 'auto':
        width = get_terminal_width()
    if (max_count_value is None) or (max_count_value == 0):
        # no bar possible -> absolute progress and speed only
        s_c = "{}{} [{}] {}#{} ".format(s_c,
                                        humanize_time(tet),
                                        humanize_speed(speed),
                                        COLTHM['BAR_COL'],
                                        str(count_value) + terminal.ESC_DEFAULT)
    else:
        if ttg is None:
            s3 = " TTG --"
        else:
            s3 = " TTG {}".format(humanize_time(ttg))
        s1 = "{} [{}] ".format(humanize_time(tet), humanize_speed(speed))
        # bar width = remaining visible space after all textual parts
        l = terminal.len_string_without_ESC(s1 + s3 + s_c)
        l2 = width - l - 3
        a = int(l2 * count_value / max_count_value)
        b = l2 - a
        s2 = COLTHM['BAR_COL'] + terminal.ESC_BOLD + "[" + "=" * a + ">" + " " * b + "]" + terminal.ESC_RESET_BOLD + terminal.ESC_DEFAULT
        s_c = s_c + s1 + s2 + s3
    print(s_c)
class ProgressBarCounter(Progress):
    """
    records also the time of each reset and calculates the speed
    of the resets.

    shows the TET since init (not affected by reset)
    the speed of the resets (number of finished processes per time)
    and the number of finished processes

    after that also show a progress of each process
    max_count > 0 and not None -> bar
    max_count == None -> absolute count statistic
    max_count == 0 -> hide process statistic at all
    """
    def __init__(self, speed_calc_cycles_counter=5, **kwargs):
        """
        :param speed_calc_cycles_counter: number of reset samples used to
            estimate the reset speed (analogous to ``speed_calc_cycles``)
        """
        Progress.__init__(self, show_stat=show_stat_ProgressBarCounter, **kwargs)

        # per-counter reset bookkeeping, one entry per monitored progress
        self.counter_count = []   # number of resets (finished cycles)
        self.counter_q = []       # (count, time) samples for reset speed
        self.counter_speed = []   # cached reset-speed estimate
        for i in range(self.len):
            self.counter_count.append(UnsignedIntValue(val=0))
            self.counter_q.append(myQueue())
            self.counter_speed.append(FloatValue())

        self.counter_speed_calc_cycles = speed_calc_cycles_counter
        self.init_time = time.time()

        # forwarded to show_stat via the add_args mechanism of Progress
        self.add_args['counter_count'] = self.counter_count
        self.add_args['counter_speed'] = self.counter_speed
        self.add_args['init_time'] = self.init_time

    def get_counter_count(self, i=0):
        """Return the number of resets (finished cycles) of the i-th counter."""
        return self.counter_count[i].value

    def _reset_i(self, i):
        """Count the reset, update the reset-speed estimate, then reset the
        underlying progress information via :py:meth:`Progress._reset_i`."""
        c = self.counter_count[i]
        with c.get_lock():
            c.value += 1
        count_value = c.value
        q = self.counter_q[i]
        current_time = time.time()
        q.put((count_value, current_time))
        # estimate reset speed against a sample counter_speed_calc_cycles back
        if q.qsize() > self.counter_speed_calc_cycles:
            old_count_value, old_time = q.get()
        else:
            old_count_value, old_time = 0, self.init_time
        speed = (count_value - old_count_value) / (current_time - old_time)
        self.counter_speed[i].value = speed
        Progress._reset_i(self, i)
def get_d(s1, s2, width, lp, lps):
    """Distribute the free space between the two statistic strings.

    Returns ``(s1, s2, d1, d2)`` where ``d1``/``d2`` are the paddings left and
    right of the percentage string, or ``None`` when *s1* and *s2* (measured
    without escape sequences) do not fit into *width*.
    """
    visible = (len(terminal.remove_ESC_SEQ_from_string(s1)) +
               len(terminal.remove_ESC_SEQ_from_string(s2)))
    d = width - visible - 2 - lp - lps
    if d < 0:
        return None
    d1 = d // 2
    return s1, s2, d1, d - d1
def full_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Most verbose layout: TET/speed/TTG on the left, ETA/ORT on the right."""
    left = "TET {} {:>12} TTG {}".format(tet, speed, ttg)
    right = "ETA {} ORT {}".format(eta, ort)
    return get_d(left, right, width, lp, lps)
def full_minor_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Like :py:func:`full_stat` but with one-letter keywords to save space."""
    left = "E {} {:>12} G {}".format(tet, speed, ttg)
    right = "A {} O {}".format(eta, ort)
    return get_d(left, right, width, lp, lps)
def reduced_1_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Reduced layout: drop the ETA, keep elapsed/speed/to-go and overall runtime."""
    left = "E {} {:>12} G {}".format(tet, speed, ttg)
    right = "O {}".format(ort)
    return get_d(left, right, width, lp, lps)
def reduced_2_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Further reduced layout: also drop the speed."""
    left = "E {} G {}".format(tet, ttg)
    right = "O {}".format(ort)
    return get_d(left, right, width, lp, lps)
def reduced_3_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Minimal informative layout: elapsed and to-go only."""
    left = "E {} G {}".format(tet, ttg)
    right = ''
    return get_d(left, right, width, lp, lps)
def reduced_4_stat(p, tet, speed, ttg, eta, ort, repl_ch, width, lp, lps):
    """Last resort layout: no statistics at all, just the bar itself."""
    empty = ''
    return get_d(empty, empty, width, lp, lps)
def kw_bold(s, ch_after):
    """Wrap the statistic keywords in *s* with bold escape sequences.

    A keyword only matches when directly followed by one of the characters in
    *ch_after*, which avoids bolding letters inside ordinary text.
    """
    keywords = ('TET', 'TTG', 'ETA', 'ORT', 'E', 'G', 'A', 'O')
    for kw in keywords:
        for suffix in ch_after:
            s = s.replace(kw + suffix,
                          terminal.ESC_BOLD + kw + terminal.ESC_RESET_BOLD + suffix)
    return s
def _stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Build (without printing) the 'fancy' progress line used by
    :py:class:`ProgressBarFancy`.

    The statistics are laid out with the widest format that still fits the
    available width (cascade full_stat -> ... -> reduced_4_stat); the part of
    the line left of the current progress fraction is drawn with the fill
    character, imitating an 'htop'-like bar.

    :return: the fully formatted line as str (including escape sequences)
    """
    if (max_count_value is None) or (max_count_value == 0):
        # only show current absolute progress as number and estimated speed
        stat = "{}{} [{}] {}#{} ".format(COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                         humanize_time(tet),
                                         humanize_speed(speed),
                                         COLTHM['BAR_COL'],
                                         str(count_value) + terminal.ESC_DEFAULT)
    else:
        if width == 'auto':
            width = get_terminal_width()
        # deduce relative progress
        p = count_value / max_count_value
        if p < 1:
            ps = " {:.1%} ".format(p)
        else:
            ps = " {:.0%} ".format(p)
        if ttg is None:
            eta = '--'
            ort = None
        else:
            eta = datetime.datetime.fromtimestamp(time.time() + ttg).strftime("%Y%m%d_%H:%M:%S")
            ort = tet + ttg  # overall run time = elapsed + to go
        tet = humanize_time(tet)
        speed = '[' + humanize_speed(speed) + ']'
        ttg = humanize_time(ttg)
        ort = humanize_time(ort)
        repl_ch = '-'
        lp = len(prepend)
        args = p, tet, speed, ttg, eta, ort, repl_ch, width, lp, len(ps)
        # try layouts from most to least verbose until one fits the width
        res = full_stat(*args)
        if res is None:
            res = full_minor_stat(*args)
        if res is None:
            res = reduced_1_stat(*args)
        if res is None:
            res = reduced_2_stat(*args)
        if res is None:
            res = reduced_3_stat(*args)
        if res is None:
            res = reduced_4_stat(*args)
        if res is not None:
            s1, s2, d1, d2 = res
            s = s1 + ' ' * d1 + ps + ' ' * d2 + s2
            # split at the progress fraction; left part gets the fill character
            idx_p = math.ceil((width - lp - 2) * p)
            s_before = s[:idx_p].replace(' ', repl_ch)
            if (len(s_before) > 0) and (s_before[-1] == repl_ch):
                s_before = s_before[:-1] + '>'
            s_after = s[idx_p:]
            s_before = kw_bold(s_before, ch_after=[repl_ch, '>'])
            s_after = kw_bold(s_after, ch_after=[' '])
            stat = (COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT +
                    COLTHM['BAR_COL'] + terminal.ESC_BOLD + '[' + terminal.ESC_RESET_BOLD + s_before + terminal.ESC_DEFAULT +
                    s_after + terminal.ESC_BOLD + COLTHM['BAR_COL'] + ']' + terminal.ESC_NO_CHAR_ATTR)
        else:
            # not even the bare bar fits -> show just the percentage
            ps = ps.strip()
            if p == 1:
                ps = ' ' + ps
            stat = prepend + ps
    return stat
def show_stat_ProgressBarFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Format one fancy progress line via :py:func:`_stat` and print it."""
    print(_stat(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs))
class ProgressBarFancy(Progress):
    """
    A :py:class:`Progress` subclass rendering an 'htop'-like bar where the
    statistics text itself doubles as the bar fill.
    """
    def __init__(self, *args, **kwargs):
        """
        Accepts every argument of :py:class:`Progress`; the ``show_stat``
        callback is fixed to :py:func:`show_stat_ProgressBarFancy`.

        width [int/'auto'] - the number of characters used to show the Progress bar,
        use 'auto' to determine width from terminal information -> see _set_width
        """
        Progress.__init__(self, *args, show_stat=show_stat_ProgressBarFancy, **kwargs)
def show_stat_ProgressBarCounterFancy(count_value, max_count_value, prepend, speed, tet, ttg, width, i, **kwargs):
    """Print one progress line for :py:class:`ProgressBarCounterFancy`.

    Combines the reset-counter statistics (from ``kwargs``) with the fancy
    per-cycle bar produced by :py:func:`_stat`, which gets only the width
    remaining after the counter prefix.
    """
    counter_count = kwargs['counter_count'][i]
    counter_speed = kwargs['counter_speed'][i]
    counter_tet = time.time() - kwargs['init_time']
    s_c = "{}{}{} [{}] {}#{}".format(terminal.ESC_NO_CHAR_ATTR,
                                     COLTHM['PRE_COL'] + prepend + terminal.ESC_DEFAULT,
                                     humanize_time(counter_tet),
                                     humanize_speed(counter_speed.value),
                                     COLTHM['BAR_COL'],
                                     str(counter_count.value) + terminal.ESC_DEFAULT)
    if max_count_value is not None:
        if width == 'auto':
            width = get_terminal_width()
        s_c += ' - '
        if max_count_value == 0:
            # max_count == 0 -> plain absolute statistic instead of a bar
            s_c = "{}{} [{}] {}#{} ".format(s_c, humanize_time(tet), humanize_speed(speed),
                                            COLTHM['BAR_COL'], str(count_value) + terminal.ESC_DEFAULT)
        else:
            _width = width - terminal.len_string_without_ESC(s_c)
            s_c += _stat(count_value, max_count_value, '', speed, tet, ttg, _width, i)
    print(s_c)
class ProgressBarCounterFancy(ProgressBarCounter):
    """A :py:class:`ProgressBarCounter` that renders each line with the
    fancy single-line formatter instead of the plain one."""
    def __init__(self, *args, **kwargs):
        super(ProgressBarCounterFancy, self).__init__(*args, **kwargs)
        # swap the formatting callback after the base class set the plain one
        self.show_stat = show_stat_ProgressBarCounterFancy
class SIG_handler_Loop(object):
    """class to setup signal handling for the Loop class

    Note: each subprocess receives the default signal handling from it's parent.
    If the signal function from the module signal is evoked within the subprocess
    this default behavior can be overwritten.

    The init function receives a shared memory boolean object which will be set
    false in case of signal detection. Since the Loop class will check the state
    of this boolean object before each repetition, the loop will stop when
    a signal was received.
    """
    def __init__(self, sigint, sigterm, log, prefix):
        """
        :param sigint: behavior on SIGINT, 'ign' or 'stop'
        :param sigterm: behavior on SIGTERM, 'ign' or 'stop'
        :param log: logger used for the handler messages
        :param prefix: textual prefix kept for log output
        """
        self.set_signal(signal.SIGINT, sigint)
        self.set_signal(signal.SIGTERM, sigterm)
        self.prefix = prefix
        self.log = log
        self.log.info("setup signal handler for loop (SIGINT:%s, SIGTERM:%s)", sigint, sigterm)

    def set_signal(self, sig, handler_str):
        """Install the handler selected by *handler_str* for signal *sig*.

        :param sig: signal number (e.g. ``signal.SIGINT``)
        :param handler_str: 'ign' to ignore the signal, 'stop' to raise
            :py:class:`LoopInterruptError` on reception
        :raises TypeError: if *handler_str* is neither 'ign' nor 'stop'
        """
        if handler_str == 'ign':
            signal.signal(sig, self._ignore_signal)
        elif handler_str == 'stop':
            signal.signal(sig, self._stop_on_signal)
        else:
            # BUG FIX: previously the format string and the argument were passed
            # as two separate TypeError args, so the message was never formatted
            raise TypeError("unknown signal handler string '%s'" % handler_str)

    def _ignore_signal(self, signal, frame):
        # handler for 'ign': log the signal and drop it
        self.log.debug("ignore received sig %s", signal_dict[signal])

    def _stop_on_signal(self, signal, frame):
        # handler for 'stop': raising unwinds the loop function cleanly
        self.log.info("received sig %s -> raise InterruptedError", signal_dict[signal])
        raise LoopInterruptError()
def FloatValue(val=0.):
    """Create a process-shared double precision float.

    :param val: initial value
    :return: a locked ``multiprocessing.Value`` of C type ``'d'``
    """
    shared = mp.Value('d', val, lock=True)
    return shared
def UnsignedIntValue(val=0):
    """Create a process-shared unsigned integer.

    :param val: initial value (must be non-negative)
    :return: a locked ``multiprocessing.Value`` of C type ``'I'``
    """
    shared = mp.Value('I', val, lock=True)
    return shared
def StringValue(num_of_bytes):
    """Create a process-shared character buffer of length *num_of_bytes*.

    :return: a locked ``multiprocessing.Array`` of C type ``'c'``
    """
    buf = _jm_compatible_bytearray(num_of_bytes)
    return mp.Array('c', buf, lock=True)
def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort=False):
    """Escalating shutdown of *proc*: join -> SIGTERM -> (interactive) SIGKILL.

    :param proc: the ``multiprocessing.Process`` to terminate
    :param prefix: textual prefix (kept for compatibility, unused here)
    :param timeout: seconds to wait for a regular join; the SIGTERM phase
        waits three times as long
    :param auto_kill_on_last_resort: if True send SIGKILL without asking;
        otherwise the user is prompted interactively
    :return: True once the process has stopped, False if the user chose to
        ignore the still-running process
    """
    proc.join(timeout)
    if not proc.is_alive():
        log.debug("termination of process (pid %s) within timeout of %s SUCCEEDED!", proc.pid, humanize_time(timeout))
        return True
    # process still runs -> send SIGTERM -> see what happens
    log.warning("termination of process (pid %s) within given timeout of %s FAILED!", proc.pid, humanize_time(timeout))

    proc.terminate()
    new_timeout = 3 * timeout
    log.debug("wait for termination (timeout %s)", humanize_time(new_timeout))
    proc.join(new_timeout)
    if not proc.is_alive():
        log.info("termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!", proc.pid, humanize_time(new_timeout))
        return True
    log.warning("termination of process (pid %s) via SIGTERM with timeout of %s FAILED!", proc.pid, humanize_time(new_timeout))

    log.debug("auto_kill_on_last_resort is %s", auto_kill_on_last_resort)
    # 'k' means "kill next"; any other value falls through to another SIGTERM
    answer = 'k' if auto_kill_on_last_resort else '_'
    while True:
        log.debug("answer string is %s", answer)
        if answer == 'k':
            log.warning("send SIGKILL to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGKILL)
            time.sleep(0.1)
        else:
            log.info("send SIGTERM to process with pid %s", proc.pid)
            os.kill(proc.pid, signal.SIGTERM)
            time.sleep(0.1)
        if not proc.is_alive():
            log.info("process (pid %s) has stopped running!", proc.pid)
            return True
        else:
            log.warning("process (pid %s) is still running!", proc.pid)
        print("the process (pid {}) seems still running".format(proc.pid))
        try:
            answer = input("press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore")
        except Exception as e:
            # e.g. no interactive stdin available -> fall back to SIGKILL
            log.error("could not ask for sending SIGKILL due to {}".format(type(e)))
            log.info(traceback.format_exc())
            log.warning("send SIGKILL now")
            answer = 'k'
        if answer == 'ignore':
            log.warning("ignore process %s", proc.pid)
            return False
        elif answer != 'k':
            # any other input -> retry with SIGTERM
            answer = ''
def getCountKwargs(func):
    """ Returns a list ["count kwarg", "count_max kwarg"] for a
    given function. Valid combinations are defined in
    `progress.validCountKwargs`.

    Returns None if no keyword arguments are found.
    """
    code = getattr(func, "__code__", None)
    if code is not None:
        arg_names = code.co_varnames[:code.co_argcount]
        for pair in validCountKwargs:
            if pair[0] in arg_names and pair[1] in arg_names:
                return pair
    # no matching pair of counting kwargs
    return None
def humanize_speed(c_per_sec):
    """convert a speed in counts per second to counts per [s, min, h, d],
    choosing the smallest unit for which the value is at least one
    (or counts per day if even that stays below one).
    """
    factors = (60, 60, 24)
    unit_names = ('c/s', 'c/min', 'c/h', 'c/d')
    value = c_per_sec
    idx = 0
    if value > 0:
        for factor in factors:
            if value >= 1:
                break
            value *= factor
            idx += 1
    return "{:.1f}{}".format(value, unit_names[idx])
def codecov_subprocess_check():
    """Marker function: prints a line that is only reached from a subprocess,
    used to verify that coverage of subprocess code is collected."""
    print("this line will be only called from a subprocess")
# alias so the queue implementation used throughout this module can be swapped centrally
myQueue = mp.Queue

# a mapping from the numeric values of the signals to their names used in the
# standard python module signals
signal_dict = {}
for s in dir(signal):
    if s.startswith('SIG') and s[3] != '_':
        n = getattr(signal, s)
        if n in signal_dict:
            # several names may map to the same number (platform dependent aliases)
            signal_dict[n] += ('/' + s)
        else:
            signal_dict[n] = s

# color themes: 'PRE_COL' colors the prepend text, 'BAR_COL' the bar itself
# NOTE(review): 'ADD_LNS_UP' appears to be an extra line offset for terminals
# that scroll differently (windows cmd) -- confirm against the terminal module
_colthm_term_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_GREEN, 'ADD_LNS_UP': 0}
_colthm_ipyt_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_LIGHT_BLUE, 'ADD_LNS_UP': 0}
_colthm_wincmd_default = {'PRE_COL': terminal.ESC_RED, 'BAR_COL': terminal.ESC_GREEN, 'ADD_LNS_UP': 1}

color_themes = {'term_default': _colthm_term_default,
                'ipyt_default': _colthm_ipyt_default,
                'wincmd_default': _colthm_wincmd_default}

# pick a sensible default color theme for the current platform
if platform.system() == 'Windows':
    COLTHM = _colthm_wincmd_default
else:
    COLTHM = _colthm_term_default
def choose_color_theme(name):
    """Select the active color theme by name.

    Unknown names leave the current theme untouched and emit a UserWarning.
    """
    global COLTHM
    try:
        COLTHM = color_themes[name]
    except KeyError:
        warnings.warn("no such color theme {}".format(name))
# keyword arguments that define counting in wrapped functions;
# each pair is (name of the counter kwarg, name of the maximum-count kwarg),
# checked in order by getCountKwargs()
validCountKwargs = [
    [ "count", "count_max"],
    [ "count", "max_count"],
    [ "c", "m"],
    [ "jmc", "jmm"],
]
|
cimatosa/progression | progression/progress.py | Loop.__cleanup | python | def __cleanup(self):
# set run to False and wait some time -> see what happens
self._run.value = False
if check_process_termination(proc = self._proc,
timeout = 2*self.interval,
prefix = '',
auto_kill_on_last_resort = self._auto_kill_on_last_resort):
log.debug("cleanup successful")
else:
raise RuntimeError("cleanup FAILED!")
try:
self.conn_send.close()
self._log_queue_listener.stop()
except OSError:
pass
log.debug("wait for monitor thread to join")
self._monitor_thread.join()
log.debug("monitor thread to joined")
self._func_running.value = False | Wait at most twice as long as the given repetition interval
for the _wrapper_function to terminate.
If after that time the _wrapper_function has not terminated,
send SIGTERM to and the process.
Wait at most five times as long as the given repetition interval
for the _wrapper_function to terminate.
If the process still running send SIGKILL automatically if
auto_kill_on_last_resort was set True or ask the
user to confirm sending SIGKILL | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L422-L454 | [
"def check_process_termination(proc, prefix, timeout, auto_kill_on_last_resort = False):\n proc.join(timeout)\n if not proc.is_alive():\n log.debug(\"termination of process (pid %s) within timeout of %s SUCCEEDED!\", proc.pid, humanize_time(timeout))\n return True\n\n # process still runs -> send SIGTERM -> see what happens\n log.warning(\"termination of process (pid %s) within given timeout of %s FAILED!\", proc.pid, humanize_time(timeout))\n\n proc.terminate()\n new_timeout = 3*timeout\n log.debug(\"wait for termination (timeout %s)\", humanize_time(new_timeout))\n proc.join(new_timeout)\n if not proc.is_alive():\n log.info(\"termination of process (pid %s) via SIGTERM with timeout of %s SUCCEEDED!\", proc.pid, humanize_time(new_timeout))\n return True\n log.warning(\"termination of process (pid %s) via SIGTERM with timeout of %s FAILED!\", proc.pid, humanize_time(new_timeout))\n\n log.debug(\"auto_kill_on_last_resort is %s\", auto_kill_on_last_resort)\n answer = 'k' if auto_kill_on_last_resort else '_'\n while True:\n log.debug(\"answer string is %s\", answer)\n if answer == 'k':\n log.warning(\"send SIGKILL to process with pid %s\", proc.pid)\n os.kill(proc.pid, signal.SIGKILL)\n time.sleep(0.1)\n else:\n log.info(\"send SIGTERM to process with pid %s\", proc.pid)\n os.kill(proc.pid, signal.SIGTERM)\n time.sleep(0.1)\n\n if not proc.is_alive():\n log.info(\"process (pid %s) has stopped running!\", proc.pid)\n return True\n else:\n log.warning(\"process (pid %s) is still running!\", proc.pid)\n\n print(\"the process (pid {}) seems still running\".format(proc.pid))\n try:\n answer = input(\"press 'enter' to send SIGTERM, enter 'k' to send SIGKILL or enter 'ignore' to not bother about the process anymore\")\n except Exception as e:\n log.error(\"could not ask for sending SIGKILL due to {}\".format(type(e)))\n log.info(traceback.format_exc())\n log.warning(\"send SIGKILL now\")\n answer = 'k'\n\n if answer == 'ignore':\n log.warning(\"ignore process %s\", 
proc.pid)\n return False\n elif answer != 'k':\n answer = ''\n"
] | class Loop(object):
"""
class to run a function periodically an seperate process.
In case the called function returns True, the loop will stop.
Otherwise a time interval given by interval will be slept before
another execution is triggered.
The shared memory variable _run (accessible via the class property run)
also determines if the function if executed another time. If set to False
the execution stops.
For safe cleanup (and in order to catch any Errors)
it is advisable to instantiate this class
using 'with' statement as follows:
with Loop(**kwargs) as my_loop:
my_loop.start()
...
this will guarantee you that the spawned loop process is
down when exiting the 'with' scope.
The only circumstance where the process is still running is
when you set auto_kill_on_last_resort to False and answer the
question to send SIGKILL with no.
"""
def __init__(self,
func,
args = (),
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
auto_kill_on_last_resort = False,
raise_error = True):
"""
func [callable] - function to be called periodically
args [tuple] - arguments passed to func when calling
intervall [pos number] - time to "sleep" between each call
verbose - DEPRECATED, only kept for compatibility, use global log.level to
specify verbosity
sigint [string] - signal handler string to set SIGINT behavior (see below)
sigterm [string] - signal handler string to set SIGTERM behavior (see below)
auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
to loop process in case normal stop and SIGTERM failed. If set True, send SIDKILL
without asking.
the signal handler string may be one of the following
ing: ignore the incoming signal
stop: raise InterruptedError which is caught silently.
"""
self._proc = None
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
self.func = func
self.args = args
self.interval = interval
assert self.interval >= 0
self._run = mp.Value('b', False)
self._pause = mp.Value('b', False)
self._func_running = mp.Value('b', False)
self._sigint = sigint
self._sigterm = sigterm
self._auto_kill_on_last_resort = auto_kill_on_last_resort
log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
self._monitor_thread = None
self.pipe_handler = PipeHandler()
self.raise_error = raise_error
def __enter__(self):
return self
def __exit__(self, *exc_args):
if self.is_alive():
log.debug("loop is still running on context exit")
else:
log.debug("loop has stopped on context exit")
self.stop()
def __cleanup(self):
"""
Wait at most twice as long as the given repetition interval
for the _wrapper_function to terminate.
If after that time the _wrapper_function has not terminated,
send SIGTERM to and the process.
Wait at most five times as long as the given repetition interval
for the _wrapper_function to terminate.
If the process still running send SIGKILL automatically if
auto_kill_on_last_resort was set True or ask the
user to confirm sending SIGKILL
"""
# set run to False and wait some time -> see what happens
self._run.value = False
if check_process_termination(proc = self._proc,
timeout = 2*self.interval,
prefix = '',
auto_kill_on_last_resort = self._auto_kill_on_last_resort):
log.debug("cleanup successful")
else:
raise RuntimeError("cleanup FAILED!")
try:
self.conn_send.close()
self._log_queue_listener.stop()
except OSError:
pass
log.debug("wait for monitor thread to join")
self._monitor_thread.join()
log.debug("monitor thread to joined")
self._func_running.value = False
def _monitor_stdout_pipe(self):
while True:
try:
b = self.conn_recv.recv()
self.pipe_handler(b)
except EOFError:
break
def start(self, timeout=None):
"""
uses multiprocess Process to call _wrapper_func in subprocess
"""
if self.is_alive():
log.warning("a process with pid %s is already running", self._proc.pid)
return
self._run.value = True
self._func_running.value = False
name = self.__class__.__name__
self.conn_recv, self.conn_send = mp.Pipe(False)
self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
self._monitor_thread.daemon=True
self._monitor_thread.start()
log.debug("started monitor thread")
self._log_queue = mp.Queue()
self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
self._log_queue_listener.start()
args = (self.func, self.args, self._run, self._pause, self.interval,
self._sigint, self._sigterm, name, log.level, self.conn_send,
self._func_running, self._log_queue)
self._proc = mp.Process(target = _loop_wrapper_func,
args = args)
self._proc.start()
log.info("started a new process with pid %s", self._proc.pid)
log.debug("wait for loop function to come up")
t0 = time.time()
while not self._func_running.value:
if self._proc.exitcode is not None:
exc = self._proc.exitcode
self._proc = None
if exc == 0:
log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
return
else:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
"see log (INFO level) for traceback information")
time.sleep(0.1)
if (timeout is not None) and ((time.time() - t0) > timeout):
err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
log.error(err_msg)
log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
"or an internal error occurred before reaching the function call")
raise LoopTimeoutError(err_msg)
log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))
def stop(self):
"""
stops the process triggered by start
Setting the shared memory boolean run to false, which should prevent
the loop from repeating. Call __cleanup to make sure the process
stopped. After that we could trigger start() again.
"""
if self.is_alive():
self._proc.terminate()
if self._proc is not None:
self.__cleanup()
if self.raise_error:
if self._proc.exitcode == 255:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
"see log (INFO level) for traceback information")
self.pipe_handler.close()
self._proc = None
def join(self, timeout):
"""
calls join for the spawned process with given timeout
"""
if self.is_alive():
self._proc.join(timeout)
def is_alive(self):
if self._proc is None:
return False
else:
return self._proc.is_alive()
def is_running(self):
if self.is_alive():
return self._func_running.value
else:
return False
def pause(self):
if self._run.value:
self._pause.value = True
log.debug("process with pid %s paused", self._proc.pid)
def resume(self):
if self._run.value:
self._pause.value = False
log.debug("process with pid %s resumed", self._proc.pid)
def getpid(self):
if self._proc is not None:
return self._proc.pid
else:
return None
|
cimatosa/progression | progression/progress.py | Loop.start | python | def start(self, timeout=None):
if self.is_alive():
log.warning("a process with pid %s is already running", self._proc.pid)
return
self._run.value = True
self._func_running.value = False
name = self.__class__.__name__
self.conn_recv, self.conn_send = mp.Pipe(False)
self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
self._monitor_thread.daemon=True
self._monitor_thread.start()
log.debug("started monitor thread")
self._log_queue = mp.Queue()
self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
self._log_queue_listener.start()
args = (self.func, self.args, self._run, self._pause, self.interval,
self._sigint, self._sigterm, name, log.level, self.conn_send,
self._func_running, self._log_queue)
self._proc = mp.Process(target = _loop_wrapper_func,
args = args)
self._proc.start()
log.info("started a new process with pid %s", self._proc.pid)
log.debug("wait for loop function to come up")
t0 = time.time()
while not self._func_running.value:
if self._proc.exitcode is not None:
exc = self._proc.exitcode
self._proc = None
if exc == 0:
log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
return
else:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
"see log (INFO level) for traceback information")
time.sleep(0.1)
if (timeout is not None) and ((time.time() - t0) > timeout):
err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
log.error(err_msg)
log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
"or an internal error occurred before reaching the function call")
raise LoopTimeoutError(err_msg)
log.debug("loop function is up ({})".format(humanize_time(time.time()-t0))) | uses multiprocess Process to call _wrapper_func in subprocess | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L465-L516 | [
"def is_alive(self):\n if self._proc is None:\n return False\n else:\n return self._proc.is_alive()\n"
] | class Loop(object):
"""
class to run a function periodically an seperate process.
In case the called function returns True, the loop will stop.
Otherwise a time interval given by interval will be slept before
another execution is triggered.
The shared memory variable _run (accessible via the class property run)
also determines if the function if executed another time. If set to False
the execution stops.
For safe cleanup (and in order to catch any Errors)
it is advisable to instantiate this class
using 'with' statement as follows:
with Loop(**kwargs) as my_loop:
my_loop.start()
...
this will guarantee you that the spawned loop process is
down when exiting the 'with' scope.
The only circumstance where the process is still running is
when you set auto_kill_on_last_resort to False and answer the
question to send SIGKILL with no.
"""
def __init__(self,
func,
args = (),
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
auto_kill_on_last_resort = False,
raise_error = True):
"""
func [callable] - function to be called periodically
args [tuple] - arguments passed to func when calling
intervall [pos number] - time to "sleep" between each call
verbose - DEPRECATED, only kept for compatibility, use global log.level to
specify verbosity
sigint [string] - signal handler string to set SIGINT behavior (see below)
sigterm [string] - signal handler string to set SIGTERM behavior (see below)
auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
to loop process in case normal stop and SIGTERM failed. If set True, send SIDKILL
without asking.
the signal handler string may be one of the following
ing: ignore the incoming signal
stop: raise InterruptedError which is caught silently.
"""
self._proc = None
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
self.func = func
self.args = args
self.interval = interval
assert self.interval >= 0
self._run = mp.Value('b', False)
self._pause = mp.Value('b', False)
self._func_running = mp.Value('b', False)
self._sigint = sigint
self._sigterm = sigterm
self._auto_kill_on_last_resort = auto_kill_on_last_resort
log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
self._monitor_thread = None
self.pipe_handler = PipeHandler()
self.raise_error = raise_error
def __enter__(self):
return self
def __exit__(self, *exc_args):
if self.is_alive():
log.debug("loop is still running on context exit")
else:
log.debug("loop has stopped on context exit")
self.stop()
def __cleanup(self):
"""
Wait at most twice as long as the given repetition interval
for the _wrapper_function to terminate.
If after that time the _wrapper_function has not terminated,
send SIGTERM to and the process.
Wait at most five times as long as the given repetition interval
for the _wrapper_function to terminate.
If the process still running send SIGKILL automatically if
auto_kill_on_last_resort was set True or ask the
user to confirm sending SIGKILL
"""
# set run to False and wait some time -> see what happens
self._run.value = False
if check_process_termination(proc = self._proc,
timeout = 2*self.interval,
prefix = '',
auto_kill_on_last_resort = self._auto_kill_on_last_resort):
log.debug("cleanup successful")
else:
raise RuntimeError("cleanup FAILED!")
try:
self.conn_send.close()
self._log_queue_listener.stop()
except OSError:
pass
log.debug("wait for monitor thread to join")
self._monitor_thread.join()
log.debug("monitor thread to joined")
self._func_running.value = False
def _monitor_stdout_pipe(self):
while True:
try:
b = self.conn_recv.recv()
self.pipe_handler(b)
except EOFError:
break
def start(self, timeout=None):
"""
uses multiprocess Process to call _wrapper_func in subprocess
"""
if self.is_alive():
log.warning("a process with pid %s is already running", self._proc.pid)
return
self._run.value = True
self._func_running.value = False
name = self.__class__.__name__
self.conn_recv, self.conn_send = mp.Pipe(False)
self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
self._monitor_thread.daemon=True
self._monitor_thread.start()
log.debug("started monitor thread")
self._log_queue = mp.Queue()
self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
self._log_queue_listener.start()
args = (self.func, self.args, self._run, self._pause, self.interval,
self._sigint, self._sigterm, name, log.level, self.conn_send,
self._func_running, self._log_queue)
self._proc = mp.Process(target = _loop_wrapper_func,
args = args)
self._proc.start()
log.info("started a new process with pid %s", self._proc.pid)
log.debug("wait for loop function to come up")
t0 = time.time()
while not self._func_running.value:
if self._proc.exitcode is not None:
exc = self._proc.exitcode
self._proc = None
if exc == 0:
log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
return
else:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
"see log (INFO level) for traceback information")
time.sleep(0.1)
if (timeout is not None) and ((time.time() - t0) > timeout):
err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
log.error(err_msg)
log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
"or an internal error occurred before reaching the function call")
raise LoopTimeoutError(err_msg)
log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))
def stop(self):
"""
stops the process triggered by start
Setting the shared memory boolean run to false, which should prevent
the loop from repeating. Call __cleanup to make sure the process
stopped. After that we could trigger start() again.
"""
if self.is_alive():
self._proc.terminate()
if self._proc is not None:
self.__cleanup()
if self.raise_error:
if self._proc.exitcode == 255:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
"see log (INFO level) for traceback information")
self.pipe_handler.close()
self._proc = None
def join(self, timeout):
"""
calls join for the spawned process with given timeout
"""
if self.is_alive():
self._proc.join(timeout)
def is_alive(self):
if self._proc is None:
return False
else:
return self._proc.is_alive()
def is_running(self):
if self.is_alive():
return self._func_running.value
else:
return False
def pause(self):
if self._run.value:
self._pause.value = True
log.debug("process with pid %s paused", self._proc.pid)
def resume(self):
if self._run.value:
self._pause.value = False
log.debug("process with pid %s resumed", self._proc.pid)
def getpid(self):
if self._proc is not None:
return self._proc.pid
else:
return None
|
cimatosa/progression | progression/progress.py | Loop.stop | python | def stop(self):
"""
stops the process triggered by start
Setting the shared memory boolean run to false, which should prevent
the loop from repeating. Call __cleanup to make sure the process
stopped. After that we could trigger start() again.
"""
if self.is_alive():
self._proc.terminate()
if self._proc is not None:
self.__cleanup()
if self.raise_error:
if self._proc.exitcode == 255:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
"see log (INFO level) for traceback information")
self.pipe_handler.close()
self._proc = None | stops the process triggered by start
Setting the shared memory boolean run to false, which should prevent
the loop from repeating. Call __cleanup to make sure the process
stopped. After that we could trigger start() again. | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L518-L537 | [
"def __cleanup(self):\n \"\"\"\n Wait at most twice as long as the given repetition interval\n for the _wrapper_function to terminate.\n\n If after that time the _wrapper_function has not terminated,\n send SIGTERM to and the process.\n\n Wait at most five times as long as the given repetition interval\n for the _wrapper_function to terminate.\n\n If the process still running send SIGKILL automatically if\n auto_kill_on_last_resort was set True or ask the\n user to confirm sending SIGKILL\n \"\"\"\n # set run to False and wait some time -> see what happens \n self._run.value = False\n if check_process_termination(proc = self._proc,\n timeout = 2*self.interval,\n prefix = '',\n auto_kill_on_last_resort = self._auto_kill_on_last_resort):\n log.debug(\"cleanup successful\")\n else:\n raise RuntimeError(\"cleanup FAILED!\")\n try:\n self.conn_send.close()\n self._log_queue_listener.stop()\n except OSError:\n pass\n log.debug(\"wait for monitor thread to join\")\n self._monitor_thread.join()\n log.debug(\"monitor thread to joined\")\n self._func_running.value = False\n",
"def is_alive(self):\n if self._proc is None:\n return False\n else:\n return self._proc.is_alive()\n"
] | class Loop(object):
"""
class to run a function periodically an seperate process.
In case the called function returns True, the loop will stop.
Otherwise a time interval given by interval will be slept before
another execution is triggered.
The shared memory variable _run (accessible via the class property run)
also determines if the function if executed another time. If set to False
the execution stops.
For safe cleanup (and in order to catch any Errors)
it is advisable to instantiate this class
using 'with' statement as follows:
with Loop(**kwargs) as my_loop:
my_loop.start()
...
this will guarantee you that the spawned loop process is
down when exiting the 'with' scope.
The only circumstance where the process is still running is
when you set auto_kill_on_last_resort to False and answer the
question to send SIGKILL with no.
"""
def __init__(self,
func,
args = (),
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
auto_kill_on_last_resort = False,
raise_error = True):
"""
func [callable] - function to be called periodically
args [tuple] - arguments passed to func when calling
intervall [pos number] - time to "sleep" between each call
verbose - DEPRECATED, only kept for compatibility, use global log.level to
specify verbosity
sigint [string] - signal handler string to set SIGINT behavior (see below)
sigterm [string] - signal handler string to set SIGTERM behavior (see below)
auto_kill_on_last_resort [bool] - If set False (default), ask user to send SIGKILL
to loop process in case normal stop and SIGTERM failed. If set True, send SIDKILL
without asking.
the signal handler string may be one of the following
ing: ignore the incoming signal
stop: raise InterruptedError which is caught silently.
"""
self._proc = None
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
self.func = func
self.args = args
self.interval = interval
assert self.interval >= 0
self._run = mp.Value('b', False)
self._pause = mp.Value('b', False)
self._func_running = mp.Value('b', False)
self._sigint = sigint
self._sigterm = sigterm
self._auto_kill_on_last_resort = auto_kill_on_last_resort
log.debug("auto_kill_on_last_resort = %s", self._auto_kill_on_last_resort)
self._monitor_thread = None
self.pipe_handler = PipeHandler()
self.raise_error = raise_error
def __enter__(self):
return self
def __exit__(self, *exc_args):
if self.is_alive():
log.debug("loop is still running on context exit")
else:
log.debug("loop has stopped on context exit")
self.stop()
def __cleanup(self):
"""
Wait at most twice as long as the given repetition interval
for the _wrapper_function to terminate.
If after that time the _wrapper_function has not terminated,
send SIGTERM to and the process.
Wait at most five times as long as the given repetition interval
for the _wrapper_function to terminate.
If the process still running send SIGKILL automatically if
auto_kill_on_last_resort was set True or ask the
user to confirm sending SIGKILL
"""
# set run to False and wait some time -> see what happens
self._run.value = False
if check_process_termination(proc = self._proc,
timeout = 2*self.interval,
prefix = '',
auto_kill_on_last_resort = self._auto_kill_on_last_resort):
log.debug("cleanup successful")
else:
raise RuntimeError("cleanup FAILED!")
try:
self.conn_send.close()
self._log_queue_listener.stop()
except OSError:
pass
log.debug("wait for monitor thread to join")
self._monitor_thread.join()
log.debug("monitor thread to joined")
self._func_running.value = False
def _monitor_stdout_pipe(self):
while True:
try:
b = self.conn_recv.recv()
self.pipe_handler(b)
except EOFError:
break
def start(self, timeout=None):
"""
uses multiprocess Process to call _wrapper_func in subprocess
"""
if self.is_alive():
log.warning("a process with pid %s is already running", self._proc.pid)
return
self._run.value = True
self._func_running.value = False
name = self.__class__.__name__
self.conn_recv, self.conn_send = mp.Pipe(False)
self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)
self._monitor_thread.daemon=True
self._monitor_thread.start()
log.debug("started monitor thread")
self._log_queue = mp.Queue()
self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)
self._log_queue_listener.start()
args = (self.func, self.args, self._run, self._pause, self.interval,
self._sigint, self._sigterm, name, log.level, self.conn_send,
self._func_running, self._log_queue)
self._proc = mp.Process(target = _loop_wrapper_func,
args = args)
self._proc.start()
log.info("started a new process with pid %s", self._proc.pid)
log.debug("wait for loop function to come up")
t0 = time.time()
while not self._func_running.value:
if self._proc.exitcode is not None:
exc = self._proc.exitcode
self._proc = None
if exc == 0:
log.warning("wrapper function already terminated with exitcode 0\nloop is not running")
return
else:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(exc)+
"see log (INFO level) for traceback information")
time.sleep(0.1)
if (timeout is not None) and ((time.time() - t0) > timeout):
err_msg = "could not bring up function on time (timeout: {}s)".format(timeout)
log.error(err_msg)
log.info("either it takes too long to spawn the subprocess (increase the timeout)\n"+
"or an internal error occurred before reaching the function call")
raise LoopTimeoutError(err_msg)
log.debug("loop function is up ({})".format(humanize_time(time.time()-t0)))
def stop(self):
"""
stops the process triggered by start
Setting the shared memory boolean run to false, which should prevent
the loop from repeating. Call __cleanup to make sure the process
stopped. After that we could trigger start() again.
"""
if self.is_alive():
self._proc.terminate()
if self._proc is not None:
self.__cleanup()
if self.raise_error:
if self._proc.exitcode == 255:
raise LoopExceptionError("the loop function return non zero exticode ({})!\n".format(self._proc.exitcode)+
"see log (INFO level) for traceback information")
self.pipe_handler.close()
self._proc = None
def join(self, timeout):
"""
calls join for the spawned process with given timeout
"""
if self.is_alive():
self._proc.join(timeout)
def is_alive(self):
if self._proc is None:
return False
else:
return self._proc.is_alive()
def is_running(self):
if self.is_alive():
return self._func_running.value
else:
return False
def pause(self):
if self._run.value:
self._pause.value = True
log.debug("process with pid %s paused", self._proc.pid)
def resume(self):
if self._run.value:
self._pause.value = False
log.debug("process with pid %s resumed", self._proc.pid)
def getpid(self):
if self._proc is not None:
return self._proc.pid
else:
return None
|
cimatosa/progression | progression/progress.py | Progress._calc | python | def _calc(count,
last_count,
start_time,
max_count,
speed_calc_cycles,
q,
last_speed,
lock):
count_value = count.value
start_time_value = start_time.value
current_time = time.time()
if last_count.value != count_value:
# some progress happened
with lock:
# save current state (count, time) to queue
q.put((count_value, current_time))
# get older state from queue (or initial state)
# to to speed estimation
if q.qsize() > speed_calc_cycles:
old_count_value, old_time = q.get()
else:
old_count_value, old_time = 0, start_time_value
last_count.value = count_value
#last_old_count.value = old_count_value
#last_old_time.value = old_time
speed = (count_value - old_count_value) / (current_time - old_time)
last_speed.value = speed
else:
# progress has not changed since last call
# use also old (cached) data from the queue
#old_count_value, old_time = last_old_count.value, last_old_time.value
speed = last_speed.value
if (max_count is None):
max_count_value = None
else:
max_count_value = max_count.value
tet = (current_time - start_time_value)
if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
ttg = None
else:
ttg = math.ceil((max_count_value - count_value) / speed)
return count_value, max_count_value, speed, tet, ttg | do the pre calculations in order to get TET, speed, TTG
:param count: count
:param last_count: count at the last call, allows to treat the case of no progress
between sequential calls
:param start_time: the time when start was triggered
:param max_count: the maximal value count
:type max_count:
:param speed_calc_cycles:
:type speed_calc_cycles:
:param q:
:type q:
:param last_speed:
:type last_speed:
:param lock:
:type lock: | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L806-L874 | null | class Progress(Loop):
"""
Abstract Progress Class
The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
function which calculates progress information from a changing counter value.
The formatting of these information is done by overwriting the static member
:py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
format a single progress bar on a single line only.
The extension to multiple progresses is done
automatically base on the formatting of a single line.
"""
def __init__(self,
count,
max_count = None,
prepend = None,
width = 'auto',
speed_calc_cycles = 10,
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
info_line = None,
show_stat = None):
"""
:param count: shared variable for holding the current state
(use :py:func:`UnsignedIntValue` for short hand creation)
:type count: list/single value of multiprocessing.Value
:param max_count: shared variable for holding the final state
:type max_count: None or list/single value of multiprocessing.Value
:param prepend: string to put in front of each progress output
:type prepend: None, str or list of str
:param width: the width to use for the progress line (fixed or automatically determined)
:type width: int or "auto"
:param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
(example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
the current state and state 4 updates before where the elapsed time will roughly be 4s)
:param interval: seconds to wait before updating the progress
:param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
output level
:param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigint: "stop" or "ign"
:param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigterm: "stop" or "ign"
:param info_line: additional text to show below the progress (use :py:func:`StringValue`
for short hand creation of shared strings)
:type info_line: None or multiprocessing.Array of characters
.. note::
As `Progress` is derived from :py:class:`Loop` it is highly encurraged to create
any instance of Progress with a context manager (``with`` statement).
This ensures that the subprocess showing the progress terminats on context exit.
Otherwise one has to make sure that at some point the stop() routine is called.
abstract example::
with AProgressClass(...) as p:
p.start()
# do stuff and modify counter
"""
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
# converts count to list and do type check
try:
for c in count:
if not isinstance(c, Synchronized):
raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = True
except TypeError:
if not isinstance(count, Synchronized):
raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = False
count = [count]
self.len = len(count)
# converts max_count to list and do type check
if max_count is not None:
if self.is_multi:
try:
for i, m in enumerate(max_count):
if not isinstance(m, Synchronized):
max_count[i] = UnsignedIntValue(m)
except TypeError:
raise TypeError("'max_count' must be iterable")
else:
if not isinstance(max_count, Synchronized):
max_count = UnsignedIntValue(max_count)
max_count = [max_count]
else:
max_count = [None] * self.len
self.start_time = []
self.speed_calc_cycles = speed_calc_cycles
self.width = width
self.q = []
self.prepend = []
self.lock = []
self.last_count = []
self.last_speed = []
for i in range(self.len):
self.q.append(myQueue()) # queue to save the last speed_calc_cycles
# (time, count) information to calculate speed
#self.q[-1].cancel_join_thread()
self.last_count.append(UnsignedIntValue())
self.last_speed.append(FloatValue())
self.lock.append(mp.Lock())
self.start_time.append(FloatValue(val=time.time()))
if prepend is None:
# no prepend given
self.prepend.append('')
else:
if isinstance(prepend, str):
self.prepend.append(prepend)
else:
# assume list of prepend, (needs to be a sequence)
self.prepend.append(prepend[i])
self.max_count = max_count # list of multiprocessing value type
self.count = count # list of multiprocessing value type
self.interval = interval
self.verbose = verbose
self.show_on_exit = False
self.add_args = {}
self.info_line = info_line
self.show_stat = show_stat
# setup loop class with func
Loop.__init__(self,
func = _show_stat_wrapper_multi_Progress,
args = (self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
show_stat,
self.len,
self.add_args,
self.lock,
self.info_line),
interval = interval,
sigint = sigint,
sigterm = sigterm,
auto_kill_on_last_resort = True)
def __exit__(self, *exc_args):
self.stop()
@staticmethod
def _calc(count,
last_count,
start_time,
max_count,
speed_calc_cycles,
q,
last_speed,
lock):
"""do the pre calculations in order to get TET, speed, TTG
:param count: count
:param last_count: count at the last call, allows to treat the case of no progress
between sequential calls
:param start_time: the time when start was triggered
:param max_count: the maximal value count
:type max_count:
:param speed_calc_cycles:
:type speed_calc_cycles:
:param q:
:type q:
:param last_speed:
:type last_speed:
:param lock:
:type lock:
"""
count_value = count.value
start_time_value = start_time.value
current_time = time.time()
if last_count.value != count_value:
# some progress happened
with lock:
# save current state (count, time) to queue
q.put((count_value, current_time))
# get older state from queue (or initial state)
# to to speed estimation
if q.qsize() > speed_calc_cycles:
old_count_value, old_time = q.get()
else:
old_count_value, old_time = 0, start_time_value
last_count.value = count_value
#last_old_count.value = old_count_value
#last_old_time.value = old_time
speed = (count_value - old_count_value) / (current_time - old_time)
last_speed.value = speed
else:
# progress has not changed since last call
# use also old (cached) data from the queue
#old_count_value, old_time = last_old_count.value, last_old_time.value
speed = last_speed.value
if (max_count is None):
max_count_value = None
else:
max_count_value = max_count.value
tet = (current_time - start_time_value)
if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
ttg = None
else:
ttg = math.ceil((max_count_value - count_value) / speed)
return count_value, max_count_value, speed, tet, ttg
def _reset_all(self):
"""
reset all progress information
"""
for i in range(self.len):
self._reset_i(i)
def _reset_i(self, i):
"""
reset i-th progress information
"""
self.count[i].value=0
log.debug("reset counter %s", i)
self.lock[i].acquire()
for x in range(self.q[i].qsize()):
self.q[i].get()
self.lock[i].release()
self.start_time[i].value = time.time()
def _show_stat(self):
"""
convenient functions to call the static show_stat_wrapper_multi with
the given class members
"""
_show_stat_wrapper_multi_Progress(self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
self.show_stat,
self.len,
self.add_args,
self.lock,
self.info_line,
no_move_up=True)
def reset(self, i = None):
"""resets the progress informaion
:param i: tell which progress to reset, if None reset all
:type i: None, int
"""
if i is None:
self._reset_all()
else:
self._reset_i(i)
def start(self):
"""
start
"""
# before printing any output to stout, we can now check this
# variable to see if any other ProgressBar has reserved that
# terminal.
if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
if not terminal.terminal_reserve(progress_obj=self):
log.warning("tty already reserved, NOT starting the progress loop!")
return
super(Progress, self).start()
self.show_on_exit = True
def stop(self):
"""
trigger clean up by hand, needs to be done when not using
context management via 'with' statement
- will terminate loop process
- show a last progress -> see the full 100% on exit
- releases terminal reservation
"""
super(Progress, self).stop()
terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
if self.show_on_exit:
if not isinstance(self.pipe_handler, PipeToPrint):
myout = inMemoryBuffer()
stdout = sys.stdout
sys.stdout = myout
self._show_stat()
self.pipe_handler(myout.getvalue())
sys.stdout = stdout
else:
self._show_stat()
print()
self.show_on_exit = False
|
cimatosa/progression | progression/progress.py | Progress._reset_i | python | def _reset_i(self, i):
self.count[i].value=0
log.debug("reset counter %s", i)
self.lock[i].acquire()
for x in range(self.q[i].qsize()):
self.q[i].get()
self.lock[i].release()
self.start_time[i].value = time.time() | reset i-th progress information | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L883-L894 | null | class Progress(Loop):
"""
Abstract Progress Class
The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
function which calculates progress information from a changing counter value.
The formatting of these information is done by overwriting the static member
:py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
format a single progress bar on a single line only.
The extension to multiple progresses is done
automatically base on the formatting of a single line.
"""
def __init__(self,
count,
max_count = None,
prepend = None,
width = 'auto',
speed_calc_cycles = 10,
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
info_line = None,
show_stat = None):
"""
:param count: shared variable for holding the current state
(use :py:func:`UnsignedIntValue` for short hand creation)
:type count: list/single value of multiprocessing.Value
:param max_count: shared variable for holding the final state
:type max_count: None or list/single value of multiprocessing.Value
:param prepend: string to put in front of each progress output
:type prepend: None, str or list of str
:param width: the width to use for the progress line (fixed or automatically determined)
:type width: int or "auto"
:param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
(example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
the current state and state 4 updates before where the elapsed time will roughly be 4s)
:param interval: seconds to wait before updating the progress
:param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
output level
:param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigint: "stop" or "ign"
:param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigterm: "stop" or "ign"
:param info_line: additional text to show below the progress (use :py:func:`StringValue`
for short hand creation of shared strings)
:type info_line: None or multiprocessing.Array of characters
.. note::
As `Progress` is derived from :py:class:`Loop` it is highly encurraged to create
any instance of Progress with a context manager (``with`` statement).
This ensures that the subprocess showing the progress terminats on context exit.
Otherwise one has to make sure that at some point the stop() routine is called.
abstract example::
with AProgressClass(...) as p:
p.start()
# do stuff and modify counter
"""
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
# converts count to list and do type check
try:
for c in count:
if not isinstance(c, Synchronized):
raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = True
except TypeError:
if not isinstance(count, Synchronized):
raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = False
count = [count]
self.len = len(count)
# converts max_count to list and do type check
if max_count is not None:
if self.is_multi:
try:
for i, m in enumerate(max_count):
if not isinstance(m, Synchronized):
max_count[i] = UnsignedIntValue(m)
except TypeError:
raise TypeError("'max_count' must be iterable")
else:
if not isinstance(max_count, Synchronized):
max_count = UnsignedIntValue(max_count)
max_count = [max_count]
else:
max_count = [None] * self.len
self.start_time = []
self.speed_calc_cycles = speed_calc_cycles
self.width = width
self.q = []
self.prepend = []
self.lock = []
self.last_count = []
self.last_speed = []
for i in range(self.len):
self.q.append(myQueue()) # queue to save the last speed_calc_cycles
# (time, count) information to calculate speed
#self.q[-1].cancel_join_thread()
self.last_count.append(UnsignedIntValue())
self.last_speed.append(FloatValue())
self.lock.append(mp.Lock())
self.start_time.append(FloatValue(val=time.time()))
if prepend is None:
# no prepend given
self.prepend.append('')
else:
if isinstance(prepend, str):
self.prepend.append(prepend)
else:
# assume list of prepend, (needs to be a sequence)
self.prepend.append(prepend[i])
self.max_count = max_count # list of multiprocessing value type
self.count = count # list of multiprocessing value type
self.interval = interval
self.verbose = verbose
self.show_on_exit = False
self.add_args = {}
self.info_line = info_line
self.show_stat = show_stat
# setup loop class with func
Loop.__init__(self,
func = _show_stat_wrapper_multi_Progress,
args = (self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
show_stat,
self.len,
self.add_args,
self.lock,
self.info_line),
interval = interval,
sigint = sigint,
sigterm = sigterm,
auto_kill_on_last_resort = True)
def __exit__(self, *exc_args):
self.stop()
@staticmethod
def _calc(count,
last_count,
start_time,
max_count,
speed_calc_cycles,
q,
last_speed,
lock):
"""do the pre calculations in order to get TET, speed, TTG
:param count: count
:param last_count: count at the last call, allows to treat the case of no progress
between sequential calls
:param start_time: the time when start was triggered
:param max_count: the maximal value count
:type max_count:
:param speed_calc_cycles:
:type speed_calc_cycles:
:param q:
:type q:
:param last_speed:
:type last_speed:
:param lock:
:type lock:
"""
count_value = count.value
start_time_value = start_time.value
current_time = time.time()
if last_count.value != count_value:
# some progress happened
with lock:
# save current state (count, time) to queue
q.put((count_value, current_time))
# get older state from queue (or initial state)
# to to speed estimation
if q.qsize() > speed_calc_cycles:
old_count_value, old_time = q.get()
else:
old_count_value, old_time = 0, start_time_value
last_count.value = count_value
#last_old_count.value = old_count_value
#last_old_time.value = old_time
speed = (count_value - old_count_value) / (current_time - old_time)
last_speed.value = speed
else:
# progress has not changed since last call
# use also old (cached) data from the queue
#old_count_value, old_time = last_old_count.value, last_old_time.value
speed = last_speed.value
if (max_count is None):
max_count_value = None
else:
max_count_value = max_count.value
tet = (current_time - start_time_value)
if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
ttg = None
else:
ttg = math.ceil((max_count_value - count_value) / speed)
return count_value, max_count_value, speed, tet, ttg
def _reset_all(self):
"""
reset all progress information
"""
for i in range(self.len):
self._reset_i(i)
def _reset_i(self, i):
"""
reset i-th progress information
"""
self.count[i].value=0
log.debug("reset counter %s", i)
self.lock[i].acquire()
for x in range(self.q[i].qsize()):
self.q[i].get()
self.lock[i].release()
self.start_time[i].value = time.time()
def _show_stat(self):
"""
convenient functions to call the static show_stat_wrapper_multi with
the given class members
"""
_show_stat_wrapper_multi_Progress(self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
self.show_stat,
self.len,
self.add_args,
self.lock,
self.info_line,
no_move_up=True)
def reset(self, i = None):
"""resets the progress informaion
:param i: tell which progress to reset, if None reset all
:type i: None, int
"""
if i is None:
self._reset_all()
else:
self._reset_i(i)
def start(self):
"""
start
"""
# before printing any output to stout, we can now check this
# variable to see if any other ProgressBar has reserved that
# terminal.
if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
if not terminal.terminal_reserve(progress_obj=self):
log.warning("tty already reserved, NOT starting the progress loop!")
return
super(Progress, self).start()
self.show_on_exit = True
def stop(self):
"""
trigger clean up by hand, needs to be done when not using
context management via 'with' statement
- will terminate loop process
- show a last progress -> see the full 100% on exit
- releases terminal reservation
"""
super(Progress, self).stop()
terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
if self.show_on_exit:
if not isinstance(self.pipe_handler, PipeToPrint):
myout = inMemoryBuffer()
stdout = sys.stdout
sys.stdout = myout
self._show_stat()
self.pipe_handler(myout.getvalue())
sys.stdout = stdout
else:
self._show_stat()
print()
self.show_on_exit = False
|
cimatosa/progression | progression/progress.py | Progress._show_stat | python | def _show_stat(self):
_show_stat_wrapper_multi_Progress(self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
self.show_stat,
self.len,
self.add_args,
self.lock,
self.info_line,
no_move_up=True) | convenient functions to call the static show_stat_wrapper_multi with
the given class members | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L896-L915 | [
"def _show_stat_wrapper_multi_Progress(count, last_count, start_time, max_count, speed_calc_cycles, \n width, q, last_speed, prepend, show_stat_function, len_, \n add_args, lock, info_line, no_move_up=False):\n \"\"\"\n call the static method show_stat_wrapper for each process\n \"\"\"\n# print(ESC_BOLD, end='')\n# sys.stdout.flush()\n for i in range(len_):\n _show_stat_wrapper_Progress(count[i], last_count[i], start_time[i], max_count[i], speed_calc_cycles, \n width, q[i], last_speed[i], prepend[i], show_stat_function,\n add_args, i, lock[i])\n n = len_\n if info_line is not None:\n s = info_line.value.decode('utf-8')\n s = s.split('\\n')\n n += len(s)\n for si in s:\n if width == 'auto':\n width = get_terminal_width()\n if len(si) > width:\n si = si[:width]\n print(\"{0:<{1}}\".format(si, width))\n\n if no_move_up:\n n = 0\n # this is only a hack to find the end\n # of the message in a stream\n # so ESC_HIDDEN+ESC_NO_CHAR_ATTR is a magic ending\n print(terminal.ESC_MOVE_LINE_UP(n) + terminal.ESC_MY_MAGIC_ENDING, end='')\n sys.stdout.flush() \n"
] | class Progress(Loop):
"""
Abstract Progress Class
The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
function which calculates progress information from a changing counter value.
The formatting of these information is done by overwriting the static member
:py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
format a single progress bar on a single line only.
The extension to multiple progresses is done
automatically base on the formatting of a single line.
"""
def __init__(self,
count,
max_count = None,
prepend = None,
width = 'auto',
speed_calc_cycles = 10,
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
info_line = None,
show_stat = None):
"""
:param count: shared variable for holding the current state
(use :py:func:`UnsignedIntValue` for short hand creation)
:type count: list/single value of multiprocessing.Value
:param max_count: shared variable for holding the final state
:type max_count: None or list/single value of multiprocessing.Value
:param prepend: string to put in front of each progress output
:type prepend: None, str or list of str
:param width: the width to use for the progress line (fixed or automatically determined)
:type width: int or "auto"
:param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
(example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
the current state and state 4 updates before where the elapsed time will roughly be 4s)
:param interval: seconds to wait before updating the progress
:param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
output level
:param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigint: "stop" or "ign"
:param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigterm: "stop" or "ign"
:param info_line: additional text to show below the progress (use :py:func:`StringValue`
for short hand creation of shared strings)
:type info_line: None or multiprocessing.Array of characters
.. note::
As `Progress` is derived from :py:class:`Loop` it is highly encurraged to create
any instance of Progress with a context manager (``with`` statement).
This ensures that the subprocess showing the progress terminats on context exit.
Otherwise one has to make sure that at some point the stop() routine is called.
abstract example::
with AProgressClass(...) as p:
p.start()
# do stuff and modify counter
"""
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
# converts count to list and do type check
try:
for c in count:
if not isinstance(c, Synchronized):
raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = True
except TypeError:
if not isinstance(count, Synchronized):
raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = False
count = [count]
self.len = len(count)
# converts max_count to list and do type check
if max_count is not None:
if self.is_multi:
try:
for i, m in enumerate(max_count):
if not isinstance(m, Synchronized):
max_count[i] = UnsignedIntValue(m)
except TypeError:
raise TypeError("'max_count' must be iterable")
else:
if not isinstance(max_count, Synchronized):
max_count = UnsignedIntValue(max_count)
max_count = [max_count]
else:
max_count = [None] * self.len
self.start_time = []
self.speed_calc_cycles = speed_calc_cycles
self.width = width
self.q = []
self.prepend = []
self.lock = []
self.last_count = []
self.last_speed = []
for i in range(self.len):
self.q.append(myQueue()) # queue to save the last speed_calc_cycles
# (time, count) information to calculate speed
#self.q[-1].cancel_join_thread()
self.last_count.append(UnsignedIntValue())
self.last_speed.append(FloatValue())
self.lock.append(mp.Lock())
self.start_time.append(FloatValue(val=time.time()))
if prepend is None:
# no prepend given
self.prepend.append('')
else:
if isinstance(prepend, str):
self.prepend.append(prepend)
else:
# assume list of prepend, (needs to be a sequence)
self.prepend.append(prepend[i])
self.max_count = max_count # list of multiprocessing value type
self.count = count # list of multiprocessing value type
self.interval = interval
self.verbose = verbose
self.show_on_exit = False
self.add_args = {}
self.info_line = info_line
self.show_stat = show_stat
# setup loop class with func
Loop.__init__(self,
func = _show_stat_wrapper_multi_Progress,
args = (self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
show_stat,
self.len,
self.add_args,
self.lock,
self.info_line),
interval = interval,
sigint = sigint,
sigterm = sigterm,
auto_kill_on_last_resort = True)
def __exit__(self, *exc_args):
self.stop()
@staticmethod
def _calc(count,
last_count,
start_time,
max_count,
speed_calc_cycles,
q,
last_speed,
lock):
"""do the pre calculations in order to get TET, speed, TTG
:param count: count
:param last_count: count at the last call, allows to treat the case of no progress
between sequential calls
:param start_time: the time when start was triggered
:param max_count: the maximal value count
:type max_count:
:param speed_calc_cycles:
:type speed_calc_cycles:
:param q:
:type q:
:param last_speed:
:type last_speed:
:param lock:
:type lock:
"""
count_value = count.value
start_time_value = start_time.value
current_time = time.time()
if last_count.value != count_value:
# some progress happened
with lock:
# save current state (count, time) to queue
q.put((count_value, current_time))
# get older state from queue (or initial state)
# to to speed estimation
if q.qsize() > speed_calc_cycles:
old_count_value, old_time = q.get()
else:
old_count_value, old_time = 0, start_time_value
last_count.value = count_value
#last_old_count.value = old_count_value
#last_old_time.value = old_time
speed = (count_value - old_count_value) / (current_time - old_time)
last_speed.value = speed
else:
# progress has not changed since last call
# use also old (cached) data from the queue
#old_count_value, old_time = last_old_count.value, last_old_time.value
speed = last_speed.value
if (max_count is None):
max_count_value = None
else:
max_count_value = max_count.value
tet = (current_time - start_time_value)
if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
ttg = None
else:
ttg = math.ceil((max_count_value - count_value) / speed)
return count_value, max_count_value, speed, tet, ttg
def _reset_all(self):
"""
reset all progress information
"""
for i in range(self.len):
self._reset_i(i)
def _reset_i(self, i):
"""
reset i-th progress information
"""
self.count[i].value=0
log.debug("reset counter %s", i)
self.lock[i].acquire()
for x in range(self.q[i].qsize()):
self.q[i].get()
self.lock[i].release()
self.start_time[i].value = time.time()
def reset(self, i = None):
"""resets the progress informaion
:param i: tell which progress to reset, if None reset all
:type i: None, int
"""
if i is None:
self._reset_all()
else:
self._reset_i(i)
def start(self):
"""
start
"""
# before printing any output to stout, we can now check this
# variable to see if any other ProgressBar has reserved that
# terminal.
if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
if not terminal.terminal_reserve(progress_obj=self):
log.warning("tty already reserved, NOT starting the progress loop!")
return
super(Progress, self).start()
self.show_on_exit = True
def stop(self):
"""
trigger clean up by hand, needs to be done when not using
context management via 'with' statement
- will terminate loop process
- show a last progress -> see the full 100% on exit
- releases terminal reservation
"""
super(Progress, self).stop()
terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
if self.show_on_exit:
if not isinstance(self.pipe_handler, PipeToPrint):
myout = inMemoryBuffer()
stdout = sys.stdout
sys.stdout = myout
self._show_stat()
self.pipe_handler(myout.getvalue())
sys.stdout = stdout
else:
self._show_stat()
print()
self.show_on_exit = False
|
cimatosa/progression | progression/progress.py | Progress.start | python | def start(self):
# before printing any output to stout, we can now check this
# variable to see if any other ProgressBar has reserved that
# terminal.
if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
if not terminal.terminal_reserve(progress_obj=self):
log.warning("tty already reserved, NOT starting the progress loop!")
return
super(Progress, self).start()
self.show_on_exit = True | start | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L931-L945 | [
"def start(self, timeout=None):\n \"\"\"\n uses multiprocess Process to call _wrapper_func in subprocess \n \"\"\"\n\n if self.is_alive():\n log.warning(\"a process with pid %s is already running\", self._proc.pid)\n return\n\n self._run.value = True\n self._func_running.value = False\n name = self.__class__.__name__\n\n self.conn_recv, self.conn_send = mp.Pipe(False)\n self._monitor_thread = threading.Thread(target = self._monitor_stdout_pipe)\n self._monitor_thread.daemon=True\n self._monitor_thread.start()\n log.debug(\"started monitor thread\")\n self._log_queue = mp.Queue()\n self._log_queue_listener = QueueListener(self._log_queue, *log.handlers)\n self._log_queue_listener.start()\n\n args = (self.func, self.args, self._run, self._pause, self.interval,\n self._sigint, self._sigterm, name, log.level, self.conn_send, \n self._func_running, self._log_queue)\n\n self._proc = mp.Process(target = _loop_wrapper_func,\n args = args)\n self._proc.start()\n log.info(\"started a new process with pid %s\", self._proc.pid)\n log.debug(\"wait for loop function to come up\")\n t0 = time.time()\n while not self._func_running.value:\n if self._proc.exitcode is not None:\n exc = self._proc.exitcode\n self._proc = None\n if exc == 0:\n log.warning(\"wrapper function already terminated with exitcode 0\\nloop is not running\")\n return\n else:\n raise LoopExceptionError(\"the loop function return non zero exticode ({})!\\n\".format(exc)+\n \"see log (INFO level) for traceback information\")\n\n time.sleep(0.1) \n if (timeout is not None) and ((time.time() - t0) > timeout):\n err_msg = \"could not bring up function on time (timeout: {}s)\".format(timeout)\n log.error(err_msg)\n log.info(\"either it takes too long to spawn the subprocess (increase the timeout)\\n\"+\n \"or an internal error occurred before reaching the function call\")\n raise LoopTimeoutError(err_msg)\n\n log.debug(\"loop function is up ({})\".format(humanize_time(time.time()-t0)))\n"
] | class Progress(Loop):
"""
Abstract Progress Class
The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
function which calculates progress information from a changing counter value.
The formatting of these information is done by overwriting the static member
:py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
format a single progress bar on a single line only.
The extension to multiple progresses is done
automatically base on the formatting of a single line.
"""
def __init__(self,
count,
max_count = None,
prepend = None,
width = 'auto',
speed_calc_cycles = 10,
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
info_line = None,
show_stat = None):
"""
:param count: shared variable for holding the current state
(use :py:func:`UnsignedIntValue` for short hand creation)
:type count: list/single value of multiprocessing.Value
:param max_count: shared variable for holding the final state
:type max_count: None or list/single value of multiprocessing.Value
:param prepend: string to put in front of each progress output
:type prepend: None, str or list of str
:param width: the width to use for the progress line (fixed or automatically determined)
:type width: int or "auto"
:param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
(example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
the current state and state 4 updates before where the elapsed time will roughly be 4s)
:param interval: seconds to wait before updating the progress
:param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
output level
:param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigint: "stop" or "ign"
:param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigterm: "stop" or "ign"
:param info_line: additional text to show below the progress (use :py:func:`StringValue`
for short hand creation of shared strings)
:type info_line: None or multiprocessing.Array of characters
.. note::
As `Progress` is derived from :py:class:`Loop` it is highly encurraged to create
any instance of Progress with a context manager (``with`` statement).
This ensures that the subprocess showing the progress terminats on context exit.
Otherwise one has to make sure that at some point the stop() routine is called.
abstract example::
with AProgressClass(...) as p:
p.start()
# do stuff and modify counter
"""
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
# converts count to list and do type check
try:
for c in count:
if not isinstance(c, Synchronized):
raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = True
except TypeError:
if not isinstance(count, Synchronized):
raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = False
count = [count]
self.len = len(count)
# converts max_count to list and do type check
if max_count is not None:
if self.is_multi:
try:
for i, m in enumerate(max_count):
if not isinstance(m, Synchronized):
max_count[i] = UnsignedIntValue(m)
except TypeError:
raise TypeError("'max_count' must be iterable")
else:
if not isinstance(max_count, Synchronized):
max_count = UnsignedIntValue(max_count)
max_count = [max_count]
else:
max_count = [None] * self.len
self.start_time = []
self.speed_calc_cycles = speed_calc_cycles
self.width = width
self.q = []
self.prepend = []
self.lock = []
self.last_count = []
self.last_speed = []
for i in range(self.len):
self.q.append(myQueue()) # queue to save the last speed_calc_cycles
# (time, count) information to calculate speed
#self.q[-1].cancel_join_thread()
self.last_count.append(UnsignedIntValue())
self.last_speed.append(FloatValue())
self.lock.append(mp.Lock())
self.start_time.append(FloatValue(val=time.time()))
if prepend is None:
# no prepend given
self.prepend.append('')
else:
if isinstance(prepend, str):
self.prepend.append(prepend)
else:
# assume list of prepend, (needs to be a sequence)
self.prepend.append(prepend[i])
self.max_count = max_count # list of multiprocessing value type
self.count = count # list of multiprocessing value type
self.interval = interval
self.verbose = verbose
self.show_on_exit = False
self.add_args = {}
self.info_line = info_line
self.show_stat = show_stat
# setup loop class with func
Loop.__init__(self,
func = _show_stat_wrapper_multi_Progress,
args = (self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
show_stat,
self.len,
self.add_args,
self.lock,
self.info_line),
interval = interval,
sigint = sigint,
sigterm = sigterm,
auto_kill_on_last_resort = True)
def __exit__(self, *exc_args):
self.stop()
@staticmethod
def _calc(count,
last_count,
start_time,
max_count,
speed_calc_cycles,
q,
last_speed,
lock):
"""do the pre calculations in order to get TET, speed, TTG
:param count: count
:param last_count: count at the last call, allows to treat the case of no progress
between sequential calls
:param start_time: the time when start was triggered
:param max_count: the maximal value count
:type max_count:
:param speed_calc_cycles:
:type speed_calc_cycles:
:param q:
:type q:
:param last_speed:
:type last_speed:
:param lock:
:type lock:
"""
count_value = count.value
start_time_value = start_time.value
current_time = time.time()
if last_count.value != count_value:
# some progress happened
with lock:
# save current state (count, time) to queue
q.put((count_value, current_time))
# get older state from queue (or initial state)
# to to speed estimation
if q.qsize() > speed_calc_cycles:
old_count_value, old_time = q.get()
else:
old_count_value, old_time = 0, start_time_value
last_count.value = count_value
#last_old_count.value = old_count_value
#last_old_time.value = old_time
speed = (count_value - old_count_value) / (current_time - old_time)
last_speed.value = speed
else:
# progress has not changed since last call
# use also old (cached) data from the queue
#old_count_value, old_time = last_old_count.value, last_old_time.value
speed = last_speed.value
if (max_count is None):
max_count_value = None
else:
max_count_value = max_count.value
tet = (current_time - start_time_value)
if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
ttg = None
else:
ttg = math.ceil((max_count_value - count_value) / speed)
return count_value, max_count_value, speed, tet, ttg
def _reset_all(self):
"""
reset all progress information
"""
for i in range(self.len):
self._reset_i(i)
def _reset_i(self, i):
"""
reset i-th progress information
"""
self.count[i].value=0
log.debug("reset counter %s", i)
self.lock[i].acquire()
for x in range(self.q[i].qsize()):
self.q[i].get()
self.lock[i].release()
self.start_time[i].value = time.time()
def _show_stat(self):
"""
convenient functions to call the static show_stat_wrapper_multi with
the given class members
"""
_show_stat_wrapper_multi_Progress(self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
self.show_stat,
self.len,
self.add_args,
self.lock,
self.info_line,
no_move_up=True)
def reset(self, i = None):
"""resets the progress informaion
:param i: tell which progress to reset, if None reset all
:type i: None, int
"""
if i is None:
self._reset_all()
else:
self._reset_i(i)
def start(self):
"""
start
"""
# before printing any output to stout, we can now check this
# variable to see if any other ProgressBar has reserved that
# terminal.
if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
if not terminal.terminal_reserve(progress_obj=self):
log.warning("tty already reserved, NOT starting the progress loop!")
return
super(Progress, self).start()
self.show_on_exit = True
def stop(self):
"""
trigger clean up by hand, needs to be done when not using
context management via 'with' statement
- will terminate loop process
- show a last progress -> see the full 100% on exit
- releases terminal reservation
"""
super(Progress, self).stop()
terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
if self.show_on_exit:
if not isinstance(self.pipe_handler, PipeToPrint):
myout = inMemoryBuffer()
stdout = sys.stdout
sys.stdout = myout
self._show_stat()
self.pipe_handler(myout.getvalue())
sys.stdout = stdout
else:
self._show_stat()
print()
self.show_on_exit = False
|
cimatosa/progression | progression/progress.py | Progress.stop | python | def stop(self):
super(Progress, self).stop()
terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
if self.show_on_exit:
if not isinstance(self.pipe_handler, PipeToPrint):
myout = inMemoryBuffer()
stdout = sys.stdout
sys.stdout = myout
self._show_stat()
self.pipe_handler(myout.getvalue())
sys.stdout = stdout
else:
self._show_stat()
print()
self.show_on_exit = False | trigger clean up by hand, needs to be done when not using
context management via 'with' statement
- will terminate loop process
- show a last progress -> see the full 100% on exit
- releases terminal reservation | train | https://github.com/cimatosa/progression/blob/82cf74a25a47f9bda96157cc2c88e5975c20b41d/progression/progress.py#L947-L970 | [
"def stop(self):\n \"\"\"\n stops the process triggered by start\n\n Setting the shared memory boolean run to false, which should prevent\n the loop from repeating. Call __cleanup to make sure the process\n stopped. After that we could trigger start() again.\n \"\"\" \n if self.is_alive():\n self._proc.terminate()\n\n if self._proc is not None:\n self.__cleanup()\n\n if self.raise_error:\n if self._proc.exitcode == 255:\n raise LoopExceptionError(\"the loop function return non zero exticode ({})!\\n\".format(self._proc.exitcode)+\n \"see log (INFO level) for traceback information\")\n self.pipe_handler.close()\n self._proc = None\n",
"def _show_stat(self):\n \"\"\"\n convenient functions to call the static show_stat_wrapper_multi with\n the given class members\n \"\"\"\n _show_stat_wrapper_multi_Progress(self.count,\n self.last_count, \n self.start_time, \n self.max_count, \n self.speed_calc_cycles,\n self.width,\n self.q,\n self.last_speed,\n self.prepend,\n self.show_stat,\n self.len, \n self.add_args,\n self.lock,\n self.info_line,\n no_move_up=True)\n"
] | class Progress(Loop):
"""
Abstract Progress Class
The :py:class:`Progress` Class uses :py:class:`Loop` to provide a repeating
function which calculates progress information from a changing counter value.
The formatting of these information is done by overwriting the static member
:py:func:`Progress.show_stat`. :py:func:`Progress.show_stat` is intended to
format a single progress bar on a single line only.
The extension to multiple progresses is done
automatically base on the formatting of a single line.
"""
def __init__(self,
count,
max_count = None,
prepend = None,
width = 'auto',
speed_calc_cycles = 10,
interval = 1,
verbose = None,
sigint = 'stop',
sigterm = 'stop',
info_line = None,
show_stat = None):
"""
:param count: shared variable for holding the current state
(use :py:func:`UnsignedIntValue` for short hand creation)
:type count: list/single value of multiprocessing.Value
:param max_count: shared variable for holding the final state
:type max_count: None or list/single value of multiprocessing.Value
:param prepend: string to put in front of each progress output
:type prepend: None, str or list of str
:param width: the width to use for the progress line (fixed or automatically determined)
:type width: int or "auto"
:param speed_calc_cycles: number of updated (cycles) to use for estimating the speed
(example: ``speed_calc_sycles = 4`` and ``interval = 1`` means that the speed is estimated from
the current state and state 4 updates before where the elapsed time will roughly be 4s)
:param interval: seconds to wait before updating the progress
:param verbose: DEPRECATED: has no effect, use the global ``log.setLevel()`` to control the
output level
:param sigint: behavior of the subprocess on signal ``SIGINT`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigint: "stop" or "ign"
:param sigterm: behavior of the subprocess on signal ``SIGTERM`` (``"stop"`` triggers
``SystemExit`` whereas ``"ign"`` ignores the signal)
:type sigterm: "stop" or "ign"
:param info_line: additional text to show below the progress (use :py:func:`StringValue`
for short hand creation of shared strings)
:type info_line: None or multiprocessing.Array of characters
.. note::
As `Progress` is derived from :py:class:`Loop` it is highly encouraged to create
any instance of Progress with a context manager (``with`` statement).
This ensures that the subprocess showing the progress terminates on context exit.
Otherwise one has to make sure that at some point the stop() routine is called.
abstract example::
with AProgressClass(...) as p:
p.start()
# do stuff and modify counter
"""
if verbose is not None:
log.warning("verbose is deprecated, only allowed for compatibility")
warnings.warn("verbose is deprecated", DeprecationWarning)
# converts count to list and do type check
try:
for c in count:
if not isinstance(c, Synchronized):
raise ValueError("Each element of 'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = True
except TypeError:
if not isinstance(count, Synchronized):
raise ValueError("'count' must be if the type multiprocessing.sharedctypes.Synchronized")
self.is_multi = False
count = [count]
self.len = len(count)
# converts max_count to list and do type check
if max_count is not None:
if self.is_multi:
try:
for i, m in enumerate(max_count):
if not isinstance(m, Synchronized):
max_count[i] = UnsignedIntValue(m)
except TypeError:
raise TypeError("'max_count' must be iterable")
else:
if not isinstance(max_count, Synchronized):
max_count = UnsignedIntValue(max_count)
max_count = [max_count]
else:
max_count = [None] * self.len
self.start_time = []
self.speed_calc_cycles = speed_calc_cycles
self.width = width
self.q = []
self.prepend = []
self.lock = []
self.last_count = []
self.last_speed = []
for i in range(self.len):
self.q.append(myQueue()) # queue to save the last speed_calc_cycles
# (time, count) information to calculate speed
#self.q[-1].cancel_join_thread()
self.last_count.append(UnsignedIntValue())
self.last_speed.append(FloatValue())
self.lock.append(mp.Lock())
self.start_time.append(FloatValue(val=time.time()))
if prepend is None:
# no prepend given
self.prepend.append('')
else:
if isinstance(prepend, str):
self.prepend.append(prepend)
else:
# assume list of prepend, (needs to be a sequence)
self.prepend.append(prepend[i])
self.max_count = max_count # list of multiprocessing value type
self.count = count # list of multiprocessing value type
self.interval = interval
self.verbose = verbose
self.show_on_exit = False
self.add_args = {}
self.info_line = info_line
self.show_stat = show_stat
# setup loop class with func
Loop.__init__(self,
func = _show_stat_wrapper_multi_Progress,
args = (self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
show_stat,
self.len,
self.add_args,
self.lock,
self.info_line),
interval = interval,
sigint = sigint,
sigterm = sigterm,
auto_kill_on_last_resort = True)
def __exit__(self, *exc_args):
self.stop()
@staticmethod
def _calc(count,
last_count,
start_time,
max_count,
speed_calc_cycles,
q,
last_speed,
lock):
"""do the pre calculations in order to get TET, speed, TTG
:param count: count
:param last_count: count at the last call, allows to treat the case of no progress
between sequential calls
:param start_time: the time when start was triggered
:param max_count: the maximal value count
:type max_count:
:param speed_calc_cycles:
:type speed_calc_cycles:
:param q:
:type q:
:param last_speed:
:type last_speed:
:param lock:
:type lock:
"""
count_value = count.value
start_time_value = start_time.value
current_time = time.time()
if last_count.value != count_value:
# some progress happened
with lock:
# save current state (count, time) to queue
q.put((count_value, current_time))
# get older state from queue (or initial state)
# to to speed estimation
if q.qsize() > speed_calc_cycles:
old_count_value, old_time = q.get()
else:
old_count_value, old_time = 0, start_time_value
last_count.value = count_value
#last_old_count.value = old_count_value
#last_old_time.value = old_time
speed = (count_value - old_count_value) / (current_time - old_time)
last_speed.value = speed
else:
# progress has not changed since last call
# use also old (cached) data from the queue
#old_count_value, old_time = last_old_count.value, last_old_time.value
speed = last_speed.value
if (max_count is None):
max_count_value = None
else:
max_count_value = max_count.value
tet = (current_time - start_time_value)
if (speed == 0) or (max_count_value is None) or (max_count_value == 0):
ttg = None
else:
ttg = math.ceil((max_count_value - count_value) / speed)
return count_value, max_count_value, speed, tet, ttg
def _reset_all(self):
"""
reset all progress information
"""
for i in range(self.len):
self._reset_i(i)
def _reset_i(self, i):
"""
reset i-th progress information
"""
self.count[i].value=0
log.debug("reset counter %s", i)
self.lock[i].acquire()
for x in range(self.q[i].qsize()):
self.q[i].get()
self.lock[i].release()
self.start_time[i].value = time.time()
def _show_stat(self):
"""
convenient functions to call the static show_stat_wrapper_multi with
the given class members
"""
_show_stat_wrapper_multi_Progress(self.count,
self.last_count,
self.start_time,
self.max_count,
self.speed_calc_cycles,
self.width,
self.q,
self.last_speed,
self.prepend,
self.show_stat,
self.len,
self.add_args,
self.lock,
self.info_line,
no_move_up=True)
def reset(self, i = None):
"""resets the progress informaion
:param i: tell which progress to reset, if None reset all
:type i: None, int
"""
if i is None:
self._reset_all()
else:
self._reset_i(i)
def start(self):
"""
start
"""
# before printing any output to stout, we can now check this
# variable to see if any other ProgressBar has reserved that
# terminal.
if (self.__class__.__name__ in terminal.TERMINAL_PRINT_LOOP_CLASSES):
if not terminal.terminal_reserve(progress_obj=self):
log.warning("tty already reserved, NOT starting the progress loop!")
return
super(Progress, self).start()
self.show_on_exit = True
def stop(self):
"""
trigger clean up by hand, needs to be done when not using
context management via 'with' statement
- will terminate loop process
- show a last progress -> see the full 100% on exit
- releases terminal reservation
"""
super(Progress, self).stop()
terminal.terminal_unreserve(progress_obj=self, verbose=self.verbose)
if self.show_on_exit:
if not isinstance(self.pipe_handler, PipeToPrint):
myout = inMemoryBuffer()
stdout = sys.stdout
sys.stdout = myout
self._show_stat()
self.pipe_handler(myout.getvalue())
sys.stdout = stdout
else:
self._show_stat()
print()
self.show_on_exit = False
|
def blend(c1, c2):
    """Alpha blend two RGBA colors, using the alpha given by c2.

    Returns a 3-element list of blended R, G, B values (alpha dropped).
    """
    alpha = c2[3]
    inverse = 0xFF - alpha
    blended = []
    for channel in range(3):
        blended.append((c1[channel] * inverse + c2[channel] * alpha) >> 8)
    return blended
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
__version__ = "1.0.3"
__license__ = "MIT"
import struct
import sys
import zlib
# Py2 - Py3 compatibility
if sys.version < '3':
range = xrange # NOQA
# Color types: see table 6.1 "PNG image types and colour types"
COLOR_TYPE_GRAYSCALE = 0
COLOR_TYPE_TRUECOLOR = 2
COLOR_TYPE_INDEXED_COLOR = 3
COLOR_TYPE_GRAYSCALE_WITH_ALPHA = 4
COLOR_TYPE_TRUECOLOR_WITH_ALPHA = 6
SIGNATURE = struct.pack(b"8B", 137, 80, 78, 71, 13, 10, 26, 10)
def force_int(*args):
return tuple(int(x) for x in args)
def intensity(c, i):
"""Compute a new alpha given a 0-0xFF intensity"""
return [c[0], c[1], c[2], (c[3] * i) >> 8]
def grayscale(c):
"""Compute perceptive grayscale value"""
return int(c[0] * 0.3 + c[1] * 0.59 + c[2] * 0.11)
def gradient_list(start, end, steps):
"""Compute gradient colors"""
delta = [end[i] - start[i] for i in range(4)]
return [bytearray(start[j] + (delta[j] * i) // steps for j in range(4))
for i in range(steps + 1)]
def rgb2rgba(rgb):
"""Take a row of RGB bytes, and convert to a row of RGBA bytes."""
rgba = []
for i in range(0, len(rgb), 3):
rgba += rgb[i:i+3]
rgba.append(255)
return rgba
class ByteReader(object):
def __init__(self, chunks):
self.chunks = chunks
self.decoded = b''
self.decompressor = zlib.decompressobj()
def read(self, num_bytes):
"""Read `num_bytes` from the compressed data chunks.
Data is returned as `bytes` of length `num_bytes`
Will raise an EOFError if data is unavailable.
Note: Will always return `num_bytes` of data (unlike the file read method).
"""
while len(self.decoded) < num_bytes:
try:
tag, data = next(self.chunks)
except StopIteration:
raise EOFError()
if tag != b'IDAT':
continue
self.decoded += self.decompressor.decompress(data)
r = self.decoded[:num_bytes]
self.decoded = self.decoded[num_bytes:]
return r
class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
def gradient_list(start, end, steps):
    """Compute gradient colors.

    Returns ``steps + 1`` bytearrays of 4 channels each, linearly
    interpolated from ``start`` to ``end`` (inclusive at both ends).
    """
    colors = []
    for step in range(steps + 1):
        color = bytearray(4)
        for channel in range(4):
            delta = end[channel] - start[channel]
            color[channel] = start[channel] + (delta * step) // steps
        colors.append(color)
    return colors
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
__version__ = "1.0.3"
__license__ = "MIT"
import struct
import sys
import zlib
# Py2 - Py3 compatibility
if sys.version < '3':
range = xrange # NOQA
# Color types: see table 6.1 "PNG image types and colour types"
COLOR_TYPE_GRAYSCALE = 0
COLOR_TYPE_TRUECOLOR = 2
COLOR_TYPE_INDEXED_COLOR = 3
COLOR_TYPE_GRAYSCALE_WITH_ALPHA = 4
COLOR_TYPE_TRUECOLOR_WITH_ALPHA = 6
SIGNATURE = struct.pack(b"8B", 137, 80, 78, 71, 13, 10, 26, 10)
def force_int(*args):
return tuple(int(x) for x in args)
def blend(c1, c2):
"""Alpha blends two colors, using the alpha given by c2"""
return [c1[i] * (0xFF - c2[3]) + c2[i] * c2[3] >> 8 for i in range(3)]
def intensity(c, i):
"""Compute a new alpha given a 0-0xFF intensity"""
return [c[0], c[1], c[2], (c[3] * i) >> 8]
def grayscale(c):
"""Compute perceptive grayscale value"""
return int(c[0] * 0.3 + c[1] * 0.59 + c[2] * 0.11)
def rgb2rgba(rgb):
"""Take a row of RGB bytes, and convert to a row of RGBA bytes."""
rgba = []
for i in range(0, len(rgb), 3):
rgba += rgb[i:i+3]
rgba.append(255)
return rgba
class ByteReader(object):
def __init__(self, chunks):
self.chunks = chunks
self.decoded = b''
self.decompressor = zlib.decompressobj()
def read(self, num_bytes):
"""Read `num_bytes` from the compressed data chunks.
Data is returned as `bytes` of length `num_bytes`
Will raise an EOFError if data is unavailable.
Note: Will always return `num_bytes` of data (unlike the file read method).
"""
while len(self.decoded) < num_bytes:
try:
tag, data = next(self.chunks)
except StopIteration:
raise EOFError()
if tag != b'IDAT':
continue
self.decoded += self.decompressor.decompress(data)
r = self.decoded[:num_bytes]
self.decoded = self.decoded[num_bytes:]
return r
class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
def rgb2rgba(rgb):
    """Take a row of RGB bytes, and convert to a row of RGBA bytes.

    Each RGB triple is copied and followed by a fully opaque (255)
    alpha sample; the result is a plain list of ints.
    """
    out = []
    for offset in range(0, len(rgb), 3):
        out.extend(rgb[offset:offset + 3])
        out.append(255)
    return out
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
__version__ = "1.0.3"
__license__ = "MIT"
import struct
import sys
import zlib
# Py2 - Py3 compatibility
if sys.version < '3':
range = xrange # NOQA
# Color types: see table 6.1 "PNG image types and colour types"
COLOR_TYPE_GRAYSCALE = 0
COLOR_TYPE_TRUECOLOR = 2
COLOR_TYPE_INDEXED_COLOR = 3
COLOR_TYPE_GRAYSCALE_WITH_ALPHA = 4
COLOR_TYPE_TRUECOLOR_WITH_ALPHA = 6
SIGNATURE = struct.pack(b"8B", 137, 80, 78, 71, 13, 10, 26, 10)
def force_int(*args):
return tuple(int(x) for x in args)
def blend(c1, c2):
"""Alpha blends two colors, using the alpha given by c2"""
return [c1[i] * (0xFF - c2[3]) + c2[i] * c2[3] >> 8 for i in range(3)]
def intensity(c, i):
"""Compute a new alpha given a 0-0xFF intensity"""
return [c[0], c[1], c[2], (c[3] * i) >> 8]
def grayscale(c):
"""Compute perceptive grayscale value"""
return int(c[0] * 0.3 + c[1] * 0.59 + c[2] * 0.11)
def gradient_list(start, end, steps):
"""Compute gradient colors"""
delta = [end[i] - start[i] for i in range(4)]
return [bytearray(start[j] + (delta[j] * i) // steps for j in range(4))
for i in range(steps + 1)]
class ByteReader(object):
def __init__(self, chunks):
self.chunks = chunks
self.decoded = b''
self.decompressor = zlib.decompressobj()
def read(self, num_bytes):
"""Read `num_bytes` from the compressed data chunks.
Data is returned as `bytes` of length `num_bytes`
Will raise an EOFError if data is unavailable.
Note: Will always return `num_bytes` of data (unlike the file read method).
"""
while len(self.decoded) < num_bytes:
try:
tag, data = next(self.chunks)
except StopIteration:
raise EOFError()
if tag != b'IDAT':
continue
self.decoded += self.decompressor.decompress(data)
r = self.decoded[:num_bytes]
self.decoded = self.decoded[num_bytes:]
return r
class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | ByteReader.read | python | def read(self, num_bytes):
while len(self.decoded) < num_bytes:
try:
tag, data = next(self.chunks)
except StopIteration:
raise EOFError()
if tag != b'IDAT':
continue
self.decoded += self.decompressor.decompress(data)
r = self.decoded[:num_bytes]
self.decoded = self.decoded[num_bytes:]
return r | Read `num_bytes` from the compressed data chunks.
Data is returned as `bytes` of length `num_bytes`
Will raise an EOFError if data is unavailable.
Note: Will always return `num_bytes` of data (unlike the file read method). | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L74-L95 | null | class ByteReader(object):
def __init__(self, chunks):
self.chunks = chunks
self.decoded = b''
self.decompressor = zlib.decompressobj()
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas._offset | python | def _offset(self, x, y):
x, y = force_int(x, y)
return y * self.width * 4 + x * 4 | Helper for internal data | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L108-L111 | [
"def force_int(*args):\n return tuple(int(x) for x in args)\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.point | python | def point(self, x, y, color=None):
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color)) | Set a pixel | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L113-L121 | [
"def blend(c1, c2):\n \"\"\"Alpha blends two colors, using the alpha given by c2\"\"\"\n return [c1[i] * (0xFF - c2[3]) + c2[i] * c2[3] >> 8 for i in range(3)]\n",
"def _offset(self, x, y):\n \"\"\"Helper for internal data\"\"\"\n x, y = force_int(x, y)\n return y * self.width * 4 + x * 4\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.rect_helper | python | def rect_helper(x0, y0, x1, y1):
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1 | Rectangle helper | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L124-L131 | [
"def force_int(*args):\n return tuple(int(x) for x in args)\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.vertical_gradient | python | def vertical_gradient(self, x0, y0, x1, y1, start, end):
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0]) | Draw a vertical gradient | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L133-L139 | [
"def gradient_list(start, end, steps):\n \"\"\"Compute gradient colors\"\"\"\n delta = [end[i] - start[i] for i in range(4)]\n return [bytearray(start[j] + (delta[j] * i) // steps for j in range(4))\n for i in range(steps + 1)]\n",
"def point(self, x, y, color=None):\n \"\"\"Set a pixel\"\"\"\n if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:\n return\n if color is None:\n color = self.color\n o = self._offset(x, y)\n\n self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))\n",
"def rect_helper(x0, y0, x1, y1):\n \"\"\"Rectangle helper\"\"\"\n x0, y0, x1, y1 = force_int(x0, y0, x1, y1)\n if x0 > x1:\n x0, x1 = x1, x0\n if y0 > y1:\n y0, y1 = y1, y0\n return x0, y0, x1, y1\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.rectangle | python | def rectangle(self, x0, y0, x1, y1):
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]]) | Draw a rectangle | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L141-L144 | [
"def rect_helper(x0, y0, x1, y1):\n \"\"\"Rectangle helper\"\"\"\n x0, y0, x1, y1 = force_int(x0, y0, x1, y1)\n if x0 > x1:\n x0, x1 = x1, x0\n if y0 > y1:\n y0, y1 = y1, y0\n return x0, y0, x1, y1\n",
"def polyline(self, arr):\n \"\"\"Draw a set of lines\"\"\"\n for i in range(0, len(arr) - 1):\n self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.filled_rectangle | python | def filled_rectangle(self, x0, y0, x1, y1):
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color) | Draw a filled rectangle | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L146-L151 | [
"def point(self, x, y, color=None):\n \"\"\"Set a pixel\"\"\"\n if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:\n return\n if color is None:\n color = self.color\n o = self._offset(x, y)\n\n self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))\n",
"def rect_helper(x0, y0, x1, y1):\n \"\"\"Rectangle helper\"\"\"\n x0, y0, x1, y1 = force_int(x0, y0, x1, y1)\n if x0 > x1:\n x0, x1 = x1, x0\n if y0 > y1:\n y0, y1 = y1, y0\n return x0, y0, x1, y1\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.copy_rect | python | def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4] | Copy (blit) a rectangle onto another part of the image | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L153-L162 | [
"def force_int(*args):\n return tuple(int(x) for x in args)\n",
"def _offset(self, x, y):\n \"\"\"Helper for internal data\"\"\"\n x, y = force_int(x, y)\n return y * self.width * 4 + x * 4\n",
"def rect_helper(x0, y0, x1, y1):\n \"\"\"Rectangle helper\"\"\"\n x0, y0, x1, y1 = force_int(x0, y0, x1, y1)\n if x0 > x1:\n x0, x1 = x1, x0\n if y0 > y1:\n y0, y1 = y1, y0\n return x0, y0, x1, y1\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.blend_rect | python | def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba) | Blend a rectangle onto the image | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L164-L172 | [
"def _offset(self, x, y):\n \"\"\"Helper for internal data\"\"\"\n x, y = force_int(x, y)\n return y * self.width * 4 + x * 4\n",
"def point(self, x, y, color=None):\n \"\"\"Set a pixel\"\"\"\n if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:\n return\n if color is None:\n color = self.color\n o = self._offset(x, y)\n\n self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))\n",
"def rect_helper(x0, y0, x1, y1):\n \"\"\"Rectangle helper\"\"\"\n x0, y0, x1, y1 = force_int(x0, y0, x1, y1)\n if x0 > x1:\n x0, x1 = x1, x0\n if y0 > y1:\n y0, y1 = y1, y0\n return x0, y0, x1, y1\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.line | python | def line(self, x0, y0, x1, y1):
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1) | Draw a line using Xiaolin Wu's antialiasing technique | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L174-L230 | null | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.polyline | python | def polyline(self, arr):
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1]) | Draw a set of lines | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L232-L235 | [
"def line(self, x0, y0, x1, y1):\n \"\"\"Draw a line using Xiaolin Wu's antialiasing technique\"\"\"\n # clean params\n x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)\n if y0 > y1:\n y0, y1, x0, x1 = y1, y0, x1, x0\n dx = x1 - x0\n if dx < 0:\n sx = -1\n else:\n sx = 1\n dx *= sx\n dy = y1 - y0\n\n # 'easy' cases\n if dy == 0:\n for x in range(x0, x1, sx):\n self.point(x, y0)\n return\n if dx == 0:\n for y in range(y0, y1):\n self.point(x0, y)\n self.point(x1, y1)\n return\n if dx == dy:\n for x in range(x0, x1, sx):\n self.point(x, y0)\n y0 += 1\n return\n\n # main loop\n self.point(x0, y0)\n e_acc = 0\n if dy > dx: # vertical displacement\n e = (dx << 16) // dy\n for i in range(y0, y1 - 1):\n e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF\n if e_acc <= e_acc_temp:\n x0 += sx\n w = 0xFF-(e_acc >> 8)\n self.point(x0, y0, intensity(self.color, w))\n y0 += 1\n self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))\n self.point(x1, y1)\n return\n\n # horizontal displacement\n e = (dy << 16) // dx\n for i in range(x0, x1 - sx, sx):\n e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF\n if e_acc <= e_acc_temp:\n y0 += 1\n w = 0xFF-(e_acc >> 8)\n self.point(x0, y0, intensity(self.color, w))\n x0 += sx\n self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))\n self.point(x1, y1)\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.dump | python | def dump(self):
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'') | Dump the image data | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L237-L251 | [
"def pack_chunk(tag, data):\n \"\"\"Pack a PNG chunk for serializing to disk\"\"\"\n to_check = tag + data\n return (struct.pack(b\"!I\", len(data)) + to_check +\n struct.pack(b\"!I\", zlib.crc32(to_check) & 0xFFFFFFFF))\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.pack_chunk | python | def pack_chunk(tag, data):
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF)) | Pack a PNG chunk for serializing to disk | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L254-L258 | null | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.load | python | def load(self, f):
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row | Load a PNG image | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L260-L307 | [
"def rgb2rgba(rgb):\n \"\"\"Take a row of RGB bytes, and convert to a row of RGBA bytes.\"\"\"\n rgba = []\n for i in range(0, len(rgb), 3):\n rgba += rgb[i:i+3]\n rgba.append(255)\n\n return rgba\n",
"def read(self, num_bytes):\n \"\"\"Read `num_bytes` from the compressed data chunks.\n\n Data is returned as `bytes` of length `num_bytes`\n\n Will raise an EOFError if data is unavailable.\n\n Note: Will always return `num_bytes` of data (unlike the file read method).\n\n \"\"\"\n while len(self.decoded) < num_bytes:\n try:\n tag, data = next(self.chunks)\n except StopIteration:\n raise EOFError()\n if tag != b'IDAT':\n continue\n self.decoded += self.decompressor.decompress(data)\n\n r = self.decoded[:num_bytes]\n self.decoded = self.decoded[num_bytes:]\n return r\n",
"def defilter(cur, prev, filter_type, bpp=4):\n \"\"\"Decode a chunk\"\"\"\n if filter_type == 0: # No filter\n return cur\n elif filter_type == 1: # Sub\n xp = 0\n for xc in range(bpp, len(cur)):\n cur[xc] = (cur[xc] + cur[xp]) % 256\n xp += 1\n elif filter_type == 2: # Up\n for xc in range(len(cur)):\n cur[xc] = (cur[xc] + prev[xc]) % 256\n elif filter_type == 3: # Average\n xp = 0\n for i in range(bpp):\n cur[i] = (cur[i] + prev[i] // 2) % 256\n for xc in range(bpp, len(cur)):\n cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256\n xp += 1\n elif filter_type == 4: # Paeth\n xp = 0\n for i in range(bpp):\n cur[i] = (cur[i] + prev[i]) % 256\n for xc in range(bpp, len(cur)):\n a = cur[xp]\n b = prev[xc]\n c = prev[xp]\n p = a + b - c\n pa = abs(p - a)\n pb = abs(p - b)\n pc = abs(p - c)\n if pa <= pb and pa <= pc:\n value = a\n elif pb <= pc:\n value = b\n else:\n value = c\n cur[xc] = (cur[xc] + value) % 256\n xp += 1\n else:\n raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))\n return cur\n",
"def chunks(f):\n \"\"\"Split read PNG image data into chunks\"\"\"\n while 1:\n try:\n length = struct.unpack(b\"!I\", f.read(4))[0]\n tag = f.read(4)\n data = f.read(length)\n crc = struct.unpack(b\"!I\", f.read(4))[0]\n except struct.error:\n return\n if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:\n raise IOError('Checksum fail')\n yield tag, data\n"
] | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.defilter | python | def defilter(cur, prev, filter_type, bpp=4):
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur | Decode a chunk | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L310-L351 | null | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
@staticmethod
def chunks(f):
"""Split read PNG image data into chunks"""
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data
|
rcarmo/pngcanvas | pngcanvas.py | PNGCanvas.chunks | python | def chunks(f):
while 1:
try:
length = struct.unpack(b"!I", f.read(4))[0]
tag = f.read(4)
data = f.read(length)
crc = struct.unpack(b"!I", f.read(4))[0]
except struct.error:
return
if zlib.crc32(tag + data) & 0xFFFFFFFF != crc:
raise IOError('Checksum fail')
yield tag, data | Split read PNG image data into chunks | train | https://github.com/rcarmo/pngcanvas/blob/e2eaa0d5ba353005b3b658f6ee453c1956340670/pngcanvas.py#L354-L366 | null | class PNGCanvas(object):
def __init__(self, width, height,
bgcolor=(0xff, 0xff, 0xff, 0xff),
color=(0, 0, 0, 0xff)):
self.width = width
self.height = height
self.color = bytearray(color) # rgba
self.bgcolor = bytearray(bgcolor)
self.canvas = bytearray(self.bgcolor * width * height)
def _offset(self, x, y):
"""Helper for internal data"""
x, y = force_int(x, y)
return y * self.width * 4 + x * 4
def point(self, x, y, color=None):
"""Set a pixel"""
if x < 0 or y < 0 or x > self.width - 1 or y > self.height - 1:
return
if color is None:
color = self.color
o = self._offset(x, y)
self.canvas[o:o + 3] = blend(self.canvas[o:o + 3], bytearray(color))
@staticmethod
def rect_helper(x0, y0, x1, y1):
"""Rectangle helper"""
x0, y0, x1, y1 = force_int(x0, y0, x1, y1)
if x0 > x1:
x0, x1 = x1, x0
if y0 > y1:
y0, y1 = y1, y0
return x0, y0, x1, y1
def vertical_gradient(self, x0, y0, x1, y1, start, end):
"""Draw a vertical gradient"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
grad = gradient_list(start, end, y1 - y0)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, grad[y - y0])
def rectangle(self, x0, y0, x1, y1):
"""Draw a rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
self.polyline([[x0, y0], [x1, y0], [x1, y1], [x0, y1], [x0, y0]])
def filled_rectangle(self, x0, y0, x1, y1):
"""Draw a filled rectangle"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
self.point(x, y, self.color)
def copy_rect(self, x0, y0, x1, y1, dx, dy, destination):
"""Copy (blit) a rectangle onto another part of the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
dx, dy = force_int(dx, dy)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
d = destination._offset(dx + x - x0, dy + y - y0)
o = self._offset(x, y)
destination.canvas[d:d + 4] = self.canvas[o:o + 4]
def blend_rect(self, x0, y0, x1, y1, dx, dy, destination, alpha=0xff):
"""Blend a rectangle onto the image"""
x0, y0, x1, y1 = self.rect_helper(x0, y0, x1, y1)
for x in range(x0, x1 + 1):
for y in range(y0, y1 + 1):
o = self._offset(x, y)
rgba = self.canvas[o:o + 4]
rgba[3] = alpha
destination.point(dx + x - x0, dy + y - y0, rgba)
def line(self, x0, y0, x1, y1):
"""Draw a line using Xiaolin Wu's antialiasing technique"""
# clean params
x0, y0, x1, y1 = int(x0), int(y0), int(x1), int(y1)
if y0 > y1:
y0, y1, x0, x1 = y1, y0, x1, x0
dx = x1 - x0
if dx < 0:
sx = -1
else:
sx = 1
dx *= sx
dy = y1 - y0
# 'easy' cases
if dy == 0:
for x in range(x0, x1, sx):
self.point(x, y0)
return
if dx == 0:
for y in range(y0, y1):
self.point(x0, y)
self.point(x1, y1)
return
if dx == dy:
for x in range(x0, x1, sx):
self.point(x, y0)
y0 += 1
return
# main loop
self.point(x0, y0)
e_acc = 0
if dy > dx: # vertical displacement
e = (dx << 16) // dy
for i in range(y0, y1 - 1):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
x0 += sx
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
y0 += 1
self.point(x0 + sx, y0, intensity(self.color, (0xFF - w)))
self.point(x1, y1)
return
# horizontal displacement
e = (dy << 16) // dx
for i in range(x0, x1 - sx, sx):
e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
if e_acc <= e_acc_temp:
y0 += 1
w = 0xFF-(e_acc >> 8)
self.point(x0, y0, intensity(self.color, w))
x0 += sx
self.point(x0, y0 + 1, intensity(self.color, (0xFF-w)))
self.point(x1, y1)
def polyline(self, arr):
"""Draw a set of lines"""
for i in range(0, len(arr) - 1):
self.line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1])
def dump(self):
"""Dump the image data"""
scan_lines = bytearray()
for y in range(self.height):
scan_lines.append(0) # filter type 0 (None)
scan_lines.extend(
self.canvas[(y * self.width * 4):((y + 1) * self.width * 4)]
)
# image represented as RGBA tuples, no interlacing
return SIGNATURE + \
self.pack_chunk(b'IHDR', struct.pack(b"!2I5B",
self.width, self.height,
8, 6, 0, 0, 0)) + \
self.pack_chunk(b'IDAT', zlib.compress(bytes(scan_lines), 9)) + \
self.pack_chunk(b'IEND', b'')
@staticmethod
def pack_chunk(tag, data):
"""Pack a PNG chunk for serializing to disk"""
to_check = tag + data
return (struct.pack(b"!I", len(data)) + to_check +
struct.pack(b"!I", zlib.crc32(to_check) & 0xFFFFFFFF))
def load(self, f):
"""Load a PNG image"""
SUPPORTED_COLOR_TYPES = (COLOR_TYPE_TRUECOLOR, COLOR_TYPE_TRUECOLOR_WITH_ALPHA)
SAMPLES_PER_PIXEL = { COLOR_TYPE_TRUECOLOR: 3,
COLOR_TYPE_TRUECOLOR_WITH_ALPHA: 4 }
assert f.read(8) == SIGNATURE
chunks = iter(self.chunks(f))
header = next(chunks)
assert header[0] == b'IHDR'
(width, height, bit_depth, color_type, compression,
filter_type, interlace) = struct.unpack(b"!2I5B", header[1])
if bit_depth != 8:
raise ValueError('Unsupported PNG format (bit depth={}; must be 8)'.format(bit_depth))
if compression != 0:
raise ValueError('Unsupported PNG format (compression={}; must be 0)'.format(compression))
if filter_type != 0:
raise ValueError('Unsupported PNG format (filter_type={}; must be 0)'.format(filter_type))
if interlace != 0:
raise ValueError('Unsupported PNG format (interlace={}; must be 0)'.format(interlace))
if color_type not in SUPPORTED_COLOR_TYPES:
raise ValueError('Unsupported PNG format (color_type={}; must one of {})'.format(SUPPORTED_COLOR_TYPES))
self.width = width
self.height = height
self.canvas = bytearray(self.bgcolor * width * height)
bytes_per_pixel = SAMPLES_PER_PIXEL[color_type]
bytes_per_row = bytes_per_pixel * width
bytes_per_rgba_row = SAMPLES_PER_PIXEL[COLOR_TYPE_TRUECOLOR_WITH_ALPHA] * width
bytes_per_scanline = bytes_per_row + 1
# Python 2 requires the encode for struct.unpack
scanline_fmt = ('!%dB' % bytes_per_scanline).encode('ascii')
reader = ByteReader(chunks)
old_row = None
cursor = 0
for row in range(height):
scanline = reader.read(bytes_per_scanline)
unpacked = list(struct.unpack(scanline_fmt, scanline))
old_row = self.defilter(unpacked[1:], old_row, unpacked[0], bpp=bytes_per_pixel)
rgba_row = old_row if color_type == COLOR_TYPE_TRUECOLOR_WITH_ALPHA else rgb2rgba(old_row)
self.canvas[cursor:cursor + bytes_per_rgba_row] = rgba_row
cursor += bytes_per_rgba_row
@staticmethod
def defilter(cur, prev, filter_type, bpp=4):
"""Decode a chunk"""
if filter_type == 0: # No filter
return cur
elif filter_type == 1: # Sub
xp = 0
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + cur[xp]) % 256
xp += 1
elif filter_type == 2: # Up
for xc in range(len(cur)):
cur[xc] = (cur[xc] + prev[xc]) % 256
elif filter_type == 3: # Average
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i] // 2) % 256
for xc in range(bpp, len(cur)):
cur[xc] = (cur[xc] + ((cur[xp] + prev[xc]) // 2)) % 256
xp += 1
elif filter_type == 4: # Paeth
xp = 0
for i in range(bpp):
cur[i] = (cur[i] + prev[i]) % 256
for xc in range(bpp, len(cur)):
a = cur[xp]
b = prev[xc]
c = prev[xp]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
value = a
elif pb <= pc:
value = b
else:
value = c
cur[xc] = (cur[xc] + value) % 256
xp += 1
else:
raise ValueError('Unrecognized scanline filter type: {}'.format(filter_type))
return cur
@staticmethod
|
cnelson/python-fleet | fleet/v1/objects/unit.py | Unit._set_options_from_file | python | def _set_options_from_file(self, file_handle):
# TODO: Find a library to handle this unit file parsing
# Can't use configparser, it doesn't handle multiple entries for the same key in the same section
# This is terribly naive
# build our output here
options = []
# keep track of line numbers to report when parsing problems happen
line_number = 0
# the section we are currently in
section = None
for line in file_handle.read().splitlines():
line_number += 1
# clear any extra white space
orig_line = line
line = line.strip()
# ignore comments, and blank lines
if not line or line.startswith('#'):
continue
# is this a section header? If so, update our variable and continue
# Section headers look like: [Section]
if line.startswith('[') and line.endswith(']'):
section = line.strip('[]')
continue
# We encountered a non blank line outside of a section, this is a problem
if not section:
raise ValueError(
'Unable to parse unit file; '
'Unexpected line outside of a section: {0} (line: {1}'.format(
line,
line_number
))
# Attempt to parse a line inside a section
# Lines should look like: name=value \
# continuation
continuation = False
try:
# if the previous value ends with \ then we are a continuation
# so remove the \, and set the flag so we'll append to this below
if options[-1]['value'].endswith('\\'):
options[-1]['value'] = options[-1]['value'][:-1]
continuation = True
except IndexError:
pass
try:
# if we are a continuation, then just append our value to the previous line
if continuation:
options[-1]['value'] += orig_line
continue
# else we are a normal line, so spit and get our name / value
name, value = line.split('=', 1)
options.append({
'section': section,
'name': name,
'value': value
})
except ValueError:
raise ValueError(
'Unable to parse unit file; '
'Malformed line in section {0}: {1} (line: {2})'.format(
section,
line,
line_number
))
# update our internal structure
self._data['options'] = options
return True | Parses a unit file and updates self._data['options']
Args:
file_handle (file): a file-like object (supporting read()) containing a unit
Returns:
True: The file was successfully parsed and options were updated
Raises:
IOError: from_file was specified and it does not exist
ValueError: The unit contents specified in from_string or from_file is not valid | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/objects/unit.py#L134-L224 | null | class Unit(FleetObject):
"""This object represents a Unit in Fleet
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to the changes in
this desired state. The actual state of the system is communicated with UnitState entities.
Attributes (all are readonly):
Always available:
options (update with add_option, remove_option): list of UnitOption entities
desiredState: (update with set_desired_state): state the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Available once units are submitted to fleet:
name: unique identifier of entity
currentState: state the Unit is currently in (same possible values as desiredState)
machineID: ID of machine to which the Unit is scheduled
A UnitOption represents a single option in a systemd unit file.
section: name of section that contains the option (e.g. "Unit", "Service", "Socket")
name: name of option (e.g. "BindsTo", "After", "ExecStart")
value: value of option (e.g. "/usr/bin/docker run busybox /bin/sleep 1000")
"""
_STATES = ['inactive', 'loaded', 'launched']
def __init__(self, client=None, data=None, desired_state=None, options=None, from_file=None, from_string=None):
    """Create a new unit.

    Args:
        client (fleet.v1.Client, optional): The fleet client that retrieved this object
        data (dict, optional): Initialize this object with this data. If this is used you must not
            specify options, desired_state, from_file, or from_string
        desired_state (string, optional): The desired_state for this object, defaults to 'launched'
        options (list, optional): A list of options to initialize the object with.
        from_file (str, optional): Initialize this object from the unit file on disk at this path
        from_string (str, optional): Initialize this object from the unit file in this string
        At most one of options, from_file, from_string may be given; if none is,
        an empty unit is created.

    Raises:
        IOError: from_file was specified and it does not exist
        ValueError: Conflicting options, or the unit contents specified in
            from_string or from_file are not valid
    """
    # 'data' is mutually exclusive with every other initializer argument
    if data and (desired_state or options or from_file or from_string):
        # BUGFIX: the two string fragments previously joined without a
        # space, producing "...desired_state,options, ..." in the message.
        raise ValueError('If you specify data you can not specify desired_state, '
                         'options, from_file, or from_string')

    # at most one source of options may be supplied
    if sum(1 for source in (options, from_file, from_string) if source) > 1:
        raise ValueError('You must specify only one of options, from_file, from_string')

    # build the minimum structure fleet requires when no data was passed;
    # defaults are resolved here (not in the signature) so the
    # mutual-exclusion check above can tell "not given" from "given"
    if data is None:
        data = {
            'desiredState': 'launched' if desired_state is None else desired_state,
            'options': [] if options is None else options
        }

    # let the parent class store client/data
    super(Unit, self).__init__(client=client, data=data)

    # populate options from a unit file on disk ...
    if from_file:
        with open(from_file, 'r') as fh:
            self._set_options_from_file(fh)

    # ... or from an in-memory unit string (StringIO makes it file-like)
    if from_string:
        self._set_options_from_file(StringIO(from_string))
def __repr__(self):
    """Debug representation: the class name followed by the unit's data."""
    cls_name = type(self).__name__
    return '<{0}: {1}>'.format(cls_name, self.as_dict())
def __str__(self):
    """Render this object as systemd unit-file text.

    Sections are emitted in first-appearance order, and options keep
    their stored order within each section.  BUGFIX: the previous
    implementation iterated a set() of section names, so the section
    order varied between runs (string hashing is randomized per
    process), making the generated unit file non-reproducible.
    """
    all_options = self._data['options']

    # ordered, de-duplicated list of section names (works on py2 and py3)
    sections = []
    for option in all_options:
        if option['section'] not in sections:
            sections.append(option['section'])

    output = []
    for section in sections:
        output.append(u'[{0}]'.format(section))
        # emit every option belonging to this section, in stored order
        for option in all_options:
            if option['section'] == section:
                output.append(u'{0}={1}'.format(option['name'], option['value']))
    return u"\n".join(output)
def _is_live(self):
    """Report whether this unit exists server-side.

    Only units holding both a server-assigned 'name' and an attached
    client are considered 'live'.

    Returns:
        True: the object is live
        False: the object is not
    """
    return bool(self._client) and 'name' in self._data
def add_option(self, section, name, value):
    """Append an option to a section of the unit file.

    Args:
        section (str): section the option belongs to (created implicitly
            if it does not exist yet)
        name (str): name of the option to add
        value (str): value of the option

    Returns:
        True: the option was appended
    """
    # options of submitted (live) units are immutable
    if self._is_live():
        raise RuntimeError('Submitted units cannot update their options')

    self._data['options'].append({
        'section': section,
        'name': name,
        'value': value
    })
    return True
def remove_option(self, section, name, value=None):
    """Delete matching options from this unit.

    Args:
        section (str): the section to remove from.
        name (str): the option name to remove.
        value (str, optional): when given, only options with this exact
            value are removed; otherwise every ``name`` in ``section`` is.

    Returns:
        True: at least one option was removed
        False: no matching option was found
    """
    # options of submitted (live) units are immutable
    if self._is_live():
        raise RuntimeError('Submitted units cannot update their options')

    def _matches(opt):
        # same section + same name, and either no value filter or an exact value match
        return (opt['section'] == section and opt['name'] == name
                and (value is None or opt['value'] == value))

    current = self._data['options']
    kept = [opt for opt in current if not _matches(opt)]
    changed = len(kept) != len(current)
    # splice in place so external references to the list stay valid
    current[:] = kept
    return changed
def destroy(self):
    """Remove this unit from the fleet cluster.

    Returns:
        True: the unit was removed

    Raises:
        RuntimeError: the unit was never submitted to fleet
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
    """
    # only units that exist server-side can be destroyed
    if not self._is_live():
        # BUGFIX: message previously read "before it can destroyed"
        raise RuntimeError('A unit must be submitted to fleet before it can be destroyed.')

    return self._client.destroy_unit(self.name)
def set_desired_state(self, state):
    """Set the state this unit should converge to.

    Args:
        state (str): desired state for the unit; must be one of ``_STATES``

    Returns:
        str: the updated desired state

    Raises:
        fleet.v1.errors.APIError: Fleet returned a response code >= 400
        ValueError: ``state`` is not a member of ``_STATES``
    """
    if state not in self._STATES:
        raise ValueError('state must be one of: {0}'.format(self._STATES))

    # record the new desired state locally
    self._data['desiredState'] = state

    # a live unit (submitted to the server, with an active client) must
    # also push the change to fleet and adopt the server's response
    if self._is_live():
        self._update('_data', self._client.set_unit_desired_state(self.name, self.desiredState))

    return self._data['desiredState']
|
cnelson/python-fleet | fleet/v1/objects/unit.py | Unit.add_option | python | def add_option(self, section, name, value):
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
option = {
'section': section,
'name': name,
'value': value
}
self._data['options'].append(option)
return True | Add an option to a section of the unit file
Args:
section (str): The name of the section, If it doesn't exist it will be created
name (str): The name of the option to add
value (str): The value of the option
Returns:
True: The item was added | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/objects/unit.py#L241-L266 | [
"def _is_live(self):\n \"\"\"Checks to see if this unit came from fleet, or was created locally\n\n Only units with a .name property (set by the server), and _client property are considered 'live'\n\n Returns:\n True: The object is live\n False: The object is not\n\n \"\"\"\n if 'name' in self._data and self._client:\n return True\n\n return False\n"
] | class Unit(FleetObject):
"""This object represents a Unit in Fleet
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to the changes in
this desired state. The actual state of the system is communicated with UnitState entities.
Attributes (all are readonly):
Always available:
options (update with add_option, remove_option): list of UnitOption entities
desiredState: (update with set_desired_state): state the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Available once units are submitted to fleet:
name: unique identifier of entity
currentState: state the Unit is currently in (same possible values as desiredState)
machineID: ID of machine to which the Unit is scheduled
A UnitOption represents a single option in a systemd unit file.
section: name of section that contains the option (e.g. "Unit", "Service", "Socket")
name: name of option (e.g. "BindsTo", "After", "ExecStart")
value: value of option (e.g. "/usr/bin/docker run busybox /bin/sleep 1000")
"""
_STATES = ['inactive', 'loaded', 'launched']
def __init__(self, client=None, data=None, desired_state=None, options=None, from_file=None, from_string=None):
"""Create a new unit
Args:
client (fleet.v1.Client, optional): The fleet client that retrieved this object
data (dict, optional): Initialize this object with this data. If this is used you must not
specify options, desired_state, from_file, or from_string
desired_state (string, optional): The desired_state for this object, defaults to 'launched' if not specified
If you do not specify data, You may specify one of the following args to initialize the object:
options (list, optional): A list of options to initialize the object with.
from_file (str, optional): Initialize this object from the unit file on disk at this path
from_string (str, optional): Initialize this object from the unit file in this string
If none are specified, an empty unit will be created
Raises:
IOError: from_file was specified and it does not exist
ValueError: Conflicting options, or The unit contents specified in from_string or from_file is not valid
"""
# make sure if they specify data, then they didn't specify anything else
if data and (desired_state or options or from_file or from_string):
raise ValueError('If you specify data you can not specify desired_state,'
'options, from_file, or from_string')
# count how many of options, from_file, from_string we have
given = 0
for thing in [options, from_file, from_string]:
if thing:
given += 1
# we should only have one, if we have more, yell at them
if given > 1:
raise ValueError('You must specify only one of options, from_file, from_string')
# ensure we have a minimum structure if we aren't passed one
if data is None:
# we set this here, instead as a default value to the arg
# as we want to be able to check it vs data above, it should be None in that case
if desired_state is None:
desired_state = 'launched'
if options is None:
options = []
# Minimum structure required by fleet
data = {
'desiredState': desired_state,
'options': options
}
# Call the parent class to configure us
super(Unit, self).__init__(client=client, data=data)
# If they asked us to load from a file, attempt to slurp it up
if from_file:
with open(from_file, 'r') as fh:
self._set_options_from_file(fh)
# If they asked us to load from a string, lie to the loader with StringIO
if from_string:
self._set_options_from_file(StringIO(from_string))
def __repr__(self):
return '<{0}: {1}>'.format(
self.__class__.__name__,
self.as_dict()
)
def __str__(self):
"""Generate a Unit file representation of this object"""
# build our output here
output = []
# get a ist of sections
sections = set([x['section'] for x in self._data['options']])
for section in sections:
# for each section, add it to our output
output.append(u'[{0}]'.format(section))
# iterate through the list of options, adding all items to this section
for option in self._data['options']:
if option['section'] == section:
output.append(u'{0}={1}'.format(option['name'], option['value']))
# join and return the output
return u"\n".join(output)
def _set_options_from_file(self, file_handle):
"""Parses a unit file and updates self._data['options']
Args:
file_handle (file): a file-like object (supporting read()) containing a unit
Returns:
True: The file was successfully parsed and options were updated
Raises:
IOError: from_file was specified and it does not exist
ValueError: The unit contents specified in from_string or from_file is not valid
"""
# TODO: Find a library to handle this unit file parsing
# Can't use configparser, it doesn't handle multiple entries for the same key in the same section
# This is terribly naive
# build our output here
options = []
# keep track of line numbers to report when parsing problems happen
line_number = 0
# the section we are currently in
section = None
for line in file_handle.read().splitlines():
line_number += 1
# clear any extra white space
orig_line = line
line = line.strip()
# ignore comments, and blank lines
if not line or line.startswith('#'):
continue
# is this a section header? If so, update our variable and continue
# Section headers look like: [Section]
if line.startswith('[') and line.endswith(']'):
section = line.strip('[]')
continue
# We encountered a non blank line outside of a section, this is a problem
if not section:
raise ValueError(
'Unable to parse unit file; '
'Unexpected line outside of a section: {0} (line: {1}'.format(
line,
line_number
))
# Attempt to parse a line inside a section
# Lines should look like: name=value \
# continuation
continuation = False
try:
# if the previous value ends with \ then we are a continuation
# so remove the \, and set the flag so we'll append to this below
if options[-1]['value'].endswith('\\'):
options[-1]['value'] = options[-1]['value'][:-1]
continuation = True
except IndexError:
pass
try:
# if we are a continuation, then just append our value to the previous line
if continuation:
options[-1]['value'] += orig_line
continue
# else we are a normal line, so spit and get our name / value
name, value = line.split('=', 1)
options.append({
'section': section,
'name': name,
'value': value
})
except ValueError:
raise ValueError(
'Unable to parse unit file; '
'Malformed line in section {0}: {1} (line: {2})'.format(
section,
line,
line_number
))
# update our internal structure
self._data['options'] = options
return True
def _is_live(self):
"""Checks to see if this unit came from fleet, or was created locally
Only units with a .name property (set by the server), and _client property are considered 'live'
Returns:
True: The object is live
False: The object is not
"""
if 'name' in self._data and self._client:
return True
return False
def remove_option(self, section, name, value=None):
"""Remove an option from a unit
Args:
section (str): The section to remove from.
name (str): The item to remove.
value (str, optional): If specified, only the option matching this value will be removed
If not specified, all options with ``name`` in ``section`` will be removed
Returns:
True: At least one item was removed
False: The item requested to remove was not found
"""
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
removed = 0
# iterate through a copy of the options
for option in list(self._data['options']):
# if it's in our section
if option['section'] == section:
# and it matches our name
if option['name'] == name:
# and they didn't give us a value, or it macthes
if value is None or option['value'] == value:
# nuke it from the source
self._data['options'].remove(option)
removed += 1
if removed > 0:
return True
return False
def destroy(self):
"""Remove a unit from the fleet cluster
Returns:
True: The unit was removed
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if this unit didn't come from fleet, we can't destroy it
if not self._is_live():
raise RuntimeError('A unit must be submitted to fleet before it can destroyed.')
return self._client.destroy_unit(self.name)
def set_desired_state(self, state):
"""Update the desired state of a unit.
Args:
state (str): The desired state for the unit, must be one of ``_STATES``
Returns:
str: The updated state
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value for ``state`` was provided
"""
if state not in self._STATES:
raise ValueError(
'state must be one of: {0}'.format(
self._STATES
))
# update our internal structure
self._data['desiredState'] = state
# if we have a name, then we came from the server
# and we have a handle to an active client
# Then update our selves on the server
if self._is_live():
self._update('_data', self._client.set_unit_desired_state(self.name, self.desiredState))
# Return the state
return self._data['desiredState']
|
cnelson/python-fleet | fleet/v1/objects/unit.py | Unit.remove_option | python | def remove_option(self, section, name, value=None):
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
removed = 0
# iterate through a copy of the options
for option in list(self._data['options']):
# if it's in our section
if option['section'] == section:
# and it matches our name
if option['name'] == name:
# and they didn't give us a value, or it macthes
if value is None or option['value'] == value:
# nuke it from the source
self._data['options'].remove(option)
removed += 1
if removed > 0:
return True
return False | Remove an option from a unit
Args:
section (str): The section to remove from.
name (str): The item to remove.
value (str, optional): If specified, only the option matching this value will be removed
If not specified, all options with ``name`` in ``section`` will be removed
Returns:
True: At least one item was removed
False: The item requested to remove was not found | train | https://github.com/cnelson/python-fleet/blob/a11dcd8bb3986d1d8f0af90d2da7399c9cc54b4d/fleet/v1/objects/unit.py#L268-L302 | [
"def _is_live(self):\n \"\"\"Checks to see if this unit came from fleet, or was created locally\n\n Only units with a .name property (set by the server), and _client property are considered 'live'\n\n Returns:\n True: The object is live\n False: The object is not\n\n \"\"\"\n if 'name' in self._data and self._client:\n return True\n\n return False\n"
] | class Unit(FleetObject):
"""This object represents a Unit in Fleet
Create and modify Unit entities to communicate to fleet the desired state of the cluster.
This simply declares what should be happening; the backend system still has to react to the changes in
this desired state. The actual state of the system is communicated with UnitState entities.
Attributes (all are readonly):
Always available:
options (update with add_option, remove_option): list of UnitOption entities
desiredState: (update with set_desired_state): state the user wishes the Unit to be in
("inactive", "loaded", or "launched")
Available once units are submitted to fleet:
name: unique identifier of entity
currentState: state the Unit is currently in (same possible values as desiredState)
machineID: ID of machine to which the Unit is scheduled
A UnitOption represents a single option in a systemd unit file.
section: name of section that contains the option (e.g. "Unit", "Service", "Socket")
name: name of option (e.g. "BindsTo", "After", "ExecStart")
value: value of option (e.g. "/usr/bin/docker run busybox /bin/sleep 1000")
"""
_STATES = ['inactive', 'loaded', 'launched']
def __init__(self, client=None, data=None, desired_state=None, options=None, from_file=None, from_string=None):
"""Create a new unit
Args:
client (fleet.v1.Client, optional): The fleet client that retrieved this object
data (dict, optional): Initialize this object with this data. If this is used you must not
specify options, desired_state, from_file, or from_string
desired_state (string, optional): The desired_state for this object, defaults to 'launched' if not specified
If you do not specify data, You may specify one of the following args to initialize the object:
options (list, optional): A list of options to initialize the object with.
from_file (str, optional): Initialize this object from the unit file on disk at this path
from_string (str, optional): Initialize this object from the unit file in this string
If none are specified, an empty unit will be created
Raises:
IOError: from_file was specified and it does not exist
ValueError: Conflicting options, or The unit contents specified in from_string or from_file is not valid
"""
# make sure if they specify data, then they didn't specify anything else
if data and (desired_state or options or from_file or from_string):
raise ValueError('If you specify data you can not specify desired_state,'
'options, from_file, or from_string')
# count how many of options, from_file, from_string we have
given = 0
for thing in [options, from_file, from_string]:
if thing:
given += 1
# we should only have one, if we have more, yell at them
if given > 1:
raise ValueError('You must specify only one of options, from_file, from_string')
# ensure we have a minimum structure if we aren't passed one
if data is None:
# we set this here, instead as a default value to the arg
# as we want to be able to check it vs data above, it should be None in that case
if desired_state is None:
desired_state = 'launched'
if options is None:
options = []
# Minimum structure required by fleet
data = {
'desiredState': desired_state,
'options': options
}
# Call the parent class to configure us
super(Unit, self).__init__(client=client, data=data)
# If they asked us to load from a file, attempt to slurp it up
if from_file:
with open(from_file, 'r') as fh:
self._set_options_from_file(fh)
# If they asked us to load from a string, lie to the loader with StringIO
if from_string:
self._set_options_from_file(StringIO(from_string))
def __repr__(self):
return '<{0}: {1}>'.format(
self.__class__.__name__,
self.as_dict()
)
def __str__(self):
"""Generate a Unit file representation of this object"""
# build our output here
output = []
# get a list of sections
sections = set([x['section'] for x in self._data['options']])
for section in sections:
# for each section, add it to our output
output.append(u'[{0}]'.format(section))
# iterate through the list of options, adding all items to this section
for option in self._data['options']:
if option['section'] == section:
output.append(u'{0}={1}'.format(option['name'], option['value']))
# join and return the output
return u"\n".join(output)
def _set_options_from_file(self, file_handle):
"""Parses a unit file and updates self._data['options']
Args:
file_handle (file): a file-like object (supporting read()) containing a unit
Returns:
True: The file was successfully parsed and options were updated
Raises:
IOError: from_file was specified and it does not exist
ValueError: The unit contents specified in from_string or from_file is not valid
"""
# TODO: Find a library to handle this unit file parsing
# Can't use configparser, it doesn't handle multiple entries for the same key in the same section
# This is terribly naive
# build our output here
options = []
# keep track of line numbers to report when parsing problems happen
line_number = 0
# the section we are currently in
section = None
for line in file_handle.read().splitlines():
line_number += 1
# clear any extra white space
orig_line = line
line = line.strip()
# ignore comments, and blank lines
if not line or line.startswith('#'):
continue
# is this a section header? If so, update our variable and continue
# Section headers look like: [Section]
if line.startswith('[') and line.endswith(']'):
section = line.strip('[]')
continue
# We encountered a non blank line outside of a section, this is a problem
if not section:
raise ValueError(
'Unable to parse unit file; '
'Unexpected line outside of a section: {0} (line: {1}'.format(
line,
line_number
))
# Attempt to parse a line inside a section
# Lines should look like: name=value \
# continuation
continuation = False
try:
# if the previous value ends with \ then we are a continuation
# so remove the \, and set the flag so we'll append to this below
if options[-1]['value'].endswith('\\'):
options[-1]['value'] = options[-1]['value'][:-1]
continuation = True
except IndexError:
pass
try:
# if we are a continuation, then just append our value to the previous line
if continuation:
options[-1]['value'] += orig_line
continue
# else we are a normal line, so spit and get our name / value
name, value = line.split('=', 1)
options.append({
'section': section,
'name': name,
'value': value
})
except ValueError:
raise ValueError(
'Unable to parse unit file; '
'Malformed line in section {0}: {1} (line: {2})'.format(
section,
line,
line_number
))
# update our internal structure
self._data['options'] = options
return True
def _is_live(self):
"""Checks to see if this unit came from fleet, or was created locally
Only units with a .name property (set by the server), and _client property are considered 'live'
Returns:
True: The object is live
False: The object is not
"""
if 'name' in self._data and self._client:
return True
return False
def add_option(self, section, name, value):
"""Add an option to a section of the unit file
Args:
section (str): The name of the section, If it doesn't exist it will be created
name (str): The name of the option to add
value (str): The value of the option
Returns:
True: The item was added
"""
# Don't allow updating units we loaded from fleet, it's not supported
if self._is_live():
raise RuntimeError('Submitted units cannot update their options')
option = {
'section': section,
'name': name,
'value': value
}
self._data['options'].append(option)
return True
def destroy(self):
"""Remove a unit from the fleet cluster
Returns:
True: The unit was removed
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
"""
# if this unit didn't come from fleet, we can't destroy it
if not self._is_live():
raise RuntimeError('A unit must be submitted to fleet before it can destroyed.')
return self._client.destroy_unit(self.name)
def set_desired_state(self, state):
"""Update the desired state of a unit.
Args:
state (str): The desired state for the unit, must be one of ``_STATES``
Returns:
str: The updated state
Raises:
fleet.v1.errors.APIError: Fleet returned a response code >= 400
ValueError: An invalid value for ``state`` was provided
"""
if state not in self._STATES:
raise ValueError(
'state must be one of: {0}'.format(
self._STATES
))
# update our internal structure
self._data['desiredState'] = state
# if we have a name, then we came from the server
# and we have a handle to an active client
# Then update our selves on the server
if self._is_live():
self._update('_data', self._client.set_unit_desired_state(self.name, self.desiredState))
# Return the state
return self._data['desiredState']
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.