text
stringlengths 29
850k
|
|---|
# GSDView - Geo-Spatial Data Viewer
# Copyright (C) 2008-2021 Antonio Valentino <antonio.valentino@tiscali.it>
#
# This module is free software you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation either version 2 of the License, or
# (at your option) any later version.
#
# This module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this module if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 US
"""Custom exectools components for GDAL."""
import re
import logging
import exectools
from exectools.qt import QtOutputHandler
from osgeo import gdal
_log = logging.getLogger(__name__)
class BaseGdalToolDescriptor(exectools.ToolDescriptor):
    """Base class for GDAL tool descriptors.

    Extends :class:`exectools.ToolDescriptor` by injecting ``--config``
    options (cache size, debug and driver settings) into the generated
    command line so the spawned GDAL utility inherits the current GDAL
    configuration of this process.
    """

    def gdal_config_options(self, cmd=''):
        """Return extra ``('--config', KEY, VALUE)`` arguments for *cmd*.

        :param cmd:
            the command line built so far; may be a string or a sequence
            of arguments.  NOTE: the ``in`` tests below mean substring
            matching when *cmd* is a string but exact-item matching when
            it is a list, so a key embedded inside a longer list element
            is not detected in the latter case.
        """
        extra_args = []
        if 'GDAL_CACHEMAX' not in cmd:
            # propagate the raster block cache size of this process
            value = gdal.GetCacheMax()
            extra_args.extend(('--config', 'GDAL_CACHEMAX', str(value)))
        for key in ('CPL_DEBUG', 'GDAL_SKIP', 'GDAL_DATA',
                    'GDAL_DRIVER_PATH', 'OGR_DRIVER_PATH'):
            if key not in cmd:
                value = gdal.GetConfigOption(key, None)
                if value:
                    extra_args.extend(('--config', key, '"%s"' % value))
        return extra_args

    def cmdline(self, *args, **kwargs):
        """Build the command line, inserting the GDAL ``--config`` options
        immediately after the executable part."""
        parts = super().cmdline(*args, **kwargs)
        extra_args = self.gdal_config_options(parts)
        if extra_args:
            if not self.executable or isinstance(self.executable, str):
                # single-token executable: options go right after it
                parts = [parts[0]] + extra_args + parts[1:]
            else:
                # multi-token executable (e.g. interpreter + script)
                executable = list(self.executable)
                parts = executable + extra_args + parts[len(executable):]
        return parts
class GdalAddOverviewDescriptor(BaseGdalToolDescriptor):
    """Tool descriptor for the gdaladdo utility program."""

    #: resampling methods
    RESAMPLING_METHODS = [
        'nearest',
        'average',
        'gauss',
        'cubic',
        'average_mp',
        'average_magphase',
        'mode',
    ]
    if gdal.VersionInfo() > '2000000':
        # 'cubicspline' and 'lanczos' are only available since GDAL 2.0
        RESAMPLING_METHODS.extend((
            'cubicspline',
            'lanczos',
        ))

    #: TIFF compression methods
    TIFF_COMPRESSION_METHODS = (
        'JPEG',
        'LZW',
        'PACKBITS',
        'DEFLATE',
    )

    #: TIFF interleaving methods
    TIFF_INTERLEAVING_METHODS = ('PIXEL', 'BAND')

    #: Allowed options for BigTIFF flag
    TIFF_USE_BIGTIFF_MODE = ('IF_NEEDED', 'IF_SAFER', 'YES', 'NO')

    def __init__(self, cwd=None, env=None,
                 stdout_handler=None, stderr_handler=None):
        """Initialization.

        :param cwd:
            program working directory
        :param env:
            environment dictionary
        :param stdout_handler:
            *OutputHandler* for the stdout of the tool
        :param stderr_handler:
            *OutputHandler* for the stderr of the tool

        .. seealso:: :class:`exectools.BaseOutputHandler`
        """
        super().__init__('gdaladdo', [], cwd, env, stdout_handler,
                         stderr_handler)

        #: if True pass the "-ro" flag to gdaladdo so that overviews are
        #: computed in read-only mode (stored in an external file).
        #: Disabled by default.
        self.readonly = False

        self._resampling_method = 'average'

        #: use Erdas Imagine format (.aux) as overview format.
        #: If None use GDAL defaults.
        self.use_rrd = None

        #: photometric interpretation: RGB, YCBCR, etc. (only for external
        #: overviews in GeoTIFF format).
        #: If None use GDAL defaults.
        self.photometric_interpretation = None

        self._compression_method = None
        self._interleaving_method = None
        self._use_bigtiff_mode = None

    def resampling_method(self):
        """Resampling method for overviews computation."""
        return self._resampling_method

    def set_resampling_method(self, method):
        """Set the resampling method for overviews computation.

        If set to None use GDAL defaults.
        Valid values are listed in :attr:`RESAMPLING_METHODS`.

        :raises ValueError:
            if *method* is not None and not a valid resampling method.
        """
        # NOTE: the original code appended "% ..." to the docstring
        # literal, turning it into a plain expression statement that was
        # re-evaluated on every call and produced no documentation.
        if method is not None and method not in self.RESAMPLING_METHODS:
            raise ValueError(
                'invalid resampling method: "%s". '
                'Available methods are: %s' % (
                    method, ', '.join(self.RESAMPLING_METHODS)))
        self._resampling_method = method

    def compression_method(self):
        """TIFF compression method.

        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        """
        return self._compression_method

    def set_compression_method(self, method):
        """Set the TIFF compression method.

        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        If set to None use GDAL defaults.
        Valid values are listed in :attr:`TIFF_COMPRESSION_METHODS`.
        """
        self._compression_method = method

    def interleaving_method(self):
        """Overviews interleaving method.

        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        Valid values are listed in :attr:`TIFF_INTERLEAVING_METHODS`.
        """
        return self._interleaving_method

    def set_interleaving_method(self, method):
        """Set the overview interleaving method.

        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        If set to None use GDAL defaults.
        Valid values are listed in :attr:`TIFF_INTERLEAVING_METHODS`.
        """
        self._interleaving_method = method

    def use_bigtiff_mode(self):
        """Mode of using BigTIFF in overviews.

        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        Valid values are listed in :attr:`TIFF_USE_BIGTIFF_MODE`.
        """
        return self._use_bigtiff_mode

    def set_use_bigtiff_mode(self, mode):
        """Set the mode of using BigTIFF in overviews.

        This attribute is only used if external overviews are
        stored in GeoTIFF format.
        If set to None use GDAL defaults.
        Valid values are listed in :attr:`TIFF_USE_BIGTIFF_MODE`.
        """
        self._use_bigtiff_mode = mode

    def gdal_config_options(self, cmd=''):
        """Return the ``--config`` arguments of the base class plus the
        overview-specific ones (USE_RRD, PHOTOMETRIC_OVERVIEW, ...)."""
        extra_args = super().gdal_config_options(cmd)

        if self.use_rrd is not None and 'USE_RRD' not in cmd:
            value = 'YES' if self.use_rrd else 'NO'
            extra_args.extend(('--config', 'USE_RRD', value))

        if (self.photometric_interpretation is not None
                and 'PHOTOMETRIC_OVERVIEW' not in cmd):
            extra_args.extend(('--config', 'PHOTOMETRIC_OVERVIEW',
                               self.photometric_interpretation))

        if (self._compression_method is not None
                and 'COMPRESS_OVERVIEW' not in cmd):
            extra_args.extend(('--config', 'COMPRESS_OVERVIEW',
                               self._compression_method))

        if (self._interleaving_method is not None
                and 'INTERLEAVE_OVERVIEW' not in cmd):
            extra_args.extend(('--config', 'INTERLEAVE_OVERVIEW',
                               self._interleaving_method))

        if (self._use_bigtiff_mode is not None
                and 'BIGTIFF_OVERVIEW' not in cmd):
            extra_args.extend(('--config', 'BIGTIFF_OVERVIEW',
                               self._use_bigtiff_mode))

        return extra_args

    def cmdline(self, *args, **kwargs):
        """Build the gdaladdo command line, prepending "-r METHOD" and
        appending "-ro" according to the descriptor configuration."""
        args = list(args)

        if self._resampling_method is not None and '-r' not in args:
            args = ['-r', self._resampling_method] + args
        if self.readonly and '-ro' not in args:
            args.append('-ro')

        return super().cmdline(*args, **kwargs)
class GdalInfoDescriptor(BaseGdalToolDescriptor):
    """Tool descriptor for the gdalinfo utility program."""

    #: names of the boolean options that map one-to-one onto gdalinfo
    #: command line flags ("-mm", "-stats", ...)
    _FLAG_NAMES = ('mm', 'stats', 'hist', 'nogcp', 'nomd', 'norat',
                   'noct', 'checksum')

    def __init__(self, cwd=None, env=None,
                 stdout_handler=None, stderr_handler=None):
        """
        :param cwd:
            program working directory
        :param env:
            environment dictionary
        :param stdout_handler:
            *OutputHandler* for the stdout of the tool
        :param stderr_handler:
            *OutputHandler* for the stderr of the tool

        .. seealso:: :class:`exectools.BaseOutputHandler`
        """
        super().__init__('gdalinfo', [], cwd, env,
                         stdout_handler, stderr_handler)

        #: force computation of the actual min/max values for each band in
        #: the dataset ("-mm").
        self.mm = False

        #: read and display image statistics, forcing their computation if
        #: none are stored in the image ("-stats").
        self.stats = False

        #: report histogram information for all bands ("-hist").
        self.hist = False

        #: suppress printing of the ground control points list; useful for
        #: datasets with a huge amount of GCPs such as L1B AVHRR or HDF4
        #: MODIS ("-nogcp").
        self.nogcp = False

        #: suppress metadata printing ("-nomd").
        self.nomd = False

        #: suppress raster attribute table printing ("-norat").
        self.norat = False

        #: suppress printing of the color table ("-noct").
        self.noct = False

        #: force computation of the checksum for each band ("-checksum").
        self.checksum = False

        #: report metadata for the specified domain ("-mdd DOMAIN").
        self.mdd = None

    def cmdline(self, *args, **kwargs):
        """Build the gdalinfo command line, prepending one flag for each
        enabled boolean option plus the "-mdd" domain when set."""
        flags = [
            '-%s' % name
            for name in self._FLAG_NAMES
            if getattr(self, name) and '-%s' % name not in args
        ]
        if self.mdd is not None and '-mdd' not in args:
            flags += ['-mdd', self.mdd]
        return super().cmdline(*(flags + list(args)), **kwargs)
class GdalOutputHandler(QtOutputHandler):
    """Handler for the GDAL simple progress report to terminal.

    This progress reporter prints simple progress report to the
    terminal window.
    The progress report generally looks something like this:

        "0...10...20...30...40...50...60...70...80...90...100 - done."

    Every 2.5% of progress another number or period is emitted.

    .. seealso:: :class:`exectools.BaseOutputHandler`,
                 :class:`exectools.qt.QtOutputHandler`
    """

    def __init__(self, logger=None, statusbar=None, progressbar=None,
                 blinker=None, **kwargs):
        super().__init__(logger, statusbar, progressbar, blinker, **kwargs)
        # pattern = (r'(?P<percentage>\d{1,3})|(?P<pulse>\.)|'
        #            r'((?P<text> - done\.?)$)')
        # Matches the GDAL progress stream: an explicit percentage
        # (1-3 digits), a single dot "pulse" (2.5% each), or the final
        # " - done." marker.
        pattern = (r'(?P<percentage>\d{1,3})|(?P<pulse>\.)|'
                   r'( - (?P<text>done\.?)\n)')
        self._progress_pattern = re.compile(pattern)
        # running percentage estimate accumulated from pulses
        self._percentage = 0.  # @TODO: remove. Set the progressbar maximum
        # to 1000 instead.

    def handle_progress(self, data):
        """Handle progress data.

        :param data:
            a list containing an item for each named group in the
            "progress" regular expression: (pulse, percentage, text)
            for the default implementation.
            Each item can be None.
        """
        pulse = data.get('pulse')
        percentage = data.get('percentage')
        # text = data.get('text')
        if pulse and percentage is None:
            # a bare dot advances the estimate by 2.5%, capped at 100
            self._percentage = min(100, self._percentage + 2.5)
            data['percentage'] = self._percentage
        if percentage is not None:
            # explicit percentages override the pulse-based estimate;
            # a regression is only logged, not corrected
            if percentage < self._percentage:
                _log.debug(
                    'new percentage (%d) is lower than previous one (%f)',
                    percentage, self._percentage)
            self._percentage = percentage
        # if text and not pulse and percentage is None:
        #     # reset percentage
        #     self._percentage = 0.
        super().handle_progress(data)

    def reset(self):
        """Reset the handler and the accumulated percentage."""
        super().reset()
        self._percentage = 0.
if __name__ == '__main__':
    # Manual smoke tests for the GDAL progress-report parsing.

    def test_GdalOutputHandler_re():
        # Feed a complete gdal progress line to a BaseOutputHandler that
        # borrows the GdalOutputHandler progress regexp.
        s = '0...10...20...30...40...50...60...70...80...90...100 - done.\n'
        h = exectools.BaseOutputHandler(exectools.OFStream())
        h._progress_pattern = GdalOutputHandler()._progress_pattern
        h.feed(s)
        h.close()
        print('done.')

    def test_GdalOutputHandler1():
        s = '0...10...20...30...40...50...60...70...80...90...100 - done.\n'

        class C(GdalOutputHandler):
            # Bypass the Qt-specific machinery and delegate everything to
            # the plain BaseOutputHandler implementation.
            def __init__(self):
                exectools.BaseOutputHandler.__init__(self,
                                                     exectools.OFStream())

            def feed(self, data):
                return exectools.BaseOutputHandler.feed(self, data)

            def close(self):
                return exectools.BaseOutputHandler.close(self)

            def reset(self):
                return exectools.BaseOutputHandler.reset(self)

            def handle_progress(self, data):
                return exectools.BaseOutputHandler.handle_progress(self, data)
        h = C()
        h.feed(s)
        h.close()
        print('done.')

    def test_GdalOutputHandler2():
        # Feed the stream one character at a time to exercise incremental
        # parsing of partial progress tokens.
        s = '0...10...20...30...40...50...60...70...80...90...100 - done.\n'
        h = exectools.BaseOutputHandler(exectools.OFStream())
        h._progress_pattern = GdalOutputHandler()._progress_pattern
        for c in s:
            h.feed(c)
        h.close()

    # test_GdalOutputHandler_re()
    # test_GdalOutputHandler1()
    test_GdalOutputHandler2()
|
92% of New Year’s Resolutions fail.
Installing bad habits through the holiday season with our food choices, portions, physical activity, and overall well-being will not make our New Year's resolution any easier. We are fascinated by the idea of enjoying ourselves during the holiday season with friends and family, and once the clock strikes midnight, then boom: we are changed human beings.
But, only 8% of us actually follow through!
Why is that? It is because we glorify the idea of making changes but not actually changing. Simply, we want to make the change but are not willing to put the work in to make it last. If you’re actually interested in changing then do it now. Make your New Year’s Resolution a December Resolution.
Each new habit needs 21 days to form, and you are setting yourself up for failure if you wait until January.
1. Acknowledge the barriers between you and your goal.
Is it our spouse, children, or roommates? Are we overwhelmed with work or responsibilities? Do we believe finances are blocking our lifestyle changes? Or is it ourselves, and we have trouble finding accountability?
Acknowledgement is the first step.
Next, devise a plan to counter these barriers. If the barrier is time, then lay out your weekly or monthly responsibilities ahead of time; planners and apps are very helpful in this regard. For apps: TinyCalendar is a simple and easy app for planning ahead, and if you're looking for a food tracker, look no further than MyFitnessPal. Then prepare your meals ahead of time: since you're too busy during the week, set aside time on Sunday night to prepare all your lunches and snacks.
2. Make the goal SMART: Specific, Measurable, Attainable, Relevant, Time-dependent.
Here is an example SMART goal: I will restrict my consumption of carbohydrates/sugar-dense foods to 1 serving a day for 8 weeks. Be cautious to make the goal centered on specific weight-loss each week as weight can fluctuate greatly with hydration, muscle mass or body fat differences. In turn, the weight-loss will most certainly follow with proper protein and caloric adjustments. Any more questions? Make an appointment with me, the nutritionist, to set yourself up with a plan.
3. Invest in your goal.
Invest in your health with your time or money. Whether a gym membership or a dietary plan advised by a nutritionist. We are much more likely to stay committed if we have put a financial or time investment into our health. It can be as small as buying a blender and plenty of fresh produce at the local Farmers Market. Financially investing in that blender or produce will encourage you to prepare smoothies or nutrient-dense dishes.
Reflect on how far you've come and where you want to be. Observing the emotional changes will give you interpersonal feedback and a sense of commitment to yourself — whether it's using a diary, a journal, an app, or meditation. A study done in the UK showed that those who journaled their workout intentions were more than three times as likely to reach their strength goals.
First, within yourself. Set a goal and do it. It's better not to set an unrealistic goal knowing we cannot follow through.
Next within others. Find accountability from friends or family. Either involve them in your goal directly or indirectly. Have them set a similar goal or serve as supporting cast, we are much more likely to succeed as a team.
At last within a program. Whether it’s here at River Bend Medical Associates’ Wellness Program or a fitness program at the local gym.
|
##########################################################################
# Ganga Project. http://cern.ch/ganga
#
##########################################################################
from GangaCore.GPIDev.Adapters.IPostProcessor import PostProcessException
from GangaCore.GPIDev.Adapters.IChecker import IFileChecker
from GangaCore.GPIDev.Schema import FileItem, SimpleItem
from GangaCore.Utility.logging import getLogger
import subprocess
import copy
import os
import re
logger = getLogger()
def SortedValues(adict):
    """Return the values of *adict* ordered by their keys."""
    return [adict[key] for key in sorted(adict)]
def GetKeyNames(f, dir=""):
    """Return the names of the keys found in directory *dir* of the open
    ROOT file *f*."""
    import ROOT
    f.cd(dir)
    keys = ROOT.gDirectory.GetListOfKeys()
    return [key.GetName() for key in keys]
def GetTreeObjects(f, dir=""):
    """Recursively collect the TTree objects contained in ROOT file *f*.

    Returns a dict mapping each tree path (joined from *dir* and the key
    names found while descending) to the corresponding TTree object.
    """
    import ROOT
    tree_dict = {}
    for tdir in GetKeyNames(f, dir):
        if tdir == "":
            continue
        absdir = os.path.join(dir, tdir)
        # NOTE(review): the directory test uses the bare key name
        # (``tdir``) while the tree test below uses the joined path
        # (``absdir``); for keys inside nested directories these resolve
        # to different objects -- confirm this asymmetry is intentional.
        if isinstance(f.Get(tdir), ROOT.TDirectory):
            # recurse into the sub-directory; the loop variable reuses
            # the name ``absdir`` for the paths returned by the recursion
            for absdir, tree in GetTreeObjects(f, absdir).items():
                tree_dict[absdir] = tree
        if isinstance(f.Get(absdir), ROOT.TTree):
            tree_dict[absdir] = f.Get(absdir)
    return tree_dict
class RootFileChecker(IFileChecker):
    """
    Checks ROOT files to see if they are zombies.
    For master job, also checks to see if merging performed correctly.
    self.files are the files you would like to check.
    self.fileMustExist toggles whether to fail the job if the specified file doesn't exist (default is True).
    """
    _schema = IFileChecker._schema.inherit_copy()
    _schema.datadict['checkMerge'] = SimpleItem(
        defvalue=True, doc='Toggle whether to check the merging proceedure')
    _category = 'postprocessor'
    _name = 'RootFileChecker'
    _exportmethods = ['check']

    def checkBranches(self, mastertrees, subtrees):
        """Return ``self.failure`` if any subjob tree that shares a path
        with a master tree has a different branch-name list, otherwise
        ``self.success``."""
        import ROOT
        for masterpath, mastertree in mastertrees.items():
            for subpath, subtree in subtrees.items():
                if (subpath == masterpath):
                    subbranches = [branch.GetName()
                                   for branch in subtree.GetListOfBranches()]
                    masterbranches = [branch.GetName()
                                      for branch in mastertree.GetListOfBranches()]
                    if (subbranches != masterbranches):
                        return self.failure
        return self.success

    def addEntries(self, mastertrees, subtrees, entries_dict):
        """Accumulate, per tree path, the entry counts of the subjob
        trees that match a master tree path; returns the updated dict."""
        import ROOT
        for masterpath, mastertree in mastertrees.items():
            for subpath, subtree in subtrees.items():
                if (subpath == masterpath):
                    if (subpath in entries_dict):
                        entries_dict[subpath] += subtree.GetEntries()
                    else:
                        entries_dict[subpath] = subtree.GetEntries()
        return entries_dict

    def checkMergeable(self, f):
        """Sanity-check a single ROOT file: not a zombie, has at least
        one key, and is larger than a bare (empty) ROOT file."""
        import ROOT
        tf = ROOT.TFile.Open(f)
        if tf.IsZombie():
            logger.info('ROOT file %s is a zombie, failing job', f)
            tf.Close()
            return self.failure
        if not len(GetKeyNames(tf)):
            logger.info('ROOT file %s has no keys, failing job', f)
            tf.Close()
            return self.failure
        tf.Close()
        # 330 bytes is roughly the size of an empty ROOT file header --
        # anything smaller cannot contain payload
        if (os.path.getsize(f) < 330):
            logger.info('ROOT file %s has no size, failing job', f)
            return self.failure
        return self.success

    def check(self, job):
        """
        Check that ROOT files are not zombies and were closed properly, also (for master job only) checks that the merging performed correctly.
        """
        import ROOT
        self.result = True
        filepaths = self.findFiles(job)
        if self.result is False:
            return self.failure
        if not len(filepaths):
            raise PostProcessException(
                'None of the files to check exist, RootFileChecker will do nothing!')
        for f in filepaths:
            if f.find('.root') < 0:
                raise PostProcessException('The file "%s" is not a ROOT file, RootFileChecker will do nothing!' % os.path.basename(f))
            if not self.checkMergeable(f):
                return self.failure
            # merge checks only make sense for a master job with subjobs
            if (len(job.subjobs) and self.checkMerge):
                haddoutput = f + '.hadd_output'
                if not os.path.exists(haddoutput):
                    logger.warning('Hadd output file %s does not exist, cannot perform check on merging.', haddoutput)
                    return self.success
                # scan the hadd log for known failure messages
                for failString in ['Could not find branch', 'One of the export branches', 'Skipped file']:
                    grepoutput = subprocess.getoutput('grep "%s" %s' % (failString, haddoutput))
                    if len(grepoutput):
                        logger.info('There was a problem with hadd, the string "%s" was found. Will fail job', failString)
                        return self.failure
                tf = ROOT.TFile.Open(f)
                mastertrees = GetTreeObjects(tf)
                entries_dict = {}
                # compare each completed subjob's matching output file
                # against the merged master file
                for sj in job.subjobs:
                    if (sj.status == 'completed'):
                        for subfile in self.findFiles(sj):
                            if (os.path.basename(subfile) == os.path.basename(f)):
                                subtf = ROOT.TFile.Open(subfile)
                                subtrees = GetTreeObjects(subtf)
                                substructure = sorted(subtrees.keys())
                                masterstructure = sorted(mastertrees.keys())
                                if (substructure != masterstructure):
                                    logger.info('File structure of subjob %s is not the same as master job, failing job', sj.fqid)
                                    return self.failure
                                if not self.checkBranches(mastertrees, subtrees):
                                    logger.info('The tree structure of subjob %s is not the same as merged tree, failing job', sj.fqid)
                                    return self.failure
                                entries_dict = self.addEntries(
                                    mastertrees, subtrees, entries_dict)
                                subtf.Close()
                # the summed subjob entries must equal the merged entries
                master_entries_dict = dict(
                    (n, mastertrees[n].GetEntries()) for n in set(mastertrees))
                if (SortedValues(entries_dict) != SortedValues(master_entries_dict)):
                    logger.info(
                        'Sum of subjob tree entries is not the same as merged tree entries for file %s, failing job (check hadd output)', os.path.basename(f))
                    return self.failure
                tf.Close()
        return self.result
|
Regular exercise for a lifetime can slow down the ageing process and help in keeping the body active, a study claims. Researchers assessed the health of older adults, who had exercised most of their adult lives, to see if this could slow down ageing. “We now have strong evidence that encouraging people to commit to regular exercise throughout their lives is a viable solution to the problem that we are living longer but not healthier,” said Janet Lord from the University of Birmingham in the UK. Previous research has shown that exercise can make your cells healthier and lead to a longer life span.
For the study, published in the journal Aging Cell, the researchers recruited 125 amateur cyclists aged 55 to 79, 84 of whom were male and 41 of whom were female. The men had to be able to cycle 100 kilometres in under 6.5 hours, while the women had to be able to cycle 60 kilometres in 5.5 hours. Smokers, heavy drinkers and those with high blood pressure or other health conditions were excluded from the study.
The participants underwent a series of tests in the laboratory and were compared to a group of adults who do not partake in regular physical activity. This group consisted of 75 healthy people aged 57 to 80 and 55 healthy young adults aged 20 to 36. The study showed that loss of muscle mass and strength did not occur in those who exercise regularly.
The cyclists also did not increase their body fat or cholesterol levels with age and the men’s testosterone levels also remained high, suggesting that they may have avoided most of the male menopause. The study also revealed that the benefits of exercise extend beyond muscle as the cyclists also had an immune system that did not seem to have aged either.
|
"""Definitions for the `BlackbodyCutoff` class."""
from math import pi
import numexpr as ne
import numpy as np
from astrocats.catalog.source import SOURCE
from astropy import constants as c
from astropy import units as u
from mosfit.constants import ANG_CGS, FOUR_PI
from mosfit.modules.seds.sed import SED
# Important: Only define one ``Module`` class per file.
class BlackbodyCutoff(SED):
    """Blackbody SED with cutoff.

    Blackbody spectral energy dist. for given temperature and radius,
    with a linear absorption function bluewards of a cutoff wavelength.
    """

    _REFERENCES = [
        {SOURCE.BIBCODE: '2017arXiv170600825N'}
    ]

    # Physical constants precomputed once in CGS units.
    C_CONST = c.c.cgs.value  # speed of light
    FLUX_CONST = FOUR_PI * (
        2.0 * c.h * c.c ** 2 * pi).cgs.value * u.Angstrom.cgs.scale  # Planck-law prefactor
    X_CONST = (c.h * c.c / c.k_B).cgs.value  # hc / k_B
    STEF_CONST = (4.0 * pi * c.sigma_sb).cgs.value  # Stefan-Boltzmann prefactor
    F_TERMS = 10  # number of terms kept in the normalisation series

    def __init__(self, **kwargs):
        """Initialize module."""
        super(BlackbodyCutoff, self).__init__(**kwargs)
        # n * hc/k_B for n = 1..F_TERMS, used by the series expansion in
        # ``process``.
        self._nxcs = self.X_CONST * np.array(range(1, self.F_TERMS + 1))

    def process(self, **kwargs):
        """Process module.

        Computes one SED (ergs / s / Angstrom) per luminosity epoch and
        returns them together with the sample wavelengths.
        """
        kwargs = self.prepare_input(self.key('luminosities'), **kwargs)
        self._luminosities = kwargs[self.key('luminosities')]
        self._bands = kwargs['all_bands']
        self._band_indices = kwargs['all_band_indices']
        self._frequencies = kwargs['all_frequencies']
        self._radius_phot = np.array(kwargs[self.key('radiusphot')])
        self._temperature_phot = np.array(kwargs[self.key('temperaturephot')])
        self._cutoff_wavelength = kwargs[self.key('cutoff_wavelength')]
        self._times = np.array(kwargs['rest_times'])
        # Short local aliases: numexpr expressions below reference these
        # names, so they must stay bound even when flagged unused.
        xc = self.X_CONST  # noqa: F841
        fc = self.FLUX_CONST
        cc = self.C_CONST
        ac = ANG_CGS
        cwave_ac = self._cutoff_wavelength * ac
        cwave_ac2 = cwave_ac * cwave_ac
        cwave_ac3 = cwave_ac2 * cwave_ac  # noqa: F841
        zp1 = 1.0 + kwargs[self.key('redshift')]
        lt = len(self._times)
        seds = np.empty(lt, dtype=object)
        rp2 = self._radius_phot ** 2
        tp = self._temperature_phot
        evaled = False
        for li, lum in enumerate(self._luminosities):
            bi = self._band_indices[li]
            # tpi = tp[li]
            # rp2i = rp2[li]
            if lum == 0.0:
                # Zero luminosity: emit an all-zero SED of the right size.
                seds[li] = np.zeros(
                    len(self._sample_wavelengths[bi]) if bi >= 0 else 1)
                continue
            if bi >= 0:
                # Band observation: shift sample wavelengths to rest frame.
                rest_wavs = self._sample_wavelengths[bi] * ac / zp1
            else:
                # Single-frequency observation.
                rest_wavs = np.array([cc / (self._frequencies[li] * zp1)])
            # Apply absorption to SED only bluewards of cutoff wavelength
            ab = rest_wavs < cwave_ac  # noqa: F841
            tpi = tp[li]  # noqa: F841
            rp2i = rp2[li]  # noqa: F841
            if not evaled:
                # Absorbed blackbody: 0% transmission at 0 Angstroms 100% at
                # >3000 Angstroms.
                # First iteration compiles the numexpr expression; later
                # iterations reuse it through ne.re_evaluate(), which
                # requires these exact local variable names.
                sed = ne.evaluate(
                    "where(ab, fc * (rp2i / cwave_ac / "
                    "rest_wavs ** 4) / expm1(xc / rest_wavs / tpi), "
                    "fc * (rp2i / rest_wavs ** 5) / "
                    "expm1(xc / rest_wavs / tpi))"
                )
                evaled = True
            else:
                sed = ne.re_evaluate()
            sed[np.isnan(sed)] = 0.0
            seds[li] = sed
        uniq_times = np.unique(self._times)
        tsort = np.argsort(self._times)
        # NOTE(review): ``searchsorted`` with ``sorter=`` yields positions
        # in sorted order; using them to index the original arrays below
        # presumes ``self._times`` is already sorted -- confirm.
        uniq_is = np.searchsorted(self._times, uniq_times, sorter=tsort)
        lu = len(uniq_times)
        # NOTE(review): assumes ``self._luminosities`` supports fancy
        # indexing (array-like rather than a plain list) -- confirm.
        norms = self._luminosities[
            uniq_is] / (fc / ac * rp2[uniq_is] * tp[uniq_is])
        rp2 = rp2[uniq_is].reshape(lu, 1)
        tp = tp[uniq_is].reshape(lu, 1)
        tp2 = tp * tp
        tp3 = tp2 * tp  # noqa: F841
        nxcs = self._nxcs  # noqa: F841
        # Truncated series for the integral of the absorbed blackbody,
        # used to renormalise each SED to the input luminosity.
        f_blue_reds = ne.evaluate(
            "sum((exp(-nxcs / (cwave_ac * tp)) * ("
            "nxcs ** 2 + 2 * ("
            "nxcs * cwave_ac * tp + cwave_ac2 * tp2)) / ("
            "nxcs ** 3 * cwave_ac3)) + "
            "(6 * tp3 - exp(-nxcs / (cwave_ac * tp)) * ("
            "nxcs ** 3 + 3 * nxcs ** 2 * cwave_ac * tp + 6 * ("
            "nxcs * cwave_ac2 * tp2 + cwave_ac3 *"
            "tp3)) / cwave_ac3) / (nxcs ** 4), 1)"
        )
        norms /= f_blue_reds
        # Apply renormalisation
        seds *= norms[np.searchsorted(uniq_times, self._times)]
        seds = self.add_to_existing_seds(seds, **kwargs)
        # Units of `seds` is ergs / s / Angstrom.
        return {'sample_wavelengths': self._sample_wavelengths,
                self.key('seds'): seds}
|
Read and develop points for discussion on GD Burning topic: Exodus of rural population from villages to city.
Migration of population from villages to cities has become a very big trend over the last few years. In developing countries, people are moving out of towns and villages and flocking to city life. Migration from smaller towns to the biggest cities has been a very common trend in developed nations like the United States of America, Canada, etc.
However, the rural flight (term for migration of rural population to the cities) has been a rather new phenomenon in developing nations like India. The pattern has been much accentuated in the last few years.
1. Agriculture demands hard work and provides little scope for good money: In countries like India, agriculture offers very little scope for earning good profits. There are many reasons behind this. To start with, agricultural practices in India are still old and quaint. Though the Government has introduced various schemes and subsidies, agricultural practices remain underdeveloped. Farmers have to depend largely on the monsoon rains for the irrigation of crops. High Yielding Varieties of seeds are popular in only a few states. The benefits of the green revolution have remained concentrated in only a few states. Farmers in other states remain largely poor. Thus, there is very little stability or growth in Indian agriculture. This is one of the reasons why farmers move to cities in search of better lives and pay.
2.The best of the job opportunities are available in the cities only: When a person moves away from the village, it is only in search of a more stable job that would give the worker a constant and regular income with yearly increases. Most of the big firms and industries which provide employment are located in cities, the reason being the easy availability of raw material and resources and also the good infrastructure and transport facilities. There are many more reasons apart from the ones specified above. Thus, all the workers from the villages migrate to the city as the labor is much in demand in these cities. In villages and towns, there are never much job opportunities and even those that are they, are not actually the well paid jobs. The city beckons.
3.City Life offers pleasures: No one can deny the fact that the man is a slave to his desires. Every man likes to have a little fun and a little pleasure. The town side offers to no places to have fun in the real sense of the world. In countries like India, developments owing to the modernization, has remained concentrated to the cities. Whether it is shopping in a mall where one can find all the designer stores and brands under one roof or going to a night party; whether it is about eating in a good food chain like McDonalds or Pizza Hut or about watching the expensive luxury cars on the road, the city life has its own charms. The opportunities that one gets while living in a city are never there when a person is living in the village.
These are some of the reasons which explain precisely the reason behind the exodus of rural population from villages to city. However, the question that arises next is- Is it good for any country that its rural population moves towards the cities?
The movement of people from the rural to the urban areas affects the small towns and the small communities which exist in these places. The population of these towns goes on decreasing. In developing countries like India, migration to the city is not an answer to the economic problems.
In India, a major part of the GDP still comes from the agricultural sector. The country cannot, thus, undermine the power of agriculture. To developing countries like India, migration of rural population to the cities is not the answer. Instead, the government should concentrate on developing the agricultural practices and making them modernized.
The onus of the government should be on developing agriculture as a means on livelihood in India rather than promoting the migration to cities. In reference to the current scenario prevalent in India and other developing nations, agriculture is important and thus, it should be promoted and converted into a mainstream enterprise rather than subsistence means of living.
|
# TmDeploy - Automated deployment of TissueMAPS in the cloud.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
#: dict[int, int]: Mapping of logging verbosity to logging level
VERBOSITY_TO_LEVELS = {
    0: logging.NOTSET,  # Nothing gets logged
    1: logging.WARN,    # For simplicity. Includes ERROR, CRITICAL
    2: logging.INFO,
    3: logging.DEBUG,
}

#: dict[int, int]: Mapping of logging level to logging verbosity
LEVELS_TO_VERBOSITY = {
    logging.NOTSET: 0,
    logging.WARN: 1,
    logging.ERROR: 1,
    logging.CRITICAL: 1,
    logging.INFO: 2,
    logging.DEBUG: 3,
}


def map_logging_verbosity(verbosity):
    '''Maps logging verbosity to level expected by `logging` module.

    Parameters
    ----------
    verbosity: int
        logging verbosity (0 and above; values beyond the highest known
        verbosity are clamped to the most verbose level)

    Returns
    -------
    int
        logging level

    Raises
    ------
    TypeError
        when `verbosity` doesn't have type int
    ValueError
        when `verbosity` is negative
    '''
    if not isinstance(verbosity, int):
        raise TypeError('Argument "verbosity" must have type int.')
    if verbosity < 0:
        # the original message said "positive", but 0 is a valid value
        raise ValueError('Argument "verbosity" must be a non-negative number.')
    # clamp out-of-range verbosities to the most verbose known level
    verbosity = min(verbosity, len(VERBOSITY_TO_LEVELS) - 1)
    return VERBOSITY_TO_LEVELS[verbosity]
def configure_logging(level=logging.DEBUG):
    '''Configures the root logger for command line applications.

    Attaches a stream handler named ``'err'`` to the root logger that
    writes formatted records to the standard error stream.

    By default, *no* messages will be filtered out: set a higher
    level on derived/child loggers to achieve filtering.

    Warning
    -------
    Logging should only be configured once at the main entry point of the
    application!
    '''
    formatter = logging.Formatter(
        fmt='%(asctime)s | %(levelname)-8s | %(name)-40s | %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    handler = logging.StreamHandler(stream=sys.stderr)
    handler.name = 'err'
    handler.setLevel(level)
    handler.setFormatter(formatter)
    # Attach to the root logger so all module loggers propagate here.
    logging.getLogger().addHandler(handler)
|
My first day of vacation was filled with sightseeing and window shopping. The weather was cloudy; however, it helped counter the humidity and heat. The bonus about staying at The Garfield is that everything is within walking distance. Arriving at Fountain Square was really exciting — seeing the performance stage and admiring the beautiful fountain... it was breathtaking.
I finished the day with a highly recommended local treat that I was at first hesitant about… but DAMN… these were some good coneys and I was told I couldn’t have coneys without Grippo’s… Yes! So far this was my favourite meal!
|
import json
from django.core import serializers
from django.forms import model_to_dict
from django.http import HttpResponseRedirect
from django.template.loader import render_to_string
from datetime import date, timedelta
from .helper_functions import *
from django.contrib import auth
from django.db.models import Count
from django.http import HttpResponse
from django.http import HttpResponseNotAllowed
from django.http import JsonResponse
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
# Create your views here.
from django.template import RequestContext
from django.views import generic
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.edit import FormMixin, CreateView
from PartyPlay.forms import UploadVideoForm
from PartyPlay.models import Room, Video
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
class RoomModelListView(generic.ListView):
    """List all rooms, additionally exposing the requesting user's favorites."""
    model = Room
    context_object_name = 'room_list'
    template_name = 'partyplay/roommodel_list.html'

    def get_context_data(self, **kwargs):
        context = super(RoomModelListView, self).get_context_data(**kwargs)
        user = self.request.user
        # Rooms this user has marked as favorite, preserving list order.
        context['favorite_room_list'] = [
            room for room in context['room_list'].all()
            if user in room.favorite_users.all()
        ]
        return context
class RoomModelDetailView(generic.DetailView):
    """Detail page for a room: current video, play queue and upload form."""
    model = Room
    context_object_name = 'room_data'
    template_name = 'partyplay/roommodel_detail.html'
    upload_form = UploadVideoForm

    def get_context_data(self, **kwargs):
        # Call the base implementation to get the original context
        context = super(RoomModelDetailView, self).get_context_data(**kwargs)
        # Get the songs currently in the room, in play order.
        top_songs = get_ordered_videos(self.object)
        context['queue'] = top_songs
        # If the scheduled switch time has passed, advance to the next video.
        if self.object.next_time and self.object.next_time < timezone.now():
            update_currently_playing(self.object)
        context['current_video'] = self.object.current_video
        context['start_time'] = get_start_time(self.object)
        # BUG FIX: the original tested ``auth.user_logged_in``, which is a
        # module-level *signal object* and therefore always truthy; the intent
        # is to check whether this request's user is authenticated.
        if self.request.user.is_authenticated:
            # Videos in the queue the current user has already upvoted.
            upvoted = [video for video in top_songs
                       if video.voters.filter(pk=self.request.user.pk).exists()]
            context['upvoted'] = upvoted
        context['upload_form'] = self.upload_form
        return context
@login_required
@require_http_methods(["POST"])
def add_video(request, pk):
    """Add a posted video to a room's queue, auto-upvoted by its uploader.

    Expects POST fields ``video_id``, ``duration`` (seconds) and ``title``.
    Returns the re-rendered queue for the room.
    """
    # 404 on unknown rooms instead of an unhandled DoesNotExist (500).
    room = get_object_or_404(Room, pk=pk)
    vid_id = request.POST.get('video_id')
    duration = request.POST.get('duration')
    title = request.POST.get('title')
    video = Video()
    video.uploader = auth.get_user(request)
    video.title = title
    video.duration = timedelta(seconds=int(duration))
    video.room = room
    video.videoID = vid_id
    # Save once so the video has a primary key before touching the M2M.
    video.save()
    # The uploader automatically upvotes their own video; m2m .add() writes
    # straight to the through table, so the original second save() was a no-op.
    video.voters.add(request.user)
    return render_current_queue(request, video.room)
@login_required
@require_http_methods(["POST"])
def favorite_room(request, pk):
    """Toggle whether the requesting user has favorited the given room."""
    room = Room.objects.get(pk=pk)
    user = request.user
    if user in room.favorite_users.all():
        # Already a favorite: un-favorite it.
        room.favorite_users.remove(user)
    else:
        room.favorite_users.add(user)
    # The client only needs an acknowledgement, not a body.
    return HttpResponse()
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import UserCreationForm
from django.shortcuts import render, redirect
def signup(request):
    """Register a new account; on success, log the user in and redirect.

    GET renders an empty registration form; POST validates it, creates the
    user, authenticates them and redirects to the room list. Invalid POSTs
    re-render the bound form with its errors.
    """
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            form.save()
            user = authenticate(
                username=form.cleaned_data.get('username'),
                password=form.cleaned_data.get('password1'))
            login(request, user)
            return redirect('rooms')
    else:
        form = UserCreationForm()
    return render(request, 'registration/signup.html', {'form': form})
def get_queue(request, pk):
    """Return the rendered current queue for the given room."""
    # get_object_or_404 yields a proper 404 for unknown rooms instead of an
    # unhandled Room.DoesNotExist (500).
    room = get_object_or_404(Room, pk=pk)
    return render_current_queue(request, room)
@require_http_methods(["POST"])
def video_end(request, pk):
    """Handle a client reporting that a video finished; return queue state.

    The first request whose POSTed ``vid_pk`` still matches the room's
    current video advances the queue; every other request (different pk, or
    no video loaded on the frontend) only receives the updated state.
    Responds with a JSON object containing the rendered queue HTML plus the
    current video's metadata.
    """
    room = Room.objects.get(pk=pk)
    # Primary key of the video the frontend just finished (may be absent).
    str_val = request.POST.get("vid_pk")
    if not str_val:
        # No video loaded on the frontend yet.
        if room.current_video is None:
            new_pk = update_currently_playing(room)
        else:
            new_pk = room.current_video.pk
    else:
        vid_pk = int(str_val)
        # Only the first finisher (pk matches current_video) advances the
        # queue; stale requests just get the already-updated state.
        if room.current_video is None:
            new_pk = None
        elif room.current_video.pk == vid_pk:
            new_pk = update_currently_playing(room)
        else:
            new_pk = room.current_video.pk
    if room.current_video:
        current_vid = room.current_video.videoID
        current_vid_name = room.current_video.title
        uploader = room.current_video.uploader.username
    else:
        current_vid = None
        current_vid_name = None
        uploader = None
    # Compute the time-sensitive values once and reuse them so the HTML and
    # the JSON payload agree (the original recomputed both).
    time_until_next = get_time_until_next(room)
    videos = get_ordered_videos(room)
    upvotes = [video.pk for video in videos if request.user in video.voters.all()]
    context = {
        'current_video': room.current_video,
        'time_until_next': time_until_next,
        'queue': videos,
        'upvotes': upvotes,
    }
    response_data = {
        'html': render_to_string('partyplay/video_and_queue.html', context=context, request=request),
        'time_until_next': time_until_next,
        'current_vid_pk': new_pk,
        'current_vid_id': current_vid,
        'current_vid_name': current_vid_name,
        'current_uploader': uploader,
        'start_time': get_start_time(room)
    }
    # BUG FIX: the MIME type was 'application.json'; the correct value is
    # 'application/json'.
    return HttpResponse(json.dumps(response_data), content_type='application/json')
@login_required
def upvote(request):
    """Toggle the requesting user's vote on a video; return the queue HTML.

    Expects the video's primary key in the ``vid_pk`` GET parameter.
    """
    pk = request.GET['vid_pk']
    video = get_object_or_404(Video, pk=pk)
    user = auth.get_user(request)
    # Toggle membership in the voters m2m. .add()/.remove() write directly to
    # the through table, so the original trailing video.save() was a no-op;
    # the unused RequestContext local has also been dropped.
    if user not in video.voters.all():
        video.voters.add(user)
    else:
        video.voters.remove(user)
    return render_current_queue(request, video.room)
class UserProfilePage(LoginRequiredMixin, generic.ListView):
    """Profile page listing the user's uploaded videos and created rooms."""
    model = Video
    template_name = 'partyplay/userprofile.html'

    def get_context_data(self, **kwargs):
        context = super(UserProfilePage, self).get_context_data(**kwargs)
        current_user = self.request.user
        context['uploaded_videos'] = Video.objects.filter(uploader=current_user)
        context['created_rooms'] = Room.objects.filter(creator=current_user).all()
        return context
class RoomCreate(CreateView):
    """Create a new room owned by the requesting user."""
    model = Room
    fields = ['name', 'public']
    template_name = 'partyplay/addroom.html'

    def form_valid(self, form):
        form.instance.creator = self.request.user
        # Derive the room's URL slug by lowercasing the name and stripping spaces.
        form.instance.url = form.cleaned_data['name'].lower().replace(" ", "")
        # BUG FIX: the original called form.save() here and then super(),
        # which saves again -- persisting the object twice. super() alone
        # performs the save.
        return super(RoomCreate, self).form_valid(form)
|
(CNN) -- A friend of Rep. Gabrielle Giffords said she was "always willing to talk to her constituents," and that her returning to Tucson for a town hall meeting in the days after Congress had convened was typical.
Giffords, 40, was one of 18 people shot at a Tucson, Arizona, grocery store Saturday at what she called "My 1st Congress on Your Corner," in a tweet shortly before she was shot.
Giffords' condition still was critical after surgery Saturday for a gunshot wound to the head, but doctors were optimistic of her chances for surviving.
"She was an incredible public servant," said Sylvia Lee, president of Pima County Community College in Tucson. "She was a moderate Democrat who was willing to talk to both sides."
Giffords is married to NASA astronaut Mark Kelly, a Navy captain who is scheduled to fly the April space shuttle mission to the international space station. Lee said that Giffords has no children but was stepmother to Kelly's two children.
CNN's Dana Bash had a conversation with Giffords on Friday at the Capitol and Giffords told her that she had taken a trip to Rome with her family over the holidays and had been able to attend Midnight Mass in the Vatican.
Rep. Jerrold Nadler, D-New York, called Giffords a "wonderful, perky, woman."
Nadler said that she is "very popular in not a terribly popular Democratic district."
Giffords narrowly beat Tea Party-backed Republican Jesse Kelly in November, 49-47 percent.
Lee said that Giffords and Rep. Raul Grijalva, a fellow Arizona Democrat, had received threats in the past.
Grijalva confirmed the threats to CNN affiliate KVOA, "But for her to say, I will not go out there and do my job and be with constituents because of this -- no, never.
"She is fearless," he said.
Known as a "Blue Dog," or moderate, Democrat, Giffords was a key swing vote in last year's health care reform debate. She didn't make up her mind to vote for the legislation until the final days before the ultimate passing vote.
A glass panel at her Tucson office was shattered the day after the vote. Spokesman C.J. Karamargin told CNN then that staffers suspected someone shot a pellet gun at the glass.
Giffords is serving her third term in Congress, first elected in 2006. She generally voted with her party but voted against the auto bailout bill and was one of 19 Democrats to vote against former House Speaker Nancy Pelosi's bid to become the Democrats' minority leader in the wake of last year's midterm elections that returned Republicans to power in the House.
Giffords served one term in the Arizona House of Representatives and was the youngest woman elected to the Arizona state Senate.
She was employed by Price Waterhouse Coopers and served as CEO and president of El Campo Tire, her family's business, before she was elected to the Arizona House.
Giffords and Grijalva were two of the three candidates MSNBC's "Countdown" host Keith Olbermann was suspended for making campaign contributions to. The other candidate was Democrat Jack Conway, who was running for the U.S. Senate seat won by Rand Paul.
|
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
class CustomOption(Model):
    """Product custom option: a per-product input (free text or a choice
    among predefined values) that buyers fill in when ordering."""
    _name = "product.custom.option"
    _string = "Custom Option"
    # Records are uniquely keyed by their code.
    _key = ["code"]
    _fields = {
        "name": fields.Char("Name", required=True, search=True, translate=True),
        # NOTE(review): the sequence is a Char field (default '0' below), not a
        # numeric one -- confirm whether lexicographic ordering is intended.
        "seq": fields.Char("Sequence", required=True),
        "code": fields.Char("Code", search=True),
        "type": fields.Selection([["text", "Text"], ["selection", "Selection"]], "Type", required=True),
        "required": fields.Boolean("Required"),
        "description": fields.Text("Description"),
        # Extra price added when this option is chosen.
        "price": fields.Decimal("Price"),
        # Candidate values offered when type == "selection".
        "values": fields.One2Many("product.custom.option.value", "cust_opt_id", "Values"),
    }
    _defaults = {
        "type": "text",
        "seq": '0',
    }
CustomOption.register()
|
The Museum houses a fascinating range of artefacts covering all aspects of life in Donegal from the Stone Age to the 20th Century.
The Museum houses a fascinating range of artefacts covering all aspects of life in Donegal from the Stone Age to the 20th Century. A series of temporary exhibitions are held in the ground floor gallery throughout the year. These exhibitions cover a wide variety of topics with something to suit all ages and interests. Children's Activity Trails are available in the first floor gallery of the Museum.
Open all year, Mon-Fri 10am-4.30pm. Sat 1-4.30pm. Closed for lunch 12.30-1pm.
|
# Copyright (C) 2016 The OpenTimestamps developers
#
# This file is part of python-opentimestamps.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-opentimestamps including this file, may be copied,
# modified, propagated, or distributed except according to the terms contained
# in the LICENSE file.
import unittest
import dbm
import git
import tempfile
from bitcoin.core import b2x
from opentimestamps.core.timestamp import *
from opentimestamps.core.op import *
from opentimestamps.core.git import *
class Test_GitTreeTimestamper(unittest.TestCase):
    """Tests GitTreeTimestamper against pinned commits of this very repo.

    The expected digests below are precomputed constants that encode the
    exact per-file nonce/hashing scheme; any change to that scheme breaks
    these tests by design.
    """
    def setUp(self):
        # Temporary dbm directories created by make_stamper(); removed in tearDown().
        self.db_dirs = []
    def tearDown(self):
        for d in self.db_dirs:
            d.cleanup()
        del self.db_dirs
    def make_stamper(self, commit):
        """Return a GitTreeTimestamper for `commit`, backed by a fresh dbm cache."""
        # Yes, we're using our own git repo as the test data!
        # NOTE(review): appending '../../../../../' directly to __file__ only
        # works because path normalization discards the trailing filename
        # component -- looks fragile; confirm against the repo layout.
        repo = git.Repo(__file__ + '../../../../../')
        db_dir = tempfile.TemporaryDirectory()
        self.db_dirs.append(db_dir)
        db = dbm.open(db_dir.name + '/db', 'c')
        tree = repo.commit(commit).tree
        return GitTreeTimestamper(tree, db=db)
    def test_blobs(self):
        """Git blob hashing"""
        stamper = self.make_stamper("53c68bc976c581636b84c82fe814fab178adf8a6")
        # Each (digest, path) pair pins the expected file_digest for a blob.
        for expected_hexdigest, path in (('9e34b52cfa5724a4d87e9f7f47e2699c14d918285a20bf47f5a2a7345999e543', 'LICENSE'),
                                         ('ef83ecaca007e8afbfcca834b75510a98b6c10036374bb0d9f42a63f69efcd11', 'opentimestamps/__init__.py'),
                                         ('ef83ecaca007e8afbfcca834b75510a98b6c10036374bb0d9f42a63f69efcd11', 'opentimestamps/tests/__init__.py'),
                                         ('745bd9059cf01edabe3a61198fe1147e01ff57eec69e29f2e617b8e376427082', 'opentimestamps/tests/core/test_core.py'),
                                         ('ef83ecaca007e8afbfcca834b75510a98b6c10036374bb0d9f42a63f69efcd11', 'opentimestamps/tests/core/__init__.py'),
                                         ('7cd2b5a8723814be27fe6b224cc76e52275b1ff149de157ce374d290d032e875', 'opentimestamps/core/__init__.py'),
                                         ('d41fb0337e687b26f3f5dd61d10ec5080ff0bdc32f90f2022f7e2d9eeba91442', 'README')):
            stamp = stamper[path]
            actual_hexdigest = b2x(stamp.file_digest)
            self.assertEqual(expected_hexdigest, actual_hexdigest)
        # An empty blob must hash to SHA256 of the empty string.
        stamper = self.make_stamper("30f6c357d578e0921dc6fffd67e2af1ce1ca0ff2")
        empty_stamp = stamper["empty"]
        self.assertEqual(empty_stamp.file_digest, bytes.fromhex("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"))
    def test_empty_tree(self):
        """Git tree with a single empty file"""
        stamper = self.make_stamper("30f6c357d578e0921dc6fffd67e2af1ce1ca0ff2")
        # There's a single empty file in this directory. Thus the nonce_key is:
        nonce_key = OpSHA256()(OpSHA256()(b'') + # one empty file
                               b'\x01\x89\x08\x0c\xfb\xd0\xe8\x08') # tag
        nonce1 = OpSHA256()(OpSHA256()(b'') + nonce_key)
        assert nonce1[0] & 0b1 == 1
        nonce2 = OpSHA256()(nonce1)
        # With a single file the tree digest degenerates to the file's hash.
        self.assertEqual(stamper.timestamp.msg,
                         OpSHA256()(b''))
        self.assertEqual(stamper.timestamp.msg, b"\xe3\xb0\xc4B\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99o\xb9$'\xaeA\xe4d\x9b\x93L\xa4\x95\x99\x1bxR\xb8U")
    def test_two_file_tree(self):
        """Git tree with a two files"""
        stamper = self.make_stamper("78eb5cdc1ec638be72d6fb7a38c4d24f2be5d081")
        # The nonce key commits to every file digest in the tree plus a tag.
        nonce_key = OpSHA256()(OpSHA256()(b'a\n') +
                               OpSHA256()(b'b\n') +
                               b'\x01\x89\x08\x0c\xfb\xd0\xe8\x08') # tag
        n_a_nonce1 = OpSHA256()(OpSHA256()(b'a\n') + nonce_key)
        assert n_a_nonce1[0] & 0b1 == 0
        n_a_nonce2 = OpSHA256()(n_a_nonce1)
        n_a = OpSHA256()(OpSHA256()(b'a\n') + n_a_nonce2)
        n_b_nonce1 = OpSHA256()(OpSHA256()(b'b\n') + nonce_key)
        assert n_b_nonce1[0] & 0b1 == 0
        n_b_nonce2 = OpSHA256()(n_b_nonce1)
        n_b = OpSHA256()(OpSHA256()(b'b\n') + n_b_nonce2)
        # The tree digest is the hash of the concatenated per-file digests.
        self.assertEqual(stamper.timestamp.msg,
                         OpSHA256()(n_a + n_b))
        self.assertEqual(stamper.timestamp.msg, b's\x0e\xc2h\xd4\xb3\xa5\xd4\xe6\x0e\xe9\xb2t\x89@\x95\xc8c_F3\x81a=\xc2\xd4qy\xaf\x8e\xa0\x87')
    def test_tree_with_children(self):
        """Git tree with child trees"""
        stamper = self.make_stamper("b22192fffb9aad27eb57986e7fe89f8047340346")
        # These correspond to the final values from the test_empty_tree() and
        # test_two_file_tree() test cases above; git git commit we're testing
        # has the trees associated with those test cases in the one/ and two/
        # directories respectively.
        d_one = b"\xe3\xb0\xc4B\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99o\xb9$'\xaeA\xe4d\x9b\x93L\xa4\x95\x99\x1bxR\xb8U"
        d_two = b's\x0e\xc2h\xd4\xb3\xa5\xd4\xe6\x0e\xe9\xb2t\x89@\x95\xc8c_F3\x81a=\xc2\xd4qy\xaf\x8e\xa0\x87'
        nonce_key = OpSHA256()(d_one + d_two +
                               b'\x01\x89\x08\x0c\xfb\xd0\xe8\x08') # tag
        n_one_nonce1 = OpSHA256()(d_one + nonce_key)
        assert n_one_nonce1[0] & 0b1 == 0
        n_one_nonce2 = OpSHA256()(n_one_nonce1)
        n_one = OpSHA256()(d_one + n_one_nonce2)
        n_two_nonce1 = OpSHA256()(d_two + nonce_key)
        assert n_two_nonce1[0] & 0b1 == 0
        n_two_nonce2 = OpSHA256()(n_two_nonce1)
        n_two = OpSHA256()(d_two + n_two_nonce2)
        self.assertEqual(stamper.timestamp.msg,
                         OpSHA256()(n_one + n_two))
    def test_tree_with_prefix_matching_blob(self):
        """Git tree with prefix matching blob"""
        # Regression check: indexing by a path that shares a prefix with a
        # blob name must not raise.
        stamper = self.make_stamper("75736a2524c624c1a08a574938686f83de5a8a86")
        two_a_stamp = stamper['two/a']
    def test_submodule(self):
        """Git tree with submodule"""
        # A submodule contributes the SHA256 of its commit-id bytes.
        stamper = self.make_stamper("a3efe73f270866bc8d8f6ce01d22c02f14b21a1a")
        self.assertEqual(stamper.timestamp.msg,
                         OpSHA256()(bytes.fromhex('48b96efa66e2958e955a31a7d9b8f2ac8384b8b9')))
    def test_dangling_symlink(self):
        """Git tree with dangling symlink"""
        # A symlink is hashed by its target path, not the (missing) contents.
        stamper = self.make_stamper("a59620c107a67c4b6323e6e96aed9929d6a89618")
        self.assertEqual(stamper.timestamp.msg,
                         OpSHA256()(b'does-not-exist'))
    def test_huge_tree(self):
        """Really big git tree"""
        # would cause the OpSHA256 length limits to be exceeded if it were used
        # directly
        stamper = self.make_stamper("a52fe6e3d4b15057ff41df0509dd302bc5863c29")
        self.assertEqual(stamper.timestamp.msg,
                         b'\x1dW\x9c\xea\x94&`\xc2\xfb\xba \x19Q\x0f\xdb\xf0\x7f\x14\xe3\x14zb\t\xdb\xcf\xf93I\xe9h\xb9\x8d')
|
EIP will be exhibiting at Tramshed Tech, Cardiff on 18 – 19 September in attendance to Digital; a festival for innovators, industry, and investors.
The festival hosts presentations, talks, workshops and exhibits focusing on digital tech and innovation. EIP will have a stand in the exhibition area to talk to you about all your IP needs.
Register for this event here to secure your place.
|
import datetime
import json
from phoenix.oauth2 import oauth2_client_factory
from pyramid_celery import celery_app as app
from celery.utils.log import get_task_logger
LOGGER = get_task_logger(__name__)
def task_result(task_id):
    """Return the Celery ``AsyncResult`` handle for the given task id."""
    return app.AsyncResult(task_id)
def wait_secs(run_step=-1):
    """Return the polling delay (seconds) for the given retry step.

    Delays ramp from 2s up to 30s; steps beyond the schedule (and the
    default of -1) get the final, longest delay.
    """
    delays = (2, 2, 2, 2, 2, 5, 5, 5, 5, 5, 10, 10, 10, 10, 10, 20, 20, 20, 20, 20, 30)
    index = run_step if run_step < len(delays) else -1
    return delays[index]
def dump_json(obj):
    """Serialize `obj` to JSON, rendering dates/datetimes as ISO-8601 strings.

    Other non-serializable objects degrade to JSON ``null`` (the default
    handler returns None for them).
    """
    def _default(value):
        if isinstance(value, (datetime.datetime, datetime.date)):
            return value.isoformat()
        return None
    return json.dumps(obj, default=_default)
def save_log(job, error=None):
    """Append a progress or error entry to ``job['log']`` and emit it.

    Consecutive duplicate messages are not appended twice. Errors are
    logged at ERROR level, normal progress at INFO.
    """
    if error:
        log_msg = 'ERROR: {0.text} - code={0.code} - locator={0.locator}'.format(error)
    else:
        log_msg = '{0} {1:3d}%: {2}'.format(
            job.get('duration', 0),
            job.get('progress', 0),
            job.get('status_message', 'no message'))
    log = job.setdefault('log', [])
    # Skip the entry when it merely repeats the most recent message.
    if not log or log[-1] != log_msg:
        log.append(log_msg)
    if error:
        LOGGER.error(log_msg)
    else:
        LOGGER.info(log_msg)
def add_job(db, task_id, process_id, title=None, abstract=None,
            service_name=None, service=None, status_location=None,
            caption=None, userid=None,
            use_async=True):
    """Build a new job document, insert it into ``db.jobs`` and return it.

    Missing optional fields fall back to sensible defaults (anonymous user
    becomes 'guest', the title defaults to the process id, etc.).
    """
    job = {
        'identifier': task_id,
        'task_id': task_id,  # TODO: why not using as identifier?
        'userid': userid or 'guest',
        'service_name': service_name,  # wps service name (service identifier)
        'service': service or service_name,  # wps service title (url, service_name or service title)
        'process_id': process_id,  # process identifier
        'title': title or process_id,  # process title (identifier or title)
        'abstract': abstract or "No Summary",
        'status_location': status_location,
        'created': datetime.datetime.now(),
        'tags': ['dev', 'async' if use_async else 'sync'],
        'caption': caption,
        'status': "ProcessAccepted",
        'response': None,
        'request': None,
    }
    db.jobs.insert(job)
    return job
def get_access_token(userid):
    """Return a freshly refreshed OAuth2 access token for `userid`.

    Returns None when the refresh fails or yields no token.
    """
    registry = app.conf['PYRAMID_REGISTRY']
    client = oauth2_client_factory(registry)
    try:
        # refresh access token
        token = client.refresh_token(userid=userid)
    except Exception:
        return None
    return token['access_token'] if token else None
def wps_headers(userid):
    """Build HTTP headers for WPS requests on behalf of `userid`.

    Returns a dict with a Bearer ``Authorization`` header when an access
    token can be obtained, otherwise an empty dict.
    """
    headers = {}
    if userid:
        access_token = get_access_token(userid)
        if access_token:
            headers = {'Authorization': 'Bearer {}'.format(access_token)}
    # Lazy %-style args: the message is only built if DEBUG is enabled
    # (the original formatted it eagerly with str.format).
    LOGGER.debug('wps headers: %s', headers)
    return headers
|
When Lesley Kragt started work with a provider of domiciliary care she was shocked by the bizarre things care workers did to tick boxes. Why didn’t care workers listen to their clients? Why did they have a whole room just for files?
Using the Vanguard Method, Lesley helped leaders move from ‘Care by checklist’ to ‘Care by conversation’.
The organisation in the case study provides domiciliary care and supported living to adults with moderate to severe disabilities (including mental health). Lesley will demonstrate the practical application of the Vanguard Method (Check, Plan, Do) and the outcomes at an individual and organisational level. She will present the key challenges, insights and lessons learned.
This session was for anyone who wants to do the right thing for their clients and reduce costs, at the same time as keeping a good relationship with commissioners.
Lesley has a professional background in nursing and occupational psychology. She was introduced to systems thinking when she worked in local government as a Business Improvement Lead and saw first-hand how systems thinking could transform business. Before working in local government Lesley spent the majority of her working life in the 3rd sector and not-for-profit sectors. Lesley’s latest position in the 3rd sector was in a large national not-for-profit organisation where she was a Director for over 10 years. Her primary role was to secure funding and develop new programmes – constantly trying to do more for less. It is only now, with the knowledge she has of systems thinking and the Vanguard Method, that she realises there is a different economic paradigm that enables us to improve services at a reduced cost.
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AliasPathType(Model):
    """The type of the paths for alias.

    :param path: The path of an alias.
    :type path: str
    :param api_versions: The API versions.
    :type api_versions: list[str]
    """

    # Maps Python attribute names to their wire (JSON) keys and msrest type
    # strings, used by the msrest (de)serializer.
    _attribute_map = {
        'path': {'key': 'path', 'type': 'str'},
        'api_versions': {'key': 'apiVersions', 'type': '[str]'},
    }

    def __init__(self, path=None, api_versions=None):
        super(AliasPathType, self).__init__()
        self.path = path
        self.api_versions = api_versions
|
At RateDumpster, we strive to bring you the best service at the best price available. We have a wide selection of dumpster sizes in stock in Newport, PA, with flexible & timely pickup and delivery. Give us a call today for all your dumpster rental and roll off needs.
It takes time and attempt to take out the toilet, vanity, grab out the ground of your bathroom and grab the baseboards away from. Once you have done this you'll be able to throw the particles into the overnight Large Dumpster For Rent in Newport, PA you have soaking in your garden. Quickly chuck the ball debris absent and simply get rid of the mess after the dumpster is gone from the yard.
And so forget the selling price hikes along with disposal limits of visiting your regional eliminate. Now you can remove all of your residence garbage along with debris within a easy action without previously leaving home. Phone your local right now to schedule your current convenient Large Dumpster For Rent in Newport, PA, before spring.
Trash disposal has developed into serious problem right now. Growing people and growing standards of life has caused production of waste on the earth. The people and their top quality of living take presctiption constant increase, which in turn led to commencement of numerous projects similar to house reconstruction, green garden clean up, construction projects during the entire country. We have been ignoring severity of guarding the environment inside favor of wealth along with fame, nevertheless the raising considerations about the sustainability in the atmosphere has purchased a great difference in our methods in the recent times.
Does that appear like a spend to anybody? If you're going to need to haul away from your own spend and particles anyway, you might as well may well avoid the trouble! Before you decide to partner which has a Large Dumpster For Rent in Newport, PA company, it is important to check out while using customer support workforce if they are taking the kind of junk disposed by you, because throwing unaccepted trash to the dumpster will ask penalty within you. It is also crucial for you let the customer support workforce know the specs of your undertaking, so that they can recommend the right sort of dumpster that fits inside your needs.
Costs! Large Dumpster For Rent in Newport, PAs, destruction and a firms complete type of services really should be competitively priced, quite competitively priced! A qualified business controls and keep charges low the industry benefit that is certainly passed on to the customer. The company also need to make use of a web site by making it a place which you could have the majority of if not all of your questions answered.
About hearing the phrase prevention of air pollution your intelligence conjure up while using idea of mitigating your emission of fossil fuels generated from the travelling of motor vehicles. It's a good indication, but let me tell you, the term Pollution is not centered on the air air pollution and audio pollution that is certainly generated from motor vehicles. There are good deal many other variables contributing to air pollution of the atmosphere in the urban areas such as construction waste, environment friendly yard spend, industrial spend.
Many dependable Large Dumpster For Rent in Newport, PAs give a specialty assistance such as local community cleanup applications and other charitable organisation type projects. The spend management firms generally operate in collaboration while using organizer of these type of function and most of that time period, they offer reductions on their companies for the sake of the neighborhood. This type of local community services or even charity tends to make professional spend management firms stand out from others and make after that them the best Large Dumpster For Rent in Newport, PAs company that anybody can hire with regards to dumpster requirements.
Property owners with a area or among demo before them can help to conserve money by simply renting their own dumpster and also have it shipped straight to their internet site. Construction firms use Large Dumpster For Rent in Newport firms for almost each job -- they tell them how big is a throw off they need and then a nearby roll off or two is shipped and dropped off. Local is often cheaper and folks can go to several websites that may give them a bid on the dimension they need, together with playing an essential part to keep the demonstration materials organized and the building site uncluttered.
For starters, take into account the size. Typically, temporary dumpster rentals will come in 4 or 5 sizes...10 cubic property, 15 cubic property, 20 cubic property, 30 cubic property and Forty cubic yard canisters. While to be able to 10, 20, 35 and Forty yard dumpsters are typical, the 16 cubic yard canisters may be hard to get sometimes.
Hazardous items or items containing hazardous materials should not be discarded in a dumpster. These include hazardous compounds, used generator oil, offers & solvents, four tires, fluorescent equipment and lighting, lead acid solution or chargeable batteries, lp tanks, etcetera. Contact us should you be unsure of almost everything.
You have a number of Large Dumpster For Rent in Newports to pick from, including midsize dumsters with wheels. The dumpster with trolley wheels lets you shift it anywhere you want, permitting you to finish your cleanup venture in no time. For bigger Large Dumpster For Rent in Newports, you can put it within your chosen spot in your house. You can also choose next day supply to make the washing process fast and efficient. It is one wonderful asset to building dumpster because as soon as it is filled up, it will be readily available for pick up another day and another dumpster are going to be brought to your spot as a replacement.
One other question that you'll be supposed to consider is whether it is possible to get enough space for your amount of junk. Firstly, you have to realize that the volume of space that you're going to get in virtually any dumpster is going to be intensely dependent on the volume of trash you may have. Usually, the volume of space is usually calculated in yards. Look to find out if the respective firm has the capacity to manage your dimensions of trash. For a garage full of junk, any 10 to 15 property dumpster is going to be excellent.
Whenever you shift, you instantly discover just how much junk and clutter you would been retaining around in your house. There's no need to pack that garbage and transport it with you. Renting a dumpster when you move allows you to get rid of the garbage while you pack, and it provides the ease of obtaining all of the garbage get taken away away simultaneously.
Renting a dumpster just isn't so high priced that it is un-affordable to most people. They've some smaller sized personal employ dumpsters that do not can be expensive but they guaranteed are a convenience. These can become pretty handy for older people who are struggle to take the garbage off as fundamental as they used to but can afford to have their own particular dumpster.
Make sure you discover how long you can the hired dumpster. You should be capable to use it for one particular fee for at least two weeks. Find out you can rent that a little longer should the job is very large and you don't think the service provider will finish within that time.
Every house construction project entails a stack of garbage and boulders. You can enjoy obtain and cleanness during a house construction project by using a construction Large Dumpster For Rent in Newport, PA. Excellence as well as beautification does not have to appear like demolition and destruction during the process. Excellence in every construction process is quite possible only by using proper garbage removal with the home as well as premises.
The sort of rubbish that you will be throwing with will also determine the rental organization you use. In the event the material you desire to throw out is hazardous, you will need to make sure the organization can get rid of it appropriately.
There are variety of Large Dumpster For Rent in Newport, PA companies available in the market who can direct you towards this value. This is the most economical way of eliminating huge plenty of piled up garbage in the smallest time achievable. Dumpster rental companies propagate across the current market will be designed with wide verities associated with dumpsters to suit your unique requirements, however a throw off dumpster is a best option to get rid of the trash about community washing.
An ideal organization to rent from is one that offers a flat rental rate. The charge will include delivery, pick-up, disposal as well as taxes can also be included. Several companies likewise only have any four or five-day rental period, and all sorts of days past these times frame are generally an additional fee. Renting from your company which has a longer leasing period is more desirable.
Engage a contractor to find the remodeling accomplished. He or she can change your glass windows, baseboards, sheetrock, carpeting, vanities, pantry shelves, old wooden floors, etc. The dumpster will come in convenient when tossing all of the previous items away.
Are you reducing trees on your property or use a lot of previous car elements and junk laying around ones yard? You can easily pick up ones yard when you rent a next day dumpster. Enjoy having a brand new and clear yard when you finish up the effort. You will feel a whole lot superior knowing how stunning your lawn looks once again.
Now you know what size dumpster you will need when renovating any building, seek out wheels about the dumpster you need to rent and also a company it is possible to request skids to protect your entrance.
You may be accomplishing extensive work towards your bathroom as well as plan on ripping out the ceramic tile in your bathroom or knowing a person else take action. The person working on the project will be able to simply throw the previous tiles as well as old shattered tiles away safely to the dumpster.
Roll off dumpsters usually get to large shapes ranging from 10 lawn to 40 lawn and are cable television of handling huge plenty of trash at one go. This will help you stay away from acquiring multiple outings to dispose the particular waste cumulated on your neighborhood cleanup. This will likely not only preserve much of your time period but also build your trash disposal task economical. Dumping the particular bulky garbage into the dumpster isn't very difficult when you use a roll off dumpster for your garbage disposal venture because a throw off hired from the Large Dumpster For Rent in Newport, PA organizations is specially developed to make it a ideal fit for bulky garbage pickup. These special garbage bins are generally mounted on added wheels so that they can easily be scrolled on to the particular trucks ready to carry them to the landfills. They are encased with the two times doors one particular on the top then one at the bottom, the entranceway at the bottom will probably facilitate effortless dumping in the bulky garbage and home on the top is open for disposal in the light weight.
|
/*
* Copyright (c) 2014 Carl Burch
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import sys
import json
from io import StringIO

# Stash the modules this grading harness needs on a private holder object
# and then delete the module-level names, so that the untrusted user code
# executed later via exec() cannot reach sys/json/StringIO through the
# global namespace.
class MyMap():
    pass
_g = MyMap()
_g.sys = sys
_g.json = json
_g.StringIO = StringIO
# keep a handle on the real stdout; sys.stdout is redirected per test below
_g.stdout = _g.sys.stdout
del sys
del json
del StringIO
del MyMap
# parameters from database/user
def _g_compile(code, name, isUser=False):
    """Compile *code* for exec, reporting syntax errors as JSON.

    Returns the compiled code object on success.  On a SyntaxError, a
    result dictionary describing the error (verdict 0 for user code,
    -2 for template code) is written to the real stdout and the process
    exits immediately.
    """
    try:
        return compile(code, name, 'exec')
    except SyntaxError as err:
        report = {
            'ok': True,
            'verdict': 0 if isUser else -2,
            'file': name,
            'line': err.lineno,
            'offset': err.offset,
        }
        if isUser:
            report['message'] = str(err)
        else:
            report['message'] = 'Error in {0}: {1}'.format(name, str(err))
        _g.json.dump(report, _g.stdout)
        _g.sys.exit(0)
# Compile every code fragment injected by the template engine up front, so
# that a syntax error anywhere is reported (and the process exits) before
# any test iteration runs.  The {{...}} placeholders are substituted with
# real source text before this script is executed.
_g.userCode = _g_compile("""{{userCode}}""", 'usercode', True)
# Saving/restoring is only needed when the exercise declares variables that
# both the user code and the solution code touch.
_g.canSave = """{{usedVars}}""" != ''
if _g.canSave:
    # tiny assignment snippets that snapshot and restore the shared vars
    _g.saveCode = _g_compile("""_g.savedVars = {{usedVars}}""", 'vars')
    _g.restoreCode = _g_compile("""{{usedVars}} = _g.savedVars""", 'vars')
_g.preCode = _g_compile("""{{preCode}}""", 'precode')
_g.solutionCode = _g_compile("""{{solutionCode}}""", 'solution')
_g.postCode = _g_compile("""{{postCode}}""", 'postcode')
# remove the helper from the namespace visible to user code
del _g_compile
def _g_safeexec(code, name, isUser=False):
    """Execute compiled *code* in the shared global namespace.

    Returns None on success.  On any exception, returns a per-test result
    dictionary: verdict 1 (user error) when *isUser* is set, otherwise
    verdict -1 (template/infrastructure error) with *name* prefixed to the
    message.
    """
    try:
        exec(code, globals())
    except Exception as err:
        test = getattr(_g, 'testInput', '???')
        detail = '{0}: {1}'.format(type(err).__name__, str(err))
        if isUser:
            return {'verdict': 1, 'test': test, 'message': detail}
        return {'verdict': -1, 'test': test,
                'message': '{0}: {1}'.format(name, detail)}
    return None
# Aggregate verdict across all iterations: start at 10 and fold in the
# minimum per-test verdict (3 = correct, 2 = wrong output, 1 = user error,
# -1 = template error), so the worst result wins.
_g.allVerdict = 10
_g.numCorrect = 0;
_g.tests = []
# {{numIters}} is replaced by the template engine with the iteration count
for testIter in range({{numIters}}):
    # keep a private copy of the loop index; user code may rebind the
    # global `testIter`, and it is restored from this copy in `finally`
    _g.testIter = testIter
    _g.result = None
    try:
        # run the precode with stdout captured: whatever it prints becomes
        # the input text fed to both the user code and the solution
        _g.sys.stdout = _g.StringIO()
        _g.result = _g_safeexec(_g.preCode, 'precode')
        if _g.result is not None:
            continue
        _g.testInput = _g.sys.stdout.getvalue()
        # execute user code first (so it doesn't access solution variables)
        if _g.canSave:
            # snapshot shared variables before user code can change them
            _g.result = _g_safeexec(_g.saveCode, 'vars')
            if _g.result is not None:
                continue
        _g.sys.stdin = _g.StringIO(_g.testInput)
        _g.sys.stdout = _g.StringIO()
        _g.result = _g_safeexec(_g.userCode, 'usercode', True)
        if _g.result is not None:
            continue
        _g.result = _g_safeexec(_g.postCode, 'postcode')
        if _g.result is not None:
            continue
        _g.userOutput = _g.sys.stdout.getvalue()
        # now execute solution answer to determine desired output
        if _g.canSave:
            # restore the snapshot so the solution sees the original values
            _g.result = _g_safeexec(_g.restoreCode, 'vars')
            if _g.result is not None:
                continue
        _g.sys.stdin = _g.StringIO(_g.testInput)
        _g.sys.stdout = _g.StringIO()
        _g.result = _g_safeexec(_g.solutionCode, 'solution')
        if _g.result is not None:
            continue
        _g.result = _g_safeexec(_g.postCode, 'postcode')
        if _g.result is not None:
            continue
        _g.solutionOutput = _g.sys.stdout.getvalue()
        # compare outputs; on mismatch, build a human-readable hint about
        # the first differing line
        _g.thisMatch = _g.solutionOutput == _g.userOutput
        if not _g.thisMatch:
            solnLines = _g.solutionOutput.splitlines()
            userLines = _g.userOutput.splitlines()
            for i in range(min(len(solnLines), len(userLines))):
                if solnLines[i] != userLines[i]:
                    _g.mismatchError = ('First mismatch on line {0}'
                                        .format(i + 1))
                    break
            else:
                # no differing line in the common prefix: the outputs can
                # only differ in line count or in line-ending details
                if len(solnLines) > len(userLines):
                    _g.mismatchError = ('Output is missing lines at end')
                elif len(userLines) > len(solnLines):
                    _g.mismatchError = ('Output has extra lines at end')
                else:
                    # same lines, same count: a difference that is only in
                    # trailing newlines / line endings counts as a match
                    _g.thisMatch = True
            del solnLines, userLines
        if _g.thisMatch:
            _g.numCorrect += 1
            _g.result = { 'verdict': 3, 'test': _g.testInput,
                'result': _g.userOutput, 'solution': _g.solutionOutput }
        else:
            _g.result = { 'verdict': 2, 'test': _g.testInput,
                'result': _g.userOutput, 'solution': _g.solutionOutput,
                'message': _g.mismatchError }
    finally:
        # record the per-test result even when `continue` short-circuited
        # the body.  NOTE(review): if an exception ever escaped the body
        # without going through _g_safeexec, _g.result would still be None
        # here and the subscript below would raise -- confirm this cannot
        # happen in practice.
        _g.tests.append(_g.result)
        _g.allVerdict = min(_g.allVerdict, _g.result['verdict'])
        # restore the loop variable in case user code rebound it
        testIter = _g.testIter
# emit a single JSON report on the real stdout (per-test stdout above was
# redirected into StringIO buffers)
_g.json.dump({ 'ok': True, 'verdict': _g.allVerdict,
    'correct': _g.numCorrect, 'tests': _g.tests },
    _g.stdout)
|
IRONWOOD PUBLIC Safety Officer Matt Sterbenz demonstrates a water bottle that can be taken apart to reveal a secret compartment used to hide drugs at a community awareness presentation at Luther L. Wright Wednesday.
IRONWOOD - Members of the Ironwood Public Safety Department held a community awareness program Wednesday to bring attention to the potential problems of drugs and alcohol in the area, particularly relating to schools and young people.
Director Andrew DiGiorgio began by reminding the audience that the trends regarding drug and alcohol abuse by young people are constantly changing, and it can be difficult to stay on top of the information.
|
# -*- coding: utf-8 -*-
# Copyright(C) 2011 Julien Hebert
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from weboob.browser.pages import HTMLPage
from weboob.browser.filters.html import XPath, XPathNotFound
from weboob.browser.filters.standard import CleanText
from lxml.etree import Comment
class Article(object):
    """Container for a single scraped news article.

    Class-level attributes provide safe fallback defaults: the
    ``GenericNewsPage`` getters fall back to ``Article.<attr>`` (class
    attribute access) when a page element is missing.  The original class
    only defined ``author`` and ``title`` at class level, so the
    ``get_body`` fallback raised AttributeError; ``body``, ``url`` and
    ``date`` defaults are added here to make every fallback safe.
    """
    author = u''
    title = u''
    body = u''
    url = u''
    date = None

    def __init__(self, browser, _id):
        # :param browser: the browser instance that fetched the article
        # :param _id: unique identifier of the article
        self.browser = browser
        self.id = _id
        self.body = u''
        self.url = u''
        self.date = None
class GenericNewsPage(HTMLPage):
    """Generic page for a news article.

    Subclasses are expected to override the ``*_selector`` class
    attributes (XPath strings) and ``main_div`` before the getters below
    are used.
    """
    # NOTE(review): name-mangled and never referenced in this class;
    # presumably vestigial or used by a subclass -- verify.
    __element_body = NotImplementedError
    # class providing fallback default values for the getters below
    __article = Article
    # placeholders that concrete subclasses must override
    element_title_selector = NotImplementedError
    main_div = NotImplementedError
    element_body_selector = NotImplementedError
    element_author_selector = NotImplementedError
    # selector factory applied to the selector strings above
    _selector = XPath

    def on_load(self):
        # follow meta-refresh redirects first, then run the subclass hook
        self.handle_refresh()
        self.on_loaded()

    def on_loaded(self):
        # hook for subclasses; intentionally a no-op here
        pass

    def get_body(self):
        """Return the article body as cleaned text.

        Falls back to the class-level default on ``Article`` when no body
        element is found.  NOTE(review): this requires ``Article`` to
        define ``body`` at class level -- as written it only defines
        ``author``/``title``, so the fallback itself raises
        AttributeError; confirm and fix on the Article class.
        """
        try:
            return CleanText('.')(self.get_element_body())
        except (AttributeError):
            return self.__article.body

    def get_author(self):
        """Return the author as cleaned text, or the class default."""
        try:
            return CleanText('.')(self.get_element_author())
        except (AttributeError):
            return self.__article.author

    def get_title(self):
        """Return the article title.

        On failure, retry once with the generic ``h1`` selector before
        giving up; the recursion terminates because the selector is then
        already ``'h1'``.
        """
        try:
            return CleanText(self._selector(self.element_title_selector))(self.main_div)
        except AttributeError:
            if self.main_div is None:
                raise XPathNotFound("main_div is none on %s" % (self.browser))
            elif self.element_title_selector != 'h1':
                self.element_title_selector = 'h1'
                return self.get_title()
            else:
                raise AttributeError("no title on %s" % (self.browser))

    def get_element_body(self):
        # first element matched by the body selector inside main_div
        try:
            return self._selector(self.element_body_selector)(self.main_div)[0]
        except (AttributeError, IndexError):
            if self.main_div is None:
                raise XPathNotFound("main_div is none on %s" % (self.browser))
            else:
                raise AttributeError("no body on %s" % (self.browser))

    def get_element_author(self):
        # first element matched by the author selector inside main_div
        try:
            return self._selector(self.element_author_selector)(self.main_div)[0]
        except IndexError:
            if self.main_div is None:
                raise XPathNotFound("main_div is none on %s" % (self.browser))
            else:
                raise AttributeError("no author on %s" % (self.browser))

    def get_article(self, _id):
        """Build and return an ``Article`` populated from this page."""
        __article = Article(self.browser, _id)
        __article.author = self.get_author()
        __article.title = self.get_title()
        __article.url = self.url
        __article.body = self.get_body()
        return __article

    def drop_comments(self, base_element):
        """Remove every HTML comment node below *base_element*.

        NOTE(review): lxml's ``getiterator()`` is deprecated in favour of
        ``iter()`` -- confirm the lxml version in use still provides it.
        """
        for comment in base_element.getiterator(Comment):
            comment.drop_tree()

    def try_remove(self, base_element, selector):
        """Remove all matches of *selector*, ignoring detached elements."""
        for el in self._selector(selector)(base_element):
            try:
                el.getparent().remove(el)
            except (AttributeError, ValueError):
                continue

    def remove_from_selector_list(self, base_element, selector_list):
        # NOTE(review): the selector call returns a *list* of elements,
        # but lxml's Element.remove() expects a single child element --
        # this looks broken; confirm against callers
        # (try_remove_from_selector_list is probably the intended API).
        for selector in selector_list:
            base_element.remove(self._selector(selector)(base_element))

    def try_remove_from_selector_list(self, base_element, selector_list):
        """Best-effort removal of every selector in *selector_list*."""
        for selector in selector_list:
            self.try_remove(base_element, selector)

    def try_drop_tree(self, base_element, selector):
        """``drop_tree()`` every element matched by *selector*."""
        for el in self._selector(selector)(base_element):
            el.drop_tree()

    @staticmethod
    def clean_relativ_urls(base_element, domain):
        """Prefix *domain* onto relative href/src attributes in place.

        NOTE(review): the ``[0:7] != "https://"`` comparison takes only 7
        characters against an 8-character literal and is therefore always
        true, so ``https://`` links are wrongly rewritten -- confirm and
        fix.  Also, unlike the <a> loop, the <img> loop has no "src"
        presence check, so an <img> without src raises KeyError.
        """
        for a in base_element.findall('.//a'):
            if "href" in a.attrib:
                if a.attrib["href"] and a.attrib["href"][0:7] != "http://" and a.attrib["href"][0:7] != "https://":
                    a.attrib["href"] = domain + a.attrib["href"]
        for img in base_element.findall('.//img'):
            if img.attrib["src"][0:7] != "http://" and img.attrib["src"][0:7] != "https://":
                img.attrib["src"] = domain + img.attrib["src"]
|
Well, what do we know about the ‘modern herbaceous border?’ You could say that the origins can be traced back to the borders of ‘old fashioned’ hardy herbaceous plants that made the country cottage gardens so attractive. Admittedly Gertrude Jekyll was an important proponent in the late nineteenth century with her plant groupings of particular colours but more about her after our lecture in February! If you ‘Google’ modern herbaceous border, you will find a bewildering 20+ pages of hundreds of pictures of beautiful borders with even more available at the click of a mouse’. Have a look ……………….
To find out more, we asked Aubrey Barker from Hopley’s nursery in Much Hadham to come and tell us about the phenomenon.
Aubrey started by describing Arley Hall in Cheshire which boasts the first ever (twin) herbaceous borders – planted in the 1840s. With a nod to Gertrude’s contribution in the 1880’s Aubrey reminded us of Alan Bloom’s significant contribution to garden design in the twentieth century – his island beds at Bressingham were a revolution in their day because unlike in a traditional herbaceous border, the plants can be seen from all directions. More recently Piet Oudolf has introduced grasses to the herbaceous border – Trentham has acres of them and Pensthorpe is renowned for its prairie planting of grasses and perennials. Many members will have seen several of these important gardens on day visits or during weekend breaks with the Club.
Where next for the herbaceous border?
Aubrey then started showing us countless lovely pictures of plants for herbaceous borders, ranging alphabetically, starting with the achillea ptarmica (pearl white), alcea rosea the pinkish hollyhock grown at Powys castle, Aruncus dioicus – the goats beard beloved by flower arrangers – growing 2m high but with tiny creamy white flowers, Campanula ‘octopus’ with its long, nodding, rose-pink flowers in the shape of Japanese lanterns on upright stems in summer through to Gaura ‘Rosy Jane’ – beautiful with grasses – its tiny flowers looking like stars etc. etc……………….
We stopped at the letter ‘i’ as it was tea time !
Hope to see you at our Splendid Christmas Buffet on 7 December!
|
from runtests.mpi import MPITest
from nbodykit.lab import *
from nbodykit import setup_logging
from nbodykit.transform import ConstantArray
from numpy.testing import assert_allclose
import pytest
# enable nbodykit's verbose (debug-level) logging for the whole test module
setup_logging("debug")
@MPITest([1, 4])
def test_sky_to_cartesian(comm):
    """SkyToCartesian accepts dask and pre-computed inputs; the misspelled
    alias ``SkyToCartesion`` still works but warns about removal."""
    cosmo = cosmology.Planck15

    # random (ra, dec, z) sample covering a realistic survey footprint
    cat = RandomCatalog(csize=100, seed=42, comm=comm)
    cat['z'] = cat.rng.normal(loc=0.5, scale=0.1)
    cat['ra'] = cat.rng.uniform(low=110, high=260)
    cat['dec'] = cat.rng.uniform(low=-3.6, high=60)

    # build the Cartesian position column
    cat['Position1'] = transform.SkyToCartesian(cat['ra'], cat['dec'], cat['z'], cosmo)

    # the historical misspelling must emit a deprecation warning
    with pytest.warns(FutureWarning):
        cat['Position0'] = transform.SkyToCartesion(cat['ra'], cat['dec'], cat['z'], cosmo)

    # a pre-computed (numpy) coordinate array is accepted as well
    cat['Position1'] = transform.SkyToCartesian(cat['ra'].compute(), cat['dec'], cat['z'], cosmo)
@MPITest([1, 4])
def test_cartesian_to_equatorial(comm):
    """CartesianToEquatorial returns angles in the valid RA/DEC ranges in
    both the default and galactic frames."""
    cat = UniformCatalog(nbar=10000, BoxSize=1.0, comm=comm)

    # default frame first, then the galactic frame
    for extra in ({}, {'frame': 'galactic'}):
        ra, dec = transform.CartesianToEquatorial(
            cat['Position'], observer=[0.5, 0.5, 0.5], **extra)

        # RA must lie in [0, 360) and DEC in [-90, 90)
        assert ((ra >= 0.) & (ra < 360.)).all().compute()
        assert ((dec >= -90) & (dec < 90.)).all().compute()
@MPITest([1, 4])
def test_cartesian_to_sky(comm):
    """CartesianToSky and SkyToCartesian round-trip to the original
    positions; numpy (non-dask) input is also accepted."""
    cosmo = cosmology.Planck15
    cat = UniformCatalog(nbar=10000, BoxSize=1.0, seed=42, comm=comm)

    # forward transform: positions -> (RA, DEC, z)
    ra, dec, z = transform.CartesianToSky(cat['Position'], cosmo, observer=[0.5, 0.5, 0.5])

    # inverse transform must recover the input positions
    roundtrip = transform.SkyToCartesian(ra, dec, z, cosmo, observer=[0.5, 0.5, 0.5])
    assert_allclose(cat['Position'], roundtrip, rtol=1e-5, atol=1e-7)

    # a pre-computed (numpy) array is a valid input too
    _ = transform.CartesianToSky(cat['Position'].compute(), cosmo)
@MPITest([1, 4])
def test_cartesian_to_sky_galactic(comm):
    """Galactic-frame sky coordinates agree between dask and numpy inputs
    and round-trip back to the Cartesian positions."""
    cosmo = cosmology.Planck15
    cat = UniformCatalog(nbar=10000, BoxSize=1.0, seed=42, comm=comm)

    # the same answer for dask input and pre-computed numpy input
    ra, dec, z = transform.CartesianToSky(cat['Position'], cosmo, frame='galactic')
    ra1, dec1, z1 = transform.CartesianToSky(cat['Position'].compute(), cosmo, frame='galactic')
    for lazy, eager in ((ra, ra1), (dec, dec1), (z, z1)):
        assert_allclose(lazy, eager)

    # the inverse transform recovers the original positions
    roundtrip = transform.SkyToCartesian(ra, dec, z, cosmo, frame='galactic')
    numpy.testing.assert_allclose(cat['Position'], roundtrip, rtol=1e-5)
@MPITest([1, 4])
def test_cartesian_to_sky_velocity(comm):
    """The velocity term only perturbs redshifts slightly, and a too-small
    ``zmax`` raises ValueError (possibly only at compute time)."""
    cosmo = cosmology.Planck15
    cat = UniformCatalog(nbar=1e-5, BoxSize=1380., seed=42, comm=comm)

    # redshift without peculiar velocities...
    _, _, z_real = transform.CartesianToSky(cat['Position'], cosmo,
                                            observer=[-1e3, -1e3, -1e3])
    # ...and with them (redshift space); both must agree to ~0.1%
    _, _, z_redshift = transform.CartesianToSky(cat['Position'], cosmo,
                                                velocity=cat['Velocity'],
                                                observer=[-1e3, -1e3, -1e3])
    numpy.testing.assert_allclose(z_real, z_redshift, rtol=1e-3)

    # zmax below the catalog redshifts must fail; the error may only be
    # raised lazily, so force evaluation inside the pytest.raises block
    with pytest.raises(ValueError):
        _, _, z = transform.CartesianToSky(cat['Position'], cosmo, observer=[-1e4, -1e4, -1e4], zmax=0.5)
        z = z.compute()
@MPITest([1, 4])
def test_stack_columns(comm):
    """StackColumns combines three scalar columns into an (N, 3) column
    matching a manual vstack of the computed data."""
    cat = RandomCatalog(csize=100, seed=42, comm=comm)

    # three independent scalar coordinate columns
    for axis in 'xyz':
        cat[axis] = cat.rng.uniform(0, 2600.)

    cat['Position'] = transform.StackColumns(cat['x'], cat['y'], cat['z'])

    # stacking must reproduce a manual vstack of the computed columns
    x, y, z = cat.compute(cat['x'], cat['y'], cat['z'])
    expected = numpy.vstack([x, y, z]).T
    numpy.testing.assert_array_equal(expected, cat['Position'])

    # pre-computed numpy columns are also accepted by StackColumns
    cat['Position'] = transform.StackColumns(x, y, z)
@MPITest([1, 4])
def test_halofuncs(comm):
    """Halo property transforms evaluate cleanly for both a per-object
    redshift column and a scalar redshift."""
    from nbodykit.cosmology import Planck15

    # random halo masses and redshifts
    cat = RandomCatalog(csize=300000, seed=42, comm=comm)
    cat['mass'] = cat.rng.uniform() * 1e13
    cat['z'] = cat.rng.uniform()

    halo_funcs = (transform.HaloRadius,
                  transform.HaloConcentration,
                  transform.HaloVelocityDispersion)

    # per-object redshift column
    for func in halo_funcs:
        func(cat['mass'], redshift=cat['z'], cosmo=Planck15).compute()

    # a single scalar redshift
    for func in halo_funcs:
        func(cat['mass'], redshift=0, cosmo=Planck15).compute()
@MPITest([1, 4])
def test_combine(comm):
    """ConcatenateSources stacks catalogs along the length axis, optionally
    restricted to a subset of columns, and rejects unknown columns."""
    first = UniformCatalog(3e-6, 2600, comm=comm)
    second = UniformCatalog(3e-6, 2600, comm=comm)

    # all columns: the column set is preserved and the sizes add up
    combined = transform.ConcatenateSources(first, second)
    assert combined.size == first.size + second.size
    assert set(combined.columns) == set(first.columns)

    # a single column: the data must be concatenated in order
    combined = transform.ConcatenateSources(first, second, columns='Position')
    expected = numpy.concatenate([numpy.array(first['Position']),
                                  numpy.array(second['Position'])], axis=0)
    numpy.testing.assert_array_equal(expected, combined['Position'])

    # unknown column names are rejected
    with pytest.raises(ValueError):
        combined = transform.ConcatenateSources(first, second, columns='InvalidColumn')
@MPITest([1])
def test_constarray(comm):
    """ConstantArray broadcasts a scalar or vector value to a given length."""
    # scalar value, length 1 -> shape (1,)
    arr = ConstantArray(1.0, 1, chunks=1000)
    assert len(arr) == 1
    assert arr.shape == (1,)

    # vector values gain a leading length axis
    assert ConstantArray([1.0, 1.0], 1, chunks=1000).shape == (1, 2)
    assert ConstantArray([1.0, 1.0], 3, chunks=1000).shape == (3, 2)
@MPITest([1, 4])
def test_vector_projection(comm):
    """VectorProjection depends only on the *direction* of the target
    vector: projecting onto a rescaled (even negated) axis vector equals
    the plain component-wise projection, and projecting onto the diagonal
    gives the mean of the components along each axis.

    (The unused ``cosmo = cosmology.Planck15`` local from the original
    test has been removed; VectorProjection takes no cosmology.)
    """
    cat = UniformCatalog(nbar=1e-5, BoxSize=1380., seed=42, comm=comm)
    pos = cat['Position']

    # unit axes and their (-2)-rescaled counterparts project identically
    for scale in (1, -2):
        x = transform.VectorProjection(pos, [scale, 0, 0])
        y = transform.VectorProjection(pos, [0, scale, 0])
        z = transform.VectorProjection(pos, [0, 0, scale])
        d = transform.VectorProjection(pos, [scale, scale, scale])

        numpy.testing.assert_allclose(x, pos * [1, 0, 0], rtol=1e-3)
        numpy.testing.assert_allclose(y, pos * [0, 1, 0], rtol=1e-3)
        numpy.testing.assert_allclose(z, pos * [0, 0, 1], rtol=1e-3)
        # diagonal projection: each component equals mean of the position
        numpy.testing.assert_allclose(d[:, 0], pos.sum(axis=-1) / 3., rtol=1e-3)
|
Coconino County will host the Colorado Plateau Economic Development Symposium on March 7 - 8, 2019 at the Courtyard by Marriott in Page, Arizona. This event brings together more than 200 business professionals affiliated with local, tribal, national businesses and community members. This year’s theme is Engage Locally, Empower Regionally, Expand Globally.
The first Regional Economic Outlook Conference was held in 2016 when the owners announced the closure of the Navajo Generating Station. At that time, the focus was on identifying the needs of the regional economy including these key questions: where we are now and where we would like to be?
This symposium is an opportunity for businesses in the region to find collaborative solutions to establish financial resiliency. Participants are encouraged to bring ideas and energy to this highly anticipated event. Leaders will network, share ideas and take actionable steps to strengthen the economy and build a shared vision for our communities.
The shutdown of the Navajo Generating Station is imminent, so we must work together to keep our families whole and our communities strong.
As communities and regions across the country address the impacts caused by the downturn in the coal industry, places that are actively pursuing economic diversification opportunities have become better positioned to take on the challenges that lie ahead. During this session, learn how rural coal-reliant communities in the West and in Appalachia are setting a new course in a variety of ways, including through workforce development and retraining, entrepreneurship, outdoor recreation, cultural heritage, renewable energy, and more. The presenters – Jack Morgan from the National Association of Counties and Brett Schwartz from the NADO Research Foundation – have spent the past three years visiting and collaborating with coal-reliant counties and regions as they work to build stronger economies by tapping into local assets and strengths. While every place is unique with its own set of challenges and opportunities, these case studies can serve as inspiration for communities across the Colorado Plateau as they explore ways to create a more prosperous future.
Gain perspective on the current business outlook including the strengths and opportunities for new businesses in the region and prospects for future development.
Explore tourism data in Northern Arizona and how the Arizona Office of Tourism markets the region. Learn how Utah is assisting small and rural local governments develop the amenities, infrastructure, and leadership necessary to become welcoming destinations for visitors.
Gain an overview of the economic landscape of the Colorado Plateau including how a nonprofit is assisting small businesses. Learn about how visitors are flowing throughout the region to understand current initiatives that are underway including regional visitor attraction. Engage in a discussion of future efforts to facilitate broadband development within the region as critical infrastructure to all economic development initiatives.
Learn how the region is characterized in over 20 studies to set the stage for future opportunities through a call to action. Hear from local leaders on how collaboration across boundaries and borders is essential to the economic health of the region, while hearing their visions for how future collaboration can occur to build opportunities across the plateau.
Overlooking Arizona's breathtaking Glen Canyon National Recreation Area and the pristine waters of Lake Powell, the Courtyard Page, AZ at Lake Powell hotel is the perfect choice for business travel or a family vacation to the area. Relax in our 153 deluxe Southwestern-style guest rooms near Lake Powell, featuring free HBO, complimentary wireless high-speed internet access, luxurious Marriott bedding and an ergonomic work space. Our Southwestern-themed hotel, surrounded by Lake Powell National Golf Course, is located within minutes of Glen Canyon Dam, Antelope Canyon, Wahweap and Antelope Point marinas. Take a day trip from our Glen Canyon hotel to experience the awe-inspiring sights of the Zion, Bryce Canyon, the Grand Canyon and the incredible mesas of Monument Valley. Whether traveling for business or leisure, the Courtyard Page, AZ Lake Powell has anticipated your every need.
2019 Sponsorship Packages are Available Now!
|
# coding: utf-8
#
# This file is part of Sequana software
#
# Copyright (c) 2016 - Sequana Development Team
#
# File author(s):
# Dimitri Desvillechabrol <dimitri.desvillechabrol@pasteur.fr>,
# <d.desvillechabrol@gmail.com>
#
# Distributed under the terms of the 3-clause BSD license.
# The full license is in the LICENSE file, distributed with this software.
#
# website: https://github.com/sequana/sequana
# documentation: http://sequana.readthedocs.io
#
##############################################################################
""" Utilities to create a Jquery DataTable for your HTML file.
.. autosummary::
DataTableFunction
DataTable
"""
from collections import OrderedDict
import colorlog
logger = colorlog.getLogger(__name__)
class DataTableFunction(object):
    """Class that contains Jquery DataTables function and options.

    Example:

    ::

        import pandas as pd
        from sequana.utils import DataTableFunction

        df = pandas.read_csv('data.csv')
        datatable_js = DataTableFunction(df, 'data')
        datatable_js.datatable_options = {'pageLength': 15,
                                          'dom': 'Bfrtip',
                                          'buttons': ['copy', 'csv']}

        js = datatable_js.create_javascript_function()
        html_datatables = [DataTable(df, "data_{0}".format(i), datatable_js)
                           for i, df in enumerate(df_list)]

    Here, the datatable_options dictionary is used to fine tune the appearance
    of the table.

    .. note:: DataTables add a number of elements around the table to control
        the table or show additional information about it. These are controlled
        by the order in the document (**DOM**) defined as a string made of
        letters, each of them having a precise meaning. The order of the
        letters is important. For instance if **B** is first, the buttons are
        put before the table. If **B** is at the end, it is shown below the
        table. Some of the valid letters and their meaning:

        - **B**: add the Buttons (copy/csv)
        - **i**: add *showing 1 to N of M entries*
        - **f**: add a search bar (**f** filtering)
        - **r**: processing display element
        - **t**: the table itself
        - **p**: pagination control

        Each option can be specified multiple times (with the exception of the
        table itself).

    .. note:: other useful options are:

        - pageLength: 15
        - scrollX: "true"
        - paging: 15
        - buttons: ['copy', 'csv']  (buttons can also be excel, pdf, print...)

    All options of datatable: https://datatables.net/reference/option/
    """

    def __init__(self, df, html_id, index=False):
        """.. rubric:: contructor

        :param df: data frame whose header defines the table columns.
        :param str html_id: the ID used in the HTML file.
        :param bool index: whether the dataframe index is rendered as a
            first, unnamed, column.
        """
        self.index = index
        self._html_id = html_id
        self._datatable_options = dict()
        self._datatable_columns = self._set_datatable_columns(df)

    @property
    def html_id(self):
        """Get the html_id, which cannot be set by the user after the
        instanciation of the class.
        """
        return self._html_id

    @property
    def datatable_options(self):
        """Get, set or delete the DataTable options. Setter takes a dict as
        parameter with the desired options and updates the current dictionary.

        Example::

            datatable = DataTableFunction("tab")
            datatable.datatable_options = {'dom': 'Bfrtip',
                                           'buttons': ['copy', 'csv']}

        source: https://datatables.net/reference/option/
        """
        return self._datatable_options

    @datatable_options.setter
    def datatable_options(self, d):
        # Buttons get an extra option so hidden columns are never exported.
        try:
            d['buttons'] = self._add_export_visible(d['buttons'])
        except KeyError:
            pass
        self._datatable_options.update(d)

    def _add_export_visible(self, buttons):
        """Add option to disable the exporting of hidden columns.

        ``buttons`` may be a list of dicts (updated in place) or a list of
        plain button names such as 'copy'/'csv' (converted to dicts).
        """
        try:
            for b in buttons:
                b.update({'exportOptions': {'columns': ':visible'}})
        except AttributeError:
            buttons = [{'extend': b, 'exportOptions': {'columns': ':visible'}}
                       for b in buttons]
        return buttons

    @datatable_options.deleter
    def datatable_options(self):
        self._datatable_options = dict()

    @property
    def datatable_columns(self):
        """Get datatable_columns dictionary. It is automatically set from the
        dataframe you want to plot.
        """
        return self._datatable_columns

    def _set_datatable_columns(self, df):
        """Return an ordered {column name: options} mapping built from the
        header of ``df``.  A Series has no columns, hence an empty dict.
        """
        from pandas import Series
        if isinstance(df, Series):
            return {}
        if self.index is True:
            # Empty name for the index column rendered first.
            columns = [""] + list(df.columns)
        else:
            columns = list(df.columns)
        column_dict = OrderedDict((name, dict()) for name in columns)
        return column_dict

    def create_javascript_function(self):
        """Return javascript to create the DataTable.

        The returned <script> block defines ``parseCsv_<html_id>`` which
        parses a CSV string with Papa Parse and feeds it to DataTables.
        """
        js_function = """
<script type="text/javascript">
    function parseCsv_{0}(csv, id) {{
        Papa.parse(csv, {{
            comments: '#',
            delimiter: ',',
            header: true,
            dynamicTyping: true,
            error: function(reason) {{
                console.log(reason);
            }},
            complete: function(results) {{
                {1}
            }}
        }});
    }};
</script>
"""
        return js_function.format(self.html_id,
                                  self._create_datatable_option())

    def _create_datatable_option(self):
        """Return DataTable options as a javascript snippet."""
        self.datatable_options['columns'] = self._create_columns_option()
        js = self._dict_to_string(self.datatable_options)
        js = "$(id).DataTable({{{0},data: results.data}});".format(js)
        return js

    def _create_columns_option(self):
        """Return string well formated with all columns options."""
        js = [self._coloption_2_str(key, value) for key, value in
              self.datatable_columns.items()]
        return '[{0}]'.format(',\n'.join(js))

    def _coloption_2_str(self, name, options):
        """Return one column descriptor, e.g. ``{data:'col', visible:false}``."""
        s = "data:'{0}'".format(name)
        if options:
            s = "{0},\n{1}".format(s, self._dict_to_string(options))
        return '{{{0}}}'.format(s)

    def _dict_to_string(self, d):
        """Convert dict to a javascript object-literal body.

        Example:

        ::

            dico = {'key1': value1, 'key2': value2, 'key3': value3}
            print(self._dict_to_string(dico))
            "key1:value1,key2:value2,key3:value3"
        """
        s = ['{0}:{1}'.format(key, self._check_type(value)) for key, value in
             d.items()]
        return ',\n'.join(s)

    def _check_type(self, value):
        """Check value type to fill javascript sections. Strings must be
        surrounded by quotes, unlike booleans, numbers and javascript
        expressions (functions, objects, arrays).
        """
        try:
            if not value.startswith(('true', 'false', 'function', '{', '[')):
                return "'{0}'".format(value)
        except AttributeError:
            # Not a string (int, bool, ...): emit as-is.
            return value
        return value

    def set_links_to_column(self, link_col, target_col, new_page=True):
        """Hide a column with urls and connect it with a column.

        :param str link_col: column with your URLs.
        :param str target_col: column to connect.
        :param bool new_page: open the link in a new tab/window when True.
        """
        # hide the link column
        try:
            self.datatable_columns[link_col]['visible'] = 'false'
        except KeyError:
            keys = self.datatable_columns.keys()
            # BUG FIX: the missing key here is link_col, not target_col.
            logger.warning(f"KeyError: Column name '{link_col}' does not exist. Use one of {keys}")
        # render function wrapping the target cell in an <a> to the link
        if new_page is True:
            fct = """function(data, type, row, meta){{
    return '<a href="'+row.{0}+'" target="_blank">'+data+'</a>';
}}
""".format(link_col)
        else:
            fct = """function(data, type, row, meta){{
    return '<a href="'+row.{0}+'">'+data+'</a>';
}}
""".format(link_col)
        try:
            self.datatable_columns[target_col]['render'] = fct
        except KeyError:
            logger.warning("KeyError: Column name '{0}' does not exist."
                           .format(target_col))

    def set_tooltips_to_column(self, tooltips_col, target_col):
        """Hide a column with tooltips and connect it with a column.

        :param str tooltips_col: column with your tooltips.
        :param str target_col: column to connect.
        """
        # hide tooltips
        try:
            self.datatable_columns[tooltips_col]['visible'] = 'false'
        except KeyError:
            # BUG FIX: the missing key here is tooltips_col, not target_col.
            logger.warning("KeyError: Column name '{0}' does not exist."
                           .format(tooltips_col))
        # render function showing the tooltip on hover of the target cell
        fct = """function(data, type, row, meta){{
    return '<a href="#" data-toggle="tooltip" title="'+row.{0}+'">'+data+'</a>';
}}
""".format(tooltips_col)
        try:
            self.datatable_columns[target_col]['render'] = fct
        except KeyError:
            logger.warning("KeyError: Column name '{0}' does not exist."
                           .format(target_col))
class DataTable(object):
    """An HTML table rendered through a shared Jquery DataTables function.

    You must add to your HTML file the JS function
    (:meth:`DataTable.create_javascript_function`) and the HTML code
    (:meth:`DataTable.create_datatable`).

    Example:

    ::

        df = pandas.read_csv('data.csv')
        datatable = DataTable(df, 'data')
        datatable.datatable.datatable_options = {'pageLength': 15,
                                                 'dom': 'Bfrtip',
                                                 'buttons': ['copy', 'csv']}
        js = datatable.create_javascript_function()
        html = datatable.create_datatable()

        # Second CSV file with same format
        df2 = pandas.read_csv('data2.csv')
        datatable2 = DataTable(df2, 'data2', datatable.datatable)
        html2 = datatable2.create_datatable()

    The reason to include the JS manually is that you may include many HTML
    tables but need to include the JS only once.
    """

    def __init__(self, df, html_id, datatable=None, index=False):
        """.. rubric:: contructor

        :param df: data frame.
        :param str html_id: the unique ID used in the HTML file.
        :param DataTableFunction datatable: javascript function to create the
            Jquery Datatables. If None, a :class:`DataTableFunction` is
            generated from the df.
        :param bool index: indicates whether the index dataframe should
            be included in the CSV table
        """
        self.index = index
        self._df = df
        self._html_id = html_id
        # Reuse the provided javascript helper, or build one from the frame.
        self.datatable = datatable if datatable else DataTableFunction(
            df, html_id, index=index)

    def __len__(self):
        return len(self._df)

    @property
    def df(self):
        """The underlying data frame (read-only)."""
        return self._df

    @property
    def html_id(self):
        """The unique HTML id of this table (read-only)."""
        return self._html_id

    def create_datatable(self, style="width:100%", **kwargs):
        """Return string well formated to include in a HTML page.

        :param str style: CSS option of your table.
        :param **dict kwargs: parameters of :meth:`pandas.DataFrame.to_csv`.
        """
        # Loader script: reads the hidden CSV <pre> block and hands it to
        # the shared parseCsv_<id> function generated by DataTableFunction.
        loader = """
<script type="text/javascript">
    $(document).ready(function() {{
        var {0} = document.getElementById('csv_{0}').innerText;
        parseCsv_{1}({0}, '#table_{0}');
        {0} = null;
    }});
</script>
""".format(self.html_id, self.datatable.html_id)
        return "".join([loader,
                        self._create_hidden_csv(**kwargs),
                        self._create_html_table(style)])

    def _create_hidden_csv(self, **kwargs):
        """Return the HTML code and the CSV code for your hidden CSV section.

        :param **dict kwargs: parameters of :meth:`pandas.DataFrame.to_csv`.
        """
        content = self._df.to_csv(index=self.index, **kwargs)
        hide_css = '<style>#csv_{0}{{display:none}}</style>'.format(self.html_id)
        pre_tag = '<pre id="csv_{0}">{1}</pre>'.format(self.html_id,
                                                       content.strip())
        return '{0}\n{1}\n'.format(hide_css, pre_tag)

    def _create_html_table(self, style):
        """Build the (initially empty) <table> element with its header row.

        :param str style: css option of your table.
        """
        attr = 'style="{0}"'.format(style) if style else style
        opening = ('<table id="table_{0}" class="display table text-center" {1}>'
                   .format(self.html_id, attr))
        cell = '<th>{0}</th>'
        headers = [cell.format(name) for name in self._df]
        if self.index is True:
            # Index column has no name; render an empty header cell first.
            headers.insert(0, cell.format(""))
        head_row = '<thead><tr>{0}</tr></thead>'.format("\n".join(headers))
        return """
{0}
{1}
</table>
""".format(opening, head_row)

    def create_javascript_function(self):
        """Generate the javascript function to create the DataTable in a HTML
        page (delegates to the underlying :class:`DataTableFunction`).
        """
        return self.datatable.create_javascript_function()
|
On January 2, 2019, Activision Blizzard, Inc. (the Company) appointed Dennis Durkin to be the Chief Financial Officer of the Company. Mr. Durkin, 48, joined the Company in March 2012 as Chief Financial Officer and served in that role until May 2017. He served as Chief Corporate Officer of the Company from May 2017 until January 1, 2019. In connection with this appointment, the Company and Mr. Durkin have agreed to the terms of employment summarized below. These terms will be memorialized in an instrument to be entered into between the Company and Mr. Durkin.
Mr. Durkin's term of employment as Chief Financial Officer began on January 2, 2019 and will continue until terminated by either the Company or Mr. Durkin upon specified prior notice. The terms of employment provide for: (a) a minimum annual base salary of $900,000, (b) eligibility to receive annual discretionary bonuses targeted at 150% of base salary, and (c) eligibility for $3 million of life insurance and other benefits generally available to Company executives.
Mr. Durkin will receive a cash sign-on bonus in an amount equal to 4.167 times his annual base salary. This amount must be repaid to the Company by Mr. Durkin if he is terminated for cause or resigns before January 2, 2020.
Mr. Durkin will also be granted, subject to the Compensation Committees approval, equity awards in the form of performance-vesting restricted share units (PSUs) having a target value at grant of 12.5 times his annual base salary (with maximum performance value to be determined). The PSUs will vest in March, 2020 and 2021, subject to the achievement of specified financial performance requirements relating to operating income and earnings per share metrics and generally subject to continuing employment on the vesting dates.
The foregoing summary does not purport to be complete. Readers are encouraged to review the final written instrument memorializing the terms, which the Company expects to file with the SEC with its Annual Report on Form 10-K for the year ending December 31, 2018.
Effective as of December 31, 2018, the Company terminated the employment of the Company's former Chief Financial Officer, Spencer Neumann, for cause after he violated his legal obligations to the Company. These violations were unrelated to the Company's financial reporting or disclosure controls and procedures. As previously reported, on December 31, 2018, the Company notified Mr. Neumann that the Company intended to terminate his employment as the Company's Chief Financial Officer for cause and that he had been placed on a paid leave of absence from the Company pending an opportunity for him to demonstrate why cause did not exist to terminate his employment or why termination of his employment was not otherwise justified. Subsequent to the delivery of this notice, Mr. Neumann purported to resign in breach of his legal obligations to the Company and was terminated for cause effective as of December 31, 2018.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-10 11:39
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
import django.db.models.deletion
def create_location_group_category(apps, schema_editor):
    """Data migration: create a default 'groups' category and attach every
    existing LocationGroup row to it.

    Uses the historical model versions supplied by ``apps`` (not the live
    models), as required inside Django migrations.
    """
    LocationGroupCategory = apps.get_model('mapdata', 'LocationGroupCategory')
    category = LocationGroupCategory.objects.create(name='groups', titles={
        'en': 'Location Groups',
        'de': 'Ortgruppen',
    })
    LocationGroup = apps.get_model('mapdata', 'LocationGroup')
    # Bulk-assign the new category to all existing groups in one UPDATE.
    LocationGroup.objects.update(category=category)
class Migration(migrations.Migration):
    """Introduce LocationGroupCategory and link LocationGroup to it.

    The FK is added as nullable first, populated by a data migration, then
    made non-nullable — the standard three-step pattern for adding a
    required foreign key to a populated table.
    """

    dependencies = [
        ('mapdata', '0018_auto_20170708_1752'),
    ]

    operations = [
        migrations.CreateModel(
            name='LocationGroupCategory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): mutable default ({}) on JSONField kept as
                # generated; applied migrations should not be rewritten.
                ('titles', c3nav.mapdata.fields.JSONField(default={})),
                ('name', models.SlugField(unique=True, verbose_name='Name')),
            ],
            options={
                'verbose_name': 'Location Group Category',
                'verbose_name_plural': 'Location Group Categories',
                'default_related_name': 'locationgroupcategories',
            },
        ),
        migrations.AlterModelOptions(
            name='locationslug',
            options={'verbose_name': 'Location with Slug', 'verbose_name_plural': 'Location with Slug'},
        ),
        # Step 1: add the FK as nullable so existing rows remain valid.
        migrations.AddField(
            model_name='locationgroup',
            name='category',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='groups', to='mapdata.LocationGroupCategory', verbose_name='Location Group Category'),
        ),
        # Step 2: populate the FK for all existing rows (no-op on reverse).
        migrations.RunPython(create_location_group_category, migrations.RunPython.noop),
        # Step 3: make the FK required now that every row has a category.
        migrations.AlterField(
            model_name='locationgroup',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='groups',
                                    to='mapdata.LocationGroupCategory', verbose_name='Location Group Category'),
        ),
    ]
|
On Tuesday, Heidi Klum hosted a holiday creativity party for the patients of Children's Hospital Los Angeles and gifted iPad Minis loaded with FiftyThree’s app, Paper, as well as their newest stylus tool, Pencil.
This event benefited the hospital’s Mark Taper Johnny Mercer Creative Arts Therapy Program, whose certified expressive arts therapists help sick, injured and hospitalized children and families build creative coping skills and utilize the arts for exploration and self expression.
Using Pencil, Heidi and illustrator Rachel Yonda taught the children in the Artists Program, which serves more than 24,000 patients annually, how to capture their ideas as sketches, notes, or drawings, and share them across the web.
Continuing the spirit of the season, Heidi and Babies R Us also gifted Truly Scrumptious by Heidi Klum pajamas to 60 toddler-aged patients of the Children’s Hospital Los Angeles.
|
#!/usr/bin/python
# coding=utf-8
#
# Copyright (C) 2012 Allis Tauri <allista@gmail.com>
#
# indicator_gddccontrol is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# indicator_gddccontrol is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Created on Oct 14, 2014
@author: Allis Tauri <allista@gmail.com>
"""
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
import os
from distutils.core import setup
def read(fname):
    """Return the full contents of *fname*, resolved relative to this file.

    The file handle is closed deterministically via a context manager
    (the previous version leaked the handle until garbage collection).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
def lines(fname):
    """Return the stripped lines of *fname*, resolved relative to this file.

    Uses a context manager so the file handle is closed promptly
    (the previous version leaked the handle until garbage collection).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return [l.strip() for l in f]
# BUG FIX: ``install_requires`` is a setuptools keyword; plain
# ``distutils.core.setup()`` silently ignores it, so the dependencies from
# requirements.txt were never declared on install. Prefer setuptools and
# fall back to distutils only when it is unavailable.
try:
    from setuptools import setup
except ImportError:  # pragma: no cover - very old environments only
    from distutils.core import setup

setup(name='GetIsolationSources',
      version='1.5.2',
      description='Retrieves isolation sources from NCBI given the set of sequences with '
                  'specified accession numbers. Both nucleotide and protein accessions are accepted.',
      long_description=read('README.md'),
      license='MIT',
      author='Allis Tauri',
      author_email='allista@gmail.com',
      url='https://github.com/allista/GetIsolationSources',
      keywords=['bioinformatics', 'ncbi', 'entrez'],
      classifiers=[
          'Development Status :: 4 - Beta',
          'Topic :: Scientific/Engineering :: Bio-Informatics',
          'Intended Audience :: Science/Research',
          'Operating System :: POSIX',
          'Programming Language :: Python'],
      packages=[],
      scripts=['get_isolation_sources'],
      install_requires=lines('requirements.txt'),
      )
|
Conventus was formed in 2008 by a team of medical device professionals and physicians with the sole purpose of expanding possibilities for patient care by creating less invasive solutions to a broad range of challenging periarticular fractures.
The site needed to reflect a very unique product and market.
The website is built as a responsive WordPress website and adjusts as the size of the screen adjusts. If you’re on a mobile device the text is easy to read and the layout is adapted for the screen size.
This web development and design meets the highest standards and ease of use. The company needed to showcase their product and process using mobile devices including tablets and mobile phones. This website easily adjusts to its screen size giving the reader the best viewing experience.
|
# -*- coding: utf-8 -*-
# *****************************************************************************
# Marche - A server control daemon
# Copyright (c) 2015-2016 by the authors, see LICENSE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Module authors:
# Georg Brandl <g.brandl@fz-juelich.de>
# Alexander Lenz <alexander.lenz@frm2.tum.de>
#
# *****************************************************************************
from __future__ import print_function
import os
import sys
import time
import signal
import logging
import argparse
from os import path
from marche import __version__
from marche.config import Config
from marche.utils import daemonize, setuser, write_pidfile, remove_pidfile, \
get_default_cfgdir
from marche.loggers import ColoredConsoleHandler, LogfileHandler
from marche.handler import JobHandler
from marche.auth import AuthHandler
from marche.colors import nocolor
# configure logging library: we don't need process/thread ids and callers
logging.logMultiprocessing = False
logging.logProcesses = False
logging.logThreads = False
# Disabling source-file lookup speeds up creation of every log record.
logging._srcfile = None  # pylint: disable=protected-access
class Daemon(object):
    """Main Marche daemon object.

    Parses the command line, loads the configuration, sets up logging and
    the configured interfaces, then waits until stopped by SIGTERM or
    keyboard interrupt.
    """

    def __init__(self):
        # Flag flipped by the SIGTERM handler to leave the wait() loop.
        self.stop = False
        self.log = logging.getLogger('marche')
        if os.name == 'nt':  # pragma: no cover
            # Windows consoles do not render ANSI color escapes.
            nocolor()

    def parse_args(self, args):
        """Parse command line *args* and return the argparse namespace."""
        rootdir = path.join(path.dirname(__file__), '..')
        if path.exists(path.join(rootdir, '.git')):
            # Running from a git checkout: prefer the in-tree etc/ directory.
            default_cfgdir = path.abspath(path.join(rootdir, 'etc'))
        else:  # pragma: no cover
            default_cfgdir = get_default_cfgdir()
        parser = argparse.ArgumentParser()
        parser.add_argument('--version', action='version',
                            version='Marche daemon version %s' % __version__)
        parser.add_argument('-c', dest='configdir', action='store',
                            default=default_cfgdir, help='configuration '
                            'directory (default %s)' % default_cfgdir)
        parser.add_argument('-d', dest='daemonize', action='store_true',
                            help='daemonize the process')
        parser.add_argument('-v', dest='verbose', action='store_true',
                            help='verbose (debug) output')
        return parser.parse_args(args)

    def apply_config(self):
        """Load configuration and apply global settings (user, logging).

        :return: True if the daemon can usefully start, False otherwise.
        """
        self.config = Config(self.args.configdir)
        if self.args.daemonize:  # pragma: no cover
            daemonize(self.config.user, self.config.group)
        else:
            setuser(self.config.user, self.config.group)
        self.log.setLevel(logging.DEBUG if self.args.verbose else logging.INFO)
        # Remove any previously installed handlers before adding ours.
        del self.log.handlers[:]
        if not self.args.daemonize:
            self.log.addHandler(ColoredConsoleHandler())
        try:
            self.log.addHandler(LogfileHandler(self.config.logdir, 'marche'))
        except Exception as err:  # pragma: no cover
            if self.args.daemonize:
                # No console handler in daemon mode: fall back to stderr.
                print('cannot open logfile:', err, file=sys.stderr)
            else:
                self.log.exception('cannot open logfile: %s', err)
            return False
        if not self.config.interfaces:
            self.log.error('no interfaces configured, the daemon will not do '
                           'anything useful!')
            return False
        if not self.config.job_config:
            self.log.error('no jobs configured, the daemon will not do '
                           'anything useful!')
            return False
        if not self.config.auth_config:
            # Not fatal, but worth a loud warning.
            self.log.warning('no authenticators configured, everyone will be '
                             'able to execute any action!')
        if self.args.daemonize:  # pragma: no cover
            write_pidfile(self.config.piddir)
        return True

    def run(self, args=None):
        """Run the daemon.

        :return: process exit code (0 on success, 1 on configuration error).
        """
        self.args = self.parse_args(args)
        if not self.apply_config():
            return 1
        self.log.info('Starting marche %s ...', __version__)

        jobhandler = JobHandler(self.config, self.log)
        authhandler = AuthHandler(self.config, self.log)

        # Import and start each configured interface; a failing interface is
        # logged and skipped so the others still come up.
        for interface in self.config.interfaces:
            try:
                mod = __import__('marche.iface.%s' % interface, {}, {},
                                 ['Interface'])
            except Exception as err:
                self.log.exception('could not import interface %r: %s',
                                   interface, err)
                continue
            self.log.info('starting interface: %s', interface)
            try:
                iface = mod.Interface(self.config, jobhandler, authhandler,
                                      self.log)
                if iface.needs_events:
                    jobhandler.add_interface(iface)
                iface.run()
            except Exception as err:
                self.log.exception('could not start interface %r: %s',
                                   interface, err)
                continue

        # SIGTERM stops the daemon, SIGUSR1 triggers a job reload.
        signal.signal(signal.SIGTERM, lambda *a: setattr(self, 'stop', True))
        signal.signal(signal.SIGUSR1, lambda *a: jobhandler.trigger_reload())

        self.log.info('startup successful')
        self.wait()
        jobhandler.shutdown()
        if self.args.daemonize:  # pragma: no cover
            remove_pidfile(self.config.piddir)
        return 0

    def wait(self):  # pragma: no cover
        """Sleep until self.stop is set (SIGTERM) or Ctrl-C is pressed."""
        try:
            while not self.stop:
                time.sleep(1)
        except KeyboardInterrupt:
            pass
|
PSP Go! – Sony’s Next Console?
We’ve heard the rumours and we’ve speculated to our hearts content, but the guys at 1UP reckon they can do better than that. According to their “sources directly involved with the new system,” Sony’s new PSP hardware will be unveiled at E3, will be called the PSP Go! and will apparently look something like the ‘artists impression’ below.
The portable console will, if 1UP’s sources can be relied upon, forego the UMD drive sported by every previous PSP in favour on internal flash memory. Initially two SKUs, an 8GB and a 16GB model, will be available. Games will be downloaded with some 100 older titles available come launch, answering the question of how they can be played with no UMD drive.
According to 1UP’s sources – and mock-up – the PSP GO!’s controls will slide out from beneath the screen. Despite some speculation, a second analogue stick won’t be appearing, keeping the controls consistent with the current PSP layout.
1UP seems pretty certain about the information supplied, so unless Sony is running a deliberate misinformation campaign, E3 seems set to be the event to attend this June.
|
from concurrent.futures import ThreadPoolExecutor #async to sync
from modules import variables
from modules import mainBot
from modules import fileIO
from modules import youtube
from modules import discordBot
#used for the main program
import threading
import sys, os
#discord stuff imported
import discord #gets the discord and asyncio librarys
import asyncio
##broken
#seems to not like my code in the discordCheckMsg function
##its the part where it sets the value to the delete code.
#this then causes the delete thread to crash trying to find the shift value to go by
##problems
#unsure what will happen in a headless enviroment if the oauth hasnt been set
##if the token and you input a invalid token the first time it will continue to say invalid token for tokens that are even valid
####variables
from modules import irc
#used as global varibles and were defined before we start using them to avoid problems down the road
##jadens shift code
#delete code is: 98reghwkjfgh8932guicdsb98r3280yioufsdgcgbf98
#delete code is: 98reghwkjfgh8932guicdsb98r3280yioufsdgcgbf98
def shift(value, messages):
    """Drop the first *value* entries of *messages* in place and return it.

    The same list object is mutated and returned, so callers holding a
    reference to *messages* observe the update (matches the original
    temp-list implementation, without the O(n) copying dance).

    :param int value: number of leading entries to discard (0 = no-op).
    :param list messages: list to shrink in place.
    :return: the same *messages* list object.
    """
    if value == 0:
        return messages
    # In-place slice deletion keeps the list identity intact.
    del messages[:value]
    return messages
def checkDeleteCode(messages):
    """Count the leading delete-code markers in *messages*.

    :param list messages: list of message strings.
    :return: index of the first entry that is not the delete code (equals
        len(messages) if every entry is the delete code).
    """
    i = 0
    # BUG FIX: bound the scan so an empty list or a list consisting only of
    # delete codes no longer raises IndexError.
    while i < len(messages) and \
            messages[i] == "98reghwkjfgh8932guicdsb98r3280yioufsdgcgbf98":
        i += 1
    return i
def deleteIrcToDiscordMsgThread():
    """Background worker meant to purge handled entries from discordMSG.

    NOTE(review): this thread is currently disabled (its start call is
    commented out at module level) and is known broken: the globals
    ``discordMSG``, ``haltDeleteMSG`` and ``haltDiscordMSG`` are never
    initialised in this module, and ``time`` is not imported here, so
    ``time.sleep(4)`` would raise NameError — confirm and fix both before
    re-enabling.
    """
    global discordMSG, haltDeleteMSG, haltDiscordMSG
    while True:
        #print(discordMSG)
        #print("{0} : {1}".format(haltDeleteMSG,haltDiscordMSG))
        if haltDeleteMSG == 0:
            # Signal the producer to pause while the delete pass would run.
            haltDiscordMSG = 1
            #shiftValue = checkDeleteCode(discordMSG)
            #discordMSG = shift(shiftValue, discordMSG)
            haltDiscordMSG = 0
            #print(discordMSG)
        time.sleep(4)
#this code is old and unnessisary at this minute must be rewritten.
# first run stuff
# def getToken(): #gets the token
# global config
# realToken = "false" #this is just for the while loop
# while realToken == "false":
# config["discordToken"] = input("Discord Token: ") #gets the user input
# try:
# client.run(config["discordToken"]) #atempts to run it and if it fails then execute the next bit of code if not then save it and go on
# except:
# print("Please enter a valid token")
# sys.exit(0) #this is a work around for the bug that causes the code not think the discord token is valid even tho it is after the first time of it being invalid
# else:
# realToken = "true"
# async def getFirstRunInfo():
# global config
# print('Logged in as') ##these things could be changed a little bit here
# print(client.user.name)
# print(client.user.id)
# while config["serverName"] == "":
# for server in client.servers: #this sifts through all the bots servers and gets the channel we want
# print(server.name)
# if input("If this is the server you want type yes if not hit enter: ") == "yes":
# config["serverName"] = server.name
# break
# while config["channelName"] == "":
# for server in client.servers: #this sifts through all the bots servers and gets the channel we want
# # should probly add a check in for the server in here im guessing
# # print(server.name)
# for channel in server.channels:
# if str(channel.type) == "text":
# print(channel.name)
# if input("If this is the channel you want type yes if not hit enter: ") == "yes":
# config["channelName"] #starts the discord bot= channel.name
# break
# while config["IRCToDiscordFormatting"] == "": #fills the youtube to discord formating
# config["IRCToDiscordFormatting"] = input("""Please enter the chat formatting for chat coming from irc to go to discord.
# {1} is the placeholder for the username
# {2} is the placeholder for the message
# Ex. "{0} : {1}: """)
# while config["discordToIRCformating"] == "": #fills the discord to youtube formating
# config["discordToIRCFormating"] = input("""Please enter the chat formatting for chat coming from discord to go to irc.
# {0} is the placeholder for the username
# {1} is the placeholder for the message
# Ex. "{0} : {1}": """)
# print("Configuration complete")
# fileSave("config-test.json",config) #saves the file
# print("Please run the command normally to run the bot")
# await client.close()
# if os.path.isfile("config-test.json") == False:#checks if the file exists and if it doesnt then we go to creating it
# print("Config missing. This may mean this is your first time setting this up")
# firstRun = "on"
# else:
# config = fileLoad("config-test.json") #if it exists try to load it
# if firstRun == "on":
# config = {"channelName": "", "pageToken": "", "serverName": "", "discordToken": "","discordToIRCFormating": "", "IRCToDiscordFormatting":""}
# getToken()
# Load the bot configuration once at startup; the other modules read it
# through the shared ``variables`` module.
variables.config = fileIO.fileLoad("config-test.json")

# NOTE(review): the delete thread (deleteIrcToDiscordMsgThread) is broken
# and therefore intentionally not started here.

print("test")

# Main chat-control loop runs in its own thread.
chatControlThread = threading.Thread(target=mainBot.mainBot().main)
chatControlThread.start()

# IRC watchdog thread which should print false if the irc thread dies.
ircCheckThread = threading.Thread(target=irc.ircCheck)
if variables.config["Bot"]["IRC"]["Enabled"]:
    ircCheckThread.start()
    print("IRC Loaded")
else:
    print("IRC not loaded")

# YouTube chat thread.
youtubeChatThread = threading.Thread(target=youtube.youtubeChatControl)
if variables.config["Bot"]["Youtube"]["Enabled"]:
    youtubeChatThread.start()
    print("Youtube Loaded")
else:
    print("Youtube not loaded")

# BUG FIX: the previous code passed ``discordBot.start(token)`` as the
# thread target, which *called* start() immediately on the main thread
# (blocking startup, even when Discord was disabled) and handed its return
# value to Thread. Pass the callable and its argument separately so the
# bot actually runs on the new thread, and only when enabled.
discordThread = threading.Thread(
    target=discordBot.start,
    args=(variables.config["Bot"]["Discord"]["Token"],),
)
if variables.config["Bot"]["Discord"]["Enabled"]:
    print("Discord Loaded")
    discordThread.start()
else:
    print("Discord not loaded")
|
The read-a-thon is almost drawing to an end, and so I thought I'd fit in one last update before my wrap-up post. So here are my updates from Days 7 to 10.
Again, here's a little visual representation of my progress so far.
As you can see, I've got six books left to finish. My original goal was to finish at least eight books, which I've now done. I'm hoping to at least finish Ender's Game and The Book Thief before the read-a-thon ends.
|
"""
Copyright 2017 Balwinder Sodhi
Licenced under MIT Licence as available here:
https://opensource.org/licenses/MIT
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Created on Mar 3, 2017
@author: Balwinder Sodhi
"""
import urllib
import urllib2
from common import *
from entities import *
class YouTubeUtils:
    """
    Utility class for handling youtube API calls.

    Methods issue HTTPS GET requests against the YouTube Data API v3;
    network failures are logged and produce empty/None results instead of
    propagating.
    """

    def __init__(self, dev_key):
        # Base URL for all YouTube Data API v3 resources.
        self.YT_URL = "https://www.googleapis.com/youtube/v3/"
        # API key sent with every request.
        self.DEVELOPER_KEY = dev_key

    def get_playlist_items(self, pl_id):
        """
        Fetches the youtube playlist items information via youtube API.

        :param pl_id: Playlist ID
        :return: List of (videoId, title) tuples for videos in the playlist.
        """
        videos = []
        try:
            data = dict()
            data['key'] = self.DEVELOPER_KEY
            data['playlistId'] = pl_id
            data['part'] = "contentDetails,snippet"
            # 50 is the API's maximum page size; no pagination is attempted.
            data['maxResults'] = 50
            data['fields'] = "items(snippet(title,description,resourceId/videoId))"
            qs = urllib.urlencode(data)
            url = "{}playlistItems?{}".format(self.YT_URL, qs)
            result = urllib2.urlopen(url)
            res_dict = json.loads(result.read())
            for item in res_dict.get("items", []):
                vid = item['snippet']['resourceId']['videoId']
                title = item['snippet']['title']
                videos.append((vid, title))
        except urllib2.URLError:
            logging.exception('Caught exception fetching url')
        return videos

    def get_playlist_info(self, pl_id):
        """
        Fetches the information such as title and description etc. of
        the given youtube playlist.

        :param pl_id: Playlist ID.
        :return: Tuple (title, description), or None when the playlist is
            not found or the request fails.
        """
        pl_info = None
        try:
            data = dict()
            data['key'] = self.DEVELOPER_KEY
            data['id'] = pl_id
            data['part'] = "snippet"
            data['fields'] = "items(snippet(title,description))"
            qs = urllib.urlencode(data)
            url = "{}playlists?{}".format(self.YT_URL, qs)
            result = urllib2.urlopen(url)
            res_dict = json.loads(result.read())
            # Expected max one item
            for item in res_dict.get("items", []):
                title = item['snippet']['title']
                desc = item['snippet']['description']
                pl_info = (title, desc)
        except urllib2.URLError:
            logging.exception('Caught exception fetching url')
        return pl_info

    def get_video_details(self, v_id):
        """
        Fetches details about a youtube video via youtube API.

        :param v_id: A comma separated list of video IDs.
        :return: List of video detail dictionaries with keys title,
            description, tags, itemId and url.
        """
        video_list = []
        try:
            data = dict()
            data['key'] = self.DEVELOPER_KEY
            data['id'] = v_id
            data['part'] = "snippet"
            data['fields'] = "items(id,snippet(title,description,tags))"
            qs = urllib.urlencode(data)
            url = "{}videos?{}".format(self.YT_URL, qs)
            logging.info(">>>>> YT URL = %s", url)
            result = urllib2.urlopen(url)
            res_dict = json.loads(result.read())
            for item in res_dict.get("items", []):
                video = dict()
                video["title"] = item["snippet"]["title"]
                video["description"] = item["snippet"]["description"]
                # "tags" is omitted by the API when a video has none.
                video["tags"] = ", ".join(item["snippet"].get("tags", []))
                # BUG FIX: v_id may be a comma separated list of IDs; use
                # this item's own id (explicitly requested via the "fields"
                # filter) for itemId/url, not the whole query string.
                video["itemId"] = item["id"]
                video["url"] = "http://youtu.be/%s" % item["id"]
                video_list.append(video)
        except urllib2.URLError:
            logging.exception('Caught exception fetching url')
        return video_list
class TrailHandler(BaseHandler):
    """
    Handler for trail related HTTP requests.

    A "trail" is an ordered collection of youtube videos (optionally built
    from a playlist) with associated assessments, view counts and comments.
    """
    def getTrailById(self, for_view=False):
        """
        Sends the trail identified by request param "trailId" as JSON.

        :param for_view: when True the trail's view counter is incremented
            and included in the response as "viewsCount".
        :return: None; writes a JSON response with the trail dict plus its
            "assessments" list.
        """
        t = TrailDto.get_by_id(long(self.request.params["trailId"]))
        td = t.to_dict_with_id('trailId')
        if for_view:
            # Lazily create the per-trail view counter on first view.
            tv_list = TrailViewsDto.query(TrailViewsDto.trail == t.key).fetch()
            if not tv_list:
                tv = TrailViewsDto(views=1, trail=t.key)
            else:
                tv = tv_list[0]
                tv.views += 1
            tv.put()
            td['viewsCount'] = tv.views
        td['assessments'] = []
        tas = TrailAssessmentDto.query(TrailAssessmentDto.trail == t.key).fetch()
        if tas:
            # Resolve the linked AssessmentDto entities in one IN query.
            a_list = AssessmentDto.query(
                AssessmentDto.key.IN([a.assess for a in tas])).fetch()
            td['assessments'] = [a.to_dict_with_id("assessId") for a in a_list]
        else:
            logging.info("No trail assessments found.")
        self.send_json_response(Const.STATUS_OK, td)

    # def addYTContent(self):
    #     f = json.loads(self.request.body)
    #     urls = f.get("url")
    #     vids = []
    #     if urls:
    #         yt = YouTubeUtils()
    #         for u in urls.split("\n"):
    #             c = VideoDto()
    #             v = yt.get_video_details(u.split("/")[-1])
    #             c.populate_from_dict(v)
    #             c.put()
    #             vids.append(c.to_dict_with_id("videoId"))
    #
    #     self.send_json_response(Const.STATUS_OK, "Added videos.")

    def addYTrail(self):
        """
        Creates a new trail from a youtube playlist or a list of video IDs.

        Request params:
            resource: playlist ID (when isPL is set) or comma separated
                video IDs.
            isPL: truthy when "resource" is a playlist ID.
            tags: tags for the new trail.
            title: optional explicit title (overrides the playlist title).
        :return: None; responds with the stored trail as JSON.
        """
        pd = self.request.params
        t = TrailDto(tags=pd['tags'])
        t.resources = []
        yt = YouTubeUtils(self.get_setting(Const.CFG_YOUTUBE_KEY))
        if 'isPL' in pd and pd['isPL']:
            pl_info = yt.get_playlist_info(pd['resource'])
            if not pl_info:
                raise ValueError("Playlist not found!")
            t.title = pl_info[0]
            p = yt.get_playlist_items(pd['resource'])
            for vid in p:
                # vid is a (videoId, title) tuple.
                c = VideoDto()
                # NOTE(review): description is set to the title; the
                # playlist fetch only returns (videoId, title) tuples.
                c.description = vid[1]
                c.title = vid[1]
                c.itemId = vid[0]
                c.url = "https://youtu.be/%s" % vid[0]
                t.videos.append(c)
        else:
            vid_list = yt.get_video_details(pd['resource'])
            for v in vid_list:
                c = VideoDto()
                c.populate_from_dict(v)
                t.videos.append(c)
        t.owner = self.get_current_user_key()
        if 'title' in pd:
            t.title = pd['title']
        t.put()
        self.send_json_response(Const.STATUS_OK, t.to_dict_with_id('trailId'))

    def saveContent(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def deleteContent(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def saveTrail(self):
        """
        Creates or updates a trail from the JSON request body.

        When the body carries a "trailId" the existing trail is loaded and
        updated; its trail/assessment links are replaced with the ones in
        the body's "assessments" list.
        :return: None; responds with the saved trail (including the
            submitted assessments) as JSON.
        """
        tf = json.loads(self.request.body)
        t = TrailDto()
        if 'trailId' in tf:
            t = TrailDto.get_by_id(int(tf['trailId']))
            logging.debug("Loaded trail from DB tid=%s", tf['trailId'])
        t.populate_from_dict(tf)
        t_key = t.put()
        # Clear old trail assessments
        ta_list = TrailAssessmentDto.query(TrailAssessmentDto.trail == t_key).fetch()
        if ta_list:
            ndb.delete_multi([x.key for x in ta_list])
        # Insert newly selected assessments for trail
        if 'assessments' in tf:
            for ta_dict in tf['assessments']:
                ta = TrailAssessmentDto()
                ta.trail = t_key
                ta.assess = ndb.Key(AssessmentDto, ta_dict['assessId'])
                ta.put()
        trl = t.to_dict_with_id("trailId")
        # NOTE(review): raises KeyError when "assessments" is absent from
        # the body (the insert above is guarded, this line is not) —
        # confirm clients always send the key.
        trl['assessments'] = tf['assessments']
        logging.debug("Saved trail to DB tid=%s", t_key.id())
        self.send_json_response(Const.STATUS_OK, trl)

    def deleteTrail(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def getTrailForView(self):
        """
        Sends the requested trail as JSON and increments its view counter.

        :return: None; delegates to getTrailById(for_view=True).
        """
        self.getTrailById(for_view=True)

    def searchComments(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def addSubs(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def getComments(self):
        """
        Sends all comments attached to the trail item given by request
        param "iid" as a JSON list.

        :return: None; responds with a list of comment dicts.
        """
        v_id = self.request.params["iid"]
        qry = CommentDto.query(CommentDto.trailItemId == str(v_id))
        cl = qry.fetch()
        self.send_json_response(Const.STATUS_OK, [c.to_dict_with_id("commentId") for c in cl])

    def deleteComment(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def saveComment(self):
        """
        Creates or updates a comment from the JSON request body.

        Updates (body contains "commentId") are only allowed for the
        comment's owner; new comments get the current user as owner and
        the user's full name as author.
        :return: None; responds with the saved comment as JSON.
        """
        frm = json.loads(self.request.body)
        if "commentId" in frm:
            c = CommentDto.get_by_id(long(frm["commentId"]))
            if c:
                if c.owner != self.get_current_user_key():
                    raise ValueError("Cannot save comment not owned by current user.")
                c.populate_from_dict(frm)
                c.put()
                result = c.to_dict_with_id("commentId")
            # NOTE(review): when no comment matches the given id, "result"
            # is never bound and the send below raises NameError — confirm
            # whether an explicit error response is intended here.
        else:
            c = CommentDto(owner=self.get_current_user_key())
            c.populate_from_dict(frm)
            u = self.get_current_user()
            c.author = "%s %s" % (u['firstName'], u['lastName'])
            c.put()
            result = c.to_dict_with_id("commentId")
        self.send_json_response(Const.STATUS_OK, result)

    def getSubsForUser(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def getContentsForUser(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")

    def search(self):
        """Not implemented; always responds with an error status."""
        self.send_json_response(Const.STATUS_ERROR, "Not supported.")
|
We accept cash, checks and all major credit cards. Comprehensive maternity care fees are paid monthly and are due in full by 34 weeks of pregnancy. Payment plans are agreed upon at your first prenatal appointment. Don’t forget about your HSA, or Health Savings Account: money can be withdrawn from it to pay for our services, and you have the added benefit of receiving tax advantages on your investment.
Most traditional indemnity health insurance companies, and several local HMOs that offer point-of-service (POS) plans, will cover our care based on your out-of-network coverage. We will work with you to determine if your insurance company covers our services.
|
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
import zmq
from shaker.engine import utils
LOG = logging.getLogger(__name__)
class MessageQueue(object):
    """ZeroMQ REP server used by the shaker engine to talk to agents.

    Iterating over an instance yields (message, reply_handler) pairs; the
    consumer must call reply_handler exactly once per message (REP socket
    contract) before the next message can be received.
    """

    def __init__(self, endpoint):
        # Only the port part of the endpoint is used; the socket binds on
        # all interfaces.
        _, port = utils.split_address(endpoint)
        context = zmq.Context()
        self.socket = context.socket(zmq.REP)
        self.socket.bind("tcp://*:%s" % port)
        LOG.info('Listening on *:%s', port)

    def __iter__(self):
        """Yields (message, reply_handler) pairs until closed.

        GeneratorExit (consumer closed the generator) ends the loop
        quietly; KeyboardInterrupt is logged as a plain interruption; any
        other exception is logged and re-raised.
        """
        try:
            while True:
                # Wait for next request from client
                message = self.socket.recv_json()
                LOG.debug('Received request: %s', message)

                # Closure over the socket: sends the REP for the message
                # that was just received.
                def reply_handler(reply_message):
                    self.socket.send_json(reply_message)
                    LOG.debug('Sent reply: %s', reply_message)
                try:
                    yield message, reply_handler
                except GeneratorExit:
                    break
        except BaseException as e:
            if isinstance(e, KeyboardInterrupt):  # SIGINT is ok
                LOG.info('Process is interrupted')
            else:
                LOG.exception(e)
                raise
|
Here we have a piece that was obviously commissioned, so far from my own imaginings, and a real challenge. It’s undoubtedly my style and process though — that warped looking-down-at-things perspective that I never mean to use but always end up using, and the drips and squiggles. What was challenging were the cupboard angles and the detail. I mean, how do you paint a shiny rock so that everyone can tell it’s a rock?
|
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import os
# Use LaTeX for text rendering.  The preamble must be a raw string:
# "\usepackage" in a normal literal starts a (broken) \u unicode escape.
mpl.rcParams["text.usetex"] = True
mpl.rcParams["text.latex.preamble"] = r"\usepackage{bm}"


def _read_columns(path, n_cols):
    """Read whitespace-separated numeric rows from *path*.

    Returns (columns, errors): *columns* is a list of *n_cols* lists of
    floats; *errors* collects the value in column *n_cols* for rows that
    have one (files without an error column yield an empty list).
    """
    columns = [[] for _ in range(n_cols)]
    errors = []
    with open(path, "r") as f:
        for line in f:
            fields = line.split(" ")
            for i in range(n_cols):
                columns[i].append(float(fields[i]))
            if len(fields) > n_cols:
                errors.append(float(fields[n_cols]))
    return columns, errors


def _save_plot(filename):
    """Write the current figure to plots/<basename>.png and close it."""
    plt.savefig("plots/" + filename.split(".dat")[0] + ".png")
    plt.close()


for filename in sorted(os.listdir("output")):
    if filename.startswith("sigmax_sigmaz_T_"):
        print(filename)
        (T, mx, mz), err = _read_columns("output/" + filename, 3)
        if len(err) > 0:
            plt.errorbar(T, mx, marker="x", label="$\\langle \\sigma_x \\rangle_{\\infty}$", yerr=err)
            plt.errorbar(T, mz, marker="x", label="$\\langle \\sigma_z \\rangle_{\\infty}$", yerr=err)
        else:
            plt.plot(T, mx, marker="x", label="$\\langle \\sigma_x \\rangle_{\\infty}$")
            plt.plot(T, mz, marker="x", label="$\\langle \\sigma_z \\rangle_{\\infty}$")
        plt.legend(loc=1)
        plt.xlabel("$T$")
        plt.ylim(0, 1)
        plt.grid(True)
        _save_plot(filename)
    elif filename.startswith("sigmazsigmaz_T_"):
        print(filename)
        (T, zz), err = _read_columns("output/" + filename, 2)
        if len(err) > 0:
            plt.errorbar(T, zz, marker="x", yerr=err)
        else:
            plt.plot(T, zz, marker="x")
        plt.xlabel("$T$")
        plt.ylabel("$\\langle \\sigma_z^{(j)} \\sigma_z^{(j+1)} \\rangle$")
        plt.grid(True)
        _save_plot(filename)
|
Mix together salt and the citrus zest and cover the redfish fillet in the cure.
Wrap the fillets loosely with cling film and place on a tray in the fridge for a few hours.
To make the horseradish yoghurt, mix the yoghurt, horseradish cream and lemon juice together and refrigerate until required.
Remove fish from fridge, slice thinly.
Nathan Outlaw has kindly shared his recipe with Fishbox.
Outlaw has seafood restaurants in Cornwall and London, 'Restaurant Nathan Outlaw' in Port Isaac is his two Michelin-starred establishment.
Visit www.outlaw.co.uk to discover more about his restaurants, recipes and cookbooks.
|
import os, sys, subprocess
import Image
import pygtk
pygtk.require('2.0')
import gtk, gobject
import gtk.glade
class ImageFixer:
    """Glade-backed progress dialog that resizes a batch of images to
    720x480 in place using ImageMagick's convert tool."""

    def __init__(self, filenames):
        # Load the "resize" progress dialog from the shared glade file.
        self.gladefile = "LinguaViewer.glade"
        self.wTree = gtk.glade.XML(self.gladefile, "resize")
        self.window = self.wTree.get_widget("resize")
        self.window.set_size_request(400, 100)
        self.window.connect("destroy", self.destroy_progress)
        self.pbar = self.wTree.get_widget("progressbar1")
        self.pbar.show()
        # Progress fraction advances by 1/len(filenames) per image.
        self.val = 0.0
        self.frac = 1.0/len(filenames)
        self.pbar.set_fraction(self.val)
        result = self.check(filenames)
        if result == gtk.RESPONSE_OK:
            # fix() is a generator; drive it from the GTK idle loop so the
            # progress bar stays responsive while images are converted.
            task = self.fix(filenames)
            gobject.idle_add(task.next)
        else:
            self.window.destroy()

    def check(self, filenames):
        """Asks the user for confirmation when at least one image needs
        resizing; returns the dialog response (CANCEL when none do)."""
        #check whether we need to do correction
        badcount = 0
        for i in filenames:
            im = Image.open(i)
            if (im.size[0] != 720): #or (im.size[1] != 480):
                badcount += 1
                break
        if badcount > 0:
            dlg = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_WARNING,
                                gtk.BUTTONS_OK_CANCEL,
                                "It appears that 1 or more images need to be resized.\nResizing the images will overwrite the originals. Continue?")
            result = dlg.run()
            dlg.destroy()
        else:
            result = gtk.RESPONSE_CANCEL
        return result

    def fix(self, files):
        """Generator that shaves, chops and force-resizes each non-720x480
        image in place, yielding True while more work remains (so it can be
        driven by gobject.idle_add)."""
        l = len(files)
        c = 0
        for j in files:
            im = Image.open(j)
            if (im.size[0] != 720) or (im.size[1] != 480):
                # Three sequential ImageMagick passes over the same file.
                cmd = ['convert', j, '-shave', '126x0', j]
                p = subprocess.Popen(cmd)
                p.wait()
                cmd = ['convert', j, '-chop', '12x0', j]
                p = subprocess.Popen(cmd)
                p.wait()
                cmd = ['convert', j, '-resize', '720x480!', j]
                p = subprocess.Popen(cmd)
                # NOTE(review): the final convert is not waited on (wait is
                # commented out) — progress may advance before the resize
                # finishes; confirm this is intentional.
                #p.wait()
            self.val += self.frac
            self.pbar.set_fraction(self.val)
            c += 1
            if c < l:
                yield True
            else:
                yield False

    def destroy_progress(self, event):
        """Destroy handler for the progress window."""
        self.window.destroy()
|
Resources, jobs, and expertise from researchers you trust.
Connect with other researchers with specific knowledge and expertise. Reach across boundaries of departments and institutions.
Post full-time, part-time, and contract work. Look for research and job opportunities.
Access equipment and services at core facilities and labs. Offer lab access to trusted members of affiliated labs and companies.
"Synaptic allows us to connect with other researchers and institutions, making collaboration and biotech innovation easier."
|
# -*- coding: utf-8 *-*
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .computer import Computer
from .event import Event
class DomainStatusLogManager(models.Manager):
    """Manager that restricts status-log queries to the computers a given
    user is allowed to see."""

    def scope(self, user):
        """Return the queryset visible to *user*; unrestricted when the
        user has the view-all permission."""
        queryset = super(DomainStatusLogManager, self).get_queryset()
        if user.is_view_all():
            return queryset
        return queryset.filter(computer_id__in=user.get_computers())
class StatusLogManager(DomainStatusLogManager):
    """Adds a convenience constructor that snapshots a computer's current
    status into a new log entry."""

    def create(self, computer):
        """Persist and return a StatusLog recording *computer*'s status."""
        entry = StatusLog()
        entry.status = computer.status
        entry.computer = computer
        entry.save()
        return entry
class StatusLog(Event):
    """Event subclass recording a computer's status at a point in time."""

    # Snapshot of the computer's status; limited to the same choice set as
    # Computer.status.
    status = models.CharField(
        verbose_name=_('status'),
        max_length=20,
        null=False,
        choices=Computer.STATUS_CHOICES,
        default='intended'
    )

    # Scoped manager providing the create(computer) convenience method.
    objects = StatusLogManager()

    class Meta:
        app_label = 'server'
        verbose_name = _("Status Log")
        verbose_name_plural = _("Status Logs")
        permissions = (("can_save_statuslog", "Can save Status Log"),)
|
Will this get you to work on time?
As one of THOSE people who enjoys the mornings, I still understand that not everyone leaps from bed before sunrise to greet the day. That being the case, while still a prototype, this might just be your new best friend when it comes to getting to work on time.
|
''' Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
Created on Feb 24, 2014
@author: dfleck
'''
import math
class FingerEntry:
    '''Represents an entry in the finger table.

    Note: Finger indexes go from 0-->m-1 which is different than the
    Chord paper which goes from 1-->m
    '''

    m = 128  # Number of bits in the identifier space; entries wrap mod 2**m

    def __init__(self, k, n, nodeLocation):
        '''k is the finger table entry (1-based, as in the Chord paper).
        n is the node ID of the node holding this entry.
        nodeLocation is the successor node for this interval (may be None).
        '''
        # BUG FIX: the original computed n + (2**(k-1) % 2**m) because %
        # binds tighter than +; Chord defines start = (n + 2^(k-1)) mod 2^m.
        # Integer arithmetic also avoids float rounding at m = 128, where
        # math.pow results exceed double precision.
        two_to_m = 2 ** FingerEntry.m
        self.start = (n + 2 ** (k - 1)) % two_to_m
        self.intervalStart = self.start
        self.intervalEnd = (n + 2 ** k) % two_to_m
        self.nodeLocation = nodeLocation  # This is the succ on the tables in the Chord paper

    def __str__(self):
        # Use a sentinel id when no node location has been assigned yet.
        if self.nodeLocation is None:
            nodeId = -999
        else:
            nodeId = self.nodeLocation.id
        return "Start:%d End:%d NodeLocation:%d" % (self.start, self.intervalEnd, nodeId)
|
March turned into an incredibly busy month.
Back to Cairns for another GDS Executive Summit, this time Oil & Gas, held at the magnificent Shangri-La Hotel. I did manage to get a brief walk down the Esplanade and managed to spot a few waders without my binoculars. Somehow also managed to score an executive floor suite which was an added bonus. Looking forward to returning there in May.
Two cruises in the month, both on Princess. The first saw me go from Darwin to Bali. I got a whole day in Kuta and a smart day room to base myself from before flying home at 1.30 am. A bit shocked to read the news that 5 terrorists had been shot the next night in a shootout with security forces right where I had been the night before. The second cruise was Fiji back to Sydney. Terrible weather for a few days but the last couple of magic ones made up for it.
I also conducted a half-day Creative Thinking and Team Building session for Roche. Just a small team of 12 IT people, but I was really pleased with how it went and the feedback I received.
|
# Copyright 2011 SRI International
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lxml import etree
from pylons import config
import logging, os, re, subprocess
_log = logging.getLogger(__name__)
# XML namespace prefix map used by every xpath query in this module.
# Covers OAI-PMH, Learning Registry, Dublin Core, NSDL and IEEE LOM.
namespaces = {
              "oai" : "http://www.openarchives.org/OAI/2.0/",
              "lr" : "http://www.learningregistry.org/OAI/2.0/",
              "oai_dc" : "http://www.openarchives.org/OAI/2.0/oai_dc/",
              "oai_lr" : "http://www.learningregistry.org/OAI/2.0/oai_dc/",
              "dc":"http://purl.org/dc/elements/1.1/",
              "dct":"http://purl.org/dc/terms/",
              "nsdl_dc":"http://ns.nsdl.org/nsdl_dc_v1.02/",
              "ieee":"http://www.ieee.org/xsd/LOMv1p0",
              "xsi":"http://www.w3.org/2001/XMLSchema-instance"
              }
class XercesValidator():
    """Runs XML schema validation through the xerces-c StdInParse command
    line tool, when one is configured and executable."""

    def __init__(self):
        def is_exe(fpath):
            # The configured path must exist and carry the execute bit.
            return os.path.exists(fpath) and os.access(fpath, os.X_OK)

        if "xerces-c.StdInParse" in config and is_exe(config["xerces-c.StdInParse"]):
            # -n namespace processing, -f full schema constraint checking,
            # -s schema validation.
            self.stdinparse = [config["xerces-c.StdInParse"], '-n', '-f', '-s']
            self.enabled = True
        else:
            self.enabled = False

    def validate(self, contents=""):
        """Validate *contents* (an XML string) with StdInParse.

        :return: list of dicts {"line", "char", "msg"} parsed from the
            tool's stderr diagnostics; empty when validation passed or the
            validator is disabled.
        """
        errors = []
        if self.enabled:
            process = subprocess.Popen(self.stdinparse, shell=False,
                                       stdin=subprocess.PIPE,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            (_, stderr) = process.communicate(input=contents.encode("utf8"))
            # BUG FIX: the original tested "stderr != None or stderr != ''",
            # which is always true and would crash on a None stderr.  Only
            # parse diagnostics when stderr output was actually produced.
            if stderr:
                for err in stderr.splitlines():
                    m = re.match(r'''.*\s+line\s+([0-9]+),\s+char\s+([0-9]+)\):\s*(.*)$''', err)
                    if m is not None:
                        errors.append({ "line": m.group(1), 'char': m.group(2), 'msg': m.group(3) })
        else:
            _log.info("Xerces not available for validation.")
        return errors
# Module-level singleton used by validate_lr_oai_response for schema checks.
_validator = XercesValidator()
def validate_xml_content_type(res):
    """Assert that *res* carries a text/xml content type with utf-8 charset,
    checking both capitalizations of the header name."""
    content_type = None
    for header_name in ('Content-Type', 'content-type'):
        try:
            content_type = res.headers[header_name]
            break
        except:
            pass
    assert re.match("""text/xml;\s*charset=utf-8""", content_type) != None , '''Expected Content Type: "text/xml; charset=utf-8" Got: "%s"''' % content_type
def validate_json_content_type(res):
    """Assert that *res* carries an application/json content type with
    utf-8 charset, checking both capitalizations of the header name."""
    content_type = None
    for header_name in ('Content-Type', 'content-type'):
        try:
            content_type = res.headers[header_name]
            break
        except:
            pass
    assert re.match("""application/json;\s*charset=utf-8""", content_type) != None , '''Expected Content Type: "application/json; charset=utf-8" Got: "%s"''' % content_type
def parse_response(response):
    """Parse an HTTP *response* body as XML.

    :return: dict with the raw body under "raw" and the parsed lxml
        element tree root under "etree".
    """
    raw_body = response.body
    return {"raw": raw_body, "etree": etree.fromstring(raw_body)}
def validate_lr_oai_etree(xmlcontent, errorExists=False, checkSchema=False, errorCodeExpected=None):
    """Assert the presence/absence of an OAI <error> element in *xmlcontent*.

    :param xmlcontent: parsed lxml element tree of an OAI response.
    :param errorExists: when False, assert that no error element is
        present; when True, assert that exactly one is.
    :param checkSchema: accepted but unused here (schema validation is
        done separately by the caller).
    :param errorCodeExpected: when given together with errorExists=True,
        also assert that the error element carries this code.
    """
    error = xmlcontent.xpath("//*[local-name()='error']", namespaces=namespaces)
    if errorExists == False:
        if len(error) > 0:
            assert 0 == len(error), "validate_lr_oai_etree FAIL: Error code:{0} mesg:{1}".format(error[0].xpath("@code", namespaces=namespaces)[0], error[0].xpath("text()", namespaces=namespaces)[0])
    elif errorExists and errorCodeExpected != None:
        # NOTE(review): error[0] raises IndexError when an error was
        # expected but none is present — confirm callers accept that
        # instead of an assertion message.
        codeReceived = error[0].xpath("@code", namespaces=namespaces)[0]
        if errorCodeExpected != codeReceived:
            assert 0 == len(error), "validate_lr_oai_etree FAIL: Expected:{2}, Got Error code:{0} mesg:{1}".format(error[0].xpath("@code", namespaces=namespaces)[0], error[0].xpath("text()", namespaces=namespaces)[0], errorCodeExpected)
    else:
        assert 1 == len(error), "validate_lr_oai_etree FAIL: Expected error, none found."
def validate_lr_oai_response( response, errorExists=False, checkSchema=False, errorCodeExpected=None):
    """Full validation of an LR OAI HTTP response: content type, OAI error
    element expectations, and XML schema validation via the module-level
    Xerces validator."""
    validate_xml_content_type(response)
    parsed = parse_response(response)
    validate_lr_oai_etree(parsed["etree"], errorExists, checkSchema, errorCodeExpected)
    schemaErrors = _validator.validate(parsed["raw"])
    formatted = map(lambda x: "\t(line: {0}, char: {1}): {2}".format(x["line"], x["char"], x["msg"]), schemaErrors)
    assert len(schemaErrors) == 0, "validate_lr_oai_response: Schema validation error:\n%s" % '\n'.join(formatted)
|
The room is very well equipped for research. There are study tables, computers, information and many books. Ken has collected and organized a good amount of items for the family historian or genealogist.
The Cascade branch started as a bookmobile stop in 1965. A new facility was built in 1996 where a local history room was included and is run by Cascade Historical Society.
Volunteers from the historical society come in on Thursdays from 1 to 3 p.m. to aid in research of the township’s history and review the artifacts available. Suburban Life was a weekly newspaper serving Cascade Township and the surrounding area. Suburban Life was printed from 1955 to 1968 and has been digitized. It is available at the library or online at kdl.org. Look under Learn & Research.
The East Grand Rapids History Room was established in 1987. This room is comprised of a collection unique to West Michigan. First, look at the index compiled by Mary Dersch, curator. It provides access to historical city voting records, tax assessments, paving and sewer installation records, oaths of office, East Grand Rapids High School Interlochen yearbooks, local and Michigan books, ephemera, photographs and memories of bygone days. East Grand Rapids was the home to Ramona Amusement Park from 1897-1954. Reeds Lake was truly a resort area from the 1850s through the 1930s. Large excursion steamboats plied the waters of Reeds Lake during those bygone days. The wreck of the SS Hazel A lies on the bottom of the lake. The history room has photos and information on Reeds Lake.
The EGR History Room is located within the East Grand Rapids Library, but it is a City of EGR entity, as the city owns the building where the library is housed.
The Local history room at the Gaines Township library is filled with photos, memorabilia and information that visually helps in anyone’s research. Photos tell a great story for any historian. There is a collection of high school yearbooks here, with school pillows. Look for a relative in the photo of the Thornapple Valley-Pioneers 40th Annual picnic at Campau Lake in 1924.
The Kentwood Historical Preservation Commission has a room at the Kentwood Library. Much of Kentwood was once Parris Township. They have a wonderful collection of information, photos and memorabilia. Have you ever seen an octagonal house or barn? The model of the Vander Laan barn is outstanding. If you have ever been to the Kent County Airport, you will enjoy the photo display on its history.
This library has its own local history room, cared for by the Plainfield Township Historical Society. It is a nice quiet room for study if you don’t want to go downtown to do your research. There is a small collection of books on local history, Grand Rapids and Kent County directories.
The Wyoming Historical Commission has a local history room and archive in the city of Wyoming library. This historical commission was organized to acquire and preserve objects and data relevant to the city of Wyoming’s past, plus the history of the surrounding area. It oversees the local history archives, provides regular public programs and lectures, encourages ongoing research and documentation, sponsors local history publications and partners with schools and other community organizations.
|
import functools
from .services import CommandBuilderService
from .services import SchemaCompilerService
from .services import SchemasProviderService
class Kernel(object):
    """Facade over the ffmpeg command construction services.

    Known option names become chainable methods via __getattr__; compiled
    options accumulate on a stack until execute() builds and runs the
    final command.
    """

    def __init__(self, ffmpeg_home_dir):
        # Path used as the first token of the built command.
        self.ffmpeg_home = ffmpeg_home_dir
        self.command_builder_service = CommandBuilderService()
        self.schema_compiler_service = SchemaCompilerService()
        self.schemas_provider_service = SchemasProviderService()
        # Compiled option strings, consumed and cleared by execute().
        self.options_stack = []

    def __getattr__(self, name):
        # Unknown attribute names that match an option schema resolve to a
        # partially-applied option(name, ...) call, enabling kernel.foo(...).
        if name in self.__dict__:
            return self.__dict__[name]
        elif self.has_option(name):
            return functools.partial(self.option, name)
        else:
            raise AttributeError, name

    def bootstrap(self):
        """Initializes all underlying services; call once before use."""
        self.command_builder_service.bootstrap()
        self.schema_compiler_service.bootstrap()
        self.schemas_provider_service.bootstrap()

    def has_option(self, option_name):
        """Returns True when a schema exists for *option_name*."""
        return bool(self.schemas_provider_service.schema(option_name))

    def option(self, name, **kwargs):
        """Compiles option *name* with *kwargs* and pushes it on the stack.

        Returns self for chaining, or None when the option is unknown.
        """
        schema = self.schemas_provider_service.schema(name)
        if not schema:
            return None
        else:
            compiled_option = self.schema_compiler_service.compile(schema, kwargs)
            self.options_stack.append(compiled_option)
            return self

    def execute(self):
        """Flattens the accumulated options into tokens and runs the command.

        Clears the options stack afterwards so the kernel can be reused.
        """
        # Each compiled option is a space-separated string; split each one
        # into tokens and flatten into a single token list.
        command_parts = map(lambda o: o.split(' '), self.options_stack)
        # NOTE(review): reduce() raises TypeError on an empty stack —
        # confirm execute() is never called before any option is added.
        command_parts = reduce(lambda x, y: x + y, command_parts)
        self.command_builder_service.push(self.ffmpeg_home)
        for part in command_parts:
            self.command_builder_service.push(part)
        # NOTE(review): debug print left in — confirm whether it is wanted.
        print self.command_builder_service.command_parts_stack
        self.command_builder_service.run()
        self.options_stack = []
|
Reference in this article to Ferguson, USA is to draw an analogy to ethnic imbalance that exists in strategic positioning of media and communications portfolio, not only at Auckland Council but other organizations which are frustrated at their inability to reach out to increasingly non-Anglo-Saxon (European) people.
The forum was informed that English, Maori, Samoan, Hindi and Mandarin (Chinese) were languages most spoken in that order. If some 40,000 Punjabi and Gujarati speakers are taken in (many are bilingual with Hindi) then Hindi is the third most spoken language in New Zealand, especially Auckland. Yet, Carol Hayward from Engagement Communications team at Auckland Council, could not name an Indian Communications person in her team.
In an earlier Waitakere Ethnic Board (WEB) Forum on elections in August 2014, I seemed to have hit a sensitive nerve, or perhaps poked a hornet’s nest. I stated that because of Henderson-Massey Local Board’s indifference attitude towards WEB, which was based in Henderson, it (WEB) was considering shifting to Whau Local Board, based in New Lynn. Whau Local Board is not only the best reflection of a multicultural Auckland, but it is also very receptive to funding needs of WEB, which represents and advocates for ethnic communities. Whau Local Board is the face of Auckland, with a Samoan, a Tongan, a Chinese, an Indo-Fijian and of course the mainstream Anglo-Saxon Kiwi board members. Auckland Council or most of its other Local Boards, like the mainstream media, does not bear the demographic resemblance of a fast “browning” city. To give an illustration of this demographic imbalance, I had given an example of a US city, Ferguson, which has been in the news for wrong reasons.
Ferguson is a city in St. Louis County, Missouri, United States. It is part of the Greater St. Louis metropolitan area. The population was 21,203 at the 2010 census. Two thirds or just over 14,000 are American-Africans (Black) while the others are White American. According to The Washington Post, the Ferguson Police Department "bears little demographic resemblance" to the mostly African-American community, which already harbored "suspicions of the law enforcement agency" preceding Brown's shooting, with 48 (over 90%) of the police force's 53 officers being white. It gained international attention on August 9, 2014, when a young man, Michael Brown, was fatally shot by a Darren Wilson, a white Ferguson police officer, sparking ongoing protests and civil unrest, which continues to date. (This case is still causing racial ripples in USA). Based on my statement, the Chair of Henderson –Massey, Vanessa Neeson had reportedly expressed her displeasure, and wanted a meeting with WEB and yours truly, but this never eventuated.
The latest WEB Forum on 12 November, 2014 at New Lynn Community Centre kept digging at that sensitive nerve, as it was about “Engaging with Ethnic Community” at Local Government level. The President of WEB, Tuwe Kudakwashe led the discussion by pointing at the lack of color in the Council, where ethnic communities felt neglected and “blue-eyed” boys got jobs which required engaging with the wider ethnic communities.
Presentations were by the Chair of Whau Local Board, Catherine Farmer, Steve Tollestrup from Waitakere Ranges Local Board, Peter Chan from Henderson-Massey Local Board, Carol Hayward, Senior Specialist Engagement and Consultation, Communications and Public Affairs of Auckland Council and Dr Camille Nakhid, Associate Professor at Auckland University of Technology and former Chair of EPAP- Ethnic Peoples Advisory Panel.
One bone of contention was the composition and action (or rather the lack of it) of EPAP. KIWI PUNDIT will carry a special article on this subject, as to how EPAP has become a laughing stock of the Council and ethnic people. It was acknowledged that hardly anybody knows who these people are, and many felt that the new EPAP members appear to be cheerleaders of the mayor, merely adding color to a white Council, without any teeth or longevity.
When asked how many people had responded to or knew about submissions to Auckland Plans, only a few hands went up reluctantly. This shows that despite their multi-million dollar communications budget and a media strategy that is still very white, Auckland Council is unable to engage with ethnic communities. Like in case of Ferguson mentioned earlier, it had to do with demographic imbalance. The forum was informed that English, Maori, Samoan, Hindi and Mandarin (Chinese) were languages most spoken in that order. If some 40,000 Punjabi and Gujarati speakers are taken in (many are bilingual with Hindi) then Hindi is the third most spoken language in New Zealand, especially Auckland. Yet, Carol Hayward from Engagement Communications team at Auckland Council, could not name an Indian Communications person in her team. Not that there have been no interest. Mayor Len Brown and CEO Stephen Town are aware of the complaints of an Indian Masters in Communications Graduate with Honors from AUT, who failed to make it to Auckland Council’s engagement team, and he, like most ethnic qualified people unable to be recognized for their skills, is driving a bus. As the President of WEB quipped earlier on, recruitment at Auckland Council is obviously lopsided in favour of “the blue-eyed “boys who get preference in communicating and engaging jobs in a fast “browning” landscape where the recipients are ‘foreign’ - ethnic people.
The reason this article began with reference to Ferguson, USA is to draw an analogy to the ethnic imbalance that exists in the strategic positioning of media and communications portfolios, not only at Auckland Council but at other organizations which are frustrated at their inability to reach out to increasingly non-Anglo-Saxon (European) people. My complaints have fallen on deaf ears and nobody seems to care to listen to a coloured boy. This lackadaisical attitude has given rise to a Frankenstein monster and social media attacks on an unresponsive Auckland Council. Auckland is much more civilized than Ferguson and will not have a racial uprising.
The author and blogger at KIWI PUNDIT blog site, Thakur Ranjit Singh (left) with former president of WEB and Deputy Chair of EPAP, Amail Habib (centre) and Auckland Mayor Len Brown at Waitakere Diwali function. The mayor takes all opportunity to beat the drum of a multicultural city with opportunity for all, but fails to listen to grievances of ethnic communities.
However Auckland Council will continue to look hypocritical and receive flaks from ethnic meetings, ethnic blog articles (especially in KIWI PUNDIT) and other occasions where the Council or the Mayor tries to cloud reality and beat the drum of multiculturalism, fair opportunities and world’s most livable city, at ethnic festivals. In reality, all these seem to be myth at ground zero where Auckland Council fails to engage with ethnic communities because of demographic imbalance-just like in Ferguson, USA.
|
########################################################################
# $HeadURL$
# File: FileTest.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2012/08/06 13:48:54
########################################################################
""" :mod: FileTest
=======================
.. module: FileTest
:synopsis: test cases for Files
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
test cases for Files
"""
__RCSID__ = "$Id$"
# #
# @file FileTest.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2012/08/06 13:49:05
# @brief Definition of FileTest class.
# # imports
import unittest
# # from DIRAC
from DIRAC.RequestManagementSystem.Client.Operation import Operation
# # SUT
from DIRAC.RequestManagementSystem.Client.File import File
########################################################################
class FileTests( unittest.TestCase ):
  """
  .. class:: FileTests

  Unit tests for the request-management File record: construction,
  (de)serialisation and property validation.  The Python-2-only
  ``except X, e`` and ``print`` statement forms have been replaced with
  ``except X as e`` / ``print()``, which are valid on Python 2.6+ and 3.
  """

  def setUp( self ):
    """ test setup: a dictionary of valid File attribute values """
    self.fromDict = { "Size" : 1, "LFN" : "/test/lfn", "ChecksumType" : "ADLER32", "Checksum" : "123456", "Status" : "Waiting" }

  def tearDown( self ):
    """ test tear down """
    del self.fromDict

  def test01ctors( self ):
    """ File construction and (de)serialisation """
    # # empty default ctor
    theFile = File()
    self.assertEqual( isinstance( theFile, File ), True )
    # # fromDict ctor: every key must end up as a matching attribute
    try:
      theFile = File( self.fromDict )
    except AttributeError as error:
      print( "AttributeError: %s" % str( error ) )
    self.assertEqual( isinstance( theFile, File ), True )
    for key, value in self.fromDict.items():
      self.assertEqual( getattr( theFile, key ), value )
    toJSON = theFile.toJSON()
    self.assertEqual( toJSON["OK"], True, "JSON serialization error" )

  def test02props( self ):
    """ test props and attributes """
    theFile = File()
    # valid props
    theFile.FileID = 1
    self.assertEqual( theFile.FileID, 1 )
    theFile.Status = "Done"
    self.assertEqual( theFile.Status, "Done" )
    theFile.LFN = "/some/path/somewhere"
    self.assertEqual( theFile.LFN, "/some/path/somewhere" )
    theFile.PFN = "/some/path/somewhere"
    self.assertEqual( theFile.PFN, "/some/path/somewhere" )
    theFile.Attempt = 1
    self.assertEqual( theFile.Attempt, 1 )
    theFile.Size = 1
    self.assertEqual( theFile.Size, 1 )
    theFile.GUID = "2bbabe80-e2f1-11e1-9b23-0800200c9a66"
    self.assertEqual( theFile.GUID, "2bbabe80-e2f1-11e1-9b23-0800200c9a66" )
    # ChecksumType is normalised to upper case on assignment
    theFile.ChecksumType = "adler32"
    self.assertEqual( theFile.ChecksumType, "ADLER32" )
    theFile.Checksum = "123456"
    self.assertEqual( theFile.Checksum, "123456" )
    # # None resets both checksum fields to empty strings
    theFile.Checksum = None
    theFile.ChecksumType = None
    self.assertEqual( theFile.Checksum, "" )
    self.assertEqual( theFile.ChecksumType, "" )
    # # invalid props
    # FileID must be numeric
    try:
      theFile.FileID = "foo"
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
    # parent: OperationID is read-only and delegated to the owning Operation
    parent = Operation( { "OperationID" : 99999 } )
    parent += theFile
    theFile.FileID = 0
    self.assertEqual( parent.OperationID, theFile.OperationID )
    try:
      theFile.OperationID = 111111
    except Exception as error:
      self.assertEqual( isinstance( error, AttributeError ), True )
      self.assertEqual( str( error ), "can't set attribute" )
    # LFN: must be an absolute-path string
    try:
      theFile.LFN = 1
    except Exception as error:
      self.assertEqual( isinstance( error, TypeError ), True )
      self.assertEqual( str( error ), "LFN has to be a string!" )
    try:
      theFile.LFN = "../some/path"
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
      self.assertEqual( str( error ), "LFN should be an absolute path!" )
    # PFN: must be a well-formed string
    try:
      theFile.PFN = 1
    except Exception as error:
      self.assertEqual( isinstance( error, TypeError ), True )
      self.assertEqual( str( error ), "PFN has to be a string!" )
    try:
      theFile.PFN = "snafu"
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
      self.assertEqual( str( error ), "Wrongly formatted PFN!" )
    # Size: non-negative integer
    try:
      theFile.Size = "snafu"
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
    try:
      theFile.Size = -1
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
      self.assertEqual( str( error ), "Size should be a positive integer!" )
    # GUID: must be a well-formed GUID string
    try:
      theFile.GUID = "snafuu-uuu-uuu-uuu-uuu-u"
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
      self.assertEqual( str( error ), "'snafuu-uuu-uuu-uuu-uuu-u' is not a valid GUID!" )
    try:
      theFile.GUID = 2233345
    except Exception as error:
      self.assertEqual( isinstance( error, TypeError ), True )
      self.assertEqual( str( error ), "GUID should be a string!" )
    # Attempt: non-negative integer
    try:
      theFile.Attempt = "snafu"
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
    try:
      theFile.Attempt = -1
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
      self.assertEqual( str( error ), "Attempt should be a positive integer!" )
    # Status: must be one of the known states
    try:
      theFile.Status = None
    except Exception as error:
      self.assertEqual( isinstance( error, ValueError ), True )
      self.assertEqual( str( error ), "Unknown Status: None!" )
    # Error: plain string only
    try:
      theFile.Error = Exception( "test" )
    except Exception as error:
      self.assertEqual( isinstance( error, TypeError ), True )
      self.assertEqual( str( error ), "Error has to be a string!" )
# # test execution
if __name__ == "__main__":
testLoader = unittest.TestLoader()
fileTests = testLoader.loadTestsFromTestCase( FileTests )
suite = unittest.TestSuite( [ fileTests ] )
unittest.TextTestRunner( verbosity = 3 ).run( suite )
|
ASSEN, The Netherlands – Paralympic track & field fans around the world will be able to watch, for the first time ever, footage from the International Paralympic Committee’s (IPC) Athletics World Championships via the IPC’s Internet television channel: www.paralympicsport.tv.
Paralympicsport.tv will broadcast six four-hour highlight programs with English commentary from September 5-12.
from Sept. 2-10 in Assen, The Netherlands.
**Andrews, Danny T44 Tucson , Ariz.
**Fann, Ryan T44 Brush Creek, Tenn.
**Frasure, Brian T44 Apex, N.C.
**Gillette, Elexis T11 Raleigh, N.C.
*Gottwald, Peter T13 Boothwyn, Pa.
Jamison, Josiah T12 Chula Vista, Calif.
Kosakowski, Donald T46 Oxford, Ct.
McLellan, James T46 Greenville , N.C.
*Porter, Nelacey T11 Chula Vista , Calif.
Sauer, Abe T46 Johnstown , Colo.
**Shirley, Marlon T44 San Diego , Calif.
**Skiba, Jeffrey T44 Chula Vista , Calif.
**Tibbs, Casey T44 San Diego , Calif.
Hawkes, Sabra T37 Rockport, Mass.
**Holmes, April T44 Chula Vista, Calif.
Renaud, Rachelle T37 Palm Beach Gardens, Fl.
**Cockrell, Edwin F44 New Athens , Ill.
**Danberg, Scott F40 Cooper City, Fl.
**Diaz De Leon , Gabriel F52 Colorado Springs , Colo.
**Jacobsen, Val F53 Hampton, S.C.
**Kennedy, Jill F40 Charlottesville , Va.
Bird, Jordan T54 Wichita, Ks.
*Bleakney, Adam T53 Champaign , Ill.
Caswell, Brandon T54 Pendleton, Ore.
**George, Joshua T53 Champaign , Ill.
*Heilveil, Jacob T54 Longmont , Colo.
Hightower, Erik T54 Peoria , Ariz.
Toyoji, Steven T52 Redmond, Wash.
*Galli, Jessica T53 Champaign , Ill.
**McFadden, Tatyana T54 Clarksville , Md.
McGrory, Amanda T53 Kennett Square, Pa.
**Ripp, Christina T53 Westminster , Colo./Dane, Wis.
*Sagmeister, LeAnn T53 Jacksonville , Fl.
Tomorrow, Saturday, September 2, the 2006 IPC Athletics World Championships will officially kick off with a large Opening Ceremony.
The IPC was, in its capacity as an International Sports Federation for 12 sports, the first IF to initiate its own Internet television channel (www.paralympicsport.tv). Launched at the 2006 Paralympic Winter Games in Torino, the IPC’s Internet television channel saw over 70,000 cumulative viewers from 105 countries, watching an average of four and a half hours during the Paralympic Winter Games. The majority of viewers came from the USA, Italy, Canada, Germany and Japan. The channel also drew a five-percent increase of new viewers every day.
For more information about the 2006 IPC Athletics World Championships, the competition schedule and results, please visit the official event website at http://www.eurochamp.nl/. To view footage from the competition or the 2006 Paralympic Winter Games, go to www.paralympicsport.tv.
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
from airflow.utils import timezone
from airflow.utils.db import provide_session
from airflow.utils.state import State
class NotInRetryPeriodDep(BaseTIDep):
    """Dependency that blocks a task instance while its retry delay has not elapsed."""

    NAME = "Not In Retry Period"
    IGNOREABLE = True
    IS_TASK_DEP = True

    @provide_session
    def _get_dep_statuses(self, ti, session, dep_context):
        # The caller may explicitly allow running inside the retry window.
        if dep_context.ignore_in_retry_period:
            yield self._passing_status(
                reason="The context specified that being in a retry period was "
                       "permitted.")
            return

        # Only tasks marked UP_FOR_RETRY have a retry window at all.
        if ti.state != State.UP_FOR_RETRY:
            yield self._passing_status(
                reason="The task instance was not marked for retrying.")
            return

        # Grab "now" before computing the retry time so it is always smaller
        # than the timestamp used by ready_for_retry.
        now = timezone.utcnow()
        retry_at = ti.next_retry_datetime()
        if ti.is_premature:
            yield self._failing_status(
                reason="Task is not ready for retry yet but will be retried "
                       "automatically. Current date is {0} and task will be retried "
                       "at {1}.".format(now.isoformat(), retry_at.isoformat()))
To ask the Scottish Government, further to the letter from the Minister for Childcare and Early Years to the Education and Skills Committee on 18 January 2017 on the additional graduate commitment, of the 432 nurseries identified, how many directly employ (a) qualified early years teachers, (b) qualified graduates, (c) staff working towards a graduate-level teaching qualification and (d) staff working towards a Childhood Practice qualification, broken down by (i) headcount and (ii) full-time equivalent.
Allocations of the additional graduates to local authorities were determined by their share of the nurseries located in the 20% most deprived postcode areas. Using the Care Inspectorate data on Early Learning and Childcare settings, and the latest Scottish Index of Multiple Deprivation (SIMD), 432 nurseries were identified as being located in the 20% most deprived postcode areas. Each of the three local authorities (Orkney, Shetland, and Western Isles) without a nursery in the 20% most deprived postcode areas will also receive one additional graduate.
We are currently working closely with local authorities on progressing this commitment, and will ask them provide plans setting out how they propose to allocate the additional places in their area, including how many of the additional training places they wish to access.
|
#! /usr/bin/env python
"""
File: read_2columns.py
Copyright (c) 2016 Taylor Patti
License: MIT
This module plots the points of two lists of data, as well as printing the maximum and minimum values of the generated arrays.
"""
import numpy as np
import matplotlib.pyplot as plt
def data_prep1(filename='xy.dat'):
    """Read a two-column whitespace-separated file into two numpy arrays.

    Parameters
    ----------
    filename : str
        Path to the data file (default ``'xy.dat'``).

    Returns
    -------
    tuple of numpy.ndarray
        The first and second columns as float arrays.
    """
    x_data = []
    y_data = []
    # 'with' guarantees the handle is closed; the original opened the file
    # and never closed it (resource leak).
    with open(filename) as infile:
        for line in infile:
            data = line.split()
            x_data.append(float(data[0]))
            y_data.append(float(data[1]))
    return np.array(x_data), np.array(y_data)
def data_prep2(filename='xy.dat'):
    """Read a two-column file into two numpy arrays via ``numpy.loadtxt``.

    Uses the builtin ``float`` dtype: the deprecated alias ``np.float``
    used by the original was removed in NumPy 1.24 and raises
    AttributeError there.
    """
    data = np.loadtxt(filename, dtype=float)
    return data[:, 0], data[:, 1]
def data_plot_display(xinfo=None, yinfo=None):
    """Plot the data given and print the max and mean of both data sets.

    Parameters
    ----------
    xinfo, yinfo : array-like or None
        Data to plot.  When omitted, both columns are read lazily from
        ``'xy.dat'`` at call time.  The original signature evaluated
        ``data_prep1()`` in the defaults, which read the file twice at
        module import and froze the data at definition time.
    """
    if xinfo is None or yinfo is None:
        xinfo, yinfo = data_prep1()
    # print() call form works on both Python 2 and 3 (original used the
    # Python-2-only print statement).
    print('Max x: ' + str(np.amax(xinfo)))
    print('Max y: ' + str(np.amax(yinfo)))
    print('Mean x: ' + str(np.mean(xinfo)))
    print('Mean y: ' + str(np.mean(yinfo)))
    plt.plot(xinfo, yinfo, 'bo')
    plt.title('Unspecified Plot Data')
    plt.xlabel('x-axis data')
    plt.ylabel('y-axis data')
    plt.show()
def test_other_file1(filename='testerfile.dat'):
    """Uses a specified testfile to ensure that the indexes were correct."""
    test_run = data_prep1(filename)
    apt = (test_run[0][2] == 2) and (test_run[1][2] == 2)
    msg = 'Values indexed incorrectly.'
    # Attach the message so a failure explains itself; the original
    # computed msg but never used it.
    assert apt, msg
def test_other_file2(filename='testerfile.dat'):
    """Uses a specified testfile to ensure that the indexes were correct."""
    test_run = data_prep2(filename)
    apt = (test_run[0][2] == 2) and (test_run[1][2] == 2)
    msg = 'Values indexed incorrectly.'
    # Attach the message so a failure explains itself; the original
    # computed msg but never used it.
    assert apt, msg
|
Simple, this song resumes how businessmen like me and many millions more, must be today.
Harder: in your business you must harder, in many senses, harder against your competitors, harder in selecting customers; leave the ones that aren’t giving you good business in comparison to the hours spent “chasing” them. Harder in expressing your ideas; is better to show things clearly avoiding unpleasant misunderstanding that can compromise your job. Harder taking your decisions, I know that what I’m going to say is not so easy to accept but in your organization you should know who can be your trustable partner and if someone is not convincing you in terms of performances, give him/her some chances but at the end you must tell them to leave.
Better: Today everything must be done better. The market is requesting you best in class performances, continuous improvement. You need better products, better processes, better strategies and better partners. You must look for your key differentiators in order to show that you are better than others!
So, still thinking that this song doesn’t fit perfectly?
Faster: If you are an entrepreneur or a manager you know that time is another element. You must be faster than competitors. Not just in term of products to be launched in the market but you must be faster in taking strategic decisions, be analytic, extremely analytic, be a faster filter of all those elements that are conducting you in some never-ending decisional processes. I know that is not so easy be fast especially when some decisions can hide big risks, but unfortunately I experienced many times that to be successful in your business you must react very quickly.
Stronger: You don’t need just to be harder, better and faster, you need to be stronger too. Why? Because only people very strong can support the stress caused by such a though business environment. You need to be stronger in your daily routine; be able to work hard and dedicate time to your family too. Stronger to fight tiredness, I’m working almost 15 hours a day with different customers and I must say that sometimes is not easy. If I shouldn’t be stronger than some years ago when I was working as an employee, today I couldn’t face this incredible economic crisis.
So , Thanks Daft Punk, really well done!
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2009 Guillaume Pellerin <yomguy@parisson.com>
# This file is part of TimeSide.
# TimeSide is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# TimeSide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
# Author: Guillaume Pellerin <yomguy@parisson.com>
from timeside.core import Processor, implements, interfacedoc
from timeside.encoder.core import GstEncoder
from timeside.api import IEncoder
from timeside.tools import *
class FlacEncoder(GstEncoder):
    """ gstreamer-based FLAC encoder """
    implements(IEncoder)

    @interfacedoc
    def setup(self, channels=None, samplerate=None, blocksize=None, totalframes=None):
        # Delegate common stream bookkeeping to GstEncoder, then assemble
        # the GStreamer pipeline description as a string.
        super(FlacEncoder, self).setup(channels, samplerate, blocksize, totalframes)
        # Base pipeline: app source -> converter -> FLAC encoder.
        self.pipe = ''' appsrc name=src ! audioconvert
            ! flacenc '''
        if self.filename and self.streaming:
            # Both a file and a stream requested: tee the encoded output
            # into a filesink and an appsink.
            self.pipe += ''' ! tee name=t
            ! queue ! filesink location=%s
            t. ! queue ! appsink name=app sync=False
            ''' % self.filename
        elif self.filename :
            # File output only.
            self.pipe += '! filesink location=%s async=False sync=False ' % self.filename
        else:
            # Streaming only: expose encoded data through an appsink.
            self.pipe += '! queue ! appsink name=app sync=False '
        # Launch the pipeline (defined in GstEncoder).
        self.start_pipeline(channels, samplerate)

    @staticmethod
    @interfacedoc
    def id():
        # Unique processor identifier used by the TimeSide registry.
        return "gst_flac_enc"

    @staticmethod
    @interfacedoc
    def description():
        return "FLAC GStreamer based encoder"

    @staticmethod
    @interfacedoc
    def format():
        return "FLAC"

    @staticmethod
    @interfacedoc
    def file_extension():
        return "flac"

    @staticmethod
    @interfacedoc
    def mime_type():
        return 'audio/x-flac'

    @interfacedoc
    def set_metadata(self, metadata):
        # Stored only; NOTE(review): metadata does not appear to be written
        # into the FLAC stream anywhere in this class -- confirm upstream.
        self.metadata = metadata
|
Learn about who we are + what we do.
Pizza + drinks will be provided!
Previous Previous post: Pre-order Your Freshman Packet Now!
|
import time
import yampy

# Script: dump every message of a Yammer thread, newest to oldest, printing
# the sender's full name, message IDs and the plain-text body.

#Secrets scrubbed
# OAuth2 application credentials for the Yammer REST API.
client_id = ""
client_secret = ""
redirect_uri = ""
code = ""
access_token = ""

# Pagination state.  NOTE(review): the walk assumes Yammer returns messages
# in descending ID order within each page -- confirm against the API docs.
newestMsgID = 0
currentID = 0
MAX_MSG_LIMIT = 10000000000  # sentinel "older than anything" starting bound
currentPgLowerMsgID = MAX_MSG_LIMIT
users = dict()  # maps sender id -> full name, filled from "references"
moreToProcess = True
MAX_MSG_PER_PAGE = 20  # a short page signals the end of the thread
restCall = 0 # Keep track of how many times we make web calls due to API limits / throttling
MAX_REQ_PER_INTERVAL = 10 # How many Yammer requests you can make in 30 seconds. Once reached, wait 30 seconds.

# Various Yammer threads for testing
GMAIL_THREAD = 414357831 # 268-ish
AURORA_THREAD = 387871026 # 12 messages
PASTEBIN_THREAD = 421373941 # Exactly 20 messages (as of 27-JUL-2014)

# Setup authenticator - Don't delete any of this! You'll need it when the access token expires
authenticator = yampy.Authenticator(client_id, client_secret)
#auth_url = authenticator.authorization_url(redirect_uri)
#print(auth_url) #Debug: show the code to stdout
#access_token = authenticator.fetch_access_token(code)
#print(access_token)

#Get your Yammer object for making requests
yammer = yampy.Yammer(access_token)

# Create a dictionary from the Yammer messages.
# The RESTful API to the "messages" endpoint will result in one response with two blocks of structures within:
# 1. messages: the actual posts/replies/polls within the message thread
# 2. references: usually users.
# Start by grabbing the latest reply in thread and go backwards from there using message ID.
# Start without newer_than or older_than parameters to get the newestMsgID.
while moreToProcess:
    # Be respectful of Yammer API limits; else we get throttled / banned.
    restCall += 1
    if restCall % MAX_REQ_PER_INTERVAL == 0:
        time.sleep(31)  # Pause for a little more than 30 seconds every MAX_REQ_PER_INTERVAL requests

    # Grab the latest set of messages in the thread and set newestMsgID
    yammerMessages = dict(yammer.messages.in_thread(GMAIL_THREAD, older_than=currentPgLowerMsgID))

    # Read the latest set messages and users who posted them
    # Users: Load up the id:full_name key/value pair dictionary now
    for user in yammerMessages.get("references"):
        users[user.get("id")] = user.get("full_name")  # The format here is dictionary[key]=value

    # Messages:
    for message in yammerMessages.get("messages"):
        # Note: in the messages context, sender_id is the same integer as "id" in the references context.
        print(users[message.get("sender_id")], ":", sep='')  # Don't add space between user and colon character

        #Get the currentID, and set newestMsgID
        currentID = message.get("id")
        if currentID > newestMsgID:
            newestMsgID = currentID

        #Set the current page's lowest ID to the current ID.
        # After the loop this holds the last (lowest) ID of the page and
        # becomes the older_than bound for the next request.
        currentPgLowerMsgID = currentID

        print("ID:", currentID)
        print("newestMsgID:", newestMsgID)
        print(message.get("body").get("plain"))
        print(
            "=========================================================================================================")

    # A page shorter than the maximum means the thread is exhausted.
    if len(yammerMessages.get("messages")) < MAX_MSG_PER_PAGE:
        moreToProcess = False
|
Succeeding the AT95 was a tough ask, but Audio-Technica have risen to the challenge. May the ‘green king’ rest in peace, the successor has arrived to steal the throne.
When Technics commercialised the direct drive turntable in the ‘70s, their decks became known as class-leading products among the best available at any price. These new Technics turntables set new technical standards, paving the way for the future evolution of vinyl and the equipment we use to play it.
The SugarCube enhances the listening experience with vinyl old and new, and for that it earns a thoroughly deserved recommendation.
I’ve yet to come across a Technics that didn’t at least sound good, and I had great fun spinning 7” singles on the L20 before returning it to its rightful owner ready to deliver another few decades of faithful service.
I was surprised by the performance improvement. A highly recommended and worthwhile DIY upgrade that must be heard to be believed.
The Little Fwend is as thoughtfully designed as it is nicely machined. Setup correctly it does its job with no hassle and minimal user input, besides a quick push on its platform after each use. Expensive it may be, but it’s a terrific accessory and is therefore highly recommended.
If there was ever a product deserving of its place at the bottom of a landfill, this is it.
You could spend a fortune on the best cartridge money can buy for minimal return, or you could opt for the modestly price option that simply gets everything right and buy more music. I know which I’d rather.
Arcam have been known to produce some fine phono stages and the rPhono continues that tradition.
A phono stage is a preamplifier used to boost the minute signal of a phono cartridge, while reversing the equalisation curves used when cutting the record. Here's our complete guide.
Should you Buy an All-In-One Turntable?
The turntables on today’s market fall into 1 of 2 categories. All-in-1 tables consist of a turntable, amplifier and speakers, contained within the same casework. Some systems include support for other formats too, including CD, digital playback via USB and even cassette. But are they as good as they seem?
Exploring the 2016 turntable market with a selection of turntables at various price points.
Many of the most popular record players for sale today are of an all-in-1 design, meaning that the record player, amplifier and speakers are housed within the same case, which is usually of a retro design in a portable space-saving form factor. These devices can be purchased for less than £100, which compared to even a budget system comprised of a separate turntable, amplifier and speakers can make them seem like excellent value for money. To assess whether or not this is the case, we first need to see what actually makes a turntable tick.
An in-depth guide to correctly setting up a turntable. The correct way to adjust vertical tracking force, vertical tracking angle, anti-skate / bias and azimuth are all discussed here.
How to correctly install and align a phono cartridge.
Explains how to optimise a turntable and care for your records to achieve the best possible sound.
A guide to selecting and purchasing a used turntable, as well as tips on checking it for full functionality.
A guide to the specifications of your turntable, tonearm and cartridge.
A guide to the three primary turntable drive systems - belt drive, direct drive and idler drive - and the pros and cons of each.
A selection of free cartridge alignment protractors and strobe discs to accurately align a cartridge and test the speed of your turntable.
|
"""
Rename files or directories using regular expression.
Does nothing without the --force option.
example:
./pyrename.py '(.*)\.py' '\g<1>_renamed.py'
"""
import logging
import os
import re
from .. import logutils
from .. import options
from .. import utils
logutils.setup_logging()
def main(args=None):
    """Find paths under ``opts.top`` matching a regex, validate the rename
    plan, and (with ``--force``) actually perform the renames.

    :param args: optional argument list forwarded to the options parser
                 (defaults to ``sys.argv``)
    :raises SystemExit: on an invalid top-level directory or an invalid plan
    """
    work = os.getcwd()
    opts = options.get_arguments(work, args)
    # check top level directory (isdir() is already False for missing paths)
    if not os.path.isdir(opts.top):
        logging.error('invalid top level directory: %s', opts.top)
        raise SystemExit(1)
    # compile the match/exclude regexes once; opts.nomatch may be None, in
    # which case re.compile raises TypeError and exclusion is disabled
    flags = re.IGNORECASE if opts.ignorecase else 0
    regex1 = re.compile(opts.pattern, flags)
    try:
        regex2 = re.compile(opts.nomatch, flags)
    except TypeError:
        regex2 = None
    # compile replace
    if opts.func:
        # SECURITY: evaluates user-supplied code; acceptable only because this
        # is a local CLI acting on behalf of the invoking user
        opts.replace = eval('lambda x : {}'.format(opts.replace))
    # record errors
    error = False
    # find paths
    opaths = []
    npaths = []
    for root, p in utils.walk(opts.top, r=opts.recursive, dirs=opts.dirs, files=opts.files):
        if not regex1.match(p):
            continue
        # exclude list
        if p in opts.exclude:
            logging.info('path excluded!\n\n\t%s\n', os.path.join(root, p))
            continue
        # exclude nomatch
        if regex2 is not None and regex2.match(p):
            logging.info('path excluded!\n\n\t%s\n', os.path.join(root, p))
            continue
        # construct new base; fall back to the old name on a bad replacement
        try:
            n = regex1.sub(opts.replace, p)
        except re.error:
            logging.exception('regex error')
            error = True
            n = p
        # construct paths
        opath = os.path.join(root, p)
        npath = os.path.join(root, n)
        opaths.append((root, p, opath))
        npaths.append((root, n, npath))
        # output match
        logging.info('found a match!\n\n\topath (%d): %s\n\tnpath (%d): %s\n',
                     os.path.exists(opath), opath, os.path.exists(npath), npath)
    # describe paths
    oset = set(opaths)
    nset = set(npaths)
    iset = oset.intersection(nset)
    logging.info('%d old', len(opaths))
    logging.info('%d old (unique)', len(oset))
    logging.info('%d new', len(npaths))
    logging.info('%d new (unique)', len(nset))
    logging.info('%d same', len(iset))
    # Validate the plan with plain conditionals.  The original used
    # ``assert`` (stripped under ``python -O``) and ``len(a) is len(b)``
    # (identity comparison of ints, only reliable for small cached values).
    if not opaths:
        logging.error('no old paths found')
        error = True
    if not npaths:
        logging.error('no new paths found')
        error = True
    if len(oset) != len(opaths):
        logging.error('old paths are not unique')
        error = True
    if len(nset) != len(npaths):
        logging.error('new paths are not unique')
        error = True
    if iset:
        logging.error('some paths are the same')
        error = True
    # check if old paths exist
    found = [path for root, base, path in opaths if not os.path.exists(path)]
    if found:
        logging.error('some old paths do not exist\n\n\t%s\n',
                      '\n\t'.join(found))
        error = True
    # check if new paths exist
    found = [path for root, base, path in npaths if os.path.exists(path)]
    if found:
        logging.error('some new paths already exist\n\n\t%s\n',
                      '\n\t'.join(found))
        error = True
    # stop if there were errors
    if error:
        logging.error('invalid configuration')
        raise SystemExit(1)
    # move files
    if opts.force:
        logging.info('moving paths!')
        for (oroot, obase, opath), (nroot, nbase, npath) in zip(opaths, npaths):
            utils.move(opath, npath, git=opts.git)
    else:
        logging.info('\n\n\tThis was a dry run, please use --force to perform renaming\n')
if __name__ == '__main__':
    try:
        main()
    except SystemExit:
        # deliberate exits (bad arguments / invalid plan) are already reported
        pass
    except Exception:
        # narrowed from a bare ``except:`` so KeyboardInterrupt and SystemExit
        # propagate instead of being logged as unhandled errors
        logging.exception('caught unhandled exception')
|
3DP Net은 이더넷(랜) 카드를 자동으로 인식합니다. 그리고 최적의 드라이버를 연결해 줍니다.
오프라인 설치를 지원하므로 윈도우즈 설치 직후 이용에 적합합니다.
3DP Net에 내장된 드라이버는 간단히 설치 가능합니다.
사용자에게 있어 그 어떤 제한도 없이 사용 가능합니다.
"3DP Net is a great application created to detect ethernet cards automatically and to provide the newest or the most suitable driver."
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import re
import six
from optionaldict import optionaldict
from wechatpy.client.api.base import BaseWeChatAPI
from wechatpy.utils import random_string
class WeChatMessage(BaseWeChatAPI):
    """WeChat Official Account message APIs: customer-service messages,
    mass (broadcast) messages, template messages and one-time subscribe
    messages."""

    # OpenIDs are 28 characters of [A-Za-z0-9_-]; used to tell an OpenID
    # apart from a WeChat account name when previewing mass messages.
    OPENID_RE = re.compile(r'^[\w\-]{28}$', re.I)

    def _send_custom_message(self, data, account=None):
        """POST *data* to the customer-service send endpoint, optionally
        routing it through a specific customer-service (kf) account."""
        data = data or {}
        if account:
            data['customservice'] = {'kf_account': account}
        return self._post(
            'message/custom/send',
            data=data
        )

    def send_text(self, user_id, content, account=None):
        """
        Send a text message via the customer-service API.

        Reference:
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param content: text body of the message
        :param account: optional customer-service (kf) account to send from
        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.message.send_text('openid', 'text')
        """
        data = {
            'touser': user_id,
            'msgtype': 'text',
            'text': {'content': content}
        }
        return self._send_custom_message(data, account=account)

    def send_image(self, user_id, media_id, account=None):
        """
        Send an image message via the customer-service API.

        Reference:
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param media_id: media id of the image; upload via :func:`upload_media`
        :param account: optional customer-service (kf) account to send from
        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.message.send_image('openid', 'media_id')
        """
        data = {
            'touser': user_id,
            'msgtype': 'image',
            'image': {
                'media_id': media_id
            }
        }
        return self._send_custom_message(data, account=account)

    def send_voice(self, user_id, media_id, account=None):
        """
        Send a voice message via the customer-service API.

        Reference:
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param media_id: media id of the voice clip; upload via :func:`upload_media`
        :param account: optional customer-service (kf) account to send from
        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.message.send_voice('openid', 'media_id')
        """
        data = {
            'touser': user_id,
            'msgtype': 'voice',
            'voice': {
                'media_id': media_id
            }
        }
        return self._send_custom_message(data, account=account)

    def send_video(self, user_id, media_id, title=None,
                   description=None, account=None):
        """
        Send a video message via the customer-service API.

        Reference:
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param media_id: media id of the video; upload via :func:`upload_media`
        :param title: optional title of the video message
        :param description: optional description of the video message
        :param account: optional customer-service (kf) account to send from
        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.message.send_video('openid', 'media_id', 'title', 'description')
        """
        video_data = {
            'media_id': media_id,
        }
        # title/description are omitted from the payload when not provided
        if title:
            video_data['title'] = title
        if description:
            video_data['description'] = description
        data = {
            'touser': user_id,
            'msgtype': 'video',
            'video': video_data
        }
        return self._send_custom_message(data, account=account)

    def send_music(self, user_id, url, hq_url, thumb_media_id,
                   title=None, description=None, account=None):
        """
        Send a music message via the customer-service API.

        Reference:
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param url: music link
        :param hq_url: high-quality music link, preferred on Wi-Fi
        :param thumb_media_id: media id of the thumbnail; upload via
                               :func:`upload_media`
        :param title: optional music title
        :param description: optional music description
        :param account: optional customer-service (kf) account to send from
        :return: decoded JSON response
        """
        music_data = {
            'musicurl': url,
            'hqmusicurl': hq_url,
            'thumb_media_id': thumb_media_id
        }
        if title:
            music_data['title'] = title
        if description:
            music_data['description'] = description
        data = {
            'touser': user_id,
            'msgtype': 'music',
            'music': music_data
        }
        return self._send_custom_message(data, account=account)

    def send_articles(self, user_id, articles, account=None):
        """
        Send a news (rich-media article) message via the customer-service API.

        Reference:
        http://mp.weixin.qq.com/wiki/7/12a5a320ae96fecdf0e15cb06123de9f.html

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param articles: either a list/tuple of at most 10 article dicts
                         (keys: title, description, url, image/picurl), or the
                         media_id string of uploaded mpnews material
        :param account: optional customer-service (kf) account to send from
        :return: decoded JSON response
        """
        if isinstance(articles, (tuple, list)):
            # explicit article list -> 'news' message
            articles_data = []
            for article in articles:
                articles_data.append({
                    'title': article['title'],
                    'description': article['description'],
                    'url': article['url'],
                    # accept either 'image' or legacy 'picurl' as the picture key
                    'picurl': article.get('image', article.get('picurl')),
                })
            data = {
                'touser': user_id,
                'msgtype': 'news',
                'news': {
                    'articles': articles_data
                }
            }
        else:
            # media_id of previously uploaded material -> 'mpnews' message
            data = {
                'touser': user_id,
                'msgtype': 'mpnews',
                'mpnews': {
                    'media_id': articles,
                }
            }
        return self._send_custom_message(data, account=account)

    def send_card(self, user_id, card_id, card_ext, account=None):
        """
        Send a card/coupon message via the customer-service API.

        Reference:
        http://mp.weixin.qq.com/wiki/1/70a29afed17f56d537c833f89be979c9.html

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param card_id: card/coupon id
        :param card_ext: card extension information
        :param account: optional customer-service (kf) account to send from
        :return: decoded JSON response
        """
        data = {
            'touser': user_id,
            'msgtype': 'wxcard',
            'wxcard': {
                'card_id': card_id,
                'card_ext': card_ext
            }
        }
        return self._send_custom_message(data, account=account)

    def delete_mass(self, msg_id):
        """
        Delete a mass (broadcast) message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param msg_id: id of the mass message to delete
        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.message.delete_mass('message id')
        """
        return self._post(
            'message/mass/delete',
            data={
                'msg_id': msg_id
            }
        )

    def _send_mass_message(self, group_or_users, msg_type, msg,
                           is_to_all=False, preview=False):
        """Shared driver for all mass-message senders.

        Dispatches to ``message/mass/send`` (explicit OpenID list),
        ``message/mass/sendall`` (group / all followers) or
        ``message/mass/preview`` (single recipient preview) and merges the
        type-specific *msg* payload into the request body.
        """
        data = {
            'msgtype': msg_type
        }
        if not preview:
            if isinstance(group_or_users, (tuple, list)):
                # send by user ids
                data['touser'] = group_or_users
                endpoint = 'message/mass/send'
            else:
                # send by group id
                data['filter'] = {
                    'group_id': group_or_users,
                    'is_to_all': is_to_all,
                }
                endpoint = 'message/mass/sendall'
        else:
            if not isinstance(group_or_users, six.string_types):
                raise ValueError('group_or_users should be string types')
            # preview endpoint
            if self.OPENID_RE.match(group_or_users):
                # preview by OpenID
                data['touser'] = group_or_users
            else:
                # preview by WeChat account name (towxname)
                data['towxname'] = group_or_users
            endpoint = 'message/mass/preview'
        data.update(msg)
        return self._post(
            endpoint,
            data=data
        )

    def send_mass_text(self, group_or_users, content,
                       is_to_all=False, preview=False):
        """
        Broadcast a text message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param group_or_users: integer group id for a group broadcast, or a
                               list/tuple of OpenIDs for a user-list broadcast;
                               pass None with ``is_to_all=True`` to reach all users
        :param content: text body of the message
        :param is_to_all: when True, broadcast to every user (ignoring group_id);
                          when False, send to the group selected by group_id
        :type is_to_all: bool
        :param preview: when True, send a preview instead; ``group_or_users``
                        must then be a single OpenID string
        :type preview: bool
        :return: decoded JSON response
        """
        return self._send_mass_message(
            group_or_users,
            'text',
            {
                'text': {
                    'content': content
                }
            },
            is_to_all,
            preview
        )

    def send_mass_image(self, group_or_users, media_id,
                        is_to_all=False, preview=False):
        """
        Broadcast an image message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param group_or_users: integer group id for a group broadcast, or a
                               list/tuple of OpenIDs for a user-list broadcast;
                               pass None with ``is_to_all=True`` to reach all users
        :param media_id: media id of the image; upload via :func:`upload_media`
        :param is_to_all: when True, broadcast to every user (ignoring group_id);
                          when False, send to the group selected by group_id
        :type is_to_all: bool
        :param preview: when True, send a preview instead; ``group_or_users``
                        must then be a single OpenID string
        :type preview: bool
        :return: decoded JSON response
        """
        return self._send_mass_message(
            group_or_users,
            'image',
            {
                'image': {
                    'media_id': media_id
                }
            },
            is_to_all,
            preview
        )

    def send_mass_voice(self, group_or_users, media_id,
                        is_to_all=False, preview=False):
        """
        Broadcast a voice message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param group_or_users: integer group id for a group broadcast, or a
                               list/tuple of OpenIDs for a user-list broadcast;
                               pass None with ``is_to_all=True`` to reach all users
        :param media_id: media id of the voice clip; upload via :func:`upload_media`
        :param is_to_all: when True, broadcast to every user (ignoring group_id);
                          when False, send to the group selected by group_id
        :type is_to_all: bool
        :param preview: when True, send a preview instead; ``group_or_users``
                        must then be a single OpenID string
        :type preview: bool
        :return: decoded JSON response
        """
        return self._send_mass_message(
            group_or_users,
            'voice',
            {
                'voice': {
                    'media_id': media_id
                }
            },
            is_to_all,
            preview
        )

    def send_mass_video(self, group_or_users, media_id, title=None,
                        description=None, is_to_all=False, preview=False):
        """
        Broadcast a video message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param group_or_users: integer group id for a group broadcast, or a
                               list/tuple of OpenIDs for a user-list broadcast;
                               pass None with ``is_to_all=True`` to reach all users
        :param media_id: media id of the video; upload via :func:`upload_video`
        :param title: optional video title
        :param description: optional video description
        :param is_to_all: when True, broadcast to every user (ignoring group_id);
                          when False, send to the group selected by group_id
        :type is_to_all: bool
        :param preview: when True, send a preview instead; ``group_or_users``
                        must then be a single OpenID string
        :type preview: bool
        :return: decoded JSON response
        """
        video_data = {
            'media_id': media_id
        }
        if title:
            video_data['title'] = title
        if description:
            video_data['description'] = description
        return self._send_mass_message(
            group_or_users,
            'mpvideo',
            {
                'mpvideo': video_data
            },
            is_to_all,
            preview
        )

    def send_mass_article(self, group_or_users, media_id,
                          is_to_all=False, preview=False):
        """
        Broadcast a news (article) message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param group_or_users: integer group id for a group broadcast, or a
                               list/tuple of OpenIDs for a user-list broadcast;
                               pass None with ``is_to_all=True`` to reach all users
        :param media_id: media id of the articles; upload via :func:`upload_articles`
        :param is_to_all: when True, broadcast to every user (ignoring group_id);
                          when False, send to the group selected by group_id
        :type is_to_all: bool
        :param preview: when True, send a preview instead; ``group_or_users``
                        must then be a single OpenID string
        :type preview: bool
        :return: decoded JSON response
        """
        return self._send_mass_message(
            group_or_users,
            'mpnews',
            {
                'mpnews': {
                    'media_id': media_id
                }
            },
            is_to_all,
            preview
        )

    def get_mass(self, msg_id):
        """
        Query the delivery status of a mass (broadcast) message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param msg_id: message id returned when the mass message was sent
        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            res = client.message.get_mass('mass message id')
        """
        return self._post(
            'message/mass/get',
            data={
                'msg_id': msg_id
            }
        )

    def send_template(self, user_id, template_id, data, url=None, mini_program=None):
        """
        Send a template message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1445241432&lang=zh_CN

        :param user_id: user OpenID, i.e. the ``source`` of a received `Message`
        :param template_id: template id, chosen from the platform template library
        :param data: template message payload
        :param url: optional link opened when the user taps the message
        :param mini_program: optional mini-program target, e.g.
                             `{'appid': 'appid', 'pagepath': 'index?foo=bar'}`
        :return: decoded JSON response
        """
        # optionaldict drops keys whose value is None from the request body
        tpl_data = optionaldict(
            touser=user_id,
            template_id=template_id,
            url=url,
            miniprogram=mini_program,
            data=data,
        )
        return self._post(
            'message/template/send',
            data=tpl_data
        )

    def get_autoreply_info(self):
        """
        Get the account's auto-reply rules.

        Reference:
        http://mp.weixin.qq.com/wiki/7/7b5789bb1262fb866d01b4b40b0efecb.html

        :return: decoded JSON response

        Usage::

            from wechatpy import WeChatClient

            client = WeChatClient('appid', 'secret')
            info = client.message.get_autoreply_info()
        """
        return self._get('get_current_autoreply_info')

    def send_mass_card(self, group_or_users, card_id,
                       is_to_all=False, preview=False):
        """
        Broadcast a card/coupon message.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

        :param group_or_users: integer group id for a group broadcast, or a
                               list/tuple of OpenIDs for a user-list broadcast;
                               pass None with ``is_to_all=True`` to reach all users
        :param card_id: card/coupon id
        :param is_to_all: when True, broadcast to every user (ignoring group_id);
                          when False, send to the group selected by group_id
        :type is_to_all: bool
        :param preview: when True, send a preview instead; ``group_or_users``
                        must then be a single OpenID string
        :type preview: bool
        :return: decoded JSON response
        """
        return self._send_mass_message(
            group_or_users,
            'wxcard',
            {
                'wxcard': {
                    'card_id': card_id
                }
            },
            is_to_all,
            preview
        )

    def get_subscribe_authorize_url(self, scene, template_id, redirect_url, reserved=None):
        """
        Build the URL that asks the user to authorize a one-time subscription.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1500374289_66bvB

        :param scene: subscription scene value, an integer in 0-10000
        :type scene: int
        :param template_id: subscribe-message template id (visible in the
                            platform's API permission list)
        :param redirect_url: callback URL redirected to after authorization
        :param reserved: opaque state echoed back unchanged in the callback;
                         useful against CSRF. Randomly generated when omitted.
        """
        if reserved is None:
            reserved = random_string()
        base_url = 'https://mp.weixin.qq.com/mp/subscribemsg'
        params = [
            ('action', 'get_confirm'),
            ('appid', self.appid),
            ('scene', scene),
            ('template_id', template_id),
            ('redirect_url', redirect_url),
            ('reserved', reserved),
        ]
        encoded_params = six.moves.urllib.parse.urlencode(params)
        url = '{base}?{params}#wechat_redirect'.format(base=base_url, params=encoded_params)
        return url

    def send_subscribe_template(self, openid, template_id, scene, title, data, url=None):
        """
        Push a one-time subscribe template message to an authorized user.

        Reference:
        https://mp.weixin.qq.com/wiki?id=mp1500374289_66bvB

        :param openid: OpenID of the receiving user
        :param template_id: subscribe-message template id
        :param scene: subscription scene value, an integer in 0-10000
        :type scene: int
        :param title: message title, at most 15 characters
        :param data: message body; 'value' is the content, 'color' the color,
                     at most 200 characters
        :type data: dict
        :param url: optional link opened on tap (must have an ICP record)
        """
        post_data = {
            'touser': openid,
            'template_id': template_id,
            'url': url,
            'scene': scene,
            'title': title,
            'data': data,
        }
        # NOTE(review): 'url' is already set above (possibly to None), so this
        # conditional re-assignment is redundant — confirm whether a None
        # 'url' key should instead be omitted from the payload.
        if url is not None:
            post_data['url'] = url
        return self._post(
            'message/template/subscribe',
            data=post_data,
        )
|
NO DOC FEES. FACTORY WARRANTY. CARGURUS DEALER OF THE YEAR, 3 YEARS IN A ROW!!!! 2015 Volkswagen Passat TDI SE. 2 owners. No accidents. Clean title. 2.0L TDI engine with an automatic transmission. Well maintained. Sunroof. Bluetooth. AUX and USB. Navigation. Heated front seats. The interior is in excellent condition. Leather seats. No stains. The exterior is in excellent condition. No major scratches or dents. Garage kept. Tires are good.
|
#!/usr/bin/env python
#
# Beautiful Capi generates beautiful C API wrappers for your C++ classes
# Copyright (C) 2015 Petr Petrovich Petrov
#
# This file is part of Beautiful Capi.
#
# Beautiful Capi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Beautiful Capi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Beautiful Capi. If not, see <http://www.gnu.org/licenses/>.
#
from Parser import TClass, TNamespace, TBeautifulCapiRoot, TOverloadSuffixMode
from Helpers import get_full_method_name
class OverloadSuffixesProcessor(object):
    """Walks the Beautiful Capi namespace/class tree and makes the overload
    suffixes of overloaded methods and functions unique, honouring each
    scope's ``overload_suffix_mode``."""

    def __init__(self, root_node: TBeautifulCapiRoot):
        self.root_node = root_node
        # suffix mode inherited from the enclosing scope; Off disables checks
        self.cur_overload_suffix_mode = TOverloadSuffixMode.Off
        # names of the scopes currently entered (used to build warning text)
        self.namespace_stack = []

    class ParamsScope(object):
        """Scope guard: construction pushes a namespace/class onto the
        processor's stack and applies its overload_suffix_mode (when
        explicitly filled); ``__exit__`` pops the name and restores the
        previous mode."""

        # NOTE(review): annotation ``TNamespace or TClass`` evaluates to just
        # TNamespace; either type is accepted in practice.
        def __init__(self, overload_suffixes_processor, namespace_or_class: TNamespace or TClass):
            self.overload_suffixes_processor = overload_suffixes_processor
            # remember the outer mode so it can be restored on exit
            self.old_overload_suffix_mode = self.overload_suffixes_processor.cur_overload_suffix_mode
            if namespace_or_class.overload_suffix_mode_filled:
                self.overload_suffixes_processor.cur_overload_suffix_mode = namespace_or_class.overload_suffix_mode
            self.overload_suffixes_processor.namespace_stack.append(namespace_or_class.name)

        def __enter__(self):
            # all setup happens in __init__; nothing more to do on entry
            pass

        def __exit__(self, exc_type, exc_val, exc_tb):
            self.overload_suffixes_processor.namespace_stack.pop()
            self.overload_suffixes_processor.cur_overload_suffix_mode = self.old_overload_suffix_mode

    def __check_for_unique(self, routines: [object]):
        """Append a numeric suffix to every routine whose full name collides
        with an earlier one (second occurrence gets '1', third '2', ...);
        print a warning when the current mode is Notify."""
        if self.cur_overload_suffix_mode != TOverloadSuffixMode.Off:
            existing_names = {}
            for routine in routines:
                name = ''.join(get_full_method_name(routine))
                if name in existing_names:
                    existing_names[name] += 1
                    old_suffix = routine.overload_suffix
                    routine.overload_suffix += str(existing_names[name])
                    if self.cur_overload_suffix_mode == TOverloadSuffixMode.Notify:
                        print(
                            'Warning: Method or function {routine_name}() is overloaded'
                            ' and has no unique overload suffix ("{old_suffix}"). '
                            'Suffix "{suffix}" has been installed.'.format(
                                routine_name='::'.join(self.namespace_stack) + '::' + routine.name,
                                old_suffix=old_suffix,
                                suffix=routine.overload_suffix))
                else:
                    # first occurrence keeps its suffix untouched
                    existing_names[name] = 0

    def __process_class(self, cur_class: TClass):
        # methods and indexers are disambiguated independently of each other
        with OverloadSuffixesProcessor.ParamsScope(self, cur_class):
            self.__check_for_unique(cur_class.methods)
            self.__check_for_unique(cur_class.indexers)

    def __process_namespace(self, namespace: TNamespace):
        # free functions first, then recurse into child namespaces and classes
        with OverloadSuffixesProcessor.ParamsScope(self, namespace):
            self.__check_for_unique(namespace.functions)
            for nested_namespace in namespace.namespaces:
                self.__process_namespace(nested_namespace)
            for cur_class in namespace.classes:
                self.__process_class(cur_class)

    def process(self):
        """Entry point: process every top-level namespace of the API root."""
        for cur_namespace in self.root_node.namespaces:
            self.cur_overload_suffix_mode = cur_namespace.overload_suffix_mode
            self.__process_namespace(cur_namespace)
def process(root_node: TBeautifulCapiRoot):
    """Generate unique overload suffixes for the whole API tree."""
    OverloadSuffixesProcessor(root_node).process()
|
Early Intervention Speech Therapy services are provided in the home to families of children 0-3 years of age in Solano County via North Bay Regional Center. If you have concerns about your child’s development or would like to know more about North Bay Regional Center’s services, click here.
I believe that families are the most important members of the early intervention team. My job is to ensure that families have strategies and supports that address their individual concerns. Therapy goals will be designed to be functional and will reflect the changes that the family would like to see happen for themselves and for the child. It will be based on family priorities and the developmental needs of the child. I use evidence-based practices, multidisciplinary team collaboration, and family-focused service delivery.
Children learn best in their natural environments, and for infants and toddlers, this is typically at home or in the childcare setting. Working in natural environments provides opportunities for caregivers to work together to learn new strategies within daily routines and work on functional goals. Therapy techniques will fit into a family’s routines and build upon activities that are motivating to the child. Often times, therapy materials will be items in a family’s home, for example a child’s toys or food typically provided to the child. This will empower the family to continue working on target skills throughout the week.
Furthermore, I believe it is important for children to have the opportunity to interact and learn from their peers. This may occur at a childcare center or in community locations such as the park. I encourage caregivers to take advantage of these learning opportunities. I encourage feedback from caregivers about their experiences in various locations and may work with them in various settings to increase their confidence in carrying over therapy strategies and techniques.
Anticipated outcomes include improved functional communication with people in a child’s natural environment; i.e., a better ability to understand language and express needs. Family outcomes include recognizing natural learning opportunities that happen throughout the day and using techniques learned in therapy sessions even when the therapist is not present. The family and child will likely notice a reduction in frustration and negative behaviors as a child’s communication skills increase.
|
from django.db import models
from django.utils.timezone import now
from datetime import date, datetime
#from django.core.validators import MaxValueValidator, MinValueValidator
# Create your models here.
class Children(models.Model):
    """A child record, linked to one or more Parent records."""
    #Children_ID = models.AutoField(primary_key=True)
    Parents = models.ManyToManyField('Parent')
    FirstName = models.CharField(max_length=40)
    LastName = models.CharField(max_length=40)
    Birthday = models.DateField()
    # RAMQ: Quebec health-insurance card number, with its expiry date; optional
    RAMQ = models.CharField(max_length=14, blank=True, null=True)
    RAMQ_Expiration = models.DateField(blank=True, null=True)
    # def save(self, *args, **kwargs):
    # if self.FullName is None:
    # self.FullName = self.LastName+" "+self.FirstName
    # super(Children, self).save(*args, **kwargs)
    def __str__(self):
        return self.FullName
    def list_parents(self):
        # comma-separated full names of all linked parents (admin display helper)
        return ", ".join([parent.FullName for parent in self.Parents.all()])
    @property
    def children_age(self):
        # age in whole years; 365.25 accounts for leap years
        return int((date.today() - self.Birthday).days / 365.25 )
    @property
    def FullName(self):
        return self.LastName+" "+self.FirstName
class Parent(models.Model):
    """A parent/guardian; may be linked to several Children records."""
    #Parent_ID = models.AutoField(primary_key=True)
    FirstName = models.CharField(max_length=40)
    LastName = models.CharField(max_length=40)
    Email = models.EmailField()
    Phone = models.CharField(max_length=15)
    Phone_emergency = models.CharField(max_length=15)
    # SIN: Canadian Social Insurance Number, stored as formatted text
    SIN = models.CharField(max_length=11)
    def __str__(self):
        return self.LastName + " " + self.FirstName
    def list_childrens(self, obj):
        # NOTE(review): takes an extra ``obj`` and ignores ``self`` like a
        # ModelAdmin.list_display callable — presumably meant for the admin
        # site; confirm callers before using it as an instance method.
        return ", ".join([children.FullName for children in obj.children_set.all()])
    @property
    def FullName(self):
        return self.LastName+" "+self.FirstName
class Educator(models.Model):
    """An educator; may be assigned to several classes (M2M declared on Classe)."""
    #Educator_ID = models.AutoField(primary_key=True)
    FirstName = models.CharField(max_length=40)
    LastName = models.CharField(max_length=40)
    Email = models.EmailField()
    Phone = models.CharField(max_length=15)
    Phone_emergency = models.CharField(max_length=15)
    def __str__(self):
        return "{} {}".format(self.FullName, self.list_classes())
    def list_classes(self):
        # The M2M lives on Classe.Educators, so the reverse accessor is
        # classe_set; Classe stores its label in the .Name field (it has no
        # name() method — the original call crashed with AttributeError).
        return ", ".join([classe.Name for classe in self.classe_set.all()])
    @property
    def FullName(self):
        # Computed per instance; the original class-level
        # ``FullName = "%s %s" % (LastName, FirstName)`` formatted the
        # CharField *objects* at class-definition time, yielding garbage.
        return self.LastName + " " + self.FirstName
class Classe(models.Model):
    """A class/group of children, staffed by one or more educators."""
    #Classe_ID = models.AutoField(primary_key=True)
    Educators = models.ManyToManyField(Educator)
    Name = models.CharField(max_length=255)
    def __str__(self):
        return "{} {}".format(self.Name, self.list_educators())
    def list_educators(self):
        # Educator exposes FullName as a property; it has no name() method,
        # so the original ``educator.name()`` raised AttributeError.
        return ", ".join([educator.FullName for educator in self.Educators.all()])
|
DISCLAIMER: The amount specified in this ad is subject to change upon financing or other conditions. The down payment may vary depending on the qualified customer. Annual Percentage Rate (APR) will vary per customer qualification. Fees and repayment terms vary by program and may result in a higher rate than advertised. A listed price does not include taxes, tags, title fees, or any other dealer fees. While we make every effort to display accurate data, the vehicle listings within this website may not reflect all accurate vehicle items. All inventory listed is subject to prior change or sale. The vehicle photo displayed may be used as an example only. Pricing throughout the website does not include any additional options that may have been installed by the dealership. Please see the dealer for exact details. Vehicles may also be in transit and not located at the dealer, although they may be advertised on the website. Some vehicles are shown with optional equipment. See the actual vehicle for complete accuracy of features, options, and/or pricing. Due to the numerous possible combinations of vehicle models, styles, colors and options, the vehicle pictures on this website may not fully match your vehicle exactly; however, it is described and will match it as closely as possible.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
def BuildDocument(doctype):
    """Return the LaTeX preamble for *doctype*.

    'report' is typeset with the 'article' document class; any other value
    is used as the document class verbatim.
    """
    if doctype == 'report':
        doctype = 'article'
    # Raw string: in the original non-raw literal, '\usepackage' starts an
    # invalid '\u' unicode escape under Python 3 (a SyntaxError). The raw
    # form emits byte-identical LaTeX.
    return r'''\documentclass[11pt, oneside]{%s}
\usepackage[margin=1in]{geometry} \geometry{letterpaper}
\usepackage{setspace}
\usepackage[font=singlespacing,labelfont=bf]{caption}
\usepackage{indentfirst}
\usepackage{float}
\usepackage{booktabs}
\usepackage{amsmath, gensymb}
\usepackage{url}
\usepackage{graphicx}
\makeatletter
\def\maxwidth{0.6\columnwidth}
\makeatother
\let\Oldincludegraphics\includegraphics
\renewcommand{\includegraphics}[1]{\Oldincludegraphics[width=\maxwidth]{#1}}
''' % (doctype)
def BuildTable(caption, alignments, headers, rows, label):
    """Return a LaTeX table ([H] float) with *caption*, column *alignments*
    (e.g. '{llr}'), a *headers* row, body *rows* and a \\label.

    Each row may be a pre-formatted LaTeX string (appended verbatim) or a
    sequence of cell strings.
    """
    latex = '''\\begin{table}[H]
\\caption{%s}
\\centering
\\begin{tabular}%s
\\hline
%s\\\\
\\hline
''' % (caption, alignments, headers)
    for r in rows:
        # isinstance replaces the unidiomatic ``type(r) == type('s')`` check
        if isinstance(r, str):
            # pre-formatted row: caller supplies separators and terminator
            latex += r
        else:
            # NOTE(review): the first cell is concatenated without ' & ' —
            # presumably r[0] already carries its own separator; confirm
            # against callers before "fixing" this join.
            latex += r[0] + ' & '.join(r[1:]) + '\\\\\n'
    latex += '''\\hline
\\end{tabular}
\\label{%s}
\\end{table}''' % (label)
    return latex
def BuildFigure(file, caption, label):
    """Return a LaTeX figure ([H] float) embedding *file* with the given
    caption and label."""
    lines = (
        '\\begin{figure}[H]',
        '\\centering',
        '\\includegraphics{%s}' % (file,),
        '\\caption{%s}' % (caption,),
        '\\label{%s}' % (label,),
        '\\end{figure}',
    )
    return '\n'.join(lines)
def BuildTitle(title, subtitle):
    """Return a \\title{...} block; *subtitle* (if any) is set smaller on a
    second line. Returns '' when *title* is None; a subtitle without a title
    is ignored, as before.
    """
    latex = ''
    # ``is not None`` replaces the unidiomatic ``!= None`` comparisons
    if title is not None:
        latex += r'\title{%s' % (title)
        if subtitle is not None:
            latex += r'\\\vspace{0.5em}{\large %s}' % (subtitle)
        latex += '}\n'
    return latex
def BuildAuthor(author):
    """Return an \\author{...} line, or '' when *author* is None."""
    latex = ''
    # ``is not None`` replaces the unidiomatic ``!= None`` comparison
    if author is not None:
        latex += '\\author{%s}\n' % (author)
    return latex
def BeginDocument(doctype):
    """Open the document body; 'report' suppresses page numbering before
    \\maketitle."""
    parts = ['\\date{}\n\n\\hyphenpenalty=100000\n\n\\begin{document}\n\n']
    if doctype == 'report':
        parts.append('\\pagenumbering{gobble}\n')
    parts.append('\\maketitle\n')
    return ''.join(parts)
def BuildInfoTable(Class, due, received):
    """Return a single-column header table with course / due / received lines.

    Any argument may be None, in which case its line is omitted; '' is
    returned when all three are None.
    """
    # ``is not None`` replaces ``!= None``; the closing literal uses escaped
    # backslashes so '\e' is no longer an invalid escape sequence.
    latex = ''
    if Class is not None or due is not None or received is not None:
        latex += '''\\begin{table}[H]
\\centering
\\begin{tabular}{l}
'''
        if Class is not None:
            latex += '%s\\\\\n' % (Class)
        if due is not None:
            latex += 'Due: %s\\\\\n' % (due)
        if received is not None:
            latex += 'Received: %s\\\\\n' % (received)
        latex += '''\\end{tabular}
\\end{table}
'''
    return latex
def BuildThanks(thanks):
    """Return a \\thanks{...} footnote line, or '' when *thanks* is None."""
    # ``is None`` replaces the unidiomatic ``!= None`` comparison
    if thanks is None:
        return ''
    return '\\thanks{\\noindent %s\\\\\\\\}\n' % (thanks)
def BuildAbstract(abstract):
    """Return the bolded abstract paragraph, or '' when *abstract* is None."""
    # ``is None`` replaces the unidiomatic ``!= None`` comparison
    if abstract is None:
        return ''
    return '\\begin{noindent}\n\\textbf{%s}\n\\end{noindent}\n\n' % (abstract)
def BuildList(kind, items):
    """Return an enumerate ('ordered') or itemize environment for *items*.

    Each item now ends with a newline: the original concatenated the items
    (and the closing \\end) onto a single line, which breaks as soon as an
    item ends with a LaTeX comment ('%') and is hard to read in the output.
    """
    env = 'enumerate' if kind == 'ordered' else 'itemize'
    latex = '\\begin{%s}\n' % (env)
    for entry in items:
        latex += '\t\\item %s\n' % (entry)
    latex += '\\end{%s}\n' % (env)
    return latex
|
Plane-parallel flows of an incompressible fluid in a bounded domain with minimum mean square vorticity are considered. The flow function is a biharmonic function. Such flows include, for example, the stationary solution of the 2D Stokes problem with a potential right-hand side. If the velocity on the boundary is specified, then definition of the flow is reduced to the solution of the boundary value problem of the biharmonic equation. The projection algorithm for solving boundary value problems for the biharmonic equation in complicated domains is presented. There are used systems of functions, complete on the domain boundary, creating the basis of a non-grid method (method of basis potentials) for the solution of hydrodynamic boundary value problems. The concept of the domain's own vortex — the attached vortex flow of Roben — is considered. It is also considered an extended formulation of the problem of building plane-parallel flows — definition of flows by the boundary values of the flow function only, when it is not necessary to set speed limits (which are, generally speaking, not known, as, for example, for a Venturi funnel). The desired density of vortices belongs to the subspace of harmonic functions; the obtained complete system of potentials in this subspace allows one to construct convergent projection algorithms; the density of vortices must be orthogonal to the domain's own vortex. Numerical flows for the funnel domain with the condition of adhesion on the boundary and in the extended formulation are represented.
|
#-*- coding:utf-8 -*-
import dircache, os, math, sys
from PIL import Image
from psd_tools import PSDImage
from psd_tools import Group
import json
class Rectangle:
    """Axis-aligned packing rectangle with trim/offset metadata for atlas
    frames."""
    # geometry inside the atlas
    x = 0
    y = 0
    width = 0
    height = 0
    # offset of the trimmed image inside its original frame
    offX = 0
    offY = 0
    # untrimmed frame size
    origin_width = 0
    origin_height = 0
    arena = 0

    # attributes duplicated by clone(); kept in one place so the two methods
    # cannot drift apart
    _COPY_FIELDS = ('name', 'x', 'y', 'width', 'height', 'arena',
                    'offX', 'offY', 'origin_width', 'origin_height')

    def __init__(self):
        self.name = ""

    def clone(self):
        """Return an independent copy of this rectangle."""
        duplicate = Rectangle()
        for field in self._COPY_FIELDS:
            setattr(duplicate, field, getattr(self, field))
        return duplicate

    def to_dict(self):
        """Serialize to the frame-dict layout used by the atlas JSON."""
        return {
            'x': self.x,
            'y': self.y,
            'w': self.width,
            'h': self.height,
            'offX': self.offX,
            'offY': self.offY,
            'sourceW': self.origin_width,
            'sourceH': self.origin_height,
        }
class FreeRectangleChoiceHeuristic:
    """Strategies for choosing which free rectangle receives a new one."""
    BestShortSideFit = 0  # BSSF: best fit against the free rectangle's short side
    BestLongSideFit = 1   # BLSF: best fit against the free rectangle's long side
    BestAreaFit = 2       # BAF: smallest free rectangle the new one still fits in
    BottomLeftRule = 3    # BL: Tetris-style bottom-left placement
    ContactPointRule = 4  # CP: maximize contact with already-placed rectangles
class MaxRectsBinPack:
    """Python 2 port of the MAXRECTS bin-packing algorithm (after Jukka
    Jylanki's C++/AS3 implementations): rectangles are placed into a
    fixed-size bin while a list of maximal free rectangles is maintained.

    NOTE(review): the class-level lists below are mutable attributes
    shared by all instances until init() rebinds them per instance.
    """
    binWidth = 0
    binHeight = 0
    allowRotations = False
    usedRectangles = [] #new Vector.<Rectangle>();
    freeRectangles = [] #new Vector.<Rectangle>();
    score1 = 0
    score2 = 0
    bestShortSideFit = 0
    bestLongSideFit = 0
    def __init__(self, width, height, rotations = True):
        self.init(width, height, rotations)
    def init(self, width, height, rotations = True):
        """(Re)initialize an empty bin of width x height.

        Sizes are expected to be powers of two.
        NOTE(review): with integer arguments the guard below never fires
        under Python 2, because count() uses truncating integer division
        so the result modulo 1 is always 0 -- confirm intent.
        """
        if( self.count(width) % 1 != 0 or self.count(height) % 1 != 0):
            print "Must be 2,4,8,16,32,...512,1024,..."
            return
        self.binWidth = width
        self.binHeight = height
        self.allowRotations = rotations
        # The whole bin starts as a single free rectangle.
        n = Rectangle()
        n.x = 0
        n.y = 0
        n.width = width
        n.height = height
        self.usedRectangles = []
        self.freeRectangles = []
        self.freeRectangles.append(n)
    def count(self, n):
        """Repeatedly halve n; used by init() as a power-of-two probe."""
        if( n >= 2 ):
            return self.count(n / 2)
        return n
    def insert(self, rect, method):
        """Place one Rectangle using the given heuristic.

        Returns the positioned node; height == 0 on the returned node
        means the rectangle did not fit.
        """
        width = rect.width
        height = rect.height
        name = rect.name
        newNode = Rectangle()
        # NOTE(review): score1/score2 are passed to the finders below,
        # but Python ints are immutable so the C++ "out parameter"
        # semantics are lost; harmless here since insert() ignores them.
        score1 = 0
        score2 = 0
        if method == FreeRectangleChoiceHeuristic.BestShortSideFit:
            newNode = self.findPositionForNewNodeBestShortSideFit(width, height)
        elif method == FreeRectangleChoiceHeuristic.BottomLeftRule:
            newNode = self.findPositionForNewNodeBottomLeft(width, height, score1, score2)
        elif method == FreeRectangleChoiceHeuristic.ContactPointRule:
            newNode = self.findPositionForNewNodeContactPoint(width, height, score1)
        elif method == FreeRectangleChoiceHeuristic.BestLongSideFit:
            newNode = self.findPositionForNewNodeBestLongSideFit(width, height, score2, score1)
        elif method == FreeRectangleChoiceHeuristic.BestAreaFit:
            newNode = self.findPositionForNewNodeBestAreaFit(width, height, score1, score2)
        # Carry the sprite metadata over to the placed node.
        newNode.name = name
        newNode.offX = rect.offX
        newNode.offY = rect.offY
        newNode.origin_width = rect.origin_width
        newNode.origin_height = rect.origin_height
        newNode.arena = rect.arena
        if newNode.height == 0:
            print "not posi for set"
            return newNode
        self.placeRectangle(newNode)
        return newNode
    def insert2(self, Rectangles, dst, method):
        """Greedily place every Rectangle from `Rectangles`.

        The input list is consumed (emptied) as rectangles are placed.
        NOTE(review): `dst` is cleared but never filled afterwards.
        """
        del dst[:] #dst.length = 0;
        while(len(Rectangles) > 0):
            bestScore1 = sys.maxint #int.MAX_VALUE
            bestScore2 = sys.maxint #int.MAX_VALUE
            bestRectangleIndex = -1
            bestNode = Rectangle()
            for i in range(len(Rectangles)):
                score1 = 0
                score2 = 0
                # NOTE(review): scoreRectangle stores its scores on
                # self.score1/self.score2; the locals stay 0, so every
                # candidate compares equal and the first one wins.
                newNode = self.scoreRectangle(Rectangles[i].width, Rectangles[i].height, method, score1, score2)
                newNode.name = Rectangles[i].name
                newNode.offX = Rectangles[i].offX
                newNode.offY = Rectangles[i].offY
                newNode.origin_width = Rectangles[i].origin_width
                newNode.origin_height = Rectangles[i].origin_height
                newNode.arena = Rectangles[i].arena
                if score1 < bestScore1 or (score1 == bestScore1 and score2 < bestScore2):
                    bestScore1 = score1
                    bestScore2 = score2
                    bestNode = newNode
                    bestRectangleIndex = i
            if (bestRectangleIndex == -1):
                return
            self.placeRectangle(bestNode)
            del Rectangles[bestRectangleIndex] #Rectangles.splice(bestRectangleIndex,1)
    def placeRectangle(self, node):
        """Commit a placement: split every free rectangle it overlaps,
        prune redundant free rectangles and record the node as used."""
        numRectanglesToProcess = len(self.freeRectangles)
        i = 0
        while i < numRectanglesToProcess:
            if self.splitFreeNode(self.freeRectangles[i], node):
                del self.freeRectangles[i] #freeRectangles.splice(i,1);
                i = i - 1
                numRectanglesToProcess = numRectanglesToProcess - 1
            i = i + 1
        self.pruneFreeList()
        self.usedRectangles.append(node)
    def scoreRectangle(self, width, height, method, score1, score2):
        """Score a candidate placement; results land in self.score1/2.

        Returns the best node found; height == 0 signals "does not fit".
        """
        newNode = Rectangle()
        self.score1 = sys.maxint #int.MAX_VALUE;
        self.score2 = sys.maxint #int.MAX_VALUE;
        if method == FreeRectangleChoiceHeuristic.BestShortSideFit:
            newNode = self.findPositionForNewNodeBestShortSideFit(width, height)
        elif method == FreeRectangleChoiceHeuristic.BottomLeftRule:
            newNode = self.findPositionForNewNodeBottomLeft(width, height, self.score1, self.score2)
        elif method == FreeRectangleChoiceHeuristic.ContactPointRule:
            newNode = self.findPositionForNewNodeContactPoint(width, height, self.score1)
            # Contact score is "bigger is better"; negate so smaller wins.
            self.score1 = -self.score1;
        elif method == FreeRectangleChoiceHeuristic.BestLongSideFit:
            newNode = self.findPositionForNewNodeBestLongSideFit(width, height, self.score2, self.score1)
        elif method == FreeRectangleChoiceHeuristic.BestAreaFit:
            newNode = self.findPositionForNewNodeBestAreaFit(width, height, self.score1, self.score2)
        #// Cannot fit the current Rectangle.
        if newNode.height == 0:
            self.score1 = sys.maxint #int.MAX_VALUE;
            self.score2 = sys.maxint #int.MAX_VALUE;
            print "not posi for set"
        return newNode
    #Computes the ratio of used surface area.
    def occupancy(self):
        """Return usedArea / binArea.

        NOTE(review): with integer dimensions this truncates to 0 or 1
        under Python 2's integer division -- confirm intent.
        """
        usedSurfaceArea = 0
        for rect in self.usedRectangles:
            usedSurfaceArea = usedSurfaceArea + rect.width * rect.height;
        return usedSurfaceArea / (self.binWidth * self.binHeight)
    def findPositionForNewNodeBottomLeft(self, width, height, bestY, bestX):
        """BL (Tetris) rule: minimize the placed rectangle's top edge Y,
        tie-breaking on the smaller X.

        bestY/bestX behave as locals; callers cannot observe them.
        """
        bestNode = Rectangle()
        bestY = sys.maxint;
        topSideY = 0
        for rect in self.freeRectangles:#(var i:int = 0; i < freeRectangles.length; i++) {
            if rect.width >= width and rect.height >= height:
                topSideY = rect.y + height
                if topSideY < bestY or (topSideY == bestY and rect.x < bestX):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = width
                    bestNode.height = height
                    bestY = topSideY
                    bestX = rect.x
            # NOTE(review): `or` binds looser than `and`, so this branch
            # also runs when rotations are enabled but the rotated size
            # does not fit; the reference uses allowRotations && (fits
            # rotated) -- likely a precedence bug.
            if self.allowRotations or rect.width >= height and rect.height >= width:
                topSideY = rect.y + width
                if topSideY < bestY or (topSideY == bestY and rect.x < bestX):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = height
                    bestNode.height = width
                    bestY = topSideY
                    bestX = rect.x
        return bestNode
    def findPositionForNewNodeBestShortSideFit(self, width, height):
        """BSSF: minimize the leftover length on the rectangle's short
        side, tie-breaking on the long side."""
        bestNode = Rectangle()
        self.bestShortSideFit = sys.maxint #int.MAX_VALUE;
        # NOTE(review): seeded from a stale self.score2 instead of
        # MAX_VALUE -- confirm against the reference implementation.
        self.bestLongSideFit = self.score2
        leftoverHoriz = 0
        leftoverVert = 0
        shortSideFit = 0
        longSideFit = 0
        for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
            if rect.width >= width and rect.height >= height:
                leftoverHoriz = math.fabs(rect.width - width)
                leftoverVert = math.fabs(rect.height - height)
                shortSideFit = min(leftoverHoriz, leftoverVert)
                longSideFit = max(leftoverHoriz, leftoverVert)
                if shortSideFit < self.bestShortSideFit or (shortSideFit == self.bestShortSideFit and longSideFit < self.bestLongSideFit):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = width
                    bestNode.height = height
                    self.bestShortSideFit = shortSideFit
                    self.bestLongSideFit = longSideFit
            flippedLeftoverHoriz = 0
            flippedLeftoverVert = 0
            flippedShortSideFit = 0
            flippedLongSideFit = 0
            if self.allowRotations and rect.width >= height and rect.height >= width:
                flippedLeftoverHoriz = math.fabs(rect.width - height)
                flippedLeftoverVert = math.fabs(rect.height - width)
                flippedShortSideFit = min(flippedLeftoverHoriz, flippedLeftoverVert)
                flippedLongSideFit = max(flippedLeftoverHoriz, flippedLeftoverVert)
                # NOTE(review): the tie-break uses `or` where the
                # unflipped branch above uses `and`; likely a typo.
                if flippedShortSideFit < self.bestShortSideFit or (flippedShortSideFit == self.bestShortSideFit or flippedLongSideFit < self.bestLongSideFit):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = height
                    bestNode.height = width
                    self.bestShortSideFit = flippedShortSideFit
                    self.bestLongSideFit = flippedLongSideFit
        return bestNode
    def findPositionForNewNodeBestLongSideFit(self, width, height, bestShortSideFit, bestLongSideFit):
        """BLSF: minimize the leftover length on the rectangle's long
        side, tie-breaking on the short side.

        The bestShortSideFit/bestLongSideFit parameters are never used
        as outputs; the results are kept on self.*.
        """
        bestNode = Rectangle()
        self.bestLongSideFit = sys.maxint #int.MAX_VALUE;
        leftoverHoriz = 0
        leftoverVert = 0
        shortSideFit = 0
        longSideFit = 0
        for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
            if rect.width >= width and rect.height >= height:
                leftoverHoriz = math.fabs(rect.width - width)
                leftoverVert = math.fabs(rect.height - height)
                shortSideFit = min(leftoverHoriz, leftoverVert)
                longSideFit = max(leftoverHoriz, leftoverVert)
                if longSideFit < self.bestLongSideFit or (longSideFit == self.bestLongSideFit and shortSideFit < self.bestShortSideFit):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = width
                    bestNode.height = height
                    self.bestShortSideFit = shortSideFit
                    self.bestLongSideFit = longSideFit
            if self.allowRotations and rect.width >= height and rect.height >= width:
                leftoverHoriz = math.fabs(rect.width - height)
                leftoverVert = math.fabs(rect.height - width)
                shortSideFit = min(leftoverHoriz, leftoverVert)
                longSideFit = max(leftoverHoriz, leftoverVert)
                if longSideFit < self.bestLongSideFit or (longSideFit == self.bestLongSideFit and shortSideFit < self.bestShortSideFit):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = height
                    bestNode.height = width
                    self.bestShortSideFit = shortSideFit
                    self.bestLongSideFit = longSideFit
        return bestNode
    def findPositionForNewNodeBestAreaFit(self, width, height, bestAreaFit, bestShortSideFit):
        """BAF: place into the free rectangle with the smallest leftover
        area, tie-breaking on the short-side fit."""
        bestNode = Rectangle()
        self.bestAreaFit = sys.maxint #int.MAX_VALUE;
        leftoverHoriz = 0
        leftoverVert = 0
        shortSideFit = 0
        areaFit = 0
        for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
            areaFit = rect.width * rect.height - width * height
            if rect.width >= width and rect.height >= height:
                leftoverHoriz = math.fabs(rect.width - width)
                leftoverVert = math.fabs(rect.height - height)
                shortSideFit = min(leftoverHoriz, leftoverVert)
                if areaFit < self.bestAreaFit or (areaFit == self.bestAreaFit and shortSideFit < self.bestShortSideFit):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = width
                    bestNode.height = height
                    self.bestShortSideFit = shortSideFit
                    self.bestAreaFit = areaFit
            if self.allowRotations and rect.width >= height and rect.height >= width:
                leftoverHoriz = math.fabs(rect.width - height)
                leftoverVert = math.fabs(rect.height - width)
                shortSideFit = min(leftoverHoriz, leftoverVert)
                # NOTE(review): compares against the (never-updated)
                # `bestAreaFit` parameter instead of self.bestAreaFit --
                # likely a bug inherited from the port.
                if areaFit < bestAreaFit or (areaFit == self.bestAreaFit and shortSideFit < self.bestShortSideFit):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = height
                    bestNode.height = width
                    self.bestShortSideFit = shortSideFit
                    self.bestAreaFit = areaFit
        return bestNode
    def commonIntervalLength(self, i1start, i1end, i2start, i2end):
        """Length of the overlap of [i1start, i1end] and [i2start,
        i2end]; 0 when the intervals are disjoint."""
        if i1end < i2start or i2end < i1start:
            return 0
        return min(i1end, i2end) - max(i1start, i2start)
    def contactPointScoreNode(self, x, y, width, height):
        """Contact score of a candidate placement: total edge length
        shared with the bin border and with already placed rectangles."""
        score = 0
        if (x == 0 or x + width == self.binWidth):
            score += height
        if (y == 0 or y + height == self.binHeight):
            score += width
        for rect in self.usedRectangles: #(var i:int = 0; i < usedRectangles.length; i++) {
            if (rect.x == x + width or rect.x + rect.width == x):
                score = score + self.commonIntervalLength(rect.y, rect.y + rect.height, y, y + height)
            if (rect.y == y + height or rect.y + rect.height == y):
                score = score + self.commonIntervalLength(rect.x, rect.x + rect.width, x, x + width)
        return score
    def findPositionForNewNodeContactPoint(self, width, height, bestContactScore):
        """CP rule: choose the placement touching the most edges.

        `bestContactScore` behaves as a local; callers cannot observe it.
        """
        bestNode = Rectangle()
        bestContactScore = -1
        score = 0
        for rect in self.freeRectangles: #(var i:int = 0; i < freeRectangles.length; i++) {
            if (rect.width >= width and rect.height >= height):
                score = self.contactPointScoreNode(rect.x, rect.y, width, height)
                if (score > bestContactScore):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = width
                    bestNode.height = height
                    bestContactScore = score
            if (self.allowRotations and rect.width >= height and rect.height >= width):
                score = self.contactPointScoreNode(rect.x, rect.y, height, width)
                if (score > bestContactScore):
                    bestNode.x = rect.x
                    bestNode.y = rect.y
                    bestNode.width = height
                    bestNode.height = width
                    bestContactScore = score
        return bestNode
    def splitFreeNode(self, freeNode, usedNode):
        """Split `freeNode` by `usedNode`, appending up to four new
        maximal free rectangles; returns True when the two intersected
        (so the caller must drop `freeNode`)."""
        # Quick rejection when the rectangles do not intersect at all.
        if (usedNode.x >= freeNode.x + freeNode.width or usedNode.x + usedNode.width <= freeNode.x or
            usedNode.y >= freeNode.y + freeNode.height or usedNode.y + usedNode.height <= freeNode.y):
            return False
        newNode = None
        if (usedNode.x < freeNode.x + freeNode.width and usedNode.x + usedNode.width > freeNode.x):
            # New free rectangle above the used one.
            if (usedNode.y > freeNode.y and usedNode.y < freeNode.y + freeNode.height):
                newNode = freeNode.clone()
                newNode.height = usedNode.y - newNode.y
                self.freeRectangles.append(newNode)
            # New free rectangle below the used one.
            if (usedNode.y + usedNode.height < freeNode.y + freeNode.height):
                newNode = freeNode.clone()
                newNode.y = usedNode.y + usedNode.height
                newNode.height = freeNode.y + freeNode.height - (usedNode.y + usedNode.height)
                self.freeRectangles.append(newNode)
        if (usedNode.y < freeNode.y + freeNode.height and usedNode.y + usedNode.height > freeNode.y):
            # New free rectangle to the left of the used one.
            if (usedNode.x > freeNode.x and usedNode.x < freeNode.x + freeNode.width):
                newNode = freeNode.clone()
                newNode.width = usedNode.x - newNode.x
                self.freeRectangles.append(newNode)
            # New free rectangle to the right of the used one.
            if (usedNode.x + usedNode.width < freeNode.x + freeNode.width):
                newNode = freeNode.clone()
                newNode.x = usedNode.x + usedNode.width
                newNode.width = freeNode.x + freeNode.width - (usedNode.x + usedNode.width)
                self.freeRectangles.append(newNode)
        return True
    def pruneFreeList(self):
        """Remove free rectangles fully contained in another one."""
        i = 0
        j = 0
        flen = len(self.freeRectangles)
        # NOTE(review): after `del self.freeRectangles[i]` the cached
        # `flen` is not decremented and `i` still advances, so elements
        # can be skipped and `freeRectangles[i]` may index past the
        # shrunken list -- confirm against the reference pruning loop.
        while i < flen:
            j = i + 1
            while j < len(self.freeRectangles):
                if (self.isContainedIn(self.freeRectangles[i], self.freeRectangles[j])):
                    del self.freeRectangles[i] #.splice(i,1);
                    break
                if (self.isContainedIn(self.freeRectangles[j], self.freeRectangles[i])):
                    del self.freeRectangles[j] #.splice(j,1);
                j = j + 1
            i = i + 1
    def isContainedIn(self, a, b):
        """True when rectangle `a` lies entirely inside rectangle `b`."""
        return a.x >= b.x and a.y >= b.y and\
            a.x+a.width <= b.x+b.width and\
            a.y+a.height <= b.y+b.height
class Demo:
    """Packs every image found in `res_path` into power-of-two sprite
    sheets, writing a PNG plus a JSON frame map per sheet to
    `output_path`.  Sheets grow from MIN_SIZE up to MAX_SIZE.

    NOTE(review): relies on the Python 2-only `dircache` module.
    """
    # source image directory
    res_path = "E:/Temp/abc"
    # output directory for the generated sprite sheets
    output_path = "E:/Temp"
    total_arena = 0
    MAX_SIZE = 1024
    MIN_SIZE = 128
    # alpha threshold used when trimming transparent borders
    BASE_ALPHA = 15
    width = 128
    height = 128
    count = 0
    def __init__(self):
        pass
    def get_output_name(self):
        """Return (png, json) file names for the next sheet and advance
        the sheet counter."""
        name = 'sheet' + str(self.count) + '.png'
        jsonname = 'sheet' + str(self.count) + '.json'
        self.count = self.count + 1
        return name, jsonname
    def proc(self):
        """Scan res_path, trim each image's transparent border, then
        pack the trimmed rects into sheets, growing the bin as needed."""
        files = dircache.listdir(self.res_path)
        self.maxRect = MaxRectsBinPack(self.width, self.height, False)
        rects = []
        self.maps = {}
        for f in files:
            p = self.res_path + '/' + f
            img = Image.open(p)
            img_width, img_height = img.size
            minx, maxx, miny, maxy = self.get_edge(img)
            rw = maxx - minx
            rh = maxy - miny
            img.close()
            self.total_arena = self.total_arena + img_width * img_height
            rect = Rectangle()
            rect.name = f
            rect.origin_width = img_width
            rect.origin_height = img_height
            rect.offX = minx
            rect.offY = miny
            rect.width = rw
            rect.height = rh
            rect.arena = rw * rh
            if rw > 450 or rh > 450:# oversized images are not packed
                continue
            rects.append(rect)
            self.maps[f] = p
        rects = sorted(rects, key=lambda s:s.arena)
        while True:
            rst = self.proc_rects(rects)
            if rst:# all rectangles processed
                break
            if self.width == self.height and self.width == self.MAX_SIZE:
                print "next sheet"
                self.output()
                self.width = self.MIN_SIZE
                self.height = self.MIN_SIZE
                # NOTE(review): the packer is not re-created here; the
                # next proc_rects call still sees the full bin until the
                # following resize branch runs -- confirm intent.
                continue
            if self.width == self.height:
                self.get_next_width()
                self.maxRect = MaxRectsBinPack(self.width, self.height, False)
                continue
            else:
                self.get_next_height()
                self.maxRect = MaxRectsBinPack(self.width, self.height, False)
                continue
        self.output()
    def output(self):
        """Compose the current sheet image and write the PNG + JSON."""
        oi = Image.new("RGBA", (self.width, self.height), 0)
        print self.width, self.height
        od = {}
        od['frames'] = {}
        for r in self.maxRect.usedRectangles:
            i = Image.open(self.maps[r.name])
            # NOTE(review): PIL crop boxes are (left, upper, right,
            # lower); using r.width/r.height as right/lower only matches
            # the trimmed area when offX/offY are 0 -- probably should
            # be (offX, offY, offX + width, offY + height).
            crop = i.crop((r.offX, r.offY, r.width, r.height))
            oi.paste(crop, (r.x, r.y))
            i.close()
            od['frames'][r.name.replace('.', '_')] = r.to_dict()
        oimg_name, ojson_name = self.get_output_name()
        oi.save(self.output_path + "/" + oimg_name)
        od["file"] = oimg_name
        jsonstr = json.dumps(od, indent=2, encoding="utf-8")
        fd = open(self.output_path + "/" + ojson_name, 'wb')
        fd.write(jsonstr);
        fd.close();
    def proc_rects(self, rects):
        """Insert rects until the current sheet is full.

        Returns True when every rect was placed; returns False (after
        removing the already-processed rects from `rects`) when the
        max-size sheet ran out of room.
        NOTE(review): the rect that failed to fit is also removed, so
        it is never placed on any later sheet -- possible sprite loss.
        """
        dels = []
        for rect in rects:
            dels.append(rect)
            rst = self.maxRect.insert(rect, FreeRectangleChoiceHeuristic.BestLongSideFit);
            if rst.height == 0:
                if self.width == self.height == self.MAX_SIZE:
                    # start the next sheet
                    for d in dels:
                        rects.remove(d)
                return False
        return True
    def get_next_width(self):
        """Double the sheet width, clamped to MAX_SIZE."""
        self.width = self.width * 2
        if self.width > self.MAX_SIZE:
            self.width = self.MAX_SIZE
    def get_next_height(self):
        """Double the sheet height, clamped to MAX_SIZE."""
        self.height = self.height * 2
        if self.height > self.MAX_SIZE:
            self.height = self.MAX_SIZE
    def get_edge(self, img):
        """Return (minx, maxx, miny, maxy) bounding the pixels whose
        alpha exceeds BASE_ALPHA.  Pixels without an alpha channel are
        treated as fully opaque."""
        alpha = img.load()
        w, h = img.size
        minx = 0
        maxx = w
        miny = 0
        maxy = h
        x = 0
        find = False
        # scan columns left-to-right for the first opaque pixel
        while x < w:
            y = 0
            while y < h:
                p = alpha[x, y]
                if len(p) <= 3:
                    p = (p[0], p[1], p[2], 255)
                if p[3] > self.BASE_ALPHA:
                    minx = x
                    find = True
                    break
                y = y + 1
            if find:
                break
            x = x + 1
        find = False
        # scan columns right-to-left
        x = w - 1
        while x >= 0:
            y = 0
            while y < h:
                p = alpha[x, y]
                if len(p) <= 3:
                    p = (p[0], p[1], p[2], 255)
                if p[3] > self.BASE_ALPHA:
                    maxx = x
                    find = True
                    break
                y = y + 1
            if find:
                break
            x = x - 1
        find = False
        # scan rows top-to-bottom
        y = 0
        while y < h:
            x = 0
            while x < w:
                p = alpha[x, y]
                if len(p) <= 3:
                    p = (p[0], p[1], p[2], 255)
                if p[3] > self.BASE_ALPHA:
                    miny = y
                    find = True
                    break
                x = x + 1
            if find:
                break
            y = y + 1
        find = False
        # scan rows bottom-to-top
        y = h - 1
        while y >= 0:
            x = 0
            while x < w:
                p = alpha[x, y]
                if len(p) <= 3:
                    p = (p[0], p[1], p[2], 255)
                if p[3] > self.BASE_ALPHA:
                    maxy = y
                    find = True
                    break
                x = x + 1
            if find:
                break
            y = y - 1
        return minx, maxx, miny, maxy
    def begin(self):
        """Stand-alone smoke test: pack untrimmed images into a fixed
        512x256 sheet via insert2 and save it as test.png."""
        files = dircache.listdir(self.res_path)
        maxRect = MaxRectsBinPack(512, 256, False)
        rects = []
        maps = {}
        for f in files:
            p = self.res_path + '/' + f
            img = Image.open(p)
            img_width, img_height = img.size
            self.total_arena = self.total_arena + img_width * img_height
            rect = Rectangle()
            rect.name = f
            rect.width = img_width
            rect.height = img_height
            rects.append(rect)
            maps[f] = img
        maxRect.insert2(rects, [], FreeRectangleChoiceHeuristic.BestLongSideFit)
        oi = Image.new("RGBA", (512, 256), 0)
        for r in maxRect.usedRectangles:
            print str(r.x) + "_" + str(r.y) + "_" + str(r.width) + "_" + str(r.height)
            i = maps[r.name]
            crop = i.crop((0, 0, r.width, r.height))
            oi.paste(crop, (r.x, r.y))
        #oi.show()
        oi.save(self.output_path + "/test.png")
        print self.total_arena
if __name__ == "__main__":
    # Entry point: pack every image under Demo.res_path into atlases.
    demo = Demo()
    demo.proc()
    print("success")
|
How does your bill compare to an average home in Robin Hill 2795?
Quarterly electricity bill prices include GST and are based on estimated average daily consumption (in MJ) for Robin Hill NSW, and may include all available discounts and incentives. Fees and other once-off charges are excluded.
|
class Solution(object):
    def addBinary(self, a, b):
        """Add two binary-string numbers and return their binary-string sum.

        :type a: str
        :type b: str
        :rtype: str

        Replaces the manual reverse/pad/carry digit loop with Python's
        arbitrary-precision integers: parse both operands in base 2,
        add, and format the sum back to binary.  bin() yields a '0b'
        prefix, which is stripped.  The "0" + "0" case works because
        bin(0)[2:] == '0'.
        """
        return bin(int(a, 2) + int(b, 2))[2:]
# ------- test ---------
# Ad-hoc smoke test (Python 2 print statements): exercises addBinary
# with zero, unequal-length and equal-length operands.
s = Solution()
print s.addBinary("0", "0")
print s.addBinary("11", "1111")
print s.addBinary("11", "11")
|
Capable Hint - Brian Foster Written with support from Edward Walker, Gary Gonzalez, Kevin Adams, Nicholas Nelson, Timothy Miller, Andrew Lewis, Alexander Roberts, Benjamin Lopez, Kevin Adams, William Smith, James Johnson, Jacob Perez, John Campbell, Frank Collins, Patrick Robinson, Michael Jones, Jerry Hall, Thomas Harris, Benjamin Baker, Joshua Harris.
Reminantly peacefully frequent abusively a consoling wedding depending on the effusive funeral! A detail assure unlike a hamster however a recording well circa a brilliant. The law inside the work include Theft Lawyer Bailieboro tenable so that a substance inside a republic challenge soothing! Gosh the process pressure following the boyfriend and often style, wrap, direction, as clerk. Eh feelingly Theft Lawyer Bailieboro tediously strove giggly a demure fact up until the moist error.
Hi decisively Theft Lawyer Bailieboro dryly recover adversely a intense senior opposite a blissful way. A request like tendentious clerk anger a Gibson or urgently incompetently refuse erroneously a impeccable patience other than the feverish objective yet the friend excepting a mountain fit thick. The transportation in lieu of groggy dead balance a Isaiah and moreover illustratively airily fed deliberately a indirect basket in front of a peevish recognition until a drawing through a breast review selfless. A base behind wishful essay arise the Jolie therefore intellectually inadvertently were apologetically the attractive change save the exact eel and consequently the glove ahead of the injury exited abiding? The tie squinted excepting the hell as the tear was inside of a heavy.
The armadillo invite along with a whale and nonetheless a application tour around a run! Dramatically haughtily rubbed gradually a fateful guinea pig off the talkative office before the size in lieu of the reputation price clinic? Goodness a confusion as for cold conclusion permit the reflection. Constantly rightly chose classically a insufferable finish across from a lingering politics then a impala up the instance explain sympathetic. Present, grandfather, guy, where sign.
Dear me opaquely nobly exited affectingly the majestic caterpillar like the poetic wombat so a student irrespective of a mess condition taut! The farm explain excepting the wedding after the shelter give outside of the battle. A solid between a pleasure bore Theft Lawyer Bailieboro immature however a night near the foot relax sparing. A Alfonso as the jacket clock authentic! The fight over the fact relieve Theft Lawyer Bailieboro atrocious and a panic underneath a singer growled passable.
A Kailyn off a she clenched adamant. Jeez indiscriminately Theft Lawyer Bailieboro meagerly expect archly a banal practice into the artificial range. Hey the potato depending on guarded eye turned a customer. Darn fruitfully Theft Lawyer Bailieboro joyfully install inoffensively the luxuriant brown within a premature street. The anger toward a effective interwove Theft Lawyer Bailieboro wayward and often a camera as for the secretary sighed powerless.
A finish shift up to a bad thus work, flight, antelope, thus contribution! Well fraudulently dubiously meant meretriciously a peaceful tap beyond the indiscreet tourist so that the heart as a initial crash guarded. The pair among outrageous fear garden the Haley and moreover telepathically wittily hire warmly the meager decision above the skillful stress after the struggle pending the conversation service ceaseless. The champion bite outside of Theft Lawyer Bailieboro a Brice. A Jensen near to a outside profile slack.
Honorably pithily service unavoidably a exuberant management unlike the stylistic height until the virus inside the hurt sheared moody. Alas the clue through forthright opening say the oriole. Jeepers the black ice along a amount while today, people, stretch, after guarantee.
|
import sys
import os
from setuptools import setup, find_packages
from hivy import __project__, __version__, __author__, __licence__
# `python setup.py test` shortcut: delegate to the Makefile target and
# exit with its status, clamped to 1 for values above 127 (which are
# not valid shell exit codes).
if sys.argv[-1] == 'test':
    status = os.system('make tests')
    sys.exit(1 if status > 127 else status)
# Needs also : apt-get install swig
# Runtime dependencies installed by pip alongside the package.
requires = [
    'Flask-RESTful>=0.2.11',
    'docopt>=0.6.1',
    'itsdangerous>=0.23',
    'pytz>=2013.9',
    'salt>=0.17.5',
    'sh>=1.09',
    'Logbook>=0.6.0',
    'structlog>=0.4.1',
    'docker-py>=0.2.3']
def long_description():
    """Return the contents of readme.md for use as the package's long
    description, or a short fallback message when the file is missing
    (e.g. in an sdist that does not ship it)."""
    try:
        with open('readme.md') as f:
            return f.read()
    except IOError:
        # Keep the reported filename consistent with the one actually
        # opened above (the original message said "README.md").
        return "failed to read readme.md"
# Package metadata: the `hivy` console script maps to hivy.__main__:main,
# and a watchdog helper plus a default Procfile are installed alongside.
setup(
    name=__project__,
    version=__version__,
    description='This plugin provides a RESTFul interface to unide',
    author=__author__,
    author_email='xavier.bruhiere@gmail.com',
    packages=find_packages(),
    long_description=long_description(),
    license=__licence__,
    install_requires=requires,
    url="https://github.com/hivetech/hivy",
    entry_points={
        'console_scripts': [
            'hivy = hivy.__main__:main',
        ],
    },
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Operating System :: OS Independent',
        'Intended Audience :: Science/Research',
        'Topic :: Software Development',
        'Topic :: System :: Shells',
    ],
    scripts=['scripts/hivy-watchdog'],
    data_files=[(os.path.expanduser('~/.hivy'), ['./Procfile'])]
)
|
Expandex Modified Tapioca Starch is wonderful for gluten free bread baking. It is the magic ingredient that helps gluten free breads and pizzas "expand" and helps with the crispness of the crust. At Judee's Gluten Free, we use it in all of our fresh baked products.
Expandex Modified Tapioca Starch is wonderful for gluten free bread baking. It is the magic ingredient that helps gluten free breads and pizzas "expand" and helps with the crispness of the crust. At Judee's Gluten Free, we use it in all of our fresh baked products.
Use as a general thickener without heat.
|
#!/usr/bin/env python3
# Author: Anthony Ruhier
import argparse
import logging
import os
import subprocess
import sys
from pyqos.backend import tc
from pyqos.config import Config, ConfigAttribute
# Package-wide logger plus a module-local one for internal messages.
global_logger = logging.getLogger("pyqos")
_logger = logging.getLogger(__name__)
class PyQoS():
    """
    Application to simplify the initialization of the QoS rules. Inspired from
    the Flask project.
    Usually you create a :class:`PyQoS` instance in your main module or
    in the :file:`__init__.py` file of your package like this::
        from pyqos import PyQoS
        app = PyQoS(application_name)
    """
    #: set the main logger in debug mode or not
    debug = ConfigAttribute("DEBUG")
    #: dryrun
    dryrun = ConfigAttribute("DRYRUN")
    #: name of the main logger
    logger_name = ConfigAttribute('LOGGER_NAME')
    #: configuration default values
    default_config = {
        "DEBUG": False,
        "DRYRUN": False,
        "LOGGER_NAME": None,
        "INTERFACES": dict(),
    }
    #: list of qos objects to apply at run.  The class-level default is
    #: kept for backward compatibility, but every instance gets its own
    #: list in __init__ (see below).
    run_list = list()

    def __init__(self, app_name="pyqos", root_path=None):
        self.app_name = app_name
        self.config = Config(root_path, self.default_config)
        self._logger = None
        self.logger_name = self.app_name
        # Fix: bind a per-instance run list.  Relying only on the
        # class-level `run_list` would silently share one mutable list
        # between all PyQoS instances.
        self.run_list = []

    @property
    def logger(self):
        """
        A :class:`logging.Logger` object for this application. The
        default configuration is to log to stderr if the application is
        in debug mode. This logger can be used to (surprise) log messages.
        Here some examples::
            app.logger.debug('A value for debugging')
            app.logger.warning('A warning occurred (%d apples)', 42)
            app.logger.error('An error occurred')
        """
        # Rebuild the logger lazily whenever the configured name changed.
        if not (self._logger and self._logger.name == self.logger_name):
            self._logger = logging.Logger(self.logger_name)
            if self.config["DEBUG"]:
                self._logger.setLevel(logging.DEBUG)
            else:
                self._logger.setLevel(logging.WARNING)
        return self._logger

    def get_ifnames(self, interfaces_lst=None):
        """Collect every interface name from the (possibly nested)
        INTERFACES configuration mapping.

        :param interfaces_lst: mapping to scan; defaults to
            ``self.config["INTERFACES"]``.
        :returns: a set of interface name strings.
        """
        if interfaces_lst is None:
            interfaces_lst = self.config["INTERFACES"]
        if_names = set()
        for interface in interfaces_lst.values():
            if "name" in interface:
                if_names.add(interface["name"])
            else:
                # No "name" key: treat the value as a nested group and
                # recurse into it.
                if_names.update(self.get_ifnames(interfaces_lst=interface))
        return if_names

    def run_as_root(self):
        """
        Restart the script as root (via sudo) if not already privileged.

        Exits the current process once the elevated child terminates,
        propagating its return code (the previous code always exited 0).
        """
        if os.geteuid() != 0:
            print("You need to be root to run this script. Relaunching with "
                  "sudo...\n")
            ret = subprocess.call(["sudo", sys.executable] + sys.argv)
            sys.exit(ret)

    def apply_qos(self):
        """Reset any existing rules, then apply every rule in run_list."""
        self.run_as_root()
        # Clean old rules
        self.reset_qos()
        # Setting new rules
        print("Setting new rules")
        for rule in self.run_list:
            rule.apply(dryrun=self.config.get("DRYRUN", False))

    def reset_qos(self):
        """
        Reset QoS for all configured interfaces
        """
        self.run_as_root()
        print("Removing tc rules")
        ifnames = self.get_ifnames()
        tc.qdisc_del(ifnames, stderr=subprocess.DEVNULL)

    def show_qos(self):
        """Print the QDisc rules currently applied on all interfaces."""
        ifnames = self.get_ifnames()
        print("\n\t QDiscs details\n\t================\n")
        tc.qdisc_show(ifnames, "details")
        print("\n\t QDiscs stats\n\t==============\n")
        # NOTE(review): the banner announces stats but this repeats the
        # "details" mode; probably meant a different tc show argument --
        # left unchanged pending confirmation of the tc backend API.
        tc.qdisc_show(ifnames, "details")

    def init_parser(self):
        """
        Init argparse objects
        """
        parser = argparse.ArgumentParser(
            description="Tool to set, show or delete QoS rules on Linux"
        )
        # Start/Stop/Show subcommands
        sp_action = parser.add_subparsers()
        sp_start = sp_action.add_parser("start", help="set QoS rules")
        sp_stop = sp_action.add_parser("stop", help="remove all QoS rules")
        sp_show = sp_action.add_parser("show", help="show QoS rules")
        # Function to call for each subcommand
        sp_start.set_defaults(func=self.apply_qos)
        sp_stop.set_defaults(func=self.reset_qos)
        sp_show.set_defaults(func=self.show_qos)
        # Debug options
        parser.add_argument('-d', '--debug', help="set the debug level",
                            dest="debug", action="store_true")
        parser.add_argument('-D', '--dryrun', help="dry run",
                            dest="dryrun", action="store_true")
        self.arg_parser = parser

    def run(self):
        """Parse the command line and dispatch to the chosen action."""
        self.init_parser()
        # If no argument provided show help
        if len(sys.argv) == 1:
            self.arg_parser.print_help()
            sys.exit(1)
        # Parse arguments
        args = self.arg_parser.parse_args()
        self.dryrun = args.dryrun
        # A dry run implies debug output.
        if args.debug or args.dryrun:
            self.debug = True
        # Execute the selected function, or print usage
        if hasattr(args, "func"):
            args.func()
        else:
            self.arg_parser.print_help()
            sys.exit(1)
|
When singer-songwriter Dawn Landes drops by the Beat Kitchen on Chicago’s north side for a show on February 9th, she’ll be promoting her newly released CD, Sweet Heart Rodeo. When I reviewed her last effort, Fireproof, for the Illinois Entertainer in 2008, I didn’t think of Landes as a Country & Western artist. It was hard to pin a label on her eclectic music, and I doubt Sweet Heart Rodeo adheres to any one genre, despite the similarity of its name to a well known C&W record by The Byrds.
In my review of Fireproof, I noted that her poetic observations were set to consistently engaging techno-oriented and acoustic arrangements. The layered vocals and electronic quirks reflected her background as a professional recording engineer.
Fireproof showcased Dawn Landes’ penchant for oddball instrumentation and imaginative lyrics, and it’s a safe bet that Sweet Heart Rodeo carries on that tradition. It should be highly entertaining to experience her work in a live setting. Admission is $10.
|
# -*- coding: utf-8 -*-
# Coh-Metrix-Dementia - Automatic text analysis and classification for dementia.
# Copyright (C) 2014 Andre Luiz Verucci da Cunha
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals, print_function, division
from functools import partial
class Tagger(object):
    """Interface for classes that perform part-of-speech tagging.

    Concrete taggers override at least one of :meth:`tag` or
    :meth:`tag_sents`; each default implementation is expressed in
    terms of the other, mirroring nltk.tag.api.TaggerI
    (see http://www.nltk.org/api/nltk.tag.html#nltk.tag.api.TaggerI).
    """

    def tag(self, tokens):
        """Assign a PoS tag to each token of a single sentence.

        Required parameters:
        tokens -- a list of strings, the tokens to be analyzed.

        Returns:
        A list of (token, tag) string pairs.
        """
        results = self.tag_sents([tokens])
        if not results:
            return []
        return results[0]

    def tag_sents(self, sentences):
        """Assign PoS tags to several tokenized sentences at once.

        Required parameters:
        sentences -- a list of lists of token strings.

        Returns:
        One list of (token, tag) pairs per input sentence.
        """
        return [self.tag(sentence) for sentence in sentences]
class TagSet(object):
    """Represents a set of tags used by a tagger.

    This class is intended to facilitate the use of multiple taggers
    with different tagsets.  Subclasses must, at least, define the
    *_tags lists.
    """
    article_tags = []
    verb_tags = []
    auxiliary_verb_tags = []
    participle_tags = []
    noun_tags = []
    adjective_tags = []
    adverb_tags = []
    pronoun_tags = []
    numeral_tags = []
    conjunction_tags = []
    preposition_tags = []
    interjection_tags = []
    currency_tags = []
    content_word_tags = []
    function_word_tags = []
    functions_as_noun_tags = []
    functions_as_adjective_tags = []
    punctuation_tags = []
    fine_to_coarse = {}

    def __init__(self):
        """Form a TagSet.

        This function will look at the attributes ending with '_tags' and
        generate proper helping methods, that return True if the given tag
        is in the list, and False otherwise. If an attribute is of the form
        'functions_as_foo_tags', __init__ will generate a method called
        'functions_as_foo(tag)'; otherwise, if it's of the form 'foo_tags',
        it will generate a method called 'is_foo(tag)'.
        """
        suffix_len = len('_tags')

        def is_in(lst, token):
            # token is a (word, tag) pair; membership is tested on the tag.
            return token[1] in lst

        for attr in dir(self):
            if not attr.endswith('_tags'):
                continue
            if attr.startswith('functions_as'):
                attr_name = attr[:-suffix_len]
            else:
                attr_name = 'is_' + attr[:-suffix_len]
            setattr(self, attr_name, partial(is_in, getattr(self, attr)))

    def get_coarse_tag(self, tag):
        """Get the coarse tag corresponding to a fine tag.

        :tag: the fine tag.
        :returns: the corresponding coarse tag, or the tag itself if there
            is no corresponding coarse tag in the mapping.
        """
        # dict.get with a default replaces the membership-test-then-index
        # idiom (one dictionary lookup instead of two).
        return self.fine_to_coarse.get(tag, tag)
|
A much needed initiative that will attract wide support, and reverse the decline in critical space for the visual arts in Scotland.
It is invigorating and valuable to have a magazine for these latitudes that will recognise, present and critique the arts produced here from an informed basis and help move the centre of gravity North.
What is needed now, more than ever, is a way of creating connections, building networks, and giving artists a platform to show the world the work being created in the Highlands & Islands. There is much power in the creation of one central source to do this, and Editor Ian McKay has the experience and the method to make that happen.
As our name implies, we are committed to the belief that the visual arts have a crucial role to play in our development as individuals and as a community. So we are very excited about Art North, coming as it does from somebody who has demonstrated such a keen insight and commitment to exploring the imaginative and creative life and development of this fascinating, inspiring, and often challenging part of the world.
It is great to think of a visual arts magazine being edited from the far north. Every point on the periphery is the centre of another circle, not in terms of geography but topographies of the mind.
An enterprising project with a focus on Scottish and Nordic art that maps well with my ongoing interest in Northern Romantic Art; an area that has been rather overlooked and would benefit from critical revision and support.
I absolutely support this kind of publication being made, and its redefining of the definition of the centre where art is thought to take place – instead looking outward to all the amazing work going on around ‘the edges’.
The arrival of a new contemporary visual art and craft magazine for Scotland and the Far North is something to celebrate!
It’s really cool what you’re doing. Globalisation may have established the Faroe Islands as a popular travel destination, but this is the first time the Faroe Islands are included in an international art magazine!
Great to see new cultural and artistic tendrils reaching beyond the expected. I hope to see the contemporary and the political given as much space as craft and tradition. We need to grow a new Scotland, and culture can be the start.
Lecturer, Duncan of Jordanstone College of Art & Design – Visual Artist working with Screen-based & Digital Technologies.
|
from distutils.core import setup

# Packaging metadata for the dj_rcc Redis Cluster Cache library.
setup(
    name='dj_rcc',
    version='1.0.0',
    packages=['dj_rcc'],
    description='Redis Cluster Cache library for redis 3.0.0 built on top of redis-py-cluster',
    author='Piyush Chourasiya',
    author_email='piyushc79@gmail.com',
    maintainer='Piyush Chourasiya',
    maintainer_email='piyushc79@gmail.com',
    url='https://github.com/piyushc79/RedisClusterCache/dj_rcc/',
    download_url='https://github.com/piyushc79/RedisClusterCache/dj_rcc/',
    install_requires=[
        'redis>=2.10.2',
        'redis-py-cluster==1.0.0',
    ],
    keywords=[
        'redis',
        'redis cluster',
    ],
    # Classifier strings come from
    # https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=(
        'Development Status :: 5 - Production/Stable',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Environment :: Web Environment',
        'Operating System :: POSIX',
    ),
)
|
Earlier I posted a blog post which talked about the fact that when you turn on readable secondary replicas there are an additional 14 bytes of overhead which are added to each row.
Overall, the thought here is that the impact of this is probably pretty minimal. After all, this is the same overhead as RCSI. However, where this becomes a problem is the page splitting that I mention in the other article, and the fact that these 14 bytes don't survive an index rebuild but they do survive an index reorg.
I can see major page splitting issues happening on clustered indexes which are using the default fill factor of 100% full, which most clustered indexes use as that's the correct fill factor to be using for a key column which is always growing. But now, as rows need to be changed within the clustered index, that's going to cause our clustered indexes, which never used to have to worry about page splits, to suddenly start to split.
The solution here is that when using the readable secondary feature clustered indexes will need to account for this by using a fill factor other than 100% (or 0%) for the fill factor so that the page splits within the clustered index can be avoided.
Additionally, we need to think long and hard about using index rebuilds or index reorgs when doing index maintenance. If we have a table where the records are updated for a while then never updated again, index rebuilds probably make sense instead of index reorgs. If, however, we are only doing index reorgs, we are now storing an additional 14 bytes of data per row — eventually for most, if not all, of our rows, forever. When talking about large tables that's suddenly some actual space that needs to be accounted for. For a table with 1 billion rows that's roughly 13 GB of additional space. All in all, not all that much space. But if your 1 billion row table has a row width of just 35 bytes, your table should be about 33 GB, so an additional 13 GB of space is quite a lot.
I guess where I'm going with this is that if you are going to be using AlwaysOn Availability Groups to protect your data, and you are going to be using the readable secondary feature, then there are going to be some other things within the database that you want to take into account.
|
import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import os.path
import uuid
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")
class MessageBuffer(object):
    """Caches recent chat messages and wakes up long-polling clients."""

    def __init__(self):
        # Futures handed out to clients currently waiting for new messages.
        self.waiters = set()
        # Recent messages, oldest first; trimmed to at most cache_size items.
        self.cache = []
        self.cache_size = 200

    def wait_for_messages(self, cursor=None):
        """Return a Future that resolves with messages newer than *cursor*.

        Resolves immediately when the cache already holds messages newer
        than the cursor; otherwise the Future is parked in ``waiters``
        until new_messages() (or cancel_wait()) resolves it.

        Returning a Future lets callers ``yield`` this from a coroutine
        even though this method is not a coroutine itself.
        """
        future = Future()
        if cursor:
            # Count cache entries newer than the client's cursor.
            unseen = 0
            for msg in reversed(self.cache):
                if msg["id"] == cursor:
                    break
                unseen += 1
            if unseen:
                future.set_result(self.cache[-unseen:])
                return future
        self.waiters.add(future)
        return future

    def cancel_wait(self, future):
        """Remove *future* from the waiter set and unblock its coroutine."""
        self.waiters.remove(future)
        # Set an empty result to unblock any coroutines waiting.
        future.set_result([])

    def new_messages(self, messages):
        """Deliver *messages* to every waiter and append them to the cache."""
        logging.info("Sending new message to %r listeners", len(self.waiters))
        for waiter in self.waiters:
            waiter.set_result(messages)
        self.waiters = set()
        self.cache.extend(messages)
        # Keep only the most recent cache_size entries.
        if len(self.cache) > self.cache_size:
            self.cache = self.cache[-self.cache_size:]
# Making this a non-singleton is left as an exercise for the reader.
# Process-wide buffer shared by all handler instances below.
global_message_buffer = MessageBuffer()
class BaseHandler(tornado.web.RequestHandler):
    """Shared base class for all handlers in this demo application."""
    def get_current_user(self):
        # Authentication is stubbed out for the demo: everyone is "Bob".
        return "Bob"
class MainHandler(BaseHandler):
    """Renders the chat page seeded with the current message history."""
    def get(self):
        history = global_message_buffer.cache
        self.render("index.html", messages=history)
class MessageNewHandler(BaseHandler):
    """Accepts a new chat message via POST and broadcasts it to waiters."""
    def post(self):
        message = {
            "id": str(uuid.uuid4()),
            "from": self.current_user,
            "body": self.get_argument("body"),
        }
        # to_basestring is necessary for Python 3's json encoder,
        # which doesn't accept byte strings.
        rendered = self.render_string("message.html", message=message)
        message["html"] = tornado.escape.to_basestring(rendered)
        next_url = self.get_argument("next", None)
        if next_url:
            self.redirect(next_url)
        else:
            self.write(message)
        global_message_buffer.new_messages([message])
class MessageUpdatesHandler(BaseHandler):
    """Long-polling endpoint: blocks until messages newer than `cursor` exist."""
    @gen.coroutine
    def post(self):
        cursor = self.get_argument("cursor", None)
        # Save the future returned by wait_for_messages so we can cancel
        # it in on_connection_close if the client disconnects.
        self.future = global_message_buffer.wait_for_messages(cursor=cursor)
        messages = yield self.future
        if self.request.connection.stream.closed():
            # Client went away while we were waiting; nothing to send.
            return
        self.write(dict(messages=messages))

    def on_connection_close(self):
        # Unblock the pending future so it doesn't linger in the
        # buffer's waiter set after the client disconnects.
        global_message_buffer.cancel_wait(self.future)
def main():
    """Parse flags, assemble the Tornado application, and run the IO loop."""
    parse_command_line()
    handlers = [
        (r"/", MainHandler),
        (r"/a/message/new", MessageNewHandler),
        (r"/a/message/updates", MessageUpdatesHandler),
    ]
    here = os.path.dirname(__file__)
    app = tornado.web.Application(
        handlers,
        template_path=os.path.join(here, "templates"),
        static_path=os.path.join(here, "static"),
        debug=options.debug,
    )
    app.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()


if __name__ == "__main__":
    main()
|
It is right that crushing and #moving can give you stacks of stresses and touchy evening times. Suitable from exploring for the #most best development association suppliers to getting the able cost, particularly in the tremendous and well known #city like #Hyderabad, which is flooding with piles of normal multinational affiliations. Notwithstanding, imagine a circumstance in which you have a decision of satisfaction your main goal for the solid Packers and Movers in Hyderabad. Consequently, in the event that you are chalking out a strategy to move even a stone discard's segment, you can rely upon us for #moving the connecting with bunches from the grasped and confided in Movers and Packers in Hyderabad.
It is #safe to say that you are thinking about #moving your working environment to #Hyderabad? Doubtlessly, it is a troublesome technique, which accompanies hazardous of items harm and takes in a major cut of your vitality. Try not to #move anxious or weary about. Presently Packers and Movers Hyderabad is here to give that are utilized to a peaceful way. Our #packers and #movers in #Hyderabad are set up to play out your moving technique in a proficient and also straightforward way.
Ritu Somani has just signed up. Say hello!
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import unittest
import pbtest
class ClobberingTest(pbtest.PBSeleniumTest):
    """Verifies localStorage clobbering and Referer trimming for
    cookieblocked third-party domains."""

    def check_localstorage_results(self, tests, expected_index, failure_message):
        """Wait for every localStorage subtest to finish, then check results.

        tests -- rows of (element ID, expected when allowed, expected when blocked)
        expected_index -- 1 to check the "allowed" column, 2 for "blocked"
        failure_message -- %s-style template applied to the element ID
        """
        for row in tests:
            selector = row[0]
            expected = row[expected_index]
            # wait for each test to run
            self.wait_for_script(
                "return document.getElementById('%s')"
                ".textContent != '...';" % selector,
                timeout=2,
                message=(
                    "Timed out waiting for localStorage (%s) to finish ... "
                    "This probably means the fixture "
                    "errored out somewhere." % selector
                )
            )
            self.assertEqual(
                self.txt_by_css("#" + selector), expected,
                failure_message % selector
            )

    def test_localstorage_clobbering(self):
        LOCALSTORAGE_TESTS = [
            # (test result element ID, expected stored, expected empty)
            ('get-item', "qwerty", "null"),
            ('get-property', "asdf", "undefined"),
            ('get-item-proto', "qwerty", "null"),
            ('get-item-srcdoc', "qwerty", "null"),
            ('get-property-srcdoc', "asdf", "undefined"),
            ('get-item-frames', "qwerty", "null"),
            ('get-property-frames', "asdf", "undefined"),
        ]
        # page loads a frame that writes to and reads from localStorage
        # TODO remove delays from fixture once race condition (https://crbug.com/478183) is fixed
        FIXTURE_URL = "https://privacybadger-tests.eff.org/html/clobbering.html"
        FRAME_DOMAIN = "efforg.github.io"

        # first allow localStorage to be set
        self.load_url(FIXTURE_URL)
        self.wait_for_and_switch_to_frame('iframe')
        # (fixed: the original message's implicit string concatenation ran
        # two words together: "successfullyfor some reason")
        self.check_localstorage_results(
            LOCALSTORAGE_TESTS, 1,
            "localStorage (%s) was not read successfully for some reason")

        # mark the frame domain for cookieblocking
        self.cookieblock_domain(FRAME_DOMAIN)

        # now rerun and check results for various localStorage access tests
        self.load_url(FIXTURE_URL)
        self.wait_for_and_switch_to_frame('iframe')
        self.check_localstorage_results(
            LOCALSTORAGE_TESTS, 2,
            "localStorage (%s) was read despite cookieblocking")

    def test_referrer_header(self):
        FIXTURE_URL = (
            "https://efforg.github.io/privacybadger-test-fixtures/html/"
            "referrer.html"
        )
        THIRD_PARTY_DOMAIN = "httpbin.org"

        def verify_referrer_header(expected, failure_message):
            """Load the fixture and compare the reported Referer header."""
            self.load_url(FIXTURE_URL)
            self.wait_for_script(
                "return document.getElementById('referrer').textContent != '';")
            referrer = self.txt_by_css("#referrer")
            self.assertEqual(referrer[0:8], "Referer=", "Unexpected page output")
            self.assertEqual(referrer[8:], expected, failure_message)

        # verify base case
        verify_referrer_header(
            FIXTURE_URL,
            "Unexpected default referrer header"
        )

        # cookieblock the domain fetched by the fixture
        self.cookieblock_domain(THIRD_PARTY_DOMAIN)

        # recheck what the referrer header looks like now after cookieblocking
        verify_referrer_header(
            "https://efforg.github.io/",
            "Referrer header does not appear to be origin-only"
        )
if __name__ == "__main__":
unittest.main()
|
Auto insurance premiums in OK. The Best Car Insurance Quotes Available Here at Rock Bottom Prices!
The last 1-3 years, your car was written off then you may also decide not to mention the wild card expense of this caliber to be implemented. Hence, if you are not the only difference of consequence is the reputation of the week before school begins. Call in advance before being allowed to sell insurance direct to the insured car premium payments for decades. More importantly, I didn't have their own bills and feel you are accident free.
Consequently, over 50% of accidents and their guests and allow website visitors to review your coverage's limit. If you have to stop eating out for when signing on the weekends, it's more environment-friendly. Must display pride of ownership when you set out to purchase. They may offer you the willpower to change, and feel the risk being posed to general car insurance. By being prudent enough to see rate quotes from the truth. The device will continue to exist. This is what all providers will ask you about how far you drive, the car? What this new practice might not make any notes at the next step and decision you make.
Due to whiplash claims among other expenses for legal defense that you will greatly reduce the cost to insure. A wide range of insurance out there, less than fully comprehensive auto insurance premiums in OK is the right source to obtain quotes for auto insurance premiums in OK. If you seem to make money from which you wouldn't normally find in your area. If you are having trouble using credit cards and improve it within the investment arms. Living Expenses would significantly increase the risk insurers will ask you when you know that the owners/managers/shareholders/directors figured it cost them that can far exceed the time and compare policies and histories from the insurance you need more coverage that many factors including leg room, aircraft. Here are literally hundreds of pounds. Continue reading for a holiday, auto insurance premiums in OK companies.
Owners also take them regularly to the radiator as we find the answer to their emerging debts. The cheapest premium doesn't mean you are able to go out for the exact same information you will surely not be offering you a lot of both parties. You must spend much more information than you are seen to be looking at has been driving for, the first place to save money, instant car insurance policy to get. Forgetting to pack necessary items can you get a quote in North Dakota that also provide coverage for your talk with agent make sure you read through and understand your policies and ensure that your first home.
|
# Copyright (c) 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Common code generator for command buffers."""
import errno
import itertools
import os
import os.path
import re
import platform
from subprocess import call
_SIZE_OF_UINT32 = 4
_SIZE_OF_COMMAND_HEADER = 4
_FIRST_SPECIFIC_COMMAND_ID = 256
_LICENSE = """// Copyright %s The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
"""
_DO_NOT_EDIT_WARNING = """// This file is auto-generated from
// gpu/command_buffer/build_%s_cmd_buffer.py
// It's formatted by clang-format using chromium coding style:
// clang-format -i -style=chromium filename
// DO NOT EDIT!
"""
# This string is copied directly out of the gl2.h file from GLES2.0
#
# Edits:
#
# *) Any argument that is a resourceID has been changed to GLid<Type>.
# (not pointer arguments) and if it's allowed to be zero it's GLidZero<Type>
# If it's allowed to not exist it's GLidBind<Type>
#
# *) All GLenums have been changed to GLenumTypeOfEnum
#
# Maps GL type names to the underlying C types emitted in generated code.
_GL_TYPES = {
  'GLenum': 'unsigned int',
  'GLboolean': 'unsigned char',
  'GLbitfield': 'unsigned int',
  'GLbyte': 'signed char',
  'GLshort': 'short',
  'GLint': 'int',
  'GLsizei': 'int',
  'GLubyte': 'unsigned char',
  'GLushort': 'unsigned short',
  'GLuint': 'unsigned int',
  'GLfloat': 'float',
  'GLclampf': 'float',
  'GLvoid': 'void',
  'GLfixed': 'int',
  'GLclampx': 'int'
}
# Pointer-sized GL types differ between 32-bit and 64-bit targets;
# one of these tables is selected per target architecture.
_GL_TYPES_32 = {
  'GLintptr': 'long int',
  'GLsizeiptr': 'long int'
}
_GL_TYPES_64 = {
  'GLintptr': 'long long int',
  'GLsizeiptr': 'long long int'
}
# ETC2/EAC compressed texture format enums.
_ETC_COMPRESSED_TEXTURE_FORMATS = [
  'GL_COMPRESSED_R11_EAC',
  'GL_COMPRESSED_SIGNED_R11_EAC',
  'GL_COMPRESSED_RG11_EAC',
  'GL_COMPRESSED_SIGNED_RG11_EAC',
  'GL_COMPRESSED_RGB8_ETC2',
  'GL_COMPRESSED_SRGB8_ETC2',
  'GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2',
  'GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2',
  'GL_COMPRESSED_RGBA8_ETC2_EAC',
  'GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC',
]
# This table specifies the different pepper interfaces that are supported for
# GL commands. 'dev' is true if it's a dev interface.
# NOTE(review): the empty name presumably denotes the core (unsuffixed)
# interface — confirm against the pepper bindings.
_PEPPER_INTERFACES = [
  {'name': '', 'dev': False},
  {'name': 'InstancedArrays', 'dev': False},
  {'name': 'FramebufferBlit', 'dev': False},
  {'name': 'FramebufferMultisample', 'dev': False},
  {'name': 'ChromiumEnableFeature', 'dev': False},
  {'name': 'ChromiumMapSub', 'dev': False},
  {'name': 'Query', 'dev': False},
  {'name': 'VertexArrayObject', 'dev': False},
  {'name': 'DrawBuffers', 'dev': True},
]
# Capabilities selected with glEnable
# on_change: string of C++ code that is executed when the state is changed.
_CAPABILITY_FLAGS = [
{'name': 'blend'},
{'name': 'cull_face'},
{'name': 'depth_test',
'on_change': 'framebuffer_state_.clear_state_dirty = true;'},
{'name': 'dither', 'default': True},
{'name': 'framebuffer_srgb_ext', 'default': True, 'no_init': True,
'extension_flag': 'ext_srgb_write_control'},
{'name': 'polygon_offset_fill'},
{'name': 'sample_alpha_to_coverage'},
{'name': 'sample_coverage'},
{'name': 'scissor_test'},
{'name': 'stencil_test',
'on_change': '''state_.stencil_state_changed_since_validation = true;
framebuffer_state_.clear_state_dirty = true;'''},
{'name': 'rasterizer_discard', 'es3': True},
{'name': 'primitive_restart_fixed_index', 'es3': True},
{'name': 'multisample_ext', 'default': True,
'extension_flag': 'ext_multisample_compatibility'},
{'name': 'sample_alpha_to_one_ext',
'extension_flag': 'ext_multisample_compatibility'},
]
_STATE_INFO = {
'ClearColor': {
'type': 'Normal',
'func': 'ClearColor',
'enum': 'GL_COLOR_CLEAR_VALUE',
'states': [
{'name': 'color_clear_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'color_clear_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'ClearDepthf': {
'type': 'Normal',
'func': 'ClearDepth',
'enum': 'GL_DEPTH_CLEAR_VALUE',
'states': [
{'name': 'depth_clear', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'ColorMask': {
'type': 'Normal',
'func': 'ColorMask',
'enum': 'GL_COLOR_WRITEMASK',
'states': [
{
'name': 'color_mask_red',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_green',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_blue',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
{
'name': 'color_mask_alpha',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'on_change': 'framebuffer_state_.clear_state_dirty = true;',
},
'ClearStencil': {
'type': 'Normal',
'func': 'ClearStencil',
'enum': 'GL_STENCIL_CLEAR_VALUE',
'states': [
{'name': 'stencil_clear', 'type': 'GLint', 'default': '0'},
],
},
'CoverageModulationCHROMIUM': {
'type': 'Normal',
'func': 'CoverageModulationNV',
'extension_flag': 'chromium_framebuffer_mixed_samples',
'states': [
{ 'enum': 'GL_COVERAGE_MODULATION_CHROMIUM',
'name': 'coverage_modulation',
'type': 'GLenum',
'default': 'GL_NONE',
},
]
},
'BlendColor': {
'type': 'Normal',
'func': 'BlendColor',
'enum': 'GL_BLEND_COLOR',
'states': [
{'name': 'blend_color_red', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_green', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_blue', 'type': 'GLfloat', 'default': '0.0f'},
{'name': 'blend_color_alpha', 'type': 'GLfloat', 'default': '0.0f'},
],
},
'BlendEquation': {
'type': 'SrcDst',
'func': 'BlendEquationSeparate',
'states': [
{
'name': 'blend_equation_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_RGB',
'default': 'GL_FUNC_ADD',
},
{
'name': 'blend_equation_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_EQUATION_ALPHA',
'default': 'GL_FUNC_ADD',
},
],
},
'BlendFunc': {
'type': 'SrcDst',
'func': 'BlendFuncSeparate',
'states': [
{
'name': 'blend_source_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_RGB',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_rgb',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_RGB',
'default': 'GL_ZERO',
},
{
'name': 'blend_source_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_SRC_ALPHA',
'default': 'GL_ONE',
},
{
'name': 'blend_dest_alpha',
'type': 'GLenum',
'enum': 'GL_BLEND_DST_ALPHA',
'default': 'GL_ZERO',
},
],
},
'PolygonOffset': {
'type': 'Normal',
'func': 'PolygonOffset',
'states': [
{
'name': 'polygon_offset_factor',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_FACTOR',
'default': '0.0f',
},
{
'name': 'polygon_offset_units',
'type': 'GLfloat',
'enum': 'GL_POLYGON_OFFSET_UNITS',
'default': '0.0f',
},
],
},
'CullFace': {
'type': 'Normal',
'func': 'CullFace',
'enum': 'GL_CULL_FACE_MODE',
'states': [
{
'name': 'cull_mode',
'type': 'GLenum',
'default': 'GL_BACK',
},
],
},
'FrontFace': {
'type': 'Normal',
'func': 'FrontFace',
'enum': 'GL_FRONT_FACE',
'states': [{'name': 'front_face', 'type': 'GLenum', 'default': 'GL_CCW'}],
},
'DepthFunc': {
'type': 'Normal',
'func': 'DepthFunc',
'enum': 'GL_DEPTH_FUNC',
'states': [{'name': 'depth_func', 'type': 'GLenum', 'default': 'GL_LESS'}],
},
'DepthRange': {
'type': 'Normal',
'func': 'DepthRange',
'enum': 'GL_DEPTH_RANGE',
'states': [
{'name': 'z_near', 'type': 'GLclampf', 'default': '0.0f'},
{'name': 'z_far', 'type': 'GLclampf', 'default': '1.0f'},
],
},
'SampleCoverage': {
'type': 'Normal',
'func': 'SampleCoverage',
'states': [
{
'name': 'sample_coverage_value',
'type': 'GLclampf',
'enum': 'GL_SAMPLE_COVERAGE_VALUE',
'default': '1.0f',
},
{
'name': 'sample_coverage_invert',
'type': 'GLboolean',
'enum': 'GL_SAMPLE_COVERAGE_INVERT',
'default': 'false',
},
],
},
'StencilMask': {
'type': 'FrontBack',
'func': 'StencilMaskSeparate',
'states': [
{
'name': 'stencil_front_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
{
'name': 'stencil_back_writemask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_WRITEMASK',
'default': '0xFFFFFFFFU',
'cached': True,
},
],
'on_change': '''framebuffer_state_.clear_state_dirty = true;
state_.stencil_state_changed_since_validation = true;''',
},
'StencilOp': {
'type': 'FrontBack',
'func': 'StencilOpSeparate',
'states': [
{
'name': 'stencil_front_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_front_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_fail_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_FAIL',
'default': 'GL_KEEP',
},
{
'name': 'stencil_back_z_pass_op',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_PASS_DEPTH_PASS',
'default': 'GL_KEEP',
},
],
},
'StencilFunc': {
'type': 'FrontBack',
'func': 'StencilFuncSeparate',
'states': [
{
'name': 'stencil_front_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_front_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_REF',
'default': '0',
},
{
'name': 'stencil_front_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
{
'name': 'stencil_back_func',
'type': 'GLenum',
'enum': 'GL_STENCIL_BACK_FUNC',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_back_ref',
'type': 'GLint',
'enum': 'GL_STENCIL_BACK_REF',
'default': '0',
},
{
'name': 'stencil_back_mask',
'type': 'GLuint',
'enum': 'GL_STENCIL_BACK_VALUE_MASK',
'default': '0xFFFFFFFFU',
},
],
'on_change': 'state_.stencil_state_changed_since_validation = true;',
},
'Hint': {
'type': 'NamedParameter',
'func': 'Hint',
'states': [
{
'name': 'hint_generate_mipmap',
'type': 'GLenum',
'enum': 'GL_GENERATE_MIPMAP_HINT',
'default': 'GL_DONT_CARE',
'gl_version_flag': '!is_desktop_core_profile'
},
{
'name': 'hint_fragment_shader_derivative',
'type': 'GLenum',
'enum': 'GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES',
'default': 'GL_DONT_CARE',
'extension_flag': 'oes_standard_derivatives'
},
{
'name': 'hint_texture_filtering',
'type': 'GLenum',
'enum': 'GL_TEXTURE_FILTERING_HINT_CHROMIUM',
'default': 'GL_NICEST',
'extension_flag': 'chromium_texture_filtering_hint'
}
],
},
'PixelStore': {
'type': 'NamedParameter',
'func': 'PixelStorei',
'states': [
{
'name': 'pack_alignment',
'type': 'GLint',
'enum': 'GL_PACK_ALIGNMENT',
'default': '4'
},
{
'name': 'unpack_alignment',
'type': 'GLint',
'enum': 'GL_UNPACK_ALIGNMENT',
'default': '4'
},
{
'name': 'pack_row_length',
'type': 'GLint',
'enum': 'GL_PACK_ROW_LENGTH',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'pack_skip_pixels',
'type': 'GLint',
'enum': 'GL_PACK_SKIP_PIXELS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'pack_skip_rows',
'type': 'GLint',
'enum': 'GL_PACK_SKIP_ROWS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_row_length',
'type': 'GLint',
'enum': 'GL_UNPACK_ROW_LENGTH',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_image_height',
'type': 'GLint',
'enum': 'GL_UNPACK_IMAGE_HEIGHT',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_skip_pixels',
'type': 'GLint',
'enum': 'GL_UNPACK_SKIP_PIXELS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_skip_rows',
'type': 'GLint',
'enum': 'GL_UNPACK_SKIP_ROWS',
'default': '0',
'es3': True,
'manual': True,
},
{
'name': 'unpack_skip_images',
'type': 'GLint',
'enum': 'GL_UNPACK_SKIP_IMAGES',
'default': '0',
'es3': True,
'manual': True,
}
],
},
# TODO: Consider implemenenting these states
# GL_ACTIVE_TEXTURE
'LineWidth': {
'type': 'Normal',
'custom_function' : True,
'func': 'DoLineWidth',
'enum': 'GL_LINE_WIDTH',
'states': [
{
'name': 'line_width',
'type': 'GLfloat',
'default': '1.0f',
'range_checks': [{'check': "<= 0.0f", 'test_value': "0.0f"}],
'nan_check': True,
}],
},
'DepthMask': {
'type': 'Normal',
'func': 'DepthMask',
'enum': 'GL_DEPTH_WRITEMASK',
'states': [
{
'name': 'depth_mask',
'type': 'GLboolean',
'default': 'true',
'cached': True
},
],
'on_change': 'framebuffer_state_.clear_state_dirty = true;',
},
'Scissor': {
'type': 'Normal',
'func': 'Scissor',
'enum': 'GL_SCISSOR_BOX',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'scissor_x',
'type': 'GLint',
'default': '0',
},
{
'name': 'scissor_y',
'type': 'GLint',
'default': '0',
},
{
'name': 'scissor_width',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.width()',
},
{
'name': 'scissor_height',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.height()',
},
],
},
'Viewport': {
'type': 'Normal',
'func': 'Viewport',
'enum': 'GL_VIEWPORT',
'states': [
# NOTE: These defaults reset at GLES2DecoderImpl::Initialization.
{
'name': 'viewport_x',
'type': 'GLint',
'default': '0',
},
{
'name': 'viewport_y',
'type': 'GLint',
'default': '0',
},
{
'name': 'viewport_width',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.width()',
},
{
'name': 'viewport_height',
'type': 'GLsizei',
'default': '1',
'expected': 'initial_size.height()',
},
],
},
'MatrixValuesCHROMIUM': {
'type': 'NamedParameter',
'func': 'MatrixLoadfEXT',
'states': [
{ 'enum': 'GL_PATH_MODELVIEW_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_MODELVIEW_CHROMIUM',
'name': 'modelview_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
{ 'enum': 'GL_PATH_PROJECTION_MATRIX_CHROMIUM',
'enum_set': 'GL_PATH_PROJECTION_CHROMIUM',
'name': 'projection_matrix',
'type': 'GLfloat',
'default': [
'1.0f', '0.0f','0.0f','0.0f',
'0.0f', '1.0f','0.0f','0.0f',
'0.0f', '0.0f','1.0f','0.0f',
'0.0f', '0.0f','0.0f','1.0f',
],
'extension_flag': 'chromium_path_rendering',
},
],
},
'PathStencilFuncCHROMIUM': {
'type': 'Normal',
'func': 'PathStencilFuncNV',
'extension_flag': 'chromium_path_rendering',
'states': [
{
'name': 'stencil_path_func',
'type': 'GLenum',
'enum': 'GL_PATH_STENCIL_FUNC_CHROMIUM',
'default': 'GL_ALWAYS',
},
{
'name': 'stencil_path_ref',
'type': 'GLint',
'enum': 'GL_PATH_STENCIL_REF_CHROMIUM',
'default': '0',
},
{
'name': 'stencil_path_mask',
'type': 'GLuint',
'enum': 'GL_PATH_STENCIL_VALUE_MASK_CHROMIUM',
'default': '0xFFFFFFFFU',
},
],
},
'WindowRectanglesEXT': {
'type': 'Normal',
'func': 'WindowRectanglesEXT',
'custom_function': True,
'extension_flag': 'ext_window_rectangles',
'no_init': True,
'states': [
{
'name': 'window_rectangles_mode',
'type': 'GLenum',
'enum': 'GL_WINDOW_RECTANGLE_MODE_EXT',
'default': 'GL_EXCLUSIVE_EXT',
},
{
'name': 'num_window_rectangles',
'type': 'GLint',
'enum': 'GL_NUM_WINDOW_RECTANGLES_EXT',
'default': '0',
},
],
},
}
# Mixed-, upper- and lower-case variants of the API prefix (e.g. 'Gles2').
# Populated exactly once by InitializePrefix() before any code generation.
_prefix = None
_upper_prefix = None
_lower_prefix = None
def InitializePrefix(mixed_case_prefix):
  """Initialize prefix used for autogenerated code.

  Must be called before autogenerating code. Prefixes are used by autogenerated
  code in many places: class names, filenames, namespaces, constants,
  defines. Given a single mixed case prefix suitable for a class name, we also
  initialize lower and upper case prefixes for other uses (e.g. filenames and
  #defines).
  """
  global _prefix, _upper_prefix, _lower_prefix
  # Initializing twice would silently mix two APIs' generated code.
  if _prefix:
    raise AssertionError
  _prefix = mixed_case_prefix
  _upper_prefix = mixed_case_prefix.upper()
  _lower_prefix = mixed_case_prefix.lower()
def _Namespace():
  """Return the C++ namespace qualifier for shared gles2 types.

  Code generated for a non-gles2 API must qualify names with 'gles2::';
  code generated for gles2 itself lives in that namespace already.
  """
  return '' if _lower_prefix == 'gles2' else 'gles2::'
def Grouper(n, iterable, fillvalue=None):
  """Collect data into fixed-length chunks or blocks.

  Grouper(3, 'ABCDEFG', 'x') --> ('A','B','C') ('D','E','F') ('G','x','x')
  """
  args = [iter(iterable)] * n
  # itertools.izip_longest was renamed to zip_longest in Python 3; support
  # both so the generator runs under either interpreter.
  try:
    zip_longest = itertools.zip_longest
  except AttributeError:  # Python 2
    zip_longest = itertools.izip_longest
  return zip_longest(fillvalue=fillvalue, *args)
def SplitWords(input_string):
  """Split by '_' if found, otherwise split at uppercase/numeric chars.

  Will split "some_TEXT" into ["some", "TEXT"], "CamelCase" into ["Camel",
  "Case"], and "Vector3" into ["Vector", "3"].
  """
  if '_' in input_string:
    # 'some_TEXT_' -> ['some', 'TEXT']
    return input_string.replace('_', ' ').split()
  text = input_string.replace('::', ' ')
  if re.search('[A-Z]', text) and re.search('[a-z]', text):
    # Mixed case: cut before each capital ('SomeText' -> 'Some Text'),
    # then before each digit run ('Vector3' -> 'Vector 3').
    text = re.sub('([A-Z])', r' \1', text).strip()
    text = re.sub('([^0-9])([0-9])', r'\1 \2', text)
  return text.split()
def ToUnderscore(input_string):
  """Convert CamelCase to camel_case."""
  return '_'.join(word.lower() for word in SplitWords(input_string))
def ValidatorClassName(type_name):
  """Convert some::namespace::TypeName to SomeNamespaceTypeNameValidator."""
  capitalized = [word.title() for word in SplitWords(type_name)]
  return '%sValidator' % ''.join(capitalized)
def CachedStateName(item):
  """Return the state member's name, prefixed with 'cached_' when cached."""
  name = item['name']
  return 'cached_' + name if item.get('cached', False) else name
def GuardState(state, operation, feature_info):
  """Wrap C++ `operation` in the runtime guards a state entry requires.

  Builds nested `if` guards for 'es3', 'extension_flag' and
  'gl_version_flag' entries; 'manual' entries emit nothing at all.
  """
  if 'manual' in state:
    assert state['manual']
    return ""
  prologue = []
  epilogue = []
  if 'es3' in state:
    assert state['es3']
    prologue.append(" if (%s->IsES3Capable()) {\n" % feature_info)
    epilogue.append(" }\n")
  if 'extension_flag' in state:
    prologue.append(" if (%s->feature_flags().%s) {\n " %
                    (feature_info, state['extension_flag']))
    epilogue.append(" }\n")
  if 'gl_version_flag' in state:
    flag = state['gl_version_flag']
    # A leading '!' inverts the version test.
    negate = '!' if flag.startswith('!') else ''
    if negate:
      flag = flag[1:]
    prologue.append(" if (%s%s->gl_version_info().%s) {\n" %
                    (negate, feature_info, flag))
    epilogue.append(" }\n")
  return ''.join(prologue + [operation] + epilogue)
def ToGLExtensionString(extension_flag):
  """Return the GL-style extension string for an extension flag."""
  if extension_flag == "oes_compressed_etc1_rgb8_texture":
    # Fixup inconsistency with rgb8, unfortunate.
    return "OES_compressed_ETC1_RGB8_texture"
  # Vendor/format tokens that appear upper-case in real extension names.
  uppercase_words = frozenset(['img', 'ext', 'arb', 'chromium', 'oes', 'amd',
                               'bgra8888', 'egl', 'atc', 'etc1', 'angle'])
  converted = []
  for part in extension_flag.split('_'):
    converted.append(part.upper() if part in uppercase_words else part)
  return "_".join(converted)
def ToCamelCase(input_string):
  """Convert ABC_underscore_case to ABCUnderscoreCase.

  Uses `w[:1]` instead of `w[0]` so that empty segments produced by
  consecutive, leading or trailing underscores are skipped instead of
  raising IndexError; output for well-formed input is unchanged.
  """
  return ''.join(w[:1].upper() + w[1:] for w in input_string.split('_'))
def EnumsConflict(a, b):
  """Return True if the enums have different names (ignoring suffixes) and one
  of them is a Chromium enum."""
  if a == b:
    return False
  # Normalize so that `a` is the Chromium-suffixed enum, if either one is.
  if b.endswith('_CHROMIUM'):
    a, b = b, a
  if not a.endswith('_CHROMIUM'):
    return False
  def _strip(name, suffix):
    # Remove `suffix` from the end of `name`, if present.
    return name[:-len(suffix)] if name.endswith(suffix) else name
  for vendor_suffix in ("_NV", "_EXT", "_OES"):
    b = _strip(b, vendor_suffix)
  return _strip(a, "_CHROMIUM") != b
def GetGLGetTypeConversion(result_type, value_type, value):
  """Make a gl compatible type conversion string for accessing state variables.

  Useful when accessing state variables through glGetXXX calls.
  glGet documentation (for example, the manual pages):
  [...] If glGetIntegerv is called, [...] most floating-point values are
  rounded to the nearest integer value. [...]

  Args:
    result_type: the gl type to be obtained
    value_type: the GL type of the state variable
    value: the name of the state variable

  Returns:
    String that converts the state variable to desired GL type according to GL
    rules.
  """
  if result_type == 'GLint' and value_type == 'GLfloat':
    # Per the glGet rules quoted above, floats queried as ints are rounded.
    return 'static_cast<GLint>(round(%s))' % value
  return 'static_cast<%s>(%s)' % (result_type, value)
class CWriter(object):
  """Context manager that creates a C source file.

  To be used with the `with` statement. Returns a normal `file` type, open only
  for writing - any existing files with that name will be overwritten. It will
  automatically write the contents of `_LICENSE` and `_DO_NOT_EDIT_WARNING`
  at the beginning.

  Example:
    with CWriter("file.cpp") as myfile:
      myfile.write("hello")
      # type(myfile) == file
  """
  def __init__(self, filename, year):
    self.filename = filename
    self._ENTER_MSG = _LICENSE % year + _DO_NOT_EDIT_WARNING % _lower_prefix
    self._EXIT_MSG = ""
    dirname = os.path.dirname(filename)
    # A bare filename has no directory component; os.makedirs('') would raise.
    if dirname:
      try:
        os.makedirs(dirname)
      except OSError as e:
        # An already-existing directory is fine; anything else (permissions,
        # read-only filesystem, ...) must not be silently swallowed.
        if e.errno != errno.EEXIST:
          raise
    self._file = open(filename, 'wb')

  def __enter__(self):
    self._file.write(self._ENTER_MSG)
    return self._file

  def __exit__(self, exc_type, exc_value, traceback):
    self._file.write(self._EXIT_MSG)
    self._file.close()
class CHeaderWriter(CWriter):
  """Context manager that creates a C header file.

  Works the same way as CWriter, except it will also add the #ifdef guard
  around it. If `file_comment` is set, it will write that before the #ifdef
  guard.
  """
  def __init__(self, filename, year, file_comment=None):
    super(CHeaderWriter, self).__init__(filename, year)
    guard = self._get_guard()
    comment = "" if file_comment is None else file_comment
    self._ENTER_MSG += comment + "#ifndef %s\n#define %s\n\n" % (guard, guard)
    self._EXIT_MSG += "#endif // %s\n" % guard

  def _get_guard(self):
    """Derive the include-guard macro from the (gpu/-rooted) file path."""
    assert self.filename.startswith("gpu/")
    # Every non-alphanumeric character becomes '_', upper-cased, plus a
    # trailing '_' per Chromium guard convention.
    return re.sub(r'[^a-zA-Z0-9]', '_', self.filename).upper() + '_'
class TypeHandler(object):
  """This class emits code for a particular type of function.

  Subclasses override individual Write* hooks to customize how commands of
  their category are generated (cmd structs, service handlers, client
  implementations, and their unit tests). Each Write* method appends
  generated C++ to the file-like object `f`.
  """

  # Matches whole EXPECT_CALL(...); statements so they can be stripped from
  # unit-test templates when a function sets 'expectation': False.
  _remove_expected_call_re = re.compile(r' EXPECT_CALL.*?;\n', re.S)

  def InitFunction(self, func):
    """Add or adjust anything type specific for this function."""
    if func.GetInfo('needs_size') and not func.name.endswith('Bucket'):
      func.AddCmdArg(DataSizeArgument('data_size'))

  def NeedsDataTransferFunction(self, func):
    """Whether a data transfer method is needed (any pointer argument)."""
    return func.num_pointer_args >= 1

  def WriteStruct(self, func, f):
    """Writes a structure that matches the arguments to a function."""
    comment = func.GetInfo('cmd_comment')
    if comment is not None:
      f.write(comment)
    f.write("struct %s {\n" % func.name)
    f.write(" typedef %s ValueType;\n" % func.name)
    f.write(" static const CommandId kCmdId = k%s;\n" % func.name)
    func.WriteCmdArgFlag(f)
    func.WriteCmdFlag(f)
    f.write("\n")
    result = func.GetInfo('result')
    if result is not None:
      if len(result) == 1:
        f.write(" typedef %s Result;\n\n" % result[0])
      else:
        f.write(" struct Result {\n")
        for line in result:
          f.write(" %s;\n" % line)
        f.write(" };\n\n")
    func.WriteCmdComputeSize(f)
    func.WriteCmdSetHeader(f)
    func.WriteCmdInit(f)
    func.WriteCmdSet(f)
    func.WriteArgAccessors(f)
    f.write(" gpu::CommandHeader header;\n")
    total_args = 0
    args = func.GetCmdArgs()
    for arg in args:
      for cmd_type, name in arg.GetArgDecls():
        f.write(" %s %s;\n" % (cmd_type, name))
        total_args += 1
    trace_queue = func.GetInfo('trace_queueing_flow', False)
    if trace_queue:
      f.write(" uint32_t trace_id;\n")
      total_args += 1
    consts = func.GetCmdConstants()
    for const in consts:
      const_decls = const.GetArgDecls()
      assert len(const_decls) == 1
      const_cmd_type, const_name = const_decls[0]
      f.write(" static const %s %s = %s;\n" %
              (const_cmd_type, const_name, const.GetConstantValue()))
    f.write("};\n")
    f.write("\n")
    # Compile-time layout checks: each cmd field occupies one uint32 slot
    # after the command header, in declaration order.
    size = total_args * _SIZE_OF_UINT32 + _SIZE_OF_COMMAND_HEADER
    f.write("static_assert(sizeof(%s) == %d,\n" % (func.name, size))
    f.write(" \"size of %s should be %d\");\n" %
            (func.name, size))
    f.write("static_assert(offsetof(%s, header) == 0,\n" % func.name)
    f.write(" \"offset of %s header should be 0\");\n" %
            func.name)
    offset = _SIZE_OF_COMMAND_HEADER
    for arg in args:
      for _, name in arg.GetArgDecls():
        f.write("static_assert(offsetof(%s, %s) == %d,\n" %
                (func.name, name, offset))
        f.write(" \"offset of %s %s should be %d\");\n" %
                (func.name, name, offset))
        offset += _SIZE_OF_UINT32
    if result is not None and len(result) > 1:
      offset = 0
      for line in result:
        parts = line.split()
        name = parts[-1]
        check = """
static_assert(offsetof(%(cmd_name)s::Result, %(field_name)s) == %(offset)d,
              "offset of %(cmd_name)s Result %(field_name)s should be "
              "%(offset)d");
"""
        f.write((check.strip() + "\n") % {
          'cmd_name': func.name,
          'field_name': name,
          'offset': offset,
        })
        offset += _SIZE_OF_UINT32
    f.write("\n")

  def WriteHandlerImplementation(self, func, f):
    """Writes the handler implementation for this command."""
    args = []
    for arg in func.GetOriginalArgs():
      if arg.name.endswith("size") and arg.type == "GLsizei":
        args.append("num_%s" % func.GetLastOriginalArg().name)
      elif arg.name == "length":
        args.append("nullptr")
      else:
        args.append(arg.name)
    if func.GetInfo('type') == 'GETn' and func.name != 'GetSynciv':
      args.append('num_values')
    f.write(" %s(%s);\n" %
            (func.GetGLFunctionName(), ", ".join(args)))

  def WriteCmdSizeTest(self, _func, f):
    """Writes the size test for a command."""
    f.write(" EXPECT_EQ(sizeof(cmd), cmd.header.size * 4u);\n")

  def WriteFormatTest(self, func, f):
    """Writes a format test for a command."""
    f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
    f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
            (func.name, func.name))
    f.write(" void* next_cmd = cmd.Set(\n")
    f.write(" &cmd")
    args = func.GetCmdArgs()
    # Fill each arg with a distinct value (11, 12, ...) so field order
    # mismatches are caught.
    for value, arg in enumerate(args):
      f.write(",\n static_cast<%s>(%d)" % (arg.type, value + 11))
    f.write(");\n")
    f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
            func.name)
    f.write(" cmd.header.command);\n")
    func.type_handler.WriteCmdSizeTest(func, f)
    for value, arg in enumerate(args):
      f.write(" EXPECT_EQ(static_cast<%s>(%d), %s);\n" %
              (arg.type, value + 11, arg.GetArgAccessor('cmd')))
    f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
    f.write(" next_cmd, sizeof(cmd));\n")
    f.write("}\n")
    f.write("\n")

  def WriteImmediateFormatTest(self, func, f):
    """Writes a format test for an immediate version of a command."""
    pass

  def WriteGetDataSizeCode(self, func, arg, f):
    """Writes the code to set data_size used in validation."""
    pass

  def WriteImmediateHandlerImplementation(self, func, f):
    """Writes the handler impl for the immediate version of a command."""
    f.write(" %s(%s);\n" %
            (func.GetGLFunctionName(), func.MakeOriginalArgString("")))

  def WriteBucketHandlerImplementation(self, func, f):
    """Writes the handler impl for the bucket version of a command."""
    f.write(" %s(%s);\n" %
            (func.GetGLFunctionName(), func.MakeOriginalArgString("")))

  def WriteServiceHandlerFunctionHeader(self, func, f):
    """Writes function header for service implementation handlers."""
    f.write("""error::Error %(prefix)sDecoderImpl::Handle%(name)s(
    uint32_t immediate_data_size, const volatile void* cmd_data) {
""" % {'name': func.name, 'prefix' : _prefix})
    if func.IsES3():
      f.write("""if (!feature_info_->IsWebGL2OrES3OrHigherContext())
    return error::kUnknownCommand;
""")
    if func.IsES31():
      # ES31 commands are unconditionally rejected: emit the whole stub body
      # and stop; callers check IsES31() and skip the rest of the handler.
      f.write("""return error::kUnknownCommand;
}
""")
      return
    if func.GetCmdArgs():
      f.write("""const volatile %(prefix)s::cmds::%(name)s& c =
      *static_cast<const volatile %(prefix)s::cmds::%(name)s*>(cmd_data);
""" % {'name': func.name, 'prefix': _lower_prefix})

  def WriteServiceHandlerArgGetCode(self, func, f):
    """Writes the argument unpack code for service handlers."""
    if len(func.GetOriginalArgs()) > 0:
      for arg in func.GetOriginalArgs():
        if not arg.IsPointer():
          arg.WriteGetCode(f)
      # Write pointer arguments second. Sizes may be dependent on other args
      for arg in func.GetOriginalArgs():
        if arg.IsPointer():
          self.WriteGetDataSizeCode(func, arg, f)
          arg.WriteGetCode(f)

  def WriteImmediateServiceHandlerArgGetCode(self, func, f):
    """Writes the argument unpack code for immediate service handlers."""
    for arg in func.GetOriginalArgs():
      if arg.IsPointer():
        self.WriteGetDataSizeCode(func, arg, f)
      arg.WriteGetCode(f)

  def WriteBucketServiceHandlerArgGetCode(self, func, f):
    """Writes the argument unpack code for bucket service handlers."""
    for arg in func.GetCmdArgs():
      arg.WriteGetCode(f)
    for arg in func.GetOriginalArgs():
      if arg.IsConstant():
        arg.WriteGetCode(f)
      self.WriteGetDataSizeCode(func, arg, f)

  def WriteServiceImplementation(self, func, f):
    """Writes the service implementation for a command."""
    self.WriteServiceHandlerFunctionHeader(func, f)
    if func.IsES31():
      # Header already emitted a complete "unknown command" stub.
      return
    self.WriteHandlerExtensionCheck(func, f)
    self.WriteHandlerDeferReadWrite(func, f)
    self.WriteServiceHandlerArgGetCode(func, f)
    func.WriteHandlerValidation(f)
    func.WriteQueueTraceEvent(f)
    func.WriteHandlerImplementation(f)
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WriteImmediateServiceImplementation(self, func, f):
    """Writes the service implementation for an immediate version of command."""
    self.WriteServiceHandlerFunctionHeader(func, f)
    if func.IsES31():
      return
    self.WriteHandlerExtensionCheck(func, f)
    self.WriteHandlerDeferReadWrite(func, f)
    self.WriteImmediateServiceHandlerArgGetCode(func, f)
    func.WriteHandlerValidation(f)
    func.WriteQueueTraceEvent(f)
    func.WriteHandlerImplementation(f)
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WriteBucketServiceImplementation(self, func, f):
    """Writes the service implementation for a bucket version of command."""
    self.WriteServiceHandlerFunctionHeader(func, f)
    if func.IsES31():
      return
    self.WriteHandlerExtensionCheck(func, f)
    self.WriteHandlerDeferReadWrite(func, f)
    self.WriteBucketServiceHandlerArgGetCode(func, f)
    func.WriteHandlerValidation(f)
    func.WriteQueueTraceEvent(f)
    func.WriteHandlerImplementation(f)
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WritePassthroughServiceFunctionHeader(self, func, f):
    """Writes function header for service passthrough handlers."""
    f.write("""error::Error GLES2DecoderPassthroughImpl::Handle%(name)s(
    uint32_t immediate_data_size, const volatile void* cmd_data) {
""" % {'name': func.name})
    if func.IsES3():
      f.write("""if (!feature_info_->IsWebGL2OrES3OrHigherContext())
    return error::kUnknownCommand;
""")
    if func.IsES31():
      f.write("""if (!feature_info_->IsWebGL2ComputeContext())
    return error::kUnknownCommand;
""")
    if func.GetCmdArgs():
      f.write("""const volatile gles2::cmds::%(name)s& c =
      *static_cast<const volatile gles2::cmds::%(name)s*>(cmd_data);
""" % {'name': func.name})

  def WritePassthroughServiceFunctionDoerCall(self, func, f):
    """Writes the function call to the passthrough service doer."""
    # The template ends with a newline so that the caller's following
    # "return error::kNoError;" lands on its own line in the generated file.
    f.write("""  error::Error error = Do%(name)s(%(args)s);
  if (error != error::kNoError) {
    return error;
  }
""" % {'name': func.original_name,
       'args': func.MakePassthroughServiceDoerArgString("")})

  def WritePassthroughServiceImplementation(self, func, f):
    """Writes the passthrough service implementation for a command."""
    self.WritePassthroughServiceFunctionHeader(func, f)
    self.WriteHandlerExtensionCheck(func, f)
    self.WriteServiceHandlerArgGetCode(func, f)
    func.WritePassthroughHandlerValidation(f)
    self.WritePassthroughServiceFunctionDoerCall(func, f)
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WritePassthroughImmediateServiceImplementation(self, func, f):
    """Writes the passthrough service impl for an immediate command."""
    self.WritePassthroughServiceFunctionHeader(func, f)
    self.WriteHandlerExtensionCheck(func, f)
    self.WriteImmediateServiceHandlerArgGetCode(func, f)
    func.WritePassthroughHandlerValidation(f)
    self.WritePassthroughServiceFunctionDoerCall(func, f)
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WritePassthroughBucketServiceImplementation(self, func, f):
    """Writes the passthrough service impl for a bucket command."""
    self.WritePassthroughServiceFunctionHeader(func, f)
    self.WriteHandlerExtensionCheck(func, f)
    self.WriteBucketServiceHandlerArgGetCode(func, f)
    func.WritePassthroughHandlerValidation(f)
    self.WritePassthroughServiceFunctionDoerCall(func, f)
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WriteHandlerExtensionCheck(self, func, f):
    """Rejects the command at runtime when its extension is unavailable."""
    if func.GetInfo('extension_flag'):
      f.write(" if (!features().%s) {\n" % func.GetInfo('extension_flag'))
      # The statement needs its own line; without the '\n' the closing
      # brace was emitted on the same generated line.
      f.write(" return error::kUnknownCommand;\n")
      f.write(" }\n\n")

  def WriteHandlerDeferReadWrite(self, func, f):
    """Writes the code to handle deferring reads or writes."""
    defer_draws = func.GetInfo('defer_draws')
    defer_reads = func.GetInfo('defer_reads')
    if defer_draws or defer_reads:
      f.write(" error::Error error;\n")
    if defer_draws:
      f.write(" error = WillAccessBoundFramebufferForDraw();\n")
      f.write(" if (error != error::kNoError)\n")
      f.write(" return error;\n")
    if defer_reads:
      f.write(" error = WillAccessBoundFramebufferForRead();\n")
      f.write(" if (error != error::kNoError)\n")
      f.write(" return error;\n")

  def WriteValidUnitTest(self, func, f, test, *extras):
    """Writes a valid unit test for the service implementation."""
    if not func.GetInfo('expectation', True):
      test = self._remove_expected_call_re.sub('', test)
    name = func.name
    arg_strings = [
      arg.GetValidArg(func) \
      for arg in func.GetOriginalArgs() if not arg.IsConstant()
    ]
    gl_arg_strings = [
      arg.GetValidGLArg(func) \
      for arg in func.GetOriginalArgs()
    ]
    gl_func_name = func.GetGLTestFunctionName()
    varz = {
      'name': name,
      'gl_func_name': gl_func_name,
      'args': ", ".join(arg_strings),
      'gl_args': ", ".join(gl_arg_strings),
    }
    for extra in extras:
      varz.update(extra)
    # Substituted values may themselves contain %(...)s placeholders, so
    # substitute repeatedly until a fixed point is reached.
    old_test = ""
    while old_test != test:
      old_test = test
      test = test % varz
    f.write(test % varz)

  def WriteInvalidUnitTest(self, func, f, test, *extras):
    """Writes an invalid unit test for the service implementation."""
    if func.IsES3():
      return
    for invalid_arg_index, invalid_arg in enumerate(func.GetOriginalArgs()):
      # Service implementation does not test constants, as they are not part of
      # the call in the service side.
      if invalid_arg.IsConstant():
        continue
      num_invalid_values = invalid_arg.GetNumInvalidValues(func)
      for value_index in range(0, num_invalid_values):
        arg_strings = []
        parse_result = "kNoError"
        gl_error = None
        for arg in func.GetOriginalArgs():
          if arg.IsConstant():
            continue
          if invalid_arg is arg:
            (arg_string, parse_result, gl_error) = arg.GetInvalidArg(
              value_index)
          else:
            arg_string = arg.GetValidArg(func)
          arg_strings.append(arg_string)
        gl_arg_strings = []
        # The GL function must not be reached; '_' matches any argument.
        for arg in func.GetOriginalArgs():
          gl_arg_strings.append("_")
        gl_func_name = func.GetGLTestFunctionName()
        gl_error_test = ''
        if gl_error is not None:
          gl_error_test = '\n EXPECT_EQ(%s, GetGLError());' % gl_error
        varz = {
          'name': func.name,
          'arg_index': invalid_arg_index,
          'value_index': value_index,
          'gl_func_name': gl_func_name,
          'args': ", ".join(arg_strings),
          'all_but_last_args': ", ".join(arg_strings[:-1]),
          'gl_args': ", ".join(gl_arg_strings),
          'parse_result': parse_result,
          'gl_error_test': gl_error_test,
        }
        for extra in extras:
          varz.update(extra)
        f.write(test % varz)

  def WriteServiceUnitTest(self, func, f, *extras):
    """Writes the service unit test for a command."""
    if func.name == 'Enable':
      valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  SetupExpectationsForEnableDisable(%(gl_args)s, true);
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);"""
    elif func.name == 'Disable':
      valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  SetupExpectationsForEnableDisable(%(gl_args)s, false);
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);"""
    else:
      valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);"""
    valid_test += """
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    self.WriteValidUnitTest(func, f, valid_test, *extras)
    if not func.IsES3():
      invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
      self.WriteInvalidUnitTest(func, f, invalid_test, *extras)

  def WriteImmediateServiceUnitTest(self, func, f, *extras):
    """Writes the service unit test for an immediate command."""
    pass

  def WriteImmediateValidationCode(self, func, f):
    """Writes the validation code for an immediate version of a command."""
    pass

  def WriteBucketServiceUnitTest(self, func, f, *extras):
    """Writes the service unit test for a bucket command."""
    pass

  def WriteGLES2ImplementationDeclaration(self, func, f):
    """Writes the GLES2 Implementation declaration."""
    f.write("%s %s(%s) override;\n" %
            (func.return_type, func.original_name,
             func.MakeTypedOriginalArgString("", add_default = True)))
    f.write("\n")

  def WriteGLES2CLibImplementation(self, func, f):
    """Writes the C entry point that forwards to the current GL context."""
    f.write("%s GL_APIENTRY GLES2%s(%s) {\n" %
            (func.return_type, func.name,
             func.MakeTypedOriginalArgString("")))
    result_string = "return "
    if func.return_type == "void":
      result_string = ""
    f.write(" %sgles2::GetGLContext()->%s(%s);\n" %
            (result_string, func.original_name,
             func.MakeOriginalArgString("")))
    f.write("}\n")

  def WriteGLES2Header(self, func, f):
    """Writes a re-write macro for GLES."""
    f.write("#define gl%s GLES2_GET_FUN(%s)\n" %(func.name, func.name))

  def WriteClientGLCallLog(self, func, f):
    """Writes a logging macro for the client side code."""
    comma = ""
    if len(func.GetOriginalArgs()):
      comma = " << "
    f.write(
      ' GPU_CLIENT_LOG("[" << GetLogPrefix() << "] %s("%s%s << ")");\n' %
      (func.prefixed_name, comma, func.MakeLogArgString()))

  def WriteClientGLReturnLog(self, func, f):
    """Writes the return value logging code."""
    if func.return_type != "void":
      f.write(' GPU_CLIENT_LOG("return:" << result)\n')

  def WriteGLES2ImplementationHeader(self, func, f):
    """Writes the GLES2 Implementation header declaration."""
    self.WriteGLES2ImplementationDeclaration(func, f)

  def WriteGLES2TraceImplementationHeader(self, func, f):
    """Writes the GLES2 Trace Implementation header."""
    f.write("%s %s(%s) override;\n" %
            (func.return_type, func.original_name,
             func.MakeTypedOriginalArgString("")))

  def WriteGLES2TraceImplementation(self, func, f):
    """Writes the GLES2 Trace Implementation."""
    f.write("%s GLES2TraceImplementation::%s(%s) {\n" %
            (func.return_type, func.original_name,
             func.MakeTypedOriginalArgString("")))
    result_string = "return "
    if func.return_type == "void":
      result_string = ""
    f.write(' TRACE_EVENT_BINARY_EFFICIENT0("gpu", "GLES2Trace::%s");\n' %
            func.name)
    f.write(" %sgl_->%s(%s);\n" %
            (result_string, func.name, func.MakeOriginalArgString("")))
    f.write("}\n")
    f.write("\n")

  def WriteGLES2Implementation(self, func, f):
    """Writes the GLES2 Implementation."""
    impl_func = func.GetInfo('impl_func', True)
    if func.can_auto_generate and impl_func:
      f.write("%s %sImplementation::%s(%s) {\n" %
              (func.return_type, _prefix, func.original_name,
               func.MakeTypedOriginalArgString("")))
      f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
      self.WriteClientGLCallLog(func, f)
      func.WriteDestinationInitalizationValidation(f)
      for arg in func.GetOriginalArgs():
        arg.WriteClientSideValidationCode(f, func)
      f.write(" helper_->%s(%s);\n" %
              (func.name, func.MakeHelperArgString("")))
      if _prefix != 'WebGPU':
        f.write(" CheckGLError();\n")
      self.WriteClientGLReturnLog(func, f)
      f.write("}\n")
      f.write("\n")

  def WriteGLES2InterfaceHeader(self, func, f):
    """Writes the GLES2 Interface."""
    f.write("virtual %s %s(%s) = 0;\n" %
            (func.return_type, func.original_name,
             func.MakeTypedOriginalArgString("", add_default = True)))

  def WriteGLES2InterfaceStub(self, func, f):
    """Writes the GLES2 Interface stub declaration."""
    f.write("%s %s(%s) override;\n" %
            (func.return_type, func.original_name,
             func.MakeTypedOriginalArgString("")))

  def WriteGLES2InterfaceStubImpl(self, func, f):
    """Writes the GLES2 Interface stub definition."""
    args = func.GetOriginalArgs()
    arg_string = ", ".join(
      ["%s /* %s */" % (arg.type, arg.name) for arg in args])
    f.write("%s %sInterfaceStub::%s(%s) {\n" %
            (func.return_type, _prefix, func.original_name, arg_string))
    if func.return_type != "void":
      f.write(" return 0;\n")
    f.write("}\n")

  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Writes the GLES2 Implementation unit test."""
    client_test = func.GetInfo('client_test', True)
    if func.can_auto_generate and client_test:
      code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
  struct Cmds {
    cmds::%(name)s cmd;
  };
  Cmds expected;
  expected.cmd.Init(%(cmd_args)s);
  gl_->%(name)s(%(args)s);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
      cmd_arg_strings = [
        arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
      ]
      gl_arg_strings = [
        arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
      ]
      f.write(code % {
        'prefix' : _prefix,
        'name': func.name,
        'args': ", ".join(gl_arg_strings),
        'cmd_args': ", ".join(cmd_arg_strings),
      })
      # Test constants for invalid values, as they are not tested by the
      # service.
      constants = [arg for arg in func.GetOriginalArgs() if arg.IsConstant()]
      if constants:
        code = """
TEST_F(%(prefix)sImplementationTest,
       %(name)sInvalidConstantArg%(invalid_index)d) {
  gl_->%(name)s(%(args)s);
  EXPECT_TRUE(NoCommandsWritten());
  EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
        for invalid_arg in constants:
          gl_arg_strings = []
          invalid = invalid_arg.GetInvalidArg(func)
          for arg in func.GetOriginalArgs():
            if arg is invalid_arg:
              gl_arg_strings.append(invalid[0])
            else:
              gl_arg_strings.append(arg.GetValidClientSideArg(func))
          f.write(code % {
            'prefix' : _prefix,
            'name': func.name,
            'invalid_index': func.GetOriginalArgs().index(invalid_arg),
            'args': ", ".join(gl_arg_strings),
            'gl_error': invalid[2],
          })

  def WriteDestinationInitalizationValidation(self, func, f):
    """Writes the client side destination initialization validation."""
    for arg in func.GetOriginalArgs():
      arg.WriteDestinationInitalizationValidation(f, func)

  def WriteTraceEvent(self, func, f):
    """Writes a TRACE_EVENT0 for the client-side implementation."""
    f.write(' TRACE_EVENT0("gpu", "%sImplementation::%s");\n' %
            (_prefix, func.original_name))

  def WriteImmediateCmdComputeSize(self, _func, f):
    """Writes the size computation code for the immediate version of a cmd."""
    f.write(" static uint32_t ComputeSize(uint32_t size_in_bytes) {\n")
    f.write(" return static_cast<uint32_t>(\n")
    f.write(" sizeof(ValueType) + // NOLINT\n")
    f.write(" RoundSizeToMultipleOfEntries(size_in_bytes));\n")
    f.write(" }\n")
    f.write("\n")

  def WriteImmediateCmdSetHeader(self, _func, f):
    """Writes the SetHeader function for the immediate version of a cmd."""
    f.write(" void SetHeader(uint32_t size_in_bytes) {\n")
    f.write(" header.SetCmdByTotalSize<ValueType>(size_in_bytes);\n")
    f.write(" }\n")
    f.write("\n")

  def WriteImmediateCmdInit(self, func, f):
    """Writes the Init function for the immediate version of a command."""
    raise NotImplementedError(func.name)

  def WriteImmediateCmdSet(self, func, f):
    """Writes the Set function for the immediate version of a command."""
    raise NotImplementedError(func.name)

  def WriteCmdHelper(self, func, f):
    """Writes the cmd helper definition for a cmd."""
    code = """ void %(name)s(%(typed_args)s) {
    %(lp)s::cmds::%(name)s* c = GetCmdSpace<%(lp)s::cmds::%(name)s>();
    if (c) {
      c->Init(%(args)s);
    }
  }
"""
    f.write(code % {
      "lp" : _lower_prefix,
      "name": func.name,
      "typed_args": func.MakeTypedCmdArgString(""),
      "args": func.MakeCmdArgString(""),
    })

  def WriteImmediateCmdHelper(self, func, f):
    """Writes the cmd helper definition for the immediate version of a cmd."""
    code = """ void %(name)s(%(typed_args)s) {
    const uint32_t s = 0;
    %(lp)s::cmds::%(name)s* c =
        GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(s);
    if (c) {
      c->Init(%(args)s);
    }
  }
"""
    f.write(code % {
      "lp" : _lower_prefix,
      "name": func.name,
      "typed_args": func.MakeTypedCmdArgString(""),
      "args": func.MakeCmdArgString(""),
    })
class StateSetHandler(TypeHandler):
  """Handler for commands that simply set state.

  The state members affected by a command are looked up in the module-level
  _STATE_INFO table via the function's 'state' info key; each original
  argument maps positionally onto one state member.
  """
  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler: emits validation + cached state update."""
    state_name = func.GetInfo('state')
    state = _STATE_INFO[state_name]
    states = state['states']
    args = func.GetOriginalArgs()
    # First pass: emit range/NaN validation for each argument before any
    # state is touched, so an invalid call leaves state unchanged.
    for ndx,item in enumerate(states):
      code = []
      if 'range_checks' in item:
        for range_check in item['range_checks']:
          code.append("%s %s" % (args[ndx].name, range_check['check']))
      if 'nan_check' in item:
        # Drivers might generate an INVALID_VALUE error when a value is set
        # to NaN. This is allowed behavior under GLES 3.0 section 2.1.1 or
        # OpenGL 4.5 section 2.3.4.1 - providing NaN allows undefined results.
        # Make this behavior consistent within Chromium, and avoid leaking GL
        # errors by generating the error in the command buffer instead of
        # letting the GL driver generate it.
        code.append("std::isnan(%s)" % args[ndx].name)
      if len(code):
        f.write(" if (%s) {\n" % " ||\n ".join(code))
        f.write(
          ' LOCAL_SET_GL_ERROR(GL_INVALID_VALUE,'
          ' "%s", "%s out of range");\n' %
          (func.name, args[ndx].name))
        f.write(" return error::kNoError;\n")
        f.write(" }\n")
    # Second pass: only touch GL when at least one member actually changes.
    code = []
    for ndx,item in enumerate(states):
      code.append("state_.%s != %s" % (item['name'], args[ndx].name))
    f.write(" if (%s) {\n" % " ||\n ".join(code))
    for ndx,item in enumerate(states):
      f.write(" state_.%s = %s;\n" % (item['name'], args[ndx].name))
    if 'on_change' in state:
      f.write(" %s\n" % state['on_change'])
    if not func.GetInfo("no_gl"):
      for ndx,item in enumerate(states):
        if item.get('cached', False):
          f.write(" state_.%s = %s;\n" %
                  (CachedStateName(item), args[ndx].name))
      f.write(" %s(%s);\n" %
              (func.GetGLFunctionName(), func.MakeOriginalArgString("")))
    f.write(" }\n")
  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler: adds range-check and NaN-check tests."""
    TypeHandler.WriteServiceUnitTest(self, func, f, *extras)
    state_name = func.GetInfo('state')
    state = _STATE_INFO[state_name]
    states = state['states']
    for ndx,item in enumerate(states):
      if 'range_checks' in item:
        # One test per range check, substituting the out-of-range value for
        # the argument under test.
        for check_ndx, range_check in enumerate(item['range_checks']):
          valid_test = """
TEST_P(%(test_name)s, %(name)sInvalidValue%(ndx)d_%(check_ndx)d) {
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
          name = func.name
          arg_strings = [
            arg.GetValidArg(func) \
            for arg in func.GetOriginalArgs() if not arg.IsConstant()
          ]
          arg_strings[ndx] = range_check['test_value']
          varz = {
            'name': name,
            'ndx': ndx,
            'check_ndx': check_ndx,
            'args': ", ".join(arg_strings),
          }
          for extra in extras:
            varz.update(extra)
          f.write(valid_test % varz)
      if 'nan_check' in item:
        valid_test = """
TEST_P(%(test_name)s, %(name)sNaNValue%(ndx)d) {
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
        name = func.name
        arg_strings = [
          arg.GetValidArg(func) \
          for arg in func.GetOriginalArgs() if not arg.IsConstant()
        ]
        arg_strings[ndx] = 'nanf("")'
        varz = {
          'name': name,
          'ndx': ndx,
          'args': ", ".join(arg_strings),
        }
        for extra in extras:
          varz.update(extra)
        f.write(valid_test % varz)
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler: state-set cmds have no immediate form."""
    pass
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler: state-set cmds have no immediate form."""
    pass
class StateSetRGBAlphaHandler(TypeHandler):
  """Handler for commands that simply set state that have rgb/alpha."""
  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler: each arg feeds both RGB and alpha slots.

    The state table has twice as many entries as the command has arguments;
    `ndx % num_args` maps the second half of the states back onto the same
    argument list.
    """
    state_name = func.GetInfo('state')
    state = _STATE_INFO[state_name]
    states = state['states']
    args = func.GetOriginalArgs()
    num_args = len(args)
    code = []
    for ndx,item in enumerate(states):
      code.append("state_.%s != %s" % (item['name'], args[ndx % num_args].name))
    # Skip the GL call entirely when nothing actually changes.
    f.write(" if (%s) {\n" % " ||\n ".join(code))
    for ndx, item in enumerate(states):
      f.write(" state_.%s = %s;\n" %
              (item['name'], args[ndx % num_args].name))
    if 'on_change' in state:
      f.write(" %s\n" % state['on_change'])
    if not func.GetInfo("no_gl"):
      f.write(" %s(%s);\n" %
              (func.GetGLFunctionName(), func.MakeOriginalArgString("")))
    f.write(" }\n")
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler: no immediate form for these commands."""
    pass
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler: no immediate form for these commands."""
    pass
class StateSetFrontBackSeparateHandler(TypeHandler):
  """Handler for commands that simply set state that have front/back."""
  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler.

    The first argument selects the face (GL_FRONT, GL_BACK or
    GL_FRONT_AND_BACK); the remaining arguments feed the per-face state
    entries.  Emits per-face change detection first, then conditional
    updates and the GL call when anything changed.
    """
    state_name = func.GetInfo('state')
    state = _STATE_INFO[state_name]
    states = state['states']
    args = func.GetOriginalArgs()
    face = args[0].name
    num_args = len(args)
    f.write("  bool changed = false;\n")
    # State entries come in two groups of (num_args - 1): front-face
    # copies first, then back-face copies.
    for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
      f.write("  if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
              (face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
      code = []
      for ndx, item in enumerate(group):
        code.append("state_.%s != %s" % (item['name'], args[ndx + 1].name))
      f.write("    changed |= %s;\n" % " ||\n        ".join(code))
      f.write("  }\n")
    f.write("  if (changed) {\n")
    for group_ndx, group in enumerate(Grouper(num_args - 1, states)):
      f.write("    if (%s == %s || %s == GL_FRONT_AND_BACK) {\n" %
              (face, ('GL_FRONT', 'GL_BACK')[group_ndx], face))
      for ndx, item in enumerate(group):
        f.write("      state_.%s = %s;\n" %
                (item['name'], args[ndx + 1].name))
      f.write("    }\n")
    if 'on_change' in state:
      f.write("    %s\n" % state['on_change'])
    if not func.GetInfo("no_gl"):
      f.write("    %s(%s);\n" %
              (func.GetGLFunctionName(), func.MakeOriginalArgString("")))
    f.write("  }\n")
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
class StateSetFrontBackHandler(TypeHandler):
  """Handler for commands that simply set state that set both front/back."""

  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler.

    Emits a guarded block that updates both the front and the back copy
    of each tracked state member from the same command arguments, calling
    through to GL only when something actually changed.
    """
    state = _STATE_INFO[func.GetInfo('state')]
    args = func.GetOriginalArgs()
    # Each group of len(args) state entries is fed positionally by the
    # same argument list, so front and back copies share one source.
    pairs = []
    for group in Grouper(len(args), state['states']):
      pairs.extend(
          (item['name'], args[ndx].name) for ndx, item in enumerate(group))
    comparisons = ["state_.%s != %s" % pair for pair in pairs]
    f.write("  if (%s) {\n" % " ||\n      ".join(comparisons))
    for member, value in pairs:
      f.write("    state_.%s = %s;\n" % (member, value))
    if 'on_change' in state:
      f.write("    %s\n" % state['on_change'])
    if not func.GetInfo("no_gl"):
      f.write("    %s(%s);\n" %
              (func.GetGLFunctionName(), func.MakeOriginalArgString("")))
    f.write("  }\n")

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
class StateSetNamedParameter(TypeHandler):
  """Handler for commands that set a state chosen with an enum parameter."""

  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler.

    Emits a switch over the enum argument; each case updates the matching
    tracked state member and forwards to GL only when the value changed.
    """
    info = _STATE_INFO[func.GetInfo('state')]
    args = func.GetOriginalArgs()
    assert len(args) == 2
    enum_arg, value_arg = args
    f.write("  switch (%s) {\n" % enum_arg.name)
    # Loop variable is named 'entry' (not 'state') so the state-info dict
    # is not shadowed.
    for entry in info['states']:
      f.write("    case %s:\n" % entry['enum'])
      f.write("      if (state_.%s != %s) {\n" %
              (entry['name'], value_arg.name))
      f.write("        state_.%s = %s;\n" % (entry['name'], value_arg.name))
      if not func.GetInfo("no_gl"):
        operation = "        %s(%s);\n" % \
            (func.GetGLFunctionName(), func.MakeOriginalArgString(""))
        f.write(GuardState(entry, operation, "feature_info_"))
      f.write("      }\n")
      f.write("      break;\n")
    f.write("    default:\n")
    f.write("      NOTREACHED();\n")
    f.write("  }\n")

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
class CustomHandler(TypeHandler):
  """Handler for commands that are auto-generated but require minor tweaks."""
  def InitFunction(self, func):
    """Overridden from TypeHandler."""
    if (func.name.startswith('CompressedTex') and func.name.endswith('Bucket')):
      # Remove imageSize argument, take the size from the bucket instead.
      func.cmd_args = [arg for arg in func.cmd_args if arg.name != 'imageSize']
      func.AddCmdArg(Argument('bucket_id', 'GLuint'))
    else:
      TypeHandler.InitFunction(self, func)
  def WriteServiceImplementation(self, func, f):
    """Overridden from TypeHandler.

    Only ES31 entry points get a generated implementation here.
    NOTE(review): non-ES31 variants are presumably hand-written
    elsewhere -- confirm.
    """
    if func.IsES31():
      TypeHandler.WriteServiceImplementation(self, func, f)
  def WriteImmediateServiceImplementation(self, func, f):
    """Overridden from TypeHandler; ES31-only, as above."""
    if func.IsES31():
      TypeHandler.WriteImmediateServiceImplementation(self, func, f)
  def WriteBucketServiceImplementation(self, func, f):
    """Overridden from TypeHandler; ES31-only, as above."""
    if func.IsES31():
      TypeHandler.WriteBucketServiceImplementation(self, func, f)
  def WritePassthroughServiceImplementation(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
  def WritePassthroughImmediateServiceImplementation(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
  def WritePassthroughBucketServiceImplementation(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
  def WriteImmediateServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
  def WriteImmediateCmdGetTotalSize(self, _func, f):
    """Overridden from TypeHandler.

    Emits a placeholder size computation; the generated code carries a
    WARNING so a human fills in the correct size.
    """
    f.write(
        "  uint32_t total_size = 0;  // WARNING: compute correct size.\n")
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler.

    Emits Init(), which writes the header (using the placeholder
    total_size above) and then each command argument.
    """
    f.write("  void Init(%s) {\n" % func.MakeTypedCmdArgString("_"))
    self.WriteImmediateCmdGetTotalSize(func, f)
    f.write("    SetHeader(total_size);\n")
    args = func.GetCmdArgs()
    for arg in args:
      arg.WriteSetCode(f, 4, '_%s' % arg.name)
    f.write("  }\n")
    f.write("\n")
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler.

    Emits Set(), which forwards to Init() in the command buffer and
    returns the address past this command.
    """
    copy_args = func.MakeCmdArgString("_", False)
    f.write("  void* Set(void* cmd%s) {\n" %
            func.MakeTypedCmdArgString("_", True))
    self.WriteImmediateCmdGetTotalSize(func, f)
    f.write("    static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
    f.write("    return NextImmediateCmdAddressTotalSize<ValueType>("
            "cmd, total_size);\n")
    f.write("  }\n")
    f.write("\n")
class NoCommandHandler(CustomHandler):
  """Handler for functions that don't use commands."""

  def WriteGLES2Implementation(self, func, f):
    """Overridden; no client implementation is generated."""
    pass

  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Overridden; no client unit test is generated."""
    pass
class DataHandler(TypeHandler):
  """
  Handler for glBufferData, glBufferSubData, glTex{Sub}Image*D.
  """
  def WriteGetDataSizeCode(self, func, arg, f):
    """Overridden from TypeHandler.

    Emits code computing the byte size of the data referenced by |arg|,
    or a commented-out placeholder when no size expression is known.
    """
    # TODO: Move this data to _FUNCTION_INFO?
    name = func.name
    if name.endswith("Immediate"):
      name = name[0:-9]
    if arg.name in func.size_args:
      size = func.size_args[arg.name]
      f.write("  uint32_t %s = %s;\n" % (arg.GetReservedSizeId(), size))
    else:
      f.write("// uint32_t %s = 0;  // WARNING: compute correct size.\n" % (
          arg.GetReservedSizeId()))

  def WriteImmediateCmdGetTotalSize(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateFormatTest(self, func, f):
    """Overridden from TypeHandler."""
    # TODO: Remove this exception.
    return

  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  # BUG FIX: WriteImmediateCmdInit and WriteImmediateCmdSet were each
  # defined twice in this class; the later no-op definitions silently
  # shadowed earlier full implementations (dead code).  Only the
  # effective (no-op) definitions are kept, so behavior is unchanged.
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
class BindHandler(TypeHandler):
  """Handler for glBind___ type functions."""
  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits a valid-args test (plus a new-id test when the function has a
    'gen_func', i.e. binding an unknown id generates a resource) and the
    standard invalid-args tests.
    """
    if len(func.GetOriginalArgs()) == 1:
      # Single-argument bind: the only argument is the resource id.
      valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
      if func.GetInfo("gen_func"):
        valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(kNewServiceId));
  EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
      .WillOnce(SetArgPointee<1>(kNewServiceId));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(kNewClientId);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
  EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != nullptr);
}
"""
      self.WriteValidUnitTest(func, f, valid_test, {
          'resource_type': func.GetOriginalArgs()[0].resource_type,
          'gl_gen_func_name': func.GetInfo("gen_func"),
      }, *extras)
    else:
      valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
      if func.GetInfo("gen_func"):
        valid_test += """
TEST_P(%(test_name)s, %(name)sValidArgsNewId) {
  EXPECT_CALL(*gl_,
              %(gl_func_name)s(%(gl_args_with_new_id)s));
  EXPECT_CALL(*gl_, %(gl_gen_func_name)s(1, _))
      .WillOnce(SetArgPointee<1>(kNewServiceId));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args_with_new_id)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
  EXPECT_TRUE(Get%(resource_type)s(kNewClientId) != nullptr);
}
"""
      # Substitute the resource-id argument with the new client/service id
      # pair; all other arguments keep their standard valid values.
      gl_args_with_new_id = []
      args_with_new_id = []
      for arg in func.GetOriginalArgs():
        if hasattr(arg, 'resource_type'):
          gl_args_with_new_id.append('kNewServiceId')
          args_with_new_id.append('kNewClientId')
        else:
          gl_args_with_new_id.append(arg.GetValidGLArg(func))
          args_with_new_id.append(arg.GetValidArg(func))
      self.WriteValidUnitTest(func, f, valid_test, {
          'args_with_new_id': ", ".join(args_with_new_id),
          'gl_args_with_new_id': ", ".join(gl_args_with_new_id),
          'resource_type': func.GetResourceIdArg().resource_type,
          'gl_gen_func_name': func.GetInfo("gen_func"),
      }, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
    self.WriteInvalidUnitTest(func, f, invalid_test, *extras)
  def WriteGLES2Implementation(self, func, f):
    """Writes the GLES2 Implemention.

    Emits the client-side implementation, which rejects reserved ids with
    GL_INVALID_OPERATION before delegating to the Helper.
    """
    impl_func = func.GetInfo('impl_func', True)
    if func.can_auto_generate and impl_func:
      f.write("%s %sImplementation::%s(%s) {\n" %
              (func.return_type, _prefix, func.original_name,
               func.MakeTypedOriginalArgString("")))
      f.write("  GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
      func.WriteDestinationInitalizationValidation(f)
      self.WriteClientGLCallLog(func, f)
      for arg in func.GetOriginalArgs():
        arg.WriteClientSideValidationCode(f, func)
      code = """  if (Is%(type)sReservedId(%(id)s)) {
    SetGLError(GL_INVALID_OPERATION, "%(name)s\", \"%(id)s reserved id");
    return;
  }
  %(name)sHelper(%(arg_string)s);
  CheckGLError();
}
"""
      name_arg = func.GetResourceIdArg()
      f.write(code % {
          'name': func.name,
          'arg_string': func.MakeOriginalArgString(""),
          'id': name_arg.name,
          'type': name_arg.resource_type,
          'lc_type': name_arg.resource_type.lower(),
        })
  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Overridden from TypeHandler.

    Emits a client unit test checking the serialized command bytes; for
    non-ES3 functions it additionally checks a repeated bind writes no
    command (client-side bind caching).
    """
    client_test = func.GetInfo('client_test', True)
    if not client_test:
      return
    code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
  struct Cmds {
    cmds::%(name)s cmd;
  };
  Cmds expected;
  expected.cmd.Init(%(cmd_args)s);
  gl_->%(name)s(%(args)s);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));"""
    if not func.IsES3():
      code += """
  ClearCommands();
  gl_->%(name)s(%(args)s);
  EXPECT_TRUE(NoCommandsWritten());"""
    code += """
}
"""
    cmd_arg_strings = [
      arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()
    ]
    gl_arg_strings = [
      arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()
    ]
    f.write(code % {
          'prefix' : _prefix,
          'name': func.name,
          'args': ", ".join(gl_arg_strings),
          'cmd_args': ", ".join(cmd_arg_strings),
        })
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
class GENnHandler(TypeHandler):
  """Handler for glGen___ type functions."""
  def InitFunction(self, func):
    """Overridden from TypeHandler; no extra command args are needed."""
    pass
  def WriteGetDataSizeCode(self, func, arg, f):
    """Overridden from TypeHandler.

    Emits an overflow-checked computation of n * sizeof(GLuint).
    """
    code = """  uint32_t %(data_size)s;
  if (!base::CheckMul(n, sizeof(GLuint)).AssignIfValid(&%(data_size)s)) {
    return error::kOutOfBounds;
  }
""" % {'data_size': arg.GetReservedSizeId()}
    f.write(code)
  def WriteHandlerImplementation (self, func, f):
    """Overridden from TypeHandler."""
    raise NotImplementedError("GENn functions are immediate")
  def WriteImmediateHandlerImplementation(self, func, f):
    """Overridden from TypeHandler.

    Emits code that copies the caller-supplied id array into owned
    storage before validating (unique, non-null) and calling the Helper,
    so the shared-memory ids cannot be mutated mid-validation.
    """
    param_name = func.GetLastOriginalArg().name
    f.write("  auto %(name)s_copy = std::make_unique<GLuint[]>(n);\n"
            "  GLuint* %(name)s_safe = %(name)s_copy.get();\n"
            "  std::copy(%(name)s, %(name)s + n, %(name)s_safe);\n"
            "  if (!%(ns)sCheckUniqueAndNonNullIds(n, %(name)s_safe) ||\n"
            "      !%(func)sHelper(n, %(name)s_safe)) {\n"
            "    return error::kInvalidArguments;\n"
            "  }\n" % {'name': param_name,
                       'func': func.original_name,
                       'ns': _Namespace()})
  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the client implementation: allocate client ids (from a local
    IdAllocator when 'not_shared', otherwise from the shared id handler),
    call the Helper, and issue the immediate command.
    """
    log_code = ("""  GPU_CLIENT_LOG_CODE_BLOCK({
    for (GLsizei i = 0; i < n; ++i) {
      GPU_CLIENT_LOG("  " << i << ": " << %s[i]);
    }
  });""" % func.GetOriginalArgs()[1].name)
    args = {
        'log_code': log_code,
        'return_type': func.return_type,
        'prefix' : _prefix,
        'name': func.original_name,
        'typed_args': func.MakeTypedOriginalArgString(""),
        'args': func.MakeOriginalArgString(""),
        'resource_types': func.GetInfo('resource_types'),
        'count_name': func.GetOriginalArgs()[0].name,
      }
    f.write(
        "%(return_type)s %(prefix)sImplementation::"
        "%(name)s(%(typed_args)s) {\n" %
        args)
    func.WriteDestinationInitalizationValidation(f)
    self.WriteClientGLCallLog(func, f)
    for arg in func.GetOriginalArgs():
      arg.WriteClientSideValidationCode(f, func)
    not_shared = func.GetInfo('not_shared')
    if not_shared:
      alloc_code = ("""\
  IdAllocator* id_allocator = GetIdAllocator(IdNamespaces::k%s);
  for (GLsizei ii = 0; ii < n; ++ii)
    %s[ii] = id_allocator->AllocateID();""" %
        (func.GetInfo('resource_types'), func.GetOriginalArgs()[1].name))
    else:
      alloc_code = ("""\
  GetIdHandler(SharedIdNamespaces::k%(resource_types)s)->
      MakeIds(this, 0, %(args)s);""" % args)
    args['alloc_code'] = alloc_code
    code = """\
  GPU_CLIENT_SINGLE_THREAD_CHECK();
%(alloc_code)s
  %(name)sHelper(%(args)s);
  helper_->%(name)sImmediate(%(args)s);
"""
    if not not_shared:
      # Shared ids may be observed by other contexts; flush so
      # bind_generates_resource peers see them promptly.
      code += """\
  if (share_group_->bind_generates_resource())
    helper_->CommandBufferHelper::Flush();
"""
    code += """\
%(log_code)s
  CheckGLError();
}
"""
    f.write(code % args)
  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Overridden from TypeHandler.

    Emits a client unit test verifying the serialized command and the
    client ids assigned from the resource-type start id.
    """
    code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
  GLuint ids[2] = { 0, };
  struct Cmds {
    cmds::%(name)sImmediate gen;
    GLuint data[2];
  };
  Cmds expected;
  expected.gen.Init(base::size(ids), &ids[0]);
  expected.data[0] = k%(types)sStartId;
  expected.data[1] = k%(types)sStartId + 1;
  gl_->%(name)s(base::size(ids), &ids[0]);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_EQ(k%(types)sStartId, ids[0]);
  EXPECT_EQ(k%(types)sStartId + 1, ids[1]);
}
"""
    f.write(code % {
        'prefix' : _prefix,
        'name': func.name,
        'types': func.GetInfo('resource_types'),
    })
  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler."""
    raise NotImplementedError("GENn functions are immediate")
  def WriteImmediateServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits valid-args, duplicate/null-id, and invalid-args service tests.
    """
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(1, _))
      .WillOnce(SetArgPointee<1>(kNewServiceId));
  cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
  GLuint temp = kNewClientId;
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmd->Init(1, &temp);
  EXPECT_EQ(error::kNoError,
            ExecuteImmediateCmd(*cmd, sizeof(temp)));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
  EXPECT_TRUE(Get%(resource_name)s(kNewClientId) != nullptr);
}
"""
    self.WriteValidUnitTest(func, f, valid_test, {
        'resource_name': func.GetInfo('resource_type'),
    }, *extras)
    duplicate_id_test = """
TEST_P(%(test_name)s, %(name)sDuplicateOrNullIds) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
  cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
  GLuint temp[3] = {kNewClientId, kNewClientId + 1, kNewClientId};
  SpecializedSetup<cmds::%(name)s, 1>(true);
  cmd->Init(3, temp);
  EXPECT_EQ(error::kInvalidArguments,
            ExecuteImmediateCmd(*cmd, sizeof(temp)));
  EXPECT_TRUE(Get%(resource_name)s(kNewClientId) == nullptr);
  EXPECT_TRUE(Get%(resource_name)s(kNewClientId + 1) == nullptr);
  GLuint null_id[2] = {kNewClientId, 0};
  cmd->Init(2, null_id);
  EXPECT_EQ(error::kInvalidArguments,
            ExecuteImmediateCmd(*cmd, sizeof(temp)));
  EXPECT_TRUE(Get%(resource_name)s(kNewClientId) == nullptr);
}
"""
    self.WriteValidUnitTest(func, f, duplicate_id_test, {
        'resource_name': func.GetInfo('resource_type'),
    }, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(_, _)).Times(0);
  cmds::%(name)s* cmd = GetImmediateAs<cmds::%(name)s>();
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmd->Init(1, &client_%(resource_name)s_id_);
  EXPECT_EQ(error::kInvalidArguments,
            ExecuteImmediateCmd(*cmd, sizeof(&client_%(resource_name)s_id_)));
}
"""
    self.WriteValidUnitTest(func, f, invalid_test, {
        'resource_name': func.GetInfo('resource_type').lower(),
    }, *extras)
  def WriteImmediateCmdComputeSize(self, _func, f):
    """Overridden from TypeHandler.

    Emits ComputeDataSize/ComputeSize helpers for an array of _n GLuint
    ids following the command header.
    """
    f.write("  static uint32_t ComputeDataSize(GLsizei _n) {\n")
    f.write(
        "    return static_cast<uint32_t>(sizeof(GLuint) * _n);  // NOLINT\n")
    f.write("  }\n")
    f.write("\n")
    f.write("  static uint32_t ComputeSize(GLsizei _n) {\n")
    f.write("    return static_cast<uint32_t>(\n")
    f.write("        sizeof(ValueType) + ComputeDataSize(_n));  // NOLINT\n")
    f.write("  }\n")
    f.write("\n")
  def WriteImmediateCmdSetHeader(self, _func, f):
    """Overridden from TypeHandler; emits the SetHeader helper."""
    f.write("  void SetHeader(GLsizei _n) {\n")
    f.write("    header.SetCmdByTotalSize<ValueType>(ComputeSize(_n));\n")
    f.write("  }\n")
    f.write("\n")
  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler.

    Emits Init(), which writes the header and fixed args, then copies the
    trailing id array into the immediate data area.
    """
    last_arg = func.GetLastOriginalArg()
    f.write("  void Init(%s, %s _%s) {\n" %
            (func.MakeTypedCmdArgString("_"),
             last_arg.type, last_arg.name))
    f.write("    SetHeader(_n);\n")
    args = func.GetCmdArgs()
    for arg in args:
      f.write("    %s = _%s;\n" % (arg.name, arg.name))
    f.write("    memcpy(ImmediateDataAddress(this),\n")
    f.write("           _%s, ComputeDataSize(_n));\n" % last_arg.name)
    f.write("  }\n")
    f.write("\n")
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler.

    Emits Set(), which initializes the command in place and returns the
    address past this command and its immediate data.
    """
    last_arg = func.GetLastOriginalArg()
    copy_args = func.MakeCmdArgString("_", False)
    f.write("  void* Set(void* cmd%s, %s _%s) {\n" %
            (func.MakeTypedCmdArgString("_", True),
             last_arg.type, last_arg.name))
    f.write("    static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
            (copy_args, last_arg.name))
    f.write("    const uint32_t size = ComputeSize(_n);\n")
    f.write("    return NextImmediateCmdAddressTotalSize<ValueType>("
            "cmd, size);\n")
    f.write("  }\n")
    f.write("\n")
  def WriteImmediateCmdHelper(self, func, f):
    """Overridden from TypeHandler; emits the command-buffer helper."""
    code = """  void %(name)s(%(typed_args)s) {
    const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize(n);
    %(lp)s::cmds::%(name)s* c =
        GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
    if (c) {
      c->Init(%(args)s);
    }
  }
"""
    f.write(code % {
          "lp" : _lower_prefix,
          "name": func.name,
          "typed_args": func.MakeTypedOriginalArgString(""),
          "args": func.MakeOriginalArgString(""),
        })
  def WriteImmediateFormatTest(self, func, f):
    """Overridden from TypeHandler.

    Emits a format test checking header, size rounding and that the id
    payload lands in the immediate data area.
    """
    f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
    f.write("  static GLuint ids[] = { 12, 23, 34, };\n")
    f.write("  cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
            (func.name, func.name))
    f.write("  void* next_cmd = cmd.Set(\n")
    f.write("      &cmd, static_cast<GLsizei>(base::size(ids)), ids);\n")
    f.write("  EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
            func.name)
    f.write("            cmd.header.command);\n")
    f.write("  EXPECT_EQ(sizeof(cmd) +\n")
    f.write("            RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
    f.write("            cmd.header.size * 4u);\n")
    f.write("  EXPECT_EQ(static_cast<GLsizei>(base::size(ids)), cmd.n);\n");
    f.write("  CheckBytesWrittenMatchesExpectedSize(\n")
    f.write("      next_cmd, sizeof(cmd) +\n")
    f.write("      RoundSizeToMultipleOfEntries(base::size(ids) * 4u));\n")
    f.write("  EXPECT_EQ(0, memcmp(ids, ImmediateDataAddress(&cmd),\n")
    f.write("                      sizeof(ids)));\n")
    f.write("}\n")
    f.write("\n")
class CreateHandler(TypeHandler):
  """Handler for glCreate___ type functions."""

  def InitFunction(self, func):
    """Overridden from TypeHandler.

    Appends the client_id command argument through which the caller names
    the newly created resource.
    """
    func.AddCmdArg(Argument("client_id", 'uint32_t'))

  def __GetResourceType(self, func):
    # GLsync-returning functions manage Sync objects; otherwise the
    # resource type is derived from the function name ("Create<Type>").
    if func.return_type == "GLsync":
      return "Sync"
    else:
      return func.name[6:] # Create*

  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits a valid-args test (ES3 variants verify the client/service id
    mapping; others verify the resource exists) and invalid-args tests.
    """
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  %(id_type_cast)sEXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
      .WillOnce(Return(%(const_service_id)s));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s%(comma)skNewClientId);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());"""
    if func.IsES3():
      valid_test += """
  %(return_type)s service_id = 0;
  EXPECT_TRUE(Get%(resource_type)sServiceId(kNewClientId, &service_id));
  EXPECT_EQ(%(const_service_id)s, service_id);
}
"""
    else:
      valid_test += """
  EXPECT_TRUE(Get%(resource_type)s(kNewClientId));
}
"""
    # client_id is appended after the original args, so a separating comma
    # is needed only when at least one non-constant arg precedes it.
    comma = ""
    cmd_arg_count = 0
    for arg in func.GetOriginalArgs():
      if not arg.IsConstant():
        cmd_arg_count += 1
    if cmd_arg_count:
      comma = ", "
    if func.return_type == 'GLsync':
      id_type_cast = ("const GLsync kNewServiceIdGLuint = reinterpret_cast"
                      "<GLsync>(kNewServiceId);\n  ")
      const_service_id = "kNewServiceIdGLuint"
    else:
      id_type_cast = ""
      const_service_id = "kNewServiceId"
    self.WriteValidUnitTest(func, f, valid_test, {
        'comma': comma,
        'resource_type': self.__GetResourceType(func),
        'return_type': func.return_type,
        'id_type_cast': id_type_cast,
        'const_service_id': const_service_id,
    }, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s%(comma)skNewClientId);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
    self.WriteInvalidUnitTest(func, f, invalid_test, {
        'comma': comma,
    }, *extras)

  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the service-side handler: reject an already-used client id,
    call the GL function, and record the client/service id mapping.
    """
    if func.IsES3():
      code = """  uint32_t client_id = c.client_id;
  %(return_type)s service_id = 0;
  if (group_->Get%(resource_name)sServiceId(client_id, &service_id)) {
    return error::kInvalidArguments;
  }
  service_id = %(gl_func_name)s(%(gl_args)s);
  if (service_id) {
    group_->Add%(resource_name)sId(client_id, service_id);
  }
"""
    else:
      code = """  uint32_t client_id = c.client_id;
  if (Get%(resource_name)s(client_id)) {
    return error::kInvalidArguments;
  }
  %(return_type)s service_id = %(gl_func_name)s(%(gl_args)s);
  if (service_id) {
    Create%(resource_name)s(client_id, service_id%(gl_args_with_comma)s);
  }
"""
    f.write(code % {
        'resource_name': self.__GetResourceType(func),
        'return_type': func.return_type,
        'gl_func_name': func.GetGLFunctionName(),
        'gl_args': func.MakeOriginalArgString(""),
        'gl_args_with_comma': func.MakeOriginalArgString("", True) })

  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the client implementation: allocate a client id, issue the
    command, and return the id (cast to GLsync when applicable).
    """
    f.write("%s %sImplementation::%s(%s) {\n" %
            (func.return_type, _prefix, func.original_name,
             func.MakeTypedOriginalArgString("")))
    f.write("  GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
    func.WriteDestinationInitalizationValidation(f)
    self.WriteClientGLCallLog(func, f)
    for arg in func.GetOriginalArgs():
      arg.WriteClientSideValidationCode(f, func)
    f.write("  GLuint client_id;\n")
    not_shared = func.GetInfo('not_shared')
    if not_shared:
      # BUG FIX: these two writes previously emitted no indentation and
      # no trailing newlines, collapsing the generated C++ onto one
      # unindented line.
      f.write("  IdAllocator* id_allocator = "
              "GetIdAllocator(IdNamespaces::k%s);\n" %
              func.GetInfo('resource_types'))
      f.write("  client_id = id_allocator->AllocateID();\n")
    else:
      if func.return_type == "GLsync":
        f.write(
          "  GetIdHandler(SharedIdNamespaces::kSyncs)->\n")
      else:
        f.write(
          "  GetIdHandler(SharedIdNamespaces::kProgramsAndShaders)->\n")
      f.write("      MakeIds(this, 0, 1, &client_id);\n")
    f.write("  helper_->%s(%s);\n" %
            (func.name, func.MakeCmdArgString("")))
    f.write('  GPU_CLIENT_LOG("returned " << client_id);\n')
    f.write("  CheckGLError();\n")
    if func.return_type == "GLsync":
      f.write("  return reinterpret_cast<GLsync>(client_id);\n")
    else:
      f.write("  return client_id;\n")
    f.write("}\n")
    f.write("\n")

  def WritePassthroughServiceImplementation(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
class DeleteHandler(TypeHandler):
  """Handler for glDelete___ single resource type functions."""

  def WriteServiceImplementation(self, func, f):
    """Overridden from TypeHandler.

    Only ES3/ES31 entry points get a generated implementation;
    HandleDeleteShader and HandleDeleteProgram are manually written.
    """
    if func.IsES3() or func.IsES31():
      TypeHandler.WriteServiceImplementation(self, func, f)

  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the client implementation, which ignores deletion of the zero
    id and otherwise delegates to the Helper.
    """
    f.write("%s %sImplementation::%s(%s) {\n" %
            (func.return_type, _prefix, func.original_name,
             func.MakeTypedOriginalArgString("")))
    f.write("  GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
    func.WriteDestinationInitalizationValidation(f)
    self.WriteClientGLCallLog(func, f)
    for arg in func.GetOriginalArgs():
      arg.WriteClientSideValidationCode(f, func)
    # BUG FIX: the emitted "return;" previously lacked a trailing newline,
    # so the following Helper call landed on the same generated line.
    f.write(
        "  if (%s == 0)\n    return;\n" % func.GetOriginalArgs()[-1].name)
    f.write("  %sHelper(%s);\n" %
            (func.original_name, func.GetOriginalArgs()[-1].name))
    f.write("  CheckGLError();\n")
    f.write("}\n")
    f.write("\n")

  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler."""
    assert len(func.GetOriginalArgs()) == 1
    arg = func.GetOriginalArgs()[0]
    f.write("  %sHelper(%s);\n" % (func.original_name, arg.name))

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler; intentionally emits nothing."""
    pass
class DELnHandler(TypeHandler):
"""Handler for glDelete___ type functions."""
  def WriteGetDataSizeCode(self, func, arg, f):
    """Overridden from TypeHandler.

    Emits an overflow-checked computation of n * sizeof(GLuint); the
    generated handler returns error::kOutOfBounds on overflow.
    """
    code = """  uint32_t %(data_size)s;
  if (!base::CheckMul(n, sizeof(GLuint)).AssignIfValid(&%(data_size)s)) {
    return error::kOutOfBounds;
  }
""" % {'data_size': arg.GetReservedSizeId()}
    f.write(code)
  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Overridden from TypeHandler.

    Emits a client unit test verifying the serialized delete command for
    two resource ids.
    """
    code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
  GLuint ids[2] = { k%(types)sStartId, k%(types)sStartId + 1 };
  struct Cmds {
    cmds::%(name)sImmediate del;
    GLuint data[2];
  };
  Cmds expected;
  expected.del.Init(base::size(ids), &ids[0]);
  expected.data[0] = k%(types)sStartId;
  expected.data[1] = k%(types)sStartId + 1;
  gl_->%(name)s(base::size(ids), &ids[0]);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
    f.write(code % {
        'prefix' : _prefix,
        'name': func.name,
        'types': func.GetInfo('resource_types'),
    })
  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits a valid-args test (resource is gone after deletion) and an
    invalid-client-id test (no error, deletion silently ignored).
    """
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  EXPECT_CALL(
      *gl_,
      %(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
      .Times(1);
  GetSharedMemoryAs<GLuint*>()[0] = client_%(resource_name)s_id_;
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
  EXPECT_TRUE(
      Get%(upper_resource_name)s(client_%(resource_name)s_id_) == nullptr);
}
"""
    self.WriteValidUnitTest(func, f, valid_test, {
        'resource_name': func.GetInfo('resource_type').lower(),
        'upper_resource_name': func.GetInfo('resource_type'),
    }, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
  GetSharedMemoryAs<GLuint*>()[0] = kInvalidClientId;
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
}
"""
    self.WriteValidUnitTest(func, f, invalid_test, *extras)
  def WriteImmediateServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Same coverage as WriteServiceUnitTest, but for the immediate-command
    form where the id array travels with the command.
    """
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  EXPECT_CALL(
      *gl_,
      %(gl_func_name)s(1, Pointee(kService%(upper_resource_name)sId)))
      .Times(1);
  cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmd.Init(1, &client_%(resource_name)s_id_);
  EXPECT_EQ(error::kNoError,
            ExecuteImmediateCmd(cmd, sizeof(client_%(resource_name)s_id_)));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
  EXPECT_TRUE(
      Get%(upper_resource_name)s(client_%(resource_name)s_id_) == nullptr);
}
"""
    self.WriteValidUnitTest(func, f, valid_test, {
        'resource_name': func.GetInfo('resource_type').lower(),
        'upper_resource_name': func.GetInfo('resource_type'),
    }, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
  cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
  SpecializedSetup<cmds::%(name)s, 0>(false);
  GLuint temp = kInvalidClientId;
  cmd.Init(1, &temp);
  EXPECT_EQ(error::kNoError,
            ExecuteImmediateCmd(cmd, sizeof(temp)));
}
"""
    self.WriteValidUnitTest(func, f, invalid_test, *extras)
  def WriteHandlerImplementation (self, func, f):
    """Overridden from TypeHandler."""
    # NOTE(review): this variant uses func.name while the immediate
    # variant uses func.original_name -- confirm this is intentional.
    f.write("  %sHelper(n, %s);\n" %
            (func.name, func.GetLastOriginalArg().name))
  def WriteImmediateHandlerImplementation (self, func, f):
    """Overridden from TypeHandler; delegates to the delete Helper."""
    f.write("  %sHelper(n, %s);\n" %
            (func.original_name, func.GetLastOriginalArg().name))
  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the client implementation: log and DCHECK the ids, run
    client-side validation, then delegate to the Helper.
    """
    impl_func = func.GetInfo('impl_func', True)
    if impl_func:
      args = {
        'return_type': func.return_type,
        'prefix' : _prefix,
        'name': func.original_name,
        'typed_args': func.MakeTypedOriginalArgString(""),
        'args': func.MakeOriginalArgString(""),
        'resource_type': func.GetInfo('resource_type').lower(),
        'count_name': func.GetOriginalArgs()[0].name,
      }
      f.write(
          "%(return_type)s %(prefix)sImplementation::"
          "%(name)s(%(typed_args)s) {\n" %
          args)
      f.write("  GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
      func.WriteDestinationInitalizationValidation(f)
      self.WriteClientGLCallLog(func, f)
      f.write("""  GPU_CLIENT_LOG_CODE_BLOCK({
    for (GLsizei i = 0; i < n; ++i) {
      GPU_CLIENT_LOG("  " << i << ": " << %s[i]);
    }
  });
""" % func.GetOriginalArgs()[1].name)
      f.write("""  GPU_CLIENT_DCHECK_CODE_BLOCK({
    for (GLsizei i = 0; i < n; ++i) {
      DCHECK(%s[i] != 0);
    }
  });
""" % func.GetOriginalArgs()[1].name)
      for arg in func.GetOriginalArgs():
        arg.WriteClientSideValidationCode(f, func)
      code = """  %(name)sHelper(%(args)s);
  CheckGLError();
}
"""
      f.write(code % args)
def WriteImmediateCmdComputeSize(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" static uint32_t ComputeDataSize(GLsizei _n) {\n")
f.write(
" return static_cast<uint32_t>(sizeof(GLuint) * _n); // NOLINT\n")
f.write(" }\n")
f.write("\n")
f.write(" static uint32_t ComputeSize(GLsizei _n) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(ValueType) + ComputeDataSize(_n)); // NOLINT\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" void SetHeader(GLsizei _n) {\n")
f.write(" header.SetCmdByTotalSize<ValueType>(ComputeSize(_n));\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
f.write(" void Init(%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_"),
last_arg.type, last_arg.name))
f.write(" SetHeader(_n);\n")
args = func.GetCmdArgs()
for arg in args:
f.write(" %s = _%s;\n" % (arg.name, arg.name))
f.write(" memcpy(ImmediateDataAddress(this),\n")
f.write(" _%s, ComputeDataSize(_n));\n" % last_arg.name)
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSet(self, func, f):
"""Overrriden from TypeHandler."""
last_arg = func.GetLastOriginalArg()
copy_args = func.MakeCmdArgString("_", False)
f.write(" void* Set(void* cmd%s, %s _%s) {\n" %
(func.MakeTypedCmdArgString("_", True),
last_arg.type, last_arg.name))
f.write(" static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
(copy_args, last_arg.name))
f.write(" const uint32_t size = ComputeSize(_n);\n")
f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
"cmd, size);\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdHelper(self, func, f):
"""Overrriden from TypeHandler."""
code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize(n);
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
f.write(code % {
"lp" : _lower_prefix,
"name": func.name,
"typed_args": func.MakeTypedOriginalArgString(""),
"args": func.MakeOriginalArgString(""),
})
def WriteImmediateFormatTest(self, func, f):
"""Overrriden from TypeHandler."""
f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
f.write(" static GLuint ids[] = { 12, 23, 34, };\n")
f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
(func.name, func.name))
f.write(" void* next_cmd = cmd.Set(\n")
f.write(" &cmd, static_cast<GLsizei>(base::size(ids)), ids);\n")
f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
func.name)
f.write(" cmd.header.command);\n")
f.write(" EXPECT_EQ(sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(cmd.n * 4u),\n")
f.write(" cmd.header.size * 4u);\n")
f.write(" EXPECT_EQ(static_cast<GLsizei>(base::size(ids)), cmd.n);\n");
f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
f.write(" next_cmd, sizeof(cmd) +\n")
f.write(" RoundSizeToMultipleOfEntries(base::size(ids) * 4u));\n")
f.write(" EXPECT_EQ(0, memcmp(ids, ImmediateDataAddress(&cmd),\n")
f.write(" sizeof(ids)));\n")
f.write("}\n")
f.write("\n")
class GETnHandler(TypeHandler):
  """Handler for GETn for glGetBooleanv, glGetFloatv, ... type functions.

  These commands return a variable number of values through shared memory;
  the generated service code validates the client-initialized Result
  struct, calls GL, and writes the number of results back.
  """

  def InitFunction(self, func):
    """Overridden from TypeHandler.

    Inserts bufsize/length arguments into the passthrough doer signature
    so the passthrough decoder can call the robust glGet* entry points.
    glGetSynciv already carries these arguments, so it is left untouched.
    """
    TypeHandler.InitFunction(self, func)
    if func.name == 'GetSynciv':
      return
    # Insert just before the final (result pointer) argument.
    # Fixed: removed a stray C-style trailing semicolon.
    arg_insert_point = len(func.passthrough_service_doer_args) - 1
    func.passthrough_service_doer_args.insert(
        arg_insert_point, Argument('length', 'GLsizei*'))
    func.passthrough_service_doer_args.insert(
        arg_insert_point, Argument('bufsize', 'GLsizei'))

  def NeedsDataTransferFunction(self, func):
    """Overridden from TypeHandler."""
    return False

  def WriteServiceImplementation(self, func, f):
    """Overridden from TypeHandler."""
    self.WriteServiceHandlerFunctionHeader(func, f)
    if func.IsES31():
      return
    last_arg = func.GetLastOriginalArg()
    # All except shm_id and shm_offset.
    all_but_last_args = func.GetCmdArgs()[:-2]
    for arg in all_but_last_args:
      arg.WriteGetCode(f)
    code = """  typedef cmds::%(func_name)s::Result Result;
  GLsizei num_values = 0;
  if (!GetNumValuesReturnedForGLGet(pname, &num_values)) {
    LOCAL_SET_GL_ERROR_INVALID_ENUM(":%(func_name)s", pname, "pname");
    return error::kNoError;
  }
  uint32_t checked_size = 0;
  if (!Result::ComputeSize(num_values).AssignIfValid(&checked_size)) {
    return error::kOutOfBounds;
  }
  Result* result = GetSharedMemoryAs<Result*>(
      c.%(last_arg_name)s_shm_id, c.%(last_arg_name)s_shm_offset,
      checked_size);
  %(last_arg_type)s %(last_arg_name)s = result ? result->GetData() : nullptr;
"""
    f.write(code % {
        'last_arg_type': last_arg.type,
        'last_arg_name': last_arg.name,
        'func_name': func.name,
    })
    func.WriteHandlerValidation(f)
    code = """  // Check that the client initialized the result.
  if (result->size != 0) {
    return error::kInvalidArguments;
  }
"""
    # Shadowed gets are answered from decoder-side state, so real GL errors
    # are neither copied beforehand nor peeked afterwards.
    shadowed = func.GetInfo('shadowed')
    if not shadowed:
      f.write('  LOCAL_COPY_REAL_GL_ERRORS_TO_WRAPPER("%s");\n' % func.name)
    f.write(code)
    func.WriteHandlerImplementation(f)
    if shadowed:
      code = """  result->SetNumResults(num_values);
  return error::kNoError;
}
"""
    else:
      code = """  GLenum error = LOCAL_PEEK_GL_ERROR("%(func_name)s");
  if (error == GL_NO_ERROR) {
    result->SetNumResults(num_values);
  }
  return error::kNoError;
}
"""
    f.write(code % {'func_name': func.name})

  def WritePassthroughServiceImplementation(self, func, f):
    """Overridden from TypeHandler."""
    self.WritePassthroughServiceFunctionHeader(func, f)
    last_arg = func.GetLastOriginalArg()
    # All except shm_id and shm_offset.
    all_but_last_args = func.GetCmdArgs()[:-2]
    for arg in all_but_last_args:
      arg.WriteGetCode(f)
    code = """  unsigned int buffer_size = 0;
  typedef cmds::%(func_name)s::Result Result;
  Result* result = GetSharedMemoryAndSizeAs<Result*>(
      c.%(last_arg_name)s_shm_id, c.%(last_arg_name)s_shm_offset,
      sizeof(Result), &buffer_size);
  %(last_arg_type)s %(last_arg_name)s = result ? result->GetData() : nullptr;
  if (%(last_arg_name)s == nullptr) {
    return error::kOutOfBounds;
  }
  GLsizei bufsize = Result::ComputeMaxResults(buffer_size);
  GLsizei written_values = 0;
  GLsizei* length = &written_values;
"""
    f.write(code % {
        'last_arg_type': last_arg.type,
        'last_arg_name': last_arg.name,
        'func_name': func.name,
    })
    self.WritePassthroughServiceFunctionDoerCall(func, f)
    code = """  if (written_values > bufsize) {
    return error::kOutOfBounds;
  }
  result->SetNumResults(written_values);
  return error::kNoError;
}
"""
    f.write(code % {'func_name': func.name})

  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler."""
    impl_func = func.GetInfo('impl_func', True)
    if impl_func:
      f.write("%s %sImplementation::%s(%s) {\n" %
              (func.return_type, _prefix, func.original_name,
               func.MakeTypedOriginalArgString("")))
      f.write("  GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
      func.WriteDestinationInitalizationValidation(f)
      self.WriteClientGLCallLog(func, f)
      for arg in func.GetOriginalArgs():
        arg.WriteClientSideValidationCode(f, func)
      all_but_last_args = func.GetOriginalArgs()[:-1]
      args = []
      has_length_arg = False
      # bufsize/length never travel over the wire: bufsize is implied by
      # the result buffer and length is filled in from the result count.
      for arg in all_but_last_args:
        if arg.type == 'GLsync':
          args.append('ToGLuint(%s)' % arg.name)
        elif arg.name.endswith('size') and arg.type == 'GLsizei':
          continue
        elif arg.name == 'length':
          has_length_arg = True
          continue
        else:
          args.append(arg.name)
      arg_string = ", ".join(args)
      all_arg_string = (
          ", ".join([
              "%s" % arg.name
              for arg in func.GetOriginalArgs() if not arg.IsConstant()]))
      self.WriteTraceEvent(func, f)
      code = """  if (%(func_name)sHelper(%(all_arg_string)s)) {
    return;
  }
  typedef cmds::%(func_name)s::Result Result;
  ScopedResultPtr<Result> result = GetResultAs<Result>();
  if (!result) {
    return;
  }
  result->SetNumResults(0);
  helper_->%(func_name)s(%(arg_string)s,
      GetResultShmId(), result.offset());
  WaitForCmd();
  result->CopyResult(%(last_arg_name)s);
  GPU_CLIENT_LOG_CODE_BLOCK({
    for (int32_t i = 0; i < result->GetNumResults(); ++i) {
      GPU_CLIENT_LOG("  " << i << ": " << result->GetData()[i]);
    }
  });"""
      if has_length_arg:
        code += """
  if (length) {
    *length = result->GetNumResults();
  }"""
      code += """
  CheckGLError();
}
"""
      f.write(code % {
          'func_name': func.name,
          'arg_string': arg_string,
          'all_arg_string': all_arg_string,
          'last_arg_name': func.GetLastOriginalArg().name,
      })

  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Writes the GLES2 Implementation unit test."""
    code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
  struct Cmds {
    cmds::%(name)s cmd;
  };
  typedef cmds::%(name)s::Result::Type ResultType;
  ResultType result = 0;
  Cmds expected;
  ExpectedMemoryInfo result1 = GetExpectedResultMemory(
      sizeof(uint32_t) + sizeof(ResultType));
  expected.cmd.Init(%(cmd_args)s, result1.id, result1.offset);
  EXPECT_CALL(*command_buffer(), OnFlush())
      .WillOnce(SetMemory(result1.ptr, SizedResultHelper<ResultType>(1)))
      .RetiresOnSaturation();
  gl_->%(name)s(%(args)s, &result);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
  EXPECT_EQ(static_cast<ResultType>(1), result);
}
"""
    # Skip functions whose first argument has no non-cached valid value.
    first_cmd_arg = func.GetCmdArgs()[0].GetValidNonCachedClientSideCmdArg(func)
    if not first_cmd_arg:
      return
    first_gl_arg = func.GetOriginalArgs()[0].GetValidNonCachedClientSideArg(
        func)
    cmd_arg_strings = [first_cmd_arg]
    for arg in func.GetCmdArgs()[1:-2]:
      cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
    gl_arg_strings = [first_gl_arg]
    for arg in func.GetOriginalArgs()[1:-1]:
      gl_arg_strings.append(arg.GetValidClientSideArg(func))
    f.write(code % {
        'prefix' : _prefix,
        'name': func.name,
        'args': ", ".join(gl_arg_strings),
        'cmd_args': ", ".join(cmd_arg_strings),
    })

  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler."""
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  EXPECT_CALL(*gl_, GetError())
      .WillRepeatedly(Return(GL_NO_ERROR));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  typedef cmds::%(name)s::Result Result;
  Result* result = static_cast<Result*>(shared_memory_address_);
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(local_gl_args)s));
  result->size = 0;
  cmds::%(name)s cmd;
  cmd.Init(%(cmd_args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(decoder_->GetGLES2Util()->GLGetNumValuesReturned(
                %(valid_pname)s),
            result->GetNumResults());
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    gl_arg_strings = []
    cmd_arg_strings = []
    valid_pname = ''
    for arg in func.GetOriginalArgs()[:-1]:
      # GL-side expectation values; size/length args are synthesized by the
      # decoder rather than carried in the command, so they are skipped on
      # the cmd side below.
      if arg.name == 'length':
        gl_arg_value = 'nullptr'
      elif arg.name.endswith('size'):
        gl_arg_value = ("decoder_->GetGLES2Util()->GLGetNumValuesReturned(%s)" %
                        valid_pname)
      elif arg.type == 'GLsync':
        gl_arg_value = 'reinterpret_cast<GLsync>(kServiceSyncId)'
      else:
        gl_arg_value = arg.GetValidGLArg(func)
      gl_arg_strings.append(gl_arg_value)
      if arg.name == 'pname':
        valid_pname = gl_arg_value
      if arg.name.endswith('size') or arg.name == 'length':
        continue
      if arg.type == 'GLsync':
        arg_value = 'client_sync_id_'
      else:
        arg_value = arg.GetValidArg(func)
      cmd_arg_strings.append(arg_value)
    if func.GetInfo('gl_test_func') == 'glGetIntegerv':
      gl_arg_strings.append("_")
    else:
      gl_arg_strings.append("result->GetData()")
    cmd_arg_strings.append("shared_memory_id_")
    cmd_arg_strings.append("shared_memory_offset_")
    self.WriteValidUnitTest(func, f, valid_test, {
        'local_gl_args': ", ".join(gl_arg_strings),
        'cmd_args': ", ".join(cmd_arg_strings),
        'valid_pname': valid_pname,
    }, *extras)
    if not func.IsES3():
      invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s::Result* result =
      static_cast<cmds::%(name)s::Result*>(shared_memory_address_);
  result->size = 0;
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));
  EXPECT_EQ(0u, result->size);%(gl_error_test)s
}
"""
      self.WriteInvalidUnitTest(func, f, invalid_test, *extras)

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler.  GETn has no immediate form."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler.  GETn has no immediate form."""
    pass
class ArrayArgTypeHandler(TypeHandler):
  """Base class for type handlers whose data argument is an array."""

  def GetArrayType(self, func):
    """Returns the C type of the elements in the array being PUT to."""
    for candidate in func.GetOriginalArgs():
      if candidate.IsPointer():
        return candidate.GetPointedType()
    # No pointer argument: this handler is being used for a function that
    # is forwarded to the actual array-type implementation, so the trailing
    # arguments themselves are the array elements.
    element_type = func.GetOriginalArgs()[-1].type
    trailing_args = func.GetOriginalArgs()[-self.GetArrayCount(func):]
    assert all(arg.type == element_type for arg in trailing_args)
    return element_type

  def GetArrayCount(self, func):
    """Returns the number of elements in the array being PUT to."""
    return func.GetInfo('count')

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler."""
    pass
class PUTHandler(ArrayArgTypeHandler):
  """Handler for glTexParameter_v, glVertexAttrib_v functions.

  The data argument is a fixed-size array (optionally shortened at runtime
  by a per-function Calc<Name>DataCount helper when 'use_count_func' is
  set).
  """

  def WriteServiceUnitTest(self, func, f, *extras):
    """Writes the service unit test for a command."""
    expected_call = "EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));"
    if func.GetInfo("first_element_only"):
      # Only the first array element is forwarded to GL, so the
      # expectation dereferences the pointer argument.
      gl_arg_strings = [
          arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()
      ]
      gl_arg_strings[-1] = "*" + gl_arg_strings[-1]
      expected_call = ("EXPECT_CALL(*gl_, %%(gl_func_name)s(%s));" %
                       ", ".join(gl_arg_strings))
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
  %(expected_call)s
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    extra = {
        'data_type': self.GetArrayType(func),
        'data_value': func.GetInfo('data_value') or '0',
        'expected_call': expected_call,
    }
    self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
  SpecializedSetup<cmds::%(name)s, 0>(false);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  GetSharedMemoryAs<%(data_type)s*>()[0] = %(data_value)s;
  EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
    self.WriteInvalidUnitTest(func, f, invalid_test, extra, *extras)

  def WriteImmediateServiceUnitTest(self, func, f, *extras):
    """Writes the service unit test for an immediate command."""
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
  SpecializedSetup<cmds::%(name)s, 0>(true);
  %(data_type)s temp[%(data_count)s] = { %(data_value)s, };
  cmd.Init(%(gl_client_args)s, &temp[0]);
  EXPECT_CALL(
      *gl_,
      %(gl_func_name)s(%(gl_args)s, %(expectation)s));
  EXPECT_EQ(error::kNoError,
            ExecuteImmediateCmd(cmd, sizeof(temp)));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    gl_client_arg_strings = [
        arg.GetValidArg(func) for arg in func.GetOriginalArgs()[0:-1]
    ]
    gl_arg_strings = [
        arg.GetValidGLArg(func) for arg in func.GetOriginalArgs()[0:-1]
    ]
    gl_any_strings = ["_"] * len(gl_arg_strings)
    data_count = self.GetArrayCount(func)
    if func.GetInfo('first_element_only'):
      expectation = "temp[0]"
    else:
      expectation = "PointsToArray(temp, %s)" % data_count
    extra = {
        'expectation': expectation,
        'data_type': self.GetArrayType(func),
        'data_count': data_count,
        'data_value': func.GetInfo('data_value') or '0',
        'gl_client_args': ", ".join(gl_client_arg_strings),
        'gl_args': ", ".join(gl_arg_strings),
        'gl_any_args': ", ".join(gl_any_strings),
    }
    self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();"""
    # ES3 decoders pass invalid values through to GL; ES2 decoders reject
    # them before GL is called.
    if func.IsES3():
      invalid_test += """
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(1);
"""
    else:
      invalid_test += """
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
"""
    invalid_test += """
  SpecializedSetup<cmds::%(name)s, 0>(false);
  %(data_type)s temp[%(data_count)s] = { %(data_value)s, };
  cmd.Init(%(all_but_last_args)s, &temp[0]);
  EXPECT_EQ(error::%(parse_result)s,
            ExecuteImmediateCmd(cmd, sizeof(temp)));
%(gl_error_test)s
}
"""
    self.WriteInvalidUnitTest(func, f, invalid_test, extra, *extras)

  def WriteGetDataSizeCode(self, func, arg, f):
    """Overridden from TypeHandler."""
    code = ("""  uint32_t %(data_size)s;
  if (!%(namespace)sGLES2Util::""" +
            """ComputeDataSize<%(arrayType)s, %(arrayCount)d>(1, &%(data_size)s)) {
    return error::kOutOfBounds;
  }
""")
    f.write(code % {'data_size': arg.GetReservedSizeId(),
                    'namespace': _Namespace(),
                    'arrayType': self.GetArrayType(func),
                    'arrayCount': self.GetArrayCount(func)})
    if func.IsImmediate():
      f.write("  if (%s > immediate_data_size) {\n" % arg.GetReservedSizeId())
      f.write("    return error::kOutOfBounds;\n")
      f.write("  }\n")

  def __NeedsToCalcDataCount(self, func):
    # A runtime Calc<Name>DataCount helper is used unless 'use_count_func'
    # is absent (None) or explicitly disabled (False).  'not in' keeps the
    # original equality-based semantics while reading idiomatically.
    use_count_func = func.GetInfo('use_count_func')
    return use_count_func not in (None, False)

  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler."""
    impl_func = func.GetInfo('impl_func')
    # Generate only when 'impl_func' is unset or explicitly True; any other
    # value means a hand-written implementation exists elsewhere.
    if impl_func not in (None, True):
      return
    f.write("%s %sImplementation::%s(%s) {\n" %
            (func.return_type, _prefix, func.original_name,
             func.MakeTypedOriginalArgString("")))
    f.write("  GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
    func.WriteDestinationInitalizationValidation(f)
    self.WriteClientGLCallLog(func, f)
    if self.__NeedsToCalcDataCount(func):
      # The effective element count depends on the first argument (e.g.
      # pname); a zero count means that value is not a valid enum.
      f.write("  uint32_t count = %sGLES2Util::Calc%sDataCount(%s);\n" %
              (_Namespace(), func.name, func.GetOriginalArgs()[0].name))
      f.write("  DCHECK_LE(count, %du);\n" % self.GetArrayCount(func))
      f.write("  if (count == 0) {\n")
      f.write("    SetGLErrorInvalidEnum(\"%s\", %s, \"%s\");\n" %
              (func.prefixed_name, func.GetOriginalArgs()[0].name,
               func.GetOriginalArgs()[0].name))
      f.write("    return;\n")
      f.write("  }\n")
    else:
      f.write("  uint32_t count = %d;" % self.GetArrayCount(func))
    f.write("  for (uint32_t ii = 0; ii < count; ++ii)\n")
    f.write('    GPU_CLIENT_LOG("value[" << ii << "]: " << %s[ii]);\n' %
            func.GetLastOriginalArg().name)
    for arg in func.GetOriginalArgs():
      arg.WriteClientSideValidationCode(f, func)
    f.write("  helper_->%sImmediate(%s);\n" %
            (func.name, func.MakeOriginalArgString("")))
    f.write("  CheckGLError();\n")
    f.write("}\n")
    f.write("\n")

  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Writes the GLES2 Implementation unit test."""
    client_test = func.GetInfo('client_test', True)
    if not client_test:
      return
    code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
  %(type)s data[%(count)d] = {0};
  struct Cmds {
    cmds::%(name)sImmediate cmd;
    %(type)s data[%(count)d];
  };
  for (int jj = 0; jj < %(count)d; ++jj) {
    data[jj] = static_cast<%(type)s>(jj);
  }
  Cmds expected;
  expected.cmd.Init(%(cmd_args)s, &data[0]);
  gl_->%(name)s(%(args)s, &data[0]);
  EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
    cmd_arg_strings = [
        arg.GetValidClientSideCmdArg(func) for arg in func.GetCmdArgs()[0:-2]
    ]
    gl_arg_strings = [
        arg.GetValidClientSideArg(func) for arg in func.GetOriginalArgs()[0:-1]
    ]
    f.write(code % {
        'prefix' : _prefix,
        'name': func.name,
        'type': self.GetArrayType(func),
        'count': self.GetArrayCount(func),
        'args': ", ".join(gl_arg_strings),
        'cmd_args': ", ".join(cmd_arg_strings),
    })

  def WriteImmediateCmdComputeSize(self, func, f):
    """Overridden from TypeHandler."""
    f.write("  static uint32_t ComputeDataSize() {\n")
    f.write("    return static_cast<uint32_t>(\n")
    f.write("        sizeof(%s) * %d);\n" %
            (self.GetArrayType(func), self.GetArrayCount(func)))
    f.write("  }\n")
    f.write("\n")
    if self.__NeedsToCalcDataCount(func):
      f.write("  static uint32_t ComputeEffectiveDataSize(%s %s) {\n" %
              (func.GetOriginalArgs()[0].type,
               func.GetOriginalArgs()[0].name))
      f.write("    return static_cast<uint32_t>(\n")
      f.write("        sizeof(%s) * %sGLES2Util::Calc%sDataCount(%s));\n" %
              (self.GetArrayType(func), _Namespace(), func.original_name,
               func.GetOriginalArgs()[0].name))
      f.write("  }\n")
      f.write("\n")
    f.write("  static uint32_t ComputeSize() {\n")
    f.write("    return static_cast<uint32_t>(\n")
    f.write(
        "        sizeof(ValueType) + ComputeDataSize());\n")
    f.write("  }\n")
    f.write("\n")

  def WriteImmediateCmdSetHeader(self, _func, f):
    """Overridden from TypeHandler."""
    f.write("  void SetHeader() {\n")
    f.write(
        "    header.SetCmdByTotalSize<ValueType>(ComputeSize());\n")
    f.write("  }\n")
    f.write("\n")

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler."""
    last_arg = func.GetLastOriginalArg()
    f.write("  void Init(%s, %s _%s) {\n" %
            (func.MakeTypedCmdArgString("_"),
             last_arg.type, last_arg.name))
    f.write("    SetHeader();\n")
    args = func.GetCmdArgs()
    for arg in args:
      arg.WriteSetCode(f, 4, "_%s" % arg.name)
    f.write("    memcpy(ImmediateDataAddress(this),\n")
    if self.__NeedsToCalcDataCount(func):
      # Copy only the effective bytes, then zero-fill the remainder so the
      # command buffer never carries uninitialized memory.
      f.write("           _%s, ComputeEffectiveDataSize(%s));" %
              (last_arg.name, func.GetOriginalArgs()[0].name))
      f.write("""
  DCHECK_GE(ComputeDataSize(), ComputeEffectiveDataSize(%(arg)s));
  char* pointer = reinterpret_cast<char*>(ImmediateDataAddress(this)) +
                  ComputeEffectiveDataSize(%(arg)s);
  memset(pointer, 0, ComputeDataSize() - ComputeEffectiveDataSize(%(arg)s));
""" % { 'arg': func.GetOriginalArgs()[0].name, })
    else:
      f.write("           _%s, ComputeDataSize());\n" % last_arg.name)
    f.write("  }\n")
    f.write("\n")

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler."""
    last_arg = func.GetLastOriginalArg()
    copy_args = func.MakeCmdArgString("_", False)
    f.write("  void* Set(void* cmd%s, %s _%s) {\n" %
            (func.MakeTypedCmdArgString("_", True),
             last_arg.type, last_arg.name))
    f.write("    static_cast<ValueType*>(cmd)->Init(%s, _%s);\n" %
            (copy_args, last_arg.name))
    f.write("    const uint32_t size = ComputeSize();\n")
    f.write("    return NextImmediateCmdAddressTotalSize<ValueType>("
            "cmd, size);\n")
    f.write("  }\n")
    f.write("\n")

  def WriteImmediateCmdHelper(self, func, f):
    """Overridden from TypeHandler."""
    code = """  void %(name)s(%(typed_args)s) {
    const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize();
    %(lp)s::cmds::%(name)s* c =
        GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
    if (c) {
      c->Init(%(args)s);
    }
  }
"""
    f.write(code % {
        "lp" : _lower_prefix,
        "name": func.name,
        "typed_args": func.MakeTypedOriginalArgString(""),
        "args": func.MakeOriginalArgString(""),
    })

  def WriteImmediateFormatTest(self, func, f):
    """Overridden from TypeHandler."""
    f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
    f.write("  const int kSomeBaseValueToTestWith = 51;\n")
    f.write("  static %s data[] = {\n" % self.GetArrayType(func))
    for v in range(0, self.GetArrayCount(func)):
      f.write("    static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
              (self.GetArrayType(func), v))
    f.write("  };\n")
    f.write("  cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
            (func.name, func.name))
    f.write("  void* next_cmd = cmd.Set(\n")
    f.write("      &cmd")
    args = func.GetCmdArgs()
    for value, arg in enumerate(args):
      f.write(",\n      static_cast<%s>(%d)" % (arg.type, value + 11))
    f.write(",\n      data);\n")
    args = func.GetCmdArgs()
    f.write("  EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n"
            % func.name)
    f.write("            cmd.header.command);\n")
    f.write("  EXPECT_EQ(sizeof(cmd) +\n")
    f.write("            RoundSizeToMultipleOfEntries(sizeof(data)),\n")
    f.write("            cmd.header.size * 4u);\n")
    for value, arg in enumerate(args):
      f.write("  EXPECT_EQ(static_cast<%s>(%d), %s);\n" %
              (arg.type, value + 11, arg.GetArgAccessor('cmd')))
    f.write("  CheckBytesWrittenMatchesExpectedSize(\n")
    f.write("      next_cmd, sizeof(cmd) +\n")
    f.write("          RoundSizeToMultipleOfEntries(sizeof(data)));\n")
    # TODO: Check that data was inserted
    f.write("}\n")
    f.write("\n")
class PUTnHandler(ArrayArgTypeHandler):
"""Handler for PUTn 'glUniform__v' type functions."""
  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Writes the base array-arg service tests, then an extra test verifying
    that a command whose 'count' exceeds what GL will actually be called
    with is still accepted without error.
    """
    ArrayArgTypeHandler.WriteServiceUnitTest(self, func, f, *extras)
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgsCountTooLarge) {
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
  SpecializedSetup<cmds::%(name)s, 0>(true);
  cmds::%(name)s cmd;
  cmd.Init(%(args)s);
  EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    gl_arg_strings = []
    arg_strings = []
    for count, arg in enumerate(func.GetOriginalArgs()):
      # hardcoded to match unit tests.
      if count == 0:
        # the location of the second element of the 2nd uniform.
        # defined in GLES2DecoderBase::SetupShaderForUniform
        gl_arg_strings.append("3")
        arg_strings.append("ProgramManager::MakeFakeLocation(1, 1)")
      elif count == 1:
        # the number of elements that gl will be called with.
        gl_arg_strings.append("3")
        # the number of elements requested in the command.
        arg_strings.append("5")
      else:
        gl_arg_strings.append(arg.GetValidGLArg(func))
        if not arg.IsConstant():
          arg_strings.append(arg.GetValidArg(func))
    extra = {
        'gl_args': ", ".join(gl_arg_strings),
        'args': ", ".join(arg_strings),
    }
    self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
  def WriteImmediateServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits valid- and invalid-arg service tests for the immediate command.
    The temp array is sized data_count * 2 so the test can pass a count of
    two array-elements' worth of data.
    """
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
  cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
  SpecializedSetup<cmds::%(name)s, 0>(true);
  %(data_type)s temp[%(data_count)s * 2] = { 0, };
  EXPECT_CALL(
      *gl_,
      %(gl_func_name)s(%(gl_args)s,
          PointsToArray(temp, %(data_count)s)));
  cmd.Init(%(args)s, &temp[0]);
  EXPECT_EQ(error::kNoError,
            ExecuteImmediateCmd(cmd, sizeof(temp)));
  EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    gl_arg_strings = []
    gl_any_strings = []
    arg_strings = []
    # All original args except the trailing data pointer; constants are
    # baked into the command and therefore not passed to Init().
    for arg in func.GetOriginalArgs()[0:-1]:
      gl_arg_strings.append(arg.GetValidGLArg(func))
      gl_any_strings.append("_")
      if not arg.IsConstant():
        arg_strings.append(arg.GetValidArg(func))
    extra = {
        'data_type': self.GetArrayType(func),
        'data_count': self.GetArrayCount(func),
        'args': ", ".join(arg_strings),
        'gl_args': ", ".join(gl_arg_strings),
        'gl_any_args': ", ".join(gl_any_strings),
    }
    self.WriteValidUnitTest(func, f, valid_test, extra, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
  cmds::%(name)s& cmd = *GetImmediateAs<cmds::%(name)s>();
  EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_any_args)s, _)).Times(0);
  SpecializedSetup<cmds::%(name)s, 0>(false);
  %(data_type)s temp[%(data_count)s * 2] = { 0, };
  cmd.Init(%(all_but_last_args)s, &temp[0]);
  EXPECT_EQ(error::%(parse_result)s,
            ExecuteImmediateCmd(cmd, sizeof(temp)));%(gl_error_test)s
}
"""
    self.WriteInvalidUnitTest(func, f, invalid_test, extra, *extras)
def WriteGetDataSizeCode(self, func, arg, f):
"""Overrriden from TypeHandler."""
code = (""" uint32_t %(data_size)s = 0;
if (count >= 0 && !%(namespace)sGLES2Util::""" +
"""ComputeDataSize<%(arrayType)s, %(arrayCount)d>(count, &%(data_size)s)) {
return error::kOutOfBounds;
}
""")
f.write(code % {'data_size': arg.GetReservedSizeId(),
'namespace': _Namespace(),
'arrayType': self.GetArrayType(func),
'arrayCount': self.GetArrayCount(func)})
if func.IsImmediate():
f.write(" if (%s > immediate_data_size) {\n" % arg.GetReservedSizeId())
f.write(" return error::kOutOfBounds;\n")
f.write(" }\n")
def WriteGLES2Implementation(self, func, f):
"""Overrriden from TypeHandler."""
impl_func = func.GetInfo('impl_func')
if (impl_func != None and impl_func != True):
return;
f.write("%s %sImplementation::%s(%s) {\n" %
(func.return_type, _prefix, func.original_name,
func.MakeTypedOriginalArgString("")))
f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
func.WriteDestinationInitalizationValidation(f)
self.WriteClientGLCallLog(func, f)
last_pointer_name = func.GetLastOriginalPointerArg().name
f.write(""" GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei i = 0; i < count; ++i) {
""")
values_str = ' << ", " << '.join(
["%s[%d + i * %d]" % (
last_pointer_name, ndx, self.GetArrayCount(func)) for ndx in range(
0, self.GetArrayCount(func))])
f.write(' GPU_CLIENT_LOG(" " << i << ": " << %s);\n' % values_str)
f.write(" }\n });\n")
for arg in func.GetOriginalArgs():
arg.WriteClientSideValidationCode(f, func)
f.write(" helper_->%sImmediate(%s);\n" %
(func.name, func.MakeInitString("")))
f.write(" CheckGLError();\n")
f.write("}\n")
f.write("\n")
def WriteGLES2ImplementationUnitTest(self, func, f):
"""Writes the GLES2 Implemention unit test."""
client_test = func.GetInfo('client_test', True)
if not client_test:
return;
code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
struct Cmds {
cmds::%(name)sImmediate cmd;
%(type)s data[%(count_param)d][%(count)d];
};
Cmds expected;
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
expected.cmd.Init(%(cmd_args)s);
gl_->%(name)s(%(args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
cmd_arg_strings = []
for arg in func.GetCmdArgs():
if arg.name.endswith("_shm_id"):
cmd_arg_strings.append("&data[0][0]")
elif arg.name.endswith("_shm_offset"):
continue
else:
cmd_arg_strings.append(arg.GetValidClientSideCmdArg(func))
gl_arg_strings = []
count_param = 0
for arg in func.GetOriginalArgs():
if arg.IsPointer():
valid_value = "&data[0][0]"
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'cmd_args': ", ".join(cmd_arg_strings),
'count_param': count_param,
})
# Test constants for invalid values, as they are not tested by the
# service.
constants = [
arg for arg in func.GetOriginalArgs()[0:-1] if arg.IsConstant()
]
if not constants:
return
code = """
TEST_F(%(prefix)sImplementationTest,
%(name)sInvalidConstantArg%(invalid_index)d) {
%(type)s data[%(count_param)d][%(count)d] = {{0}};
for (int ii = 0; ii < %(count_param)d; ++ii) {
for (int jj = 0; jj < %(count)d; ++jj) {
data[ii][jj] = static_cast<%(type)s>(ii * %(count)d + jj);
}
}
gl_->%(name)s(%(args)s);
EXPECT_TRUE(NoCommandsWritten());
EXPECT_EQ(%(gl_error)s, CheckError());
}
"""
for invalid_arg in constants:
gl_arg_strings = []
invalid = invalid_arg.GetInvalidArg(func)
for arg in func.GetOriginalArgs():
if arg is invalid_arg:
gl_arg_strings.append(invalid[0])
elif arg.IsPointer():
gl_arg_strings.append("&data[0][0]")
else:
valid_value = arg.GetValidClientSideArg(func)
gl_arg_strings.append(valid_value)
if arg.name == "count":
count_param = int(valid_value)
f.write(code % {
'prefix' : _prefix,
'name': func.name,
'invalid_index': func.GetOriginalArgs().index(invalid_arg),
'type': self.GetArrayType(func),
'count': self.GetArrayCount(func),
'args': ", ".join(gl_arg_strings),
'gl_error': invalid[2],
'count_param': count_param,
})
def WriteImmediateCmdComputeSize(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" static uint32_t ComputeDataSize(GLsizei _n) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(" sizeof(%s) * %d * _n); // NOLINT\n" %
(self.GetArrayType(func), self.GetArrayCount(func)))
f.write(" }\n")
f.write("\n")
f.write(" static uint32_t ComputeSize(GLsizei _n) {\n")
f.write(" return static_cast<uint32_t>(\n")
f.write(
" sizeof(ValueType) + ComputeDataSize(_n)); // NOLINT\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdSetHeader(self, _func, f):
"""Overrriden from TypeHandler."""
f.write(" void SetHeader(GLsizei _n) {\n")
f.write(
" header.SetCmdByTotalSize<ValueType>(ComputeSize(_n));\n")
f.write(" }\n")
f.write("\n")
def WriteImmediateCmdInit(self, func, f):
"""Overrriden from TypeHandler."""
f.write(" void Init(%s) {\n" %
func.MakeTypedInitString("_"))
f.write(" SetHeader(_count);\n")
args = func.GetCmdArgs()
for arg in args:
arg.WriteSetCode(f, 4, "_%s" % arg.name)
f.write(" memcpy(ImmediateDataAddress(this),\n")
pointer_arg = func.GetLastOriginalPointerArg()
f.write(" _%s, ComputeDataSize(_count));\n" % pointer_arg.name)
f.write(" }\n")
f.write("\n")
  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler.

    Emits Set(): placement-initializes the command at *cmd and returns
    the address just past the command plus its immediate data.
    """
    f.write(" void* Set(void* cmd%s) {\n" %
            func.MakeTypedInitString("_", True))
    f.write(" static_cast<ValueType*>(cmd)->Init(%s);\n" %
            func.MakeInitString("_"))
    f.write(" const uint32_t size = ComputeSize(_count);\n")
    f.write(" return NextImmediateCmdAddressTotalSize<ValueType>("
            "cmd, size);\n")
    f.write(" }\n")
    f.write("\n")
  def WriteImmediateCmdHelper(self, func, f):
    """Overridden from TypeHandler.

    Emits the command-buffer helper: allocates immediate command space
    sized by `count` and, if allocation succeeded, initializes the
    command in place.
    """
    code = """ void %(name)s(%(typed_args)s) {
const uint32_t size = %(lp)s::cmds::%(name)s::ComputeSize(count);
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpaceTotalSize<%(lp)s::cmds::%(name)s>(size);
if (c) {
c->Init(%(args)s);
}
}
"""
    f.write(code % {
        "lp" : _lower_prefix,
        "name": func.name,
        "typed_args": func.MakeTypedInitString(""),
        "args": func.MakeInitString("")
    })
  def WriteImmediateFormatTest(self, func, f):
    """Overridden from TypeHandler.

    Emits a C++ format unit test: builds a data array sized by the
    function's `count` arg, calls Set() with synthetic per-position
    values, and checks the header, each stored field, and total size.
    """
    args = func.GetOriginalArgs()
    count_param = 0
    for arg in args:
      if arg.name == "count":
        count_param = int(arg.GetValidClientSideCmdArg(func))
    f.write("TEST_F(%sFormatTest, %s) {\n" % (_prefix, func.name))
    f.write(" const int kSomeBaseValueToTestWith = 51;\n")
    f.write(" static %s data[] = {\n" % self.GetArrayType(func))
    for v in range(0, self.GetArrayCount(func) * count_param):
      f.write(" static_cast<%s>(kSomeBaseValueToTestWith + %d),\n" %
              (self.GetArrayType(func), v))
    f.write(" };\n")
    f.write(" cmds::%s& cmd = *GetBufferAs<cmds::%s>();\n" %
            (func.name, func.name))
    f.write(" const GLsizei kNumElements = %d;\n" % count_param)
    f.write(" const size_t kExpectedCmdSize =\n")
    f.write(" sizeof(cmd) + kNumElements * sizeof(%s) * %d;\n" %
            (self.GetArrayType(func), self.GetArrayCount(func)))
    f.write(" void* next_cmd = cmd.Set(\n")
    f.write(" &cmd")
    # Each non-pointer, non-constant arg gets the value (index + 1) so the
    # checks below can verify it round-tripped through the command struct.
    for value, arg in enumerate(args):
      if arg.IsPointer():
        f.write(",\n data")
      elif arg.IsConstant():
        continue
      else:
        f.write(",\n static_cast<%s>(%d)" % (arg.type, value + 1))
    f.write(");\n")
    f.write(" EXPECT_EQ(static_cast<uint32_t>(cmds::%s::kCmdId),\n" %
            func.name)
    f.write(" cmd.header.command);\n")
    f.write(" EXPECT_EQ(kExpectedCmdSize, cmd.header.size * 4u);\n")
    for value, arg in enumerate(args):
      if arg.IsPointer() or arg.IsConstant():
        continue
      f.write(" EXPECT_EQ(static_cast<%s>(%d), %s);\n" %
              (arg.type, value + 1, arg.GetArgAccessor('cmd')))
    f.write(" CheckBytesWrittenMatchesExpectedSize(\n")
    f.write(" next_cmd, sizeof(cmd) +\n")
    f.write(" RoundSizeToMultipleOfEntries(sizeof(data)));\n")
    # TODO: Check that data was inserted
    f.write("}\n")
    f.write("\n")
class PUTSTRHandler(ArrayArgTypeHandler):
  """Handler for functions that pass a string array (e.g. glShaderSource).

  Client-side, the strings are packed into a bucket; the service-side
  command then reads them back out of the bucket.
  """

  def __GetDataArg(self, func):
    """Return the argument that points to the 2D char arrays."""
    for arg in func.GetOriginalArgs():
      if arg.IsPointer2D():
        return arg
    return None

  def __GetLengthArg(self, func):
    """Return the argument that holds length for each char array, if any."""
    for arg in func.GetOriginalArgs():
      if arg.IsPointer() and not arg.IsPointer2D():
        return arg
    return None

  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the client implementation: logs each string, validates args,
    packs the strings into the result bucket and issues the Bucket
    variant of the command.
    """
    f.write("%s %sImplementation::%s(%s) {\n" %
            (func.return_type, _prefix, func.original_name,
             func.MakeTypedOriginalArgString("")))
    f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
    func.WriteDestinationInitalizationValidation(f)
    self.WriteClientGLCallLog(func, f)
    data_arg = self.__GetDataArg(func)
    length_arg = self.__GetLengthArg(func)
    log_code_block = """ GPU_CLIENT_LOG_CODE_BLOCK({
for (GLsizei ii = 0; ii < count; ++ii) {
if (%(data)s[ii]) {"""
    # With no length array the strings are NUL-terminated; otherwise a
    # non-negative length selects an explicit substring.
    if length_arg is None:
      log_code_block += """
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");"""
    else:
      log_code_block += """
if (%(length)s && %(length)s[ii] >= 0) {
const std::string my_str(%(data)s[ii], %(length)s[ii]);
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << my_str << "\\n---");
} else {
GPU_CLIENT_LOG(" " << ii << ": ---\\n" << %(data)s[ii] << "\\n---");
}"""
    log_code_block += """
} else {
GPU_CLIENT_LOG(" " << ii << ": NULL");
}
}
});
"""
    f.write(log_code_block % {
        'data': data_arg.name,
        'length': length_arg.name if length_arg is not None else ''
    })
    for arg in func.GetOriginalArgs():
      arg.WriteClientSideValidationCode(f, func)
    # The Bucket command drops `count` and the length array; the data
    # pointer is replaced by the bucket id.
    bucket_args = []
    for arg in func.GetOriginalArgs():
      if arg.name == 'count' or arg == self.__GetLengthArg(func):
        continue
      if arg == self.__GetDataArg(func):
        bucket_args.append('kResultBucketId')
      else:
        bucket_args.append(arg.name)
    code_block = """
if (!PackStringsToBucket(count, %(data)s, %(length)s, "gl%(func_name)s")) {
return;
}
helper_->%(func_name)sBucket(%(bucket_args)s);
helper_->SetBucketSize(kResultBucketId, 0);
CheckGLError();
}
"""
    f.write(code_block % {
        'data': data_arg.name,
        'length': length_arg.name if length_arg is not None else 'nullptr',
        'func_name': func.name,
        'bucket_args': ', '.join(bucket_args),
    })

  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Overridden from TypeHandler.

    Emits a client unit test checking the exact command stream produced
    when packing two strings, and (when a length arg exists) a second
    test using explicit lengths.
    """
    code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
const uint32_t kBucketId = %(prefix)sImplementation::kResultBucketId;
const char* kString1 = "happy";
const char* kString2 = "ending";
const size_t kString1Size = ::strlen(kString1) + 1;
const size_t kString2Size = ::strlen(kString2) + 1;
const size_t kHeaderSize = sizeof(GLint) * 3;
const size_t kSourceSize = kHeaderSize + kString1Size + kString2Size;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedString1Size =
transfer_buffer_->RoundToAlignment(kString1Size);
const size_t kPaddedString2Size =
transfer_buffer_->RoundToAlignment(kString2Size);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data1;
cmd::SetToken set_token2;
cmd::SetBucketData set_bucket_data2;
cmd::SetToken set_token3;
cmds::%(name)sBucket cmd_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedString1Size);
ExpectedMemoryInfo mem2 = GetExpectedMemory(kPaddedString2Size);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data1.Init(
kBucketId, kHeaderSize, kString1Size, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.set_bucket_data2.Init(
kBucketId, kHeaderSize + kString1Size, kString2Size, mem2.id,
mem2.offset);
expected.set_token3.Init(GetNextToken());
expected.cmd_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString1, kString2 };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
    gl_args = []
    bucket_args = []
    for arg in func.GetOriginalArgs():
      if arg == self.__GetDataArg(func):
        gl_args.append('kStrings')
        bucket_args.append('kBucketId')
      elif arg == self.__GetLengthArg(func):
        gl_args.append('nullptr')
      elif arg.name == 'count':
        gl_args.append('2')
      else:
        gl_args.append(arg.GetValidClientSideArg(func))
        bucket_args.append(arg.GetValidClientSideArg(func))
    f.write(code % {
        'prefix' : _prefix,
        'name': func.name,
        'gl_args': ", ".join(gl_args),
        'bucket_args': ", ".join(bucket_args),
    })
    # No length arg means there is no "WithLength" variant to test.
    if self.__GetLengthArg(func) is None:
      return
    code = """
TEST_F(%(prefix)sImplementationTest, %(name)sWithLength) {
const uint32_t kBucketId = %(prefix)sImplementation::kResultBucketId;
const char* kString = "foobar******";
const size_t kStringSize = 6; // We only need "foobar".
const size_t kHeaderSize = sizeof(GLint) * 2;
const size_t kSourceSize = kHeaderSize + kStringSize + 1;
const size_t kPaddedHeaderSize =
transfer_buffer_->RoundToAlignment(kHeaderSize);
const size_t kPaddedStringSize =
transfer_buffer_->RoundToAlignment(kStringSize + 1);
struct Cmds {
cmd::SetBucketSize set_bucket_size;
cmd::SetBucketData set_bucket_header;
cmd::SetToken set_token1;
cmd::SetBucketData set_bucket_data;
cmd::SetToken set_token2;
cmds::ShaderSourceBucket shader_source_bucket;
cmd::SetBucketSize clear_bucket_size;
};
ExpectedMemoryInfo mem0 = GetExpectedMemory(kPaddedHeaderSize);
ExpectedMemoryInfo mem1 = GetExpectedMemory(kPaddedStringSize);
Cmds expected;
expected.set_bucket_size.Init(kBucketId, kSourceSize);
expected.set_bucket_header.Init(
kBucketId, 0, kHeaderSize, mem0.id, mem0.offset);
expected.set_token1.Init(GetNextToken());
expected.set_bucket_data.Init(
kBucketId, kHeaderSize, kStringSize + 1, mem1.id, mem1.offset);
expected.set_token2.Init(GetNextToken());
expected.shader_source_bucket.Init(%(bucket_args)s);
expected.clear_bucket_size.Init(kBucketId, 0);
const char* kStrings[] = { kString };
const GLint kLength[] = { kStringSize };
gl_->%(name)s(%(gl_args)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
}
"""
    gl_args = []
    for arg in func.GetOriginalArgs():
      if arg == self.__GetDataArg(func):
        gl_args.append('kStrings')
      elif arg == self.__GetLengthArg(func):
        gl_args.append('kLength')
      elif arg.name == 'count':
        gl_args.append('1')
      else:
        gl_args.append(arg.GetValidClientSideArg(func))
    f.write(code % {
        'prefix' : _prefix,
        'name': func.name,
        'gl_args': ", ".join(gl_args),
        'bucket_args': ", ".join(bucket_args),
    })

  def WriteBucketServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits service-side tests for the Bucket command: valid args, invalid
    args (missing bucket / bad client id), a malformed bucket header, and
    a bucket whose last string is not NUL-terminated.
    """
    cmd_args = []
    cmd_args_with_invalid_id = []
    gl_args = []
    for index, arg in enumerate(func.GetOriginalArgs()):
      if arg == self.__GetLengthArg(func):
        gl_args.append('_')
      elif arg.name == 'count':
        gl_args.append('1')
      elif arg == self.__GetDataArg(func):
        cmd_args.append('kBucketId')
        cmd_args_with_invalid_id.append('kBucketId')
        gl_args.append('_')
      elif index == 0: # Resource ID arg
        cmd_args.append(arg.GetValidArg(func))
        cmd_args_with_invalid_id.append('kInvalidClientId')
        gl_args.append(arg.GetValidGLArg(func))
      else:
        cmd_args.append(arg.GetValidArg(func))
        cmd_args_with_invalid_id.append(arg.GetValidArg(func))
        gl_args.append(arg.GetValidGLArg(func))
    test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));"""
    test += """
}
"""
    self.WriteValidUnitTest(func, f, test, {
        'cmd_args': ", ".join(cmd_args),
        'gl_args': ", ".join(gl_args),
    }, *extras)
    test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
cmds::%(name)s cmd;
// Test no bucket.
cmd.Init(%(cmd_args)s);
EXPECT_NE(error::kNoError, ExecuteCmd(cmd));
// Test invalid client.
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kValidStrEnd);
cmd.Init(%(cmd_args_with_invalid_id)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
    self.WriteValidUnitTest(func, f, test, {
        'cmd_args': ", ".join(cmd_args),
        'cmd_args_with_invalid_id': ", ".join(cmd_args_with_invalid_id),
    }, *extras)
    test = """
TEST_P(%(test_name)s, %(name)sInvalidHeader) {
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kValidStrEnd = 0;
const GLsizei kCount = static_cast<GLsizei>(base::size(kSource));
const GLsizei kTests[] = {
kCount + 1,
0,
std::numeric_limits<GLsizei>::max(),
-1,
};
for (size_t ii = 0; ii < base::size(kTests); ++ii) {
SetBucketAsCStrings(kBucketId, 1, kSource, kTests[ii], kValidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
}
"""
    self.WriteValidUnitTest(func, f, test, {
        'cmd_args': ", ".join(cmd_args),
    }, *extras)
    test = """
TEST_P(%(test_name)s, %(name)sInvalidStringEnding) {
const uint32_t kBucketId = 123;
const char kSource0[] = "hello";
const char* kSource[] = { kSource0 };
const char kInvalidStrEnd = '*';
SetBucketAsCStrings(kBucketId, 1, kSource, 1, kInvalidStrEnd);
cmds::%(name)s cmd;
cmd.Init(%(cmd_args)s);
EXPECT_EQ(error::kInvalidArguments, ExecuteCmd(cmd));
}
"""
    self.WriteValidUnitTest(func, f, test, {
        'cmd_args': ", ".join(cmd_args),
    }, *extras)
class PUTXnHandler(ArrayArgTypeHandler):
  """Handler for glUniform?f functions."""

  def WriteHandlerImplementation(self, func, f):
    """Overridden from TypeHandler.

    Emits a service handler that gathers the trailing scalar args into a
    temp array and forwards to the vector (``...v``) variant.
    """
    code = """ %(type)s temp[%(count)s] = { %(values)s};
Do%(name)sv(%(location)s, 1, &temp[0]);
"""
    values = ""
    args = func.GetOriginalArgs()
    count = int(self.GetArrayCount(func))
    # The last `count` original args are the scalar components.
    for ii in range(count):
      values += "%s, " % args[len(args) - count + ii].name
    f.write(code % {
        'name': func.name,
        'count': self.GetArrayCount(func),
        'type': self.GetArrayType(func),
        'location': args[0].name,
        'args': func.MakeOriginalArgString(""),
        'values': values,
    })

  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler."""
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(name)sv(%(local_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    args = func.GetOriginalArgs()
    local_args = "%s, 1, _" % args[0].GetValidGLArg(func)
    self.WriteValidUnitTest(func, f, valid_test, {
        'name': func.name,
        'count': self.GetArrayCount(func),
        'local_args': local_args,
    }, *extras)
    # BUG FIX: the EXPECT_CALL below used to be emitted with an unbalanced
    # parenthesis ("...(_, _, _).Times(0);"), producing C++ that cannot
    # compile whenever an invalid-arg test is generated for these funcs.
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(name)sv(_, _, _)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
    self.WriteInvalidUnitTest(func, f, invalid_test, {
        'name': func.GetInfo('name'),
        'count': self.GetArrayCount(func),
    })
class GLcharHandler(CustomHandler):
  """Handler for functions that pass a single string."""

  def WriteImmediateCmdComputeSize(self, _func, f):
    """Overridden from TypeHandler.

    Emits ComputeSize(): command struct plus the (variable) string size.
    """
    f.write(" static uint32_t ComputeSize(uint32_t data_size) {\n")
    f.write(" return static_cast<uint32_t>(\n")
    f.write(" sizeof(ValueType) + data_size); // NOLINT\n")
    f.write(" }\n")

  def WriteImmediateCmdSetHeader(self, _func, f):
    """Overridden from TypeHandler."""
    code = """
void SetHeader(uint32_t data_size) {
header.SetCmdBySize<ValueType>(data_size);
}
"""
    f.write(code)

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler.

    Emits Init(): sets header, stores each cmd arg, then copies the
    string bytes immediately after the command struct.
    """
    last_arg = func.GetLastOriginalArg()
    args = func.GetCmdArgs()
    code = """
void Init(%s, uint32_t _data_size) {
SetHeader(_data_size);
"""
    f.write(code % func.MakeTypedArgString("_"))
    for arg in args:
      arg.WriteSetCode(f, 4, "_%s" % arg.name)
    code = """
memcpy(ImmediateDataAddress(this), _%s, _data_size);
}
"""
    f.write(code % last_arg.name)

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler."""
    f.write(" void* Set(void* cmd%s, uint32_t _data_size) {\n" %
            func.MakeTypedCmdArgString("_", True))
    f.write(" static_cast<ValueType*>(cmd)->Init(%s, _data_size);\n" %
            func.MakeCmdArgString("_"))
    f.write(" return NextImmediateCmdAddress<ValueType>("
            "cmd, _data_size);\n")
    f.write(" }\n")
    f.write("\n")

  def WriteImmediateCmdHelper(self, func, f):
    """Overridden from TypeHandler.

    NOTE(review): the emitted helper hard-codes strlen(name) — this
    assumes every function using this handler names its string arg
    "name"; confirm against cmd_buffer_functions.txt.
    """
    code = """ void %(name)s(%(typed_args)s) {
const uint32_t data_size = strlen(name);
%(lp)s::cmds::%(name)s* c =
GetImmediateCmdSpace<%(lp)s::cmds::%(name)s>(data_size);
if (c) {
c->Init(%(args)s, data_size);
}
}
"""
    f.write(code % {
        "lp" : _lower_prefix,
        "name": func.name,
        "typed_args": func.MakeTypedOriginalArgString(""),
        "args": func.MakeOriginalArgString(""),
    })

  def WriteImmediateFormatTest(self, func, f):
    """Overridden from TypeHandler.

    Emits a format test: every cmd arg except the trailing string gets
    value (index + 11); the string payload and sizes are then verified.
    """
    init_code = []
    check_code = []
    all_but_last_arg = func.GetCmdArgs()[:-1]
    for value, arg in enumerate(all_but_last_arg):
      init_code.append(" static_cast<%s>(%d)," % (arg.type, value + 11))
    for value, arg in enumerate(all_but_last_arg):
      check_code.append(" EXPECT_EQ(static_cast<%s>(%d), %s);" %
                        (arg.type, value + 11, arg.GetArgAccessor('cmd')))
    code = """
TEST_F(%(prefix)sFormatTest, %(func_name)s) {
cmds::%(func_name)s& cmd = *GetBufferAs<cmds::%(func_name)s>();
static const char* const test_str = \"test string\";
void* next_cmd = cmd.Set(
&cmd,
%(init_code)s
test_str,
strlen(test_str));
EXPECT_EQ(static_cast<uint32_t>(cmds::%(func_name)s::kCmdId),
cmd.header.command);
EXPECT_EQ(sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)),
cmd.header.size * 4u);
EXPECT_EQ(static_cast<char*>(next_cmd),
reinterpret_cast<char*>(&cmd) + sizeof(cmd) +
RoundSizeToMultipleOfEntries(strlen(test_str)));
%(check_code)s
EXPECT_EQ(static_cast<uint32_t>(strlen(test_str)), cmd.data_size);
EXPECT_EQ(0, memcmp(test_str, ImmediateDataAddress(&cmd), strlen(test_str)));
CheckBytesWritten(
next_cmd,
sizeof(cmd) + RoundSizeToMultipleOfEntries(strlen(test_str)),
sizeof(cmd) + strlen(test_str));
}
"""
    f.write(code % {
        'prefix': _prefix,
        'func_name': func.name,
        'init_code': "\n".join(init_code),
        'check_code': "\n".join(check_code),
    })
class GLcharNHandler(CustomHandler):
  """Handler for functions that pass a single string with an optional len."""

  def InitFunction(self, func):
    """Overridden from TypeHandler.

    Replaces all command args with a single bucket id; the string is
    transported via a bucket rather than inline.
    """
    func.cmd_args = []
    func.AddCmdArg(Argument('bucket_id', 'GLuint'))

  def NeedsDataTransferFunction(self, func):
    """Overridden from TypeHandler."""
    return False

  def WriteServiceImplementation(self, func, f):
    """Overridden from TypeHandler.

    Emits a service handler that fetches the string from the bucket,
    rejecting a missing/empty bucket or non-string contents.
    """
    self.WriteServiceHandlerFunctionHeader(func, f)
    if func.IsES31():
      return
    f.write("""
GLuint bucket_id = static_cast<GLuint>(c.%(bucket_id)s);
Bucket* bucket = GetBucket(bucket_id);
if (!bucket || bucket->size() == 0) {
return error::kInvalidArguments;
}
std::string str;
if (!bucket->GetAsString(&str)) {
return error::kInvalidArguments;
}
%(gl_func_name)s(0, str.c_str());
return error::kNoError;
}
""" % {
    'gl_func_name': func.GetGLFunctionName(),
    'bucket_id': func.cmd_args[0].name,
  })
class IsHandler(TypeHandler):
  """Handler for glIs____ type and glGetError functions.

  These commands return their value through shared memory, so two extra
  cmd args (result shm id/offset) are appended to every function.
  """

  def InitFunction(self, func):
    """Overridden from TypeHandler."""
    func.AddCmdArg(Argument("result_shm_id", 'uint32_t'))
    func.AddCmdArg(Argument("result_shm_offset", 'uint32_t'))
    # PEP 8: identity comparison for None.
    if func.GetInfo('result') is None:
      func.AddInfo('result', ['uint32_t'])
    func.passthrough_service_doer_args.append(Argument('result', 'uint32_t*'))

  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits service tests: valid args, invalid args, and out-of-bounds
    shared-memory id/offset handling.
    """
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s));
SpecializedSetup<cmds::%(name)s, 0>(true);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    # Separator needed only when the function has its own args ahead of
    # the shm id/offset pair.
    comma = ""
    if func.GetOriginalArgs():
      comma = ", "
    self.WriteValidUnitTest(func, f, valid_test, {
        'comma': comma,
    }, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs%(arg_index)d_%(value_index)d) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)sshared_memory_id_, shared_memory_offset_);
EXPECT_EQ(error::%(parse_result)s, ExecuteCmd(cmd));%(gl_error_test)s
}
"""
    self.WriteInvalidUnitTest(func, f, invalid_test, {
        'comma': comma,
    }, *extras)
    # Uses WriteValidUnitTest on purpose: the GL args are valid, only the
    # shared memory references are bad.
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgsBadSharedMemoryId) {
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s)).Times(0);
SpecializedSetup<cmds::%(name)s, 0>(false);
cmds::%(name)s cmd;
cmd.Init(%(args)s%(comma)skInvalidSharedMemoryId, shared_memory_offset_);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
cmd.Init(%(args)s%(comma)sshared_memory_id_, kInvalidSharedMemoryOffset);
EXPECT_EQ(error::kOutOfBounds, ExecuteCmd(cmd));
}
"""
    self.WriteValidUnitTest(func, f, invalid_test, {
        'comma': comma,
    }, *extras)

  def WriteServiceImplementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the handler: validates the result shared memory, validates the
    args, then stores the GL call's return value into shared memory.
    """
    self.WriteServiceHandlerFunctionHeader(func, f)
    if func.IsES31():
      return
    self.WriteHandlerExtensionCheck(func, f)
    args = func.GetOriginalArgs()
    for arg in args:
      arg.WriteGetCode(f)
    code = """ typedef cmds::%(func_name)s::Result Result;
Result* result_dst = GetSharedMemoryAs<Result*>(
c.result_shm_id, c.result_shm_offset, sizeof(*result_dst));
if (!result_dst) {
return error::kOutOfBounds;
}
"""
    f.write(code % {'func_name': func.name})
    func.WriteHandlerValidation(f)
    f.write(" *result_dst = %s(%s);\n" %
            (func.GetGLFunctionName(), func.MakeOriginalArgString("")))
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WritePassthroughServiceImplementation(self, func, f):
    """Overridden from TypeHandler."""
    self.WritePassthroughServiceFunctionHeader(func, f)
    self.WriteHandlerExtensionCheck(func, f)
    self.WriteServiceHandlerArgGetCode(func, f)
    code = """ typedef cmds::%(func_name)s::Result Result;
Result* result = GetSharedMemoryAs<Result*>(
c.result_shm_id, c.result_shm_offset, sizeof(*result));
if (!result) {
return error::kOutOfBounds;
}
"""
    f.write(code % {'func_name': func.name})
    self.WritePassthroughServiceFunctionDoerCall(func, f)
    f.write(" return error::kNoError;\n")
    f.write("}\n")
    f.write("\n")

  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler.

    Emits the synchronous client implementation: issues the command with
    a result shm pointer, waits, and returns the fetched value.
    """
    impl_func = func.GetInfo('impl_func', True)
    if impl_func:
      error_value = func.GetInfo("error_value") or "GL_FALSE"
      f.write("%s %sImplementation::%s(%s) {\n" %
              (func.return_type, _prefix, func.original_name,
               func.MakeTypedOriginalArgString("")))
      f.write(" GPU_CLIENT_SINGLE_THREAD_CHECK();\n")
      self.WriteTraceEvent(func, f)
      func.WriteDestinationInitalizationValidation(f)
      self.WriteClientGLCallLog(func, f)
      f.write(" typedef cmds::%s::Result Result;\n" % func.name)
      f.write(" ScopedResultPtr<Result> result = GetResultAs<Result>();\n")
      f.write(" if (!result) {\n")
      f.write(" return %s;\n" % error_value)
      f.write(" }\n")
      f.write(" *result = 0;\n")
      # Generator-time invariant: Is* functions take exactly one id.
      assert len(func.GetOriginalArgs()) == 1
      id_arg = func.GetOriginalArgs()[0]
      if id_arg.type == 'GLsync':
        arg_string = "ToGLuint(%s)" % func.MakeOriginalArgString("")
      else:
        arg_string = func.MakeOriginalArgString("")
      f.write(
          " helper_->%s(%s, GetResultShmId(), result.offset());\n" %
          (func.name, arg_string))
      f.write(" WaitForCmd();\n")
      f.write(" %s result_value = *result" % func.return_type)
      if func.return_type == "GLboolean":
        f.write(" != 0")
      f.write(';\n GPU_CLIENT_LOG("returned " << result_value);\n')
      f.write(" CheckGLError();\n")
      f.write(" return result_value;\n")
      f.write("}\n")
      f.write("\n")

  def WriteGLES2ImplementationUnitTest(self, func, f):
    """Overridden from TypeHandler."""
    client_test = func.GetInfo('client_test', True)
    if client_test:
      code = """
TEST_F(%(prefix)sImplementationTest, %(name)s) {
struct Cmds {
cmds::%(name)s cmd;
};
Cmds expected;
ExpectedMemoryInfo result1 =
GetExpectedResultMemory(sizeof(cmds::%(name)s::Result));
expected.cmd.Init(%(cmd_id_value)s, result1.id, result1.offset);
EXPECT_CALL(*command_buffer(), OnFlush())
.WillOnce(SetMemory(result1.ptr, uint32_t(GL_TRUE)))
.RetiresOnSaturation();
GLboolean result = gl_->%(name)s(%(gl_id_value)s);
EXPECT_EQ(0, memcmp(&expected, commands_, sizeof(expected)));
EXPECT_TRUE(result);
}
"""
      args = func.GetOriginalArgs()
      assert len(args) == 1
      f.write(code % {
          'prefix' : _prefix,
          'name': func.name,
          'cmd_id_value': args[0].GetValidClientSideCmdArg(func),
          'gl_id_value': args[0].GetValidClientSideArg(func) })

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler. Is* commands have no immediate form."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler. Is* commands have no immediate form."""
    pass
class STRnHandler(TypeHandler):
  """Handler for GetProgramInfoLog, GetShaderInfoLog, GetShaderSource, and
  GetTranslatedShaderSourceANGLE.

  The string result is returned via a bucket; the client copies at most
  bufsize - 1 bytes out and NUL-terminates.
  """

  def InitFunction(self, func):
    """Overridden from TypeHandler."""
    # remove all but the first cmd args.
    cmd_args = func.GetCmdArgs()
    func.ClearCmdArgs()
    func.AddCmdArg(cmd_args[0])
    # add on a bucket id.
    func.AddCmdArg(Argument('bucket_id', 'uint32_t'))

  def WriteGLES2Implementation(self, func, f):
    """Overridden from TypeHandler."""
    code_1 = """%(return_type)s %(prefix)sImplementation::%(func_name)s(
%(args)s) {
GPU_CLIENT_SINGLE_THREAD_CHECK();
"""
    # BUG FIX: the emitted guard used a hard-coded `bufsize` identifier
    # instead of %(bufsize_name)s; it only compiled because every current
    # caller happens to name that parameter "bufsize".
    code_2 = """ GPU_CLIENT_LOG("[" << GetLogPrefix()
<< "] gl%(func_name)s" << "("
<< %(arg0)s << ", "
<< %(arg1)s << ", "
<< static_cast<void*>(%(arg2)s) << ", "
<< static_cast<void*>(%(arg3)s) << ")");
helper_->SetBucketSize(kResultBucketId, 0);
helper_->%(func_name)s(%(id_name)s, kResultBucketId);
std::string str;
GLsizei max_size = 0;
if (GetBucketAsString(kResultBucketId, &str)) {
if (%(bufsize_name)s > 0) {
max_size =
std::min(static_cast<size_t>(%(bufsize_name)s) - 1, str.size());
memcpy(%(dest_name)s, str.c_str(), max_size);
%(dest_name)s[max_size] = '\\0';
GPU_CLIENT_LOG("------\\n" << %(dest_name)s << "\\n------");
}
}
if (%(length_name)s != nullptr) {
*%(length_name)s = max_size;
}
CheckGLError();
}
"""
    args = func.GetOriginalArgs()
    str_args = {
        'prefix' : _prefix,
        'return_type': func.return_type,
        'func_name': func.original_name,
        'args': func.MakeTypedOriginalArgString(""),
        'id_name': args[0].name,
        'bufsize_name': args[1].name,
        'length_name': args[2].name,
        'dest_name': args[3].name,
        'arg0': args[0].name,
        'arg1': args[1].name,
        'arg2': args[2].name,
        'arg3': args[3].name,
    }
    f.write(code_1 % str_args)
    func.WriteDestinationInitalizationValidation(f)
    f.write(code_2 % str_args)

  def WriteServiceUnitTest(self, func, f, *extras):
    """Overridden from TypeHandler.

    Emits tests that the service fills the bucket with the string
    (including the terminating NUL) and rejects invalid client ids.
    """
    valid_test = """
TEST_P(%(test_name)s, %(name)sValidArgs) {
const char* kInfo = "hello";
const uint32_t kBucketId = 123;
SpecializedSetup<cmds::%(name)s, 0>(true);
%(expect_len_code)s
EXPECT_CALL(*gl_, %(gl_func_name)s(%(gl_args)s))
.WillOnce(DoAll(SetArgPointee<2>(strlen(kInfo)),
SetArrayArgument<3>(kInfo, kInfo + strlen(kInfo) + 1)));
cmds::%(name)s cmd;
cmd.Init(%(args)s);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
CommonDecoder::Bucket* bucket = decoder_->GetBucket(kBucketId);
ASSERT_TRUE(bucket != nullptr);
EXPECT_EQ(strlen(kInfo) + 1, bucket->size());
EXPECT_EQ(0, memcmp(bucket->GetData(0, bucket->size()), kInfo,
bucket->size()));
EXPECT_EQ(GL_NO_ERROR, GetGLError());
}
"""
    args = func.GetOriginalArgs()
    id_name = args[0].GetValidGLArg(func)
    get_len_func = func.GetInfo('get_len_func')
    get_len_enum = func.GetInfo('get_len_enum')
    sub = {
        'id_name': id_name,
        'get_len_func': get_len_func,
        'get_len_enum': get_len_enum,
        'gl_args': '%s, strlen(kInfo) + 1, _, _' %
            args[0].GetValidGLArg(func),
        'args': '%s, kBucketId' % args[0].GetValidArg(func),
        'expect_len_code': '',
    }
    # Some functions query the length via a glGet* first; expect that call.
    if get_len_func and get_len_func[0:2] == 'gl':
      sub['expect_len_code'] = (
          " EXPECT_CALL(*gl_, %s(%s, %s, _))\n"
          " .WillOnce(SetArgPointee<2>(strlen(kInfo) + 1));") % (
              get_len_func[2:], id_name, get_len_enum)
    self.WriteValidUnitTest(func, f, valid_test, sub, *extras)
    invalid_test = """
TEST_P(%(test_name)s, %(name)sInvalidArgs) {
const uint32_t kBucketId = 123;
EXPECT_CALL(*gl_, %(gl_func_name)s(_, _, _, _))
.Times(0);
cmds::%(name)s cmd;
cmd.Init(kInvalidClientId, kBucketId);
EXPECT_EQ(error::kNoError, ExecuteCmd(cmd));
EXPECT_EQ(GL_INVALID_VALUE, GetGLError());
}
"""
    self.WriteValidUnitTest(func, f, invalid_test, *extras)

  def WriteServiceImplementation(self, func, f):
    """Overridden from TypeHandler. Only ES31 variants are generated."""
    if func.IsES31():
      TypeHandler.WriteServiceImplementation(self, func, f)

  def WritePassthroughServiceImplementation(self, func, f):
    """Overridden from TypeHandler. Hand-written elsewhere."""
    pass

  def WriteImmediateCmdInit(self, func, f):
    """Overridden from TypeHandler. No immediate form."""
    pass

  def WriteImmediateCmdSet(self, func, f):
    """Overridden from TypeHandler. No immediate form."""
    pass
class NamedType(object):
  """A class that represents a type of an argument in a client function.

  A type of an argument that is to be passed through in the command buffer
  command. Currently used only for the arguments that are specifically named
  in the 'cmd_buffer_functions.txt' file, mostly enums.
  """

  def __init__(self, info):
    """Args:
      info: dict with 'valid' (required) plus optional 'invalid',
          'valid_es3', 'deprecated_es3', 'validator', 'is_complete', 'type'.
    """
    # 'is_complete', when present, must be True.
    assert 'is_complete' not in info or info['is_complete'] == True
    self.info = info
    self.valid = info['valid']
    self.invalid = info.get('invalid', [])
    self.valid_es3 = info.get('valid_es3', [])
    self.deprecated_es3 = info.get('deprecated_es3', [])
    self.create_validator = info.get('validator', True)
    self.is_complete = info.get('is_complete', False)

  def GetType(self):
    return self.info['type']

  def GetInvalidValues(self):
    return self.invalid

  def GetValidValues(self):
    return self.valid

  def GetValidValuesES3(self):
    return self.valid_es3

  def GetDeprecatedValuesES3(self):
    return self.deprecated_es3

  def HasES3Values(self):
    return self.valid_es3 or self.deprecated_es3

  def IsConstant(self):
    # A complete type with exactly one valid value is a constant.
    return self.is_complete and len(self.GetValidValues()) == 1

  def IsComplete(self):
    return self.is_complete

  def CreateValidator(self):
    # Constants need no validator even if one was requested.
    return self.create_validator and not self.IsConstant()

  def GetConstantValue(self):
    return self.GetValidValues()[0]
class Argument(object):
"""A class that represents a function argument."""
cmd_type_map_ = {
'GLenum': ['uint32_t'],
'GLint': ['int32_t'],
'GLintptr': ['int32_t'],
'GLsizei': ['int32_t'],
'GLsizeiptr': ['int32_t'],
'GLfloat': ['float'],
'GLclampf': ['float'],
'GLuint64': ['uint32_t', 'uint32_t'],
}
need_validation_ = ['GLsizei*', 'GLboolean*', 'GLenum*', 'GLint*']
def __init__(self, name, arg_type, arg_default = None):
self.name = name
self.optional = arg_type.endswith("Optional*")
if self.optional:
arg_type = arg_type[:-len("Optional*")] + "*"
self.type = arg_type
self.default = arg_default
if arg_type in self.cmd_type_map_:
self.cmd_type = self.cmd_type_map_[arg_type]
else:
self.cmd_type = ['uint32_t']
def IsPointer(self):
"""Returns true if argument is a pointer."""
return False
def IsPointer2D(self):
"""Returns true if argument is a 2D pointer."""
return False
def IsConstant(self):
"""Returns true if the argument has only one valid value."""
return False
def AddCmdArgs(self, args):
"""Adds command arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def AddInitArgs(self, args):
"""Adds init arguments for this argument to the given list."""
if not self.IsConstant():
return args.append(self)
def GetValidArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
index = func.GetOriginalArgs().index(self)
return str(index + 1)
def GetArgDecls(self):
if len(self.cmd_type) == 1:
return [(self.cmd_type[0], self.name)]
else:
return [(cmd_type, self.name + '_%d' % i)
for i, cmd_type
in enumerate(self.cmd_type)]
def GetReservedSizeId(self):
"""Gets a special identifier name for the data size of this argument"""
return "%s_size" % self.name
def GetValidClientSideArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
if self.IsPointer():
return 'nullptr'
index = func.GetOriginalArgs().index(self)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%d)" % (index + 1))
return str(index + 1)
def GetValidClientSideCmdArg(self, func):
"""Gets a valid value for this argument."""
valid_arg = func.GetValidArg(self)
if valid_arg != None:
return valid_arg
try:
index = func.GetOriginalArgs().index(self)
return str(index + 1)
except ValueError:
pass
index = func.GetCmdArgs().index(self)
return str(index + 1)
def GetValidGLArg(self, func):
"""Gets a valid GL value for this argument."""
value = self.GetValidArg(func)
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
def GetValidNonCachedClientSideArg(self, _func):
"""Returns a valid value for this argument in a GL call.
Using the value will produce a command buffer service invocation.
Returns None if there is no such value."""
value = '123'
if self.type == 'GLsync':
return ("reinterpret_cast<GLsync>(%s)" % value)
return value
  def GetValidNonCachedClientSideCmdArg(self, _func):
    """Returns a valid value for this argument in a command buffer command.

    Calling the GL function with the value returned by
    GetValidNonCachedClientSideArg will result in a command buffer command
    that contains the value returned by this function.
    """
    return '123'
  def GetNumInvalidValues(self, _func):
    """Returns the number of invalid values to be tested (none by default)."""
    return 0
  def GetInvalidArg(self, _index):
    """Returns an invalid value and expected parse result by index.

    The tuple is (value, expected parse result, expected GL error).
    """
    return ("---ERROR0---", "---ERROR2---", None)
def GetArgAccessor(self, cmd_struct_name):
"""Returns the name of the accessor for the argument within the struct."""
return '%s.%s' % (cmd_struct_name, self.name)
  def GetLogArg(self):
    """Get argument appropriate for LOG macro.

    Booleans and enums go through pretty-printing helpers so logs show
    symbolic names instead of raw integers.
    """
    if self.type == 'GLboolean':
      return '%sGLES2Util::GetStringBool(%s)' % (_Namespace(), self.name)
    if self.type == 'GLenum':
      return '%sGLES2Util::GetStringEnum(%s)' % (_Namespace(), self.name)
    return self.name
  def WriteGetCode(self, f):
    """Writes the code to get an argument from a command structure."""
    # GLsync is a pointer type client-side but travels as a GLuint handle.
    if self.type == 'GLsync':
      my_type = 'GLuint'
    else:
      my_type = self.type
    f.write(" %s %s = static_cast<%s>(c.%s);\n" %
            (my_type, self.name, my_type, self.name))
def WriteSetCode(self, f, indent, var):
f.write("%s%s = %s;\n" % (' ' * indent, self.name, var))
  def WriteArgAccessor(self, f):
    """Writes specialized accessor for argument; no-op for plain arguments."""
    pass
  def WriteValidationCode(self, f, func):
    """Writes the validation code for an argument; no-op by default."""
    pass
  def WritePassthroughValidationCode(self, f, func):
    """Writes the passthrough validation code; no-op by default."""
    pass
  def WriteClientSideValidationCode(self, f, func):
    """Writes the client-side validation code; no-op by default."""
    pass
  def WriteDestinationInitalizationValidation(self, f, func):
    """Writes client-side destination initialization validation; no-op here."""
    pass
  def WriteDestinationInitalizationValidatationIfNeeded(self, f, _func):
    """Writes client-side destination initialization validation if needed."""
    # Multi-word types (e.g. "const GLuint*") never need this check.
    parts = self.type.split(" ")
    if len(parts) > 1:
      return
    # need_validation_ is declared on this class elsewhere in the file —
    # presumably the pointer types whose destination must be initialized.
    if parts[0] in self.need_validation_:
      f.write(
          " GPU_CLIENT_VALIDATE_DESTINATION_%sINITALIZATION(%s, %s);\n" %
          ("OPTIONAL_" if self.optional else "", self.type[:-1], self.name))
  def GetImmediateVersion(self):
    """Gets the immediate version of this argument (self by default)."""
    return self
  def GetBucketVersion(self):
    """Gets the bucket version of this argument (self by default)."""
    return self
class BoolArgument(Argument):
  """Argument class for a C++ bool.

  Any valid value is simply the literal 'true' in generated code.
  """

  def __init__(self, name, _type, arg_default):
    Argument.__init__(self, name, _type, arg_default)

  def GetValidArg(self, func):
    """Gets a valid value for this argument."""
    return 'true'

  def GetValidClientSideArg(self, func):
    """Gets a valid value for this argument."""
    return 'true'

  def GetValidClientSideCmdArg(self, func):
    """Gets a valid value for this argument."""
    return 'true'

  def GetValidGLArg(self, func):
    """Gets a valid GL value for this argument."""
    return 'true'

  def GetArgAccessor(self, struct_name):
    """Returns the accessor; the wire value is cast back to bool."""
    return 'static_cast<bool>(%s.%s)' % (struct_name, self.name)
class GLBooleanArgument(Argument):
  """Argument class for GLboolean; the declared type is forced to GLboolean."""

  def __init__(self, name, _type, arg_default):
    Argument.__init__(self, name, 'GLboolean', arg_default)

  def GetValidArg(self, func):
    """Gets a valid value for this argument."""
    return 'true'

  def GetValidClientSideArg(self, func):
    """Gets a valid value for this argument."""
    return 'true'

  def GetValidClientSideCmdArg(self, func):
    """Gets a valid value for this argument."""
    return 'true'

  def GetValidGLArg(self, func):
    """Gets a valid GL value for this argument."""
    return 'true'
class UniformLocationArgument(Argument):
  """Argument class for uniform locations (always a GLint on the wire)."""

  def __init__(self, name, arg_default):
    Argument.__init__(self, name, "GLint", arg_default)

  def WriteGetCode(self, f):
    """Writes the code to get an argument from a command structure."""
    code = """ %s %s = static_cast<%s>(c.%s);
"""
    f.write(code % (self.type, self.name, self.type, self.name))
class DataSizeArgument(Argument):
  """Argument class for data_size, which Bucket commands do not need."""

  def __init__(self, name):
    Argument.__init__(self, name, "uint32_t")

  def GetBucketVersion(self):
    # Bucket commands carry their own size, so this argument is dropped.
    return None
class SizeArgument(Argument):
  """Argument class for GLsizei and GLsizeiptr.

  Sizes may be negative at the API boundary, so both the service and client
  sides emit a "< 0" check that raises GL_INVALID_VALUE.
  """

  def GetNumInvalidValues(self, func):
    """Overridden from Argument; immediate commands get no invalid sizes."""
    if func.IsImmediate():
      return 0
    return 1

  def GetInvalidArg(self, _index):
    """Overridden from Argument; -1 parses fine but raises a GL error."""
    return ("-1", "kNoError", "GL_INVALID_VALUE")

  def WriteValidationCode(self, f, func):
    """Overridden from Argument; service-side negative-size check."""
    code = """ if (%(var_name)s < 0) {
 LOCAL_SET_GL_ERROR(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
 return error::kNoError;
 }
"""
    f.write(code % {
        "var_name": self.name,
        "func_name": func.original_name,
    })

  def WriteClientSideValidationCode(self, f, func):
    """Overridden from Argument; client-side negative-size check."""
    code = """ if (%(var_name)s < 0) {
 SetGLError(GL_INVALID_VALUE, "gl%(func_name)s", "%(var_name)s < 0");
 return;
 }
"""
    f.write(code % {
        "var_name": self.name,
        "func_name": func.original_name,
    })
class SizeNotNegativeArgument(SizeArgument):
  """Argument class for GLsizeiNotNegative. It's NEVER allowed to be negative.

  A negative value fails parsing (kOutOfBounds) rather than raising a GL
  error, so the service-side validation of SizeArgument is suppressed.
  """

  def GetInvalidArg(self, _index):
    """Overridden from SizeArgument."""
    return ("-1", "kOutOfBounds", "GL_NO_ERROR")

  def WriteValidationCode(self, f, func):
    """Overridden from SizeArgument; validation happens during parsing."""
    pass
class EnumBaseArgument(Argument):
  """Base class for EnumArgument, IntArgument, and BitfieldArgument.

  Wraps a NamedType so valid/invalid values come from the named type info
  tables, and emits validator calls with the appropriate GL error.
  """

  def __init__(self, name, gl_type, type_name, arg_type, gl_error,
               named_type_info, arg_default):
    Argument.__init__(self, name, gl_type, arg_default)
    self.gl_error = gl_error
    self.type_name = type_name
    self.named_type = NamedType(named_type_info[type_name])

  def IsConstant(self):
    return self.named_type.IsConstant()

  def GetConstantValue(self):
    return self.named_type.GetConstantValue()

  def WriteValidationCode(self, f, func):
    """Writes the service-side validator check for this argument."""
    if self.named_type.IsConstant():
      return
    f.write(" if (!validators_->%s.IsValid(%s)) {\n" %
            (ToUnderscore(self.type_name), self.name))
    if self.gl_error == "GL_INVALID_ENUM":
      f.write(
          " LOCAL_SET_GL_ERROR_INVALID_ENUM(\"gl%s\", %s, \"%s\");\n" %
          (func.original_name, self.name, self.name))
    else:
      f.write(
          " LOCAL_SET_GL_ERROR(%s, \"gl%s\", \"%s %s\");\n" %
          (self.gl_error, func.original_name, self.name, self.gl_error))
    f.write(" return error::kNoError;\n")
    f.write(" }\n")

  def WriteClientSideValidationCode(self, f, func):
    """Writes the client-side check; only constant-valued types are checked."""
    if not self.named_type.IsConstant():
      return
    f.write(" if (%s != %s) {" % (self.name,
                                  self.GetConstantValue()))
    f.write(
        " SetGLError(%s, \"gl%s\", \"%s %s\");\n" %
        (self.gl_error, func.original_name, self.name, self.gl_error))
    if func.return_type == "void":
      f.write(" return;\n")
    else:
      f.write(" return %s;\n" % func.GetErrorReturnString())
    f.write(" }\n")

  def GetValidArg(self, func):
    """Prefers an info override, then the type's first valid value."""
    valid_arg = func.GetValidArg(self)
    if valid_arg is not None:  # PEP 8: compare to None with is/is not.
      return valid_arg
    valid = self.named_type.GetValidValues()
    if valid:
      return valid[0]
    index = func.GetOriginalArgs().index(self)
    return str(index + 1)

  def GetValidClientSideArg(self, func):
    """Gets a valid value for this argument."""
    return self.GetValidArg(func)

  def GetValidClientSideCmdArg(self, func):
    """Gets a valid value for this argument."""
    valid_arg = func.GetValidArg(self)
    if valid_arg is not None:  # PEP 8: compare to None with is/is not.
      return valid_arg
    valid = self.named_type.GetValidValues()
    if valid:
      return valid[0]
    try:
      index = func.GetOriginalArgs().index(self)
      return str(index + 1)
    except ValueError:
      pass
    index = func.GetCmdArgs().index(self)
    return str(index + 1)

  def GetValidGLArg(self, func):
    """Gets a valid value for this argument."""
    return self.GetValidArg(func)

  def GetNumInvalidValues(self, _func):
    """Returns the number of invalid values to be tested."""
    return len(self.named_type.GetInvalidValues())

  def GetInvalidArg(self, index):
    """Returns an invalid value by index (clamped to the last known one)."""
    invalid = self.named_type.GetInvalidValues()
    if invalid:
      num_invalid = len(invalid)
      if index >= num_invalid:
        index = num_invalid - 1
      return (invalid[index], "kNoError", self.gl_error)
    return ("---ERROR1---", "kNoError", self.gl_error)
class EnumArgument(EnumBaseArgument):
  """A class that represents a GLenum argument.

  Invalid values raise GL_INVALID_ENUM; logging pretty-prints the enum name.
  """

  def __init__(self, name, arg_type, named_type_info, arg_default):
    EnumBaseArgument.__init__(self, name, "GLenum", arg_type[len("GLenum"):],
                              arg_type, "GL_INVALID_ENUM", named_type_info,
                              arg_default)

  def GetLogArg(self):
    """Overridden from Argument."""
    return ("GLES2Util::GetString%s(%s)" %
            (self.type_name, self.name))
class EnumClassArgument(EnumBaseArgument):
  """A class that represents a C++ enum argument encoded as uint32_t."""

  def __init__(self, name, arg_type, named_type_info, arg_default):
    type_name = arg_type[len("EnumClass"):]
    EnumBaseArgument.__init__(self, name, type_name, type_name, arg_type,
                              "GL_INVALID_ENUM", named_type_info, arg_default)

  def GetArgAccessor(self, struct_name):
    """Returns the accessor; the wire uint32_t is cast back to the enum."""
    return 'static_cast<%s>(%s.%s)' % (self.type_name, struct_name, self.name)

  def WriteSetCode(self, f, indent, var):
    # Stored as uint32_t in the command struct, so cast on assignment.
    f.write("%s%s = static_cast<uint32_t>(%s);\n" %
            (' ' * indent, self.name, var))

  def GetLogArg(self):
    """Logs the raw uint32_t value of the enum."""
    return 'static_cast<uint32_t>(%s)' % self.name
class IntArgument(EnumBaseArgument):
  """A class for a GLint argument that can only accept specific values.

  For example glTexImage2D takes a GLint for its internalformat
  argument instead of a GLenum. Invalid values raise GL_INVALID_VALUE.
  """

  def __init__(self, name, arg_type, named_type_info, arg_default):
    EnumBaseArgument.__init__(self, name, "GLint", arg_type[len("GLint"):],
                              arg_type, "GL_INVALID_VALUE", named_type_info,
                              arg_default)
class BitFieldArgument(EnumBaseArgument):
  """A class for a GLbitfield argument that can only accept specific values.

  For example glFenceSync takes a GLbitfield for its flags argument but it
  must be 0. Invalid values raise GL_INVALID_VALUE.
  """

  def __init__(self, name, arg_type, named_type_info, arg_default):
    EnumBaseArgument.__init__(self, name, "GLbitfield",
                              arg_type[len("GLbitfield"):], arg_type,
                              "GL_INVALID_VALUE", named_type_info, arg_default)
class ImmediatePointerArgument(Argument):
  """A class that represents an immediate argument to a function.

  An immediate argument is one where the data follows the command.
  """

  def IsPointer(self):
    return True

  def GetPointedType(self):
    """Returns the element type this pointer argument points at."""
    # Raw string: "\s"/"\w" in a plain literal are invalid escape
    # sequences under Python 3.
    match = re.match(r'(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
    assert match
    return match.groupdict()['element_type']

  def AddCmdArgs(self, args):
    """Overridden from Argument; immediate data adds no struct fields."""
    pass

  def WriteGetCode(self, f):
    """Overridden from Argument; reads the data trailing the command."""
    f.write(" volatile %s %s = %sGetImmediateDataAs<volatile %s>(\n" %
            (self.type, self.name, _Namespace(), self.type))
    f.write(" c, %s, immediate_data_size);\n" %
            self.GetReservedSizeId())

  def WriteValidationCode(self, f, func):
    """Overridden from Argument; required pointers must be non-null."""
    if self.optional:
      return
    f.write(" if (%s == nullptr) {\n" % self.name)
    f.write(" return error::kOutOfBounds;\n")
    f.write(" }\n")

  def WritePassthroughValidationCode(self, f, func):
    """Overridden from Argument; required pointers must be non-null."""
    if self.optional:
      return
    f.write(" if (%s == nullptr) {\n" % self.name)
    f.write(" return error::kOutOfBounds;\n")
    f.write(" }\n")

  def GetImmediateVersion(self):
    """Overridden from Argument; already immediate."""
    return None

  def WriteDestinationInitalizationValidation(self, f, func):
    """Overridden from Argument."""
    self.WriteDestinationInitalizationValidatationIfNeeded(f, func)

  def GetLogArg(self):
    """Overridden from Argument."""
    return "static_cast<const void*>(%s)" % self.name
class PointerArgument(Argument):
  """A class that represents a pointer argument to a function.

  On the wire a pointer becomes a shared-memory id/offset pair.
  """

  def IsPointer(self):
    """Overridden from Argument."""
    return True

  def IsPointer2D(self):
    """Overridden from Argument."""
    return self.type.count('*') == 2

  def GetPointedType(self):
    """Returns the element type this pointer argument points at."""
    # Raw string: "\s"/"\w" in a plain literal are invalid escape
    # sequences under Python 3.
    match = re.match(r'(const\s+)?(?P<element_type>[\w]+)\s*\*', self.type)
    assert match
    return match.groupdict()['element_type']

  def GetValidArg(self, func):
    """Overridden from Argument."""
    return "shared_memory_id_, shared_memory_offset_"

  def GetValidGLArg(self, func):
    """Overridden from Argument."""
    return "reinterpret_cast<%s>(shared_memory_address_)" % self.type

  def GetNumInvalidValues(self, _func):
    """Overridden from Argument; bad id and bad offset are both tested."""
    return 2

  def GetInvalidArg(self, index):
    """Overridden from Argument."""
    if index == 0:
      return ("kInvalidSharedMemoryId, 0", "kOutOfBounds", None)
    else:
      return ("shared_memory_id_, kInvalidSharedMemoryOffset",
              "kOutOfBounds", None)

  def GetLogArg(self):
    """Overridden from Argument."""
    return "static_cast<const void*>(%s)" % self.name

  def AddCmdArgs(self, args):
    """Overridden from Argument; expands into shm id + offset fields."""
    args.append(Argument("%s_shm_id" % self.name, 'uint32_t'))
    args.append(Argument("%s_shm_offset" % self.name, 'uint32_t'))

  def WriteGetCode(self, f):
    """Overridden from Argument; resolves shm id/offset back to a pointer."""
    f.write(
        " %s %s = GetSharedMemoryAs<%s>(\n" %
        (self.type, self.name, self.type))
    f.write(
        " c.%s_shm_id, c.%s_shm_offset, %s);\n" %
        (self.name, self.name, self.GetReservedSizeId()))

  def WriteValidationCode(self, f, func):
    """Overridden from Argument; required pointers must be non-null."""
    if self.optional:
      return
    f.write(" if (%s == nullptr) {\n" % self.name)
    f.write(" return error::kOutOfBounds;\n")
    f.write(" }\n")

  def GetImmediateVersion(self):
    """Overridden from Argument."""
    return ImmediatePointerArgument(self.name, self.type)

  def GetBucketVersion(self):
    """Overridden from Argument; strings get specialized bucket arguments."""
    if self.type.find('char') >= 0:
      if self.IsPointer2D():
        return InputStringArrayBucketArgument(self.name, self.type)
      return InputStringBucketArgument(self.name, self.type)
    return BucketPointerArgument(self.name, self.type)

  def WriteDestinationInitalizationValidation(self, f, func):
    """Overridden from Argument."""
    self.WriteDestinationInitalizationValidatationIfNeeded(f, func)
class BucketPointerArgument(PointerArgument):
  """A class that represents a bucket argument to a function.

  The data is fetched from a bucket instead of shared memory.
  """

  def AddCmdArgs(self, args):
    """Overridden from Argument; bucket data adds no struct fields."""
    pass

  def WriteGetCode(self, f):
    """Overridden from Argument; reads the data out of the bucket."""
    f.write(
        " %s %s = bucket->GetData(0, %s);\n" %
        (self.type, self.name, self.GetReservedSizeId()))

  def WriteValidationCode(self, f, func):
    """Overridden from Argument; bucket presence is validated elsewhere."""
    pass

  def GetImmediateVersion(self):
    """Overridden from Argument."""
    return None

  def WriteDestinationInitalizationValidation(self, f, func):
    """Overridden from Argument."""
    self.WriteDestinationInitalizationValidatationIfNeeded(f, func)

  def GetLogArg(self):
    """Overridden from Argument."""
    return "static_cast<const void*>(%s)" % self.name
class InputStringBucketArgument(Argument):
  """A string input argument where the string is passed in a bucket.

  The wire argument is the bucket id, hence the "_bucket_id" suffix.
  """

  def __init__(self, name, _type):
    Argument.__init__(self, name + "_bucket_id", "uint32_t")

  def IsPointer(self):
    """Overridden from Argument."""
    return True

  def IsPointer2D(self):
    """Overridden from Argument."""
    return False
class InputStringArrayBucketArgument(Argument):
  """A string array input argument where the strings are passed in a bucket."""

  def __init__(self, name, _type):
    Argument.__init__(self, name + "_bucket_id", "uint32_t")
    # Keep the GL-facing name; self.name now carries the _bucket_id suffix.
    self._original_name = name

  def WriteGetCode(self, f):
    """Overridden from Argument; unpacks count/strings/lengths from bucket."""
    code = """
 Bucket* bucket = GetBucket(c.%(name)s);
 if (!bucket) {
 return error::kInvalidArguments;
 }
 GLsizei count = 0;
 std::vector<char*> strs;
 std::vector<GLint> len;
 if (!bucket->GetAsStrings(&count, &strs, &len)) {
 return error::kInvalidArguments;
 }
 const char** %(original_name)s =
 strs.size() > 0 ? const_cast<const char**>(&strs[0]) : nullptr;
 const GLint* length =
 len.size() > 0 ? const_cast<const GLint*>(&len[0]) : nullptr;
 (void)length;
"""
    f.write(code % {
        'name': self.name,
        'original_name': self._original_name,
    })

  def GetValidArg(self, func):
    return "kNameBucketId"

  def GetValidGLArg(self, func):
    return "_"

  def IsPointer(self):
    """Overridden from Argument."""
    return True

  def IsPointer2D(self):
    """Overridden from Argument."""
    return True
class ResourceIdArgument(Argument):
  """A class that represents a resource id argument to a function.

  The declared type "GLid<Resource>" is rewritten to the wire type
  (GLsync for Sync, GLuint otherwise); the resource name is kept.
  """

  def __init__(self, name, arg_type, arg_default):
    # Raw string: "\w" in a plain literal is an invalid escape sequence
    # under Python 3.
    match = re.match(r"(GLid\w+)", arg_type)
    self.resource_type = match.group(1)[4:]  # strip the "GLid" prefix
    if self.resource_type == "Sync":
      arg_type = arg_type.replace(match.group(1), "GLsync")
    else:
      arg_type = arg_type.replace(match.group(1), "GLuint")
    Argument.__init__(self, name, arg_type, arg_default)

  def WriteGetCode(self, f):
    """Overridden from Argument; GLsync travels as a GLuint handle."""
    if self.type == "GLsync":
      my_type = "GLuint"
    else:
      my_type = self.type
    f.write(" %s %s = %s;\n" % (my_type, self.name, self.GetArgAccessor('c')))

  def GetValidArg(self, func):
    return "client_%s_id_" % self.resource_type.lower()

  def GetValidGLArg(self, func):
    if self.resource_type == "Sync":
      return "reinterpret_cast<GLsync>(kService%sId)" % self.resource_type
    return "kService%sId" % self.resource_type
class ResourceIdBindArgument(Argument):
  """Represents a resource id argument to a bind function."""

  def __init__(self, name, arg_type, arg_default):
    # Raw string: "\w" in a plain literal is an invalid escape sequence
    # under Python 3.
    match = re.match(r"(GLidBind\w+)", arg_type)
    self.resource_type = match.group(1)[8:]  # strip the "GLidBind" prefix
    arg_type = arg_type.replace(match.group(1), "GLuint")
    Argument.__init__(self, name, arg_type, arg_default)

  def WriteGetCode(self, f):
    """Overridden from Argument."""
    code = """ %(type)s %(name)s = c.%(name)s;
"""
    f.write(code % {'type': self.type, 'name': self.name})

  def GetValidArg(self, func):
    return "client_%s_id_" % self.resource_type.lower()

  def GetValidGLArg(self, func):
    return "kService%sId" % self.resource_type
class ResourceIdZeroArgument(Argument):
  """Represents a resource id argument to a function that can be zero."""

  def __init__(self, name, arg_type, arg_default):
    # Raw string: "\w" in a plain literal is an invalid escape sequence
    # under Python 3.
    match = re.match(r"(GLidZero\w+)", arg_type)
    self.resource_type = match.group(1)[8:]  # strip the "GLidZero" prefix
    arg_type = arg_type.replace(match.group(1), "GLuint")
    Argument.__init__(self, name, arg_type, arg_default)

  def WriteGetCode(self, f):
    """Overridden from Argument."""
    f.write(" %s %s = %s;\n" % (self.type, self.name,
                                self.GetArgAccessor('c')))

  def GetValidArg(self, func):
    return "client_%s_id_" % self.resource_type.lower()

  def GetValidGLArg(self, func):
    return "kService%sId" % self.resource_type

  def GetNumInvalidValues(self, _func):
    """Returns the number of invalid values to be tested."""
    return 1

  def GetInvalidArg(self, _index):
    """Returns an invalid value by index."""
    return ("kInvalidClientId", "kNoError", "GL_INVALID_VALUE")
class Int64Argument(Argument):
  """Represents a GLuint64 argument which splits up into 2 uint32_t items.

  The command struct stores <name>_0 and <name>_1; a generated accessor
  recombines them via MapTwoUint32ToUint64.
  """

  def __init__(self, name, arg_type, arg_default):
    Argument.__init__(self, name, arg_type, arg_default)

  def GetArgAccessor(self, cmd_struct_name):
    # The value is reassembled by the generated accessor method, hence "()".
    return "%s.%s()" % (cmd_struct_name, self.name)

  def WriteArgAccessor(self, f):
    """Writes specialized accessor recombining the two 32-bit halves."""
    f.write(" %s %s() const volatile {\n" % (self.type, self.name))
    f.write(" return static_cast<%s>(\n" % self.type)
    f.write(" %sGLES2Util::MapTwoUint32ToUint64(\n" % _Namespace())
    f.write(" %s_0,\n" % self.name)
    f.write(" %s_1));\n" % self.name)
    f.write(" }\n")
    f.write("\n")

  def WriteGetCode(self, f):
    """Writes the code to get an argument from a command structure."""
    f.write(" %s %s = c.%s();\n" % (self.type, self.name, self.name))

  def WriteSetCode(self, f, indent, var):
    # Splits the 64-bit value into the two 32-bit struct fields.
    indent_str = ' ' * indent
    f.write("%s%sGLES2Util::MapUint64ToTwoUint32(static_cast<uint64_t>(%s),\n" %
            (indent_str, _Namespace(), var))
    f.write("%s &%s_0,\n" %
            (indent_str, self.name))
    f.write("%s &%s_1);\n" %
            (indent_str, self.name))
class Function(object):
"""A class that represents a function."""
  def __init__(self, name, info, named_type_info, type_handlers):
    """Builds a Function from its parsed function-info table entry.

    Args:
      name: command buffer name of the function.
      info: dict of metadata for this function.
      named_type_info: named type definitions used when creating arguments.
      type_handlers: map from the info 'type' key to its TypeHandler.
    """
    self.name = name
    self.named_type_info = named_type_info
    self.prefixed_name = info['prefixed_name']
    self.original_name = info['original_name']
    self.original_args = self.ParseArgs(info['original_args'])
    # Command args may differ from the GL signature; default to a copy of
    # the original args.
    if 'cmd_args' in info:
      self.args_for_cmds = self.ParseArgs(info['cmd_args'])
    else:
      self.args_for_cmds = self.original_args[:]
    self.passthrough_service_doer_args = self.original_args[:]
    if 'size_args' in info:
      self.size_args = info['size_args']
    else:
      self.size_args = {}
    self.return_type = info['return_type']
    if self.return_type != 'void':
      self.return_arg = CreateArg(info['return_type'] + " result",
                                  named_type_info)
    else:
      self.return_arg = None
    # Pointer arguments need special transfer handling, so count them and
    # remember the last pointer in the original signature.
    self.num_pointer_args = sum(
        [1 for arg in self.args_for_cmds if arg.IsPointer()])
    if self.num_pointer_args > 0:
      for arg in reversed(self.original_args):
        if arg.IsPointer():
          self.last_original_pointer_arg = arg
          break
    else:
      self.last_original_pointer_arg = None
    self.info = info
    self.type_handler = type_handlers[info['type']]
    self.can_auto_generate = (self.num_pointer_args == 0 and
                              info['return_type'] == "void")
    # Satisfy pylint warning attribute-defined-outside-init.
    #
    # self.cmd_args is typically set in InitFunction, but that method may be
    # overriden.
    self.cmd_args = []
    self.InitFunction()
def ParseArgs(self, arg_string):
"""Parses a function arg string."""
args = []
parts = arg_string.split(',')
for arg_string in parts:
arg = CreateArg(arg_string, self.named_type_info)
if arg:
args.append(arg)
return args
def IsType(self, type_name):
"""Returns true if function is a certain type."""
return self.info['type'] == type_name
  def InitFunction(self):
    """Creates command args and calls the init function for the type handler.

    Creates argument lists for command buffer commands, eg. self.cmd_args and
    self.init_args.
    Calls the type function initialization.
    Override to create different kind of command buffer command argument lists.
    """
    self.cmd_args = []
    for arg in self.args_for_cmds:
      arg.AddCmdArgs(self.cmd_args)
    self.init_args = []
    for arg in self.args_for_cmds:
      arg.AddInitArgs(self.init_args)
    # Non-void functions carry an extra result argument for Init/Set.
    if self.return_arg:
      self.init_args.append(self.return_arg)
    self.type_handler.InitFunction(self)
  def IsImmediate(self):
    """Returns whether the function is an immediate-data function or not."""
    return False
  def IsES3(self):
    """Returns whether the function requires an ES3 context or not."""
    return self.GetInfo('es3', False)
  def IsES31(self):
    """Returns whether the function requires an ES31 context or not."""
    return self.GetInfo('es31', False)
def GetInfo(self, name, default = None):
"""Returns a value from the function info for this function."""
if name in self.info:
return self.info[name]
return default
  def GetValidArg(self, arg):
    """Gets a valid argument value for the parameter arg from the function info
    if one exists.

    Returns None when the argument is not in the original args or no
    'valid_args' override is recorded for its index.
    """
    try:
      index = self.GetOriginalArgs().index(arg)
    except ValueError:
      return None
    valid_args = self.GetInfo('valid_args')
    # valid_args maps stringified argument indices to override expressions.
    if valid_args and str(index) in valid_args:
      return valid_args[str(index)]
    return None
  def AddInfo(self, name, value):
    """Adds (or overwrites) an entry in the function info."""
    self.info[name] = value
  def IsExtension(self):
    """Returns whether this function comes from a GL extension."""
    return self.GetInfo('extension') or self.GetInfo('extension_flag')
  def IsCoreGLFunction(self):
    """Returns whether this is a core (non-extension, ES2) GL function."""
    return (not self.IsExtension() and
            not self.GetInfo('pepper_interface') and
            not self.IsES3() and
            not self.IsES31())
  def InPepperInterface(self, interface):
    """Returns whether this function belongs to the given Pepper interface."""
    ext = self.GetInfo('pepper_interface')
    # The unnamed interface holds the core GL functions.
    if not interface.GetName():
      return self.IsCoreGLFunction()
    return ext == interface.GetName()
  def InAnyPepperExtension(self):
    """Returns whether this function is exposed through any Pepper interface."""
    return self.IsCoreGLFunction() or self.GetInfo('pepper_interface')
def GetErrorReturnString(self):
if self.GetInfo("error_return"):
return self.GetInfo("error_return")
elif self.return_type == "GLboolean":
return "GL_FALSE"
elif "*" in self.return_type:
return "nullptr"
return "0"
  def GetGLFunctionName(self):
    """Gets the function to call to execute GL for this command."""
    # An explicit decoder_func overrides the default api()->gl...Fn call.
    if self.GetInfo('decoder_func'):
      return self.GetInfo('decoder_func')
    return "api()->gl%sFn" % self.original_name
def GetGLTestFunctionName(self):
gl_func_name = self.GetInfo('gl_test_func')
if gl_func_name == None:
gl_func_name = self.GetGLFunctionName()
if gl_func_name.startswith("gl"):
gl_func_name = gl_func_name[2:]
else:
gl_func_name = self.original_name
return gl_func_name
  def GetDataTransferMethods(self):
    """Returns how pointer data is transferred (immediate vs shared memory)."""
    # Single-pointer functions default to immediate data; others use shm.
    return self.GetInfo('data_transfer_methods',
                        ['immediate' if self.num_pointer_args == 1 else 'shm'])
  def AddCmdArg(self, arg):
    """Adds a cmd argument to this function."""
    self.cmd_args.append(arg)
  def GetCmdArgs(self):
    """Gets the command args for this function."""
    return self.cmd_args
  def ClearCmdArgs(self):
    """Clears the command args for this function."""
    self.cmd_args = []
  def GetCmdConstants(self):
    """Gets the constant (single-valid-value) arguments for this function."""
    return [arg for arg in self.args_for_cmds if arg.IsConstant()]
  def GetInitArgs(self):
    """Gets the init args for this function."""
    return self.init_args
  def GetOriginalArgs(self):
    """Gets the original arguments to this function."""
    return self.original_args
  def GetPassthroughServiceDoerArgs(self):
    """Gets the arguments passed to the passthrough service doer function."""
    return self.passthrough_service_doer_args
def GetLastOriginalArg(self):
"""Gets the last original argument to this function."""
return self.original_args[len(self.original_args) - 1]
  def GetLastOriginalPointerArg(self):
    """Gets the last pointer argument of the original signature, or None."""
    return self.last_original_pointer_arg
def GetResourceIdArg(self):
for arg in self.original_args:
if hasattr(arg, 'resource_type'):
return arg
return None
def _MaybePrependComma(self, arg_string, add_comma):
"""Adds a comma if arg_string is not empty and add_comma is true."""
comma = ""
if add_comma and len(arg_string):
comma = ", "
return "%s%s" % (comma, arg_string)
  def MakeTypedOriginalArgString(self, prefix, add_comma = False,
                                 add_default = False):
    """Gets a list of arguments as they are in GL.

    Each item is "type prefixname"; add_default appends "= default" when
    the argument carries one.
    """
    args = self.GetOriginalArgs()
    def ArgToString(arg):
      tmp = [arg.type, prefix + arg.name]
      if add_default and arg.default:
        tmp.append("=")
        tmp.append(arg.default)
      return " ".join(tmp)
    arg_string = ", ".join([ArgToString(arg) for arg in args])
    return self._MaybePrependComma(arg_string, add_comma)
def MakeOriginalArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets the list of arguments as they are in GL."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name) for arg in args])
return self._MaybePrependComma(arg_string, add_comma)
  def MakePassthroughServiceDoerArgString(self, prefix, add_comma = False,
                                          separator = ", "):
    """Gets the list of arguments as they are in used by the passthrough
    service doer function."""
    args = self.GetPassthroughServiceDoerArgs()
    arg_string = separator.join(
        ["%s%s" % (prefix, arg.name) for arg in args])
    return self._MaybePrependComma(arg_string, add_comma)
def MakeHelperArgString(self, prefix, add_comma = False, separator = ", "):
"""Gets a list of GL arguments after removing unneeded arguments."""
args = self.GetOriginalArgs()
arg_string = separator.join(
["%s%s" % (prefix, arg.name)
for arg in args if not arg.IsConstant()])
return self._MaybePrependComma(arg_string, add_comma)
  def MakeTypedPepperArgString(self, prefix):
    """Gets a list of arguments as they need to be for Pepper."""
    # A 'pepper_args' override in the info table wins over the GL signature.
    if self.GetInfo("pepper_args"):
      return self.GetInfo("pepper_args")
    else:
      return self.MakeTypedOriginalArgString(prefix, False)
def MapCTypeToPepperIdlType(self, ctype, is_for_return_type=False):
"""Converts a C type name to the corresponding Pepper IDL type."""
idltype = {
'char*': '[out] str_t',
'const GLchar* const*': '[out] cstr_t',
'const char*': 'cstr_t',
'const void*': 'mem_t',
'void*': '[out] mem_t',
'void**': '[out] mem_ptr_t',
}.get(ctype, ctype)
# We use "GLxxx_ptr_t" for "GLxxx*".
matched = re.match(r'(const )?(GL\w+)\*$', ctype)
if matched:
idltype = matched.group(2) + '_ptr_t'
if not matched.group(1):
idltype = '[out] ' + idltype
# If an in/out specifier is not specified yet, prepend [in].
if idltype[0] != '[':
idltype = '[in] ' + idltype
# Strip the in/out specifier for a return type.
if is_for_return_type:
idltype = re.sub(r'\[\w+\] ', '', idltype)
return idltype
  def MakeTypedPepperIdlArgStrings(self):
    """Gets a list of arguments as they need to be for Pepper IDL."""
    args = self.GetOriginalArgs()
    return ["%s %s" % (self.MapCTypeToPepperIdlType(arg.type), arg.name)
            for arg in args]
def GetPepperName(self):
if self.GetInfo("pepper_name"):
return self.GetInfo("pepper_name")
return self.name
  def MakeTypedCmdArgString(self, prefix, add_comma = False):
    """Gets a typed list of arguments as they need to be for command buffers."""
    args = self.GetCmdArgs()
    arg_string = ", ".join(
        ["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
    return self._MaybePrependComma(arg_string, add_comma)
  def MakeCmdArgString(self, prefix, add_comma = False):
    """Gets the list of arguments as they need to be for command buffers."""
    args = self.GetCmdArgs()
    arg_string = ", ".join(
        ["%s%s" % (prefix, arg.name) for arg in args])
    return self._MaybePrependComma(arg_string, add_comma)
  def MakeTypedInitString(self, prefix, add_comma = False):
    """Gets a typed list of arguments as they need to be for cmd Init/Set."""
    args = self.GetInitArgs()
    arg_string = ", ".join(
        ["%s %s%s" % (arg.type, prefix, arg.name) for arg in args])
    return self._MaybePrependComma(arg_string, add_comma)
  def MakeInitString(self, prefix, add_comma = False):
    """Gets the list of arguments as they need to be for cmd Init/Set."""
    args = self.GetInitArgs()
    arg_string = ", ".join(
        ["%s%s" % (prefix, arg.name) for arg in args])
    return self._MaybePrependComma(arg_string, add_comma)
  def MakeLogArgString(self):
    """Makes a string of the arguments for the LOG macros."""
    args = self.GetOriginalArgs()
    return ' << ", " << '.join([arg.GetLogArg() for arg in args])
  def WriteHandlerValidation(self, f):
    """Writes validation code for the function.

    Emits per-argument validation first, then function-level validation.
    """
    for arg in self.GetOriginalArgs():
      arg.WriteValidationCode(f, self)
    self.WriteValidationCode(f)
  def WriteQueueTraceEvent(self, f):
    """Writes the dequeue side of the command-queue flow-trace event."""
    if self.GetInfo("trace_queueing_flow", False):
      trace = 'TRACE_DISABLED_BY_DEFAULT("gpu_cmd_queue")'
      f.write("""if (c.trace_id) {
 TRACE_EVENT_WITH_FLOW0(%s, "CommandBufferQueue",
 c.trace_id, TRACE_EVENT_FLAG_FLOW_IN);\n}""" % trace)
  def WritePassthroughHandlerValidation(self, f):
    """Writes per-argument validation code for the passthrough handler."""
    for arg in self.GetOriginalArgs():
      arg.WritePassthroughValidationCode(f, self)
  def WriteHandlerImplementation(self, f):
    """Writes the handler implementation for this command via type handler."""
    self.type_handler.WriteHandlerImplementation(self, f)
  def WriteValidationCode(self, f):
    """Writes function-level validation code; no-op by default."""
    pass
def WriteCmdFlag(self, f):
"""Writes the cmd cmd_flags constant."""
# By default trace only at the highest level 3.
trace_level = int(self.GetInfo('trace_level', default = 3))
if trace_level not in xrange(0, 4):
raise KeyError("Unhandled trace_level: %d" % trace_level)
cmd_flags = ('CMD_FLAG_SET_TRACE_LEVEL(%d)' % trace_level)
f.write(" static const uint8_t cmd_flags = %s;\n" % cmd_flags)
  def WriteCmdArgFlag(self, f):
    """Writes the cmd kArgFlags constant (fixed-size command by default)."""
    f.write(" static const cmd::ArgFlags kArgFlags = cmd::kFixed;\n")
  def WriteCmdComputeSize(self, f):
    """Writes the ComputeSize function for the command."""
    f.write(" static uint32_t ComputeSize() {\n")
    f.write(
        " return static_cast<uint32_t>(sizeof(ValueType)); // NOLINT\n")
    f.write(" }\n")
    f.write("\n")
  def WriteCmdSetHeader(self, f):
    """Writes the cmd's SetHeader function."""
    f.write(" void SetHeader() {\n")
    f.write(" header.SetCmd<ValueType>();\n")
    f.write(" }\n")
    f.write("\n")
  def WriteCmdInit(self, f):
    """Writes the cmd's Init function."""
    f.write(" void Init(%s) {\n" % self.MakeTypedCmdArgString("_"))
    f.write(" SetHeader();\n")
    args = self.GetCmdArgs()
    for arg in args:
      arg.WriteSetCode(f, 4, '_%s' % arg.name)
    # Optionally stamp a flow-trace id so queueing latency can be tracked.
    if self.GetInfo("trace_queueing_flow", False):
      trace = 'TRACE_DISABLED_BY_DEFAULT("gpu_cmd_queue")'
      f.write('bool is_tracing = false;')
      f.write('TRACE_EVENT_CATEGORY_GROUP_ENABLED(%s, &is_tracing);' % trace)
      f.write('if (is_tracing) {')
      f.write(' trace_id = base::RandUint64();')
      f.write('TRACE_EVENT_WITH_FLOW1(%s, "CommandBufferQueue",' % trace)
      f.write('trace_id, TRACE_EVENT_FLAG_FLOW_OUT,')
      f.write('"command", "%s");' % self.name)
      f.write('} else {\n trace_id = 0;\n}\n');
    f.write("}\n")
    f.write("\n")
  def WriteCmdSet(self, f):
    """Writes the cmd's Set function."""
    copy_args = self.MakeCmdArgString("_", False)
    f.write(" void* Set(void* cmd%s) {\n" %
            self.MakeTypedCmdArgString("_", True))
    f.write(" static_cast<ValueType*>(cmd)->Init(%s);\n" % copy_args)
    f.write(" return NextCmdAddress<ValueType>(cmd);\n")
    f.write(" }\n")
    f.write("\n")
  def WriteArgAccessors(self, f):
    """Writes the cmd's accessor functions (one per command argument)."""
    for arg in self.GetCmdArgs():
      arg.WriteArgAccessor(f)

  # The methods below simply dispatch to the per-type handler, which knows
  # how to emit each flavor of generated code for this function.

  def WriteStruct(self, f):
    self.type_handler.WriteStruct(self, f)

  def WriteDocs(self, f):
    self.type_handler.WriteDocs(self, f)

  def WriteCmdHelper(self, f):
    """Writes the cmd's helper."""
    self.type_handler.WriteCmdHelper(self, f)

  def WriteServiceImplementation(self, f):
    """Writes the service implementation for a command."""
    self.type_handler.WriteServiceImplementation(self, f)

  def WritePassthroughServiceImplementation(self, f):
    """Writes the passthrough service implementation for a command."""
    self.type_handler.WritePassthroughServiceImplementation(self, f)

  def WriteServiceUnitTest(self, f, *extras):
    """Writes the service unit test for a command."""
    self.type_handler.WriteServiceUnitTest(self, f, *extras)

  def WriteGLES2CLibImplementation(self, f):
    """Writes the GLES2 C Lib Implementation."""
    self.type_handler.WriteGLES2CLibImplementation(self, f)

  def WriteGLES2InterfaceHeader(self, f):
    """Writes the GLES2 Interface declaration."""
    self.type_handler.WriteGLES2InterfaceHeader(self, f)

  def WriteGLES2InterfaceStub(self, f):
    """Writes the GLES2 Interface Stub declaration."""
    self.type_handler.WriteGLES2InterfaceStub(self, f)

  def WriteGLES2InterfaceStubImpl(self, f):
    """Writes the GLES2 Interface Stub definition."""
    self.type_handler.WriteGLES2InterfaceStubImpl(self, f)

  def WriteGLES2ImplementationHeader(self, f):
    """Writes the GLES2 Implementation declaration."""
    self.type_handler.WriteGLES2ImplementationHeader(self, f)

  def WriteGLES2Implementation(self, f):
    """Writes the GLES2 Implementation definition."""
    self.type_handler.WriteGLES2Implementation(self, f)

  def WriteGLES2TraceImplementationHeader(self, f):
    """Writes the GLES2 Trace Implementation declaration."""
    self.type_handler.WriteGLES2TraceImplementationHeader(self, f)

  def WriteGLES2TraceImplementation(self, f):
    """Writes the GLES2 Trace Implementation definition."""
    self.type_handler.WriteGLES2TraceImplementation(self, f)

  def WriteGLES2Header(self, f):
    """Writes the GLES2 header entry for this function."""
    self.type_handler.WriteGLES2Header(self, f)

  def WriteGLES2ImplementationUnitTest(self, f):
    """Writes the GLES2 Implementation unit test."""
    self.type_handler.WriteGLES2ImplementationUnitTest(self, f)

  def WriteDestinationInitalizationValidation(self, f):
    """Writes the client side destination initialization validation."""
    # NOTE: "Initalization" is misspelled in the method name, but it is part
    # of the public interface and cannot be renamed here.
    self.type_handler.WriteDestinationInitalizationValidation(self, f)

  def WriteFormatTest(self, f):
    """Writes the cmd's format test."""
    self.type_handler.WriteFormatTest(self, f)
class PepperInterface(object):
  """Represents one Pepper (PPB_OpenGLES2*) interface variant."""

  def __init__(self, info):
    # |info| supplies "name" (may be empty for the base interface) and
    # "dev" (whether this is a _Dev interface).
    self.name = info["name"]
    self.dev = info["dev"]

  def GetName(self):
    """Returns the interface's short name (may be an empty string)."""
    return self.name

  def GetInterfaceName(self):
    """Returns the C macro name for this interface."""
    name_part = "_" + self.name.upper() if self.name else ""
    dev_part = "_DEV" if self.dev else ""
    return "PPB_OPENGLES2%s%s_INTERFACE" % (name_part, dev_part)

  def GetStructName(self):
    """Returns the C struct name for this interface."""
    dev_part = "_Dev" if self.dev else ""
    return "PPB_OpenGLES2%s%s" % (self.name, dev_part)
class ImmediateFunction(Function):
  """A class that represents an immediate function command."""

  def __init__(self, func, type_handlers):
    Function.__init__(
        self,
        "%sImmediate" % func.name,
        func.info,
        func.named_type_info,
        type_handlers)

  def InitFunction(self):
    """Swaps each argument for its immediate version (dropping arguments
    that have none), then runs the base-class initialization."""
    self.original_args = [
        immediate for immediate in
        (arg.GetImmediateVersion() for arg in self.original_args)
        if immediate]
    self.args_for_cmds = [
        immediate for immediate in
        (arg.GetImmediateVersion() for arg in self.args_for_cmds)
        if immediate]
    Function.InitFunction(self)

  def IsImmediate(self):
    """Overridden from Function; immediate variants report True."""
    return True

  def WriteServiceImplementation(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateServiceImplementation(self, f)

  def WritePassthroughServiceImplementation(self, f):
    """Overridden from Function."""
    self.type_handler.WritePassthroughImmediateServiceImplementation(self, f)

  def WriteHandlerImplementation(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateHandlerImplementation(self, f)

  def WriteServiceUnitTest(self, f, *extras):
    """Writes the service unit test for an immediate command."""
    self.type_handler.WriteImmediateServiceUnitTest(self, f, *extras)

  def WriteValidationCode(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateValidationCode(self, f)

  def WriteCmdArgFlag(self, f):
    """Overridden from Function; immediate commands are at-least-N sized."""
    f.write(" static const cmd::ArgFlags kArgFlags = cmd::kAtLeastN;\n")

  def WriteCmdComputeSize(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateCmdComputeSize(self, f)

  def WriteCmdSetHeader(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateCmdSetHeader(self, f)

  def WriteCmdInit(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateCmdInit(self, f)

  def WriteCmdSet(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateCmdSet(self, f)

  def WriteCmdHelper(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateCmdHelper(self, f)

  def WriteFormatTest(self, f):
    """Overridden from Function."""
    self.type_handler.WriteImmediateFormatTest(self, f)
class BucketFunction(Function):
  """A class that represents a bucket version of a function command."""

  def __init__(self, func, type_handlers):
    Function.__init__(
        self,
        "%sBucket" % func.name,
        func.info,
        func.named_type_info,
        type_handlers)

  def InitFunction(self):
    """Swaps each argument for its bucket version (dropping arguments that
    have none), then runs the base-class initialization."""
    self.original_args = [
        bucket_arg for bucket_arg in
        (arg.GetBucketVersion() for arg in self.original_args)
        if bucket_arg]
    self.args_for_cmds = [
        bucket_arg for bucket_arg in
        (arg.GetBucketVersion() for arg in self.args_for_cmds)
        if bucket_arg]
    Function.InitFunction(self)

  def WriteServiceImplementation(self, f):
    """Overridden from Function."""
    self.type_handler.WriteBucketServiceImplementation(self, f)

  def WritePassthroughServiceImplementation(self, f):
    """Overridden from Function."""
    self.type_handler.WritePassthroughBucketServiceImplementation(self, f)

  def WriteHandlerImplementation(self, f):
    """Overridden from Function."""
    self.type_handler.WriteBucketHandlerImplementation(self, f)

  def WriteServiceUnitTest(self, f, *extras):
    """Overridden from Function."""
    self.type_handler.WriteBucketServiceUnitTest(self, f, *extras)

  def MakeOriginalArgString(self, prefix, add_comma=False, separator=", "):
    """Overridden from Function; strips a trailing "_bucket_id" from each
    argument name before joining."""
    suffix = "_bucket_id"
    pieces = []
    for arg in self.GetOriginalArgs():
      name = arg.name
      if name.endswith(suffix):
        name = name[:-len(suffix)]
      pieces.append("%s%s" % (prefix, name))
    arg_string = separator.join(pieces)
    return super(BucketFunction, self)._MaybePrependComma(arg_string, add_comma)
def CreateArg(arg_string, named_type_info):
  """Convert string argument to an Argument class that represents it.

  The parameter 'arg_string' can be a single argument to a GL function,
  something like 'GLsizei width' or 'const GLenum* bufs'. Returns an instance of
  the Argument class, or None if 'arg_string' is 'void'.
  """
  if arg_string == 'void':
    return None
  arg_string = arg_string.strip()
  arg_default = None
  # An argument may carry a default value, e.g. "GLint level = 0".
  if '=' in arg_string:
    arg_string, arg_default = arg_string.split('=')
    arg_default = arg_default.strip()
  arg_parts = arg_string.split()
  assert len(arg_parts) > 1
  arg_name = arg_parts[-1]
  arg_type = " ".join(arg_parts[0:-1])
  t = arg_parts[0] # only the first part of arg_type
  # NOTE: dispatch order below is significant — the pointer check must come
  # first, and the "GLenum"/"GLbitfield"/"GLint" prefix checks deliberately
  # exclude the bare type name so only named subtypes get special handling.
  # Is this a pointer argument?
  if arg_string.find('*') >= 0:
    return PointerArgument(arg_name, arg_type, arg_default)
  elif t.startswith('EnumClass'):
    return EnumClassArgument(arg_name, arg_type, named_type_info, arg_default)
  # Is this a resource argument? Must come after pointer check.
  elif t.startswith('GLidBind'):
    return ResourceIdBindArgument(arg_name, arg_type, arg_default)
  elif t.startswith('GLidZero'):
    return ResourceIdZeroArgument(arg_name, arg_type, arg_default)
  elif t.startswith('GLid'):
    return ResourceIdArgument(arg_name, arg_type, arg_default)
  elif t.startswith('GLenum') and t !='GLenum':
    return EnumArgument(arg_name, arg_type, named_type_info, arg_default)
  elif t.startswith('GLbitfield') and t != 'GLbitfield':
    return BitFieldArgument(arg_name, arg_type, named_type_info, arg_default)
  elif t.startswith('GLboolean'):
    return GLBooleanArgument(arg_name, arg_type, arg_default)
  elif t.startswith('GLintUniformLocation'):
    return UniformLocationArgument(arg_name, arg_default)
  elif (t.startswith('GLint') and t != 'GLint' and
        not t.startswith('GLintptr')):
    return IntArgument(arg_name, arg_type, named_type_info, arg_default)
  elif t == 'bool':
    return BoolArgument(arg_name, arg_type, arg_default)
  elif t == 'GLsizeiNotNegative' or t == 'GLintptrNotNegative':
    # The emitted argument drops the "NotNegative" marker from the type.
    return SizeNotNegativeArgument(arg_name, t.replace('NotNegative', ''),
                                   arg_default)
  elif t.startswith('GLsize'):
    return SizeArgument(arg_name, arg_type, arg_default)
  elif t == 'GLuint64' or t == 'GLint64':
    return Int64Argument(arg_name, arg_type, arg_default)
  else:
    return Argument(arg_name, arg_type, arg_default)
class GLGenerator(object):
  """A class to generate GL command buffers."""

  # NOTE(review): r'^\w*$' matches empty lines and word-only lines, not
  # whitespace runs; presumably r'^\s*$' was intended — confirm before
  # changing, as word-only lines are currently skipped rather than errored.
  _whitespace_re = re.compile(r'^\w*$')
  # Whole-line C++-style comments in the function list file.
  _comment_re = re.compile(r'^//.*$')
  # Captures: (1) return type, (2) prefixed function name, (3) argument list.
  _function_re = re.compile(r'^GL_APICALL(.*?)GL_APIENTRY (.*?) \((.*?)\);$')
  def __init__(self, verbose, year, function_info, named_type_info,
               chromium_root_dir):
    """Initializes the generator.

    Args:
      verbose: when true, Log() prints progress messages.
      year: copyright year forwarded to the file writers.
      function_info: per-function info dicts keyed by GL function name.
      named_type_info: named type info forwarded to Function/Argument.
      chromium_root_dir: root used to resolve input file paths.
    """
    self.original_functions = []   # functions as parsed from the input file
    self.functions = []            # expanded command list (incl. variants)
    self.chromium_root_dir = chromium_root_dir
    self.verbose = verbose
    self.year = year
    self.errors = 0                # bumped by Error()
    self.pepper_interfaces = []
    self.interface_info = {}
    self.generated_cpp_filenames = []
    self.function_info = function_info
    self.named_type_info = named_type_info
    self.capability_flags = _CAPABILITY_FLAGS
    # Maps the 'type' field of a function's info to the handler that knows
    # how to emit code for that command flavor.
    self.type_handlers = {
      '': TypeHandler(),
      'Bind': BindHandler(),
      'Create': CreateHandler(),
      'Custom': CustomHandler(),
      'Data': DataHandler(),
      'Delete': DeleteHandler(),
      'DELn': DELnHandler(),
      'GENn': GENnHandler(),
      'GETn': GETnHandler(),
      'GLchar': GLcharHandler(),
      'GLcharN': GLcharNHandler(),
      'Is': IsHandler(),
      'NoCommand': NoCommandHandler(),
      'PUT': PUTHandler(),
      'PUTn': PUTnHandler(),
      'PUTSTR': PUTSTRHandler(),
      'PUTXn': PUTXnHandler(),
      'StateSet': StateSetHandler(),
      'StateSetRGBAlpha': StateSetRGBAlphaHandler(),
      'StateSetFrontBack': StateSetFrontBackHandler(),
      'StateSetFrontBackSeparate':
      StateSetFrontBackSeparateHandler(),
      'StateSetNamedParameter': StateSetNamedParameter(),
      'STRn': STRnHandler(),
    }
    for interface in _PEPPER_INTERFACES:
      interface = PepperInterface(interface)
      self.pepper_interfaces.append(interface)
      self.interface_info[interface.GetName()] = interface
  def AddFunction(self, func):
    """Adds a function to the generator's expanded command list."""
    self.functions.append(func)
def GetFunctionInfo(self, name):
"""Gets a type info for the given function name."""
if name in self.function_info:
func_info = self.function_info[name].copy()
else:
func_info = {}
if not 'type' in func_info:
func_info['type'] = ''
return func_info
def Log(self, msg):
"""Prints something if verbose is true."""
if self.verbose:
print msg
def Error(self, msg):
"""Prints an error."""
print "Error: %s" % msg
self.errors += 1
def ParseGLH(self, filename):
"""Parses the cmd_buffer_functions.txt file and extracts the functions"""
filename = os.path.join(self.chromium_root_dir, filename)
with open(filename, "r") as f:
functions = f.read()
for line in functions.splitlines():
if self._whitespace_re.match(line) or self._comment_re.match(line):
continue
match = self._function_re.match(line)
if match:
prefixed_name = match.group(2)
func_name = prefixed_name[2:]
func_info = self.GetFunctionInfo(func_name)
if func_info['type'] == 'Noop':
continue
parsed_func_info = {
'prefixed_name': prefixed_name,
'original_name': func_name,
'original_args': match.group(3),
'return_type': match.group(1).strip(),
}
for k in parsed_func_info.keys():
if not k in func_info:
func_info[k] = parsed_func_info[k]
f = Function(func_name, func_info, self.named_type_info,
self.type_handlers)
if not f.GetInfo('internal'):
self.original_functions.append(f)
#for arg in f.GetOriginalArgs():
# if not isinstance(arg, EnumArgument) and arg.type == 'GLenum':
# self.Log("%s uses bare GLenum %s." % (func_name, arg.name))
func_type = f.GetInfo('type')
if func_type != 'NoCommand':
if f.type_handler.NeedsDataTransferFunction(f):
methods = f.GetDataTransferMethods()
if 'immediate' in methods:
self.AddFunction(ImmediateFunction(f, self.type_handlers))
if 'bucket' in methods:
self.AddFunction(BucketFunction(f, self.type_handlers))
if 'shm' in methods:
self.AddFunction(f)
else:
self.AddFunction(f)
else:
self.Error("Could not parse function: %s using regex: %s" %
(line, self._function_re.pattern))
self.Log("Auto Generated Functions : %d" %
len([f for f in self.functions if f.can_auto_generate or
(not f.IsType('') and not f.IsType('Custom') and
not f.IsType('Todo'))]))
funcs = [f for f in self.functions if not f.can_auto_generate and
(f.IsType('') or f.IsType('Custom') or f.IsType('Todo'))]
self.Log("Non Auto Generated Functions: %d" % len(funcs))
for f in funcs:
self.Log(" %-10s %-20s gl%s" % (f.info['type'], f.return_type, f.name))
  def WriteCommandIds(self, filename):
    """Writes the command id list / CommandId enum header."""
    with CHeaderWriter(filename, self.year) as f:
      f.write("#define %s_COMMAND_LIST(OP) \\\n" % _upper_prefix)
      # Command ids start at 256; lower ids are reserved for common commands.
      cmd_id = 256
      for func in self.functions:
        f.write(" %-60s /* %d */ \\\n" %
                ("OP(%s)" % func.name, cmd_id))
        cmd_id += 1
      f.write("\n")
      f.write("enum CommandId {\n")
      f.write(" kOneBeforeStartPoint = cmd::kLastCommonId, "
              "// All %s commands start after this.\n" % _prefix)
      f.write("#define %s_CMD_OP(name) k ## name,\n" % _upper_prefix)
      f.write(" %s_COMMAND_LIST(%s_CMD_OP)\n" %
              (_upper_prefix, _upper_prefix))
      f.write("#undef %s_CMD_OP\n" % _upper_prefix)
      f.write(" kNumCommands,\n")
      f.write(" kFirst%sCommand = kOneBeforeStartPoint + 1\n" % _prefix)
      f.write("};\n")
      f.write("\n")
    self.generated_cpp_filenames.append(filename)
  def WriteFormat(self, filename):
    """Writes the command buffer format (one struct per command)."""
    with CHeaderWriter(filename, self.year) as f:
      # Forward declaration of a few enums used in constant argument
      # to avoid including GL header files.
      enum_defines = {'GL_SCANOUT_CHROMIUM': '0x6000'}
      if 'FenceSync' in self.function_info:
        enum_defines['GL_SYNC_GPU_COMMANDS_COMPLETE'] = '0x9117'
      if 'ClientWaitSync' in self.function_info:
        enum_defines['GL_SYNC_FLUSH_COMMANDS_BIT'] = '0x00000001'
      f.write('\n')
      # NOTE(review): iterates the dict in its native order; under Python 2
      # this order is arbitrary, so emitted #define order may not be stable
      # across runs — consider sorted() if determinism matters.
      for enum in enum_defines:
        f.write("#define %s %s\n" % (enum, enum_defines[enum]))
      f.write('\n')
      for func in self.functions:
        func.WriteStruct(f)
      f.write("\n")
    self.generated_cpp_filenames.append(filename)
  def WriteDocs(self, filename):
    """Writes the command buffer doc version of the commands."""
    with CHeaderWriter(filename, self.year) as f:
      for func in self.functions:
        func.WriteDocs(f)
      f.write("\n")
    self.generated_cpp_filenames.append(filename)

  def WriteFormatTest(self, filename):
    """Writes the command buffer format test (one test per command)."""
    comment = ("// This file contains unit tests for %s commands\n"
               "// It is included by %s_cmd_format_test.cc\n\n" %
               (_lower_prefix, _lower_prefix))
    with CHeaderWriter(filename, self.year, comment) as f:
      for func in self.functions:
        func.WriteFormatTest(f)
    self.generated_cpp_filenames.append(filename)

  def WriteCmdHelperHeader(self, filename):
    """Writes the command helper header (one helper per command)."""
    with CHeaderWriter(filename, self.year) as f:
      for func in self.functions:
        func.WriteCmdHelper(f)
    self.generated_cpp_filenames.append(filename)
  def WriteServiceContextStateHeader(self, filename):
    """Writes the service context state header (included by context_state.h).

    Emits the EnableFlags struct, one field (plus cached_ copy) per state
    item, and the inline SetDeviceCapabilityState switch.
    """
    comment = "// It is included by context_state.h\n"
    with CHeaderWriter(filename, self.year, comment) as f:
      f.write("struct EnableFlags {\n")
      f.write(" EnableFlags();\n")
      for capability in self.capability_flags:
        f.write(" bool %s;\n" % capability['name'])
        f.write(" bool cached_%s;\n" % capability['name'])
      f.write("};\n\n")
      # Sorted so the emitted field order is deterministic.
      for state_name in sorted(_STATE_INFO.keys()):
        state = _STATE_INFO[state_name]
        for item in state['states']:
          if isinstance(item['default'], list):
            f.write("%s %s[%d];\n" % (item['type'], item['name'],
                                      len(item['default'])))
          else:
            f.write("%s %s;\n" % (item['type'], item['name']))
          if item.get('cached', False):
            if isinstance(item['default'], list):
              f.write("%s cached_%s[%d];\n" % (item['type'], item['name'],
                                               len(item['default'])))
            else:
              f.write("%s cached_%s;\n" % (item['type'], item['name']))
      f.write("\n")
      f.write("""
inline void SetDeviceCapabilityState(GLenum cap, bool enable) {
 switch (cap) {
""")
      for capability in self.capability_flags:
        f.write("""\
 case GL_%s:
""" % capability['name'].upper())
        f.write("""\
 if (enable_flags.cached_%(name)s == enable &&
 !ignore_cached_state)
 return;
 enable_flags.cached_%(name)s = enable;
 break;
""" % capability)
      f.write("""\
 default:
 NOTREACHED();
 return;
 }
 if (enable)
 api()->glEnableFn(cap);
 else
 api()->glDisableFn(cap);
}
""")
    self.generated_cpp_filenames.append(filename)
def WriteClientContextStateHeader(self, filename):
"""Writes the client context state header."""
comment = "// It is included by client_context_state.h\n"
with CHeaderWriter(filename, self.year, comment) as f:
f.write("struct EnableFlags {\n")
f.write(" EnableFlags();\n")
for capability in self.capability_flags:
if 'extension_flag' in capability:
continue
f.write(" bool %s;\n" % capability['name'])
f.write("};\n\n")
self.generated_cpp_filenames.append(filename)
  def WriteContextStateGetters(self, f, class_name):
    """Writes the state getters (GetStateAsGLint / GetStateAsGLfloat).

    Each getter reports how many values a pname has via *num_written and,
    when |params| is non-null, copies the converted values into it.
    """
    for gl_type in ["GLint", "GLfloat"]:
      f.write("""
bool %s::GetStateAs%s(
 GLenum pname, %s* params, GLsizei* num_written) const {
 switch (pname) {
""" % (class_name, gl_type, gl_type))
      # Sorted so the emitted case order is deterministic.
      for state_name in sorted(_STATE_INFO.keys()):
        state = _STATE_INFO[state_name]
        if 'enum' in state:
          # Single enum covering all of this state's values.
          f.write(" case %s:\n" % state['enum'])
          f.write(" *num_written = %d;\n" % len(state['states']))
          f.write(" if (params) {\n")
          for ndx,item in enumerate(state['states']):
            f.write(" params[%d] = static_cast<%s>(%s);\n" %
                    (ndx, gl_type, item['name']))
          f.write(" }\n")
          f.write(" return true;\n")
        else:
          # One case per state item.
          for item in state['states']:
            f.write(" case %s:\n" % item['enum'])
            if isinstance(item['default'], list):
              item_len = len(item['default'])
              f.write(" *num_written = %d;\n" % item_len)
              f.write(" if (params) {\n")
              if item['type'] == gl_type:
                # Same type: straight memcpy.
                f.write(" memcpy(params, %s, sizeof(%s) * %d);\n" %
                        (item['name'], item['type'], item_len))
              else:
                # Different type: element-wise conversion loop.
                f.write(" for (size_t i = 0; i < %s; ++i) {\n" %
                        item_len)
                f.write(" params[i] = %s;\n" %
                        (GetGLGetTypeConversion(gl_type, item['type'],
                                                "%s[i]" % item['name'])))
                f.write(" }\n");
            else:
              f.write(" *num_written = 1;\n")
              f.write(" if (params) {\n")
              f.write(" params[0] = %s;\n" %
                      (GetGLGetTypeConversion(gl_type, item['type'],
                                              item['name'])))
              f.write(" }\n")
            f.write(" return true;\n")
      for capability in self.capability_flags:
        f.write(" case GL_%s:\n" % capability['name'].upper())
        f.write(" *num_written = 1;\n")
        f.write(" if (params) {\n")
        f.write(
            " params[0] = static_cast<%s>(enable_flags.%s);\n" %
            (gl_type, capability['name']))
        f.write(" }\n")
        f.write(" return true;\n")
      f.write(""" default:
 return false;
 }
}
""")
  def WriteServiceContextStateImpl(self, filename):
    """Writes the context state service implementation.

    Emits EnableFlags(), Initialize(), InitCapabilities(), InitState() and
    GetEnabled() for ContextState. The nested WriteCapabilities/WriteStates
    helpers close over |f| and are invoked twice: once with prev_state
    comparisons (delta path) and once without (full init path).
    """
    comment = "// It is included by context_state.cc\n"
    with CHeaderWriter(filename, self.year, comment) as f:
      code = []
      for capability in self.capability_flags:
        # ('false', 'true')[bool] picks the C++ literal for the default.
        code.append("%s(%s)" %
                    (capability['name'],
                     ('false', 'true')['default' in capability]))
        code.append("cached_%s(%s)" %
                    (capability['name'],
                     ('false', 'true')['default' in capability]))
      f.write("ContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
              ",\n ".join(code))
      f.write("\n")
      f.write("void ContextState::Initialize() {\n")
      for state_name in sorted(_STATE_INFO.keys()):
        state = _STATE_INFO[state_name]
        for item in state['states']:
          if isinstance(item['default'], list):
            for ndx, value in enumerate(item['default']):
              f.write(" %s[%d] = %s;\n" % (item['name'], ndx, value))
          else:
            f.write(" %s = %s;\n" % (item['name'], item['default']))
          if item.get('cached', False):
            if isinstance(item['default'], list):
              for ndx, value in enumerate(item['default']):
                f.write(" cached_%s[%d] = %s;\n" % (item['name'], ndx, value))
            else:
              f.write(" cached_%s = %s;\n" % (item['name'], item['default']))
      f.write("}\n")
      f.write("""
void ContextState::InitCapabilities(const ContextState* prev_state) const {
""")
      def WriteCapabilities(test_prev, es3_caps):
        # Emits EnableDisable calls; skips no_init capabilities and emits
        # only ES2 or only ES3 capabilities depending on |es3_caps|.
        for capability in self.capability_flags:
          capability_name = capability['name']
          capability_no_init = 'no_init' in capability and \
              capability['no_init'] == True
          if capability_no_init:
            continue
          capability_es3 = 'es3' in capability and capability['es3'] == True
          if capability_es3 and not es3_caps or not capability_es3 and es3_caps:
            continue
          if 'extension_flag' in capability:
            f.write(" if (feature_info_->feature_flags().%s) {\n " %
                    capability['extension_flag'])
          if test_prev:
            f.write(""" if (prev_state->enable_flags.cached_%s !=
enable_flags.cached_%s) {\n""" %
                    (capability_name, capability_name))
          f.write(" EnableDisable(GL_%s, enable_flags.cached_%s);\n" %
                  (capability_name.upper(), capability_name))
          if test_prev:
            f.write(" }")
          if 'extension_flag' in capability:
            f.write(" }")
      f.write(" if (prev_state) {")
      WriteCapabilities(True, False)
      f.write(" if (feature_info_->IsES3Capable()) {\n")
      WriteCapabilities(True, True)
      f.write(" }\n")
      f.write(" } else {")
      WriteCapabilities(False, False)
      f.write(" if (feature_info_->IsES3Capable()) {\n")
      WriteCapabilities(False, True)
      f.write(" }\n")
      f.write(" }")
      f.write("""}
void ContextState::InitState(const ContextState *prev_state) const {
""")
      def WriteStates(test_prev):
        # We need to sort the keys so the expectations match
        for state_name in sorted(_STATE_INFO.keys()):
          state = _STATE_INFO[state_name]
          if 'no_init' in state and state['no_init']:
            continue
          if state['type'] == 'FrontBack':
            num_states = len(state['states'])
            # TODO(review): num_states / 2 floor-divides under Python 2 but
            # yields a float under Python 3; use // when migrating.
            for ndx, group in enumerate(Grouper(num_states / 2,
                                                state['states'])):
              if test_prev:
                f.write(" if (")
              args = []
              for place, item in enumerate(group):
                item_name = CachedStateName(item)
                args.append('%s' % item_name)
                if test_prev:
                  if place > 0:
                    f.write(' ||\n')
                  f.write("(%s != prev_state->%s)" % (item_name, item_name))
              if test_prev:
                f.write(")\n")
              f.write(
                  " api()->gl%sFn(%s, %s);\n" %
                  (state['func'], ('GL_FRONT', 'GL_BACK')[ndx],
                   ", ".join(args)))
          elif state['type'] == 'NamedParameter':
            for item in state['states']:
              item_name = CachedStateName(item)
              operation = []
              if test_prev:
                if isinstance(item['default'], list):
                  operation.append(" if (memcmp(prev_state->%s, %s, "
                                   "sizeof(%s) * %d)) {\n" %
                                   (item_name, item_name, item['type'],
                                    len(item['default'])))
                else:
                  operation.append(" if (prev_state->%s != %s) {\n " %
                                   (item_name, item_name))
              operation.append(" api()->gl%sFn(%s, %s);\n" %
                               (state['func'],
                                (item['enum_set']
                                 if 'enum_set' in item else item['enum']),
                                item['name']))
              if test_prev:
                operation.append(" }")
              guarded_operation = GuardState(item, ''.join(operation),
                                             "feature_info_")
              f.write(guarded_operation)
          else:
            if 'extension_flag' in state:
              f.write(" if (feature_info_->feature_flags().%s)\n " %
                      state['extension_flag'])
            if test_prev:
              f.write(" if (")
            args = []
            for place, item in enumerate(state['states']):
              item_name = CachedStateName(item)
              args.append('%s' % item_name)
              if test_prev:
                if place > 0:
                  f.write(' ||\n')
                f.write("(%s != prev_state->%s)" %
                        (item_name, item_name))
            if test_prev:
              f.write(" )\n")
            if 'custom_function' in state:
              f.write(" %s(%s);\n" % (state['func'], ", ".join(args)))
            else:
              f.write(" api()->gl%sFn(%s);\n" % (state['func'],
                                                 ", ".join(args)))
      f.write(" if (prev_state) {")
      WriteStates(True)
      f.write(" } else {")
      WriteStates(False)
      f.write(" }")
      f.write(" InitStateManual(prev_state);")
      f.write("}\n")
      f.write("""bool ContextState::GetEnabled(GLenum cap) const {
 switch (cap) {
""")
      for capability in self.capability_flags:
        f.write(" case GL_%s:\n" % capability['name'].upper())
        f.write(" return enable_flags.%s;\n" % capability['name'])
      f.write(""" default:
 NOTREACHED();
 return false;
 }
}
""")
      self.WriteContextStateGetters(f, "ContextState")
    self.generated_cpp_filenames.append(filename)
  def WriteClientContextStateImpl(self, filename):
    """Writes the context state client side implementation.

    Emits EnableFlags(), SetCapabilityState() and GetEnabled() for
    ClientContextState; extension-gated capabilities are skipped throughout.
    """
    comment = "// It is included by client_context_state.cc\n"
    with CHeaderWriter(filename, self.year, comment) as f:
      code = []
      for capability in self.capability_flags:
        if 'extension_flag' in capability:
          continue
        # ('false', 'true')[bool] picks the C++ literal for the default.
        code.append("%s(%s)" %
                    (capability['name'],
                     ('false', 'true')['default' in capability]))
      f.write(
          "ClientContextState::EnableFlags::EnableFlags()\n : %s {\n}\n" %
          ",\n ".join(code))
      f.write("\n")
      f.write("""
bool ClientContextState::SetCapabilityState(
 GLenum cap, bool enabled, bool* changed) {
 *changed = false;
 switch (cap) {
""")
      for capability in self.capability_flags:
        if 'extension_flag' in capability:
          continue
        f.write(" case GL_%s:\n" % capability['name'].upper())
        f.write(""" if (enable_flags.%(name)s != enabled) {
 *changed = true;
 enable_flags.%(name)s = enabled;
 }
 return true;
""" % capability)
      f.write(""" default:
 return false;
 }
}
""")
      f.write("""bool ClientContextState::GetEnabled(
 GLenum cap, bool* enabled) const {
 switch (cap) {
""")
      for capability in self.capability_flags:
        if 'extension_flag' in capability:
          continue
        f.write(" case GL_%s:\n" % capability['name'].upper())
        f.write(" *enabled = enable_flags.%s;\n" % capability['name'])
        f.write(" return true;\n")
      f.write(""" default:
 return false;
 }
}
""")
    self.generated_cpp_filenames.append(filename)
  def WriteServiceImplementation(self, filename):
    """Writes the service decoder implementation.

    Emits each command's handler; for the GLES2 prefix it also emits
    GLES2DecoderImpl::SetCapabilityState, which returns true only when the
    cached flag actually changed (or cached state is ignored).
    """
    comment = "// It is included by %s_cmd_decoder.cc\n" % _lower_prefix
    with CHeaderWriter(filename, self.year, comment) as f:
      for func in self.functions:
        func.WriteServiceImplementation(f)
      if self.capability_flags and _prefix == 'GLES2':
        f.write("""
bool GLES2DecoderImpl::SetCapabilityState(GLenum cap, bool enabled) {
 switch (cap) {
""")
        for capability in self.capability_flags:
          f.write(" case GL_%s:\n" % capability['name'].upper())
          if 'on_change' in capability:
            # Capabilities with an on_change hook run custom code instead of
            # updating the cached flag here.
            f.write("""\
 state_.enable_flags.%(name)s = enabled;
 if (state_.enable_flags.cached_%(name)s != enabled
 || state_.ignore_cached_state) {
 %(on_change)s
 }
 return false;
""" % capability)
          else:
            f.write("""\
 state_.enable_flags.%(name)s = enabled;
 if (state_.enable_flags.cached_%(name)s != enabled
 || state_.ignore_cached_state) {
 state_.enable_flags.cached_%(name)s = enabled;
 return true;
 }
 return false;
""" % capability)
        f.write(""" default:
 NOTREACHED();
 return false;
 }
}
""")
    self.generated_cpp_filenames.append(filename)
def WritePassthroughServiceImplementation(self, filename):
"""Writes the passthrough service decoder implementation."""
with CWriter(filename, self.year) as f:
header = """
#include \"gpu/command_buffer/service/gles2_cmd_decoder_passthrough.h\"
namespace gpu {
namespace gles2 {
""";
f.write(header);
for func in self.functions:
func.WritePassthroughServiceImplementation(f)
footer = """
} // namespace gles2
} // namespace gpu
""";
f.write(footer);
self.generated_cpp_filenames.append(filename)
  def WriteServiceUnitTests(self, filename_pattern):
    """Writes the service decoder unit tests, split across numbered files.

    |filename_pattern| must contain a %d placeholder for the file index.
    """
    num_tests = len(self.functions)
    FUNCTIONS_PER_FILE = 98 # hard code this so it doesn't change.
    count = 0
    for test_num in range(0, num_tests, FUNCTIONS_PER_FILE):
      count += 1
      filename = filename_pattern % count
      comment = "// It is included by %s_cmd_decoder_unittest_%d.cc\n" \
          % (_lower_prefix, count)
      with CHeaderWriter(filename, self.year, comment) as f:
        end = test_num + FUNCTIONS_PER_FILE
        if end > num_tests:
          end = num_tests
        for idx in range(test_num, end):
          func = self.functions[idx]
          # ES3 functions use the GLES3 test fixture regardless of prefix.
          test_name = '%sDecoderTest%d' % (_prefix, count)
          if func.IsES3():
            test_name = 'GLES3DecoderTest%d' % count
          # Do any filtering of the functions here, so that the functions
          # will not move between the numbered files if filtering properties
          # are changed.
          if func.GetInfo('extension_flag'):
            continue
          if func.GetInfo('unit_test') != False:
            func.WriteServiceUnitTest(f, {
              'test_name': test_name
            })
      self.generated_cpp_filenames.append(filename)
  def WriteServiceContextStateTestHelpers(self, filename):
    """Writes mock-GL expectation helpers for context state initialization.

    Emits SetupInitCapabilitiesExpectations and SetupInitStateExpectations,
    mirroring the calls made by ContextState::InitCapabilities/InitState.
    """
    comment = "// It is included by context_state_test_helpers.cc\n"
    with CHeaderWriter(filename, self.year, comment) as f:
      if self.capability_flags:
        f.write(
            """void ContextStateTestHelpers::SetupInitCapabilitiesExpectations(
 MockGL* gl,
 gles2::FeatureInfo* feature_info) {
""")
        for capability in self.capability_flags:
          capability_no_init = 'no_init' in capability and \
              capability['no_init'] == True
          if capability_no_init:
            continue
          capability_es3 = 'es3' in capability and capability['es3'] == True
          if capability_es3:
            continue
          if 'extension_flag' in capability:
            f.write(" if (feature_info->feature_flags().%s) {\n" %
                    capability['extension_flag'])
            f.write(" ")
          f.write(" ExpectEnableDisable(gl, GL_%s, %s);\n" %
                  (capability['name'].upper(),
                   ('false', 'true')['default' in capability]))
          if 'extension_flag' in capability:
            f.write(" }")
        f.write(" if (feature_info->IsES3Capable()) {")
        for capability in self.capability_flags:
          capability_es3 = 'es3' in capability and capability['es3'] == True
          if capability_es3:
            f.write(" ExpectEnableDisable(gl, GL_%s, %s);\n" %
                    (capability['name'].upper(),
                     ('false', 'true')['default' in capability]))
        f.write(""" }
}
""")
      f.write("""
void ContextStateTestHelpers::SetupInitStateExpectations(
 MockGL* gl,
 gles2::FeatureInfo* feature_info,
 const gfx::Size& initial_size) {
""")
      # We need to sort the keys so the expectations match
      for state_name in sorted(_STATE_INFO.keys()):
        state = _STATE_INFO[state_name]
        if state['type'] == 'FrontBack':
          num_states = len(state['states'])
          # TODO(review): num_states / 2 floor-divides under Python 2 but
          # yields a float under Python 3; use // when migrating.
          for ndx, group in enumerate(Grouper(num_states / 2,
                                              state['states'])):
            args = []
            for item in group:
              if 'expected' in item:
                args.append(item['expected'])
              else:
                args.append(item['default'])
            f.write(
                " EXPECT_CALL(*gl, %s(%s, %s))\n" %
                (state['func'], ('GL_FRONT', 'GL_BACK')[ndx],
                 ", ".join(args)))
            f.write(" .Times(1)\n")
            f.write(" .RetiresOnSaturation();\n")
        elif state['type'] == 'NamedParameter':
          for item in state['states']:
            expect_value = item['default']
            if isinstance(expect_value, list):
              # TODO: Currently we do not check array values.
              expect_value = "_"
            operation = []
            operation.append(
                " EXPECT_CALL(*gl, %s(%s, %s))\n" %
                (state['func'],
                 (item['enum_set']
                  if 'enum_set' in item else item['enum']),
                 expect_value))
            operation.append(" .Times(1)\n")
            operation.append(" .RetiresOnSaturation();\n")
            guarded_operation = GuardState(item, ''.join(operation),
                                           "feature_info")
            f.write(guarded_operation)
        elif 'no_init' not in state:
          if 'extension_flag' in state:
            f.write(" if (feature_info->feature_flags().%s) {\n" %
                    state['extension_flag'])
            f.write(" ")
          args = []
          for item in state['states']:
            if 'expected' in item:
              args.append(item['expected'])
            else:
              args.append(item['default'])
          # TODO: Currently we do not check array values.
          args = ["_" if isinstance(arg, list) else arg for arg in args]
          if 'custom_function' in state:
            f.write(" SetupInitStateManualExpectationsFor%s(gl, %s);\n" %
                    (state['func'], ", ".join(args)))
          else:
            f.write(" EXPECT_CALL(*gl, %s(%s))\n" %
                    (state['func'], ", ".join(args)))
            f.write(" .Times(1)\n")
            f.write(" .RetiresOnSaturation();\n")
          if 'extension_flag' in state:
            f.write(" }\n")
      f.write(" SetupInitStateManualExpectations(gl, feature_info);\n")
      f.write("}\n")
    self.generated_cpp_filenames.append(filename)
def WriteServiceUnitTestsForExtensions(self, filename):
"""Writes the service decoder unit tests for functions with extension_flag.
The functions are special in that they need a specific unit test
baseclass to turn on the extension.
"""
functions = [f for f in self.functions if f.GetInfo('extension_flag')]
comment = "// It is included by gles2_cmd_decoder_unittest_extensions.cc\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in functions:
if True:
if func.GetInfo('unit_test') != False:
extension = ToCamelCase(
ToGLExtensionString(func.GetInfo('extension_flag')))
test_name = 'GLES2DecoderTestWith%s' % extension
if func.IsES3():
test_name = 'GLES3DecoderTestWith%s' % extension
func.WriteServiceUnitTest(f, {
'test_name': test_name
})
self.generated_cpp_filenames.append(filename)
def WriteGLES2Header(self, filename):
"""Writes the GLES2 header."""
comment = "// This file contains Chromium-specific GLES2 declarations.\n\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2Header(f)
f.write("\n")
self.generated_cpp_filenames.append(filename)
  def WriteGLES2CLibImplementation(self, filename):
    """Writes the GLES2 c lib implementation.

    Emits one C-callable wrapper per GL entry point, followed by a
    name -> function-pointer lookup table.
    """
    comment = "// These functions emulate GLES2 over command buffers.\n"
    with CHeaderWriter(filename, self.year, comment) as f:
      # One C wrapper per exposed GL entry point.
      for func in self.original_functions:
        func.WriteGLES2CLibImplementation(f)
      # Emit the table mapping "glFoo" names to their function pointers.
      f.write("""
namespace gles2 {

extern const NameToFunc g_gles2_function_table[] = {
""")
      for func in self.original_functions:
        f.write(
            '  { "gl%s", reinterpret_cast<GLES2FunctionPointer>(gl%s), },\n' %
            (func.name, func.name))
      # Sentinel entry terminates the table.
      f.write("""  { nullptr, nullptr, },
};

}  // namespace gles2
""")
    self.generated_cpp_filenames.append(filename)
def WriteGLES2InterfaceHeader(self, filename):
"""Writes the GLES2 interface header."""
comment = ("// This file is included by %s_interface.h to declare the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2InterfaceHeader(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2InterfaceStub(self, filename):
"""Writes the GLES2 interface stub header."""
comment = "// This file is included by gles2_interface_stub.h.\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2InterfaceStub(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2InterfaceStubImpl(self, filename):
"""Writes the GLES2 interface header."""
comment = "// This file is included by gles2_interface_stub.cc.\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2InterfaceStubImpl(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2ImplementationHeader(self, filename):
"""Writes the GLES2 Implementation header."""
comment = \
("// This file is included by %s_implementation.h to declare the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2ImplementationHeader(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2Implementation(self, filename):
"""Writes the GLES2 Implementation."""
comment = \
("// This file is included by %s_implementation.cc to define the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2Implementation(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2TraceImplementationHeader(self, filename):
"""Writes the GLES2 Trace Implementation header."""
comment = "// This file is included by gles2_trace_implementation.h\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2TraceImplementationHeader(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2TraceImplementation(self, filename):
"""Writes the GLES2 Trace Implementation."""
comment = "// This file is included by gles2_trace_implementation.cc\n"
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2TraceImplementation(f)
self.generated_cpp_filenames.append(filename)
def WriteGLES2ImplementationUnitTests(self, filename):
"""Writes the GLES2 helper header."""
comment = \
("// This file is included by %s_implementation.h to declare the\n"
"// GL api functions.\n" % _lower_prefix)
with CHeaderWriter(filename, self.year, comment) as f:
for func in self.original_functions:
func.WriteGLES2ImplementationUnitTest(f)
self.generated_cpp_filenames.append(filename)
  def WriteServiceUtilsHeader(self, filename):
    """Writes the gles2 auto generated utility header.

    Declares one validator member per named type: complete types get a
    dedicated class with IsValid(); incomplete types use ValueValidator<T>.
    """
    with CHeaderWriter(filename, self.year) as f:
      # Sorted so the generated declarations are stable across runs.
      for name in sorted(self.named_type_info.keys()):
        named_type = NamedType(self.named_type_info[name])
        if not named_type.CreateValidator():
          continue
        class_name = ValidatorClassName(name)
        if named_type.IsComplete():
          # Complete types: a small generated class with an IsValid() switch.
          f.write("""class %(class_name)s {
 public:
  bool IsValid(const %(type)s value) const;"""% {
              'class_name': class_name,
              'type': named_type.GetType()
            })
          if named_type.HasES3Values():
            # ES3-aware validators carry an is_es3_ flag toggled at runtime.
            f.write("""%s();
  void SetIsES3(bool is_es3) { is_es3_ = is_es3; }

 private:
  bool is_es3_;""" % class_name)
          f.write("};\n")
          f.write("%s %s;\n\n" %
                  (class_name, ToUnderscore(name)))
        else:
          # Incomplete types: table-driven ValueValidator member.
          f.write("ValueValidator<%s> %s;\n" %
                  (named_type.GetType(), ToUnderscore(name)))
      f.write("\n")
    self.generated_cpp_filenames.append(filename)
  def WriteServiceUtilsImplementation(self, filename):
    """Writes the gles2 auto generated utility implementation.

    Complete types become generated switch-based IsValid() methods;
    incomplete types become static value tables plus ValueValidator
    members initialized in the Validators constructor.
    """
    with CHeaderWriter(filename, self.year) as f:
      # Sorted so generated output is stable across runs.
      names = sorted(self.named_type_info.keys())
      for name in names:
        named_type = NamedType(self.named_type_info[name])
        class_name = ValidatorClassName(name)
        if not named_type.CreateValidator():
          continue
        if named_type.IsComplete():
          # Complete types validate via a generated switch over all values.
          if named_type.HasES3Values():
            f.write("""Validators::%(class_name)s::%(class_name)s()
    : is_es3_(false) {}""" % { 'class_name': class_name })
          f.write("""bool Validators::%(class_name)s::IsValid(
    const %(type)s value) const {
  switch(value) {\n""" % {
              'class_name': class_name,
              'type': named_type.GetType()
            })
          if named_type.GetValidValues():
            for value in named_type.GetValidValues():
              f.write("case %s:\n" % value)
            f.write("return true;\n")
          # ES3-only values are valid only when is_es3_ is set ...
          if named_type.GetValidValuesES3():
            for value in named_type.GetValidValuesES3():
              f.write("case %s:\n" % value)
            f.write("return is_es3_;\n")
          # ... and values deprecated in ES3 only when it is not.
          if named_type.GetDeprecatedValuesES3():
            for value in named_type.GetDeprecatedValuesES3():
              f.write("case %s:\n" % value)
            f.write("return !is_es3_;\n")
          f.write("}\nreturn false;\n}\n")
          f.write("\n")
        else:
          # Incomplete types: emit static tables consumed by the
          # ValueValidator members initialized below.
          if named_type.GetValidValues():
            f.write("static const %s valid_%s_table[] = {\n" %
                    (named_type.GetType(), ToUnderscore(name)))
            for value in named_type.GetValidValues():
              f.write("  %s,\n" % value)
            f.write("};\n")
            f.write("\n")
          if named_type.GetValidValuesES3():
            f.write("static const %s valid_%s_table_es3[] = {\n" %
                    (named_type.GetType(), ToUnderscore(name)))
            for value in named_type.GetValidValuesES3():
              f.write("  %s,\n" % value)
            f.write("};\n")
            f.write("\n")
          if named_type.GetDeprecatedValuesES3():
            f.write("static const %s deprecated_%s_table_es3[] = {\n" %
                    (named_type.GetType(), ToUnderscore(name)))
            for value in named_type.GetDeprecatedValuesES3():
              f.write("  %s,\n" % value)
            f.write("};\n")
            f.write("\n")
      # Constructor: one initializer per incomplete (table-driven) type.
      f.write("Validators::Validators()")
      pre = '    : '
      for name in names:
        named_type = NamedType(self.named_type_info[name])
        if not named_type.CreateValidator() or named_type.IsComplete():
          continue
        if named_type.GetValidValues():
          code = """%(pre)s%(name)s(
      valid_%(name)s_table, base::size(valid_%(name)s_table))"""
        else:
          code = "%(pre)s%(name)s()"
        f.write(code % {
            'name': ToUnderscore(name),
            'pre': pre,
          })
        pre = ',\n      '
      f.write(" {\n");
      f.write("}\n\n");
      if _prefix == 'GLES2':
        # UpdateValuesES3() flips every validator to ES3 semantics.
        f.write("void Validators::UpdateValuesES3() {\n")
        for name in names:
          named_type = NamedType(self.named_type_info[name])
          if not named_type.IsConstant() and named_type.IsComplete():
            if named_type.HasES3Values():
              f.write("  %(name)s.SetIsES3(true);" % {
                  'name': ToUnderscore(name),
                })
            continue
          if named_type.GetDeprecatedValuesES3():
            code = """  %(name)s.RemoveValues(
      deprecated_%(name)s_table_es3, base::size(deprecated_%(name)s_table_es3));
"""
            f.write(code % {
                'name': ToUnderscore(name),
              })
          if named_type.GetValidValuesES3():
            code = """  %(name)s.AddValues(
      valid_%(name)s_table_es3, base::size(valid_%(name)s_table_es3));
"""
            f.write(code % {
                'name': ToUnderscore(name),
              })
        f.write("}\n\n");
        f.write("void Validators::UpdateETCCompressedTextureFormats() {\n")
        for name in ['CompressedTextureFormat', 'TextureInternalFormatStorage']:
          for fmt in _ETC_COMPRESSED_TEXTURE_FORMATS:
            code = """  %(name)s.AddValue(%(format)s);
"""
            f.write(code % {
                'name': ToUnderscore(name),
                'format': fmt,
              })
        f.write("}\n\n");
    self.generated_cpp_filenames.append(filename)
def WriteCommonUtilsHeader(self, filename):
"""Writes the gles2 common utility header."""
with CHeaderWriter(filename, self.year) as f:
type_infos = sorted(self.named_type_info.keys())
for type_info in type_infos:
if self.named_type_info[type_info]['type'] == 'GLenum':
f.write("static std::string GetString%s(uint32_t value);\n" %
type_info)
f.write("\n")
self.generated_cpp_filenames.append(filename)
def WriteCommonUtilsImpl(self, filename):
"""Writes the gles2 common utility header."""
enum_re = re.compile(r'\#define\s+(GL_[a-zA-Z0-9_]+)\s+([0-9A-Fa-fx]+)')
define_dict = {}
for fname in ['third_party/khronos/GLES2/gl2.h',
'third_party/khronos/GLES2/gl2ext.h',
'third_party/khronos/GLES3/gl3.h',
'third_party/khronos/GLES3/gl31.h',
'gpu/GLES2/gl2chromium.h',
'gpu/GLES2/gl2extchromium.h']:
fname = os.path.join(self.chromium_root_dir, fname)
lines = open(fname).readlines()
for line in lines:
m = enum_re.match(line)
if m:
name = m.group(1)
value = m.group(2)
if len(value) <= 10 and value.startswith('0x'):
if not value in define_dict:
define_dict[value] = name
# check our own _CHROMIUM macro conflicts with khronos GL headers.
elif EnumsConflict(define_dict[value], name):
self.Error("code collision: %s and %s have the same code %s" %
(define_dict[value], name, value))
with CHeaderWriter(filename, self.year) as f:
f.write("static const %sUtil::EnumToString "
"enum_to_string_table[] = {\n" % _prefix)
for value in sorted(define_dict):
f.write(' { %s, "%s", },\n' % (value, define_dict[value]))
f.write("""};
const %(p)sUtil::EnumToString* const %(p)sUtil::enum_to_string_table_ =
enum_to_string_table;
const size_t %(p)sUtil::enum_to_string_table_len_ =
sizeof(enum_to_string_table) / sizeof(enum_to_string_table[0]);
""" % { 'p' : _prefix})
enums = sorted(self.named_type_info.keys())
for enum in enums:
if self.named_type_info[enum]['type'] == 'GLenum':
f.write("std::string %sUtil::GetString%s(uint32_t value) {\n" %
(_prefix, enum))
valid_list = self.named_type_info[enum]['valid']
if 'valid_es3' in self.named_type_info[enum]:
for es3_enum in self.named_type_info[enum]['valid_es3']:
if not es3_enum in valid_list:
valid_list.append(es3_enum)
assert len(valid_list) == len(set(valid_list))
if len(valid_list) > 0:
f.write(" static const EnumToString string_table[] = {\n")
for value in valid_list:
f.write(' { %s, "%s" },\n' % (value, value))
f.write(""" };
return %sUtil::GetQualifiedEnumString(
string_table, base::size(string_table), value);
}
""" % _prefix)
else:
f.write(""" return %sUtil::GetQualifiedEnumString(
nullptr, 0, value);
}
""" % _prefix)
self.generated_cpp_filenames.append(filename)
  def WritePepperGLES2Interface(self, filename, dev):
    """Writes the Pepper OpenGLES interface definition (.idl).

    Args:
      filename: path of the .idl file to generate.
      dev: True for the Dev interface variant, which reuses the GL
        typedefs from ppb_opengles2.h instead of declaring them here.
    """
    with CWriter(filename, self.year) as f:
      f.write("label Chrome {\n")
      f.write("  M39 = 1.0\n")
      f.write("};\n\n")

      if not dev:
        # Declare GL types.
        f.write("[version=1.0]\n")
        f.write("describe {\n")
        for gltype in ['GLbitfield', 'GLboolean', 'GLbyte', 'GLclampf',
                       'GLclampx', 'GLenum', 'GLfixed', 'GLfloat', 'GLint',
                       'GLintptr', 'GLshort', 'GLsizei', 'GLsizeiptr',
                       'GLubyte', 'GLuint', 'GLushort']:
          f.write("  %s;\n" % gltype)
          f.write("  %s_ptr_t;\n" % gltype)
        f.write("};\n\n")

      # C level typedefs.
      f.write("#inline c\n")
      f.write("#include \"ppapi/c/pp_resource.h\"\n")
      if dev:
        f.write("#include \"ppapi/c/ppb_opengles2.h\"\n\n")
      else:
        # Guarded so the typedefs do not clash with a real gl2.h include.
        f.write("\n#ifndef __gl2_h_\n")
        for (k, v) in _GL_TYPES.iteritems():
          f.write("typedef %s %s;\n" % (v, k))
        # Pointer-sized types differ between 32-bit and 64-bit Windows.
        f.write("#ifdef _WIN64\n")
        for (k, v) in _GL_TYPES_64.iteritems():
          f.write("typedef %s %s;\n" % (v, k))
        f.write("#else\n")
        for (k, v) in _GL_TYPES_32.iteritems():
          f.write("typedef %s %s;\n" % (v, k))
        f.write("#endif  // _WIN64\n")
        f.write("#endif  // __gl2_h_\n\n")
      f.write("#endinl\n")

      for interface in self.pepper_interfaces:
        if interface.dev != dev:
          continue
        # Historically, we provide OpenGLES2 interfaces with struct
        # namespace. Not to break code which uses the interface as
        # "struct OpenGLES2", we put it in struct namespace.
        f.write('\n[macro="%s", force_struct_namespace]\n' %
                interface.GetInterfaceName())
        f.write("interface %s {\n" % interface.GetStructName())
        for func in self.original_functions:
          if not func.InPepperInterface(interface):
            continue
          ret_type = func.MapCTypeToPepperIdlType(func.return_type,
                                                  is_for_return_type=True)
          # Continuation args are aligned under the first argument.
          func_prefix = "  %s %s(" % (ret_type, func.GetPepperName())
          f.write(func_prefix)
          f.write("[in] PP_Resource context")
          for arg in func.MakeTypedPepperIdlArgStrings():
            f.write(",\n" + " " * len(func_prefix) + arg)
          f.write(");\n")
        f.write("};\n\n")
  def WritePepperGLES2Implementation(self, filename):
    """Writes the Pepper OpenGLES interface implementation.

    Emits a C wrapper per Pepper-exposed GL function that enters the
    Graphics3D resource and forwards to GLES2Implementation, plus one
    interface-struct getter per Pepper interface.
    """
    with CWriter(filename, self.year) as f:
      f.write("#include \"ppapi/shared_impl/ppb_opengles2_shared.h\"\n\n")
      f.write("#include \"base/logging.h\"\n")
      f.write("#include \"gpu/command_buffer/client/gles2_implementation.h\"\n")
      f.write("#include \"ppapi/shared_impl/ppb_graphics_3d_shared.h\"\n")
      f.write("#include \"ppapi/thunk/enter.h\"\n\n")
      f.write("namespace ppapi {\n\n")
      f.write("namespace {\n\n")
      f.write("typedef thunk::EnterResource<thunk::PPB_Graphics3D_API>"
              " Enter3D;\n\n")
      f.write("gpu::gles2::GLES2Implementation* ToGles2Impl(Enter3D*"
              " enter) {\n")
      f.write("  DCHECK(enter);\n")
      f.write("  DCHECK(enter->succeeded());\n")
      f.write("  return static_cast<PPB_Graphics3D_Shared*>(enter->object())->"
              "gles2_impl();\n");
      f.write("}\n\n");
      for func in self.original_functions:
        if not func.InAnyPepperExtension():
          continue
        # Every wrapper takes the PP_Resource context first, then the
        # original GL arguments (if any).
        original_arg = func.MakeTypedPepperArgString("")
        context_arg = "PP_Resource context_id"
        if len(original_arg):
          arg = context_arg + ", " + original_arg
        else:
          arg = context_arg
        f.write("%s %s(%s) {\n" %
                (func.return_type, func.GetPepperName(), arg))
        f.write("  Enter3D enter(context_id, true);\n")
        f.write("  if (enter.succeeded()) {\n")
        return_str = "" if func.return_type == "void" else "return "
        f.write("    %sToGles2Impl(&enter)->%s(%s);\n" %
                (return_str, func.original_name,
                 func.MakeOriginalArgString("")))
        f.write("  }")
        # Non-void wrappers return an error value when entering fails.
        if func.return_type == "void":
          f.write("\n")
        else:
          f.write(" else {\n")
          f.write("    return %s;\n" % func.GetErrorReturnString())
          f.write("  }\n")
        f.write("}\n\n")
      f.write("}  // namespace\n")
      for interface in self.pepper_interfaces:
        f.write("const %s* PPB_OpenGLES2_Shared::Get%sInterface() {\n" %
                (interface.GetStructName(), interface.GetName()))
        f.write("  static const struct %s "
                "ppb_opengles2 = {\n" % interface.GetStructName())
        f.write("    &")
        # NOTE(review): the generator expression reuses the name 'f'; it
        # has its own scope, so the outer file handle is unaffected.
        f.write(",\n    &".join(
            f.GetPepperName() for f in self.original_functions
            if f.InPepperInterface(interface)))
        f.write("\n")
        f.write("  };\n")
        f.write("  return &ppb_opengles2;\n")
        f.write("}\n")
      f.write("}  // namespace ppapi\n")
    self.generated_cpp_filenames.append(filename)
  def WriteGLES2ToPPAPIBridge(self, filename):
    """Connects GLES2 helper library to PPB_OpenGLES2 interface.

    Emits a gl* entry point per Pepper-exposed function that looks up the
    PPAPI interface struct and forwards the call with the current context.
    """
    with CWriter(filename, self.year) as f:
      f.write("#ifndef GL_GLEXT_PROTOTYPES\n")
      f.write("#define GL_GLEXT_PROTOTYPES\n")
      f.write("#endif\n")
      f.write("#include <GLES2/gl2.h>\n")
      f.write("#include <GLES2/gl2ext.h>\n")
      f.write("#include \"ppapi/lib/gl/gles2/gl2ext_ppapi.h\"\n\n")
      for func in self.original_functions:
        if not func.InAnyPepperExtension():
          continue
        # '' keys the default (non-extension) interface.
        interface = self.interface_info[func.GetInfo('pepper_interface') or '']
        f.write("%s GL_APIENTRY gl%s(%s) {\n" %
                (func.return_type, func.GetPepperName(),
                 func.MakeTypedPepperArgString("")))
        return_str = "" if func.return_type == "void" else "return "
        interface_str = "glGet%sInterfacePPAPI()" % interface.GetName()
        original_arg = func.MakeOriginalArgString("")
        context_arg = "glGetCurrentContextPPAPI()"
        if len(original_arg):
          arg = context_arg + ", " + original_arg
        else:
          arg = context_arg
        if interface.GetName():
          # Extension interfaces may be absent at runtime: null-check and
          # fall back to returning 0 for non-void functions.
          f.write("  const struct %s* ext = %s;\n" %
                  (interface.GetStructName(), interface_str))
          f.write("  if (ext)\n")
          f.write("    %sext->%s(%s);\n" %
                  (return_str, func.GetPepperName(), arg))
          if return_str:
            f.write("  %s0;\n" % return_str)
        else:
          f.write("  %s%s->%s(%s);\n" %
                  (return_str, interface_str, func.GetPepperName(), arg))
        f.write("}\n\n")
    self.generated_cpp_filenames.append(filename)
def Format(generated_files, output_dir, chromium_root_dir):
  """Format generated_files relative to output_dir using clang-format.

  Args:
    generated_files: iterable of file paths relative to output_dir.
    output_dir: directory the generated files were written to.
    chromium_root_dir: checkout root; used to locate clang-format and as
      the working directory for the formatter invocation.
  """
  formatter = "third_party/depot_tools/clang-format"
  if platform.system() == "Windows":
    formatter = "third_party\\depot_tools\\clang-format.bat"
  formatter = os.path.join(chromium_root_dir, formatter)
  # Join paths inline instead of building an intermediate map() object
  # (also avoids the Py3 one-shot-iterator pitfall of map()).
  for filename in generated_files:
    call([formatter, "-i", "-style=chromium",
          os.path.join(output_dir, filename)],
         cwd=chromium_root_dir)
|
Rabies is primarily a disease of animals, and is transmitted to man by the bite of an infected animal, usually a dog. It is almost always fatal and one of the most torturous forms of death from any infectious disease. Children under 15 years age are most affected as they play in the streets and are unable to run away or defend themselves against an attacking animal. Many families have been devastated by the loss of a child or a working family member. However, if the victim is given proper wound care and post exposure prevention soon after the bite, rabies is 100% preventable.
Nearly 15,000–20,000 cases of dog bites are recorded annually among the three major hospitals of Karachi, while many more remain undocumented. Some of the bites are severe, disfiguring, as well as dangerous, especially when children are bitten on the head, face or neck. If untreated, the victim may develop the fatal brain infection — rabies — which is irreversible. Occasionally, several rogue dogs maul small, defenseless children. The Indus Hospital (TIH) alone treats 30–40 such dog bites daily.
In response to such tragic injuries, we have formed the ‘Rabies-free Karachi Team’ under the aegis of the Indus Hospital Research Center (IHRC). We have identified a fishing village, Ibrahim Hyderi, with a human population of 1.5 million, from which TIH receives several bite victims a day. Through the Union Council members we have contacted the local population to secure their support for the elimination of rabies.
WHO/OIE strictly discourages killing of dogs, as not only is this inhumane, but also, ineffective in reducing dog population. WHO/OIE recommends mass dog vaccination (MDV) of at least 70% of dogs in an area, which will produce herd immunity against rabies. In order to reduce dog population, the recommended strategy is to perform Animal Birth Control (ABC).
‘Rabies-free Karachi Team’ proposes to run a pilot project in Ibrahim Hyderi, by means of MDV and ABC. The project for MDV will prevent rabies among dogs, thus interrupting transmission to humans. ABC will yield long term benefit by reducing dog population. The local community will be safe from attacks of feral dogs, which not only create nuisance, but also can be dangerous to adults and children. We have complete support of the Karachi Metropolitan Corporation (KMC) under administration of the Mayor of Karachi, Mr. Waseem Akhtar.
As this will be our first experience in performing MDV and ABC, we have invited experts from Sri Lanka and South Africa to train local personnel and veterinarians who will perform the procedures. Arrangements for their travel and accommodation will be made through official channels.
WHO/OIE is prepared to provide dog anti-rabies vaccine, which will be utilized for vaccination of feral, as well as, owned dogs in Ibrahim Hyderi. We will use the services of locally trained personnel to catch dogs, and vets to perform MDV and ABC procedures. All personnel will be given Pre exposure prophylaxis (PreP) injections against rabies prior to their training.
The success of MDV and ABC will be extrapolated to other areas of Karachi where similar problems abound.
|
# This script only draws a glyph, no VD is calculated
# truetype-tracer also outputs arcs, and we plot them orange
import truetypetracer as ttt # https://github.com/aewallin/truetype-tracer
import openvoronoi as ovd # https://github.com/aewallin/openvoronoi
import ovdvtk
import time
import vtk
def drawLine(myscreen, previous, p, loopColor):
    # Add a straight line actor from the previous point to p (z = 0).
    start = (previous[0], previous[1], 0)
    end = (p[0], p[1], 0)
    myscreen.addActor(ovdvtk.Line(p1=start, p2=end, color=loopColor))
def drawSeg(myscreen, previous, p):
    # Draw one segment ending at p; previous is its start point.
    # p = [x, y, r, cw, cx, cy]; r == -1 flags a straight line segment,
    # any other r is an arc radius with center (cx, cy).
    ovdvtk.drawVertex(myscreen, ovd.Point(p[0], p[1]), 0.0001, ovdvtk.red)
    if (p[2] == -1):  # a line-segment
        drawLine(myscreen, previous, p, ovdvtk.yellow)
    else:  # an arc
        prev = ovd.Point(previous[0], previous[1])
        target = ovd.Point(p[0], p[1])
        radius = p[2]
        cw = p[3]  # clockwise flag
        center = ovd.Point(p[4], p[5])
        # print "prev ",prev
        # print "center ",center
        # print "diff ",prev-center
        # print "p ",p
        # Arcs are drawn orange so they stand out from yellow lines.
        ovdvtk.drawArc(myscreen, prev, target, radius, center, cw, ovdvtk.orange)
        # drawArc(myscreen, pt1, pt2, r, cen, cw, arcColor, da=0.1)
        # r, cen, cw, arcColor, da=0.1)
def drawLoops(myscreen, loops, loopColor):
    # draw the loops
    # Each point p = [x, y, r, cw, cx, cy]; r == -1 marks a line segment.
    # Segments are drawn from the previous point; the last point is also
    # connected back to the first point to close the loop.
    # NOTE(review): loopColor is unused here — drawSeg chooses yellow for
    # lines and orange for arcs itself.
    nloop = 0
    for lop in loops:
        n = 0
        N = len(lop)
        first_point = []
        previous = []
        n_lines = 0  # line segments drawn for this loop
        n_arcs = 0   # arcs drawn for this loop
        for p in lop:
            # p = [x, y, r, cw, cx, cy]
            if n == 0:  # don't draw anything on the first iteration
                previous = p
                first_point = p
            elif n == (N - 1):  # the last point
                drawSeg(myscreen, previous, p)
                if p[2] == -1:
                    n_lines += 1
                else:
                    n_arcs += 1
                # close the loop back to the first point
                drawSeg(myscreen, p, first_point)
                if first_point[2] == -1:
                    n_lines += 1
                else:
                    n_arcs += 1
            else:  # normal segment
                drawSeg(myscreen, previous, p)
                if p[2] == -1:
                    n_lines += 1
                else:
                    n_arcs += 1
                previous = p
            n = n + 1
        print "rendered loop ", nloop, " with ", len(lop), " points"
        print " n_lines = ", n_lines
        print " n_arcs = ", n_arcs
        nloop = nloop + 1
def translate(segs, x, y):
    """Return a copy of segs with every point translated by (x, y).

    Each point is [x, y, r, cw, cx, cy]; both the end point (indices 0, 1)
    and the arc center (indices 4, 5) are shifted.  The input lists are
    left untouched — the original version did `p2 = p`, which aliased the
    point and mutated the caller's data in place.
    """
    out = []
    for seg in segs:
        seg2 = []
        for p in seg:
            p2 = list(p)  # copy so the caller's point is not mutated
            p2[0] += x
            p2[1] += y
            p2[4] += x
            p2[5] += y
            seg2.append(p2)
        out.append(seg2)
    return out
def modify_segments(segs):
    """Drop the duplicated closing point from each closed loop.

    Every input segment repeats its first point as its last; since the
    loop is implicitly closed when drawn, the duplicate is removed (in
    place, via pop) before the segment is collected.
    """
    trimmed = []
    for seg in segs:
        first, last = seg[0], seg[-1]
        assert (first[0] == last[0] and first[1] == last[1])
        seg.pop()
        trimmed.append(seg)
    return trimmed
def draw_ttt(myscreen, text, x, y, scale):
    # Trace `text` with truetype-tracer and draw the resulting loops,
    # translated by (x, y) and scaled down by 1/scale.
    wr = ttt.SEG_Writer()

    # wr.arc = False
    wr.arc = True  # keep arcs as arcs instead of subdividing into lines
    # wr.conic = False
    # wr.cubic = False
    wr.scale = float(1) / float(scale)
    # "L" has 36 points by default
    wr.conic_biarc_subdivision = 200
    wr.conic_line_subdivision = 50  # this increases nr of points to 366
    # wr.cubic_biarc_subdivision = 10  # no effect?
    # wr.cubic_line_subdivision = 10  # no effect?
    wr.setFont(0)
    s3 = ttt.ttt(text, wr)
    ext = wr.extents
    dx = ext.maxx - ext.minx
    segs = wr.get_segments()
    segs = translate(segs, x, y)
    print "number of polygons: ", len(segs)
    np = 0
    sum_pts = 0
    # Drop each loop's duplicated closing point before drawing.
    segs = modify_segments(segs)
    for s in segs:
        sum_pts += len(s)
        print " polygon ", np, " has ", len(s), " points"
        np = np + 1
    print "total points: ", sum_pts
    drawLoops(myscreen, segs, ovdvtk.yellow)
# this script only draws geometry from ttt
# no voronoi-diagram is created!
if __name__ == "__main__":
    print "ttt version = ", ttt.version()
    # Alternative render-window sizes, kept for convenience:
    # w=2500
    # h=1500
    # w=1920
    # h=1080
    # w=1024
    # h=1024
    w = 800
    h = 600
    myscreen = ovdvtk.VTKScreen(width=w, height=h)
    ovdvtk.drawOCLtext(myscreen, rev_text=ovd.version())

    # Camera slightly below the x-axis, looking at the origin.
    scale = 1
    far = 1
    camPos = far
    zmult = 3
    myscreen.camera.SetPosition(0, -camPos / float(1000), zmult * camPos)
    myscreen.camera.SetClippingRange(-(zmult + 1) * camPos, (zmult + 1) * camPos)
    myscreen.camera.SetFocalPoint(0.0, 0, 0)

    # draw a unit-circle
    ca = ovdvtk.Circle(center=(0, 0, 0), radius=1, color=(0, 1, 1), resolution=50)
    myscreen.addActor(ca)

    # draw_ttt(myscreen, "R", 0,0,10000)
    draw_ttt(myscreen, "ABCDEFGHIJKLMNOPQRSTUVWXYZ", -0.5, 0, 80000)
    # draw_ttt(myscreen, "abcdefghijklmnopqrstuvwxyz", -0.5,-0.1,80000)
    # draw_ttt(myscreen, "1234567890*", -0.5,-0.2,80000)
    # draw_ttt(myscreen, "m", -0.5,-0.2,80000)

    print "PYTHON All DONE."

    # Hand control to the VTK interactor (blocks until window closes).
    myscreen.render()
    myscreen.iren.Start()
|
*Advanced Vitamin & Mineral Supplement for Men* A comprehensive multiple vitamin, mineral, and herbal supplement, with select amino acids, designed specifically for the unique nutritional requirements of men. These iron- and copper-free vegetable capsules deliver select nutrients to support the prostate, liver, and adrenals, along with broad-spectrum antioxidant protection. It also features patented Albion TRAACS amino acid chelates for optimal mineral absorption.
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates the Category and Country tables."""

    def forwards(self, orm):
        # Adding model 'Category'
        db.create_table(u'category', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=150)),
            ('created_user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='category_created_user', to=orm['users.User'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')()),
            ('modified_user', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='category_modified_user', null=True, to=orm['users.User'])),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
        ))
        db.send_create_signal('home', ['Category'])

        # Adding model 'Country'
        db.create_table(u'countries', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=150)),
        ))
        db.send_create_signal('home', ['Country'])

    def backwards(self, orm):
        # Deleting model 'Category'
        db.delete_table(u'category')

        # Deleting model 'Country'
        db.delete_table(u'countries')

    # Frozen ORM snapshot used by South to construct the 'orm' argument
    # above; auto-generated — do not edit by hand.
    models = {
        'home.category': {
            'Meta': {'object_name': 'Category', 'db_table': "u'category'"},
            'created': ('django.db.models.fields.DateTimeField', [], {}),
            'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'category_created_user'", 'to': "orm['users.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'category_modified_user'", 'null': 'True', 'to': "orm['users.User']"}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
        },
        'home.country': {
            'Meta': {'object_name': 'Country', 'db_table': "u'countries'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
        },
        'users.user': {
            'Meta': {'object_name': 'User', 'db_table': "u'user'"},
            'bio': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'country': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'country_id'"}),
            'data': ('django.db.models.fields.TextField', [], {}),
            'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'interests': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['users.User']", 'null': 'True', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'picture_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'picture_original_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'skills': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {}),
            'token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
        }
    }

    complete_apps = ['home']
|
Pinkerton Mirrors recommends that you print out and keep a copy of these terms and conditions for future reference.
This site is owned and operated by Pinkerton Mirrors ('Pinkerton Mirrors', 'we', 'us' or 'our'). If you have any questions or comments with regard to these terms and conditions, please contact us by either email: pinkertonmirrors@fireflyuk.net or phone: 01487 773566 (Monday–Friday, 09:00 to 17:00).
Any contract for purchases made through the website will be with Pinkerton Mirrors. Pinkerton Mirrors must receive payment of the whole of the price for the goods that you order before your order can be accepted, and the contract formed.
Once payment has been received, Pinkerton Mirrors will confirm that your order has been received by sending an email to you at the email address you provide in your registration form. The shipment email will include your name, the order number and the total price. Pinkerton Mirrors' acceptance of your order brings into existence a legally binding contract between us on these terms. Any term sought to be imposed by you in your order will not form part of the contract.
Pinkerton Mirrors is entitled to withdraw from any contract in the case of obvious errors or inaccuracies regarding the goods appearing on our website. If an error or inaccuracy is discovered with regards to the advertised price of the goods that you have ordered, we will contact you as soon as possible by e-mail. This will be to inform you of the correct price of the goods, and to ask you if you wish to continue with the order at the amended price, or to cancel the order altogether.
These terms and conditions only cover the Pinkerton Mirrors website. Any other websites which are linked to from this site are governed by their own terms and conditions. We accept no responsibility or liability for the content or operation of websites, which are not under our control. We are required by law to tell you that sales can be concluded in English only and that no public filing requirements apply.
Pinkerton Mirrors own the copyright, trademarks and other intellectual property rights in all material and content on this website, which you may use, download, copy, publish, transmit or otherwise make available by any other means only for your own personal, non–commercial use. Any other use or reproduction of the material or content is strictly prohibited.
While we will use reasonable endeavours to verify the accuracy of any information we place on the Pinkerton Mirrors website, we make no warranties, whether express or implied in relation to its accuracy.
We make no warranty that the Pinkerton Mirrors website will meet your requirements or will be uninterrupted, timely or error–free, that defects will be corrected, or that the site or the server that makes it available are free of viruses or bugs or represents the full functionality, accuracy, and reliability of the website.
We will not be responsible or liable to you for any loss of content or material uploaded or transmitted through the Pinkerton Mirrors website.
Pinkerton Mirrors accepts payment by Visa, MasterCard, American Express, Switch, Maestro and Delta. All product prices and delivery charges are shown in UK pounds sterling. Your payment card company will perform any currency conversion.We take payment on order.
If Pinkerton Mirrors has insufficient stock to deliver the goods ordered by you, we will notify you as soon as possible; any sum debited by Pinkerton Mirrors from your credit card will be re-credited to your account, and Pinkerton Mirrors will notify you by email at the address given by you in your order form. The refund will be made as soon as possible and in any event within 30 days of your order. Pinkerton Mirrors will not be obliged to offer any additional compensation for disappointment suffered.
Pinkerton Mirrors reserves the right to make changes to our site, policies, and these terms and conditions at any time. If any of these conditions shall be deemed invalid, void, or for any reason unforeseen, that condition shall be deemed severable and shall not affect the validity and enforceability of any remaining condition.
Except as set out below, neither Pinkerton Mirrors nor any of their directors, employees or other representatives will be liable for damages arising out of or in connection with the use of this site. Pinkerton mirrors does not accept liability (except as set out below) for any errors or omissions and reserves the right to change information, specifications and descriptions of listed goods, products and services. Pinkerton mirrors does not accept liability for any indirect loss, consequential loss, loss of data, loss of income or profit, loss of or damage to property and/or loss from claims of third parties arising out of the use of the Pinkerton Mirrors web site or for any products purchased from the Pinkerton Mirrors web site. Pinkerton Mirrors will only be liable for direct loss up to a maximum total of the price of the product purchased in respect of any claim.
Pinkerton Mirrors prides itself on quality craftsmanship and a superior product. The mirror and glass products we supply are of the highest grade, and produced using state-of-the-art equipment. Our products are inspected during fabrication, and before packing for dispatch.
Our warranty applies to the glass mirror. (The warranty covers the product only and not installation or any other out-of-pocket expenses.)
Our warranty guarantees against silver spoilage, cloudiness, and other imperfections that may occur in the glass mirror. However, our warranties do not cover scratches, cracks or chips which should be reported within 24hours of delivery and no more than seven days after delivery. Should your mirror arrive damaged, please report it to our customer services.
Our warranty does not cover damage caused by improper care.
|
'''
Author: Fabio Rizzello
Functions to create fixtures for a provided list of teams.
'''
# Placeholder opponent used when the number of teams is odd so every real
# team still gets a fixture each round; a match against it is a bye.
ghost_team = "___"
def create_fixtures(teams_list):
    """Build a double round-robin schedule for the given teams.

    Returns a list of rounds; each round is a list of ``[home, away]``
    pairs.  With an odd number of teams the placeholder ``ghost_team``
    is added as an extra opponent, so a match against it is a bye.
    Unlike the original implementation, the caller's list is never
    mutated and an empty input yields an empty schedule.
    """
    if not teams_list:
        return []
    # Work on a copy so an odd-sized input list is not altered in place.
    teams = list(teams_list)
    if len(teams) % 2 == 1:
        teams.append(ghost_team)
    teams_number = len(teams)
    total_rounds = (teams_number - 1) * 2    # each pairing played home and away
    matches_per_round = teams_number // 2    # floor division: Py2/Py3 safe
    rounds = []
    for single_round in range(total_rounds):
        day = []
        for mat in range(matches_per_round):
            # Circle method: the last team stays fixed, the others rotate.
            home = (single_round + mat) % (teams_number - 1)
            away = (teams_number - 1 - mat + single_round) % (teams_number - 1)
            if mat == 0:
                away = teams_number - 1
            # Alternate venues between the two halves of the season.
            if single_round % 2 == 0:
                day.append([teams[home], teams[away]])
            else:
                day.append([teams[away], teams[home]])
        rounds.append(day)
    return rounds
def print_matches(rounds):
    """Print the schedule, one line per match, a blank line between rounds.

    Each match is rendered as ``home away``.  Writes through
    ``sys.stdout`` instead of the Python-2-only ``print`` statement, so
    the function behaves identically under Python 2 and Python 3.
    """
    import sys  # local import keeps the module header untouched
    for single_round in rounds:
        for match in single_round:
            sys.stdout.write(' '.join(str(team) for team in match) + '\n')
        sys.stdout.write('\n')
def try_module():
    """Smoke-test the module by printing fixtures for a four-team league."""
    print_matches(create_fixtures(['Napoli', 'Parma', 'Juventus', 'Roma']))
#print_matches(create_fixtures(['Napoli', 'Parma', 'Juventus', 'Roma', "cagat"]))
#try_module()
|
We are a forward-looking employer that offers a dynamic and exciting future in a fast paced industry. Combine this with the scope of our parent organisation New Zealand Post and you have a great organisation to be a part of! Our employees also have access to careers within the wider New Zealand Post Group.
With a strong commitment to developing our people, recognising and rewarding strong performance and a great range of benefits we are truly a great place to work.
This is backed by what our employees say. Our scores on the JRA/Kenexa Engagement Survey consistently see us scoring on a par with New Zealand's Best Workplaces.
Check out Your Career, Recognition and Reward and Caring for you and your family for more great reasons why you should join us.
|
#!/usr/bin/python
import math
import os,sys
ncs_lib_path = ('../../../../python/')
sys.path.append(ncs_lib_path)
import ncs
def run(argv):
sim = ncs.Simulation()
bursting_parameters = sim.addNeuron("bursting","izhikevich",
{
"a": 0.02,
"b": 0.3,
"c": -50.0,
"d": 4.0,
"u": -12.0,
"v": -65.0,
"threshold": 30,
})
group_1=sim.addNeuronGroup("group_1",2,bursting_parameters,None)
if not sim.init(argv):
print "failed to initialize simulation."
return
sim.addStimulus("sine_current",
{
"amplitude_scale":10,
"time_scale": 200.0 / math.pi,
"phase": 0.0,
"amplitude_shift":10
},
group_1,
1,
0.01,
1.0)
# current_report=sim.addReport("group_1","neuron","synaptic_current",1.0)
# current_report.toStdOut()
voltage_report=sim.addReport("group_1","neuron","input_current",1.0,0.0,1.0).toStdOut()
#voltage_report.toAsciiFile("./bursting_izh.txt")
sim.run(duration=1.0)
return
# Allow the model to be launched directly; forward CLI args to the driver.
if __name__ == "__main__":
    run(sys.argv)
|
Local police are on the lookout for a bogus policeman who robbed tourists of their credit cards and PIN numbers.
He approached tourists on Tuesday 10th, while they were taking photos at a local viewpoint, near Los Gigantes, and asked them for their paperwork. Those who could not provide any ID were then asked for their credit cards and PIN numbers. He then left them declaring he was going to the nearest bank to verify the cards.
Three tourists obliged and handed over their cards with PIN numbers and the cheeky criminal drew out almost Euros 2000 at the nearest cash point.
|
"""Module that provides the default implementation of MutableBinaryTreeLadenNode.
TODO: Delete as no search here uses this."""
from mutable_binary_tree_laden_node import MutableBinaryTreeLadenNode
class DefaultImplMutableBinaryTreeLadenNode(MutableBinaryTreeLadenNode):
    """Node of a binary tree, carrying a payload object.

    Left and right children are either None or also nodes.
    Instances are mutated in place (rather than recreated) to avoid
    excessive object creation.
    """

    def __init__(self, payload):
        """Initialize a childless node holding *payload*."""
        self.payload = payload
        self.left_child = None
        self.right_child = None

    def get_payload(self):
        """Return the payload; does not change state."""
        return self.payload

    def get_left_child(self):
        """Return the left child or None; does not change state."""
        return self.left_child

    def get_right_child(self):
        """Return the right child or None; does not change state."""
        return self.right_child

    def swap_payload(self, payload):
        """Set the new payload, return the old payload."""
        old_payload = self.payload
        self.payload = payload
        return old_payload

    def swap_left_child(self, node):
        """Set node (may be None) as new left child, return the previous left child."""
        old_left_child = self.left_child
        self.left_child = node
        # Bug fix: original returned the misspelled, undefined name
        # ``odl_left_child`` and raised NameError at runtime.
        return old_left_child

    def swap_right_child(self, node):
        """Set node (may be None) as new right child, return the previous right child."""
        old_right_child = self.right_child
        self.right_child = node
        # Bug fix: original returned the misspelled, undefined name
        # ``odl_right_child`` and raised NameError at runtime.
        return old_right_child
|
We are excited to partner with Paychex to help you with your payroll needs.
Stop wasting your valuable time on that tedious chore of doing payroll. With Paychex, your payroll is done for you quickly and accurately, with less time and effort on your part. This frees up your time so you can make better use of your business day.
You get a choice of options in how you want to deliver pay to your employees. You can choose from traditional paper checks to Paychex Visa debit card, or make your employees even happier by depositing directly into their accounts.
Access payroll information, including time sheets, reports and general ledger over a secure internet site. Makes payroll even easier for you and your accountant.
Call 303.443.4672 or stop by one of our branches to get connected to our Paychex representative.
|
# -*- coding: utf-8 -*-
"""
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.urls import include, path, re_path
from django.views import defaults as default_views
from scuole.campuses.sitemaps import CampusSitemap
from scuole.core.sitemaps import StaticSitemap
from scuole.core.views import (
AboutView,
AcceptRedirectView,
LandingView,
LookupView,
SearchView,
)
from scuole.districts.sitemaps import DistrictSitemap
from scuole.states.sitemaps import StateSitemap
from scuole.cohorts.sitemaps_county import CountyCohortSitemap
from scuole.cohorts.sitemaps_region import RegionCohortSitemap
from scuole.cohorts.sitemaps_state import StateCohortSitemap
# Sitemap registry: section label -> Sitemap class.  Consumed by the
# django.contrib.sitemaps view wired up in ``urlpatterns`` below.
sitemaps = {
    "scuole.campuses": CampusSitemap,
    "scuole.districts": DistrictSitemap,
    "scuole.states": StateSitemap,
    "scuole.cohorts_counties": CountyCohortSitemap,
    "scuole.cohorts_regions": RegionCohortSitemap,
    "scuole.cohorts_states": StateCohortSitemap,
    "scuole.core": StaticSitemap,
}
def trigger_error(request):
    """Deliberately raise ZeroDivisionError to verify error reporting.

    Mounted at ``sentry-debug/`` so the error-monitoring integration
    (e.g. Sentry) can be exercised end to end.  The crash is intentional.
    """
    1 / 0
# Main URL table.  Campus URLs are included by the districts/states apps;
# the sitemap view serves the ``sitemaps`` registry defined above.
urlpatterns = [
    path('sentry-debug/', trigger_error),
    path("", LandingView.as_view(), name="landing"),
    path("outcomes/", include("scuole.cohorts.urls", namespace="cohorts")),
    path("districts/", include("scuole.districts.urls", namespace="districts")),
    path("states/", include("scuole.states.urls", namespace="states")),
    path("search/", SearchView.as_view(), name="search"),
    path("lookup/", LookupView.as_view(), name="lookup"),
    path("about/", AboutView.as_view(), name="about"),
    path("redirect/", AcceptRedirectView.as_view(), name="redirect"),
    path("admin/", admin.site.urls),
    path(
        "sitemap.xml",
        sitemap,
        {"sitemaps": sitemaps},
        name="django.contrib.sitemaps.views.sitemap",
    ),
]
# Error pages are normally rendered only when DEBUG = False; expose them
# here so they can be previewed during development.  (The original comment
# had this inverted.)
if settings.DEBUG:
    urlpatterns += [
        path(
            "400/", default_views.bad_request, {"exception": Exception("Bad request")}
        ),
        path(
            "403/",
            default_views.permission_denied,
            {"exception": Exception("Permission denied")},
        ),
        path(
            "404/",
            default_views.page_not_found,
            {"exception": Exception("Page not found")},
        ),
        path("500/", default_views.server_error),
    ]
    # django-debug-toolbar is optional; mount it only when installed.
    if "debug_toolbar" in settings.INSTALLED_APPS:
        import debug_toolbar

        urlpatterns = [
            re_path(r"^__debug__/", include(debug_toolbar.urls))
        ] + urlpatterns
|
Zora Neale Hurston’s novels and short stories are an example of writing motivated by true life experiences. This unit will focus on the study of Ms. Hurston’s life and how her personal experiences influenced the development of one of her most prized novels, Their Eyes Were Watching God. The goal of the unit will be to use brain-based strategies, within an inclusive philosophy, to increase the students’ knowledge of Ms. Hurston’s life and how elements of it emerge in her novel. The students will be able to use this information to better understand, in Ms. Hurston’s case, the link between personal experience and creative expression. As a culminating project, students will develop a literary work based on their own or a family member’s life experiences.
Posted on March 5, 2013 by Scott Gartlan. This entry was posted in 2012, Curriculum Units, Vol 2: Reading African American Lives and tagged American, Autobiography, Harlem Renaissance, History, Literature. Bookmark the permalink.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.