Dataset columns (one row of values per file below):

| repo_name (string, 6–100 chars) | path (string, 4–294 chars) | copies (string, 1–5 chars) | size (string, 4–6 chars) | content (string, 606–896k chars) | license (string, 15 classes) |
|---|---|---|---|---|---|
grlee77/scipy | benchmarks/benchmarks/cluster.py | 13 | 1796 | import numpy as np
from numpy.testing import suppress_warnings
from .common import Benchmark, safe_import
with safe_import():
from scipy.cluster.hierarchy import linkage
from scipy.cluster.vq import kmeans, kmeans2, vq
class HierarchyLinkage(Benchmark):
    """Time scipy.cluster.hierarchy.linkage over each linkage method."""

    params = ['single', 'complete', 'average', 'weighted', 'centroid',
              'median', 'ward']
    param_names = ['method']

    def __init__(self):
        # Fixed seed: every run clusters the same 2000 random 2-D points.
        self.X = np.random.RandomState(0).randn(2000, 2)

    def time_linkage(self, method):
        linkage(self.X, method=method)
class KMeans(Benchmark):
    """Time scipy.cluster.vq.kmeans for several codebook sizes."""

    params = [2, 10, 50]
    param_names = ['k']

    def __init__(self):
        # 1000 observations in 5 dimensions, reproducible via a fixed seed.
        self.obs = np.random.RandomState(0).rand(1000, 5)

    def time_kmeans(self, k):
        kmeans(self.obs, k, iter=10)
class KMeans2(Benchmark):
    """Time scipy.cluster.vq.kmeans2 across k and initialization methods."""

    params = [[2, 10, 50], ['random', 'points', '++']]
    param_names = ['k', 'init']

    def __init__(self):
        self.obs = np.random.RandomState(0).rand(1000, 5)

    def time_kmeans2(self, k, init):
        # Empty clusters are expected for some seed/init combinations;
        # silence the warning so it does not pollute benchmark output.
        with suppress_warnings() as sup:
            sup.filter(UserWarning,
                       "One of the clusters is empty. Re-run kmeans with a "
                       "different initialization")
            kmeans2(self.obs, k, minit=init, iter=10)
class VQ(Benchmark):
    """Time scipy.cluster.vq.vq for several codebook sizes and dtypes."""

    params = [[2, 10, 50], ['float32', 'float64', 'float128']]
    param_names = ['k', 'dtype']

    def __init__(self):
        rng = np.random.RandomState(0)
        self.data = rng.rand(5000, 5)
        self.cbook_source = rng.rand(50, 5)

    def setup(self, k, dtype):
        # Recast in setup() so dtype conversion cost stays out of the timing.
        self.obs = self.data.astype(dtype)
        self.cbook = self.cbook_source[:k].astype(dtype)

    def time_vq(self, k, dtype):
        vq(self.obs, self.cbook)
| bsd-3-clause |
mangosR2/mangos | dep/ACE_wrappers/bin/svn_props.py | 95 | 1292 | #!/usr/bin/python
import sys
import re
import os
import string
print """WARNING: this script is dumb. I mean, really, really dumb. Every file is treated
as a text file, so if you are checking in any binary files, YOU MUST set a non-text
MIME type by hand, otherwise it WILL be corrupted by the checkout process.
A better approach will be to add the unmatched files to the config file in
ACE/docs/svn/config (and update yours!) so others won't have to put up with them
in the future.
To use this program, copy and paste the output from the svn command into standard
input.
"""
foo = raw_input("That being said, if you want to continue, press enter")
sin, sout = os.popen2 ("svn info")
sin.close ()
os.wait ()
url = ""
root = ""
path = ""
for line in sout.readlines ():
if line.startswith ("URL: "):
url = line.replace ("URL: ", "")[:-1]
if line.startswith ("Repository Root: "):
root = line.replace ("Repository Root: ", "")[:-1]
path = url.replace (root, "")[1:] + '/'
files = ""
eol_style = " svn ps svn:eol-style native "
keywords = " svn ps svn:keywords 'Author Date Id Revision' "
for line in sys.stdin.readlines ():
ln = line[0:line.find (':')] + ' '
ln = ln.replace (path,"")
os.system (eol_style + ln)
os.system (keywords + ln)
| gpl-2.0 |
ipaoTAT/python-cloudfoundry | cloudfoundry/routes.py | 2 | 1172 | __author__ = 'mcowger'
import logging
from pprint import pformat, pprint
class CloudFoundryRoute(object):
    """A single Cloud Foundry route record built from an API response."""

    @classmethod
    def get_class_name(cls):
        """Return this class's name (used when pretty-printing instances)."""
        return cls.__name__

    def __str__(self):
        # Header line, then every instance attribute sorted by name.
        header = "<{}>@0x{}:\n".format(self.get_class_name(), id(self))
        attr_lines = []
        for attr in sorted(set(self.__dict__)):
            attr_lines.append(" %s: %s" % (attr.rjust(16), self.__dict__[attr]))
        return header + "\n".join(attr_lines)

    def __repr__(self):
        return self.__str__()

    def __init__(
            self,
            host=None,
            guid=None,
            url=None,
            domain_guid=None,
            space_guid=None,
            domain_url=None,
            space_url=None,
            apps_url=None,
            metadata=None
    ):
        # NOTE(review): the ``guid`` and ``url`` parameters are accepted but
        # ignored; both values are always read from ``metadata`` instead.
        self.host = host
        self.domain_guid = domain_guid
        self.space_guid = space_guid
        self.domain_url = domain_url
        self.space_url = space_url
        self.apps_url = apps_url
        self.guid = metadata['guid']
        self.url = metadata['url']

    @property
    def name(self):
        """The route's display name (its host component)."""
        return self.host

    @staticmethod
    def from_dict(metadata, dict):
        """Build a route from a metadata dict plus an entity-attribute dict."""
        return CloudFoundryRoute(metadata=metadata, **dict)
| mit |
zeinsteinz/tacker | tacker/tests/functional/vnfm/test_vnfm_param.py | 1 | 5503 | # Copyright 2015 Brocade Communications System, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import yaml
from tacker.plugins.common import constants as evt_constants
from tacker.tests import constants
from tacker.tests.functional import base
from tacker.tests.utils import read_file
class VnfmTestParam(base.BaseTackerTest):
    """Functional tests for the VNFD/VNF lifecycle using parameterized
    TOSCA templates: create, verify CRUD events, and delete."""

    def _test_vnfd_create(self, vnfd_file, vnfd_name):
        """Create a VNFD from *vnfd_file* named *vnfd_name*; verify the
        create/onboarded events and return the created vnfd instance."""
        yaml_input = read_file(vnfd_file)
        req_dict = {'vnfd': {'name': vnfd_name,
                             'attributes': {'vnfd': yaml_input}}}
        # Create vnfd
        vnfd_instance = self.client.create_vnfd(body=req_dict)
        self.assertIsNotNone(vnfd_instance)
        vnfd_id = vnfd_instance['vnfd']['id']
        self.assertIsNotNone(vnfd_id)
        self.verify_vnfd_events(
            vnfd_id, evt_constants.RES_EVT_CREATE,
            evt_constants.RES_EVT_ONBOARDED)
        return vnfd_instance

    def _test_vnfd_delete(self, vnfd_instance):
        """Delete the given VNFD and verify the delete events fire."""
        # Delete vnfd
        vnfd_id = vnfd_instance['vnfd']['id']
        self.assertIsNotNone(vnfd_id)
        try:
            self.client.delete_vnfd(vnfd_id)
        except Exception:
            assert False, "vnfd Delete failed"
        self.verify_vnfd_events(vnfd_id, evt_constants.RES_EVT_DELETE,
                                evt_constants.RES_EVT_NA_STATE)
        try:
            vnfd_d = self.client.show_vnfd(vnfd_id)
        except Exception:
            # NOTE(review): 'assert True' never evaluates its message, so the
            # possibly-unbound vnfd_d reference here is harmless.
            assert True, "Vnfd Delete success" + str(vnfd_d) + str(Exception)

    def _test_vnf_create(self, vnfd_instance, vnf_name, param_values):
        """Create a VNF from *vnfd_instance* with *param_values*, wait until
        ACTIVE, and return (vnf_instance, stored param values as a dict)."""
        # Create the vnf with values
        vnfd_id = vnfd_instance['vnfd']['id']
        # Create vnf with values file
        vnf_dict = dict()
        vnf_dict = {'vnf': {'vnfd_id': vnfd_id, 'name': vnf_name,
                            'attributes': {'param_values': param_values}}}
        vnf_instance = self.client.create_vnf(body=vnf_dict)
        self.validate_vnf_instance(vnfd_instance, vnf_instance)
        vnf_id = vnf_instance['vnf']['id']
        self.wait_until_vnf_active(
            vnf_id,
            constants.VNF_CIRROS_CREATE_TIMEOUT,
            constants.ACTIVE_SLEEP_TIME)
        self.assertIsNotNone(self.client.show_vnf(vnf_id)['vnf']['mgmt_url'])
        vnf_instance = self.client.show_vnf(vnf_id)
        self.verify_vnf_crud_events(
            vnf_id, evt_constants.RES_EVT_CREATE,
            evt_constants.PENDING_CREATE, cnt=2)
        self.verify_vnf_crud_events(
            vnf_id, evt_constants.RES_EVT_CREATE, evt_constants.ACTIVE)
        # Verify values dictionary is same as param values from vnf_show
        param_values = vnf_instance['vnf']['attributes']['param_values']
        param_values_dict = yaml.safe_load(param_values)
        return vnf_instance, param_values_dict

    def _test_vnf_delete(self, vnf_instance):
        """Delete the given VNF, wait for removal, verify delete events."""
        # Delete Vnf
        vnf_id = vnf_instance['vnf']['id']
        try:
            self.client.delete_vnf(vnf_id)
        except Exception:
            assert False, "vnf Delete failed"
        self.wait_until_vnf_delete(vnf_id,
                                   constants.VNF_CIRROS_DELETE_TIMEOUT)
        self.verify_vnf_crud_events(vnf_id, evt_constants.RES_EVT_DELETE,
                                    evt_constants.PENDING_DELETE, cnt=2)
        try:
            vnf_d = self.client.show_vnf(vnf_id)
        except Exception:
            # NOTE(review): message never evaluated (see _test_vnfd_delete).
            assert True, "Vnf Delete success" + str(vnf_d) + str(Exception)

    def test_vnfd_param_tosca_template(self):
        """VNFD-only lifecycle: create then delete a parameterized VNFD."""
        vnfd_name = 'sample_cirros_vnfd_tosca'
        vnfd_instance = self._test_vnfd_create(
            'sample-tosca-vnfd-param.yaml', vnfd_name)
        self._test_vnfd_delete(vnfd_instance)

    def test_vnf_param_tosca_template(self):
        """Full lifecycle: VNFD create, VNF create with a values file,
        round-trip check of the stored parameters, then delete both."""
        vnfd_name = 'cirros_vnfd_tosca_param'
        vnfd_instance = self._test_vnfd_create(
            'sample-tosca-vnfd-param.yaml', vnfd_name)
        values_str = read_file('sample-tosca-vnf-values.yaml')
        values_dict = yaml.safe_load(values_str)
        vnf_instance, param_values_dict = self._test_vnf_create(vnfd_instance,
            'test_vnf_with_parameters_tosca_template',
            values_dict)
        self.assertEqual(values_dict, param_values_dict)
        self._test_vnf_delete(vnf_instance)
        vnf_id = vnf_instance['vnf']['id']
        # NOTE(review): the checks below repeat verifications already done
        # inside the helper methods — presumably re-validation after delete.
        self.verify_vnf_crud_events(
            vnf_id, evt_constants.RES_EVT_CREATE,
            evt_constants.PENDING_CREATE, cnt=2)
        self.verify_vnf_crud_events(
            vnf_id, evt_constants.RES_EVT_CREATE, evt_constants.ACTIVE)
        self.wait_until_vnf_delete(vnf_id,
                                   constants.VNF_CIRROS_DELETE_TIMEOUT)
        self.verify_vnf_crud_events(vnf_id, evt_constants.RES_EVT_DELETE,
                                    evt_constants.PENDING_DELETE, cnt=2)
        self.addCleanup(self.client.delete_vnfd, vnfd_instance['vnfd']['id'])
| apache-2.0 |
mohamedhagag/dvit-odoo8 | account_financial_report_webkit/wizard/partner_balance_wizard.py | 8 | 1737 | # -*- coding: utf-8 -*-
# Author: Guewen Baconnier, Leonardo Pistone
# © 2011-2016 Camptocamp
from openerp import fields, models, api
class AccountPartnerBalanceWizard(models.TransientModel):
    """Will launch partner balance report and pass required args"""

    _inherit = "account.common.balance.report"
    _name = "partner.balance.webkit"
    _description = "Partner Balance Report"

    # Which partner account types to include in the report.
    result_selection = fields.Selection(
        [
            ('customer', 'Receivable Accounts'),
            ('supplier', 'Payable Accounts'),
            ('customer_supplier', 'Receivable and Payable Accounts')
        ],
        "Partner's", required=True, default='customer_supplier')
    # Optional partner filter; empty means "all partners".
    partner_ids = fields.Many2many(
        'res.partner', string='Filter on partner',
        help="Only selected partners will be printed. "
             "Leave empty to print all partners.")
    # same field in the module account
    display_partner = fields.Selection(
        [
            ('non-zero_balance', 'With non-zero balance'),
            ('all', 'All Partners')
        ], 'Display Partners', default='all')

    @api.multi
    def pre_print_report(self, data):
        """Merge this wizard's filter values into the report's data dict."""
        self.ensure_one()
        data = super(AccountPartnerBalanceWizard, self).pre_print_report(data)
        vals = self.read(['result_selection', 'partner_ids',
                          'display_partner'])[0]
        data['form'].update(vals)
        return data

    @api.multi
    def _print_report(self, data):
        """Return the ir.actions.report.xml action for the webkit report."""
        # we update form with display account value
        data = self.pre_print_report(data)
        return {'type': 'ir.actions.report.xml',
                'report_name': 'account.account_report_partner_balance_webkit',
                'datas': data}
| agpl-3.0 |
abadger/ansible | test/integration/targets/ansible-test/ansible_collections/ns/col/plugins/filter/check_pylint.py | 18 | 1239 | """
These test cases verify ansible-test version constraints for pylint and its dependencies across Python versions.
The initial test cases were discovered while testing various Python versions against ansible/ansible.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Python 3.8 fails with astroid 2.2.5 but works on 2.3.3
# syntax-error: Cannot import 'string' due to syntax error 'invalid syntax (<unknown>, line 109)'
# Python 3.9 fails with astroid 2.2.5 but works on 2.3.3
# syntax-error: Cannot import 'string' due to syntax error 'invalid syntax (<unknown>, line 104)'
# NOTE: every statement below is intentionally unusual; each one exists to
# trigger (or regression-test) a specific pylint/astroid behavior. Do not
# "fix" or simplify them.
import string
# Python 3.9 fails with pylint 2.3.1 or 2.4.4 with astroid 2.3.3 but works with pylint 2.5.0 and astroid 2.4.0
# 'Call' object has no attribute 'value'
result = {None: None}[{}.get('something')]
# pylint 2.3.1 and 2.4.4 report the following error but 2.5.0 and 2.6.0 do not
# blacklisted-name: Black listed name "foo"
# see: https://github.com/PyCQA/pylint/issues/3701
# regression: documented as a known issue and removed from ignore.txt so pylint can be upgraded to 2.6.0
# if future versions of pylint fix this issue then the ignore should be restored
foo = {}.keys()
| gpl-3.0 |
702nADOS/sumo | tools/contributed/sumopy/agilepy/lib_wx/test_app.py | 1 | 7648 | import os
import sys
import wx
from wx.lib.wordwrap import wordwrap
# NOTE: Python 2 module (print statements). When run as a script, extend
# sys.path so the agilepy libraries can be imported from the source tree.
if __name__ == '__main__':
    try:
        APPDIR = os.path.dirname(os.path.abspath(__file__))
    except:
        # __file__ is unavailable in some embedded contexts; fall back to argv.
        APPDIR = os.path.dirname(os.path.abspath(sys.argv[0]))
    AGILEDIR = os.path.join(APPDIR, '..')
    print 'APPDIR,AGILEDIR', APPDIR, AGILEDIR
    sys.path.append(AGILEDIR)
    libpaths = [AGILEDIR, os.path.join(
        AGILEDIR, "lib_base"), os.path.join(AGILEDIR, "lib_wx"), ]
    for libpath in libpaths:
        print ' libpath=', libpath
        lp = os.path.abspath(libpath)
        if not lp in sys.path:
            # print ' append',lp
            sys.path.append(lp)

from mainframe import *
# import corepackages
#from test_glcanvas import *
from ogleditor import *
##
##import wx
##
# try:
##    dirName = os.path.dirname(os.path.abspath(__file__))
# except:
##    dirName = os.path.dirname(os.path.abspath(sys.argv[0]))
##
# sys.path.append(os.path.split(dirName)[0])
# Application icon shipped next to this module; set to None to disable.
IMAGEDIR = os.path.join(os.path.dirname(__file__), "images")
ICONPATH = os.path.join(IMAGEDIR, 'icon_color_small.png')  # None
class MyApp(wx.App):
    """Demo wx application: opens an AgileMainframe hosting an OGleditor
    view, builds a menu and toolbar, and draws example OpenGL primitives."""

    def __init__(self, redirect=False, filename=None):
        wx.App.__init__(self, redirect, filename)
        #self.frame = wx.Frame(None, wx.ID_ANY, title='My Title')
        self.mainframe = AgileMainframe(
            title='MyApp', size_toolbaricons=(32, 32))
        if ICONPATH != None:
            icon = wx.Icon(ICONPATH, wx.BITMAP_TYPE_PNG, 16, 16)
            self.mainframe.SetIcon(icon)
        self.gleditor = self.mainframe.add_view("OGleditor", OGleditor)
        self.mainframe.Show()
        self.on_test()
        self.make_menu()
        self.make_toolbar()
        #canvas = gleditor.get_canvas()
        # canvas.add_element(lines)
        # canvas.add_element(triangles)
        # canvas.add_element(rectangles)

    def make_toolbar(self):
        """Populate the mainframe toolbar with new/open/save stock tools."""
        tsize = self.mainframe.get_size_toolbaricons()
        new_bmp = wx.ArtProvider.GetBitmap(wx.ART_NEW, wx.ART_TOOLBAR, tsize)
        open_bmp = wx.ArtProvider.GetBitmap(
            wx.ART_FILE_OPEN, wx.ART_TOOLBAR, tsize)
        save_bmp = wx.ArtProvider.GetBitmap(
            wx.ART_FILE_SAVE, wx.ART_TOOLBAR, tsize)
        #cut_bmp = wx.ArtProvider.GetBitmap(wx.ART_CUT, wx.ART_TOOLBAR, tsize)
        #copy_bmp = wx.ArtProvider.GetBitmap(wx.ART_COPY, wx.ART_TOOLBAR, tsize)
        #paste_bmp= wx.ArtProvider.GetBitmap(wx.ART_PASTE, wx.ART_TOOLBAR, tsize)
        self.mainframe.add_tool('new', self.on_open, new_bmp, 'create new doc')
        self.mainframe.add_tool('open', self.on_open, open_bmp, 'Open doc')
        self.mainframe.add_tool('save', self.on_save, save_bmp, 'Save doc')
        # self.toolbar.AddSeparator()
        # self.add_tool('cut',self.on_open,cut_bmp,'Cut')
        # self.add_tool('copy',self.on_open,copy_bmp,'Copy')
        # self.add_tool('paste',self.on_open,paste_bmp,'Paste')

    def make_menu(self):
        """Build the file/doc menu with open and save entries."""
        self.mainframe.menubar.append_menu('file')
        self.mainframe.menubar.append_menu('file/doc')
        self.mainframe.menubar.append_item('file/doc/open', self.on_open,
                                           shortkey='Ctrl+o', info='open it out')
        self.mainframe.menubar.append_item('file/doc/save', self.on_save,
                                           shortkey='Ctrl+s', info='save it out')

    def on_save(self, event):
        # Placeholder handler: only logs to stdout.
        print 'save it!!'

    def on_open(self, event):
        """Open a document"""
        #wildcards = CreateWildCards() + "All files (*.*)|*.*"
        print 'open it!!'

    def on_test(self, event=None):
        """Create a demo OGLdrawing; only the Polygons branch (if 1) is
        currently enabled — the other 'if 0' branches are kept as examples."""
        print '\non_test'
        vertices = np.array([
            [[0.0, 0.0, 0.0], [0.2, 0.0, 0.0]],  # 0 green
            [[0.0, 0.0, 0.0], [0.0, 0.9, 0.0]],  # 1 red
        ])
        colors = np.array([
            [0.0, 0.9, 0.0, 0.9],    # 0
            [0.9, 0.0, 0.0, 0.9],    # 1
        ])
        colors2 = np.array([
            [0.5, 0.9, 0.5, 0.5],    # 0
            [0.9, 0.5, 0.9, 0.5],    # 1
        ])
        colors2o = np.array([
            [0.8, 0.9, 0.8, 0.9],    # 0
            [0.9, 0.8, 0.9, 0.9],    # 1
        ])
        drawing = OGLdrawing()
        #-------------------------------------------------------------------------
        if 0:
            lines = Lines('lines', drawing)
            lines.add_drawobjs(vertices, colors)
            drawing.add_drawobj(lines)
        #-------------------------------------------------------------------------
        if 0:
            fancylines = Fancylines('fancylines', drawing)
            vertices_fancy = np.array([
                [[0.0, -1.0, 0.0], [2, -1.0, 0.0]],  # 0 green
                [[0.0, -1.0, 0.0], [0.0, -5.0, 0.0]],  # 1 red
            ])
            widths = [0.5,
                      0.3,
                      ]
            # print ' vertices_fancy\n',vertices_fancy
            # FLATHEAD = 0
            #BEVELHEAD = 1
            #TRIANGLEHEAD = 2
            #ARROWHEAD = 3
            fancylines.add_drawobjs(vertices_fancy,
                                    widths,  # width
                                    colors,
                                    beginstyles=[TRIANGLEHEAD, TRIANGLEHEAD],
                                    endstyles=[ARROWHEAD, ARROWHEAD])
            drawing.add_drawobj(fancylines)
        #-------------------------------------------------------------------------
        if 0:
            polylines = Polylines('polylines', drawing, joinstyle=BEVELHEAD)
            colors_poly = np.array([
                [0.0, 0.8, 0.5, 0.9],    # 0
                [0.8, 0.0, 0.5, 0.9],    # 1
            ])
            vertices_poly = np.array([
                [[0.0, 2.0, 0.0], [5.0, 2.0, 0.0], [
                    5.0, 7.0, 0.0], [0.0, 7.0, 0.0]],  # 0 green
                [[0.0, -2.0, 0.0], [-2.0, -2.0, 0.0]],  # 1 red
            ], np.object)
            widths = [0.5,
                      0.3,
                      ]
            # print ' vertices_poly\n',vertices_poly
            polylines.add_drawobjs(vertices_poly,
                                   widths,  # width
                                   colors_poly,
                                   beginstyles=[ARROWHEAD, ARROWHEAD],
                                   endstyles=[ARROWHEAD, ARROWHEAD])
            drawing.add_drawobj(polylines)
        #-------------------------------------------------------------------------
        if 1:
            polygons = Polygons('polygons', drawing, linewidth=5)
            colors_poly = np.array([
                [0.0, 0.9, 0.9, 0.9],    # 0
                [0.8, 0.2, 0.2, 0.9],    # 1
            ])
            vertices_poly = np.array([
                [[0.0, 2.0, 0.0], [5.0, 2.0, 0.0], [
                    5.0, 7.0, 0.0], [0.0, 7.0, 0.0]],  # 0 green
                [[0.0, -2.0, 0.0], [-2.0, -2.0, 0.0],
                 [-2.0, 0.0, 0.0]],  # 1 red
            ], np.object)
            print ' vertices_polygon\n', vertices_poly
            polygons.add_drawobjs(vertices_poly,
                                  colors_poly)
            drawing.add_drawobj(polygons)
        canvas = self.gleditor.get_canvas()
        canvas.set_drawing(drawing)
        #lines.add_drawobj([[0.0,0.0,0.0],[-0.2,-0.8,0.0]], [0.0,0.9,0.9,0.9])
        #circles.add_drawobj([1.5,0.0,0.0],0.6,colors2o[0], colors2[0])
        # canvas.zoom_tofit()
        # Defer zoom until the canvas has been realized by the event loop.
        wx.CallAfter(canvas.zoom_tofit)
# Script entry point: build the demo app and enter the wx main loop.
if __name__ == '__main__':
    # if len(sys.argv)==3:
    #     ident = sys.argv[1]
    #     dirpath = sys.argv[2]
    # else:
    #     ident = None
    #     dirpath = None
    myapp = MyApp(0)
    myapp.MainLoop()
| gpl-3.0 |
snowdream1314/scrapy | scrapy/extensions/closespider.py | 150 | 2462 | """CloseSpider is an extension that forces spiders to be closed after certain
conditions are met.
See documentation in docs/topics/extensions.rst
"""
from collections import defaultdict
from twisted.internet import reactor
from scrapy import signals
class CloseSpider(object):
    """Extension that closes the spider once any configured limit is hit:
    timeout, item count, page count, or error count (0 disables a limit)."""

    def __init__(self, crawler):
        self.crawler = crawler
        settings = crawler.settings
        self.close_on = {
            'timeout': settings.getfloat('CLOSESPIDER_TIMEOUT'),
            'itemcount': settings.getint('CLOSESPIDER_ITEMCOUNT'),
            'pagecount': settings.getint('CLOSESPIDER_PAGECOUNT'),
            'errorcount': settings.getint('CLOSESPIDER_ERRORCOUNT'),
        }
        self.counter = defaultdict(int)
        # Hook only the signals whose limits are actually enabled.
        if self.close_on.get('errorcount'):
            crawler.signals.connect(self.error_count,
                                    signal=signals.spider_error)
        if self.close_on.get('pagecount'):
            crawler.signals.connect(self.page_count,
                                    signal=signals.response_received)
        if self.close_on.get('timeout'):
            crawler.signals.connect(self.spider_opened,
                                    signal=signals.spider_opened)
        if self.close_on.get('itemcount'):
            crawler.signals.connect(self.item_scraped,
                                    signal=signals.item_scraped)
        crawler.signals.connect(self.spider_closed,
                                signal=signals.spider_closed)

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler)

    def error_count(self, failure, response, spider):
        seen = self.counter['errorcount'] + 1
        self.counter['errorcount'] = seen
        if seen == self.close_on['errorcount']:
            self.crawler.engine.close_spider(spider, 'closespider_errorcount')

    def page_count(self, response, request, spider):
        seen = self.counter['pagecount'] + 1
        self.counter['pagecount'] = seen
        if seen == self.close_on['pagecount']:
            self.crawler.engine.close_spider(spider, 'closespider_pagecount')

    def spider_opened(self, spider):
        # One-shot timer that force-closes the spider after the timeout.
        self.task = reactor.callLater(
            self.close_on['timeout'], self.crawler.engine.close_spider,
            spider, reason='closespider_timeout')

    def item_scraped(self, item, spider):
        seen = self.counter['itemcount'] + 1
        self.counter['itemcount'] = seen
        if seen == self.close_on['itemcount']:
            self.crawler.engine.close_spider(spider, 'closespider_itemcount')

    def spider_closed(self, spider):
        # Cancel the pending timeout timer, if one was scheduled and is live.
        pending = getattr(self, 'task', False)
        if pending and pending.active():
            pending.cancel()
| bsd-3-clause |
offbyone/Flexget | tests/test_exists_movie.py | 12 | 7586 | from __future__ import unicode_literals, division, absolute_import
import os
from tests import FlexGetBase, build_parser_function, use_vcr
from tests.util import maketemp
class BaseExistsMovie(FlexGetBase):
    """Shared test suite for the exists_movie plugin; the 'path' values in
    __yaml__ are rewritten in setup() to point at a temporary directory."""

    __yaml__ = """
        tasks:
          test_dirs:
            mock:
              - {title: 'Existence.2012'}
              - {title: 'The.Missing.2014'}
            accept_all: yes
            exists_movie:
              path: autogenerated in setup()
          test_files:
            mock:
              - {title: 'Duplicity.2009'}
              - {title: 'Downloaded.2013'}
              - {title: 'Gone.Missing.2013'}
            accept_all: yes
            exists_movie:
              path: autogenerated in setup()
              type: files
          test_lookup_imdb:
            mock:
              - {title: 'Existence.2012'}
              - {title: 'The.Matrix.1999'}
            accept_all: yes
            exists_movie:
              path: autogenerated in setup()
              lookup: imdb
          test_diff_qualities_allowed:
            mock:
              - {title: 'Quality.of.Life.480p'}
            accept_all: yes
            exists_movie:
              path: path autogenerated in setup()
              allow_different_qualities: yes
          test_diff_qualities_not_allowed:
            mock:
              - {title: 'Quality.of.Life.1080p'}
            accept_all: yes
            exists_movie: path autogenerated in setup()
          test_diff_qualities_downgrade:
            mock:
              - {title: 'Quality.of.Life.480p'}
            accept_all: yes
            exists_movie:
              path: path autogenerated in setup()
              allow_different_qualities: better
          test_diff_qualities_upgrade:
            mock:
              - {title: 'Quality.of.Life.1080p'}
            accept_all: yes
            exists_movie:
              path: path autogenerated in setup()
              allow_different_qualities: better
          test_propers:
            mock:
              - {title: 'Mock.S01E01.Proper'}
              - {title: 'Test.S01E01'}
            accept_all: yes
            exists_movie: path autogenerated in setup()
          test_invalid:
            mock:
              - {title: 'Invalid.S01E01'}
            accept_all: yes
            exists_movie: path autogenerated in setup()
    """

    # Fixture files/dirs created under the temp home before each test.
    test_files = [ 'Downloaded.2013.mkv', 'Invalid.jpg' ]
    test_dirs = [ 'Existence.2012', 'Quality.of.Life.720p', 'Subs']

    def __init__(self):
        self.test_home = None
        FlexGetBase.__init__(self)

    def setup(self):
        """Point every task's exists_movie path at a fresh temp dir and
        populate it with the fixture dirs and files."""
        FlexGetBase.setup(self)
        # generate config
        self.test_home = maketemp()
        for task_name in self.manager.config['tasks'].iterkeys():
            if isinstance(self.manager.config['tasks'][task_name]['exists_movie'], dict):
                self.manager.config['tasks'][task_name]['exists_movie']['path'] = self.test_home
            else:
                self.manager.config['tasks'][task_name]['exists_movie'] = self.test_home
        # create test dirs
        for test_dir in self.test_dirs:
            os.makedirs(os.path.join(self.test_home, test_dir))
        # create test files
        for test_file in self.test_files:
            open(os.path.join(self.test_home, test_file), 'a').close()

    def teardown(self):
        """Remove the fixture files/dirs and the temp home itself."""
        curdir = os.getcwd()
        os.chdir(self.test_home)
        for test_dir in self.test_dirs:
            os.removedirs(test_dir)
        for test_file in self.test_files:
            os.remove(test_file)
        os.chdir(curdir)
        os.rmdir(self.test_home)
        FlexGetBase.teardown(self)

    def test_existing_dirs(self):
        """exists_movie plugin: existing"""
        self.execute_task('test_dirs')
        assert not self.task.find_entry('accepted', title='Existence.2012'), \
            'Existence.2012 should not have been accepted (exists)'
        assert self.task.find_entry('accepted', title='The.Missing.2014'), \
            'The.Missing.2014 should have been accepted'

    def test_existing_files(self):
        """exists_movie plugin: existing"""
        self.execute_task('test_files')
        assert not self.task.find_entry('accepted', title='Downloaded.2013'), \
            'Downloaded.2013 should not have been accepted (exists)'
        assert self.task.find_entry('accepted', title='Gone.Missing.2013'), \
            'Gone.Missing.2013 should have been accepted'

    @use_vcr
    def test_lookup_imdb(self):
        """exists_movie plugin: existing"""
        self.execute_task('test_lookup_imdb')
        assert self.task.find_entry('accepted', title='The.Matrix.1999')['imdb_id'], \
            'The.Matrix.1999 should have an `imdb_id`'
        assert not self.task.find_entry('accepted', title='Existence.2012'), \
            'Existence.2012 should not have been accepted (exists)'

    def test_diff_qualities_allowed(self):
        """exists_movie plugin: existsting but w. diff quality"""
        self.execute_task('test_diff_qualities_allowed')
        assert self.task.find_entry('accepted', title='Quality.of.Life.480p'), \
            'Quality.of.Life.480p should have been accepted'

    def test_diff_qualities_not_allowed(self):
        """exists_movie plugin: existsting but w. diff quality"""
        self.execute_task('test_diff_qualities_not_allowed')
        assert self.task.find_entry('rejected', title='Quality.of.Life.1080p'), \
            'Quality.of.Life.1080p should have been rejected'

    def test_diff_qualities_downgrade(self):
        """Test worse qualities than exist are rejected."""
        self.execute_task('test_diff_qualities_downgrade')
        assert self.task.find_entry('rejected', title='Quality.of.Life.480p'), \
            'Quality.of.Life.480p should have been rejected'

    def test_diff_qualities_upgrade(self):
        """Test better qualities than exist are accepted."""
        self.execute_task('test_diff_qualities_upgrade')
        assert self.task.find_entry('accepted', title='Quality.of.Life.1080p'), \
            'Quality.of.Life.1080p should have been accepted'

    # The tests below are disabled by being wrapped in a string literal.
    '''
    def test_propers(self):
        """exists_movie plugin: new proper & proper already exists"""
        self.execute_task('test_propers')
        assert self.task.find_entry('accepted', title='Mock.S01E01.Proper'), \
            'new proper not accepted'
        assert self.task.find_entry('rejected', title='Test.S01E01'), \
            'pre-existin proper should have caused reject'

    def test_invalid(self):
        """exists_movie plugin: no episode numbering on the disk"""
        # shouldn't raise anything
        self.execute_task('test_invalid')

    def test_with_metainfo_series(self):
        """Tests that exists_movie works with series data from metainfo_series"""
        self.execute_task('test_with_metainfo_series')
        assert self.task.find_entry('rejected', title='Foo.Bar.S01E02.XViD'), \
            'Foo.Bar.S01E02.XViD should have been rejected(exists)'
        assert not self.task.find_entry('rejected', title='Foo.Bar.S01E03.XViD'), \
            'Foo.Bar.S01E03.XViD should not have been rejected'
    '''
class TestGuessitExistsMovie(BaseExistsMovie):
    """Run the shared exists_movie test suite with the 'guessit' parser."""

    def __init__(self):
        super(TestGuessitExistsMovie, self).__init__()
        self.add_tasks_function(build_parser_function('guessit'))
class TestInternalExistsMovie(BaseExistsMovie):
    """Run the shared exists_movie test suite with the 'internal' parser."""

    def __init__(self):
        super(TestInternalExistsMovie, self).__init__()
        self.add_tasks_function(build_parser_function('internal'))
| mit |
sunzuolei/youtube-dl | youtube_dl/extractor/letv.py | 78 | 7096 | # coding: utf-8
from __future__ import unicode_literals
import datetime
import re
import time
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
compat_urlparse,
)
from ..utils import (
determine_ext,
ExtractorError,
parse_iso8601,
int_or_none,
)
class LetvIE(InfoExtractor):
    """Extractor for single videos on www.letv.com (LeTV)."""
    IE_DESC = '乐视网'
    _VALID_URL = r'http://www\.letv\.com/ptv/vplay/(?P<id>\d+).html'

    _TESTS = [{
        'url': 'http://www.letv.com/ptv/vplay/22005890.html',
        'md5': 'cab23bd68d5a8db9be31c9a222c1e8df',
        'info_dict': {
            'id': '22005890',
            'ext': 'mp4',
            'title': '第87届奥斯卡颁奖礼完美落幕 《鸟人》成最大赢家',
            'timestamp': 1424747397,
            'upload_date': '20150224',
            'description': 'md5:a9cb175fd753e2962176b7beca21a47c',
        }
    }, {
        'url': 'http://www.letv.com/ptv/vplay/1415246.html',
        'info_dict': {
            'id': '1415246',
            'ext': 'mp4',
            'title': '美人天下01',
            'description': 'md5:f88573d9d7225ada1359eaf0dbf8bcda',
        },
    }, {
        'note': 'This video is available only in Mainland China, thus a proxy is needed',
        'url': 'http://www.letv.com/ptv/vplay/1118082.html',
        'md5': 'f80936fbe20fb2f58648e81386ff7927',
        'info_dict': {
            'id': '1118082',
            'ext': 'mp4',
            'title': '与龙共舞 完整版',
            'description': 'md5:7506a5eeb1722bb9d4068f85024e3986',
        },
        'skip': 'Only available in China',
    }]

    @staticmethod
    def urshift(val, n):
        # Emulates a 32-bit unsigned right shift (Python ints are unbounded).
        return val >> n if val >= 0 else (val + 0x100000000) >> n

    # ror() and calc_time_key() are reversed from a embedded swf file in KLetvPlayer.swf
    def ror(self, param1, param2):
        # 32-bit rotate-right of param1 by param2 bits.
        _loc3_ = 0
        while _loc3_ < param2:
            param1 = self.urshift(param1, 1) + ((param1 & 1) << 31)
            _loc3_ += 1
        return param1

    def calc_time_key(self, param1):
        """Compute the obfuscated 'tkey' token the playJson API requires."""
        _loc2_ = 773625421
        _loc3_ = self.ror(param1, _loc2_ % 13)
        _loc3_ = _loc3_ ^ _loc2_
        _loc3_ = self.ror(_loc3_, _loc2_ % 17)
        return _loc3_

    def _real_extract(self, url):
        """Fetch playJson metadata for the video and build the format list."""
        media_id = self._match_id(url)
        page = self._download_webpage(url, media_id)
        params = {
            'id': media_id,
            'platid': 1,
            'splatid': 101,
            'format': 1,
            'tkey': self.calc_time_key(int(time.time())),
            'domain': 'www.letv.com'
        }
        play_json_req = compat_urllib_request.Request(
            'http://api.letv.com/mms/out/video/playJson?' + compat_urllib_parse.urlencode(params)
        )
        # Optional proxy used to pass the mainland-China geo check.
        cn_verification_proxy = self._downloader.params.get('cn_verification_proxy')
        if cn_verification_proxy:
            play_json_req.add_header('Ytdl-request-proxy', cn_verification_proxy)

        play_json = self._download_json(
            play_json_req,
            media_id, 'Downloading playJson data')

        # Check for errors
        playstatus = play_json['playstatus']
        if playstatus['status'] == 0:
            flag = playstatus['flag']
            if flag == 1:
                msg = 'Country %s auth error' % playstatus['country']
            else:
                msg = 'Generic error. flag = %d' % flag
            raise ExtractorError(msg, expected=True)

        playurl = play_json['playurl']
        formats = ['350', '1000', '1300', '720p', '1080p']
        dispatch = playurl['dispatch']
        urls = []
        for format_id in formats:
            if format_id in dispatch:
                media_url = playurl['domain'][0] + dispatch[format_id][0]
                # Mimic what flvxz.com do
                url_parts = list(compat_urlparse.urlparse(media_url))
                qs = dict(compat_urlparse.parse_qs(url_parts[4]))
                qs.update({
                    'platid': '14',
                    'splatid': '1401',
                    'tss': 'no',
                    'retry': 1
                })
                url_parts[4] = compat_urllib_parse.urlencode(qs)
                media_url = compat_urlparse.urlunparse(url_parts)
                url_info_dict = {
                    'url': media_url,
                    'ext': determine_ext(dispatch[format_id][1]),
                    'format_id': format_id,
                }
                # Numeric height is only known for 720p/1080p style ids.
                if format_id[-1:] == 'p':
                    url_info_dict['height'] = int_or_none(format_id[:-1])
                urls.append(url_info_dict)

        # Publish time is rendered on the page in China Standard Time (UTC+8).
        publish_time = parse_iso8601(self._html_search_regex(
            r'发布时间 ([^<>]+) ', page, 'publish time', default=None),
            delimiter=' ', timezone=datetime.timedelta(hours=8))
        description = self._html_search_meta('description', page, fatal=False)

        return {
            'id': media_id,
            'formats': urls,
            'title': playurl['title'],
            'thumbnail': playurl['pic'],
            'description': description,
            'timestamp': publish_time,
        }
class LetvTvIE(InfoExtractor):
    """Extractor for letv.com series pages; yields one entry per episode."""
    _VALID_URL = r'http://www.letv.com/tv/(?P<id>\d+).html'
    _TESTS = [{
        'url': 'http://www.letv.com/tv/46177.html',
        'info_dict': {
            'id': '46177',
            'title': '美人天下',
            'description': 'md5:395666ff41b44080396e59570dbac01c'
        },
        'playlist_count': 35
    }]

    def _real_extract(self, url):
        playlist_id = self._match_id(url)
        page = self._download_webpage(url, playlist_id)

        # Collect every unique single-video link found on the page.
        media_urls = list(set(re.findall(
            r'http://www.letv.com/ptv/vplay/\d+.html', page)))
        entries = [self.url_result(media_url, ie='Letv')
                   for media_url in media_urls]

        # The first 'keywords' meta entry doubles as the series title.
        title = self._html_search_meta('keywords', page,
                                       fatal=False).split(',')[0]
        description = self._html_search_meta('description', page, fatal=False)

        return self.playlist_result(entries, playlist_id, playlist_title=title,
                                    playlist_description=description)
class LetvPlaylistIE(LetvTvIE):
    """Extractor for tv.letv.com curated playlist pages; inherits
    _real_extract from LetvTvIE and only overrides the URL pattern/tests."""
    _VALID_URL = r'http://tv.letv.com/[a-z]+/(?P<id>[a-z]+)/index.s?html'
    _TESTS = [{
        'url': 'http://tv.letv.com/izt/wuzetian/index.html',
        'info_dict': {
            'id': 'wuzetian',
            'title': '武媚娘传奇',
            'description': 'md5:e12499475ab3d50219e5bba00b3cb248'
        },
        # This playlist contains some extra videos other than the drama itself
        'playlist_mincount': 96
    }, {
        'url': 'http://tv.letv.com/pzt/lswjzzjc/index.shtml',
        'info_dict': {
            'id': 'lswjzzjc',
            # The title should be "劲舞青春", but I can't find a simple way to
            # determine the playlist title
            'title': '乐视午间自制剧场',
            'description': 'md5:b1eef244f45589a7b5b1af9ff25a4489'
        },
        'playlist_mincount': 7
    }]
| unlicense |
endlessm/chromium-browser | chrome/installer/mac/signing/commands.py | 2 | 3822 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
The commands module wraps operations that have side-effects.
"""
import os
import plistlib
import shutil
import stat
import subprocess
import tempfile
from . import logger
def file_exists(path):
    """Return True if *path* exists on disk (file, dir, or resolvable link)."""
    return os.path.exists(path)
def copy_files(source, dest):
    """Mirror *source* into *dest* via rsync, comparing by checksum and
    deleting items no longer present in *source*. *source* must not end
    with '/' so rsync copies the item itself, not just its contents."""
    assert source[-1] != '/'
    rsync_args = ['rsync', '--archive', '--checksum', '--delete', source, dest]
    subprocess.check_call(rsync_args)
def copy_dir_overwrite_and_count_changes(source, dest, dry_run=False):
    """Mirror the contents of |source| into |dest| with rsync and return the
    number of changed items (created, deleted, or content/permission change).

    Args:
        source: Source directory, without a trailing slash.
        dest: Destination directory; extra items in it are deleted.
        dry_run: If True, only report what would change.
    """
    assert source[-1] != '/'
    command = [
        'rsync', '--archive', '--checksum', '--itemize-changes', '--delete',
        source + '/', dest
    ]
    if dry_run:
        command.append('--dry-run')
    output = subprocess.check_output(command)
    # --itemize-changes will print a '.' in the first column if the item is not
    # being updated, created, or deleted. This happens if only attributes
    # change, such as a timestamp or permissions. Timestamp changes are
    # uninteresting for the purposes of determining changed content, but
    # permissions changes are not. Columns 6-8 are also checked so that files
    # that have potentially interesting attributes (permissions, owner, or
    # group) changing are counted, but column 5 for the timestamp is not
    # considered.
    # NOTE(review): output.split('\n') assumes str output (Python 2); under
    # Python 3 check_output returns bytes — confirm intended interpreter.
    changes = 0
    for line in output.split('\n'):
        if line == '' or (line[0] == '.' and line[5:8] == '...'):
            continue
        changes += 1
    return changes
def move_file(source, dest):
    """Moves the file at |source| to |dest|, working across filesystems."""
    shutil.move(source, dest)
def make_dir(at):
    """Creates a single new directory at |at|; the parent must already exist."""
    os.mkdir(at)
def write_file(path, contents):
    """Writes |contents| to the file at |path|, replacing any existing file."""
    with open(path, 'w') as out_file:
        out_file.write(contents)
def read_file(path):
    """Returns the entire contents of the file at |path| as a string."""
    with open(path, 'r') as in_file:
        return in_file.read()
def set_executable(path):
    """Makes the file at the specified path executable.

    Args:
        path: The path to the file to make executable.
    """
    # 0o755 = -rwxr-xr-x, the same bits the stat-constant spelling
    # (S_IRUSR|S_IWUSR|S_IXUSR|S_IRGRP|S_IXGRP|S_IROTH|S_IXOTH) produces.
    os.chmod(path, 0o755)
def run_command(args, **kwargs):
    """Logs and runs |args| as a subprocess, raising on non-zero exit.

    Args:
        args: Argument list for the command.
        **kwargs: Passed through to subprocess.check_call.

    Raises:
        subprocess.CalledProcessError: If the command exits non-zero.
    """
    logger.info('Running command: %s', args)
    subprocess.check_call(args, **kwargs)
def run_command_output(args, **kwargs):
    """Logs and runs |args| as a subprocess, returning its standard output.

    Args:
        args: Argument list for the command.
        **kwargs: Passed through to subprocess.check_output.

    Returns:
        The command's captured stdout.

    Raises:
        subprocess.CalledProcessError: If the command exits non-zero.
    """
    logger.info('Running command: %s', args)
    return subprocess.check_output(args, **kwargs)
class PlistContext(object):
    """
    PlistContext is a context manager that reads a plist on entry, providing
    the contents as a dictionary. If |rewrite| is True, then the same dictionary
    is re-serialized on exit. If |create_new| is True, then the file is not read
    but rather an empty dictionary is created.
    """

    def __init__(self, plist_path, rewrite=False, create_new=False):
        self._path = plist_path
        self._rewrite = rewrite
        self._create_new = create_new

    def _read_plist(self):
        # plistlib.readPlist was deprecated in Python 3.4 and removed in 3.9;
        # use the modern load() API when available, falling back for old
        # interpreters.
        if hasattr(plistlib, 'load'):
            with open(self._path, 'rb') as f:
                return plistlib.load(f)
        return plistlib.readPlist(self._path)

    def _write_plist(self):
        # Mirror of _read_plist() for serialization; writePlist was removed
        # in Python 3.9 alongside readPlist.
        if hasattr(plistlib, 'dump'):
            with open(self._path, 'wb') as f:
                plistlib.dump(self._plist, f)
        else:
            plistlib.writePlist(self._plist, self._path)

    def __enter__(self):
        if self._create_new:
            self._plist = {}
        else:
            self._plist = self._read_plist()
        return self._plist

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Only rewrite on a clean exit, so a propagating exception cannot
        # clobber the on-disk plist with partially-updated contents.
        if self._rewrite and not exc_type:
            self._write_plist()
        self._plist = None
class WorkDirectory(object):
    """
    WorkDirectory creates a temporary directory on entry, storing the path as
    the |model.Paths.work| path. On exit, the directory is destroyed.
    """

    def __init__(self, paths):
        self._paths = paths
        # The temp dir is created eagerly at construction time, not on entry.
        self._temp_dir = tempfile.mkdtemp(prefix='chromesign_')

    def __enter__(self):
        return self._paths.replace_work(self._temp_dir)

    def __exit__(self, exc_type, value, traceback):
        shutil.rmtree(self._temp_dir)
| bsd-3-clause |
jamesthechamp/zamboni | mkt/users/migrations/0001_initial.py | 13 | 2691 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import mkt.site.models
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration: creates the ``users`` and ``users_notifications``
    tables backing the UserProfile and UserNotification models."""
    dependencies = [
    ]
    operations = [
        # UserProfile doubles as the auth user model (see the ForeignKey to
        # settings.AUTH_USER_MODEL below), hence password/last_login fields.
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                # Firefox Accounts identifier; nullable for legacy accounts.
                ('fxa_uid', models.CharField(max_length=255, unique=True, null=True, blank=True)),
                ('display_name', models.CharField(default=b'', max_length=255, null=True, blank=True)),
                ('email', models.EmailField(max_length=75, unique=True, null=True)),
                ('deleted', models.BooleanField(default=False)),
                ('read_dev_agreement', models.DateTimeField(null=True, blank=True)),
                ('last_login_ip', models.CharField(default=b'', max_length=45, editable=False)),
                ('source', models.PositiveIntegerField(default=0, editable=False, db_index=True)),
                ('is_verified', models.BooleanField(default=True)),
                ('region', models.CharField(max_length=11, null=True, editable=False, blank=True)),
                ('lang', models.CharField(max_length=5, null=True, editable=False, blank=True)),
                ('enable_recommendations', models.BooleanField(default=True)),
            ],
            options={
                'db_table': 'users',
            },
            bases=(mkt.site.models.OnChangeMixin, models.Model),
        ),
        # Per-user opt-in/opt-out flags for individual notification types.
        migrations.CreateModel(
            name='UserNotification',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('notification_id', models.IntegerField()),
                ('enabled', models.BooleanField(default=False)),
                ('user', models.ForeignKey(related_name='notifications', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'db_table': 'users_notifications',
            },
            bases=(models.Model,),
        ),
    ]
| bsd-3-clause |
infosec-au/CaptchaJackingPoC | failed_attempts/poc.py | 3 | 1494 | import requests
import cherrypy
import re
from random import choice
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from PyQt4.QtWebKit import *
class Render(QWebPage):
    """Headless QtWebKit page loader: fetches |url|, lets the page load
    (including any JavaScript-driven content), and stores the rendered main
    frame on ``self.frame`` once loading finishes.
    """
    def __init__(self, url):
        self.app = QApplication(sys.argv)
        QWebPage.__init__(self)
        self.loadFinished.connect(self._loadFinished)
        self.mainFrame().load(QUrl(url))
        # Enters the Qt event loop; presumably blocks here until
        # _loadFinished calls app.quit() — confirm against Qt docs.
        self.app.exec_()
    def _loadFinished(self, result):
        self.frame = self.mainFrame()
        self.app.quit()
# Render the reddit registration page through QtWebKit so that any
# script-generated markup is present in the HTML we scrape.
url = 'http://www.reddit.com/register'
r = Render(url)
html = r.frame.toHtml()
# Pick a random User-Agent from a local file, one UA string per line.
ualist= [line.rstrip() for line in open('useragents.txt')]
headers = {
    'User-Agent': '{0}'.format(choice(ualist)),
}
# NOTE(review): the character class below ('captcha/[a-zA-Z0-9.png]*')
# matches any run of alphanumerics, dots, 'p', 'n', 'g' after 'captcha/' —
# it does not anchor a literal '.png' suffix; verify against real markup.
cap_recomp = re.compile(r'captcha/[a-zA-Z0-9.png]*')
# The 32-char value attribute is the captcha identifier embedded in the form.
cap_iden = re.compile(r'value="[A-Za-z0-9]{32}')
captchaf = cap_recomp.search(html)
captchai = cap_iden.search(html)
cident = captchai.group()
cident = cident.replace("value=\"", "")
print cident
cfilename = captchaf.group()
cfilename1 = captchaf.group()
# Flatten the relative path (e.g. 'captcha/xyz.png') into a local filename.
cap_file = cfilename1.replace("/","")
captchaURL = "http://www.reddit.com/" + cfilename
s = requests.Session()
print captchaURL
# Stream the captcha image to disk in 1 KiB chunks.
with open(str(cap_file), 'wb') as handle:
    request = s.get(captchaURL, headers=headers, stream=True)
    for block in request.iter_content(1024):
        if not block:
            break
        handle.write(block)
# Abandoned cherrypy front-end, kept for reference:
# class poc:
#     def index(self):
#         return "Hello world!"
#     index.exposed = True
# cherrypy.quickstart(poc())
arista-eosplus/ansible | lib/ansible/modules/network/net_system.py | 13 | 3476 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: net_system
version_added: "2.4"
author: "Ricardo Carrillo Cruz (@rcarrillocruz)"
short_description: Manage the system attributes on network devices
description:
- This module provides declarative management of node system attributes
on network devices. It provides an option to configure host system
parameters or remove those parameters from the device active
configuration.
options:
hostname:
description:
- Configure the device hostname parameter. This option takes an ASCII string value.
domain_name:
description:
- Configure the IP domain name
on the remote device to the provided value. Value
should be in the dotted name form and will be
appended to the C(hostname) to create a fully-qualified
domain name.
domain_search:
description:
- Provides the list of domain suffixes to
append to the hostname for the purpose of doing name resolution.
This argument accepts a list of names and will be reconciled
with the current active configuration on the running node.
lookup_source:
description:
- Provides one or more source
interfaces to use for performing DNS lookups. The interface
provided in C(lookup_source) must be a valid interface configured
on the device.
name_servers:
description:
- List of DNS name servers by IP address to use to perform name resolution
lookups. This argument accepts either a list of DNS servers See
examples.
state:
description:
- State of the configuration
values in the device's current active configuration. When set
to I(present), the values should be configured in the device active
configuration and when set to I(absent) the values should not be
in the device active configuration
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: configure hostname and domain name
net_system:
hostname: ios01
domain_name: test.example.com
domain-search:
- ansible.com
- redhat.com
- cisco.com
- name: remove configuration
net_system:
state: absent
- name: configure DNS lookup sources
net_system:
lookup_source: MgmtEth0/0/CPU0/0
- name: configure name servers
net_system:
name_servers:
- 8.8.8.8
- 8.8.4.4
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- hostname ios01
- ip domain name test.example.com
"""
| gpl-3.0 |
fernandog/Medusa | ext/sqlalchemy/event/attr.py | 1 | 12905 | # event/attr.py
# Copyright (C) 2005-2018 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Attribute implementation for _Dispatch classes.
The various listener targets for a particular event class are represented
as attributes, which refer to collections of listeners to be fired off.
These collections can exist at the class level as well as at the instance
level. An event is fired off using code like this::
some_object.dispatch.first_connect(arg1, arg2)
Above, ``some_object.dispatch`` would be an instance of ``_Dispatch`` and
``first_connect`` is typically an instance of ``_ListenerCollection``
if event listeners are present, or ``_EmptyListener`` if none are present.
The attribute mechanics here spend effort trying to ensure listener functions
are available with a minimum of function call overhead, that unnecessary
objects aren't created (i.e. many empty per-instance listener collections),
as well as that everything is garbage collectable when owning references are
lost. Other features such as "propagation" of listener functions across
many ``_Dispatch`` instances, "joining" of multiple ``_Dispatch`` instances,
as well as support for subclass propagation (e.g. events assigned to
``Pool`` vs. ``QueuePool``) are all implemented here.
"""
from __future__ import absolute_import, with_statement
from .. import exc
from .. import util
from ..util import threading
from . import registry
from . import legacy
from itertools import chain
import weakref
import collections
class RefCollection(util.MemoizedSlots):
    """Mixin supplying a memoized weak reference to ``self``, used by the
    event registry for bookkeeping that must not keep collections alive."""
    __slots__ = 'ref',
    def _memoized_attr_ref(self):
        # Invoked lazily by util.MemoizedSlots on first access of ``self.ref``;
        # registry._collection_gced fires when this collection is collected.
        return weakref.ref(self, registry._collection_gced)
class _empty_collection(object):
def append(self, element):
pass
def extend(self, other):
pass
def remove(self, element):
pass
def __iter__(self):
return iter([])
def clear(self):
pass
class _ClsLevelDispatch(RefCollection):
    """Class-level events on :class:`._Dispatch` classes."""
    __slots__ = ('name', 'arg_names', 'has_kw',
                 'legacy_signatures', '_clslevel', '__weakref__')
    def __init__(self, parent_dispatch_cls, fn):
        self.name = fn.__name__
        argspec = util.inspect_getargspec(fn)
        # Drop the implicit first argument of the event prototype function.
        self.arg_names = argspec.args[1:]
        self.has_kw = bool(argspec.keywords)
        # Older event signatures attached by decorators, sorted newest-first
        # so adaption tries the most recent legacy form first.
        self.legacy_signatures = list(reversed(
            sorted(
                getattr(fn, '_legacy_signatures', []),
                key=lambda s: s[0]
            )
        ))
        fn.__doc__ = legacy._augment_fn_docs(self, parent_dispatch_cls, fn)
        # Per-target-class listener collections, keyed weakly so that target
        # classes can be garbage collected.
        self._clslevel = weakref.WeakKeyDictionary()
    def _adjust_fn_spec(self, fn, named):
        # Wrap user listener functions so they can be called with the
        # positional arguments the event actually fires with.
        if named:
            fn = self._wrap_fn_for_kw(fn)
        if self.legacy_signatures:
            try:
                argspec = util.get_callable_argspec(fn, no_self=True)
            except TypeError:
                # Callable whose signature can't be introspected; leave as-is.
                pass
            else:
                fn = legacy._wrap_fn_for_legacy(self, fn, argspec)
        return fn
    def _wrap_fn_for_kw(self, fn):
        # Adapter used when the listener was registered with named=True:
        # positional event args are re-delivered as keyword arguments.
        def wrap_kw(*args, **kw):
            argdict = dict(zip(self.arg_names, args))
            argdict.update(kw)
            return fn(**argdict)
        return wrap_kw
    def insert(self, event_key, propagate):
        # Register event_key's listener at the FRONT of the collection for
        # the target class and all its current subclasses.
        target = event_key.dispatch_target
        assert isinstance(target, type), \
            "Class-level Event targets must be classes."
        if not getattr(target, '_sa_propagate_class_events', True):
            raise exc.InvalidRequestError(
                "Can't assign an event directly to the %s class" % target)
        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls is not target and cls not in self._clslevel:
                # Subclass not yet tracked: seed it from its MRO instead of
                # appending directly, so inherited listeners stay ordered.
                self.update_subclass(cls)
            else:
                if cls not in self._clslevel:
                    self._assign_cls_collection(cls)
                self._clslevel[cls].appendleft(event_key._listen_fn)
        registry._stored_in_collection(event_key, self)
    def append(self, event_key, propagate):
        # Same traversal as insert(), but adds to the END of the collection.
        target = event_key.dispatch_target
        assert isinstance(target, type), \
            "Class-level Event targets must be classes."
        if not getattr(target, '_sa_propagate_class_events', True):
            raise exc.InvalidRequestError(
                "Can't assign an event directly to the %s class" % target)
        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls is not target and cls not in self._clslevel:
                self.update_subclass(cls)
            else:
                if cls not in self._clslevel:
                    self._assign_cls_collection(cls)
                self._clslevel[cls].append(event_key._listen_fn)
        registry._stored_in_collection(event_key, self)
    def _assign_cls_collection(self, target):
        # Targets that opt out of propagation get the no-op collection.
        if getattr(target, '_sa_propagate_class_events', True):
            self._clslevel[target] = collections.deque()
        else:
            self._clslevel[target] = _empty_collection()
    def update_subclass(self, target):
        # Populate target's collection with listeners inherited from every
        # tracked class in its MRO, skipping duplicates already present.
        if target not in self._clslevel:
            self._assign_cls_collection(target)
        clslevel = self._clslevel[target]
        for cls in target.__mro__[1:]:
            if cls in self._clslevel:
                clslevel.extend([
                    fn for fn
                    in self._clslevel[cls]
                    if fn not in clslevel
                ])
    def remove(self, event_key):
        # Remove the listener from the target class and all subclasses.
        target = event_key.dispatch_target
        stack = [target]
        while stack:
            cls = stack.pop(0)
            stack.extend(cls.__subclasses__())
            if cls in self._clslevel:
                self._clslevel[cls].remove(event_key._listen_fn)
        registry._removed_from_collection(event_key, self)
    def clear(self):
        """Clear all class level listeners"""
        to_clear = set()
        for dispatcher in self._clslevel.values():
            to_clear.update(dispatcher)
            dispatcher.clear()
        registry._clear(self, to_clear)
    def for_modify(self, obj):
        """Return an event collection which can be modified.
        For _ClsLevelDispatch at the class level of
        a dispatcher, this returns self.
        """
        return self
class _InstanceLevelDispatch(RefCollection):
    """Abstract base for per-instance listener collections; argument
    adaption is delegated to the class-level parent dispatch."""
    __slots__ = ()
    def _adjust_fn_spec(self, fn, named):
        return self.parent._adjust_fn_spec(fn, named)
class _EmptyListener(_InstanceLevelDispatch):
    """Serves as a proxy interface to the events
    served by a _ClsLevelDispatch, when there are no
    instance-level events present.
    Is replaced by _ListenerCollection when instance-level
    events are added.
    """
    # Class-level constants: an _EmptyListener never owns listeners itself.
    propagate = frozenset()
    listeners = ()
    __slots__ = 'parent', 'parent_listeners', 'name'
    def __init__(self, parent, target_cls):
        if target_cls not in parent._clslevel:
            parent.update_subclass(target_cls)
        self.parent = parent  # _ClsLevelDispatch
        self.parent_listeners = parent._clslevel[target_cls]
        self.name = parent.name
    def for_modify(self, obj):
        """Return an event collection which can be modified.
        For _EmptyListener at the instance level of
        a dispatcher, this generates a new
        _ListenerCollection, applies it to the instance,
        and returns it.
        """
        result = _ListenerCollection(self.parent, obj._instance_cls)
        if getattr(obj, self.name) is self:
            setattr(obj, self.name, result)
        else:
            # Someone else already swapped in a collection; only a joined
            # listener is expected in that position.
            assert isinstance(getattr(obj, self.name), _JoinedListener)
        return result
    def _needs_modify(self, *args, **kw):
        raise NotImplementedError("need to call for_modify()")
    # All mutating operations are invalid on the shared empty listener.
    exec_once = insert = append = remove = clear = _needs_modify
    def __call__(self, *args, **kw):
        """Execute this event."""
        for fn in self.parent_listeners:
            fn(*args, **kw)
    def __len__(self):
        return len(self.parent_listeners)
    def __iter__(self):
        return iter(self.parent_listeners)
    def __bool__(self):
        return bool(self.parent_listeners)
    # Python 2 truthiness protocol.
    __nonzero__ = __bool__
class _CompoundListener(_InstanceLevelDispatch):
    """Base for listener collections that fire class-level listeners
    followed by their own instance-level listeners."""
    __slots__ = '_exec_once_mutex', '_exec_once'
    def _memoized_attr__exec_once_mutex(self):
        # Created lazily via MemoizedSlots so collections that never use
        # exec_once() pay no lock-allocation cost.
        return threading.Lock()
    def exec_once(self, *args, **kw):
        """Execute this event, but only if it has not been
        executed already for this collection."""
        # Double-checked pattern: the unlocked read is an optimization, the
        # locked re-check makes the "fire at most once" guarantee.
        if not self._exec_once:
            with self._exec_once_mutex:
                if not self._exec_once:
                    try:
                        self(*args, **kw)
                    finally:
                        # Mark done even if a listener raised, so a failing
                        # listener set is not re-fired.
                        self._exec_once = True
    def __call__(self, *args, **kw):
        """Execute this event."""
        # Class-level listeners always fire before instance-level ones.
        for fn in self.parent_listeners:
            fn(*args, **kw)
        for fn in self.listeners:
            fn(*args, **kw)
    def __len__(self):
        return len(self.parent_listeners) + len(self.listeners)
    def __iter__(self):
        return chain(self.parent_listeners, self.listeners)
    def __bool__(self):
        return bool(self.listeners or self.parent_listeners)
    # Python 2 truthiness protocol.
    __nonzero__ = __bool__
class _ListenerCollection(_CompoundListener):
    """Instance-level attributes on instances of :class:`._Dispatch`.
    Represents a collection of listeners.
    As of 0.7.9, _ListenerCollection is only first
    created via the _EmptyListener.for_modify() method.
    """
    __slots__ = (
        'parent_listeners', 'parent', 'name', 'listeners',
        'propagate', '__weakref__')
    def __init__(self, parent, target_cls):
        if target_cls not in parent._clslevel:
            parent.update_subclass(target_cls)
        self._exec_once = False
        # Class-level listeners are shared (referenced), not copied.
        self.parent_listeners = parent._clslevel[target_cls]
        self.parent = parent
        self.name = parent.name
        self.listeners = collections.deque()
        # Subset of listeners flagged to propagate to joined/derived
        # dispatch objects.
        self.propagate = set()
    def for_modify(self, obj):
        """Return an event collection which can be modified.
        For _ListenerCollection at the instance level of
        a dispatcher, this returns self.
        """
        return self
    def _update(self, other, only_propagate=True):
        """Populate from the listeners in another :class:`_Dispatch`
        object."""
        existing_listeners = self.listeners
        existing_listener_set = set(existing_listeners)
        self.propagate.update(other.propagate)
        # NOTE(review): due to and/or precedence this filter parses as
        # ((l not in existing and not only_propagate) or l in self.propagate)
        # — confirm that grouping is the intended semantics.
        other_listeners = [l for l
                           in other.listeners
                           if l not in existing_listener_set
                           and not only_propagate or l in self.propagate
                           ]
        existing_listeners.extend(other_listeners)
        to_associate = other.propagate.union(other_listeners)
        registry._stored_in_collection_multi(self, other, to_associate)
    def insert(self, event_key, propagate):
        # prepend_to_list returns False if the listener was already present.
        if event_key.prepend_to_list(self, self.listeners):
            if propagate:
                self.propagate.add(event_key._listen_fn)
    def append(self, event_key, propagate):
        if event_key.append_to_list(self, self.listeners):
            if propagate:
                self.propagate.add(event_key._listen_fn)
    def remove(self, event_key):
        self.listeners.remove(event_key._listen_fn)
        self.propagate.discard(event_key._listen_fn)
        registry._removed_from_collection(event_key, self)
    def clear(self):
        # Unregister before discarding local state so the registry stays
        # consistent with this collection.
        registry._clear(self, self.listeners)
        self.propagate.clear()
        self.listeners.clear()
class _JoinedListener(_CompoundListener):
    """Links a 'local' listener collection with a 'parent' dispatch, firing
    the local listeners first and then whatever the parent currently holds."""
    __slots__ = 'parent', 'name', 'local', 'parent_listeners'
    def __init__(self, parent, name, local):
        self._exec_once = False
        self.parent = parent
        self.name = name
        self.local = local
        # In the joined arrangement the "parent_listeners" slot (fired first
        # by _CompoundListener) is the local side.
        self.parent_listeners = self.local
    @property
    def listeners(self):
        # Resolved dynamically so later changes on the parent are seen.
        return getattr(self.parent, self.name)
    def _adjust_fn_spec(self, fn, named):
        return self.local._adjust_fn_spec(fn, named)
    def for_modify(self, obj):
        # Promote the local side to a modifiable collection in place.
        self.local = self.parent_listeners = self.local.for_modify(obj)
        return self
    def insert(self, event_key, propagate):
        self.local.insert(event_key, propagate)
    def append(self, event_key, propagate):
        self.local.append(event_key, propagate)
    def remove(self, event_key):
        self.local.remove(event_key)
    def clear(self):
        # Clearing a joined listener is ambiguous (local vs. parent scope),
        # so it is deliberately unsupported.
        raise NotImplementedError()
| gpl-3.0 |
frangucc/gamify | www/sandbox/pals/node_modules/cordova/node_modules/cordova-lib/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py | 217 | 5286 | # This file comes from
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.
"""Python module for generating .ninja files.
Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""
import textwrap
import re
def escape_path(word):
    """Escape '$', ' ' and ':' in a path so Ninja reads it as one token.

    The replacement order matters: pre-existing '$ ' pairs are doubled
    before bare spaces are escaped.
    """
    for old, new in (('$ ', '$$ '), (' ', '$ '), (':', '$:')):
        word = word.replace(old, new)
    return word
class Writer(object):
def __init__(self, output, width=78):
self.output = output
self.width = width
def newline(self):
self.output.write('\n')
def comment(self, text):
for line in textwrap.wrap(text, self.width - 2):
self.output.write('# ' + line + '\n')
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = ' '.join(filter(None, value)) # Filter out empty strings.
self._line('%s = %s' % (key, value), indent)
def rule(self, name, command, description=None, depfile=None,
generator=False, restat=False, rspfile=None, rspfile_content=None):
self._line('rule %s' % name)
self.variable('command', command, indent=1)
if description:
self.variable('description', description, indent=1)
if depfile:
self.variable('depfile', depfile, indent=1)
if generator:
self.variable('generator', '1', indent=1)
if restat:
self.variable('restat', '1', indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None):
outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:]
out_outputs = list(map(escape_path, outputs))
all_inputs = list(map(escape_path, all_inputs))
if implicit:
implicit = map(escape_path, self._as_list(implicit))
all_inputs.append('|')
all_inputs.extend(implicit)
if order_only:
order_only = map(escape_path, self._as_list(order_only))
all_inputs.append('||')
all_inputs.extend(order_only)
self._line('build %s: %s %s' % (' '.join(out_outputs),
rule,
' '.join(all_inputs)))
if variables:
if isinstance(variables, dict):
iterator = variables.iteritems()
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1)
return outputs
def include(self, path):
self._line('include %s' % path)
def subninja(self, path):
self._line('subninja %s' % path)
def default(self, paths):
self._line('default %s' % ' '.join(self._as_list(paths)))
def _count_dollars_before_index(self, s, i):
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
while dollar_index > 0 and s[dollar_index] == '$':
dollar_count += 1
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters."""
leading_space = ' ' * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(' $')
space = available_space
while True:
space = text.rfind(' ', 0, space)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(' ', space + 1)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + ' $\n')
text = text[space+1:]
# Subsequent lines are continuations, so indent them.
leading_space = ' ' * (indent+2)
self.output.write(leading_space + text + '\n')
def _as_list(self, input):
if input is None:
return []
if isinstance(input, list):
return input
return [input]
def escape(string):
    """Escape a string such that it can be embedded into a Ninja file without
    further interpretation."""
    assert '\n' not in string, 'Ninja syntax does not allow newlines'
    # '$' is Ninja's only metacharacter; doubling it makes it literal.
    return '$$'.join(string.split('$'))
| mit |
nubark/odoo | addons/fetchmail/fetchmail.py | 38 | 14937 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import poplib
import time
from imaplib import IMAP4
from imaplib import IMAP4_SSL
from poplib import POP3
from poplib import POP3_SSL
try:
import cStringIO as StringIO
except ImportError:
import StringIO
from openerp.osv import fields, osv
from openerp import tools, api, SUPERUSER_ID
from openerp.tools.translate import _
from openerp.exceptions import UserError
_logger = logging.getLogger(__name__)
MAX_POP_MESSAGES = 50
MAIL_TIMEOUT = 60
# Workaround for Python 2.7.8 bug https://bugs.python.org/issue23906
poplib._MAXLINE = 65536
class fetchmail_server(osv.osv):
"""Incoming POP/IMAP mail server account"""
_name = 'fetchmail.server'
_description = "POP/IMAP Server"
_order = 'priority'
_columns = {
'name':fields.char('Name', required=True, readonly=False),
'active':fields.boolean('Active', required=False),
'state':fields.selection([
('draft', 'Not Confirmed'),
('done', 'Confirmed'),
], 'Status', select=True, readonly=True, copy=False),
'server' : fields.char('Server Name', readonly=True, help="Hostname or IP of the mail server", states={'draft':[('readonly', False)]}),
'port' : fields.integer('Port', readonly=True, states={'draft':[('readonly', False)]}),
'type':fields.selection([
('pop', 'POP Server'),
('imap', 'IMAP Server'),
('local', 'Local Server'),
], 'Server Type', select=True, required=True, readonly=False),
'is_ssl':fields.boolean('SSL/TLS', help="Connections are encrypted with SSL/TLS through a dedicated port (default: IMAPS=993, POP3S=995)"),
'attach':fields.boolean('Keep Attachments', help="Whether attachments should be downloaded. "
"If not enabled, incoming emails will be stripped of any attachments before being processed"),
'original':fields.boolean('Keep Original', help="Whether a full original copy of each email should be kept for reference"
"and attached to each processed message. This will usually double the size of your message database."),
'date': fields.datetime('Last Fetch Date', readonly=True),
'user' : fields.char('Username', readonly=True, states={'draft':[('readonly', False)]}),
'password' : fields.char('Password', readonly=True, states={'draft':[('readonly', False)]}),
'action_id':fields.many2one('ir.actions.server', 'Server Action', help="Optional custom server action to trigger for each incoming mail, "
"on the record that was created or updated by this mail"),
'object_id': fields.many2one('ir.model', "Create a New Record", help="Process each incoming mail as part of a conversation "
"corresponding to this document type. This will create "
"new documents for new conversations, or attach follow-up "
"emails to the existing conversations (documents)."),
'priority': fields.integer('Server Priority', readonly=True, states={'draft':[('readonly', False)]}, help="Defines the order of processing, "
"lower values mean higher priority"),
'message_ids': fields.one2many('mail.mail', 'fetchmail_server_id', 'Messages', readonly=True),
'configuration' : fields.text('Configuration', readonly=True),
'script' : fields.char('Script', readonly=True),
}
_defaults = {
'state': "draft",
'type': "pop",
'active': True,
'priority': 5,
'attach': True,
'script': '/mail/static/scripts/openerp_mailgate.py',
}
def onchange_server_type(self, cr, uid, ids, server_type=False, ssl=False, object_id=False):
port = 0
values = {}
if server_type == 'pop':
port = ssl and 995 or 110
elif server_type == 'imap':
port = ssl and 993 or 143
else:
values['server'] = ''
values['port'] = port
conf = {
'dbname' : cr.dbname,
'uid' : uid,
'model' : 'MODELNAME',
}
if object_id:
m = self.pool.get('ir.model')
r = m.read(cr,uid,[object_id],['model'])
conf['model']=r[0]['model']
values['configuration'] = """Use the below script with the following command line options with your Mail Transport Agent (MTA)
openerp_mailgate.py --host=HOSTNAME --port=PORT -u %(uid)d -p PASSWORD -d %(dbname)s
Example configuration for the postfix mta running locally:
/etc/postfix/virtual_aliases:
@youdomain openerp_mailgate@localhost
/etc/aliases:
openerp_mailgate: "|/path/to/openerp-mailgate.py --host=localhost -u %(uid)d -p PASSWORD -d %(dbname)s"
""" % conf
return {'value':values}
def set_draft(self, cr, uid, ids, context=None):
self.write(cr, uid, ids , {'state':'draft'})
return True
@api.cr_uid_ids_context
def connect(self, cr, uid, server_id, context=None):
if isinstance(server_id, (list,tuple)):
server_id = server_id[0]
server = self.browse(cr, uid, server_id, context)
if server.type == 'imap':
if server.is_ssl:
connection = IMAP4_SSL(server.server, int(server.port))
else:
connection = IMAP4(server.server, int(server.port))
connection.login(server.user, server.password)
elif server.type == 'pop':
if server.is_ssl:
connection = POP3_SSL(server.server, int(server.port))
else:
connection = POP3(server.server, int(server.port))
#TODO: use this to remove only unread messages
#connection.user("recent:"+server.user)
connection.user(server.user)
connection.pass_(server.password)
# Add timeout on socket
connection.sock.settimeout(MAIL_TIMEOUT)
return connection
def button_confirm_login(self, cr, uid, ids, context=None):
if context is None:
context = {}
for server in self.browse(cr, uid, ids, context=context):
try:
connection = server.connect()
server.write({'state':'done'})
except Exception, e:
_logger.info("Failed to connect to %s server %s.", server.type, server.name, exc_info=True)
raise UserError(_("Connection test failed: %s") % tools.ustr(e))
finally:
try:
if connection:
if server.type == 'imap':
connection.close()
elif server.type == 'pop':
connection.quit()
except Exception:
# ignored, just a consequence of the previous exception
pass
return True
def _fetch_mails(self, cr, uid, ids=False, context=None):
if not ids:
ids = self.search(cr, uid, [('state','=','done'),('type','in',['pop','imap'])])
return self.fetch_mail(cr, uid, ids, context=context)
def fetch_mail(self, cr, uid, ids, context=None):
"""WARNING: meant for cron usage only - will commit() after each email!"""
context = dict(context or {})
context['fetchmail_cron_running'] = True
mail_thread = self.pool.get('mail.thread')
action_pool = self.pool.get('ir.actions.server')
for server in self.browse(cr, uid, ids, context=context):
_logger.info('start checking for new emails on %s server %s', server.type, server.name)
context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
count, failed = 0, 0
imap_server = False
pop_server = False
if server.type == 'imap':
try:
imap_server = server.connect()
imap_server.select()
result, data = imap_server.search(None, '(UNSEEN)')
for num in data[0].split():
res_id = None
result, data = imap_server.fetch(num, '(RFC822)')
imap_server.store(num, '-FLAGS', '\\Seen')
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
data[0][1],
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
except Exception:
_logger.info('Failed to process mail from %s server %s.', server.type, server.name, exc_info=True)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
imap_server.store(num, '+FLAGS', '\\Seen')
cr.commit()
count += 1
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, server.type, server.name, (count - failed), failed)
except Exception:
_logger.info("General failure when trying to fetch mail from %s server %s.", server.type, server.name, exc_info=True)
finally:
if imap_server:
imap_server.close()
imap_server.logout()
elif server.type == 'pop':
try:
while True:
pop_server = server.connect()
(numMsgs, totalSize) = pop_server.stat()
pop_server.list()
for num in range(1, min(MAX_POP_MESSAGES, numMsgs) + 1):
(header, msges, octets) = pop_server.retr(num)
msg = '\n'.join(msges)
res_id = None
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
msg,
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
pop_server.dele(num)
except Exception:
_logger.info('Failed to process mail from %s server %s.', server.type, server.name, exc_info=True)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
cr.commit()
if numMsgs < MAX_POP_MESSAGES:
break
pop_server.quit()
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", numMsgs, server.type, server.name, (numMsgs - failed), failed)
except Exception:
_logger.info("General failure when trying to fetch mail from %s server %s.", server.type, server.name, exc_info=True)
finally:
if pop_server:
pop_server.quit()
server.write({'date': time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)})
return True
def _update_cron(self, cr, uid, context=None):
if context and context.get('fetchmail_cron_running'):
return
try:
cron = self.pool['ir.model.data'].get_object(
cr, SUPERUSER_ID, 'fetchmail', 'ir_cron_mail_gateway_action', context=context)
except ValueError:
# Nevermind if default cron cannot be found
return
# Enabled/Disable cron based on the number of 'done' server of type pop or imap
cron.toggle(model=self._name, domain=[('state','=','done'), ('type','in',['pop','imap'])])
def create(self, cr, uid, values, context=None):
res = super(fetchmail_server, self).create(cr, uid, values, context=context)
self._update_cron(cr, uid, context=context)
return res
def write(self, cr, uid, ids, values, context=None):
res = super(fetchmail_server, self).write(cr, uid, ids, values, context=context)
self._update_cron(cr, uid, context=context)
return res
def unlink(self, cr, uid, ids, context=None):
res = super(fetchmail_server, self).unlink(cr, uid, ids, context=context)
self._update_cron(cr, uid, context=context)
return res
class mail_mail(osv.osv):
    """Extend mail.mail with a link to the inbound server that fetched it."""
    _inherit = "mail.mail"
    _columns = {
        'fetchmail_server_id': fields.many2one(
            'fetchmail.server', "Inbound Mail Server",
            readonly=True,
            select=True,
            oldname='server_id'),
    }
    def create(self, cr, uid, values, context=None):
        if context is None:
            context = {}
        server_id = context.get('fetchmail_server_id')
        if server_id:
            # Remember which inbound server produced this message.
            values['fetchmail_server_id'] = server_id
        return super(mail_mail, self).create(cr, uid, values, context=context)
    def write(self, cr, uid, ids, values, context=None):
        if context is None:
            context = {}
        server_id = context.get('fetchmail_server_id')
        if server_id:
            # Remember which inbound server produced this message.
            values['fetchmail_server_id'] = server_id
        return super(mail_mail, self).write(cr, uid, ids, values, context=context)
| gpl-3.0 |
toddpalino/kafka-tools | kafka/tools/protocol/requests/describe_acls_v0.py | 1 | 1408 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from kafka.tools.protocol.requests import BaseRequest
from kafka.tools.protocol.responses.describe_acls_v0 import DescribeAclsV0Response
class DescribeAclsV0Request(BaseRequest):
    # Protocol identifiers: this request uses API key 29, version 0, and is
    # paired with DescribeAclsV0Response for decoding the broker's reply.
    api_key = 29
    api_version = 0
    response = DescribeAclsV0Response
    # Command name exposed on the CLI; no extra help text is provided.
    cmd = "DescribeAcls"
    help_string = ''
    # Wire schema of the v0 request body: the ACL filter fields sent to the
    # broker (int8 fields are protocol enum codes, strings may name resources,
    # principals and hosts to match).
    schema = [
        {'name': 'resource_type', 'type': 'int8'},
        {'name': 'resource_name', 'type': 'string'},
        {'name': 'principal', 'type': 'string'},
        {'name': 'host', 'type': 'string'},
        {'name': 'operation', 'type': 'int8'},
        {'name': 'permission_type', 'type': 'int8'},
    ]
| apache-2.0 |
jusjusjus/pyedf | pyedf/recording/edf_param_struct.py | 1 | 1026 | #!/usr/bin/python
import ctypes as ct
class edf_param_struct(ct.Structure): # this structure contains all the relevant EDF-signal parameters of one signal
    """ctypes mirror of the C per-signal EDF parameter struct.

    Field order and types define the binary layout, so they must not be
    reordered.  The ``*_b`` fields hold fixed-size null-terminated byte
    strings (sized per the EDF header specification).
    """
    _fields_ = [("label_b", ct.c_char*17), # label (name) of the signal, null-terminated string
            ("smp_in_file", ct.c_longlong), # number of samples of this signal in the file
            ("phys_max", ct.c_double), # physical maximum
            ("phys_min", ct.c_double), # physical minimum
            ("dig_max", ct.c_int), # digital maximum
            ("dig_min", ct.c_int), # digital minimum
            ("smp_in_datarecord", ct.c_int), # number of samples of this signal in a datarecord
            ("physdimension_b", ct.c_char*9), # physical dimension (uV, bpm, mA, etc.), null-terminated string
            ("prefilter_b", ct.c_char*81), # null-terminated string
            ("transducer_b", ct.c_char*81)] # null-terminated string
BTCGPU/BTCGPU | test/functional/wallet_resendwallettransactions.py | 19 | 1410 | #!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test resendwallettransactions RPC."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
class ResendWalletTransactionsTest(BitcoinTestFramework):
    """Exercise the resendwallettransactions RPC with broadcasting on and off."""
    def set_test_params(self):
        # One node, started with wallet broadcasting disabled.
        self.num_nodes = 1
        self.extra_args = [['--walletbroadcast=false']]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def run_test(self):
        # Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled.
        assert_raises_rpc_error(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast", self.nodes[0].resendwallettransactions)
        # Should return an empty array if there aren't unconfirmed wallet transactions.
        # Restart with broadcasting enabled (default args) for the rest.
        self.stop_node(0)
        self.start_node(0, extra_args=[])
        assert_equal(self.nodes[0].resendwallettransactions(), [])
        # Should return an array with the unconfirmed wallet transaction.
        txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        assert_equal(self.nodes[0].resendwallettransactions(), [txid])
# Standard functional-test entry point.
if __name__ == '__main__':
    ResendWalletTransactionsTest().main()
| mit |
brkrishna/freelance | bolly_reviews/process_data.py | 1 | 5263 | #-------------------------------------------------------------------------------
# Name: process_data
# Purpose:
#
# Author: Ramakrishna
#
# Created: 17/04/2014
# Copyright: (c) Ramakrishna 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
from bs4 import BeautifulSoup
import re
import my_caching
import process_BWH, process_BM, process_BH, process_BL, process_BOS, process_BS
import process_BW, process_BW3, process_BWM, process_DNAI, process_ETC, process_FF
import process_FP, process_FS, process_GS, process_HT, process_IBN, process_IE
import process_IG, process_IN, process_IT, process_KM, process_KR, process_MD
import process_MM, process_MSN, process_MZ, process_NDTV, process_NR, process_OI
import process_PB, process_RE, process_REDIFF, process_RM, process_RMH
import process_SB, process_SH, process_SIFY, process_SULEKHA, process_TOI
import process_YAHOO, process_ZNI
# Dispatch table: review-source code -> handler module.  Every module exposes
# a ``process(source_cd, base_url, data)`` function with the same signature,
# so the 42-branch if-chain reduces to a single dictionary lookup.
_PROCESSOR_MODULES = {
    'BWH': process_BWH, 'BH': process_BH, 'BM': process_BM, 'BL': process_BL,
    'BOS': process_BOS, 'BS': process_BS, 'BW': process_BW, 'BW3': process_BW3,
    'BWM': process_BWM, 'DNAI': process_DNAI, 'ETC': process_ETC, 'FF': process_FF,
    'FP': process_FP, 'FS': process_FS, 'GS': process_GS, 'HT': process_HT,
    'IBN': process_IBN, 'IE': process_IE, 'IG': process_IG, 'IN': process_IN,
    'IT': process_IT, 'KM': process_KM, 'KR': process_KR, 'MD': process_MD,
    'MM': process_MM, 'MSN': process_MSN, 'MZ': process_MZ, 'NDTV': process_NDTV,
    'NR': process_NR, 'OI': process_OI, 'PB': process_PB, 'RE': process_RE,
    'REDIFF': process_REDIFF, 'RM': process_RM, 'RMH': process_RMH, 'SB': process_SB,
    'SH': process_SH, 'SIFY': process_SIFY, 'SULEKHA': process_SULEKHA,
    'TOI': process_TOI, 'YAHOO': process_YAHOO, 'ZNI': process_ZNI,
}

def process(base_url, source_cd, data):
    """Dispatch *data* to the review-scraper module registered for *source_cd*.

    :param base_url: base URL of the page the data came from
    :param source_cd: short code identifying the review source (e.g. 'TOI')
    :param data: raw page content handed to the source-specific parser
    :returns: the record produced by the handler module, an empty list when
        *source_cd* is unknown, or ``None`` when the handler raised
        (preserving the original best-effort error handling).
    """
    try:
        module = _PROCESSOR_MODULES.get(source_cd)
        if module is None:
            # Unknown source code: the original returned an untouched empty list.
            return []
        return module.process(source_cd, base_url, data)
    except Exception as e:
        # Best-effort: log the failure and fall through to an implicit None,
        # exactly as the original code did.
        print(e.__doc__)
        print(e.args)
| gpl-2.0 |
RoboCupULaval/StrategyIA | tests/Util/test_path.py | 1 | 2516 |
import unittest
from Util import Path, Position
__author__ = 'Simon Bouchard'
# --- Shared fixtures for the TestPath cases below -------------------------
# A simple two-point path plus its raw-array equivalents.
A_START = Position(100, 200)
A_TARGET = Position(123, -456)
A_PATH = Path(start=A_START, target=A_TARGET)
A_START_ARRAY = A_START.array
A_TARGET_ARRAY = A_TARGET.array
# A 6-point staircase path; PATH_LENGTH is its expected total length.
A_LIST_OF_POSITION = [Position(0, 0),
                      Position(0, 10),
                      Position(10, 10),
                      Position(10, 20),
                      Position(20, 20),
                      Position(20, 30)]
A_LONG_PATH = Path.from_sequence(A_LIST_OF_POSITION)
PATH_LENGTH = 50
# Clusters of near-duplicate points, and the path expected after filtering
# them with a distance threshold (one representative per cluster, target kept).
A_LIST_OF_CLOSE_POSITION = [Position(1,1), Position(1,-1), Position(1, -2),
                            Position(10,1), Position(10,1), Position(10,2),
                            Position(10, 21), Position(10, 20), Position(10, 22),
                            Position(30, 21), Position(30, 20), Position(30, 22)]
A_PATH_WITH_CLOSE_POINTS = Path.from_sequence(A_LIST_OF_CLOSE_POSITION)
A_PATH_WITH_CLOSE_POINTS_FILTERED = Path.from_sequence([Position(1, 1),
                                                        Position(10, 1),
                                                        Position(10, 21),
                                                        Position(30, 22)])
class TestPath(unittest.TestCase):
    """Unit tests for Util.Path construction, filtering, copying and length.

    All checks use unittest assertion methods (the original mixed bare
    ``assert`` with ``self.assertEqual``; bare asserts are stripped under
    ``python -O`` and give poor failure messages).
    """

    def test_givenStartTarget_whenNew_thenReturnPath(self):
        path = Path(start=A_START, target=A_TARGET)
        self.assertEqual(path.start, A_START)
        self.assertEqual(path.target, A_TARGET)

    def test_givenStartTargetArray_whenFromArray_thenReturnPath(self):
        path = Path.from_array(A_START_ARRAY, A_TARGET_ARRAY)
        self.assertEqual(path.start, A_START)
        self.assertEqual(path.target, A_TARGET)

    def test_whenInitializingFromAListOfPoints_thenAListOfPointsIsAssigned(self):
        path = Path.from_sequence(A_LIST_OF_POSITION)
        self.assertEqual(path.points, A_LIST_OF_POSITION)

    def test_givenPath_whenFilter_thenRemoveClosePointsAndKeepTarget(self):
        path = A_PATH_WITH_CLOSE_POINTS.copy()
        path.filter(threshold=5)
        self.assertEqual(path.points, A_PATH_WITH_CLOSE_POINTS_FILTERED.points)
        self.assertEqual(path.start, A_PATH_WITH_CLOSE_POINTS_FILTERED.start)
        self.assertEqual(path.target, A_PATH_WITH_CLOSE_POINTS_FILTERED.target)

    def test_givenPath_whenCopy_thenReturnPathCopy(self):
        # copy() must return a distinct object, not the same instance.
        path = A_PATH.copy()
        self.assertIsNot(path, A_PATH)

    def test_givenPath_whenGettingLength_thenReturnLength(self):
        self.assertEqual(A_LONG_PATH.length, PATH_LENGTH)
| mit |
abloomston/sympy | sympy/functions/elementary/tests/test_trigonometric.py | 18 | 50521 | from sympy import (symbols, Symbol, nan, oo, zoo, I, sinh, sin, pi, atan,
acos, Rational, sqrt, asin, acot, coth, E, S, tan, tanh, cos,
cosh, atan2, exp, log, asinh, acoth, atanh, O, cancel, Matrix, re, im,
Float, Pow, gcd, sec, csc, cot, diff, simplify, Heaviside, arg,
conjugate, series, FiniteSet, asec, acsc, Mul, sinc, jn, Product)
from sympy.core.compatibility import range
from sympy.utilities.pytest import XFAIL, slow, raises
# Shared symbols (with assumptions) used throughout the tests below.
x, y, z = symbols('x y z')
r = Symbol('r', real=True)
k = Symbol('k', integer=True)
p = Symbol('p', positive=True)
n = Symbol('n', negative=True)
a = Symbol('a', algebraic=True)
na = Symbol('na', nonzero=True, algebraic=True)
def test_sin():
    """Spot-check sin: special values, exact angles, assumptions, numerics."""
    x, y = symbols('x y')
    assert sin.nargs == FiniteSet(1)
    assert sin(nan) == nan
    assert sin(oo*I) == oo*I
    assert sin(-oo*I) == -oo*I
    assert sin(oo).args[0] == oo
    assert sin(0) == 0
    # Compositions with inverse trig functions.
    assert sin(asin(x)) == x
    assert sin(atan(x)) == x / sqrt(1 + x**2)
    assert sin(acos(x)) == sqrt(1 - x**2)
    assert sin(acot(x)) == 1 / (sqrt(1 + 1 / x**2) * x)
    assert sin(atan2(y, x)) == y / sqrt(x**2 + y**2)
    assert sin(pi*I) == sinh(pi)*I
    assert sin(-pi*I) == -sinh(pi)*I
    assert sin(-2*I) == -sinh(2)*I
    assert sin(pi) == 0
    assert sin(-pi) == 0
    assert sin(2*pi) == 0
    assert sin(-2*pi) == 0
    assert sin(-3*10**73*pi) == 0
    assert sin(7*10**103*pi) == 0
    assert sin(pi/2) == 1
    assert sin(-pi/2) == -1
    assert sin(5*pi/2) == 1
    assert sin(7*pi/2) == -1
    ne = symbols('ne', integer=True, even=False)
    e = symbols('e', even=True)
    assert sin(pi*ne/2) == (-1)**(ne/2 - S.Half)
    assert sin(pi*k/2).func == sin
    assert sin(pi*e/2) == 0
    assert sin(pi*k) == 0
    assert sin(pi*k).subs(k, 3) == sin(pi*k/2).subs(k, 6) # issue 8298
    # Exact values at standard angles.
    assert sin(pi/3) == S.Half*sqrt(3)
    assert sin(-2*pi/3) == -S.Half*sqrt(3)
    assert sin(pi/4) == S.Half*sqrt(2)
    assert sin(-pi/4) == -S.Half*sqrt(2)
    assert sin(17*pi/4) == S.Half*sqrt(2)
    assert sin(-3*pi/4) == -S.Half*sqrt(2)
    assert sin(pi/6) == S.Half
    assert sin(-pi/6) == -S.Half
    assert sin(7*pi/6) == -S.Half
    assert sin(-5*pi/6) == -S.Half
    assert sin(1*pi/5) == sqrt((5 - sqrt(5)) / 8)
    assert sin(2*pi/5) == sqrt((5 + sqrt(5)) / 8)
    assert sin(3*pi/5) == sin(2*pi/5)
    assert sin(4*pi/5) == sin(1*pi/5)
    assert sin(6*pi/5) == -sin(1*pi/5)
    assert sin(8*pi/5) == -sin(2*pi/5)
    assert sin(-1273*pi/5) == -sin(2*pi/5)
    assert sin(pi/8) == sqrt((2 - sqrt(2))/4)
    assert sin(pi/10) == -1/4 + sqrt(5)/4
    assert sin(pi/12) == -sqrt(2)/4 + sqrt(6)/4
    assert sin(5*pi/12) == sqrt(2)/4 + sqrt(6)/4
    assert sin(-7*pi/12) == -sqrt(2)/4 - sqrt(6)/4
    assert sin(-11*pi/12) == sqrt(2)/4 - sqrt(6)/4
    assert sin(104*pi/105) == sin(pi/105)
    assert sin(106*pi/105) == -sin(pi/105)
    assert sin(-104*pi/105) == -sin(pi/105)
    assert sin(-106*pi/105) == sin(pi/105)
    assert sin(x*I) == sinh(x)*I
    assert sin(k*pi) == 0
    assert sin(17*k*pi) == 0
    assert sin(k*pi*I) == sinh(k*pi)*I
    # Assumption propagation.
    assert sin(r).is_real is True
    assert sin(0, evaluate=False).is_algebraic
    assert sin(a).is_algebraic is None
    assert sin(na).is_algebraic is False
    q = Symbol('q', rational=True)
    assert sin(pi*q).is_algebraic
    qn = Symbol('qn', rational=True, nonzero=True)
    assert sin(qn).is_rational is False
    assert sin(q).is_rational is None # issue 8653
    assert isinstance(sin( re(x) - im(y)), sin) is True
    assert isinstance(sin(-re(x) + im(y)), sin) is False
    # Exact evaluation agrees with floating point at many rational angles.
    for d in list(range(1, 22)) + [60, 85]:
        for n in range(0, d*2 + 1):
            x = n*pi/d
            e = abs( float(sin(x)) - sin(float(x)) )
            assert e < 1e-12
def test_sin_cos():
    """Check quarter-period shift identities between sin and cos at rational angles."""
    for d in [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 24, 30, 40, 60, 120]: # list is not exhaustive...
        for n in range(-2*d, d*2):
            x = n*pi/d
            assert sin(x + pi/2) == cos(x), "fails for %d*pi/%d" % (n, d)
            assert sin(x - pi/2) == -cos(x), "fails for %d*pi/%d" % (n, d)
            assert sin(x) == cos(x - pi/2), "fails for %d*pi/%d" % (n, d)
            assert -sin(x) == cos(x + pi/2), "fails for %d*pi/%d" % (n, d)
def test_sin_series():
    """Maclaurin series of sin up to O(x**9)."""
    assert sin(x).series(x, 0, 9) == \
        x - x**3/6 + x**5/120 - x**7/5040 + O(x**9)
def test_sin_rewrite():
    """Rewrite sin in terms of exp/tan/cot/Pow/csc; numeric spot-checks of composites."""
    assert sin(x).rewrite(exp) == -I*(exp(I*x) - exp(-I*x))/2
    assert sin(x).rewrite(tan) == 2*tan(x/2)/(1 + tan(x/2)**2)
    assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2)
    assert sin(sinh(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sinh(3)).n()
    assert sin(cosh(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cosh(3)).n()
    assert sin(tanh(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tanh(3)).n()
    assert sin(coth(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, coth(3)).n()
    assert sin(sin(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sin(3)).n()
    assert sin(cos(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cos(3)).n()
    assert sin(tan(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tan(3)).n()
    assert sin(cot(x)).rewrite(
        exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cot(3)).n()
    assert sin(log(x)).rewrite(Pow) == I*x**-I / 2 - I*x**I /2
    assert sin(x).rewrite(csc) == 1/csc(x)
def test_sin_expansion():
    """Angle-addition and multiple-angle expansions of sin."""
    # Note: these formulas are not unique. The ones here come from the
    # Chebyshev formulas.
    assert sin(x + y).expand(trig=True) == sin(x)*cos(y) + cos(x)*sin(y)
    assert sin(x - y).expand(trig=True) == sin(x)*cos(y) - cos(x)*sin(y)
    assert sin(y - x).expand(trig=True) == cos(x)*sin(y) - sin(x)*cos(y)
    assert sin(2*x).expand(trig=True) == 2*sin(x)*cos(x)
    assert sin(3*x).expand(trig=True) == -4*sin(x)**3 + 3*sin(x)
    assert sin(4*x).expand(trig=True) == -8*sin(x)**3*cos(x) + 4*sin(x)*cos(x)
    assert sin(2).expand(trig=True) == 2*sin(1)*cos(1)
    assert sin(3).expand(trig=True) == -4*sin(1)**3 + 3*sin(1)
def test_trig_symmetry():
    """Parity, periodicity, and pi/2-shift symmetries of sin/cos/tan/cot."""
    assert sin(-x) == -sin(x)
    assert cos(-x) == cos(x)
    assert tan(-x) == -tan(x)
    assert cot(-x) == -cot(x)
    assert sin(x + pi) == -sin(x)
    assert sin(x + 2*pi) == sin(x)
    assert sin(x + 3*pi) == -sin(x)
    assert sin(x + 4*pi) == sin(x)
    assert sin(x - 5*pi) == -sin(x)
    assert cos(x + pi) == -cos(x)
    assert cos(x + 2*pi) == cos(x)
    assert cos(x + 3*pi) == -cos(x)
    assert cos(x + 4*pi) == cos(x)
    assert cos(x - 5*pi) == -cos(x)
    assert tan(x + pi) == tan(x)
    assert tan(x - 3*pi) == tan(x)
    assert cot(x + pi) == cot(x)
    assert cot(x - 3*pi) == cot(x)
    assert sin(pi/2 - x) == cos(x)
    assert sin(3*pi/2 - x) == -cos(x)
    assert sin(5*pi/2 - x) == cos(x)
    assert cos(pi/2 - x) == sin(x)
    assert cos(3*pi/2 - x) == -sin(x)
    assert cos(5*pi/2 - x) == sin(x)
    assert tan(pi/2 - x) == cot(x)
    assert tan(3*pi/2 - x) == cot(x)
    assert tan(5*pi/2 - x) == cot(x)
    assert cot(pi/2 - x) == tan(x)
    assert cot(3*pi/2 - x) == tan(x)
    assert cot(5*pi/2 - x) == tan(x)
    assert sin(pi/2 + x) == cos(x)
    assert cos(pi/2 + x) == -sin(x)
    assert tan(pi/2 + x) == -cot(x)
    assert cot(pi/2 + x) == -tan(x)
def test_cos():
    """Spot-check cos: special values, exact angles, assumptions, numerics."""
    x, y = symbols('x y')
    assert cos.nargs == FiniteSet(1)
    assert cos(nan) == nan
    assert cos(oo*I) == oo
    assert cos(-oo*I) == oo
    assert cos(0) == 1
    # Compositions with inverse trig functions.
    assert cos(acos(x)) == x
    assert cos(atan(x)) == 1 / sqrt(1 + x**2)
    assert cos(asin(x)) == sqrt(1 - x**2)
    assert cos(acot(x)) == 1 / sqrt(1 + 1 / x**2)
    assert cos(atan2(y, x)) == x / sqrt(x**2 + y**2)
    assert cos(pi*I) == cosh(pi)
    assert cos(-pi*I) == cosh(pi)
    assert cos(-2*I) == cosh(2)
    assert cos(pi/2) == 0
    assert cos(-pi/2) == 0
    assert cos(pi/2) == 0
    assert cos(-pi/2) == 0
    assert cos((-3*10**73 + 1)*pi/2) == 0
    assert cos((7*10**103 + 1)*pi/2) == 0
    n = symbols('n', integer=True, even=False)
    e = symbols('e', even=True)
    assert cos(pi*n/2) == 0
    assert cos(pi*e/2) == (-1)**(e/2)
    assert cos(pi) == -1
    assert cos(-pi) == -1
    assert cos(2*pi) == 1
    assert cos(5*pi) == -1
    assert cos(8*pi) == 1
    # Exact values at standard angles.
    assert cos(pi/3) == S.Half
    assert cos(-2*pi/3) == -S.Half
    assert cos(pi/4) == S.Half*sqrt(2)
    assert cos(-pi/4) == S.Half*sqrt(2)
    assert cos(11*pi/4) == -S.Half*sqrt(2)
    assert cos(-3*pi/4) == -S.Half*sqrt(2)
    assert cos(pi/6) == S.Half*sqrt(3)
    assert cos(-pi/6) == S.Half*sqrt(3)
    assert cos(7*pi/6) == -S.Half*sqrt(3)
    assert cos(-5*pi/6) == -S.Half*sqrt(3)
    assert cos(1*pi/5) == (sqrt(5) + 1)/4
    assert cos(2*pi/5) == (sqrt(5) - 1)/4
    assert cos(3*pi/5) == -cos(2*pi/5)
    assert cos(4*pi/5) == -cos(1*pi/5)
    assert cos(6*pi/5) == -cos(1*pi/5)
    assert cos(8*pi/5) == cos(2*pi/5)
    assert cos(-1273*pi/5) == -cos(2*pi/5)
    assert cos(pi/8) == sqrt((2 + sqrt(2))/4)
    assert cos(pi/12) == sqrt(2)/4 + sqrt(6)/4
    assert cos(5*pi/12) == -sqrt(2)/4 + sqrt(6)/4
    assert cos(7*pi/12) == sqrt(2)/4 - sqrt(6)/4
    assert cos(11*pi/12) == -sqrt(2)/4 - sqrt(6)/4
    assert cos(104*pi/105) == -cos(pi/105)
    assert cos(106*pi/105) == -cos(pi/105)
    assert cos(-104*pi/105) == -cos(pi/105)
    assert cos(-106*pi/105) == -cos(pi/105)
    assert cos(x*I) == cosh(x)
    assert cos(k*pi*I) == cosh(k*pi)
    # Assumption propagation.
    assert cos(r).is_real is True
    assert cos(0, evaluate=False).is_algebraic
    assert cos(a).is_algebraic is None
    assert cos(na).is_algebraic is False
    q = Symbol('q', rational=True)
    assert cos(pi*q).is_algebraic
    assert cos(2*pi/7).is_algebraic
    assert cos(k*pi) == (-1)**k
    assert cos(2*k*pi) == 1
    # Exact evaluation agrees with floating point at many rational angles.
    for d in list(range(1, 22)) + [60, 85]:
        for n in range(0, 2*d + 1):
            x = n*pi/d
            e = abs( float(cos(x)) - cos(float(x)) )
            assert e < 1e-12
def test_issue_6190():
    """Regression for issue 6190: Float multiples of pi reduce like exact rationals."""
    c = Float('123456789012345678901234567890.25', '')
    for cls in [sin, cos, tan, cot]:
        assert cls(c*pi) == cls(pi/4)
        assert cls(4.125*pi) == cls(pi/8)
        assert cls(4.7*pi) == cls((4.7 % 2)*pi)
def test_cos_series():
    """Maclaurin series of cos up to O(x**9)."""
    assert cos(x).series(x, 0, 9) == \
        1 - x**2/2 + x**4/24 - x**6/720 + x**8/40320 + O(x**9)
def test_cos_rewrite():
    """Rewrite cos in terms of exp/tan/cot/Pow/sec; numeric spot-checks of composites."""
    assert cos(x).rewrite(exp) == exp(I*x)/2 + exp(-I*x)/2
    assert cos(x).rewrite(tan) == (1 - tan(x/2)**2)/(1 + tan(x/2)**2)
    assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2)
    assert cos(sinh(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sinh(3)).n()
    assert cos(cosh(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cosh(3)).n()
    assert cos(tanh(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tanh(3)).n()
    assert cos(coth(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, coth(3)).n()
    assert cos(sin(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sin(3)).n()
    assert cos(cos(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cos(3)).n()
    assert cos(tan(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tan(3)).n()
    assert cos(cot(x)).rewrite(
        exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cot(3)).n()
    assert cos(log(x)).rewrite(Pow) == x**I/2 + x**-I/2
    assert cos(x).rewrite(sec) == 1/sec(x)
def test_cos_expansion():
    """Angle-addition and multiple-angle expansions of cos."""
    assert cos(x + y).expand(trig=True) == cos(x)*cos(y) - sin(x)*sin(y)
    assert cos(x - y).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y)
    assert cos(y - x).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y)
    assert cos(2*x).expand(trig=True) == 2*cos(x)**2 - 1
    assert cos(3*x).expand(trig=True) == 4*cos(x)**3 - 3*cos(x)
    assert cos(4*x).expand(trig=True) == 8*cos(x)**4 - 8*cos(x)**2 + 1
    assert cos(2).expand(trig=True) == 2*cos(1)**2 - 1
    assert cos(3).expand(trig=True) == 4*cos(1)**3 - 3*cos(1)
def test_tan():
    """Spot-check tan: special values, poles, exact angles, assumptions."""
    assert tan(nan) == nan
    assert tan.nargs == FiniteSet(1)
    assert tan(oo*I) == I
    assert tan(-oo*I) == -I
    assert tan(0) == 0
    # Compositions with inverse trig functions.
    assert tan(atan(x)) == x
    assert tan(asin(x)) == x / sqrt(1 - x**2)
    assert tan(acos(x)) == sqrt(1 - x**2) / x
    assert tan(acot(x)) == 1 / x
    assert tan(atan2(y, x)) == y/x
    assert tan(pi*I) == tanh(pi)*I
    assert tan(-pi*I) == -tanh(pi)*I
    assert tan(-2*I) == -tanh(2)*I
    assert tan(pi) == 0
    assert tan(-pi) == 0
    assert tan(2*pi) == 0
    assert tan(-2*pi) == 0
    assert tan(-3*10**73*pi) == 0
    # Poles at odd multiples of pi/2.
    assert tan(pi/2) == zoo
    assert tan(3*pi/2) == zoo
    # Exact values at standard angles.
    assert tan(pi/3) == sqrt(3)
    assert tan(-2*pi/3) == sqrt(3)
    assert tan(pi/4) == S.One
    assert tan(-pi/4) == -S.One
    assert tan(17*pi/4) == S.One
    assert tan(-3*pi/4) == S.One
    assert tan(pi/6) == 1/sqrt(3)
    assert tan(-pi/6) == -1/sqrt(3)
    assert tan(7*pi/6) == 1/sqrt(3)
    assert tan(-5*pi/6) == 1/sqrt(3)
    assert tan(pi/8).expand() == -1 + sqrt(2)
    assert tan(3*pi/8).expand() == 1 + sqrt(2)
    assert tan(5*pi/8).expand() == -1 - sqrt(2)
    assert tan(7*pi/8).expand() == 1 - sqrt(2)
    assert tan(pi/12) == -sqrt(3) + 2
    assert tan(5*pi/12) == sqrt(3) + 2
    assert tan(7*pi/12) == -sqrt(3) - 2
    assert tan(11*pi/12) == sqrt(3) - 2
    assert tan(pi/24).radsimp() == -2 - sqrt(3) + sqrt(2) + sqrt(6)
    assert tan(5*pi/24).radsimp() == -2 + sqrt(3) - sqrt(2) + sqrt(6)
    assert tan(7*pi/24).radsimp() == 2 - sqrt(3) - sqrt(2) + sqrt(6)
    assert tan(11*pi/24).radsimp() == 2 + sqrt(3) + sqrt(2) + sqrt(6)
    assert tan(13*pi/24).radsimp() == -2 - sqrt(3) - sqrt(2) - sqrt(6)
    assert tan(17*pi/24).radsimp() == -2 + sqrt(3) + sqrt(2) - sqrt(6)
    assert tan(19*pi/24).radsimp() == 2 - sqrt(3) + sqrt(2) - sqrt(6)
    assert tan(23*pi/24).radsimp() == 2 + sqrt(3) - sqrt(2) - sqrt(6)
    assert 1 == (tan(8*pi/15)*cos(8*pi/15)/sin(8*pi/15)).ratsimp()
    assert tan(x*I) == tanh(x)*I
    assert tan(k*pi) == 0
    assert tan(17*k*pi) == 0
    assert tan(k*pi*I) == tanh(k*pi)*I
    # Assumption propagation.
    assert tan(r).is_real is True
    assert tan(0, evaluate=False).is_algebraic
    assert tan(a).is_algebraic is None
    assert tan(na).is_algebraic is False
    assert tan(10*pi/7) == tan(3*pi/7)
    assert tan(11*pi/7) == -tan(3*pi/7)
    assert tan(-11*pi/7) == tan(3*pi/7)
    assert tan(15*pi/14) == tan(pi/14)
    assert tan(-15*pi/14) == -tan(pi/14)
def test_tan_series():
    """Maclaurin series of tan up to O(x**9)."""
    assert tan(x).series(x, 0, 9) == \
        x + x**3/3 + 2*x**5/15 + 17*x**7/315 + O(x**9)
def test_tan_rewrite():
    """Rewrite tan in terms of exp/sin/cos/cot/Pow/sqrt; numeric spot-checks."""
    neg_exp, pos_exp = exp(-x*I), exp(x*I)
    assert tan(x).rewrite(exp) == I*(neg_exp - pos_exp)/(neg_exp + pos_exp)
    assert tan(x).rewrite(sin) == 2*sin(x)**2/sin(2*x)
    assert tan(x).rewrite(cos) == -cos(x + S.Pi/2)/cos(x)
    assert tan(x).rewrite(cot) == 1/cot(x)
    assert tan(sinh(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sinh(3)).n()
    assert tan(cosh(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cosh(3)).n()
    assert tan(tanh(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tanh(3)).n()
    assert tan(coth(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, coth(3)).n()
    assert tan(sin(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sin(3)).n()
    assert tan(cos(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cos(3)).n()
    assert tan(tan(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tan(3)).n()
    assert tan(cot(x)).rewrite(
        exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cot(3)).n()
    assert tan(log(x)).rewrite(Pow) == I*(x**-I - x**I)/(x**-I + x**I)
    assert 0 == (cos(pi/34)*tan(pi/34) - sin(pi/34)).rewrite(pow)
    assert 0 == (cos(pi/17)*tan(pi/17) - sin(pi/17)).rewrite(pow)
    assert tan(pi/19).rewrite(pow) == tan(pi/19)
    assert tan(8*pi/19).rewrite(sqrt) == tan(8*pi/19)
def test_tan_subs():
    """Substitution into tan, including substitution that lands on a pole."""
    assert tan(x).subs(tan(x), y) == y
    assert tan(x).subs(x, y) == tan(y)
    assert tan(x).subs(x, S.Pi/2) == zoo
    assert tan(x).subs(x, 3*S.Pi/2) == zoo
def test_tan_expansion():
    """Angle-addition and multiple-angle expansions of tan."""
    assert tan(x + y).expand(trig=True) == ((tan(x) + tan(y))/(1 - tan(x)*tan(y))).expand()
    assert tan(x - y).expand(trig=True) == ((tan(x) - tan(y))/(1 + tan(x)*tan(y))).expand()
    assert tan(x + y + z).expand(trig=True) == (
        (tan(x) + tan(y) + tan(z) - tan(x)*tan(y)*tan(z))/
        (1 - tan(x)*tan(y) - tan(x)*tan(z) - tan(y)*tan(z))).expand()
    # Multiple-angle expansions checked numerically at rational tan values.
    assert 0 == tan(2*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 7))])*24 - 7
    assert 0 == tan(3*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*55 - 37
    assert 0 == tan(4*x - pi/4).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*239 - 1
def test_cot():
    """Spot-check cot: special values, poles, exact angles, assumptions."""
    assert cot(nan) == nan
    assert cot.nargs == FiniteSet(1)
    assert cot(oo*I) == -I
    assert cot(-oo*I) == I
    # Poles at integer multiples of pi.
    assert cot(0) == zoo
    assert cot(2*pi) == zoo
    # Compositions with inverse trig functions.
    assert cot(acot(x)) == x
    assert cot(atan(x)) == 1 / x
    assert cot(asin(x)) == sqrt(1 - x**2) / x
    assert cot(acos(x)) == x / sqrt(1 - x**2)
    assert cot(atan2(y, x)) == x/y
    assert cot(pi*I) == -coth(pi)*I
    assert cot(-pi*I) == coth(pi)*I
    assert cot(-2*I) == coth(2)*I
    assert cot(pi) == cot(2*pi) == cot(3*pi)
    assert cot(-pi) == cot(-2*pi) == cot(-3*pi)
    assert cot(pi/2) == 0
    assert cot(-pi/2) == 0
    assert cot(5*pi/2) == 0
    assert cot(7*pi/2) == 0
    # Exact values at standard angles.
    assert cot(pi/3) == 1/sqrt(3)
    assert cot(-2*pi/3) == 1/sqrt(3)
    assert cot(pi/4) == S.One
    assert cot(-pi/4) == -S.One
    assert cot(17*pi/4) == S.One
    assert cot(-3*pi/4) == S.One
    assert cot(pi/6) == sqrt(3)
    assert cot(-pi/6) == -sqrt(3)
    assert cot(7*pi/6) == sqrt(3)
    assert cot(-5*pi/6) == sqrt(3)
    assert cot(pi/8).expand() == 1 + sqrt(2)
    assert cot(3*pi/8).expand() == -1 + sqrt(2)
    assert cot(5*pi/8).expand() == 1 - sqrt(2)
    assert cot(7*pi/8).expand() == -1 - sqrt(2)
    assert cot(pi/12) == sqrt(3) + 2
    assert cot(5*pi/12) == -sqrt(3) + 2
    assert cot(7*pi/12) == sqrt(3) - 2
    assert cot(11*pi/12) == -sqrt(3) - 2
    assert cot(pi/24).radsimp() == sqrt(2) + sqrt(3) + 2 + sqrt(6)
    assert cot(5*pi/24).radsimp() == -sqrt(2) - sqrt(3) + 2 + sqrt(6)
    assert cot(7*pi/24).radsimp() == -sqrt(2) + sqrt(3) - 2 + sqrt(6)
    assert cot(11*pi/24).radsimp() == sqrt(2) - sqrt(3) - 2 + sqrt(6)
    assert cot(13*pi/24).radsimp() == -sqrt(2) + sqrt(3) + 2 - sqrt(6)
    assert cot(17*pi/24).radsimp() == sqrt(2) - sqrt(3) + 2 - sqrt(6)
    assert cot(19*pi/24).radsimp() == sqrt(2) + sqrt(3) - 2 - sqrt(6)
    assert cot(23*pi/24).radsimp() == -sqrt(2) - sqrt(3) - 2 - sqrt(6)
    assert 1 == (cot(4*pi/15)*sin(4*pi/15)/cos(4*pi/15)).ratsimp()
    assert cot(x*I) == -coth(x)*I
    assert cot(k*pi*I) == -coth(k*pi)*I
    # Assumption propagation.
    assert cot(r).is_real is True
    assert cot(a).is_algebraic is None
    assert cot(na).is_algebraic is False
    assert cot(10*pi/7) == cot(3*pi/7)
    assert cot(11*pi/7) == -cot(3*pi/7)
    assert cot(-11*pi/7) == cot(3*pi/7)
    assert cot(39*pi/34) == cot(5*pi/34)
    assert cot(-41*pi/34) == -cot(7*pi/34)
    assert cot(x).is_finite is None
    assert cot(r).is_finite is None
    i = Symbol('i', imaginary=True)
    assert cot(i).is_finite is True
    assert cot(x).subs(x, 3*pi) == zoo
def test_cot_series():
    """Laurent series of cot about 0, including a higher-order pole argument."""
    assert cot(x).series(x, 0, 9) == \
        1/x - x/3 - x**3/45 - 2*x**5/945 - x**7/4725 + O(x**9)
    # issue 6210
    assert cot(x**4 + x**5).series(x, 0, 1) == \
        x**(-4) - 1/x**3 + x**(-2) - 1/x + 1 + O(x)
def test_cot_rewrite():
    """Rewrite cot in terms of exp/sin/cos/tan/Pow/sqrt; numeric spot-checks."""
    neg_exp, pos_exp = exp(-x*I), exp(x*I)
    assert cot(x).rewrite(exp) == I*(pos_exp + neg_exp)/(pos_exp - neg_exp)
    assert cot(x).rewrite(sin) == 2*sin(2*x)/sin(x)**2
    assert cot(x).rewrite(cos) == -cos(x)/cos(x + S.Pi/2)
    assert cot(x).rewrite(tan) == 1/tan(x)
    assert cot(sinh(x)).rewrite(
        exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sinh(3)).n()
    assert cot(cosh(x)).rewrite(
        exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, cosh(3)).n()
    assert cot(tanh(x)).rewrite(
        exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tanh(3)).n()
    assert cot(coth(x)).rewrite(
        exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, coth(3)).n()
    assert cot(sin(x)).rewrite(
        exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sin(3)).n()
    assert cot(tan(x)).rewrite(
        exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tan(3)).n()
    assert cot(log(x)).rewrite(Pow) == -I*(x**-I + x**I)/(x**-I - x**I)
    assert cot(4*pi/34).rewrite(pow).ratsimp() == (cos(4*pi/34)/sin(4*pi/34)).rewrite(pow).ratsimp()
    assert cot(4*pi/17).rewrite(pow) == (cos(4*pi/17)/sin(4*pi/17)).rewrite(pow)
    assert cot(pi/19).rewrite(pow) == cot(pi/19)
    assert cot(pi/19).rewrite(sqrt) == cot(pi/19)
def test_cot_subs():
    """Substitution into cot, including substitution that lands on a pole."""
    assert cot(x).subs(cot(x), y) == y
    assert cot(x).subs(x, y) == cot(y)
    assert cot(x).subs(x, 0) == zoo
    assert cot(x).subs(x, S.Pi) == zoo
def test_cot_expansion():
    """Angle-addition and multiple-angle expansions of cot."""
    assert cot(x + y).expand(trig=True) == ((cot(x)*cot(y) - 1)/(cot(x) + cot(y))).expand()
    assert cot(x - y).expand(trig=True) == (-(cot(x)*cot(y) + 1)/(cot(x) - cot(y))).expand()
    assert cot(x + y + z).expand(trig=True) == (
        (cot(x)*cot(y)*cot(z) - cot(x) - cot(y) - cot(z))/
        (-1 + cot(x)*cot(y) + cot(x)*cot(z) + cot(y)*cot(z))).expand()
    assert cot(3*x).expand(trig=True) == ((cot(x)**3 - 3*cot(x))/(3*cot(x)**2 - 1)).expand()
    # Multiple-angle expansions checked numerically at rational cot values.
    assert 0 == cot(2*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 3))])*3 + 4
    assert 0 == cot(3*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 5))])*55 - 37
    assert 0 == cot(4*x - pi/4).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 7))])*863 + 191
def test_sinc():
    """Special values, parity, derivative, series and rewrites of sinc."""
    assert isinstance(sinc(x), sinc)
    s = Symbol('s', zero=True)
    assert sinc(s) == S.One
    assert sinc(S.Infinity) == S.Zero
    assert sinc(-S.Infinity) == S.Zero
    assert sinc(S.NaN) == S.NaN
    assert sinc(S.ComplexInfinity) == S.NaN
    # sinc vanishes at nonzero integer multiples of pi.
    n = Symbol('n', integer=True, nonzero=True)
    assert sinc(n*pi) == S.Zero
    assert sinc(-n*pi) == S.Zero
    assert sinc(pi/2) == 2 / pi
    assert sinc(-pi/2) == 2 / pi
    assert sinc(5*pi/2) == 2 / (5*pi)
    assert sinc(7*pi/2) == -2 / (7*pi)
    assert sinc(-x) == sinc(x)
    assert sinc(x).diff() == (x*cos(x) - sin(x)) / x**2
    assert sinc(x).series() == 1 - x**2/6 + x**4/120 + O(x**6)
    assert sinc(x).rewrite(jn) == jn(0, x)
    assert sinc(x).rewrite(sin) == sin(x) / x
def test_asin():
    """Special values, derivative and assumption handling of asin."""
    assert asin(nan) == nan
    assert asin.nargs == FiniteSet(1)
    assert asin(oo) == -I*oo
    assert asin(-oo) == I*oo
    # Note: asin(-x) = - asin(x)
    assert asin(0) == 0
    assert asin(1) == pi/2
    assert asin(-1) == -pi/2
    assert asin(sqrt(3)/2) == pi/3
    assert asin(-sqrt(3)/2) == -pi/3
    assert asin(sqrt(2)/2) == pi/4
    assert asin(-sqrt(2)/2) == -pi/4
    assert asin(sqrt((5 - sqrt(5))/8)) == pi/5
    assert asin(-sqrt((5 - sqrt(5))/8)) == -pi/5
    assert asin(Rational(1, 2)) == pi/6
    assert asin(-Rational(1, 2)) == -pi/6
    assert asin((sqrt(2 - sqrt(2)))/2) == pi/8
    assert asin(-(sqrt(2 - sqrt(2)))/2) == -pi/8
    assert asin((sqrt(5) - 1)/4) == pi/10
    assert asin(-(sqrt(5) - 1)/4) == -pi/10
    assert asin((sqrt(3) - 1)/sqrt(2**3)) == pi/12
    assert asin(-(sqrt(3) - 1)/sqrt(2**3)) == -pi/12
    assert asin(x).diff(x) == 1/sqrt(1 - x**2)
    # Realness: real only on [-1, 1]; unknown for a generic real symbol.
    assert asin(0.2).is_real is True
    assert asin(-2).is_real is False
    assert asin(r).is_real is None
    assert asin(-2*I) == -I*asinh(2)
    assert asin(Rational(1, 7), evaluate=False).is_positive is True
    assert asin(Rational(-1, 7), evaluate=False).is_positive is False
    assert asin(p).is_positive is None
def test_asin_series():
    """Taylor series of asin at 0 and individual taylor_term values."""
    assert asin(x).series(x, 0, 9) == \
        x + x**3/6 + 3*x**5/40 + 5*x**7/112 + O(x**9)
    t5 = asin(x).taylor_term(5, x)
    assert t5 == 3*x**5/40
    assert asin(x).taylor_term(7, x, t5, 0) == 5*x**7/112
def test_asin_rewrite():
    """Rewrites of asin in terms of log and the other inverse trig functions."""
    assert asin(x).rewrite(log) == -I*log(I*x + sqrt(1 - x**2))
    assert asin(x).rewrite(atan) == 2*atan(x/(1 + sqrt(1 - x**2)))
    assert asin(x).rewrite(acos) == S.Pi/2 - acos(x)
    assert asin(x).rewrite(acot) == 2*acot((sqrt(-x**2 + 1) + 1)/x)
    assert asin(x).rewrite(asec) == -asec(1/x) + pi/2
    assert asin(x).rewrite(acsc) == acsc(1/x)
def test_acos():
    """Special values, derivative, assumptions and conjugation of acos."""
    assert acos(nan) == nan
    assert acos(zoo) == zoo
    assert acos.nargs == FiniteSet(1)
    assert acos(oo) == I*oo
    assert acos(-oo) == -I*oo
    # Note: acos(-x) = pi - acos(x)
    assert acos(0) == pi/2
    assert acos(Rational(1, 2)) == pi/3
    assert acos(-Rational(1, 2)) == (2*pi)/3
    assert acos(1) == 0
    assert acos(-1) == pi
    assert acos(sqrt(2)/2) == pi/4
    assert acos(-sqrt(2)/2) == (3*pi)/4
    assert acos(x).diff(x) == -1/sqrt(1 - x**2)
    assert acos(0.2).is_real is True
    assert acos(-2).is_real is False
    assert acos(r).is_real is None
    assert acos(Rational(1, 7), evaluate=False).is_positive is True
    assert acos(Rational(-1, 7), evaluate=False).is_positive is True
    assert acos(Rational(3, 2), evaluate=False).is_positive is False
    assert acos(p).is_positive is None
    # Conjugation commutes with acos only for appropriate real arguments.
    assert acos(2 + p).conjugate() != acos(10 + p)
    assert acos(-3 + n).conjugate() != acos(-3 + n)
    assert acos(S.One/3).conjugate() == acos(S.One/3)
    assert acos(-S.One/3).conjugate() == acos(-S.One/3)
    assert acos(p + n*I).conjugate() == acos(p - n*I)
    assert acos(z).conjugate() != acos(conjugate(z))
def test_acos_series():
    """Taylor series of acos at 0 and its relation to the asin series."""
    assert acos(x).series(x, 0, 8) == \
        pi/2 - x - x**3/6 - 3*x**5/40 - 5*x**7/112 + O(x**8)
    assert acos(x).series(x, 0, 8) == pi/2 - asin(x).series(x, 0, 8)
    t5 = acos(x).taylor_term(5, x)
    assert t5 == -3*x**5/40
    assert acos(x).taylor_term(7, x, t5, 0) == -5*x**7/112
def test_acos_rewrite():
    """Rewrites of acos in terms of log and the other inverse trig functions."""
    assert acos(x).rewrite(log) == pi/2 + I*log(I*x + sqrt(1 - x**2))
    assert acos(x).rewrite(atan) == \
           atan(sqrt(1 - x**2)/x) + (pi/2)*(1 - x*sqrt(1/x**2))
    assert acos(0).rewrite(atan) == S.Pi/2
    assert acos(0.5).rewrite(atan) == acos(0.5).rewrite(log)
    assert acos(x).rewrite(asin) == S.Pi/2 - asin(x)
    assert acos(x).rewrite(acot) == -2*acot((sqrt(-x**2 + 1) + 1)/x) + pi/2
    assert acos(x).rewrite(asec) == asec(1/x)
    assert acos(x).rewrite(acsc) == -acsc(1/x) + pi/2
def test_atan():
    """Special values, derivative and sign assumptions of atan."""
    assert atan(nan) == nan
    assert atan.nargs == FiniteSet(1)
    assert atan(oo) == pi/2
    assert atan(-oo) == -pi/2
    assert atan(0) == 0
    assert atan(1) == pi/4
    assert atan(sqrt(3)) == pi/3
    assert atan(oo) == pi/2
    assert atan(x).diff(x) == 1/(1 + x**2)
    assert atan(r).is_real is True
    assert atan(-2*I) == -I*atanh(2)
    # Sign of atan follows the sign of its (real) argument.
    assert atan(p).is_positive is True
    assert atan(n).is_positive is False
    assert atan(x).is_positive is None
def test_atan_rewrite():
    """Rewrites of atan in terms of log and the other inverse trig functions."""
    assert atan(x).rewrite(log) == I*log((1 - I*x)/(1 + I*x))/2
    assert atan(x).rewrite(asin) == (-asin(1/sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x
    assert atan(x).rewrite(acos) == sqrt(x**2)*acos(1/sqrt(x**2 + 1))/x
    assert atan(x).rewrite(acot) == acot(1/x)
    assert atan(x).rewrite(asec) == sqrt(x**2)*asec(sqrt(x**2 + 1))/x
    assert atan(x).rewrite(acsc) == (-acsc(sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x
def test_atan2():
    """Quadrant-aware special values, rewrites and derivatives of atan2."""
    assert atan2.nargs == FiniteSet(2)
    assert atan2(0, 0) == S.NaN
    assert atan2(0, 1) == 0
    assert atan2(1, 1) == pi/4
    assert atan2(1, 0) == pi/2
    assert atan2(1, -1) == 3*pi/4
    assert atan2(0, -1) == pi
    assert atan2(-1, -1) == -3*pi/4
    assert atan2(-1, 0) == -pi/2
    assert atan2(-1, 1) == -pi/4
    i = symbols('i', imaginary=True)
    r = symbols('r', real=True)
    eq = atan2(r, i)
    ans = -I*log((i + I*r)/sqrt(i**2 + r**2))
    reps = ((r, 2), (i, I))
    assert eq.subs(reps) == ans.subs(reps)
    # Quadrant reduction to atan for symbols of known sign.
    x = Symbol('x', negative=True)
    y = Symbol('y', negative=True)
    assert atan2(y, x) == atan(y/x) - pi
    y = Symbol('y', nonnegative=True)
    assert atan2(y, x) == atan(y/x) + pi
    y = Symbol('y')
    assert atan2(y, x) == atan2(y, x, evaluate=False)
    u = Symbol("u", positive=True)
    assert atan2(0, u) == 0
    u = Symbol("u", negative=True)
    assert atan2(0, u) == pi
    assert atan2(y, oo) == 0
    assert atan2(y, -oo)== 2*pi*Heaviside(re(y)) - pi
    assert atan2(y, x).rewrite(log) == -I*log((x + I*y)/sqrt(x**2 + y**2))
    assert atan2(y, x).rewrite(atan) == 2*atan(y/(x + sqrt(x**2 + y**2)))
    # atan2 agrees with arg only for real arguments.
    ex = atan2(y, x) - arg(x + I*y)
    assert ex.subs({x:2, y:3}).rewrite(arg) == 0
    assert ex.subs({x:2, y:3*I}).rewrite(arg) == -pi - I*log(sqrt(5)*I/5)
    assert ex.subs({x:2*I, y:3}).rewrite(arg) == -pi/2 - I*log(sqrt(5)*I)
    assert ex.subs({x:2*I, y:3*I}).rewrite(arg) == -pi + atan(2/S(3)) + atan(3/S(2))
    i = symbols('i', imaginary=True)
    r = symbols('r', real=True)
    e = atan2(i, r)
    rewrite = e.rewrite(arg)
    reps = {i: I, r: -2}
    assert rewrite == -I*log(abs(I*i + r)/sqrt(abs(i**2 + r**2))) + arg((I*i + r)/sqrt(i**2 + r**2))
    assert (e - rewrite).subs(reps).equals(0)
    assert conjugate(atan2(x, y)) == atan2(conjugate(x), conjugate(y))
    # Partial derivatives of atan2 match the standard closed forms.
    assert diff(atan2(y, x), x) == -y/(x**2 + y**2)
    assert diff(atan2(y, x), y) == x/(x**2 + y**2)
    assert simplify(diff(atan2(y, x).rewrite(log), x)) == -y/(x**2 + y**2)
    assert simplify(diff(atan2(y, x).rewrite(log), y)) == x/(x**2 + y**2)
def test_acot():
    """Special values, derivative and sign assumptions of acot."""
    assert acot(nan) == nan
    assert acot.nargs == FiniteSet(1)
    assert acot(-oo) == 0
    assert acot(oo) == 0
    assert acot(1) == pi/4
    assert acot(0) == pi/2
    assert acot(sqrt(3)/3) == pi/3
    assert acot(1/sqrt(3)) == pi/3
    assert acot(-1/sqrt(3)) == -pi/3
    assert acot(x).diff(x) == -1/(1 + x**2)
    assert acot(r).is_real is True
    assert acot(I*pi) == -I*acoth(pi)
    assert acot(-2*I) == I*acoth(2)
    assert acot(x).is_positive is None
    assert acot(r).is_positive is True
    assert acot(p).is_positive is True
    assert acot(I).is_positive is False
def test_acot_rewrite():
    """Rewrites of acot in terms of log and the other inverse trig functions."""
    assert acot(x).rewrite(log) == I*log((x - I)/(x + I))/2
    assert acot(x).rewrite(asin) == x*(-asin(sqrt(-x**2)/sqrt(-x**2 - 1)) + pi/2)*sqrt(x**(-2))
    assert acot(x).rewrite(acos) == x*sqrt(x**(-2))*acos(sqrt(-x**2)/sqrt(-x**2 - 1))
    assert acot(x).rewrite(atan) == atan(1/x)
    assert acot(x).rewrite(asec) == x*sqrt(x**(-2))*asec(sqrt((x**2 + 1)/x**2))
    assert acot(x).rewrite(acsc) == x*(-acsc(sqrt((x**2 + 1)/x**2)) + pi/2)*sqrt(x**(-2))
def test_attributes():
    """The args tuple of a trig application holds its single argument."""
    assert sin(x).args == (x,)
def test_sincos_rewrite():
    """Automatic cofunction/supplement simplifications of sin and cos."""
    assert sin(pi/2 - x) == cos(x)
    assert sin(pi - x) == sin(x)
    assert cos(pi/2 - x) == sin(x)
    assert cos(pi - x) == -cos(x)
def _check_even_rewrite(func, arg):
    """Checks that the expr has been rewritten using f(-x) -> f(x)

    arg : -x
    """
    # For an even function the sign is absorbed, so the stored argument is -arg.
    return func(arg).args[0] == -arg
def _check_odd_rewrite(func, arg):
    """Checks that the expr has been rewritten using f(-x) -> -f(x)

    arg : -x
    """
    # For an odd function the sign is pulled out, leaving a Mul at the top.
    return func(arg).func.is_Mul
def _check_no_rewrite(func, arg):
    """Checks that the expr is not rewritten"""
    return func(arg).args[0] == arg
def test_evenodd_rewrite():
    """Even/odd sign extraction from negated arguments of trig functions."""
    a = cos(2)  # negative
    b = sin(1)  # positive
    even = [cos]
    odd = [sin, tan, cot, asin, atan, acot]
    with_minus = [-1, -2**1024 * E, -pi/105, -x*y, -x - y]
    for func in even:
        for expr in with_minus:
            assert _check_even_rewrite(func, expr)
        assert _check_no_rewrite(func, a*b)
        assert func(
            x - y) == func(y - x)  # it doesn't matter which form is canonical
    for func in odd:
        for expr in with_minus:
            assert _check_odd_rewrite(func, expr)
        assert _check_no_rewrite(func, a*b)
        assert func(
            x - y) == -func(y - x)  # it doesn't matter which form is canonical
def test_issue_4547():
    """Rewrites in terms of cot and the fdiff of cot (issue 4547)."""
    assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2)
    assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2)
    assert tan(x).rewrite(cot) == 1/cot(x)
    assert cot(x).fdiff() == -1 - cot(x)**2
def test_as_leading_term_issue_5272():
    """Leading terms of trig and inverse trig functions at x = 0."""
    assert sin(x).as_leading_term(x) == x
    assert cos(x).as_leading_term(x) == 1
    assert tan(x).as_leading_term(x) == x
    assert cot(x).as_leading_term(x) == 1/x
    assert asin(x).as_leading_term(x) == x
    assert acos(x).as_leading_term(x) == x
    assert atan(x).as_leading_term(x) == x
    assert acot(x).as_leading_term(x) == x
def test_leading_terms():
    """Expressions with no x-dependence in the limit are their own leading term."""
    for func in [sin, cos, tan, cot, asin, acos, atan, acot]:
        for arg in (1/x, S.Half):
            eq = func(arg)
            assert eq.as_leading_term(x) == eq
def test_atan2_expansion():
    """Series expansion and Jacobian of atan2 agree with atan where defined."""
    assert cancel(atan2(x**2, x + 1).diff(x) - atan(x**2/(x + 1)).diff(x)) == 0
    assert cancel(atan(y/x).series(y, 0, 5) - atan2(y, x).series(y, 0, 5)
                  + atan2(0, x) - atan(0)) == O(y**5)
    assert cancel(atan(y/x).series(x, 1, 4) - atan2(y, x).series(x, 1, 4)
                  + atan2(y, 1) - atan(y)) == O((x - 1)**4, (x, 1))
    assert cancel(atan((y + x)/x).series(x, 1, 3) - atan2(y + x, x).series(x, 1, 3)
                  + atan2(1 + y, 1) - atan(1 + y)) == O((x - 1)**3, (x, 1))
    assert Matrix([atan2(y, x)]).jacobian([y, x]) == \
        Matrix([[x/(y**2 + x**2), -y/(y**2 + x**2)]])
def test_aseries():
    """Numeric check of asymptotic series of atan/acot from both directions."""
    def t(n, v, d, e):
        # Compare the function at 1/v against its series in 1/x evaluated at v.
        assert abs(
            n(1/v).evalf() - n(1/x).series(x, dir=d).removeO().subs(x, v)) < e
    t(atan, 0.1, '+', 1e-5)
    t(atan, -0.1, '-', 1e-5)
    t(acot, 0.1, '+', 1e-5)
    t(acot, -0.1, '-', 1e-5)
def test_issue_4420():
    """Trig evaluation at integer/even/odd multiples of pi (issue 4420)."""
    i = Symbol('i', integer=True)
    e = Symbol('e', even=True)
    o = Symbol('o', odd=True)
    # unknown parity for variable
    assert cos(4*i*pi) == 1
    assert sin(4*i*pi) == 0
    assert tan(4*i*pi) == 0
    assert cot(4*i*pi) == zoo
    assert cos(3*i*pi) == cos(pi*i)  # +/-1
    assert sin(3*i*pi) == 0
    assert tan(3*i*pi) == 0
    assert cot(3*i*pi) == zoo
    assert cos(4.0*i*pi) == 1
    assert sin(4.0*i*pi) == 0
    assert tan(4.0*i*pi) == 0
    assert cot(4.0*i*pi) == zoo
    assert cos(3.0*i*pi) == cos(pi*i)  # +/-1
    assert sin(3.0*i*pi) == 0
    assert tan(3.0*i*pi) == 0
    assert cot(3.0*i*pi) == zoo
    assert cos(4.5*i*pi) == cos(0.5*pi*i)
    assert sin(4.5*i*pi) == sin(0.5*pi*i)
    assert tan(4.5*i*pi) == tan(0.5*pi*i)
    assert cot(4.5*i*pi) == cot(0.5*pi*i)
    # parity of variable is known
    assert cos(4*e*pi) == 1
    assert sin(4*e*pi) == 0
    assert tan(4*e*pi) == 0
    assert cot(4*e*pi) == zoo
    assert cos(3*e*pi) == 1
    assert sin(3*e*pi) == 0
    assert tan(3*e*pi) == 0
    assert cot(3*e*pi) == zoo
    assert cos(4.0*e*pi) == 1
    assert sin(4.0*e*pi) == 0
    assert tan(4.0*e*pi) == 0
    assert cot(4.0*e*pi) == zoo
    assert cos(3.0*e*pi) == 1
    assert sin(3.0*e*pi) == 0
    assert tan(3.0*e*pi) == 0
    assert cot(3.0*e*pi) == zoo
    assert cos(4.5*e*pi) == cos(0.5*pi*e)
    assert sin(4.5*e*pi) == sin(0.5*pi*e)
    assert tan(4.5*e*pi) == tan(0.5*pi*e)
    assert cot(4.5*e*pi) == cot(0.5*pi*e)
    assert cos(4*o*pi) == 1
    assert sin(4*o*pi) == 0
    assert tan(4*o*pi) == 0
    assert cot(4*o*pi) == zoo
    assert cos(3*o*pi) == -1
    assert sin(3*o*pi) == 0
    assert tan(3*o*pi) == 0
    assert cot(3*o*pi) == zoo
    assert cos(4.0*o*pi) == 1
    assert sin(4.0*o*pi) == 0
    assert tan(4.0*o*pi) == 0
    assert cot(4.0*o*pi) == zoo
    assert cos(3.0*o*pi) == -1
    assert sin(3.0*o*pi) == 0
    assert tan(3.0*o*pi) == 0
    assert cot(3.0*o*pi) == zoo
    assert cos(4.5*o*pi) == cos(0.5*pi*o)
    assert sin(4.5*o*pi) == sin(0.5*pi*o)
    assert tan(4.5*o*pi) == tan(0.5*pi*o)
    assert cot(4.5*o*pi) == cot(0.5*pi*o)
    # x could be imaginary
    assert cos(4*x*pi) == cos(4*pi*x)
    assert sin(4*x*pi) == sin(4*pi*x)
    assert tan(4*x*pi) == tan(4*pi*x)
    assert cot(4*x*pi) == cot(4*pi*x)
    assert cos(3*x*pi) == cos(3*pi*x)
    assert sin(3*x*pi) == sin(3*pi*x)
    assert tan(3*x*pi) == tan(3*pi*x)
    assert cot(3*x*pi) == cot(3*pi*x)
    assert cos(4.0*x*pi) == cos(4.0*pi*x)
    assert sin(4.0*x*pi) == sin(4.0*pi*x)
    assert tan(4.0*x*pi) == tan(4.0*pi*x)
    assert cot(4.0*x*pi) == cot(4.0*pi*x)
    assert cos(3.0*x*pi) == cos(3.0*pi*x)
    assert sin(3.0*x*pi) == sin(3.0*pi*x)
    assert tan(3.0*x*pi) == tan(3.0*pi*x)
    assert cot(3.0*x*pi) == cot(3.0*pi*x)
    assert cos(4.5*x*pi) == cos(4.5*pi*x)
    assert sin(4.5*x*pi) == sin(4.5*pi*x)
    assert tan(4.5*x*pi) == tan(4.5*pi*x)
    assert cot(4.5*x*pi) == cot(4.5*pi*x)
def test_inverses():
    """inverse() of trig/inverse-trig functions; sin/cos/csc/sec have none."""
    raises(AttributeError, lambda: sin(x).inverse())
    raises(AttributeError, lambda: cos(x).inverse())
    assert tan(x).inverse() == atan
    assert cot(x).inverse() == acot
    raises(AttributeError, lambda: csc(x).inverse())
    raises(AttributeError, lambda: sec(x).inverse())
    assert asin(x).inverse() == sin
    assert acos(x).inverse() == cos
    assert atan(x).inverse() == tan
    assert acot(x).inverse() == cot
def test_real_imag():
    """Real/imaginary decomposition of trig functions of a complex argument."""
    a, b = symbols('a b', real=True)
    z = a + b*I
    for deep in [True, False]:
        assert sin(
            z).as_real_imag(deep=deep) == (sin(a)*cosh(b), cos(a)*sinh(b))
        assert cos(
            z).as_real_imag(deep=deep) == (cos(a)*cosh(b), -sin(a)*sinh(b))
        assert tan(z).as_real_imag(deep=deep) == (sin(2*a)/(cos(2*a) +
            cosh(2*b)), sinh(2*b)/(cos(2*a) + cosh(2*b)))
        assert cot(z).as_real_imag(deep=deep) == (-sin(2*a)/(cos(2*a) -
            cosh(2*b)), -sinh(2*b)/(cos(2*a) - cosh(2*b)))
        # A purely real argument gives a zero imaginary part.
        assert sin(a).as_real_imag(deep=deep) == (sin(a), 0)
        assert cos(a).as_real_imag(deep=deep) == (cos(a), 0)
        assert tan(a).as_real_imag(deep=deep) == (tan(a), 0)
        assert cot(a).as_real_imag(deep=deep) == (cot(a), 0)
@XFAIL
def test_sin_cos_with_infinity():
    # Test for issue 5196
    # https://github.com/sympy/sympy/issues/5196
    assert sin(oo) == S.NaN
    assert cos(oo) == S.NaN
@slow
def test_sincos_rewrite_sqrt():
    """sin/cos of rational multiples of pi rewrite to nested radicals."""
    # equivalent to testing rewrite(pow)
    for p in [1, 3, 5, 17]:
        for t in [1, 8]:
            n = t*p
            for i in range(1, (n + 1)//2 + 1):
                if 1 == gcd(i, n):
                    x = i*pi/n
                    s1 = sin(x).rewrite(sqrt)
                    c1 = cos(x).rewrite(sqrt)
                    assert not s1.has(cos, sin), "fails for %d*pi/%d" % (i, n)
                    assert not c1.has(cos, sin), "fails for %d*pi/%d" % (i, n)
                    # Numeric sanity check of the radical form.
                    assert 1e-3 > abs(sin(x.evalf(5)) - s1.evalf(2)), "fails for %d*pi/%d" % (i, n)
                    assert 1e-3 > abs(cos(x.evalf(5)) - c1.evalf(2)), "fails for %d*pi/%d" % (i, n)
    assert cos(pi/14).rewrite(sqrt) == sqrt(cos(pi/7)/2 + S.Half)
    assert cos(pi/257).rewrite(sqrt).evalf(64) == cos(pi/257).evalf(64)
    assert cos(-15*pi/2/11, evaluate=False).rewrite(
        sqrt) == -sqrt(-cos(4*pi/11)/2 + S.Half)
    assert cos(Mul(2, pi, S.Half, evaluate=False), evaluate=False).rewrite(
        sqrt) == -1
    e = cos(pi/3/17)  # don't use pi/15 since that is caught at instantiation
    a = (
        -3*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17) + 17)/64 -
        3*sqrt(34)*sqrt(sqrt(17) + 17)/128 - sqrt(sqrt(17) +
        17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17)
        + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 - sqrt(-sqrt(17)
        + 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 - S(1)/32 +
        sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 +
        3*sqrt(2)*sqrt(sqrt(17) + 17)/128 + sqrt(34)*sqrt(-sqrt(17) + 17)/128
        + 13*sqrt(2)*sqrt(-sqrt(17) + 17)/128 + sqrt(17)*sqrt(-sqrt(17) +
        17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17)
        + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 + 5*sqrt(17)/32
        + sqrt(3)*sqrt(-sqrt(2)*sqrt(sqrt(17) + 17)*sqrt(sqrt(17)/32 +
        sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
        sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + S(15)/32)/8 -
        5*sqrt(2)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
        sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 +
        S(15)/32)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 -
        3*sqrt(2)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 +
        sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
        sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + S(15)/32)/32
        + sqrt(34)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
        sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 +
        S(15)/32)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 +
        sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
        sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + S(15)/32)/2 +
        S.Half + sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) +
        17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) -
        sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) +
        6*sqrt(17) + 34)/32 + S(15)/32)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) -
        sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) +
        6*sqrt(17) + 34)/32 + sqrt(34)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 +
        sqrt(2)*sqrt(-sqrt(17) + 17)/32 +
        sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) +
        17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 +
        S(15)/32)/32)/2)
    assert e.rewrite(sqrt) == a
    assert e.n() == a.n()
    # coverage of fermatCoords: multiplicity > 1; the following could be
    # different but that portion of the code should be tested in some way
    assert cos(pi/9/17).rewrite(sqrt) == \
        sin(pi/9)*sin(2*pi/17) + cos(pi/9)*cos(2*pi/17)
@slow
def test_tancot_rewrite_sqrt():
    """tan/cot of rational multiples of pi rewrite to radical form."""
    # equivalent to testing rewrite(pow)
    for p in [1, 3, 5, 17]:
        for t in [1, 8]:
            n = t*p
            for i in range(1, (n + 1)//2 + 1):
                if 1 == gcd(i, n):
                    x = i*pi/n
                    if 2*i != n and 3*i != 2*n:
                        # Skip arguments where tan has a pole.
                        t1 = tan(x).rewrite(sqrt)
                        assert not t1.has(cot, tan), "fails for %d*pi/%d" % (i, n)
                        assert 1e-3 > abs( tan(x.evalf(7)) - t1.evalf(4) ), "fails for %d*pi/%d" % (i, n)
                    if i != 0 and i != n:
                        # Skip arguments where cot has a pole.
                        c1 = cot(x).rewrite(sqrt)
                        assert not c1.has(cot, tan), "fails for %d*pi/%d" % (i, n)
                        assert 1e-3 > abs( cot(x.evalf(7)) - c1.evalf(4) ), "fails for %d*pi/%d" % (i, n)
def test_sec():
    """Special values, rewrites, series and assumptions of sec."""
    x = symbols('x', real=True)
    z = symbols('z')
    assert sec.nargs == FiniteSet(1)
    assert sec(0) == 1
    assert sec(pi) == -1
    assert sec(pi/2) == zoo
    assert sec(-pi/2) == zoo
    assert sec(pi/6) == 2*sqrt(3)/3
    assert sec(pi/3) == 2
    assert sec(5*pi/2) == zoo
    assert sec(9*pi/7) == -sec(2*pi/7)
    assert sec(3*pi/4) == -sqrt(2)  # issue 8421
    assert sec(I) == 1/cosh(1)
    assert sec(x*I) == 1/cosh(x)
    assert sec(-x) == sec(x)
    assert sec(asec(x)) == x
    assert sec(x).rewrite(exp) == 1/(exp(I*x)/2 + exp(-I*x)/2)
    assert sec(x).rewrite(sin) == sec(x)
    assert sec(x).rewrite(cos) == 1/cos(x)
    assert sec(x).rewrite(tan) == (tan(x/2)**2 + 1)/(-tan(x/2)**2 + 1)
    assert sec(x).rewrite(pow) == sec(x)
    assert sec(x).rewrite(sqrt) == sec(x)
    assert sec(z).rewrite(cot) == (cot(z/2)**2 + 1)/(cot(z/2)**2 - 1)
    assert sec(z).conjugate() == sec(conjugate(z))
    assert (sec(z).as_real_imag() ==
    (cos(re(z))*cosh(im(z))/(sin(re(z))**2*sinh(im(z))**2 +
    cos(re(z))**2*cosh(im(z))**2),
    sin(re(z))*sinh(im(z))/(sin(re(z))**2*sinh(im(z))**2 +
    cos(re(z))**2*cosh(im(z))**2)))
    assert sec(x).expand(trig=True) == 1/cos(x)
    assert sec(2*x).expand(trig=True) == 1/(2*cos(x)**2 - 1)
    assert sec(x).is_real == True
    assert sec(z).is_real == None
    assert sec(a).is_algebraic is None
    assert sec(na).is_algebraic is False
    assert sec(x).as_leading_term() == sec(x)
    # Finiteness: finite away from the odd multiples of pi/2.
    assert sec(0).is_finite == True
    assert sec(x).is_finite == None
    assert sec(pi/2).is_finite == False
    assert series(sec(x), x, x0=0, n=6) == 1 + x**2/2 + 5*x**4/24 + O(x**6)
    # https://github.com/sympy/sympy/issues/7166
    assert series(sqrt(sec(x))) == 1 + x**2/4 + 7*x**4/96 + O(x**6)
    # https://github.com/sympy/sympy/issues/7167
    assert (series(sqrt(sec(x)), x, x0=pi*3/2, n=4) ==
            1/sqrt(x - 3*pi/2) + (x - 3*pi/2)**(S(3)/2)/12 +
            (x - 3*pi/2)**(S(7)/2)/160 + O((x - 3*pi/2)**4, (x, 3*pi/2)))
    assert sec(x).diff(x) == tan(x)*sec(x)
    # Taylor Term checks
    assert sec(z).taylor_term(4, z) == 5*z**4/24
    assert sec(z).taylor_term(6, z) == 61*z**6/720
    assert sec(z).taylor_term(5, z) == 0
def test_csc():
    """Special values, rewrites, series and assumptions of csc."""
    x = symbols('x', real=True)
    z = symbols('z')
    # https://github.com/sympy/sympy/issues/6707
    cosecant = csc('x')
    alternate = 1/sin('x')
    assert cosecant.equals(alternate) == True
    assert alternate.equals(cosecant) == True
    assert csc.nargs == FiniteSet(1)
    assert csc(0) == zoo
    assert csc(pi) == zoo
    assert csc(pi/2) == 1
    assert csc(-pi/2) == -1
    assert csc(pi/6) == 2
    assert csc(pi/3) == 2*sqrt(3)/3
    assert csc(5*pi/2) == 1
    assert csc(9*pi/7) == -csc(2*pi/7)
    assert csc(3*pi/4) == sqrt(2)  # issue 8421
    assert csc(I) == -I/sinh(1)
    assert csc(x*I) == -I/sinh(x)
    assert csc(-x) == -csc(x)
    assert csc(acsc(x)) == x
    assert csc(x).rewrite(exp) == 2*I/(exp(I*x) - exp(-I*x))
    assert csc(x).rewrite(sin) == 1/sin(x)
    assert csc(x).rewrite(cos) == csc(x)
    assert csc(x).rewrite(tan) == (tan(x/2)**2 + 1)/(2*tan(x/2))
    assert csc(x).rewrite(cot) == (cot(x/2)**2 + 1)/(2*cot(x/2))
    assert csc(z).conjugate() == csc(conjugate(z))
    assert (csc(z).as_real_imag() ==
    (sin(re(z))*cosh(im(z))/(sin(re(z))**2*cosh(im(z))**2 +
    cos(re(z))**2*sinh(im(z))**2),
    -cos(re(z))*sinh(im(z))/(sin(re(z))**2*cosh(im(z))**2 +
    cos(re(z))**2*sinh(im(z))**2)))
    assert csc(x).expand(trig=True) == 1/sin(x)
    assert csc(2*x).expand(trig=True) == 1/(2*sin(x)*cos(x))
    assert csc(x).is_real == True
    assert csc(z).is_real == None
    assert csc(a).is_algebraic is None
    assert csc(na).is_algebraic is False
    assert csc(x).as_leading_term() == csc(x)
    # Finiteness: csc has poles at the integer multiples of pi.
    assert csc(0).is_finite == False
    assert csc(x).is_finite == None
    assert csc(pi/2).is_finite == True
    assert series(csc(x), x, x0=pi/2, n=6) == \
        1 + (x - pi/2)**2/2 + 5*(x - pi/2)**4/24 + O((x - pi/2)**6, (x, pi/2))
    assert series(csc(x), x, x0=0, n=6) == \
            1/x + x/6 + 7*x**3/360 + 31*x**5/15120 + O(x**6)
    assert csc(x).diff(x) == -cot(x)*csc(x)
    assert csc(x).taylor_term(2, x) == 0
    assert csc(x).taylor_term(3, x) == 7*x**3/360
    assert csc(x).taylor_term(5, x) == 31*x**5/15120
def test_asec():
    """Special values, derivative and rewrites of asec."""
    z = Symbol('z', zero=True)
    assert asec(z) == zoo
    assert asec(nan) == nan
    assert asec(1) == 0
    assert asec(-1) == pi
    assert asec(oo) == pi/2
    assert asec(-oo) == pi/2
    assert asec(zoo) == pi/2
    assert asec(x).diff(x) == 1/(x**2*sqrt(1 - 1/x**2))
    assert asec(x).as_leading_term(x) == log(x)
    assert asec(x).rewrite(log) == I*log(sqrt(1 - 1/x**2) + I/x) + pi/2
    assert asec(x).rewrite(asin) == -asin(1/x) + pi/2
    assert asec(x).rewrite(acos) == acos(1/x)
    assert asec(x).rewrite(atan) == (2*atan(x + sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x
    assert asec(x).rewrite(acot) == (2*acot(x - sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x
    assert asec(x).rewrite(acsc) == -acsc(x) + pi/2
def test_asec_is_real():
    """asec is real iff |argument| >= 1; unknown for generic symbols."""
    assert asec(S(1)/2).is_real is False
    n = Symbol('n', positive=True, integer=True)
    assert asec(n).is_real is True
    assert asec(x).is_real is None
    assert asec(r).is_real is None
    t = Symbol('t', real=False)
    assert asec(t).is_real is False
def test_acsc():
    """Special values, derivative and rewrites of acsc."""
    assert acsc(nan) == nan
    assert acsc(1) == pi/2
    assert acsc(-1) == -pi/2
    assert acsc(oo) == 0
    assert acsc(-oo) == 0
    assert acsc(zoo) == 0
    assert acsc(x).diff(x) == -1/(x**2*sqrt(1 - 1/x**2))
    assert acsc(x).as_leading_term(x) == log(x)
    assert acsc(x).rewrite(log) == -I*log(sqrt(1 - 1/x**2) + I/x)
    assert acsc(x).rewrite(asin) == asin(1/x)
    assert acsc(x).rewrite(acos) == -acos(1/x) + pi/2
    assert acsc(x).rewrite(atan) == (-atan(sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x
    assert acsc(x).rewrite(acot) == (-acot(1/sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x
    assert acsc(x).rewrite(asec) == -asec(x) + pi/2
@XFAIL
def test_csc_rewrite_failing():
    # Move these 2 tests to test_csc() once bugs fixed
    # sin(x).rewrite(pow) raises RuntimeError: maximum recursion depth
    # https://github.com/sympy/sympy/issues/7171
    assert csc(x).rewrite(pow) == csc(x)
    assert csc(x).rewrite(sqrt) == csc(x)
def test_issue_8653():
    """Irrationality of trig at an integer symbol is undecided (issue 8653)."""
    n = Symbol('n', integer=True)
    assert sin(n).is_irrational is None
    assert cos(n).is_irrational is None
    assert tan(n).is_irrational is None
def test_issue_9157():
    """atan(n - 1) is nonnegative for positive integer n (issue 9157)."""
    n = Symbol('n', integer=True, positive=True)
    # BUG FIX: the original line lacked ``assert`` and was a no-op
    # expression, so the test checked nothing.
    assert atan(n - 1).is_nonnegative is True
| bsd-3-clause |
madelynfreed/rlundo | venv/lib/python2.7/site-packages/IPython/nbformat/v3/nbpy.py | 28 | 7561 | """Read and write notebooks as regular .py files.
Authors:
* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import re
from .rwbase import NotebookReader, NotebookWriter
from .nbbase import (
new_code_cell, new_text_cell, new_worksheet,
new_notebook, new_heading_cell, nbformat, nbformat_minor,
)
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
_encoding_declaration_re = re.compile(r"^#.*coding[:=]\s*([-\w.]+)")
class PyReaderError(Exception):
    """Error raised while parsing the .py notebook format."""
    pass
class PyReader(NotebookReader):
    """Read notebooks stored as regular ``.py`` files.

    Cells are delimited by ``# <codecell>`` / ``# <htmlcell>`` /
    ``# <markdowncell>`` / ``# <rawcell>`` / ``# <headingcell level=N>``
    marker comments; lines before the first marker form an implicit
    code cell.
    """

    def reads(self, s, **kwargs):
        """Parse the string *s* and return a notebook node."""
        return self.to_notebook(s, **kwargs)

    def to_notebook(self, s, **kwargs):
        """Convert .py source *s* into a notebook with a single worksheet."""
        lines = s.splitlines()
        cells = []
        cell_lines = []
        kwargs = {}
        state = u'codecell'
        for line in lines:
            # nbformat and PEP 263 coding declarations carry no cell content.
            if line.startswith(u'# <nbformat>') or _encoding_declaration_re.match(line):
                pass
            elif line.startswith(u'# <codecell>'):
                cell = self.new_cell(state, cell_lines, **kwargs)
                if cell is not None:
                    cells.append(cell)
                state = u'codecell'
                cell_lines = []
                kwargs = {}
            elif line.startswith(u'# <htmlcell>'):
                cell = self.new_cell(state, cell_lines, **kwargs)
                if cell is not None:
                    cells.append(cell)
                state = u'htmlcell'
                cell_lines = []
                kwargs = {}
            elif line.startswith(u'# <markdowncell>'):
                cell = self.new_cell(state, cell_lines, **kwargs)
                if cell is not None:
                    cells.append(cell)
                state = u'markdowncell'
                cell_lines = []
                kwargs = {}
            # VERSIONHACK: plaintext -> raw
            elif line.startswith(u'# <rawcell>') or line.startswith(u'# <plaintextcell>'):
                cell = self.new_cell(state, cell_lines, **kwargs)
                if cell is not None:
                    cells.append(cell)
                state = u'rawcell'
                cell_lines = []
                kwargs = {}
            elif line.startswith(u'# <headingcell'):
                cell = self.new_cell(state, cell_lines, **kwargs)
                if cell is not None:
                    cells.append(cell)
                cell_lines = []
                m = re.match(r'# <headingcell level=(?P<level>\d)>', line)
                if m is not None:
                    state = u'headingcell'
                    kwargs = {}
                    kwargs['level'] = int(m.group('level'))
                else:
                    # Malformed heading marker: fall back to a code cell.
                    state = u'codecell'
                    kwargs = {}
                    cell_lines = []
            else:
                cell_lines.append(line)
        # Flush a trailing code cell that has no closing marker.
        if cell_lines and state == u'codecell':
            cell = self.new_cell(state, cell_lines)
            if cell is not None:
                cells.append(cell)
        ws = new_worksheet(cells=cells)
        nb = new_notebook(worksheets=[ws])
        return nb

    def new_cell(self, state, lines, **kwargs):
        """Build a cell of type *state* from *lines*; return None when empty."""
        if state == u'codecell':
            input = u'\n'.join(lines)
            input = input.strip(u'\n')
            if input:
                return new_code_cell(input=input)
        elif state == u'htmlcell':
            text = self._remove_comments(lines)
            if text:
                return new_text_cell(u'html', source=text)
        elif state == u'markdowncell':
            text = self._remove_comments(lines)
            if text:
                return new_text_cell(u'markdown', source=text)
        elif state == u'rawcell':
            text = self._remove_comments(lines)
            if text:
                return new_text_cell(u'raw', source=text)
        elif state == u'headingcell':
            text = self._remove_comments(lines)
            level = kwargs.get('level', 1)
            if text:
                return new_heading_cell(source=text, level=level)

    def _remove_comments(self, lines):
        """Strip the leading ``# `` comment prefix from commented lines."""
        new_lines = []
        for line in lines:
            if line.startswith(u'#'):
                new_lines.append(line[2:])
            else:
                new_lines.append(line)
        text = u'\n'.join(new_lines)
        text = text.strip(u'\n')
        return text

    def split_lines_into_blocks(self, lines):
        """Yield the source of each top-level statement in *lines*."""
        if len(lines) == 1:
            yield lines[0]
            # BUG FIX: ``raise StopIteration()`` inside a generator becomes
            # a RuntimeError under PEP 479 (Python 3.7+); a plain ``return``
            # is the correct way to end the generator early.
            return
        import ast
        source = '\n'.join(lines)
        code = ast.parse(source)
        starts = [x.lineno - 1 for x in code.body]
        for i in range(len(starts) - 1):
            yield '\n'.join(lines[starts[i]:starts[i + 1]]).strip('\n')
        yield '\n'.join(lines[starts[-1]:]).strip('\n')
class PyWriter(NotebookWriter):
    """Write notebooks as regular .py files with cell marker comments."""
    def writes(self, nb, **kwargs):
        """Return the .py-format source string for notebook *nb*."""
        # Coding declaration and nbformat version header come first.
        lines = [u'# -*- coding: utf-8 -*-']
        lines.extend([
            u'# <nbformat>%i.%i</nbformat>' % (nbformat, nbformat_minor),
            u'',
        ])
        for ws in nb.worksheets:
            for cell in ws.cells:
                if cell.cell_type == u'code':
                    input = cell.get(u'input')
                    if input is not None:
                        lines.extend([u'# <codecell>',u''])
                        lines.extend(input.splitlines())
                        lines.append(u'')
                elif cell.cell_type == u'html':
                    input = cell.get(u'source')
                    if input is not None:
                        lines.extend([u'# <htmlcell>',u''])
                        # Non-code cell content is stored as comment lines.
                        lines.extend([u'# ' + line for line in input.splitlines()])
                        lines.append(u'')
                elif cell.cell_type == u'markdown':
                    input = cell.get(u'source')
                    if input is not None:
                        lines.extend([u'# <markdowncell>',u''])
                        lines.extend([u'# ' + line for line in input.splitlines()])
                        lines.append(u'')
                elif cell.cell_type == u'raw':
                    input = cell.get(u'source')
                    if input is not None:
                        lines.extend([u'# <rawcell>',u''])
                        lines.extend([u'# ' + line for line in input.splitlines()])
                        lines.append(u'')
                elif cell.cell_type == u'heading':
                    input = cell.get(u'source')
                    level = cell.get(u'level',1)
                    if input is not None:
                        lines.extend([u'# <headingcell level=%s>' % level,u''])
                        lines.extend([u'# ' + line for line in input.splitlines()])
                        lines.append(u'')
        lines.append('')
        return u'\n'.join(lines)
# Singleton reader/writer instances backing the module-level API functions.
_reader = PyReader()
_writer = PyWriter()
reads = _reader.reads
read = _reader.read
to_notebook = _reader.to_notebook
write = _writer.write
writes = _writer.writes
| gpl-3.0 |
aureooms/networkx | networkx/algorithms/operators/all.py | 53 | 4220 | """Operations on many graphs.
"""
# Copyright (C) 2013 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
try:
from itertools import izip_longest as zip_longest
except ImportError: # Python3 has zip_longest
from itertools import zip_longest
import networkx as nx
# from networkx.utils import is_string_like
__author__ = """\n""".join([ 'Robert King <kingrobertking@gmail.com>',
'Aric Hagberg <aric.hagberg@gmail.com>'])
__all__ = ['union_all', 'compose_all', 'disjoint_union_all',
'intersection_all']
def union_all(graphs, rename=(None,), name=None):
    """Return the union of all graphs.

    The graphs must be disjoint, otherwise an exception is raised.

    Parameters
    ----------
    graphs : list of graphs
       List of NetworkX graphs

    rename : bool , default=(None, None)
       Node names of G and H can be changed by specifying the tuple
       rename=('G-','H-') (for example).  Node "u" in G is then renamed
       "G-u" and "v" in H is renamed "H-v".

    name : string
       Specify the name for the union graph

    Returns
    -------
    U : a graph with the same type as the first graph in list

    Raises
    ------
    ValueError
       If `graphs` is empty.

    Notes
    -----
    To force a disjoint union with node relabeling, use
    disjoint_union_all(G,H) or convert_node_labels_to_integers().

    Graph, edge, and node attributes are propagated to the union graph.
    If a graph attribute is present in multiple graphs, then the value
    from the last graph in the list with that attribute is used.

    See Also
    --------
    union
    disjoint_union_all
    """
    graphs_names = zip_longest(graphs, rename)
    try:
        # The first graph seeds the union; the rest are folded in pairwise.
        U, gname = next(graphs_names)
    except StopIteration:
        # An explicit error beats a bare StopIteration escaping to the caller.
        raise ValueError('cannot apply union_all to an empty list of graphs')
    for H, hname in graphs_names:
        U = nx.union(U, H, (gname, hname), name=name)
        # Only the first graph's rename prefix applies; clear it afterwards.
        gname = None
    return U
def disjoint_union_all(graphs):
    """Return the disjoint union of all graphs.

    This operation forces distinct integer node labels starting with 0
    for the first graph in the list and numbering consecutively.

    Parameters
    ----------
    graphs : list
       List of NetworkX graphs

    Returns
    -------
    U : A graph with the same type as the first graph in list

    Notes
    -----
    It is recommended that the graphs be either all directed or all
    undirected.

    Graph, edge, and node attributes are propagated to the union graph.
    If a graph attribute is present in multiple graphs, then the value
    from the last graph in the list with that attribute is used.
    """
    graph_iter = iter(graphs)
    # Fold the remaining graphs into the first one, pair by pair.
    result = next(graph_iter)
    for other in graph_iter:
        result = nx.disjoint_union(result, other)
    return result
def compose_all(graphs, name=None):
    """Return the composition of all graphs.

    Composition is the simple union of the node sets and edge sets.
    The node sets of the supplied graphs need not be disjoint.

    Parameters
    ----------
    graphs : list
       List of NetworkX graphs

    name : string
       Specify name for new graph

    Returns
    -------
    C : A graph with the same type as the first graph in list

    Notes
    -----
    It is recommended that the supplied graphs be either all directed or
    all undirected.

    Graph, edge, and node attributes are propagated to the union graph.
    If a graph attribute is present in multiple graphs, then the value
    from the last graph in the list with that attribute is used.
    """
    graph_iter = iter(graphs)
    # Fold the remaining graphs into the first one, pair by pair.
    composed = next(graph_iter)
    for other in graph_iter:
        composed = nx.compose(composed, other, name=name)
    return composed
def intersection_all(graphs):
    """Return a new graph that contains only the edges that exist in
    all graphs.

    All supplied graphs must have the same node set.

    Parameters
    ----------
    graphs_list : list
       List of NetworkX graphs

    Returns
    -------
    R : A new graph with the same type as the first graph in list

    Notes
    -----
    Attributes from the graph, nodes, and edges are not copied to the new
    graph.
    """
    graph_iter = iter(graphs)
    # Intersect the remaining graphs with the first one, pair by pair.
    common = next(graph_iter)
    for other in graph_iter:
        common = nx.intersection(common, other)
    return common
| bsd-3-clause |
Arable/evepod | lib/python2.7/site-packages/newrelic-2.12.0.10/newrelic/samplers/data_sampler.py | 4 | 2640 | """This module implements a higher level data sampler which sits atop and
manages the consumption of data from a data source.
"""
import logging
from ..common.object_names import callable_name
_logger = logging.getLogger(__name__)
class DataSampler(object):
    """Manages one data source on behalf of a named consumer.

    Wraps the source factory, derives producer metadata (name, group,
    guid, version) from the source properties, and exposes
    start()/stop()/metrics() for the harvest cycle.
    """

    def __init__(self, consumer, source, name, settings, **properties):
        """Create a sampler for *source*.

        consumer -- name of the consuming component.
        source -- data source entry point; calling it with *settings*
            returns a properties dict that must contain a 'factory' key.
        name -- explicit sampler name; falls back to the source
            properties, then to the callable's qualified name.
        settings -- agent settings passed through to the source.
        properties -- explicit overrides for the source properties.
        """
        self.consumer = consumer
        self.settings = settings

        self.source_properties = source(settings)
        self.factory = self.source_properties['factory']
        self.instance = None

        # Keyword overrides win over what the source itself declared.
        self.merged_properties = dict(self.source_properties)
        self.merged_properties.update(properties)

        self.name = (name or self.merged_properties.get('name')
                or callable_name(source))

        self.group = self.merged_properties.get('group')
        if self.group:
            self.group = self.group.rstrip('/')

        self.guid = self.merged_properties.get('guid')
        if self.guid is None and hasattr(source, 'guid'):
            self.guid = source.guid

        self.version = self.merged_properties.get('version')
        if self.version is None and hasattr(source, 'version'):
            self.version = source.version

        # Environment description handed to the factory on start().
        self.environ = {
            'consumer.name': consumer,
            'consumer.vendor': 'New Relic',
            'producer.name': self.name,
            'producer.group': self.group,
            'producer.guid': self.guid,
            'producer.version': self.version,
        }

        _logger.debug('Initialising data sampler for %r.', self.environ)

    def start(self):
        """Lazily instantiate the source and invoke its start() hook."""
        if self.instance is None:
            self.instance = self.factory(self.environ)
            if self.instance is None:
                _logger.error('Failed to create instance of data source for '
                        '%r, returned None. Custom metrics from this data '
                        'source will not subsequently be available. If this '
                        'problem persists, please report this problem '
                        'to the provider of the data source.', self.environ)
        if hasattr(self.instance, 'start'):
            self.instance.start()

    def stop(self):
        """Invoke the source's stop() hook, or discard the instance so a
        fresh one is built on the next start()."""
        if not hasattr(self.instance, 'stop'):
            self.instance = None
        else:
            self.instance.stop()

    def metrics(self):
        """Return an iterable of (name, value) samples from the source,
        prefixing each name with the group when one is configured."""
        if self.instance is None:
            return []
        if not self.group:
            return self.instance()
        return (('%s/%s' % (self.group, key), value)
                for key, value in self.instance())
| apache-2.0 |
tkf/neo | neo/io/micromedio.py | 1 | 5751 | # encoding: utf-8
"""
Class for reading/writing data from micromed (.trc).
Inspired by the Matlab code for EEGLAB from Rami K. Niazy.
Supported : Read
Author: sgarcia
"""
from .baseio import BaseIO
from ..core import *
from .tools import create_many_to_one_relationship
import numpy as np
import quantities as pq
import os
import struct
import datetime
# file no longer exists in Python3
# Python 2's builtin ``file`` type no longer exists in Python 3; fall back
# to io.BufferedReader there so struct_file can subclass something sensible.
try:
    file
except NameError:
    import io
    file = io.BufferedReader


class struct_file(file):
    """File object with a helper for reading fixed-size binary records."""

    def read_f(self, format):
        """Read exactly struct.calcsize(format) bytes and unpack them.

        Returns the tuple produced by struct.unpack.
        """
        size = struct.calcsize(format)
        return struct.unpack(format, self.read(size))
class MicromedIO(BaseIO):
    """
    Class for reading data from micromed (.trc).

    Usage:
        >>> from neo import io
        >>> r = io.MicromedIO(filename='File_micromed_1.TRC')
        >>> seg = r.read_segment(lazy=False, cascade=True)
        >>> print seg.analogsignals # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
        [<AnalogSignal(array([ -1.77246094e+02,  -2.24707031e+02,  -2.66015625e+02,
        ...
    """
    # BaseIO capability flags: this IO only reads, and only yields Segments.
    is_readable = True
    is_writable = False
    supported_objects = [ Segment , AnalogSignal , EventArray ]
    readable_objects = [Segment]
    writeable_objects = [ ]
    has_header = False
    is_streameable = False
    read_params = { Segment : [ ] }
    write_params = None
    name = None
    extensions = [ 'TRC' ]
    mode = 'file'

    def __init__(self , filename = None) :
        """
        This class read a micromed TRC file.

        Arguments:
            filename : the filename to read
        """
        BaseIO.__init__(self)
        self.filename = filename

    def read_segment(self, cascade = True, lazy = False,):
        """Read the whole TRC file into one Segment.

        cascade : when False, return a bare Segment with annotations only.
        lazy    : when True, skip reading sample data; signals/events get a
                  ``lazy_shape`` attribute instead of real data.
        """
        # struct_file adds read_f() for unpacking fixed binary records.
        # NOTE(review): this 2-arg construction matches Python 2's
        # file(name, mode); under Python 3 struct_file is a BufferedReader
        # subclass and this call signature would differ — confirm.
        f = struct_file(self.filename, 'rb')
        #Name
        # Patient surname (22 bytes) and name (20 bytes) at fixed offsets,
        # right-padded with spaces which the loops strip off.
        f.seek(64,0)
        surname = f.read(22)
        while surname[-1] == ' ' :
            if len(surname) == 0 :break
            surname = surname[:-1]
        name = f.read(20)
        while name[-1] == ' ' :
            if len(name) == 0 :break
            name = name[:-1]
        #Date
        # Recording date stored as day/month/year-offset-from-1900 bytes.
        f.seek(128,0)
        day, month, year = f.read_f('bbb')
        rec_date = datetime.date(year+1900 , month , day)
        #header
        # Only header version 4 of the TRC format is supported.
        f.seek(175,0)
        header_version, = f.read_f('b')
        assert header_version == 4
        # Global acquisition parameters: where samples start, channel
        # count, base sampling rate and bytes per sample.
        f.seek(138,0)
        Data_Start_Offset , Num_Chan , Multiplexer , Rate_Min , Bytes = f.read_f('IHHHH')
        # Offsets/lengths of the descriptor areas used below.
        f.seek(176+8,0)
        Code_Area , Code_Area_Length, = f.read_f('II')
        f.seek(192+8,0)
        Electrode_Area , Electrode_Area_Length = f.read_f('II')
        f.seek(400+8,0)
        Trigger_Area , Tigger_Area_Length=f.read_f('II')
        seg = Segment(  name = name,
                        file_origin = os.path.basename(self.filename),
                        )
        seg.annotate(surname = surname)
        seg.annotate(rec_date = rec_date)
        if not cascade:
            return seg
        # reading raw data
        # Samples are interleaved across channels; reshape to
        # (n_samples, n_channels).
        if not lazy:
            f.seek(Data_Start_Offset,0)
            rawdata = np.fromstring(f.read() , dtype = 'u'+str(Bytes))
            rawdata = rawdata.reshape(( rawdata.size/Num_Chan , Num_Chan))
        # Reading Code Info
        # One electrode-descriptor index per channel.
        f.seek(Code_Area,0)
        code = np.fromfile(f, dtype='u2', count=Num_Chan)
        # Map the file's unit code to a quantities unit; unknown codes
        # fall back to microvolts below.
        units = {-1: pq.nano*pq.V, 0:pq.uV, 1:pq.mV, 2:1, 100: pq.percent, 101:pq.dimensionless, 102:pq.dimensionless}
        for c in range(Num_Chan):
            # Each electrode record is 128 bytes; +2 skips its status bytes.
            f.seek(Electrode_Area+code[c]*128+2,0)
            label = f.read(6).strip("\x00")
            ground = f.read(6).strip("\x00")
            logical_min , logical_max, logical_ground, physical_min, physical_max = f.read_f('iiiii')
            k, = f.read_f('h')
            if k in units.keys() :
                unit = units[k]
            else :
                unit = pq.uV
            f.seek(8,1)
            # Per-channel rate is stored as a multiple of Rate_Min.
            sampling_rate, = f.read_f('H') * pq.Hz
            sampling_rate *= Rate_Min
            if lazy:
                signal = [ ]*unit
            else:
                # Convert raw ADC counts to physical units.
                factor = float(physical_max - physical_min) / float(logical_max-logical_min+1)
                signal = ( rawdata[:,c].astype('f') - logical_ground )* factor*unit
            anaSig = AnalogSignal( signal , sampling_rate = sampling_rate ,name = label)
            if lazy:
                #TODO
                anaSig.lazy_shape = None
            anaSig.annotate(channel_index = c)
            anaSig.annotate(ground = ground)
            seg.analogsignals.append( anaSig )
        # Average rate across channels, used to convert trigger sample
        # positions into times below.
        sampling_rate = np.mean([ anaSig.sampling_rate for anaSig in seg.analogsignals ])*pq.Hz
        # Read trigger
        # Each trigger record is 6 bytes: uint32 sample position + uint16
        # label. Triggers before the first one or past the data are dropped.
        f.seek(Trigger_Area,0)
        ea = EventArray()
        if not lazy:
            labels = [ ]
            times = [ ]
            first_trig = 0
            for i in range(0,Tigger_Area_Length/6) :
                pos , label = f.read_f('IH')
                if i == 0:
                    first_trig = pos
                if ( pos > first_trig ) and (pos < rawdata.shape[0]) :
                    labels.append(str(label))
                    times.append(pos/sampling_rate)
            ea.labels = np.array(labels)
            ea.times = times*pq.s
        else:
            ea.lazy_shape = Tigger_Area_Length/6
        seg.eventarrays.append(ea)
        create_many_to_one_relationship(seg)
        return seg
| bsd-3-clause |
pgonda/servo | tests/wpt/css-tests/css21_dev/xhtml1print/support/fonts/makegsubfonts.py | 820 | 14309 |
import os
import textwrap
from xml.etree import ElementTree
from fontTools.ttLib import TTFont, newTable
from fontTools.misc.psCharStrings import T2CharString
from fontTools.ttLib.tables.otTables import GSUB,\
ScriptList, ScriptRecord, Script, DefaultLangSys,\
FeatureList, FeatureRecord, Feature,\
LookupList, Lookup, AlternateSubst, SingleSubst
# paths
# All inputs live next to this script; the generated fonts are written one
# directory up (outputPath is a template filled in with the lookup type).
directory = os.path.dirname(__file__)
shellSourcePath = os.path.join(directory, "gsubtest-shell.ttx")
shellTempPath = os.path.join(directory, "gsubtest-shell.otf")
featureList = os.path.join(directory, "gsubtest-features.txt")
javascriptData = os.path.join(directory, "gsubtest-features.js")
outputPath = os.path.join(os.path.dirname(directory), "gsubtest-lookup%d")
# U+E000 is the first Unicode private-use-area codepoint; each feature's
# test glyphs are assigned consecutive codepoints starting here.
baseCodepoint = 0xe000
# -------
# Features
# -------
# Read the feature list: one tab-separated record per line; the first field
# is the OpenType feature tag. Blank lines and "#" comments are skipped.
f = open(featureList, "rb")
text = f.read()
f.close()
mapping = []
for line in text.splitlines():
    line = line.strip()
    if not line:
        continue
    if line.startswith("#"):
        continue
    # parse
    values = line.split("\t")
    tag = values.pop(0)
    mapping.append(tag);
# --------
# Outlines
# --------
def addGlyphToCFF(glyphName=None, program=None, private=None, globalSubrs=None, charStringsIndex=None, topDict=None, charStrings=None):
    """Append one glyph to a CFF table, mutating the passed structures.

    Builds a Type 2 charstring from *program* and registers it under
    *glyphName* in the charstring index, the CharStrings mapping and the
    charset, in that order.
    """
    new_charstring = T2CharString(program=program, private=private,
                                  globalSubrs=globalSubrs)
    charStringsIndex.append(new_charstring)
    # The glyph's ID is its position at the current end of the charset.
    new_glyph_id = len(topDict.charset)
    charStrings.charStrings[glyphName] = new_glyph_id
    topDict.charset.append(glyphName)
def makeLookup1():
    """Build gsubtest-lookup1.otf.

    For every feature tag, add a "<tag>.pass" and "<tag>.fail" glyph pair
    plus one GSUB lookup type 1 (single substitution) that swaps the two,
    so enabling the feature flips the rendered glyph from FAIL to PASS.
    """
    # make a variation of the shell TTX data
    f = open(shellSourcePath)
    ttxData = f.read()
    f.close()
    ttxData = ttxData.replace("__familyName__", "gsubtest-lookup1")
    tempShellSourcePath = shellSourcePath + ".temp"
    f = open(tempShellSourcePath, "wb")
    f.write(ttxData)
    f.close()
    # compile the shell
    shell = TTFont(sfntVersion="OTTO")
    shell.importXML(tempShellSourcePath)
    shell.save(shellTempPath)
    os.remove(tempShellSourcePath)
    # load the shell
    shell = TTFont(shellTempPath)
    # grab the PASS and FAIL data
    # The shell font's "F" and "P" glyphs provide the outlines and metrics
    # reused for every generated fail/pass glyph.
    hmtx = shell["hmtx"]
    glyphSet = shell.getGlyphSet()
    failGlyph = glyphSet["F"]
    failGlyph.decompile()
    failGlyphProgram = list(failGlyph.program)
    failGlyphMetrics = hmtx["F"]
    passGlyph = glyphSet["P"]
    passGlyph.decompile()
    passGlyphProgram = list(passGlyph.program)
    passGlyphMetrics = hmtx["P"]
    # grab some tables
    hmtx = shell["hmtx"]
    cmap = shell["cmap"]
    # start the glyph order
    existingGlyphs = [".notdef", "space", "F", "P"]
    glyphOrder = list(existingGlyphs)
    # start the CFF
    cff = shell["CFF "].cff
    globalSubrs = cff.GlobalSubrs
    topDict = cff.topDictIndex[0]
    topDict.charset = existingGlyphs
    private = topDict.Private
    charStrings = topDict.CharStrings
    charStringsIndex = charStrings.charStringsIndex
    features = sorted(mapping)
    # build the outline, hmtx and cmap data
    cp = baseCodepoint
    for index, tag in enumerate(features):
        # tag.pass
        glyphName = "%s.pass" % tag
        glyphOrder.append(glyphName)
        addGlyphToCFF(
            glyphName=glyphName,
            program=passGlyphProgram,
            private=private,
            globalSubrs=globalSubrs,
            charStringsIndex=charStringsIndex,
            topDict=topDict,
            charStrings=charStrings
        )
        hmtx[glyphName] = passGlyphMetrics
        for table in cmap.tables:
            if table.format == 4:
                table.cmap[cp] = glyphName
            else:
                raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
        cp += 1
        # tag.fail
        glyphName = "%s.fail" % tag
        glyphOrder.append(glyphName)
        addGlyphToCFF(
            glyphName=glyphName,
            program=failGlyphProgram,
            private=private,
            globalSubrs=globalSubrs,
            charStringsIndex=charStringsIndex,
            topDict=topDict,
            charStrings=charStrings
        )
        hmtx[glyphName] = failGlyphMetrics
        for table in cmap.tables:
            if table.format == 4:
                table.cmap[cp] = glyphName
            else:
                raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
        # bump this up so that the sequence is the same as the lookup 3 font
        cp += 3
    # set the glyph order
    shell.setGlyphOrder(glyphOrder)
    # start the GSUB
    shell["GSUB"] = newTable("GSUB")
    gsub = shell["GSUB"].table = GSUB()
    gsub.Version = 1.0
    # make a list of all the features we will make
    featureCount = len(features)
    # set up the script list
    # A single DFLT script whose default langsys references every feature.
    scriptList = gsub.ScriptList = ScriptList()
    scriptList.ScriptCount = 1
    scriptList.ScriptRecord = []
    scriptRecord = ScriptRecord()
    scriptList.ScriptRecord.append(scriptRecord)
    scriptRecord.ScriptTag = "DFLT"
    script = scriptRecord.Script = Script()
    defaultLangSys = script.DefaultLangSys = DefaultLangSys()
    defaultLangSys.FeatureCount = featureCount
    defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
    defaultLangSys.ReqFeatureIndex = 65535
    defaultLangSys.LookupOrder = None
    script.LangSysCount = 0
    script.LangSysRecord = []
    # set up the feature list
    # NOTE: this local deliberately shadows the module-level featureList
    # path variable for the rest of this function.
    featureList = gsub.FeatureList = FeatureList()
    featureList.FeatureCount = featureCount
    featureList.FeatureRecord = []
    for index, tag in enumerate(features):
        # feature record
        featureRecord = FeatureRecord()
        featureRecord.FeatureTag = tag
        feature = featureRecord.Feature = Feature()
        featureList.FeatureRecord.append(featureRecord)
        # feature
        feature.FeatureParams = None
        feature.LookupCount = 1
        feature.LookupListIndex = [index]
    # write the lookups
    # One single-substitution lookup per feature: pass <-> fail.
    lookupList = gsub.LookupList = LookupList()
    lookupList.LookupCount = featureCount
    lookupList.Lookup = []
    for tag in features:
        # lookup
        lookup = Lookup()
        lookup.LookupType = 1
        lookup.LookupFlag = 0
        lookup.SubTableCount = 1
        lookup.SubTable = []
        lookupList.Lookup.append(lookup)
        # subtable
        subtable = SingleSubst()
        subtable.Format = 2
        subtable.LookupType = 1
        subtable.mapping = {
            "%s.pass" % tag : "%s.fail" % tag,
            "%s.fail" % tag : "%s.pass" % tag,
        }
        lookup.SubTable.append(subtable)
    path = outputPath % 1 + ".otf"
    if os.path.exists(path):
        os.remove(path)
    shell.save(path)
    # get rid of the shell
    if os.path.exists(shellTempPath):
        os.remove(shellTempPath)
def makeLookup3():
    """Build gsubtest-lookup3.otf.

    For every feature tag, add "<tag>.default" (PASS outline), three
    FAIL-outline alternates "<tag>.alt1..3", plus hidden pass/fail glyphs,
    and one GSUB lookup type 3 (alternate substitution) wired so that
    selecting alternate N maps altN to PASS and everything else to FAIL.
    """
    # make a variation of the shell TTX data
    f = open(shellSourcePath)
    ttxData = f.read()
    f.close()
    ttxData = ttxData.replace("__familyName__", "gsubtest-lookup3")
    tempShellSourcePath = shellSourcePath + ".temp"
    f = open(tempShellSourcePath, "wb")
    f.write(ttxData)
    f.close()
    # compile the shell
    shell = TTFont(sfntVersion="OTTO")
    shell.importXML(tempShellSourcePath)
    shell.save(shellTempPath)
    os.remove(tempShellSourcePath)
    # load the shell
    shell = TTFont(shellTempPath)
    # grab the PASS and FAIL data
    # The shell font's "F" and "P" glyphs provide the outlines and metrics
    # reused for every generated glyph.
    hmtx = shell["hmtx"]
    glyphSet = shell.getGlyphSet()
    failGlyph = glyphSet["F"]
    failGlyph.decompile()
    failGlyphProgram = list(failGlyph.program)
    failGlyphMetrics = hmtx["F"]
    passGlyph = glyphSet["P"]
    passGlyph.decompile()
    passGlyphProgram = list(passGlyph.program)
    passGlyphMetrics = hmtx["P"]
    # grab some tables
    hmtx = shell["hmtx"]
    cmap = shell["cmap"]
    # start the glyph order
    existingGlyphs = [".notdef", "space", "F", "P"]
    glyphOrder = list(existingGlyphs)
    # start the CFF
    cff = shell["CFF "].cff
    globalSubrs = cff.GlobalSubrs
    topDict = cff.topDictIndex[0]
    topDict.charset = existingGlyphs
    private = topDict.Private
    charStrings = topDict.CharStrings
    charStringsIndex = charStrings.charStringsIndex
    features = sorted(mapping)
    # build the outline, hmtx and cmap data
    cp = baseCodepoint
    for index, tag in enumerate(features):
        # tag.pass
        # Not cmap-encoded; only reachable through the GSUB alternates.
        glyphName = "%s.pass" % tag
        glyphOrder.append(glyphName)
        addGlyphToCFF(
            glyphName=glyphName,
            program=passGlyphProgram,
            private=private,
            globalSubrs=globalSubrs,
            charStringsIndex=charStringsIndex,
            topDict=topDict,
            charStrings=charStrings
        )
        hmtx[glyphName] = passGlyphMetrics
        # tag.fail
        # Also not cmap-encoded.
        glyphName = "%s.fail" % tag
        glyphOrder.append(glyphName)
        addGlyphToCFF(
            glyphName=glyphName,
            program=failGlyphProgram,
            private=private,
            globalSubrs=globalSubrs,
            charStringsIndex=charStringsIndex,
            topDict=topDict,
            charStrings=charStrings
        )
        hmtx[glyphName] = failGlyphMetrics
        # tag.default
        glyphName = "%s.default" % tag
        glyphOrder.append(glyphName)
        addGlyphToCFF(
            glyphName=glyphName,
            program=passGlyphProgram,
            private=private,
            globalSubrs=globalSubrs,
            charStringsIndex=charStringsIndex,
            topDict=topDict,
            charStrings=charStrings
        )
        hmtx[glyphName] = passGlyphMetrics
        for table in cmap.tables:
            if table.format == 4:
                table.cmap[cp] = glyphName
            else:
                raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
        cp += 1
        # tag.alt1,2,3
        for i in range(1,4):
            glyphName = "%s.alt%d" % (tag, i)
            glyphOrder.append(glyphName)
            addGlyphToCFF(
                glyphName=glyphName,
                program=failGlyphProgram,
                private=private,
                globalSubrs=globalSubrs,
                charStringsIndex=charStringsIndex,
                topDict=topDict,
                charStrings=charStrings
            )
            hmtx[glyphName] = failGlyphMetrics
            for table in cmap.tables:
                if table.format == 4:
                    table.cmap[cp] = glyphName
                else:
                    raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
                cp += 1
    # set the glyph order
    shell.setGlyphOrder(glyphOrder)
    # start the GSUB
    shell["GSUB"] = newTable("GSUB")
    gsub = shell["GSUB"].table = GSUB()
    gsub.Version = 1.0
    # make a list of all the features we will make
    featureCount = len(features)
    # set up the script list
    # A single DFLT script whose default langsys references every feature.
    scriptList = gsub.ScriptList = ScriptList()
    scriptList.ScriptCount = 1
    scriptList.ScriptRecord = []
    scriptRecord = ScriptRecord()
    scriptList.ScriptRecord.append(scriptRecord)
    scriptRecord.ScriptTag = "DFLT"
    script = scriptRecord.Script = Script()
    defaultLangSys = script.DefaultLangSys = DefaultLangSys()
    defaultLangSys.FeatureCount = featureCount
    defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
    defaultLangSys.ReqFeatureIndex = 65535
    defaultLangSys.LookupOrder = None
    script.LangSysCount = 0
    script.LangSysRecord = []
    # set up the feature list
    # NOTE: this local deliberately shadows the module-level featureList
    # path variable for the rest of this function.
    featureList = gsub.FeatureList = FeatureList()
    featureList.FeatureCount = featureCount
    featureList.FeatureRecord = []
    for index, tag in enumerate(features):
        # feature record
        featureRecord = FeatureRecord()
        featureRecord.FeatureTag = tag
        feature = featureRecord.Feature = Feature()
        featureList.FeatureRecord.append(featureRecord)
        # feature
        feature.FeatureParams = None
        feature.LookupCount = 1
        feature.LookupListIndex = [index]
    # write the lookups
    # One alternate-substitution lookup per feature.
    lookupList = gsub.LookupList = LookupList()
    lookupList.LookupCount = featureCount
    lookupList.Lookup = []
    for tag in features:
        # lookup
        lookup = Lookup()
        lookup.LookupType = 3
        lookup.LookupFlag = 0
        lookup.SubTableCount = 1
        lookup.SubTable = []
        lookupList.Lookup.append(lookup)
        # subtable
        subtable = AlternateSubst()
        subtable.Format = 1
        subtable.LookupType = 3
        subtable.alternates = {
            "%s.default" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.fail" % tag],
            "%s.alt1" % tag : ["%s.pass" % tag, "%s.fail" % tag, "%s.fail" % tag],
            "%s.alt2" % tag : ["%s.fail" % tag, "%s.pass" % tag, "%s.fail" % tag],
            "%s.alt3" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.pass" % tag]
        }
        lookup.SubTable.append(subtable)
    path = outputPath % 3 + ".otf"
    if os.path.exists(path):
        os.remove(path)
    shell.save(path)
    # get rid of the shell
    if os.path.exists(shellTempPath):
        os.remove(shellTempPath)
def makeJavascriptData():
    """Write the gsubtest-features.js companion file.

    Emits a JS object literal mapping each feature tag to the base
    codepoint of its glyph block (4 consecutive codepoints per feature),
    preceded by a comment documenting the pass/fail layout.
    """
    features = sorted(mapping)
    lines = [
        "",
        "/* This file is autogenerated by makegsubfonts.py */",
        "",
        "/* ",
        " Features defined in gsubtest fonts with associated base",
        " codepoints for each feature:",
        "",
        " cp = codepoint for feature featX",
        "",
        " cp default PASS",
        " cp featX=1 FAIL",
        " cp featX=2 FAIL",
        "",
        " cp+1 default FAIL",
        " cp+1 featX=1 PASS",
        " cp+1 featX=2 FAIL",
        "",
        " cp+2 default FAIL",
        " cp+2 featX=1 FAIL",
        " cp+2 featX=2 PASS",
        "",
        "*/",
        "",
        "var gFeatures = {",
    ]
    # One "tag": 0xXXXX entry per feature, 4 codepoints apart.
    codepoint = baseCodepoint
    entries = []
    for tag in features:
        entries.append("\"%s\": 0x%x" % (tag, codepoint))
        codepoint += 4
    lines.append(textwrap.fill(", ".join(entries), initial_indent=" ", subsequent_indent=" "))
    lines.append("};")
    lines.append("")
    if os.path.exists(javascriptData):
        os.remove(javascriptData)
    out = open(javascriptData, "wb")
    out.write("\n".join(lines))
    out.close()
# build fonts
# NOTE: Python 2 script (print statements); run with a python2 interpreter.
print "Making lookup type 1 font..."
makeLookup1()
print "Making lookup type 3 font..."
makeLookup3()
# output javascript data
print "Making javascript data file..."
makeJavascriptData()
keen99/SickRage | lib/requests/packages/chardet/charsetprober.py | 3127 | 1902 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import re
class CharSetProber:
    """Common interface shared by all charset probers.

    Subclasses override feed()/get_charset_name()/get_confidence(); the
    filter_* helpers strip byte ranges that carry no detection signal.
    """

    def __init__(self):
        pass

    def reset(self):
        # Back to the initial "still detecting" state.
        self._mState = constants.eDetecting

    def get_charset_name(self):
        # The base class names no charset of its own.
        return None

    def feed(self, aBuf):
        pass

    def get_state(self):
        return self._mState

    def get_confidence(self):
        return 0.0

    def filter_high_bit_only(self, aBuf):
        # Collapse every run of 7-bit (ASCII) bytes into a single space,
        # keeping only the high-bit bytes for analysis.
        return re.sub(b'([\x00-\x7F])+', b' ', aBuf)

    def filter_without_english_letters(self, aBuf):
        # Collapse every run of A-Z/a-z letters into a single space.
        return re.sub(b'([A-Za-z])+', b' ', aBuf)

    def filter_with_english_letters(self, aBuf):
        # TODO
        return aBuf
| gpl-3.0 |
xiaozhuchacha/OpenBottle | action_earley_srv/scripts/nltk/stem/regexp.py | 7 | 1648 | # Natural Language Toolkit: Stemmers
#
# Copyright (C) 2001-2017 NLTK Project
# Author: Trevor Cohn <tacohn@cs.mu.oz.au>
# Edward Loper <edloper@gmail.com>
# Steven Bird <stevenbird1@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import unicode_literals
import re
from nltk.stem.api import StemmerI
from nltk.compat import python_2_unicode_compatible
@python_2_unicode_compatible
class RegexpStemmer(StemmerI):
    """
    A stemmer that strips morphological affixes by regular-expression
    substitution: every substring of the word matching ``regexp`` is
    removed, provided the word is at least ``min`` characters long.

        >>> from nltk.stem import RegexpStemmer
        >>> st = RegexpStemmer('ing$|s$|e$|able$', min=4)
        >>> st.stem('cars')
        'car'
        >>> st.stem('mass')
        'mas'
        >>> st.stem('was')
        'was'
        >>> st.stem('bee')
        'bee'
        >>> st.stem('compute')
        'comput'
        >>> st.stem('advisable')
        'advis'

    :type regexp: str or regexp
    :param regexp: The regular expression that should be used to
        identify morphological affixes.
    :type min: int
    :param min: The minimum length of string to stem
    """

    def __init__(self, regexp, min=0):
        # Accept either a pre-compiled pattern (anything exposing
        # .pattern) or a plain string to compile here.
        if hasattr(regexp, 'pattern'):
            compiled = regexp
        else:
            compiled = re.compile(regexp)
        self._regexp = compiled
        self._min = min

    def stem(self, word):
        # Words shorter than the minimum are returned untouched.
        if len(word) >= self._min:
            return self._regexp.sub('', word)
        return word

    def __repr__(self):
        return '<RegexpStemmer: {!r}>'.format(self._regexp.pattern)
| mit |
ThinkingBridge/platform_external_chromium_org | chrome/test/functional/chromeos_login.py | 29 | 13674 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
import pyauto_functional # Must be imported before pyauto
import pyauto
import pyauto_errors
import test_utils
sys.path.append('/usr/local') # To make autotest libs importable.
from autotest.cros import cros_ui
from autotest.cros import cryptohome
class ChromeosLogin(pyauto.PyUITest):
  """TestCases for Logging into ChromeOS.

  Exercises the login flow end to end on a device: good/bad credentials,
  guest mode, screen lock/unlock, logout, and profile persistence across
  sessions.  Requires root so it can restart the session manager and wipe
  cryptohome vaults between runs.
  """

  assert os.geteuid() == 0, 'Need to run this test as root'

  def ShouldAutoLogin(self):
    # The tests drive login themselves; suppress the harness auto-login.
    return False

  def setUp(self):
    # We want a clean session_manager instance for every run,
    # so restart ui now.
    cros_ui.stop(allow_fail=True)
    cryptohome.remove_all_vaults()
    cros_ui.start(wait_for_login_prompt=False)
    pyauto.PyUITest.setUp(self)

  def _ValidCredentials(self, account_type='test_google_account'):
    """Obtains a valid username and password from a data file.

    Returns:
      A dictionary with the keys 'username' and 'password'
    """
    return self.GetPrivateInfo()[account_type]

  def testExecuteJavascriptInOOBEWebUI(self):
    """Test that javascript can be executed at the login page."""
    msg = 'test success'
    ret = self.ExecuteJavascriptInOOBEWebUI(
        'window.domAutomationController.send("%s");' % msg)
    self.assertEqual(ret, msg)

  def testGoodLogin(self):
    """Test that login is successful with valid credentials."""
    credentials = self._ValidCredentials()
    self.Login(credentials['username'], credentials['password'])
    login_info = self.GetLoginInfo()
    self.assertTrue(login_info['is_logged_in'], msg='Login failed.')

  def testBadUsername(self):
    """Test that login fails when passed an invalid username."""
    self.assertRaises(
        pyauto_errors.JSONInterfaceError,
        lambda: self.Login('doesnotexist@fakedomain.org', 'badpassword'))
    login_info = self.GetLoginInfo()
    self.assertFalse(login_info['is_logged_in'],
                     msg='Login succeeded, with bad credentials.')

  def testBadPassword(self):
    """Test that login fails when passed an invalid password."""
    credentials = self._ValidCredentials()
    self.assertRaises(
        pyauto_errors.JSONInterfaceError,
        lambda: self.Login(credentials['username'], 'badpassword'))
    login_info = self.GetLoginInfo()
    self.assertFalse(login_info['is_logged_in'],
                     msg='Login succeeded, with bad credentials.')

  def testLoginAsGuest(self):
    """Test we can login with guest mode."""
    self.LoginAsGuest()
    login_info = self.GetLoginInfo()
    self.assertTrue(login_info['is_logged_in'], msg='Not logged in at all.')
    self.assertTrue(login_info['is_guest'], msg='Not logged in as guest.')

  def testLockScreenAfterLogin(self):
    """Test after logging in that the screen can be locked."""
    self.testGoodLogin()
    self.assertFalse(self.GetLoginInfo()['is_screen_locked'],
                     msg='Screen is locked, but the screen was not locked.')
    self.LockScreen()
    login_info = self.GetLoginInfo()
    self.assertTrue(login_info['is_screen_locked'], msg='The screen is not '
                    'locked after attempting to lock the screen.')

  def testLockAndUnlockScreenAfterLogin(self):
    """Test locking and unlocking the screen after logging in."""
    self.testLockScreenAfterLogin()
    self.UnlockScreen(self._ValidCredentials()['password'])
    login_info = self.GetLoginInfo()
    self.assertFalse(login_info['is_screen_locked'],
                     msg='Screen is locked, but it should have been unlocked.')

  def testLockAndUnlockScreenAfterLoginWithBadPassword(self):
    """Test locking and unlocking the screen with the wrong password."""
    self.testLockScreenAfterLogin()
    self.UnlockScreen('not_the_right_password')
    login_info = self.GetLoginInfo()
    self.assertTrue(login_info['is_screen_locked'],
                    msg='Screen is unlock, but it should have been unlocked '
                        'since we attempted to unlock with a bad password')

  def testLoginToCreateNewAccount(self):
    """Test we can login as a guest and create a new account."""
    self.ShowCreateAccountUI()
    # The login hook does not wait for the first tab to load, so we wait here.
    self.assertTrue(
        self.WaitUntil(self.GetActiveTabTitle, expect_retval='Google Accounts'),
        msg='Could not verify that the Accounts tab was opened.')
    login_info = self.GetLoginInfo()
    self.assertTrue(login_info['is_guest'], msg='Not logged in as guest.')

  def testGoodLoginForTransitionedDomainAccount(self):
    """Test that login is successful with valid credentials for a domain.

    ChromeOS only allows GA+ accounts to login, there are also known as
    transitioned accounts.
    """
    credentials = self._ValidCredentials(account_type='test_domain_account')
    self.Login(credentials['username'], credentials['password'])
    login_info = self.GetLoginInfo()
    self.assertTrue(login_info['is_logged_in'], msg='Login failed.')

  def testNavigateAfterLogin(self):
    """Test that page navigation is successful after logging in."""
    self.testGoodLogin()
    self.NavigateToURL("http://www.google.com")
    self.assertEqual(self.GetActiveTabTitle(), 'Google',
                     msg='Unable to navigate to Google and verify tab title.')

  def testSigningOutFromLockedScreen(self):
    """Test logout can be performed from the lock screen."""
    self.testLockScreenAfterLogin()
    self.SignoutInScreenLocker()
    self.assertFalse(self.GetLoginInfo()['is_logged_in'],
                     msg='Still logged in when we should be logged out.')

  def testLoginSequenceSanity(self):
    """Test that the interface can maintain a connection after multiple logins.

    This test is to verify the stability of the automation interface.
    """
    self.testGoodLogin()
    self.Logout()
    self.testBadPassword()
    self.testLoginAsGuest()
    self.Logout()
    self.testLoginToCreateNewAccount()

  def testLogoutWithNoWindows(self):
    """Verify logout when no browser windows are present."""
    self.testGoodLogin()
    # Open a few windows, then close every one before logging out.
    for i in range(5):
      self.OpenNewBrowserWindow(True)
    for _ in range(self.GetBrowserWindowCount()):
      self.CloseBrowserWindow(0)
    self.assertEqual(0, self.GetBrowserWindowCount(),
                     msg='Could not close all browser windows')
    self.Logout()
    self.testGoodLogin()

  def testInitialLoginState(self):
    """Verify basic state of browser windows at initial login."""
    self.testGoodLogin()
    # Should have 1 browser window with 1 tab.
    info = self.GetBrowserInfo()
    self.assertEqual(1, len(info['windows']))
    self.assertFalse(info['windows'][0]['incognito'],
                     msg='Did not expect incognito window after login')
    self.assertEqual(1, len(info['windows'][0]['tabs']))
    self.OpenNewBrowserWindow(True)
    # Should have 2 regular browser windows.
    info = self.GetBrowserInfo()
    self.assertEqual(2, len(info['windows']))
    self.assertFalse(info['windows'][0]['incognito'])
    self.assertFalse(info['windows'][1]['incognito'],
                     msg='Expected a regular new window.')

  def testProfilePreservedBetweenLogins(self):
    """Verify that profile is preserved between two login sessions.

    Also verify Local State.
    """
    self.testGoodLogin()
    # Build up some history and setup state in "Local State".
    url = self.GetHttpURLForDataPath('title2.html')
    self.NavigateToURL(url)
    # chromeos often takes a while to register URLs into history.
    self.assertTrue(self.WaitUntil(lambda: self.GetHistoryInfo().History()),
                    msg='Could not open %s successfully' % url)
    # Marker files: one outside the user vault, one inside it.
    open('/home/chronos/__magic__', 'w').close()
    open('/home/chronos/user/__magic__', 'w').close()

    def _VerifyProfile():
      history = self.GetHistoryInfo().History()
      self.assertEqual(1, len(history))
      self.assertEqual(url, history[0]['url'])
      self.assertTrue(os.path.exists('/home/chronos/__magic__'),
          msg='/home/chronos/__magic__ did not persist across login sessions')
      self.assertTrue(os.path.exists('/home/chronos/user/__magic__'),
          msg='/home/chronos/user/__magic__ did not persist across '
              'login sessions')

    _VerifyProfile()
    self.Logout()
    self.testGoodLogin()  # Re-login with same account.
    _VerifyProfile()

  def testGuestCrosh(self):
    """Verify we can use crosh in guest mode."""
    self.LoginAsGuest()
    login_info = self.GetLoginInfo()
    self.assertTrue(login_info['is_logged_in'], msg='Not logged in at all.')
    self.assertTrue(login_info['is_guest'], msg='Not logged in as guest.')
    for _ in range(self.GetBrowserWindowCount()):
      self.CloseBrowserWindow(0)
    test_utils.OpenCroshVerification(self)

    # Verify crosh prompt.
    self.WaitForHtermText(text='crosh> ',
        msg='Could not find "crosh> " prompt')
    self.assertTrue(
        self.GetHtermRowsText(start=0, end=2).endswith('crosh> '),
        msg='Could not find "crosh> " prompt')

    # Run a crosh command.
    self.SendKeysToHterm('help\\n')
    self.WaitForHtermText(text='help_advanced',
        msg='Could not find "help_advanced" in help output.')

    # Exit crosh and close tab.
    self.SendKeysToHterm('exit\\n')
    self.WaitForHtermText(text='command crosh completed with exit code 0',
        msg='Could not exit crosh.')

  def testCroshPreservedBetweenLogins(self):
    """Verify user can continue after re-login."""
    self.testGoodLogin()
    self.CloseBrowserWindow(0)
    test_utils.OpenCroshVerification(self)

    # Verify crosh prompt.
    self.WaitForHtermText(text='crosh> ',
        msg='Could not find "crosh> " prompt')
    self.assertTrue(
        self.GetHtermRowsText(start=0, end=2).endswith('crosh> '),
        msg='Could not find "crosh> " prompt')

    # Open 2 other tabs.
    self.AppendTab(self.GetHttpURLForDataPath('title2.html'))
    self.assertEqual('Title Of Awesomeness', self.GetActiveTabTitle(),
                     msg='Unable to naviage to title2.html and '
                         'verify tab title.')
    self.AppendTab(self.GetHttpURLForDataPath('settings', 'image_page.html'))
    self.assertEqual('Show an image', self.GetActiveTabTitle(),
                     msg='Unable to navigate to image_page and '
                         'verify tab title.')
    self.Logout()
    self.testGoodLogin()  # Re-Login with same account.

    # Verify 3 tabs are still open after re-login.
    self.assertEqual(3, len(self.GetBrowserInfo()['windows'][0]['tabs']))
class ChromeosLoginCachedCredentialsAddUser(pyauto.PyUITest):
  """TestCase for failing to add a user with invalid proxy settings."""

  assert os.geteuid() == 0, 'Need to run this test as root'

  def ShouldAutoLogin(self):
    # The tests drive login themselves; suppress pyauto's automatic login.
    return False

  def setUp(self):
    # We want a clean session_manager instance for every run,
    # so restart ui now.
    cros_ui.stop(allow_fail=True)
    cryptohome.remove_all_vaults()
    cros_ui.start(wait_for_login_prompt=False)
    pyauto.PyUITest.setUp(self)

  def tearDown(self):
    # Restore direct connectivity so later tests are unaffected.
    self.ResetProxySettingsOnChromeOS()
    pyauto.PyUITest.tearDown(self)

  def _ValidCredentials(self, account_type='test_google_account'):
    """Obtains a valid username and password from a data file.

    Returns:
      A dictionary with the keys 'username' and 'password'
    """
    return self.GetPrivateInfo()[account_type]

  def testCachedCredentialsAddUser(self):
    """Test that login (adding a new user) fails behind a bogus proxy."""
    # Point all traffic at an unreachable proxy so the online auth check
    # cannot succeed.
    self.SetSharedProxies(True)
    proxy_config = {
        'mode': 'fixed_servers',
        'server': '127.0.0.1'
    }
    # Fixed: removed a stray trailing semicolon; the old mid-function
    # string literal has been folded into the method docstring above.
    self.SetProxySettingOnChromeOS(proxy_config)
    credentials = self._ValidCredentials()
    self.assertRaises(
        pyauto_errors.JSONInterfaceError,
        lambda: self.Login(credentials['username'],
                           credentials['password'])
    )
class ChromeosLoginCachedCredentialsUserPod(ChromeosLogin):
  """TestCase for Logging into ChromeOS with cached credentials and
  invalid proxy settings.
  """

  assert os.geteuid() == 0, 'Need to run this test as root'

  def ShouldAutoLogin(self):
    # The tests drive login themselves; suppress pyauto's automatic login.
    return False

  def setUp(self):
    # We want a clean session_manager instance for every run,
    # so restart ui now.
    cros_ui.stop(allow_fail=True)
    cryptohome.remove_all_vaults()
    cros_ui.start(wait_for_login_prompt=False)
    pyauto.PyUITest.setUp(self)

  def tearDown(self):
    # Restore direct connectivity so later tests are unaffected.
    self.ResetProxySettingsOnChromeOS()
    pyauto.PyUITest.tearDown(self)

  def _ValidCredentials(self, account_type='test_google_account'):
    """Obtains a valid username and password from a data file.

    Returns:
      A dictionary with the keys 'username' and 'password'
    """
    return self.GetPrivateInfo()[account_type]

  def testCachedCredentialsUserPod(self):
    """Test that we can login without connectivity if we have so before.

    This test is currently disabled because testGoodLogin tries to
    add a user after setting proxies, which is supposed to fail. To
    make it pass we need a hook that simply calls Login on the delegate
    in webui_login_display.cc ::ShowSigninScreenForCreds.
    """
    self.testGoodLogin()
    self.Logout()
    self.SetSharedProxies(True)
    proxy_config = {
        'mode': 'fixed_servers',
        'server': '127.0.0.1'
    }
    # Fixed: removed a stray trailing semicolon.
    self.SetProxySettingOnChromeOS(proxy_config)
    self.testGoodLogin()
    self.ResetProxySettingsOnChromeOS()
if __name__ == '__main__':
  # Run the pyauto functional test harness when invoked directly.
  pyauto_functional.Main()
| bsd-3-clause |
tzewangdorje/SIPserv | Twisted-13.1.0/twisted/internet/test/test_inotify.py | 42 | 16494 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the inotify wrapper in L{twisted.internet.inotify}.
"""
from twisted.internet import defer, reactor
from twisted.python import filepath, runtime
from twisted.trial import unittest
try:
from twisted.python import _inotify
except ImportError:
inotify = None
else:
from twisted.internet import inotify
class TestINotify(unittest.TestCase):
    """
    Define all the tests for the basic functionality exposed by
    L{inotify.INotify}.
    """
    if not runtime.platform.supportsINotify():
        skip = "This platform doesn't support INotify."

    def setUp(self):
        # Each test gets a fresh temporary directory watched by a fresh
        # INotify instance; the connection is torn down automatically.
        self.dirname = filepath.FilePath(self.mktemp())
        self.dirname.createDirectory()
        self.inotify = inotify.INotify()
        self.inotify.startReading()
        self.addCleanup(self.inotify.loseConnection)
    def test_initializationErrors(self):
        """
        L{inotify.INotify} emits a C{RuntimeError} when initialized
        in an environment that doesn't support inotify as we expect it.

        We just try to raise an exception for every possible case in
        the for loop in L{inotify.INotify._inotify__init__}.
        """
        class FakeINotify:
            # Stand-in for the _inotify C module whose init() always fails.
            def init(self):
                raise inotify.INotifyError()
        self.patch(inotify.INotify, '_inotify', FakeINotify())
        self.assertRaises(inotify.INotifyError, inotify.INotify)
    def _notificationTest(self, mask, operation, expectedPath=None):
        """
        Test notification from some filesystem operation.

        @param mask: The event mask to use when setting up the watch.

        @param operation: A function which will be called with the
            name of a file in the watched directory and which should
            trigger the event.

        @param expectedPath: Optionally, the name of the path which is
            expected to come back in the notification event; this will
            also be passed to C{operation} (primarily useful when the
            operation is being done to the directory itself, not a
            file in it).

        @return: A L{Deferred} which fires successfully when the
            expected event has been received or fails otherwise.
        """
        if expectedPath is None:
            expectedPath = self.dirname.child("foo.bar")
        notified = defer.Deferred()
        # Python 2 tuple-parameter unpacking of the (watch, path, mask)
        # event delivered by the watch callback below.
        def cbNotified((watch, filename, events)):
            self.assertEqual(filename, expectedPath)
            self.assertTrue(events & mask)
        notified.addCallback(cbNotified)

        self.inotify.watch(
            self.dirname, mask=mask,
            callbacks=[lambda *args: notified.callback(args)])
        operation(expectedPath)
        return notified
    def test_access(self):
        """
        Reading from a file in a monitored directory sends an
        C{inotify.IN_ACCESS} event to the callback.
        """
        def operation(path):
            # Write first so there is content to read back.
            path.setContent("foo")
            path.getContent()

        return self._notificationTest(inotify.IN_ACCESS, operation)


    def test_modify(self):
        """
        Writing to a file in a monitored directory sends an
        C{inotify.IN_MODIFY} event to the callback.
        """
        def operation(path):
            fObj = path.open("w")
            fObj.write('foo')
            fObj.close()

        return self._notificationTest(inotify.IN_MODIFY, operation)


    def test_attrib(self):
        """
        Changing the metadata of a file in a monitored directory
        sends an C{inotify.IN_ATTRIB} event to the callback.
        """
        def operation(path):
            # The second touch only updates timestamps, which is a pure
            # metadata change and therefore triggers IN_ATTRIB.
            path.touch()
            path.touch()

        return self._notificationTest(inotify.IN_ATTRIB, operation)


    def test_closeWrite(self):
        """
        Closing a file which was open for writing in a monitored
        directory sends an C{inotify.IN_CLOSE_WRITE} event to the
        callback.
        """
        def operation(path):
            fObj = path.open("w")
            fObj.close()

        return self._notificationTest(inotify.IN_CLOSE_WRITE, operation)


    def test_closeNoWrite(self):
        """
        Closing a file which was open for reading but not writing in a
        monitored directory sends an C{inotify.IN_CLOSE_NOWRITE} event
        to the callback.
        """
        def operation(path):
            path.touch()
            fObj = path.open("r")
            fObj.close()

        return self._notificationTest(inotify.IN_CLOSE_NOWRITE, operation)


    def test_open(self):
        """
        Opening a file in a monitored directory sends an
        C{inotify.IN_OPEN} event to the callback.
        """
        def operation(path):
            fObj = path.open("w")
            fObj.close()

        return self._notificationTest(inotify.IN_OPEN, operation)
    def test_movedFrom(self):
        """
        Moving a file out of a monitored directory sends an
        C{inotify.IN_MOVED_FROM} event to the callback.
        """
        def operation(path):
            # Create the file, then move it out of the watched directory.
            fObj = path.open("w")
            fObj.close()
            path.moveTo(filepath.FilePath(self.mktemp()))

        return self._notificationTest(inotify.IN_MOVED_FROM, operation)


    def test_movedTo(self):
        """
        Moving a file into a monitored directory sends an
        C{inotify.IN_MOVED_TO} event to the callback.
        """
        def operation(path):
            # Create the file outside the watched directory, then move it in.
            p = filepath.FilePath(self.mktemp())
            p.touch()
            p.moveTo(path)

        return self._notificationTest(inotify.IN_MOVED_TO, operation)


    def test_create(self):
        """
        Creating a file in a monitored directory sends an
        C{inotify.IN_CREATE} event to the callback.
        """
        def operation(path):
            fObj = path.open("w")
            fObj.close()

        return self._notificationTest(inotify.IN_CREATE, operation)


    def test_delete(self):
        """
        Deleting a file in a monitored directory sends an
        C{inotify.IN_DELETE} event to the callback.
        """
        def operation(path):
            path.touch()
            path.remove()

        return self._notificationTest(inotify.IN_DELETE, operation)


    def test_deleteSelf(self):
        """
        Deleting the monitored directory itself sends an
        C{inotify.IN_DELETE_SELF} event to the callback.
        """
        def operation(path):
            path.remove()

        # The event is expected on the watched directory, not a child.
        return self._notificationTest(
            inotify.IN_DELETE_SELF, operation, expectedPath=self.dirname)


    def test_moveSelf(self):
        """
        Renaming the monitored directory itself sends an
        C{inotify.IN_MOVE_SELF} event to the callback.
        """
        def operation(path):
            path.moveTo(filepath.FilePath(self.mktemp()))

        return self._notificationTest(
            inotify.IN_MOVE_SELF, operation, expectedPath=self.dirname)
    def test_simpleSubdirectoryAutoAdd(self):
        """
        L{inotify.INotify} when initialized with autoAdd==True also adds
        the created subdirectories to the watchlist.
        """
        def _callback(wp, filename, mask):
            # We are notified before we actually process new
            # directories, so we need to defer this check.
            def _():
                try:
                    self.assertTrue(self.inotify._isWatched(subdir))
                    d.callback(None)
                except Exception:
                    d.errback()
            reactor.callLater(0, _)

        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
        self.inotify.watch(
            self.dirname, mask=checkMask, autoAdd=True,
            callbacks=[_callback])
        subdir = self.dirname.child('test')
        # The Deferred fires (from _callback above) once the new
        # subdirectory has been added to the watchlist.
        d = defer.Deferred()
        subdir.createDirectory()
        return d
    def test_simpleDeleteDirectory(self):
        """
        L{inotify.INotify} removes a directory from the watchlist when
        it's removed from the filesystem.
        """
        # 'calls' tracks whether we are handling the first event (the
        # subdirectory creation) or the second (its removal).
        calls = []

        def _callback(wp, filename, mask):
            # We are notified before we actually process new
            # directories, so we need to defer this check.
            def _():
                try:
                    self.assertTrue(self.inotify._isWatched(subdir))
                    subdir.remove()
                except Exception:
                    d.errback()

            def _eb():
                # second call, we have just removed the subdir
                try:
                    self.assertTrue(not self.inotify._isWatched(subdir))
                    d.callback(None)
                except Exception:
                    d.errback()

            if not calls:
                # first call, it's the create subdir
                calls.append(filename)
                reactor.callLater(0, _)
            else:
                reactor.callLater(0, _eb)

        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
        self.inotify.watch(
            self.dirname, mask=checkMask, autoAdd=True,
            callbacks=[_callback])
        subdir = self.dirname.child('test')
        d = defer.Deferred()
        subdir.createDirectory()
        return d
def test_ignoreDirectory(self):
"""
L{inotify.INotify.ignore} removes a directory from the watchlist
"""
self.inotify.watch(self.dirname, autoAdd=True)
self.assertTrue(self.inotify._isWatched(self.dirname))
self.inotify.ignore(self.dirname)
self.assertFalse(self.inotify._isWatched(self.dirname))
def test_humanReadableMask(self):
"""
L{inotify.humaReadableMask} translates all the possible event
masks to a human readable string.
"""
for mask, value in inotify._FLAG_TO_HUMAN:
self.assertEqual(inotify.humanReadableMask(mask)[0], value)
checkMask = (
inotify.IN_CLOSE_WRITE | inotify.IN_ACCESS | inotify.IN_OPEN)
self.assertEqual(
set(inotify.humanReadableMask(checkMask)),
set(['close_write', 'access', 'open']))
def test_recursiveWatch(self):
"""
L{inotify.INotify.watch} with recursive==True will add all the
subdirectories under the given path to the watchlist.
"""
subdir = self.dirname.child('test')
subdir2 = subdir.child('test2')
subdir3 = subdir2.child('test3')
subdir3.makedirs()
dirs = [subdir, subdir2, subdir3]
self.inotify.watch(self.dirname, recursive=True)
# let's even call this twice so that we test that nothing breaks
self.inotify.watch(self.dirname, recursive=True)
for d in dirs:
self.assertTrue(self.inotify._isWatched(d))
def test_connectionLostError(self):
"""
L{inotify.INotify.connectionLost} if there's a problem while closing
the fd shouldn't raise the exception but should log the error
"""
import os
in_ = inotify.INotify()
os.close(in_._fd)
in_.loseConnection()
self.flushLoggedErrors()
    def test_noAutoAddSubdirectory(self):
        """
        L{inotify.INotify.watch} with autoAdd==False will stop inotify
        from watching subdirectories created under the watched one.
        """
        def _callback(wp, fp, mask):
            # We are notified before we actually process new
            # directories, so we need to defer this check.
            def _():
                try:
                    self.assertFalse(self.inotify._isWatched(subdir.path))
                    d.callback(None)
                except Exception:
                    d.errback()
            reactor.callLater(0, _)

        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
        # autoAdd=False: the new subdirectory must NOT be added to the
        # watchlist even though its creation is still reported.
        self.inotify.watch(
            self.dirname, mask=checkMask, autoAdd=False,
            callbacks=[_callback])
        subdir = self.dirname.child('test')
        d = defer.Deferred()
        subdir.createDirectory()
        return d
    def test_seriesOfWatchAndIgnore(self):
        """
        L{inotify.INotify} will watch a filepath for events even if the same
        path is repeatedly added/removed/re-added to the watchpoints.
        """
        expectedPath = self.dirname.child("foo.bar2")
        expectedPath.touch()

        notified = defer.Deferred()
        # Python 2 tuple-parameter unpacking of the delivered event.
        def cbNotified((ignored, filename, events)):
            self.assertEqual(filename, expectedPath)
            self.assertTrue(events & inotify.IN_DELETE_SELF)

        def callIt(*args):
            notified.callback(args)

        # Watch, ignore, watch again to get into the state being tested.
        self.assertTrue(self.inotify.watch(expectedPath, callbacks=[callIt]))
        self.inotify.ignore(expectedPath)
        self.assertTrue(
            self.inotify.watch(
                expectedPath, mask=inotify.IN_DELETE_SELF, callbacks=[callIt]))
        notified.addCallback(cbNotified)

        # Apparently in kernel version < 2.6.25, inotify has a bug in the way
        # similar events are coalesced.  So, be sure to generate a different
        # event here than the touch() at the top of this method might have
        # generated.
        expectedPath.remove()

        return notified
    def test_ignoreFilePath(self):
        """
        L{inotify.INotify} will ignore a filepath after it has been removed
        from the watch list.
        """
        expectedPath = self.dirname.child("foo.bar2")
        expectedPath.touch()
        expectedPath2 = self.dirname.child("foo.bar3")
        expectedPath2.touch()

        notified = defer.Deferred()
        # Only the still-watched path (expectedPath2) may notify us.
        def cbNotified((ignored, filename, events)):
            self.assertEqual(filename, expectedPath2)
            self.assertTrue(events & inotify.IN_DELETE_SELF)

        def callIt(*args):
            notified.callback(args)

        self.assertTrue(
            self.inotify.watch(
                expectedPath, inotify.IN_DELETE_SELF, callbacks=[callIt]))
        notified.addCallback(cbNotified)

        self.assertTrue(
            self.inotify.watch(
                expectedPath2, inotify.IN_DELETE_SELF, callbacks=[callIt]))

        # Stop watching the first path, then delete both; only the second
        # deletion should produce an event.
        self.inotify.ignore(expectedPath)

        expectedPath.remove()
        expectedPath2.remove()

        return notified
def test_ignoreNonWatchedFile(self):
"""
L{inotify.INotify} will raise KeyError if a non-watched filepath is
ignored.
"""
expectedPath = self.dirname.child("foo.ignored")
expectedPath.touch()
self.assertRaises(KeyError, self.inotify.ignore, expectedPath)
    def test_complexSubdirectoryAutoAdd(self):
        """
        L{inotify.INotify} with autoAdd==True for a watched path
        generates events for every file or directory already present
        in a newly created subdirectory under the watched one.

        This tests that we solve a race condition in inotify even though
        we may generate duplicate events.
        """
        calls = set()
        def _callback(wp, filename, mask):
            calls.add(filename)
            # Six unique paths are expected: three directories plus the
            # three files created in them below.
            if len(calls) == 6:
                try:
                    self.assertTrue(self.inotify._isWatched(subdir))
                    self.assertTrue(self.inotify._isWatched(subdir2))
                    self.assertTrue(self.inotify._isWatched(subdir3))
                    created = someFiles + [subdir, subdir2, subdir3]
                    self.assertEqual(len(calls), len(created))
                    self.assertEqual(calls, set(created))
                except Exception:
                    d.errback()
                else:
                    d.callback(None)

        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
        self.inotify.watch(
            self.dirname, mask=checkMask, autoAdd=True,
            callbacks=[_callback])
        subdir = self.dirname.child('test')
        subdir2 = subdir.child('test2')
        subdir3 = subdir2.child('test3')
        d = defer.Deferred()
        subdir3.makedirs()

        someFiles = [subdir.child('file1.dat'),
                     subdir2.child('file2.dat'),
                     subdir3.child('file3.dat')]
        # Add some files in pretty much all the directories so that we
        # see that we process all of them.
        for i, filename in enumerate(someFiles):
            filename.setContent(filename.path)
        return d
| gpl-3.0 |
martinbuc/missionplanner | packages/IronPython.StdLib.2.7.4/content/Lib/py_compile.py | 53 | 6100 | """Routine to "compile" a .py file to a .pyc (or .pyo) file.
This module has intimate knowledge of the format of .pyc files.
"""
import __builtin__
import imp
import marshal
import os
import sys
import traceback
MAGIC = imp.get_magic()
__all__ = ["compile", "main", "PyCompileError"]
class PyCompileError(Exception):
    """Raised when an error occurs while attempting to compile the file.

    To raise this exception, use

        raise PyCompileError(exc_type, exc_value, file[, msg])

    where

        exc_type:  exception type to be used in the error message;
                   the type name can be accessed as the class variable
                   'exc_type_name'
        exc_value: exception value to be used in the error message;
                   can be accessed as the class variable 'exc_value'
        file:      name of the file being compiled, to be used in the
                   error message; can be accessed as the class variable
                   'file'
        msg:       string message to be written as the error message;
                   if no value is given, a default exception message is
                   used, consistent with 'standard' py_compile output;
                   the message (or default) can be accessed as the
                   class variable 'msg'
    """

    def __init__(self, exc_type, exc_value, file, msg=''):
        type_name = exc_type.__name__
        if exc_type is SyntaxError:
            # Reuse the standard traceback rendering, but substitute the
            # real file name for the generic "<string>" placeholder.
            rendered = ''.join(
                traceback.format_exception_only(exc_type, exc_value))
            default_msg = rendered.replace('File "<string>"',
                                           'File "%s"' % file)
        else:
            default_msg = "Sorry: %s: %s" % (type_name, exc_value)

        Exception.__init__(self, msg or default_msg,
                           type_name, exc_value, file)
        self.exc_type_name = type_name
        self.exc_value = exc_value
        self.file = file
        self.msg = msg or default_msg

    def __str__(self):
        # The formatted message doubles as the string form of the exception.
        return self.msg
def wr_long(f, x):
    """Internal; write a 32-bit int to a file in little-endian order."""
    # Emit the four bytes from least significant to most significant.
    for shift in (0, 8, 16, 24):
        f.write(chr((x >> shift) & 0xff))
def compile(file, cfile=None, dfile=None, doraise=False):
    """Byte-compile one Python source file to Python bytecode.

    Arguments:

    file:    source filename
    cfile:   target filename; defaults to source with 'c' or 'o' appended
             ('c' normally, 'o' in optimizing mode, giving .pyc or .pyo)
    dfile:   purported filename; defaults to source (this is the filename
             that will show up in error messages)
    doraise: flag indicating whether or not an exception should be
             raised when a compile error is found. If an exception
             occurs and this flag is set to False, a string
             indicating the nature of the exception will be printed,
             and the function will return to the caller. If an
             exception occurs and this flag is set to True, a
             PyCompileError exception will be raised.

    Note that it isn't necessary to byte-compile Python modules for
    execution efficiency -- Python itself byte-compiles a module when
    it is loaded, and if it can, writes out the bytecode to the
    corresponding .pyc (or .pyo) file.

    However, if a Python installation is shared between users, it is a
    good idea to byte-compile all modules upon installation, since
    other users may not be able to write in the source directories,
    and thus they won't be able to write the .pyc/.pyo file, and then
    they would be byte-compiling every module each time it is loaded.
    This can slow down program start-up considerably.

    See compileall.py for a script/module that uses this module to
    byte-compile all installed files (or all files in selected
    directories).
    """
    with open(file, 'U') as f:
        try:
            # Prefer the mtime of the open file object; fall back to a
            # plain stat() where fstat is unavailable.
            timestamp = long(os.fstat(f.fileno()).st_mtime)
        except AttributeError:
            timestamp = long(os.stat(file).st_mtime)
        codestring = f.read()
    try:
        codeobject = __builtin__.compile(codestring, dfile or file,'exec')
    except Exception,err:
        py_exc = PyCompileError(err.__class__,err.args,dfile or file)
        if doraise:
            raise py_exc
        else:
            sys.stderr.write(py_exc.msg + '\n')
            return
    if cfile is None:
        # __debug__ is False under -O, producing .pyo instead of .pyc.
        cfile = file + (__debug__ and 'c' or 'o')
    with open(cfile, 'wb') as fc:
        # Write a zero placeholder where the magic number belongs; it is
        # replaced with the real MAGIC only after a successful marshal
        # dump, so a partially written file is never treated as valid.
        fc.write('\0\0\0\0')
        wr_long(fc, timestamp)
        marshal.dump(codeobject, fc)
        fc.flush()
        fc.seek(0, 0)
        fc.write(MAGIC)
def main(args=None):
    """Compile several source files.

    The files named in 'args' (or on the command line, if 'args' is
    not specified) are compiled and the resulting bytecode is cached
    in the normal manner.  This function does not search a directory
    structure to locate source files; it only compiles files named
    explicitly.  If '-' is the only parameter in args, the list of
    files is taken from standard input.
    """
    if args is None:
        args = sys.argv[1:]
    exit_status = 0
    if args == ['-']:
        # Filenames arrive one per line on stdin; an empty read means EOF.
        while True:
            line = sys.stdin.readline()
            if not line:
                break
            try:
                compile(line.rstrip('\n'), doraise=True)
            except PyCompileError as error:
                exit_status = 1
                sys.stderr.write("%s\n" % error.msg)
            except IOError as error:
                exit_status = 1
                sys.stderr.write("%s\n" % error)
    else:
        for filename in args:
            try:
                compile(filename, doraise=True)
            except PyCompileError as error:
                # Nonzero return value indicates at least one failure.
                exit_status = 1
                sys.stderr.write(error.msg)
    return exit_status
if __name__ == "__main__":
    # Exit with the status from main(): 0 on success, 1 on any failure.
    sys.exit(main())
| gpl-3.0 |
alviano/aspino | tests/sat/Models/c415.100.UNSAT.dimacs.test.py | 5 | 5236 | input = """
c num blocks = 1
c num vars = 100
c minblockids[0] = 1
c maxblockids[0] = 100
p cnf 100 415
-71 -62 -94 0
-33 -74 93 0
16 -58 -2 0
-65 -69 56 0
12 -46 -68 0
6 14 7 0
-39 -31 -87 0
-47 -71 75 0
11 -85 -8 0
-49 -64 63 0
-8 -1 15 0
-21 -60 -40 0
-68 -71 -85 0
-68 49 -55 0
51 17 -40 0
-51 -82 -59 0
-53 45 -41 0
85 -44 34 0
-12 46 -66 0
100 -19 -93 0
-75 -32 82 0
78 -31 70 0
26 -65 -74 0
-57 88 -51 0
18 94 48 0
-71 62 -83 0
9 -91 -36 0
15 -66 78 0
41 -71 42 0
99 -70 61 0
-15 59 39 0
24 -10 -99 0
2 -52 -93 0
-18 -42 74 0
40 46 58 0
-89 99 13 0
-2 20 22 0
70 29 -92 0
-52 -26 20 0
-96 86 -2 0
12 -73 -55 0
-58 89 16 0
-61 -78 -32 0
89 -47 17 0
71 16 67 0
-93 -22 -30 0
-91 -14 95 0
-53 -27 48 0
63 2 -22 0
46 -13 -99 0
-21 -98 -23 0
23 93 68 0
46 73 -11 0
62 -84 83 0
-71 -40 4 0
-59 -35 64 0
26 -35 -98 0
90 -78 66 0
-53 -50 79 0
64 33 62 0
64 2 39 0
32 -87 -19 0
-41 -12 13 0
21 -43 -98 0
66 96 -72 0
37 87 -47 0
65 22 61 0
90 -63 32 0
100 2 -52 0
-41 77 -5 0
100 -71 -47 0
-72 30 95 0
29 -25 94 0
17 -93 47 0
-23 -13 39 0
64 76 43 0
73 17 63 0
2 91 31 0
36 28 63 0
55 39 93 0
77 72 -80 0
-64 35 -28 0
41 -58 -72 0
-85 98 -99 0
-62 91 15 0
-71 -99 39 0
-12 -78 47 0
59 23 -50 0
49 69 99 0
87 -71 77 0
13 35 -43 0
-12 -49 50 0
-18 -62 -94 0
-60 1 57 0
9 -74 -91 0
-36 -90 -82 0
-2 31 10 0
-65 -90 -59 0
64 -40 27 0
10 84 57 0
71 -3 31 0
40 78 -86 0
-94 28 -37 0
61 100 -59 0
1 22 8 0
27 -76 94 0
-94 86 80 0
57 88 -12 0
36 41 86 0
-82 -62 -88 0
26 -27 -15 0
27 59 -98 0
38 69 14 0
-96 -13 8 0
-46 -97 74 0
81 -45 21 0
33 -88 17 0
35 5 -4 0
90 -83 89 0
65 36 13 0
25 -52 -65 0
-58 63 -10 0
85 -19 99 0
40 69 15 0
41 -91 39 0
71 -86 47 0
-19 30 -67 0
54 33 12 0
-32 24 -99 0
-40 -2 -58 0
-60 30 12 0
-46 85 -33 0
-88 74 22 0
42 46 -30 0
35 86 -77 0
-49 94 35 0
-91 81 -74 0
85 44 63 0
45 68 -20 0
-75 -40 -32 0
99 7 50 0
41 33 -19 0
22 -33 -70 0
17 84 -70 0
53 15 -5 0
41 49 -65 0
3 46 72 0
-40 2 78 0
21 59 -31 0
-2 69 62 0
68 61 49 0
12 91 21 0
72 55 50 0
55 -7 -40 0
-37 -77 -80 0
-50 39 71 0
14 51 -10 0
87 -78 69 0
-88 -84 45 0
-50 27 43 0
-24 -44 68 0
7 77 75 0
97 -63 -22 0
-65 -16 96 0
-71 -47 69 0
57 94 -4 0
59 -90 -96 0
-75 -90 93 0
-53 84 98 0
87 -78 -9 0
-15 -25 -23 0
19 34 1 0
-47 -45 43 0
69 -88 99 0
92 -84 -24 0
-89 27 -21 0
47 -81 -20 0
57 -46 27 0
20 -92 -69 0
-27 -11 78 0
80 -100 -69 0
88 -91 92 0
73 -9 67 0
45 13 99 0
-7 71 -55 0
-69 -82 7 0
92 83 -64 0
-99 71 -72 0
-47 100 -33 0
-42 100 67 0
-26 -9 4 0
65 -70 23 0
20 -85 -80 0
-13 64 -51 0
-20 78 -16 0
61 65 50 0
19 58 -24 0
-17 -33 -49 0
84 20 63 0
64 -69 -51 0
-12 -66 -14 0
68 -38 30 0
-21 -89 -74 0
69 -95 -60 0
8 21 -42 0
-77 -43 32 0
-92 -26 89 0
54 43 -10 0
-100 -57 36 0
-23 35 -97 0
-23 65 -84 0
57 46 5 0
37 90 -88 0
62 54 71 0
-21 -40 100 0
68 -87 -23 0
68 48 40 0
-32 34 -99 0
-24 36 -16 0
-67 59 -61 0
-82 -45 -55 0
-80 -82 -45 0
-60 -50 -5 0
-62 64 -58 0
-51 -68 57 0
9 -57 41 0
-60 84 59 0
-26 -55 22 0
70 -21 50 0
6 22 -81 0
27 31 -3 0
77 -9 -45 0
-37 29 97 0
28 98 -59 0
-40 64 72 0
66 42 34 0
41 58 83 0
-90 -86 -50 0
-72 -73 49 0
30 23 5 0
90 -24 16 0
-19 -23 98 0
22 44 1 0
33 89 87 0
-11 31 28 0
16 32 91 0
1 86 95 0
-81 -36 -7 0
-70 -92 44 0
39 -61 17 0
74 -21 41 0
-62 -30 -49 0
10 -27 57 0
80 34 57 0
-85 -49 74 0
16 -86 47 0
-42 27 -68 0
-44 15 -64 0
2 -35 -17 0
-24 -4 81 0
-94 33 -47 0
-49 9 -66 0
-49 -97 -4 0
-66 -48 -62 0
22 -65 -23 0
61 -94 -7 0
93 -30 -48 0
-25 39 1 0
26 99 -32 0
76 26 74 0
-15 31 -67 0
-19 -68 26 0
-55 -36 47 0
-20 -65 -64 0
-94 -55 77 0
67 -47 29 0
71 -21 85 0
24 -41 -69 0
-62 71 -18 0
-62 50 -33 0
-47 41 9 0
75 -51 30 0
81 88 -74 0
-96 44 -23 0
-60 -97 -23 0
-66 -24 -10 0
55 41 -76 0
-56 -88 91 0
-97 35 28 0
-41 -96 32 0
77 -28 99 0
5 -82 -24 0
-69 58 86 0
50 66 -11 0
-67 75 -4 0
90 -39 -69 0
6 -86 -69 0
-58 -97 55 0
67 11 -24 0
88 77 97 0
44 -32 -1 0
25 48 18 0
8 -52 18 0
-56 -90 -21 0
60 53 4 0
86 41 23 0
-57 -26 -97 0
40 49 -54 0
23 -31 84 0
-42 51 91 0
5 72 -47 0
-49 42 -64 0
-92 16 -95 0
-68 80 -97 0
-45 -60 -4 0
-92 -53 -86 0
92 -31 86 0
54 -88 -12 0
-34 -46 70 0
42 61 -14 0
56 -98 -52 0
-100 47 53 0
-68 22 -48 0
94 64 -15 0
40 13 -58 0
-71 37 -67 0
-81 18 -38 0
-9 -28 -62 0
51 62 54 0
23 14 63 0
17 100 -77 0
61 7 -60 0
58 -91 49 0
21 55 -32 0
-54 71 49 0
-54 2 -75 0
33 -87 59 0
-44 61 50 0
36 -15 65 0
17 20 -31 0
81 94 -55 0
-87 85 48 0
-64 -4 58 0
52 -11 -93 0
-26 -37 6 0
-97 -73 -48 0
-85 -54 -87 0
87 -75 68 0
-43 -22 -94 0
-79 -68 -62 0
-38 91 -89 0
-68 -78 -88 0
65 83 -56 0
50 -43 -17 0
51 60 -14 0
39 -40 17 0
55 32 -60 0
-94 -50 60 0
66 -5 -14 0
63 -29 -33 0
-40 3 23 0
-28 88 -99 0
53 33 48 0
30 -67 -14 0
25 22 26 0
83 59 -18 0
41 84 -37 0
-33 -54 -19 0
-92 -26 -30 0
-97 -35 -95 0
91 -48 86 0
22 76 29 0
-49 33 52 0
-38 16 81 0
-46 -89 1 0
61 -14 42 0
-18 -98 51 0
4 91 -55 0
-87 65 -17 0
97 88 -37 0
55 53 -60 0
100 -76 4 0
64 -78 -14 0
-63 -53 -62 0
-31 57 -100 0
-89 -93 72 0
63 38 -99 0
-67 -3 42 0
-82 83 32 0
43 -81 -2 0
6 -97 48 0
4 50 -9 0
-91 -65 -94 0
-20 74 -21 0
32 21 -86 0
-69 67 -61 0
1 87 -92 0
-98 -70 -10 0
14 -71 51 0
-60 26 77 0
42 -28 81 0
97 -41 78 0
-26 -10 -74 0
-58 -79 17 0
-30 -6 49 0
-64 23 22 0
-97 95 94 0
-55 -82 -83 0
28 82 55 0
-50 -87 -77 0
92 94 -26 0
-33 20 70 0
57 -21 -23 0
31 -43 74 0
-86 46 -32 0
"""
output = "UNSAT"
| apache-2.0 |
listyque/TACTIC-Handler | thlib/side/natsort/utils.py | 1 | 15239 | # -*- coding: utf-8 -*-
"""
Utilities and definitions for natsort, mostly all used to define
the _natsort_key function.
"""
from __future__ import (
print_function,
division,
unicode_literals,
absolute_import
)
# Std. lib imports.
import re
from math import isnan
from warnings import warn
from os import curdir, pardir
from os.path import split, splitext
from itertools import islice
from locale import localeconv
# Local imports.
from natsort.ns_enum import ns, _ns
from natsort.unicode_numbers import digits, numeric
from natsort.locale_help import locale_convert, grouper
from natsort.compat.pathlib import PurePath, has_pathlib
from natsort.compat.py23 import (
py23_str,
py23_zip,
PY_VERSION,
)
from natsort.compat.locale import (
dumb_sort,
use_pyicu,
null_string,
)
from natsort.compat.fastnumbers import (
fast_float,
fast_int,
isint,
isfloat,
)
# Group algorithm types for easy extraction
_NUMBER_ALGORITHMS = ns.FLOAT | ns.INT | ns.UNSIGNED | ns.SIGNED | ns.NOEXP
_ALL_BUT_PATH = (ns.F | ns.I | ns.U | ns.S | ns.N | ns.L |
                 ns.IC | ns.LF | ns.G | ns.UG | ns.TYPESAFE)

# The regex that locates floats - include Unicode numerals.
# Naming scheme: sign/nosign = accept a leading +/-; exp/noexp = accept
# an exponent part; the trailing "_c" variants below additionally accept
# ',' as the decimal separator (for locales that use a comma).
_float_sign_exp_re = r'([-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?|[{0}])'
_float_sign_exp_re = _float_sign_exp_re.format(numeric)
_float_sign_exp_re = re.compile(_float_sign_exp_re, flags=re.U)
_float_nosign_exp_re = r'([0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?|[{0}])'
_float_nosign_exp_re = _float_nosign_exp_re.format(numeric)
_float_nosign_exp_re = re.compile(_float_nosign_exp_re, flags=re.U)
_float_sign_noexp_re = r'([-+]?[0-9]*\.?[0-9]+|[{0}])'
_float_sign_noexp_re = _float_sign_noexp_re.format(numeric)
_float_sign_noexp_re = re.compile(_float_sign_noexp_re, flags=re.U)
_float_nosign_noexp_re = r'([0-9]*\.?[0-9]+|[{0}])'
_float_nosign_noexp_re = _float_nosign_noexp_re.format(numeric)
_float_nosign_noexp_re = re.compile(_float_nosign_noexp_re, flags=re.U)
_float_sign_exp_re_c = r'([-+]?[0-9]*[.,]?[0-9]+(?:[eE][-+]?[0-9]+)?)|[{0}]'
_float_sign_exp_re_c = _float_sign_exp_re_c.format(numeric)
_float_sign_exp_re_c = re.compile(_float_sign_exp_re_c, flags=re.U)
_float_nosign_exp_re_c = r'([0-9]*[.,]?[0-9]+(?:[eE][-+]?[0-9]+)?|[{0}])'
_float_nosign_exp_re_c = _float_nosign_exp_re_c.format(numeric)
_float_nosign_exp_re_c = re.compile(_float_nosign_exp_re_c, flags=re.U)
_float_sign_noexp_re_c = r'([-+]?[0-9]*[.,]?[0-9]+|[{0}])'
_float_sign_noexp_re_c = _float_sign_noexp_re_c.format(numeric)
_float_sign_noexp_re_c = re.compile(_float_sign_noexp_re_c, flags=re.U)
_float_nosign_noexp_re_c = r'([0-9]*[.,]?[0-9]+|[{0}])'
_float_nosign_noexp_re_c = _float_nosign_noexp_re_c.format(numeric)
_float_nosign_noexp_re_c = re.compile(_float_nosign_noexp_re_c, flags=re.U)

# Integer regexes - include Unicode digits.
_int_nosign_re = r'([0-9]+|[{0}])'.format(digits)
_int_nosign_re = re.compile(_int_nosign_re, flags=re.U)
_int_sign_re = r'([-+]?[0-9]+|[{0}])'.format(digits)
_int_sign_re = re.compile(_int_sign_re, flags=re.U)

# This dict will help select the correct regex and number conversion function.
# Keyed by (algorithm flags, locale decimal point character).
_regex_and_num_function_chooser = {
    (ns.F | ns.S, '.'): (_float_sign_exp_re, fast_float),
    (ns.F | ns.S | ns.N, '.'): (_float_sign_noexp_re, fast_float),
    (ns.F | ns.U, '.'): (_float_nosign_exp_re, fast_float),
    (ns.F | ns.U | ns.N, '.'): (_float_nosign_noexp_re, fast_float),
    (ns.I | ns.S, '.'): (_int_sign_re, fast_int),
    (ns.I | ns.S | ns.N, '.'): (_int_sign_re, fast_int),
    (ns.I | ns.U, '.'): (_int_nosign_re, fast_int),
    (ns.I | ns.U | ns.N, '.'): (_int_nosign_re, fast_int),
    (ns.F | ns.S, ','): (_float_sign_exp_re_c, fast_float),
    (ns.F | ns.S | ns.N, ','): (_float_sign_noexp_re_c, fast_float),
    (ns.F | ns.U, ','): (_float_nosign_exp_re_c, fast_float),
    (ns.F | ns.U | ns.N, ','): (_float_nosign_noexp_re_c, fast_float),
    (ns.I | ns.S, ','): (_int_sign_re, fast_int),
    (ns.I | ns.S | ns.N, ','): (_int_sign_re, fast_int),
    (ns.I | ns.U, ','): (_int_nosign_re, fast_int),
    (ns.I | ns.U | ns.N, ','): (_int_nosign_re, fast_int),
}

# Dict to select checker function from converter function
_conv_to_check = {fast_float: isfloat, fast_int: isint}
def _do_decoding(s, encoding):
"""A function to decode a bytes string, or return the object as-is."""
try:
return s.decode(encoding)
except UnicodeError:
raise
except (AttributeError, TypeError):
return s
def _args_to_enum(**kwargs):
    """Translate deprecated boolean keyword arguments into an ns-enum value.

    Each legacy argument ('number_type', 'signed', 'exp', 'as_path',
    'py3_safe') triggers a DeprecationWarning pointing at its modern
    'alg=ns.X' replacement. Unknown keywords raise TypeError.
    """
    flags = 0
    valid_keys = ('number_type', 'signed', 'exp', 'as_path', 'py3_safe')
    unknown = set(kwargs) - set(valid_keys)
    if unknown:
        raise TypeError('Invalid argument(s): ' + ', '.join(unknown))
    if 'number_type' in kwargs and kwargs['number_type'] is not int:
        warn("The 'number_type' argument is deprecated as of 3.5.0, "
             "please use 'alg=ns.FLOAT', 'alg=ns.INT', or 'alg=ns.VERSION'",
             DeprecationWarning)
        flags |= (_ns['FLOAT'] * bool(kwargs['number_type'] is float))
        flags |= (_ns['INT'] * bool(kwargs['number_type'] in (int, None)))
        flags |= (_ns['SIGNED'] * (kwargs['number_type'] not in (float, None)))
    if 'signed' in kwargs and kwargs['signed'] is not None:
        warn("The 'signed' argument is deprecated as of 3.5.0, "
             "please use 'alg=ns.SIGNED'.", DeprecationWarning)
        flags |= (_ns['SIGNED'] * bool(kwargs['signed']))
    if 'exp' in kwargs and kwargs['exp'] is not None:
        warn("The 'exp' argument is deprecated as of 3.5.0, "
             "please use 'alg=ns.NOEXP'.", DeprecationWarning)
        flags |= (_ns['NOEXP'] * (not kwargs['exp']))
    if 'as_path' in kwargs and kwargs['as_path'] is not None:
        warn("The 'as_path' argument is deprecated as of 3.5.0, "
             "please use 'alg=ns.PATH'.", DeprecationWarning)
        flags |= (_ns['PATH'] * kwargs['as_path'])
    if 'py3_safe' in kwargs and kwargs['py3_safe'] is not None:
        warn("The 'py3_safe' argument is deprecated as of 3.5.0, "
             "please use 'alg=ns.TYPESAFE'.", DeprecationWarning)
        flags |= (_ns['TYPESAFE'] * kwargs['py3_safe'])
    return flags
def _number_extracter(s, regex, numconv, py3_safe, use_locale, group_letters):
    """Split *s* into alternating string and number chunks via *regex*."""
    conv_check = (numconv, _conv_to_check[numconv])
    # regex.split raises TypeError for non-string input, which is the
    # documented failure mode for callers.
    chunks = regex.split(s)
    # Convert numeric chunks to numbers and leave the rest as strings,
    # dropping empty strings; honor locale/grouping options.
    if use_locale:
        converted = [locale_convert(c, conv_check, group_letters)
                     for c in chunks if c]
    elif group_letters:
        converted = [grouper(c, conv_check) for c in chunks if c]
    else:
        converted = [numconv(c) for c in chunks if c]
    if not converted:  # Empty input gives an empty result.
        return []
    # Lead with an empty string when the first chunk is numeric, to work
    # around "unorderable types" when comparing mixed tuples.
    if conv_check[1](converted[0], num_only=True):
        converted = [null_string if use_locale else ''] + converted
    # _py3_safe inserts '' between adjacent numbers; it is an expensive
    # extra pass, so it only runs when explicitly requested.
    if py3_safe:
        return _py3_safe(converted, use_locale, conv_check[1])
    return converted
def _path_splitter(s, _d_match=re.compile(r'\.\d').match):
    """Split a string (assumed to be a path) into its path components,
    with the basename further split on its file extensions."""
    # A pathlib PurePath is converted to its string form first.
    if has_pathlib and isinstance(s, PurePath):
        remaining = str(s)
    else:  # pragma: no cover
        remaining = s
    # Peel components off the right end until nothing is left to split
    # (or we hit '.'/'..').
    components = []
    while remaining != curdir and remaining != pardir:
        previous = remaining
        remaining, tail = split(previous)
        if remaining == previous:
            break
        components.append(tail)
    # The leftover is the base of the path; keep it only if non-empty.
    if remaining:
        components.append(remaining)
    # Components were gathered right-to-left; restore natural order.
    components.reverse()
    # Now strip extensions off the basename, one at a time, stopping when
    # an "extension" looks like a decimal number (e.g. 'file.1') or when
    # there are no more extensions.
    name = components.pop()
    ext_parts = []
    while True:
        candidate = name
        name, ext = splitext(candidate)
        if _d_match(ext) or not ext:
            # Invalid split: restore the pre-split value and stop.
            name = candidate
            break
        ext_parts.append(ext)
    ext_parts.append(name)
    ext_parts.reverse()
    # Parent components first, then the split basename pieces.
    return components + ext_parts
def _py3_safe(parsed_list, use_locale, check):
    """Insert '' between two numbers."""
    if len(parsed_list) < 2:
        return parsed_list
    separator = null_string if use_locale else ''
    result = parsed_list[:1]
    # Walk the remaining items; whenever the previous element and the
    # current one are both numbers, wedge a separator between them so the
    # resulting tuple never compares a number against a number's neighbor
    # of a different type.
    for item in parsed_list[1:]:
        if check(result[-1], num_only=True) and check(item, num_only=True):
            result.append(separator)
        result.append(item)
    return result
def _fix_nan(ret, alg):
    """Return *ret* as a tuple with NaN values replaced so they sort
    deterministically: +inf when the NANLAST flag is set, else -inf."""
    # The flag is constant for the whole tuple, so pick the replacement once.
    replacement = float('+inf') if alg & _ns['NANLAST'] else float('-inf')
    return tuple(
        replacement if (isfloat(r, num_only=True) and isnan(r)) else r
        for r in ret
    )
def _natsort_key(val, key, alg):
    """\
    Key to sort strings and numbers naturally.
    It works by separating out the numbers from the strings. This function for
    internal use only. See the natsort_keygen documentation for details of each
    parameter.
    Parameters
    ----------
    val : {str, unicode}
    key : callable
    alg : ns enum
    Returns
    -------
    out : tuple
        The modified value with numbers extracted.
    """
    # Convert the arguments to the proper input tuple:
    # (number-algorithm flags, decimal separator for the active locale).
    try:
        use_locale = alg & _ns['LOCALE']
        inp_options = (alg & _NUMBER_ALGORITHMS,
                       localeconv()['decimal_point'] if use_locale else '.')
    except TypeError:
        # alg was not an int-like enum value (e.g. a string was passed).
        msg = "_natsort_key: 'alg' argument must be from the enum 'ns'"
        raise ValueError(msg+', got {0}'.format(py23_str(alg)))
    # Get the proper regex and conversion function.
    try:
        regex, num_function = _regex_and_num_function_chooser[inp_options]
    except KeyError:  # pragma: no cover
        if inp_options[1] not in ('.', ','):  # pragma: no cover
            raise ValueError("_natsort_key: currently natsort only supports "
                             "the decimal separators '.' and ','. "
                             "Please file a bug report.")
        else:
            raise
    else:
        # Apply key if needed.
        if key is not None:
            val = key(val)
        # If this is a path, convert it.
        # An AttrubuteError is raised if not a string.
        split_as_path = False
        if alg & _ns['PATH']:
            try:
                val = _path_splitter(val)
            except AttributeError:
                pass
            else:
                # Record that this string was split as a path so that
                # we don't set PATH in the recursive call.
                split_as_path = True
        # Assume the input are strings, which is the most common case.
        # Apply the string modification if needed.
        # orig_val is kept so UNGROUPLETTERS can expose the untransformed
        # first character for high-level grouping.
        orig_val = val
        try:
            lowfirst = alg & _ns['LOWERCASEFIRST']
            dumb = dumb_sort() if use_locale else False
            if use_locale and dumb and not lowfirst:  # pragma: no cover
                val = val.swapcase()  # Compensate for bad locale lib.
            elif lowfirst and not (use_locale and dumb):
                val = val.swapcase()
            if alg & _ns['IGNORECASE']:
                # casefold (3.3+) handles more aggressive case-insensitive
                # matching than lower().
                val = val.casefold() if PY_VERSION >= 3.3 else val.lower()
            gl = alg & _ns['GROUPLETTERS']
            ret = tuple(_number_extracter(val,
                                          regex,
                                          num_function,
                                          alg & _ns['TYPESAFE'],
                                          use_locale,
                                          gl or (use_locale and dumb)))
            # Handle NaN.
            if any(isfloat(x, num_only=True) and isnan(x) for x in ret):
                ret = _fix_nan(ret, alg)
            # For UNGROUPLETTERS, so the high level grouping can occur
            # based on the first letter of the string.
            # Do no locale transformation of the characters.
            if use_locale and alg & _ns['UNGROUPLETTERS']:
                if not ret:
                    return (ret, ret)
                elif ret[0] == null_string:
                    return ((b'' if use_pyicu else '',), ret)
                elif dumb:  # pragma: no cover
                    if lowfirst:
                        return ((orig_val[0].swapcase(),), ret)
                    else:
                        return ((orig_val[0],), ret)
                else:
                    return ((val[0],), ret)
            else:
                return ret
        except (TypeError, AttributeError):
            # Check if it is a bytes type, and if so return as a
            # one element tuple.
            if type(val) in (bytes,):
                return (val.lower(),) if alg & _ns['IGNORECASE'] else (val,)
            # If not strings, assume it is an iterable that must
            # be parsed recursively. Do not apply the key recursively.
            # If this string was split as a path, turn off 'PATH'.
            try:
                was_path = alg & _ns['PATH']
                newalg = alg & _ALL_BUT_PATH
                newalg |= (was_path * (not split_as_path))
                return tuple([_natsort_key(x, None, newalg) for x in val])
            # If there is still an error, it must be a number.
            # Return as-is, with a leading empty string.
            except TypeError:
                n = null_string if use_locale else ''
                if isfloat(val, num_only=True) and isnan(val):
                    val = _fix_nan([val], alg)[0]
                return ((n, val,),) if alg & _ns['PATH'] else (n, val,)
| epl-1.0 |
ngokevin/zamboni | mkt/developers/api_payments.py | 1 | 11477 | from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
import commonware
from curling.lib import HttpClientError, HttpServerError
from rest_framework import status
from rest_framework.mixins import (CreateModelMixin, DestroyModelMixin,
ListModelMixin, RetrieveModelMixin,
UpdateModelMixin)
from rest_framework.permissions import BasePermission, IsAuthenticated
from rest_framework.relations import HyperlinkedRelatedField
from rest_framework.response import Response
from rest_framework.serializers import (HyperlinkedModelSerializer,
Serializer,
ValidationError)
from rest_framework.viewsets import GenericViewSet
from tower import ugettext as _
import amo
from lib.pay_server import get_client
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.authorization import AllowAppOwner, GroupPermission
from mkt.api.base import MarketplaceView
from mkt.constants.payments import PAYMENT_STATUSES
from mkt.constants.payments import PROVIDER_BANGO
from mkt.developers.forms_payments import (BangoPaymentAccountForm,
PaymentCheckForm)
from mkt.developers.models import (AddonPaymentAccount, CantCancel,
PaymentAccount)
from mkt.developers.providers import get_provider
from mkt.webapps.models import AddonUpsell
log = commonware.log.getLogger('z.api.payments')
class PaymentAppViewSet(GenericViewSet):
    """Base ViewSet that resolves the app referenced in the URL.

    Subclasses must define a ``form`` attribute whose form accepts an
    'app' value and exposes the resolved app in cleaned_data.
    """

    def initialize_request(self, request, *args, **kwargs):
        """
        Pass the value in the URL through to the form defined on the
        ViewSet, which will populate the app property with the app object.
        You must define a form which will take an app object.
        """
        request = super(PaymentAppViewSet, self).initialize_request(
            request, *args, **kwargs)
        self.app = None
        app_form = self.form({'app': kwargs.get('pk')})
        if app_form.is_valid():
            self.app = app_form.cleaned_data['app']
        return request
class PaymentAccountSerializer(Serializer):
    """
    Fake serializer that returns PaymentAccount details when
    serializing a PaymentAccount instance. Use only for read operations.
    """

    def to_native(self, obj):
        # Ask the payment provider for the account's details, then graft
        # on our own resource URI.
        provider = obj.get_provider()
        data = provider.account_retrieve(obj)
        data['resource_uri'] = reverse('payment-account-detail',
                                       kwargs={'pk': obj.pk})
        return data
class PaymentAccountViewSet(ListModelMixin, RetrieveModelMixin,
                            MarketplaceView, GenericViewSet):
    """Endpoints for a user's payment accounts.

    Reads go through PaymentAccountSerializer (a provider lookup); writes
    are proxied to the payment provider's own forms and API.
    """
    queryset = PaymentAccount.objects.all()
    # PaymentAccountSerializer is not a real serializer, it just looks up
    # the details on the object. It's only used for GET requests, in every
    # other case we use BangoPaymentAccountForm directly.
    serializer_class = PaymentAccountSerializer
    authentication_classes = [RestOAuthAuthentication,
                              RestSharedSecretAuthentication]
    # Security checks are performed in get_queryset(), so we allow any
    # authenticated users by default.
    permission_classes = [IsAuthenticated]

    def get_queryset(self):
        """
        Return the queryset specific to the user using the view. (This replaces
        permission checks, unauthorized users won't be able to see that an
        account they don't have access to exists, we'll return 404 for them.)
        """
        qs = super(PaymentAccountViewSet, self).get_queryset()
        return qs.filter(user=self.request.user, inactive=False)

    def create(self, request, *args, **kwargs):
        """Create a payment account through the active provider's API."""
        provider = get_provider()
        form = provider.forms['account'](request.DATA)
        if not form.is_valid():
            return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
        try:
            # Fix: the provider instance obtained above is reused here;
            # previously get_provider() was redundantly called a second time.
            obj = provider.account_create(request.user, form.data)
        except HttpClientError as e:
            log.error('Client error creating Bango account; %s' % e)
            return Response(e.content,
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        except HttpServerError as e:
            log.error('Error creating Bango payment account; %s' % e)
            return Response(_(u'Could not connect to payment server.'),
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        serializer = self.get_serializer(obj)
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def partial_update(self, request, *args, **kwargs):
        # PATCH is deliberately unsupported; accounts are updated via PUT.
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)

    def update(self, request, *args, **kwargs):
        """Update the account via the provider, validating with the form."""
        self.object = self.get_object()
        form = BangoPaymentAccountForm(request.DATA, account=True)
        if form.is_valid():
            self.object.get_provider().account_update(self.object,
                                                      form.cleaned_data)
            return Response(status=status.HTTP_204_NO_CONTENT)
        return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)

    def destroy(self, request, *args, **kwargs):
        """Cancel (soft-delete) the account; shared accounts are refused."""
        account = self.get_object()
        try:
            account.cancel(disable_refs=True)
        except CantCancel:
            return Response(_('Cannot delete shared account'),
                            status=status.HTTP_409_CONFLICT)
        log.info('Account cancelled: %s' % account.pk)
        return Response(status=status.HTTP_204_NO_CONTENT)
class UpsellSerializer(HyperlinkedModelSerializer):
    """Serializer for a free -> premium app upsell relationship.

    NOTE(review): `free` and `premium` are assigned the *same* field
    instance; DRF copies declared fields per serializer instance, so this
    appears to be intentional shorthand — confirm against the DRF version
    in use.
    """
    free = premium = HyperlinkedRelatedField(view_name='app-detail')

    class Meta:
        model = AddonUpsell
        fields = ('free', 'premium', 'created', 'modified', 'url')
        view_name = 'app-upsell-detail'

    def validate(self, attrs):
        # An upsell must point from a free app to a premium app.
        if attrs['free'].premium_type not in amo.ADDON_FREES:
            raise ValidationError('Upsell must be from a free app.')
        if attrs['premium'].premium_type in amo.ADDON_FREES:
            raise ValidationError('Upsell must be to a premium app.')
        return attrs
class UpsellPermission(BasePermission):
    """
    Permissions on the upsell object, is determined by permissions on the
    free and premium object.
    """

    def check(self, request, free, premium):
        # The requester must own every app involved; None entries (an
        # unset side of the upsell) are skipped.
        owner_check = AllowAppOwner()
        return all(owner_check.has_object_permission(request, '', app)
                   for app in (free, premium) if app)

    def has_object_permission(self, request, view, object):
        return self.check(request, object.free, object.premium)
class UpsellViewSet(CreateModelMixin, DestroyModelMixin, RetrieveModelMixin,
                    UpdateModelMixin, MarketplaceView, GenericViewSet):
    """CRUD endpoints for free -> premium upsell relationships."""
    permission_classes = (UpsellPermission,)
    queryset = AddonUpsell.objects.filter()
    serializer_class = UpsellSerializer

    def pre_save(self, obj):
        # Re-verify ownership of both sides right before saving.
        allowed = UpsellPermission().check(self.request, obj.free, obj.premium)
        if not allowed:
            raise PermissionDenied('Not allowed to alter that object')
class AddonPaymentAccountPermission(BasePermission):
    """
    Permissions on the app payment account object, is determined by permissions
    on the app the account is being used for.
    """

    def check(self, request, app, account):
        """Allow access when the user owns *app* and may use *account*.

        The account is usable when it is shared, or when it belongs to the
        requesting user.
        """
        if AllowAppOwner().has_object_permission(request, '', app):
            if account.shared or account.user.pk == request.user.pk:
                return True
            else:
                # Bug fix: these messages use %(name)s placeholders but were
                # previously rendered with str.format(), which left them
                # unexpanded in the logs; use % interpolation instead.
                log.info('AddonPaymentAccount access %(account)s denied '
                         'for %(user)s: wrong user, not shared.'
                         % {'account': account.pk, 'user': request.user.pk})
        else:
            log.info('AddonPaymentAccount access %(account)s denied '
                     'for %(user)s: no app permission.'
                     % {'account': account.pk, 'user': request.user.pk})
        return False

    def has_object_permission(self, request, view, object):
        return self.check(request, object.addon, object.payment_account)
class AddonPaymentAccountSerializer(HyperlinkedModelSerializer):
    """Serializer linking an app (addon) to one of its payment accounts."""
    addon = HyperlinkedRelatedField(view_name='app-detail')
    payment_account = HyperlinkedRelatedField(
        view_name='payment-account-detail')

    class Meta:
        model = AddonPaymentAccount
        fields = ('addon', 'payment_account', 'created', 'modified', 'url')
        view_name = 'app-payment-account-detail'

    def validate(self, attrs):
        # Only premium apps may be linked to a payment account.
        if attrs['addon'].premium_type in amo.ADDON_FREES:
            raise ValidationError('App must be a premium app.')
        return attrs
class AddonPaymentAccountViewSet(CreateModelMixin, RetrieveModelMixin,
                                 UpdateModelMixin, MarketplaceView,
                                 GenericViewSet):
    """Create/read/update the link between an app and a payment account."""
    permission_classes = (AddonPaymentAccountPermission,)
    queryset = AddonPaymentAccount.objects.filter()
    serializer_class = AddonPaymentAccountSerializer

    def pre_save(self, obj):
        # Ownership of both the app and the account is re-checked here.
        permitted = AddonPaymentAccountPermission().check(
            self.request, obj.addon, obj.payment_account)
        if not permitted:
            raise PermissionDenied('Not allowed to alter that object.')
        if self.request.method != 'POST':
            # On update, the linked add-on must stay the same as what is
            # currently stored (bypass the cache to be sure).
            stored_addon = obj.__class__.objects.no_cache().get(pk=obj.pk).addon
            if not obj.addon == stored_addon:
                # This should be a 400 error.
                raise PermissionDenied('Cannot change the add-on.')

    def post_save(self, obj, created=False):
        """Ensure that the setup_bango method is called after creation."""
        if created:
            provider = get_provider()
            obj.product_uri = provider.product_create(obj.payment_account,
                                                      obj.addon)
            obj.save()
class PaymentCheckViewSet(PaymentAppViewSet):
    """Proxy a Bango payment status check for an app through to solitude."""
    permission_classes = (AllowAppOwner,)
    form = PaymentCheckForm

    def create(self, request, *args, **kwargs):
        """
        We aren't actually creating objects, but proxying them
        through to solitude.
        """
        if not self.app:
            return Response('', status=400)
        self.check_object_permissions(request, self.app)
        client = get_client()
        account_uri = self.app.payment_account(PROVIDER_BANGO).account_uri
        result = client.api.bango.status.post(
            data={'seller_product_bango': account_uri})
        payload = {
            'bango': {
                'status': PAYMENT_STATUSES[result['status']],
                'errors': ''
            },
        }
        return Response(payload, status=200)
class PaymentDebugViewSet(PaymentAppViewSet):
    """Expose solitude's Bango debug data for an app (debug group only)."""
    permission_classes = [GroupPermission('Transaction', 'Debug')]
    form = PaymentCheckForm

    def list(self, request, *args, **kwargs):
        if not self.app:
            return Response('', status=400)
        client = get_client()
        account_uri = self.app.payment_account(PROVIDER_BANGO).account_uri
        result = client.api.bango.debug.get(
            data={'seller_product_bango': account_uri})
        return Response({'bango': result['bango']}, status=200)
| bsd-3-clause |
bright-sparks/titanium_mobile | support/android/builder.py | 30 | 97623 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Appcelerator Titanium Mobile
# Copyright (c) 2011-2012 by Appcelerator, Inc. All Rights Reserved.
# Licensed under the terms of the Apache Public License
# Please see the LICENSE included with this distribution for details.
#
# General builder script for staging, packaging, deploying,
# and debugging Titanium Mobile applications on Android
#
import os, sys, subprocess, shutil, time, signal, string, platform, re, glob, hashlib, imp, inspect
import run, avd, prereq, zipfile, tempfile, fnmatch, codecs, traceback, sgmllib
from os.path import splitext
from compiler import Compiler
from os.path import join, splitext, split, exists
from shutil import copyfile
from xml.dom.minidom import parseString
from tilogger import *
from datetime import datetime, timedelta
reload(sys) # this is required to prevent the following error: "AttributeError: 'module' object has no attribute 'setdefaultencoding'"
sys.setdefaultencoding("utf_8") # Fix umlaut issues
template_dir = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
top_support_dir = os.path.dirname(template_dir)
sys.path.append(top_support_dir)
sys.path.append(os.path.join(top_support_dir, 'common'))
sys.path.append(os.path.join(top_support_dir, 'module'))
import simplejson, java
from mako.template import Template
from tiapp import *
from android import Android
from androidsdk import AndroidSDK
from deltafy import Deltafy, Delta
from css import csscompiler
from module import ModuleDetector
import localecompiler
import fastdev
import requireIndex
# Android res/values XML files that may be merged between app and platform dirs.
resourceFiles = ['strings.xml', 'attrs.xml', 'styles.xml', 'bools.xml', 'colors.xml',
	'dimens.xml', 'ids.xml', 'integers.xml', 'arrays.xml']
# File and directory names skipped when copying project resources.
ignoreFiles = ['.gitignore', '.cvsignore', '.DS_Store'];
ignoreDirs = ['.git','.svn','_svn', 'CVS'];
# Hardware options enabled for generated AVDs.
android_avd_hw = {'hw.camera': 'yes', 'hw.gps':'yes'}
res_skips = ['style']
# Module-level TiLogger instance; assigned by the build entry point.
log = None
# Copied from frameworks/base/tools/aapt/Package.cpp
uncompressed_types = [
	".jpg", ".jpeg", ".png", ".gif",
	".wav", ".mp2", ".mp3", ".ogg", ".aac",
	".mpg", ".mpeg", ".mid", ".midi", ".smf", ".jet",
	".rtttl", ".imy", ".xmf", ".mp4", ".m4a",
	".m4v", ".3gp", ".3gpp", ".3g2", ".3gpp2",
	".amr", ".awb", ".wma", ".wmv"
]
# Java keywords to reference in case app id contains java keyword
java_keywords = [
	"abstract", "continue", "for", "new", "switch",
	"assert", "default", "goto", "package", "synchronized",
	"boolean", "do", "if", "private", "this",
	"break", "double", "implements", "protected", "throw",
	"byte", "else", "import", "public", "throws",
	"case", "enum", "instanceof", "return", "transient",
	"catch", "extends", "int", "short", "try",
	"char", "final", "interface", "static", "void",
	"class", "finally", "long", "strictfp", "volatile",
	"const", "float", "native", "super", "while",
	"true", "false", "null"
]
# Android API level bounds/markers used when choosing build tools.
MIN_API_LEVEL = 10
HONEYCOMB_MR2_LEVEL = 13
KNOWN_ABIS = ("armeabi", "armeabi-v7a", "x86")
# Used only to find <script> tags in HTML files
# so we can be sure to package referenced JS files
# even when compiling for production. (See
# Builder.package_and_deploy later in this file.)
class HTMLParser(sgmllib.SGMLParser):
	"""Collects the lower-cased src attributes of <script> tags so JS files
	referenced from HTML are still packaged in production builds."""

	def __init__(self, verbose=0):
		sgmllib.SGMLParser.__init__(self, verbose)
		self.referenced_js_files = []

	def parse(self, html_source):
		"""Feed the whole document and finalize parsing."""
		self.feed(html_source)
		self.close()

	def start_script(self, attributes):
		# SGMLParser callback invoked for every opening <script> tag.
		for attr_name, attr_value in attributes:
			if attr_value and attr_name.lower() == "src":
				self.referenced_js_files.append(attr_value.lower())

	def get_referenced_js_files(self):
		return self.referenced_js_files
def launch_logcat():
	"""Stream `adb logcat` for a device/emulator until interrupted.

	Arguments are read from sys.argv (invoked as
	`builder.py devicelog <sdk_dir> <device_switch> [serial]`); the process
	exits with the logcat return code.
	"""
	valid_device_switches = ('-e', '-d', '-s')
	device_id = None
	android_sdk_location = None
	adb_location = None
	logcat_process = None
	device_switch = None # e.g., -e or -d or -s
	def show_usage():
		# Print usage to stderr and abort.
		print >> sys.stderr, ""
		print >> sys.stderr, "%s devicelog <sdk_dir> <device_switch> [device_serial_number]" % os.path.basename(sys.argv[0])
		print >> sys.stderr, ""
		print >> sys.stderr, "The <device_switch> can be -e, -d -s. If -s, also pass serial number."
		sys.exit(1)
	if len(sys.argv) < 3:
		print >> sys.stderr, "Missing Android SDK location."
		show_usage()
	else:
		android_sdk_location = os.path.abspath(os.path.expanduser(sys.argv[2]))
		adb_location = AndroidSDK(android_sdk_location).get_adb()
	if len(sys.argv) < 4:
		print >> sys.stderr, "Missing device/emulator switch (e.g., -e, -d, -s)."
		show_usage()
	device_switch = sys.argv[3]
	if device_switch not in valid_device_switches:
		print >> sys.stderr, "Unknown device type switch: %s" % device_switch
		show_usage()
	if device_switch == "-s":
		# -s targets a specific device, so a serial number is required.
		if len(sys.argv) < 5:
			print >> sys.stderr, "Must specify serial number when using -s."
			show_usage()
		else:
			device_id = sys.argv[4]
	# For killing the logcat process if our process gets killed.
	def signal_handler(signum, frame):
		print "[DEBUG] Signal %s received. Terminating the logcat process." % signum
		if logcat_process is not None:
			if platform.system() == "Windows":
				os.system("taskkill /F /T /PID %i" % logcat_process.pid)
			else:
				os.kill(logcat_process.pid, signal.SIGTERM)
	# make sure adb is running on windows, else XP can lockup the python
	# process when adb runs first time
	if platform.system() == "Windows":
		run.run([adb_location, "start-server"], True, ignore_output=True)
	logcat_cmd = [adb_location, device_switch]
	if device_id:
		logcat_cmd.append(device_id)
	logcat_cmd.extend(["logcat", "-s", "*:d,*,TiAPI:V"])
	logcat_process = subprocess.Popen(logcat_cmd)
	if platform.system() != "Windows":
		# Forward common termination signals so logcat dies with us.
		signal.signal(signal.SIGHUP, signal_handler)
		signal.signal(signal.SIGQUIT, signal_handler)
		signal.signal(signal.SIGINT, signal_handler)
		signal.signal(signal.SIGABRT, signal_handler)
		signal.signal(signal.SIGTERM, signal_handler)
	# In case it's gonna exit early (like if the command line
	# was wrong or something) give it a chance to do so before we start
	# waiting on it.
	time.sleep(1)
	return_code = logcat_process.poll()
	if return_code:
		signal_handler(signal.SIGQUIT, None)
		sys.exit(return_code)
	# Now wait for it.
	try:
		return_code = logcat_process.wait()
	except OSError:
		signal_handler(signal.SIGQUIT, None)
		sys.exit(return_code)
	sys.exit(return_code)
def render_template_with_tiapp(template_text, tiapp_obj):
	"""Render a mako template, exposing the TiAppXML object as `tiapp`."""
	return Template(template_text).render(tiapp=tiapp_obj)
def remove_ignored_dirs(dirs, ignore=None):
	"""Prune ignored directory names from *dirs* in place.

	*dirs* is typically the directory list yielded by os.walk(), so pruning
	in place stops the walk from descending into them. *ignore* defaults to
	the module-level ignoreDirs list.

	Bug fix: the previous implementation removed items while iterating the
	same list, which skipped the element following each removal (adjacent
	ignored directories such as '.git' and '.svn' were not both removed).
	"""
	if ignore is None:
		ignore = ignoreDirs
	dirs[:] = [d for d in dirs if d not in ignore]
# ZipFile.extractall introduced in Python 2.6, so this is workaround for earlier
# versions
def zip_extractall(zfile, target_dir):
	"""Extract all non-empty entries of *zfile* into *target_dir*.

	Parent directories are created as needed; zero-length entries
	(including directory entries) are skipped.
	"""
	for info in zfile.infolist():
		if info.file_size <= 0:
			continue
		file_path = os.path.join(target_dir, os.path.normpath(info.filename))
		parent_path = os.path.dirname(file_path)
		if not os.path.exists(parent_path):
			os.makedirs(parent_path)
		# Context manager ensures the handle is closed even if the read
		# or write fails (the original leaked it on error).
		with open(file_path, "wb") as out_file:
			out_file.write(zfile.read(info.filename))
def dequote(s):
	"""Strip a leading double quote (and the final character) if present.

	Mirrors shell-style dequoting: a string beginning with '"' loses its
	first and last characters; anything else is returned unchanged.
	"""
	return s[1:-1] if s.startswith('"') else s
def pipe(args1, args2):
	"""Run `args1 | args2` and return the second command's stdout (bytes)."""
	p1 = subprocess.Popen(args1, stdout=subprocess.PIPE)
	p2 = subprocess.Popen(args2, stdin=p1.stdout, stdout=subprocess.PIPE)
	# Close our copy of p1's stdout so p1 receives SIGPIPE if p2 exits
	# early (the standard pipeline recipe from the subprocess docs).
	p1.stdout.close()
	return p2.communicate()[0]
def read_properties(propFile, separator=":= "):
	"""Parse a java-style .properties stream into a dict.

	Lines are split at the first occurrence of any character in
	*separator*; blank lines and lines starting with '!' or '#' are
	skipped. The stream is closed before returning.
	"""
	result = dict()
	for raw_line in propFile:
		line = raw_line.strip()
		if not line:
			continue
		if line[0] in ('!', '#'):
			continue
		# Find the earliest separator character; fall back to end-of-line
		# so keys without a value still parse.
		positions = [p for p in (line.find(ch) for ch in separator) if p != -1]
		split_at = min(positions) if positions else len(line)
		key = line[:split_at].rstrip()
		value = line[split_at:].lstrip(separator).rstrip()
		result[key] = value
	propFile.close()
	return result
# Thin wrappers around the module-level `log` (a TiLogger assigned by the
# build entry point); calling them before `log` is set raises AttributeError.
def info(msg):
	log.info(msg)
def debug(msg):
	log.debug(msg)
def warn(msg):
	log.warn(msg)
def trace(msg):
	log.trace(msg)
def error(msg):
	log.error(msg)
def copy_all(source_folder, dest_folder, mergeXMLResources=False,
		ignore_dirs=None, ignore_files=None, ignore_exts=None, one_time_msg=""):
	"""Recursively copy source_folder into dest_folder.

	Directories named in ignore_dirs are not descended into; files named in
	ignore_files or with an extension in ignore_exts are skipped. When
	mergeXMLResources is True, android res/values XML files (resourceFiles)
	that already exist in the destination are merged instead of overwritten
	(TIMOB-12663). one_time_msg, if given, is logged once before the first
	file is copied.
	"""
	# Mutable defaults replaced with None to follow standard Python practice.
	ignore_dirs = ignore_dirs or []
	ignore_files = ignore_files or []
	ignore_exts = ignore_exts or []
	msg_shown = False
	for root, dirs, files in os.walk(source_folder, True, None, True):
		# Prune ignored directories in place so os.walk skips them.
		# Bug fix: removing entries while iterating the same list skipped
		# the element after each removal; rebuild the list instead.
		dirs[:] = [d for d in dirs if d not in ignore_dirs]
		for f in files:
			if f in ignore_files:
				continue
			ext = os.path.splitext(f)[1]
			if ext in ignore_exts:
				continue
			if one_time_msg and not msg_shown:
				info(one_time_msg)
				msg_shown = True
			from_ = os.path.join(root, f)
			to_ = from_.replace(source_folder, dest_folder, 1)
			to_directory = os.path.split(to_)[0]
			if not os.path.exists(to_directory):
				os.makedirs(to_directory)
				shutil.copyfile(from_, to_)
			#
			# Merge the xml resource files in res/values/ if there are multiple files with the same name.
			# (TIMOB-12663)
			#
			elif mergeXMLResources and os.path.isfile(to_) and f in resourceFiles:
				sfile = open(from_, 'r')
				dfile = open(to_, 'r')
				scontent = sfile.read()
				dcontent = dfile.read()
				sfile.close()
				dfile.close()
				# Splice the destination's children (everything after the
				# opening <resources ...> tag) into the source file's body,
				# just before its closing </resources>.
				sindex = scontent.find('</resources>')
				dindex = dcontent.find('>', dcontent.find('<resources')) + 1
				content_to_write = scontent[:sindex] + dcontent[dindex:]
				wfile = open(to_, 'w')
				wfile.write(content_to_write)
				wfile.close()
			else:
				shutil.copyfile(from_, to_)
def remove_orphaned_files(source_folder, target_folder, ignore=None):
	"""Delete files under *target_folder* that no longer exist under
	*source_folder* (mirror cleanup after a copy).

	File names listed in *ignore* are never deleted. When *source_folder*
	is a "Resources" directory, a file also counts as present if it exists
	under the platform-specific "android" subdirectory of the source.
	"""
	# Mutable default replaced with None (standard Python practice).
	if ignore is None:
		ignore = []
	is_res = source_folder.endswith('Resources') or source_folder.endswith('Resources' + os.sep)
	for root, dirs, files in os.walk(target_folder):
		for f in files:
			if f in ignore:
				continue
			full = os.path.join(root, f)
			# Bug fix: str.replace(target_folder, '') replaced *every*
			# occurrence of the substring, corrupting the relative path
			# when target_folder's text repeated inside it; relpath is safe.
			rel = os.path.relpath(full, target_folder)
			is_orphan = not os.path.exists(os.path.join(source_folder, rel))
			# But it could be under android/... too (platform-specific).
			if is_orphan and is_res and os.path.exists(os.path.join(source_folder, 'android', rel)):
				is_orphan = False
			if is_orphan:
				os.remove(full)
def is_resource_drawable(path):
	"""True when *path* points into a density/qualifier image folder
	(android/images/high|medium|low|res-<qualifier>/...)."""
	return bool(re.search("android/images/(high|medium|low|res-[^/]+)/",
			path.replace(os.sep, "/")))

def resource_drawable_folder(path):
	"""Map an android/images/<density> path to its res/drawable-* folder.

	'high'/'medium'/'low' map to 'drawable-hdpi'/'-mdpi'/'-ldpi';
	'res-<qualifier>' maps to 'drawable-<qualifier>'. Returns None when the
	path is not a resource drawable.
	"""
	pattern = r'/android/images/(high|medium|low|res-[^/]+)/'
	match = re.search(pattern, path.replace(os.sep, "/"))
	# Bug fix: previously a path matching is_resource_drawable()'s looser
	# pattern but lacking a '/' before 'android' crashed with
	# AttributeError (match.groups() on None). Treat any non-match as
	# "not a resource drawable".
	if match is None:
		return None
	folder = match.groups()[0]
	if re.match('high|medium|low', folder):
		return 'drawable-%sdpi' % folder[0]
	return 'drawable-%s' % folder.replace('res-', '')
def remove_duplicate_nodes_in_res_file(full_path, node_names_to_check):
	"""Rewrite the res XML file at *full_path*, dropping direct children of
	<resources> whose tag is in *node_names_to_check* and whose 'name'
	attribute duplicates an earlier sibling with the same tag. The file is
	only rewritten when at least one duplicate was removed."""
	with open(full_path, 'r') as source:
		doc = parseString(source.read())
	resources_node = doc.getElementsByTagName('resources')[0]
	made_change = False
	for tag in node_names_to_check:
		seen = []  # 'name' attributes already kept for this tag
		for node in doc.getElementsByTagName(tag):
			# Only consider direct children of the <resources> node.
			if node.parentNode != resources_node:
				continue
			name = node.getAttribute('name')
			if name in seen:
				resources_node.removeChild(node)
				made_change = True
				debug('Removed duplicate node [%s] from %s' % (name, full_path))
			else:
				seen.append(name)
	if made_change:
		rewritten = doc.toxml()
		dest = codecs.open(full_path, 'w')
		dest.write(rewritten)
		dest.close()
class Builder(object):
	def __init__(self, name, sdk, project_dir, support_dir, app_id, is_emulator):
		"""Set up build paths, read tiapp.xml settings (tool-api-level, ABIs),
		locate the Android SDK and JDK tools, and create the build/android
		skeleton when missing. Exits the process when the app id contains a
		Java keyword or required Java tools are absent (non-emulator)."""
		self.top_dir = project_dir
		self.project_tiappxml = os.path.join(self.top_dir,'tiapp.xml')
		self.project_dir = os.path.join(project_dir,'build','android')
		self.res_dir = os.path.join(self.project_dir,'res')
		self.platform_dir = os.path.join(project_dir, 'platform', 'android')
		self.project_src_dir = os.path.join(self.project_dir, 'src')
		self.project_gen_dir = os.path.join(self.project_dir, 'gen')
		self.name = name
		self.app_id = app_id
		self.support_dir = support_dir
		self.compiled_files = []
		self.force_rebuild = False
		self.debugger_host = None
		self.debugger_port = -1
		self.profiler_host = None
		self.profiler_port = -1
		self.fastdev_port = -1
		self.fastdev = False
		self.compile_js = False
		self.tool_api_level = MIN_API_LEVEL
		self.abis = list(KNOWN_ABIS)
		# don't build if a java keyword in the app id would cause the build to fail
		tok = self.app_id.split('.')
		for token in tok:
			if token in java_keywords:
				error("Do not use java keywords for project app id, such as " + token)
				sys.exit(1)
		# Pull <tool-api-level> and <abi> overrides from tiapp.xml, if present.
		tool_api_level_explicit = False
		temp_tiapp = TiAppXML(self.project_tiappxml)
		if temp_tiapp and temp_tiapp.android:
			if 'tool-api-level' in temp_tiapp.android:
				self.tool_api_level = int(temp_tiapp.android['tool-api-level'])
				tool_api_level_explicit = True
			if 'abi' in temp_tiapp.android and temp_tiapp.android['abi'] != 'all':
				# Keep only recognized ABIs; warn about (and drop) the rest.
				tiapp_abis = [abi.strip() for abi in temp_tiapp.android['abi'].split(",")]
				to_remove = [bad_abi for bad_abi in tiapp_abis if bad_abi not in KNOWN_ABIS]
				if to_remove:
					warn("The following ABIs listed in the Android <abi> section of tiapp.xml are unknown and will be ignored: %s." % ", ".join(to_remove))
				tiapp_abis = [abi for abi in tiapp_abis if abi not in to_remove]
				self.abis = tiapp_abis
				if not self.abis:
					warn("Android <abi> tiapp.xml section does not specify any valid ABIs. Defaulting to '%s'." %
						",".join(KNOWN_ABIS))
					self.abis = list(KNOWN_ABIS)
		self.sdk = AndroidSDK(sdk, self.tool_api_level)
		# If the tool-api-level was not explicitly set in the tiapp.xml, but
		# <uses-sdk android:targetSdkVersion> *is* set, try to match the target version.
		if (not tool_api_level_explicit and temp_tiapp and temp_tiapp.android_manifest
			and "manifest" in temp_tiapp.android_manifest):
			self.check_target_api_version(temp_tiapp.android_manifest["manifest"])
		self.tiappxml = temp_tiapp
		# Module dependency map shipped with the build templates.
		json_contents = open(os.path.join(template_dir,'dependency.json')).read()
		self.depends_map = simplejson.loads(json_contents)
		# favor the ANDROID_SDK_HOME environment variable if used
		if os.environ.has_key('ANDROID_SDK_HOME') and os.path.exists(os.environ['ANDROID_SDK_HOME']):
			self.home_dir = os.path.join(os.environ['ANDROID_SDK_HOME'], '.titanium')
			self.android_home_dir = os.path.join(os.environ['ANDROID_SDK_HOME'], '.android')
		# we place some files in the users home
		elif platform.system() == "Windows":
			self.home_dir = os.path.join(os.environ['USERPROFILE'], '.titanium')
			self.android_home_dir = os.path.join(os.environ['USERPROFILE'], '.android')
		else:
			self.home_dir = os.path.join(os.path.expanduser('~'), '.titanium')
			self.android_home_dir = os.path.join(os.path.expanduser('~'), '.android')
		if not os.path.exists(self.home_dir):
			os.makedirs(self.home_dir)
		self.sdcard = os.path.join(self.home_dir,'android2.sdcard')
		self.classname = Android.strip_classname(self.name)
		# JDK tools aren't needed for plain emulator launches.
		if not is_emulator:
			self.set_java_commands()
		# start in 1.4, you no longer need the build/android directory
		# if missing, we'll create it on the fly
		if not os.path.exists(self.project_dir) or not os.path.exists(os.path.join(self.project_dir,'AndroidManifest.xml')):
			android_creator = Android(name, app_id, self.sdk, None, self.java)
			parent_dir = os.path.dirname(self.top_dir)
			if os.path.exists(self.top_dir):
				android_creator.create(parent_dir, project_dir=self.top_dir, build_time=True)
			else:
				android_creator.create(parent_dir)
			self.force_rebuild = True
		sys.stdout.flush()
def check_target_api_version(self, manifest_elements):
pattern = r'android:targetSdkVersion=\"(\d+)\"'
for el in manifest_elements:
if el.nodeName == "uses-sdk":
xml = el.toxml()
matches = re.findall(pattern, xml)
if matches:
new_level = self.sdk.try_best_match_api_level(int(matches[0]))
if new_level != self.tool_api_level:
self.tool_api_level = new_level
break
def set_java_commands(self):
commands = java.find_java_commands()
to_check = ("java", "javac", "keytool", "jarsigner")
found = True
for check in to_check:
if not commands[check]:
found = False
error("Required Java tool '%s' not located." % check)
if not found:
error("One or more required files not found - please check your JAVA_HOME environment variable")
sys.exit(1)
self.jarsigner = commands["jarsigner"]
self.keytool = commands["keytool"]
self.javac = commands["javac"]
self.java = commands["java"]
if not commands["environ_java_home"] and commands["java_home"]:
os.environ["JAVA_HOME"] = commands["java_home"]
def wait_for_home(self, type):
max_wait = 20
attempts = 0
while True:
processes = self.sdk.list_processes(['-%s' % type])
found_home = False
for process in processes:
if process["name"] == "android.process.acore":
found_home = True
break
if found_home:
break
attempts += 1
if attempts == max_wait:
error("Timed out waiting for android.process.acore")
return False
time.sleep(1)
return True
	def wait_for_device(self, type):
		"""Block until adb reports a usable device of the requested kind.

		type: "e" to wait for an emulator, "d" for a physical device.
		Polls `adb devices` every ~5 seconds. Gives up after max_wait polls,
		or after max_zero consecutive polls in which adb reported no devices
		at all -- in the no-devices case the whole build exits. Returns True
		once the device is ready (delegating to wait_for_home when the wait
		was long enough that boot is probably still in progress), False on a
		plain timeout.
		"""
		debug("Waiting for device to be ready ...")
		t = time.time()
		max_wait = 30  # polls before declaring a timeout
		max_zero = 10  # polls with zero devices before giving up entirely
		attempts = 0
		zero_attempts = 0
		timed_out = True
		no_devices = False
		while True:
			devices = self.sdk.list_devices()
			trace("adb devices returned %s devices/emulators" % len(devices))
			if len(devices) > 0:
				found = False
				for device in devices:
					# An emulator must also be online; a physical device just
					# needs to be present.
					if type == "e" and device.is_emulator() and not device.is_offline(): found = True
					elif type == "d" and device.is_device(): found = True
				if found:
					timed_out = False
					break
			else: zero_attempts += 1
			try: time.sleep(5) # for some reason KeyboardInterrupts get caught here from time to time
			except KeyboardInterrupt: pass
			attempts += 1
			if attempts == max_wait:
				break
			elif zero_attempts == max_zero:
				no_devices = True
				break
		if timed_out:
			if type == "e":
				device = "emulator"
				extra_message = "you may need to close the emulator and try again"
			else:
				device = "device"
				extra_message = "you may try reconnecting the USB cable"
			error("Timed out waiting for %s to be ready, %s" % (device, extra_message))
			if no_devices:
				sys.exit(1)
			return False
		debug("Device connected... (waited %d seconds)" % (attempts*5))
		duration = time.time() - t
		debug("waited %f seconds on emulator to get ready" % duration)
		if duration > 1.0:
			# A non-trivial wait implies the OS may still be booting; wait
			# for the home process before declaring the device ready.
			info("Waiting for the Android Emulator to become available")
			return self.wait_for_home(type)
		#time.sleep(20) # give it a little more time to get installed
		return True
	def create_avd(self, avd_id, avd_skin, avd_abi):
		"""Create (or reuse) a Titanium-managed Android Virtual Device.

		The AVD name encodes the target id, skin and -- when the target
		offers more than one ABI -- the chosen ABI. Newly created AVDs get a
		dedicated 64M SD card image and the hardware options from
		android_avd_hw appended to their config.ini. Returns the AVD name
		to launch.
		"""
		# Sanity check the AVD to see if the ABI is available, or
		# necessary.
		available_avds = avd.get_avds(self.sdk)
		multiple_abis = False
		for device in available_avds:
			if device['id'] == avd_id:
				default_abi = device['abis'][0]
				multiple_abis = ( len(device['abis']) != 1 )
				if avd_abi is None:
					avd_abi = default_abi
				elif avd_abi not in device['abis']:
					warn("ABI %s not supported for AVD ID %s: Using default ABI %s" % (avd_abi, avd_id, default_abi))
					avd_abi = default_abi
				break
		# The ABI appears in the name only when the target has several ABIs.
		if multiple_abis:
			name = "titanium_%s_%s_%s" % (avd_id, avd_skin, avd_abi)
		else:
			name = "titanium_%s_%s" % (avd_id, avd_skin)
		name = name.replace(' ', '_')
		if not os.path.exists(self.home_dir):
			os.makedirs(self.home_dir)
		avd_path = os.path.join(self.android_home_dir, 'avd')
		my_avd = os.path.join(avd_path,"%s.avd" % name)
		own_sdcard = os.path.join(self.home_dir, '%s.sdcard' % name)
		# NOTE(review): this also switches to the per-AVD sdcard when the AVD
		# already exists but its dedicated sdcard file does too -- presumably
		# so existing AVDs keep using their own card; confirm intent.
		if not os.path.exists(my_avd) or os.path.exists(own_sdcard):
			# starting with 1.7.2, when we create a new avd, give it its own
			# SDCard as well.
			self.sdcard = own_sdcard
			if not os.path.exists(self.sdcard):
				info("Creating 64M SD card for use in Android emulator")
				run.run([self.sdk.get_mksdcard(), '64M', self.sdcard])
		if not os.path.exists(my_avd):
			if multiple_abis:
				info("Creating new Android Virtual Device (%s %s %s)" % (avd_id,avd_skin,avd_abi))
			else:
				info("Creating new Android Virtual Device (%s %s)" % (avd_id,avd_skin))
			# input.py answers the interactive prompts of "android create avd".
			inputgen = os.path.join(template_dir,'input.py')
			abi_args = []
			if multiple_abis:
				abi_args = ['-b', avd_abi]
			pipe([sys.executable, inputgen], [self.sdk.get_android(), '--verbose', 'create', 'avd', '--name', name, '--target', avd_id, '-s', avd_skin, '--force', '--sdcard', self.sdcard] + abi_args)
			# Re-write config.ini with Titanium's hardware options appended.
			inifile = os.path.join(my_avd,'config.ini')
			inifilec = open(inifile,'r').read()
			inifiledata = open(inifile,'w')
			inifiledata.write(inifilec)
			# TODO - Document options
			for hw_option in android_avd_hw.keys():
				inifiledata.write("%s=%s\n" % (hw_option, android_avd_hw[hw_option]))
			inifiledata.close()
		return name
	def run_emulator(self, avd_id, avd_skin, avd_name, avd_abi, add_args):
		"""Launch the Android emulator on port 5560 and block until it exits.

		Creates the AVD on demand when avd_name is None. Installs signal
		handlers that kill the emulator subprocess if this build process is
		terminated. Does not return: exits the process immediately if an
		emulator is already running on port 5560, and otherwise exits with
		the emulator's return code once it terminates.
		"""
		info("Launching Android emulator...one moment")
		debug("From: " + self.sdk.get_emulator())
		debug("SDCard: " + self.sdcard)
		if avd_name is None:
			debug("AVD ID: " + avd_id)
			debug("AVD Skin: " + avd_skin)
		else:
			debug("AVD Name: " + avd_name)
		if avd_abi is not None:
			debug("AVD ABI: " + avd_abi)
		debug("SDK: " + sdk_dir)
		# make sure adb is running on windows, else XP can lockup the python
		# process when adb runs first time
		if platform.system() == "Windows":
			run.run([self.sdk.get_adb(), "start-server"], True, ignore_output=True)
		# Bail out early if an emulator is already listening on our port.
		devices = self.sdk.list_devices()
		for device in devices:
			if device.is_emulator() and device.get_port() == 5560:
				info("Emulator is running.")
				sys.exit()
		# this will create an AVD on demand or re-use existing one if already created
		if avd_name == None:
			avd_name = self.create_avd(avd_id, avd_skin, avd_abi)
		# start the emulator
		emulator_cmd = [
			self.sdk.get_emulator(),
			'-avd',
			avd_name,
			'-port',
			'5560',
			'-sdcard',
			self.get_sdcard_path(),
			'-logcat',
			'*:d,*,TiAPI:V',
			'-no-boot-anim',
			'-partition-size',
			'128' # in between nexusone and droid
		]
		if add_args:
			emulator_cmd.extend([arg.strip() for arg in add_args if len(arg.strip()) > 0])
		debug(' '.join(emulator_cmd))
		p = subprocess.Popen(emulator_cmd)
		# Ensure the emulator dies with us: kill it from any fatal signal.
		def handler(signum, frame):
			debug("signal caught: %d" % signum)
			if not p == None:
				debug("calling emulator kill on %d" % p.pid)
				if platform.system() == "Windows":
					os.system("taskkill /F /T /PID %i" % p.pid)
				else:
					os.kill(p.pid, signal.SIGTERM)
		# SIGHUP/SIGQUIT do not exist on Windows.
		if platform.system() != "Windows":
			signal.signal(signal.SIGHUP, handler)
			signal.signal(signal.SIGQUIT, handler)
		signal.signal(signal.SIGINT, handler)
		signal.signal(signal.SIGABRT, handler)
		signal.signal(signal.SIGTERM, handler)
		# give it some time to exit prematurely
		time.sleep(1)
		rc = p.poll()
		if rc != None:
			handler(3,None)
			sys.exit(rc)
		# wait for the emulator to finish
		try:
			rc = p.wait()
		except OSError:
			handler(3,None)
		info("Android Emulator has exited")
		sys.exit(rc)
def check_file_exists(self, path):
output = self.run_adb('shell', 'ls', path)
if output != None:
if output.find("No such file or directory") == -1 \
and output.find("error: device offline") == -1:
return True
return False
def is_app_installed(self):
return self.check_file_exists('/data/app/%s*.apk' % self.app_id)
def get_sdcard_path(self):
# We need to surround the sd card path in quotes for windows to account for spaces in path
if platform.system() == "Windows":
return '"' + self.sdcard + '"'
return self.sdcard
def are_resources_installed(self):
return self.check_file_exists(self.sdcard_resources+'/app.js')
def include_path(self, path, isfile):
if not isfile and os.path.basename(path) in ignoreDirs: return False
elif isfile and os.path.basename(path) in ignoreFiles: return False
return True
def warn_dupe_drawable_folders(self):
tocheck = ('high', 'medium', 'low')
image_parent = os.path.join(self.top_dir, 'Resources', 'android', 'images')
for check in tocheck:
if os.path.exists(os.path.join(image_parent, check)) and os.path.exists(os.path.join(image_parent, 'res-%sdpi' % check[0])):
warn('You have both an android/images/%s folder and an android/images/res-%sdpi folder. Files from both of these folders will end up in res/drawable-%sdpi. If two files are named the same, there is no guarantee which one will be copied last and therefore be the one the application uses. You should use just one of these folders to avoid conflicts.' % (check, check[0], check[0]))
def copy_module_platform_folders(self):
for module in self.modules:
platform_folder = os.path.join(module.path, 'platform', 'android')
if os.path.exists(platform_folder):
copy_all(platform_folder, self.project_dir, True, one_time_msg="Copying platform-specific files for '%s' module" % module.manifest.name)
def copy_commonjs_modules(self):
info('Copying CommonJS modules...')
for module in self.modules:
if module.js is None:
continue
module_name = os.path.basename(module.js)
self.non_orphans.append(module_name)
shutil.copy(module.js, self.assets_resources_dir)
def copy_project_platform_folder(self, ignore_dirs=[], ignore_files=[]):
if not os.path.exists(self.platform_dir):
return
copy_all(self.platform_dir, self.project_dir, True, ignore_dirs, ignore_files, one_time_msg="Copying platform-specific files ...")
	def copy_resource_drawables(self):
		"""Copy density-specific images from Resources/android/images into
		the project's res/drawable-* folders, renaming them to valid, unique
		Android resource names.

		Returns True when at least one drawable was copied, False otherwise.
		On incremental builds only files reported by the delta scan are
		processed, and deleted sources have their generated res/ counterparts
		removed.
		"""
		debug('Processing Android resource drawables')
		def make_resource_drawable_filename(orig):
			# Derive a legal Android resource filename from the source path:
			# lowercased, restricted to [a-z0-9_], truncated to 80 chars, plus
			# a 10-char md5 suffix so distinct sources cannot collide after
			# cleaning. Nine-patch images keep their ".9.png" extension.
			normalized = orig.replace(os.sep, "/")
			matches = re.search("/android/images/(high|medium|low|res-[^/]+)/(?P<chopped>.*$)", normalized)
			if matches and matches.groupdict() and 'chopped' in matches.groupdict():
				chopped = matches.groupdict()['chopped'].lower()
				for_hash = chopped
				if for_hash.endswith('.9.png'):
					# Hash the plain ".png" name so foo.9.png and foo.png get
					# the same digest input.
					for_hash = for_hash[:-6] + '.png'
				extension = ""
				without_extension = chopped
				if re.search("\\..*$", chopped):
					if chopped.endswith('.9.png'):
						extension = '9.png'
						without_extension = chopped[:-6]
					else:
						extension = chopped.split(".")[-1]
						without_extension = chopped[:-(len(extension)+1)]
				cleaned_without_extension = re.sub(r'[^a-z0-9_]', '_', without_extension)
				cleaned_extension = re.sub(r'[^a-z0-9\._]', '_', extension)
				result = cleaned_without_extension[:80] + "_" + hashlib.md5(for_hash).hexdigest()[:10]
				if extension:
					result += "." + extension
				return result
			else:
				trace("Regexp for resource drawable file %s failed" % orig)
				return None
		def delete_resource_drawable(orig):
			# Remove the generated res/ copy of a deleted source image.
			# Failure to delete is deliberately non-fatal.
			folder = resource_drawable_folder(orig)
			res_file = os.path.join(self.res_dir, folder, make_resource_drawable_filename(orig))
			if os.path.exists(res_file):
				try:
					trace("DELETING FILE: %s" % res_file)
					os.remove(res_file)
				except:
					warn('Unable to delete %s: %s. Execution will continue.' % (res_file, sys.exc_info()[0]))
		def copy_resource_drawable(orig):
			# Copy one source image into its res/drawable-* destination,
			# creating the destination folder on demand.
			partial_folder = resource_drawable_folder(orig)
			if not partial_folder:
				trace("Could not copy %s; resource folder not determined" % orig)
				return
			dest_folder = os.path.join(self.res_dir, partial_folder)
			dest_filename = make_resource_drawable_filename(orig)
			if dest_filename is None:
				return
			dest = os.path.join(dest_folder, dest_filename)
			if not os.path.exists(dest_folder):
				os.makedirs(dest_folder)
			trace("COPYING FILE: %s => %s" % (orig, dest))
			shutil.copy(orig, dest)
		fileset = []
		# Full walk when the delta state cannot be trusted (forced rebuild,
		# production deploy, or JS changed without fastdev); otherwise only
		# the changed files. default.png is excluded: it is handled
		# separately as the splash screen.
		if self.force_rebuild or self.deploy_type == 'production' or \
			(self.js_changed and not self.fastdev):
			for root, dirs, files in os.walk(os.path.join(self.top_dir, "Resources")):
				remove_ignored_dirs(dirs)
				for f in files:
					if f in ignoreFiles:
						continue
					path = os.path.join(root, f)
					if is_resource_drawable(path) and f != 'default.png':
						fileset.append(path)
		else:
			if self.project_deltas:
				for delta in self.project_deltas:
					path = delta.get_path()
					if is_resource_drawable(path):
						if delta.get_status() == Delta.DELETED:
							delete_resource_drawable(path)
						else:
							fileset.append(path)
		if len(fileset) == 0:
			return False
		for f in fileset:
			copy_resource_drawable(f)
		return True
def copy_project_resources(self):
info("Copying project resources..")
def validate_filenames(topdir):
for root, dirs, files in os.walk(topdir):
remove_ignored_dirs(dirs)
for d in dirs:
if d == "iphone" or d == "mobileweb":
dirs.remove(d)
for filename in files:
if filename.startswith("_"):
error("%s is an invalid filename. Android will not package assets whose filenames start with underscores. Fix and rebuild." % os.path.join(root, filename))
sys.exit(1)
resources_dir = os.path.join(self.top_dir, 'Resources')
validate_filenames(resources_dir)
android_resources_dir = os.path.join(resources_dir, 'android')
self.project_deltafy = Deltafy(resources_dir, include_callback=self.include_path)
self.project_deltas = self.project_deltafy.scan()
self.js_changed = False
tiapp_delta = self.project_deltafy.scan_single_file(self.project_tiappxml)
self.tiapp_changed = tiapp_delta is not None
full_copy = not os.path.exists(self.assets_resources_dir)
if self.tiapp_changed or self.force_rebuild or full_copy:
info("Detected change in tiapp.xml, or assets deleted. Forcing full re-build...")
# force a clean scan/copy when the tiapp.xml has changed
self.project_deltafy.clear_state()
self.project_deltas = self.project_deltafy.scan()
# rescan tiapp.xml so it doesn't show up as created next time around
self.project_deltafy.scan_single_file(self.project_tiappxml)
if self.tiapp_changed:
for root, dirs, files in os.walk(self.project_gen_dir, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
def strip_slash(s):
if s[0:1]=='/' or s[0:1]=='\\': return s[1:]
return s
def make_relative(path, relative_to, prefix=None):
relative_path = strip_slash(path[len(relative_to):])
if prefix is not None:
return os.path.join(prefix, relative_path)
return relative_path
for delta in self.project_deltas:
path = delta.get_path()
if re.search("android/images/(high|medium|low|res-[^/]+)/", path.replace(os.sep, "/")):
continue # density images are handled later
if delta.get_status() == Delta.DELETED and path.startswith(android_resources_dir):
shared_path = path.replace(android_resources_dir, resources_dir, 1)
if os.path.exists(shared_path):
dest = make_relative(shared_path, resources_dir, self.assets_resources_dir)
trace("COPYING FILE: %s => %s (platform-specific file was removed)" % (shared_path, dest))
shutil.copy(shared_path, dest)
if delta.get_status() != Delta.DELETED:
if path.startswith(android_resources_dir):
dest = make_relative(path, android_resources_dir, self.assets_resources_dir)
else:
# don't copy it if there is an android-specific file
if os.path.exists(path.replace(resources_dir, android_resources_dir, 1)):
continue
dest = make_relative(path, resources_dir, self.assets_resources_dir)
if path.startswith(os.path.join(resources_dir, "iphone")) or path.startswith(os.path.join(resources_dir, "mobileweb")) or path.startswith(os.path.join(resources_dir, "blackberry")):
continue
parent = os.path.dirname(dest)
if not os.path.exists(parent):
os.makedirs(parent)
trace("COPYING %s FILE: %s => %s" % (delta.get_status_str(), path, dest))
shutil.copy(path, dest)
if (path.startswith(resources_dir) or path.startswith(android_resources_dir)) and path.endswith(".js"):
self.js_changed = True
# copy to the sdcard in development mode
if self.sdcard_copy and self.app_installed and (self.deploy_type == 'development' or self.deploy_type == 'test'):
if path.startswith(android_resources_dir):
relative_path = make_relative(delta.get_path(), android_resources_dir)
else:
relative_path = make_relative(delta.get_path(), resources_dir)
relative_path = relative_path.replace("\\", "/")
self.run_adb('push', delta.get_path(), "%s/%s" % (self.sdcard_resources, relative_path))
if os.environ.has_key('LIVEVIEW'):
debug("LiveView enabled")
appjs = os.path.join(self.assets_resources_dir, 'app.js')
_appjs = os.path.join(self.assets_resources_dir, '_app.js')
liveviewjs = os.path.join(tempfile.gettempdir(), 'liveview.js')
self.non_orphans.append('_app.js')
if not os.path.exists(appjs):
debug('app.js not found: %s' % appjs)
if not os.path.exists(liveviewjs):
debug('liveviewjs.js not found: %s' % liveviewjs)
if os.path.exists(appjs) and os.path.exists(liveviewjs):
trace("COPYING %s => %s" % (appjs, _appjs))
shutil.copy(appjs, _appjs)
trace("COPYING %s => %s" % (liveviewjs, appjs))
shutil.copy(liveviewjs, appjs)
else:
debug('LiveView not enabled')
index_json_path = os.path.join(self.assets_dir, "index.json")
if len(self.project_deltas) > 0 or not os.path.exists(index_json_path):
requireIndex.generateJSON(self.assets_dir, index_json_path)
def check_permissions_mapping(self, key, permissions_mapping, permissions_list):
try:
perms = permissions_mapping[key]
if perms:
for perm in perms:
try:
permissions_list.index(perm)
except:
permissions_list.append(perm)
except:
pass
def generate_android_manifest(self,compiler):
self.generate_localizations()
self.remove_duplicate_res()
# NOTE: these are built-in permissions we need -- we probably need to refine when these are needed too
permissions_required = ['INTERNET','ACCESS_WIFI_STATE','ACCESS_NETWORK_STATE', 'WRITE_EXTERNAL_STORAGE']
GEO_PERMISSION = [ 'ACCESS_COARSE_LOCATION', 'ACCESS_FINE_LOCATION']
CONTACTS_READ_PERMISSION = ['READ_CONTACTS']
CONTACTS_PERMISSION = ['READ_CONTACTS', 'WRITE_CONTACTS']
CALENDAR_PERMISSION = ['READ_CALENDAR', 'WRITE_CALENDAR']
VIBRATE_PERMISSION = ['VIBRATE']
CAMERA_PERMISSION = ['CAMERA']
WALLPAPER_PERMISSION = ['SET_WALLPAPER']
# Enable mock location if in development or test mode.
if self.deploy_type == 'development' or self.deploy_type == 'test':
GEO_PERMISSION.append('ACCESS_MOCK_LOCATION')
# this is our module to permission(s) trigger - for each module on the left, require the permission(s) on the right
permissions_module_mapping = {
# GEO
'geolocation' : GEO_PERMISSION
}
# this is our module method to permission(s) trigger - for each method on the left, require the permission(s) on the right
permissions_method_mapping = {
# MAP
'Map.createView' : GEO_PERMISSION,
# MEDIA
'Media.vibrate' : VIBRATE_PERMISSION,
'Media.showCamera' : CAMERA_PERMISSION,
# CONTACTS
'Contacts.createPerson' : CONTACTS_PERMISSION,
'Contacts.removePerson' : CONTACTS_PERMISSION,
'Contacts.getAllContacts' : CONTACTS_READ_PERMISSION,
'Contacts.showContactPicker' : CONTACTS_READ_PERMISSION,
'Contacts.showContacts' : CONTACTS_READ_PERMISSION,
'Contacts.getPersonByID' : CONTACTS_READ_PERMISSION,
'Contacts.getPeopleWithName' : CONTACTS_READ_PERMISSION,
'Contacts.getAllPeople' : CONTACTS_READ_PERMISSION,
'Contacts.getAllGroups' : CONTACTS_READ_PERMISSION,
'Contacts.getGroupByID' : CONTACTS_READ_PERMISSION,
# Old CALENDAR
'Android.Calendar.getAllAlerts' : CALENDAR_PERMISSION,
'Android.Calendar.getAllCalendars' : CALENDAR_PERMISSION,
'Android.Calendar.getCalendarById' : CALENDAR_PERMISSION,
'Android.Calendar.getSelectableCalendars' : CALENDAR_PERMISSION,
# CALENDAR
'Calendar.getAllAlerts' : CALENDAR_PERMISSION,
'Calendar.getAllCalendars' : CALENDAR_PERMISSION,
'Calendar.getCalendarById' : CALENDAR_PERMISSION,
'Calendar.getSelectableCalendars' : CALENDAR_PERMISSION,
# WALLPAPER
'Media.Android.setSystemWallpaper' : WALLPAPER_PERMISSION,
}
VIDEO_ACTIVITY = """<activity
android:name="ti.modules.titanium.media.TiVideoActivity"
android:configChanges="keyboardHidden|orientation"
android:theme="@android:style/Theme.NoTitleBar.Fullscreen"
android:launchMode="singleTask"
/>"""
MAP_ACTIVITY = """<activity
android:name="ti.modules.titanium.map.TiMapActivity"
android:configChanges="keyboardHidden|orientation"
android:launchMode="singleTask"
/>
<uses-library android:name="com.google.android.maps" />"""
CAMERA_ACTIVITY = """<activity
android:name="ti.modules.titanium.media.TiCameraActivity"
android:configChanges="keyboardHidden|orientation"
android:theme="@android:style/Theme.Translucent.NoTitleBar.Fullscreen"
/>"""
activity_mapping = {
# MEDIA
'Media.createVideoPlayer' : VIDEO_ACTIVITY,
'Media.showCamera' : CAMERA_ACTIVITY,
# MAPS
'Map.createView' : MAP_ACTIVITY,
}
# this is a map of our APIs to ones that require Google APIs to be available on the device
google_apis = {
"Map.createView" : True
}
activities = []
# figure out which permissions we need based on the used module
for mod in compiler.modules:
self.check_permissions_mapping(mod, permissions_module_mapping, permissions_required)
# figure out which permissions we need based on the used module methods
for mn in compiler.module_methods:
self.check_permissions_mapping(mn, permissions_method_mapping, permissions_required)
try:
mappings = activity_mapping[mn]
try:
if google_apis[mn] and not self.google_apis_supported:
warn("Google APIs detected but a device has been selected that doesn't support them. The API call to Titanium.%s will fail using '%s'" % (mn,my_avd['name']))
continue
except:
pass
try:
activities.index(mappings)
except:
activities.append(mappings)
except:
pass
# Javascript-based activities defined in tiapp.xml
if self.tiapp and self.tiapp.android and 'activities' in self.tiapp.android:
tiapp_activities = self.tiapp.android['activities']
for key in tiapp_activities:
activity = tiapp_activities[key]
if not 'url' in activity:
continue
activity_name = self.app_id + '.' + activity['classname']
activity_str = '<activity \n\t\t\tandroid:name="%s"' % activity_name
for subkey in activity:
if subkey not in ('nodes', 'name', 'url', 'options', 'classname', 'android:name'):
activity_str += '\n\t\t\t%s="%s"' % (subkey, activity[subkey])
if 'android:config' not in activity:
activity_str += '\n\t\t\tandroid:configChanges="keyboardHidden|orientation"'
if 'nodes' in activity:
activity_str += '>'
for node in activity['nodes']:
activity_str += '\n\t\t\t\t' + node.toxml()
activities.append(activity_str + '\n\t\t</activity>\n')
else:
activities.append(activity_str + '\n\t\t/>\n')
activities = set(activities)
services = []
# Javascript-based services defined in tiapp.xml
if self.tiapp and self.tiapp.android and 'services' in self.tiapp.android:
tiapp_services = self.tiapp.android['services']
for key in tiapp_services:
service = tiapp_services[key]
if not 'url' in service:
continue
service_name = self.app_id + '.' + service['classname']
service_str = '<service \n\t\t\tandroid:name="%s"' % service_name
for subkey in service:
if subkey not in ('nodes', 'service_type', 'type', 'name', 'url', 'options', 'classname', 'android:name'):
service_str += '\n\t\t\t%s="%s"' % (subkey, service[subkey])
if 'nodes' in service:
service_str += '>'
for node in service['nodes']:
service_str += '\n\t\t\t\t' + node.toxml()
services.append(service_str + '\n\t\t</service>\n')
else:
services.append(service_str + '\n\t\t/>\n')
self.use_maps = False
self.res_changed = False
icon_name = self.tiapp.properties['icon']
icon_path = os.path.join(self.assets_resources_dir, icon_name)
icon_ext = os.path.splitext(icon_path)[1]
res_drawable_dest = os.path.join(self.project_dir, 'res', 'drawable')
if not os.path.exists(res_drawable_dest):
os.makedirs(res_drawable_dest)
default_icon = os.path.join(self.support_resources_dir, 'default.png')
dest_icon = os.path.join(res_drawable_dest, 'appicon%s' % icon_ext)
if Deltafy.needs_update(icon_path, dest_icon):
self.res_changed = True
debug("copying app icon: %s" % icon_path)
shutil.copy(icon_path, dest_icon)
elif Deltafy.needs_update(default_icon, dest_icon):
self.res_changed = True
debug("copying default app icon")
shutil.copy(default_icon, dest_icon)
# make our Titanium theme for our icon
res_values_dir = os.path.join(self.project_dir, 'res','values')
if not os.path.exists(res_values_dir):
os.makedirs(res_values_dir)
theme_xml = os.path.join(res_values_dir,'theme.xml')
if not os.path.exists(theme_xml):
self.res_changed = True
debug('generating theme.xml')
theme_file = open(theme_xml, 'w')
theme_flags = "Theme"
# We need to treat the default values for fulscreen and
# navbar-hidden the same as android.py does -- false for both.
theme_fullscreen = False
theme_navbarhidden = False
if (self.tiapp.properties.get("fullscreen") == "true" or
self.tiapp.properties.get("statusbar-hidden") == "true"):
theme_fullscreen = True
elif self.tiapp.properties.get("navbar-hidden") == "true":
theme_navbarhidden = True
if theme_fullscreen:
theme_flags += ".NoTitleBar.Fullscreen"
elif theme_navbarhidden:
theme_flags += ".NoTitleBar"
# Wait, one exception. If you want the notification area (very
# top of screen) hidden, but want the title bar in the app,
# there's no theme for that. So we have to use the default theme (no flags)
# and when the application code starts running, the adjustments are then made.
# Only do this when the properties are explicitly set, so as to avoid changing
# old default behavior.
if theme_flags.endswith('.Fullscreen') and \
self.tiapp.properties.get("navbar-hidden") == 'false' and \
('fullscreen' in self.tiapp.explicit_properties or \
'statusbar-hidden' in self.tiapp.explicit_properties) and \
'navbar-hidden' in self.tiapp.explicit_properties:
theme_flags = 'Theme'
TITANIUM_THEME="""<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="Theme.Titanium" parent="android:%s">
<item name="android:windowBackground">@drawable/background</item>
</style>
</resources>
""" % theme_flags
theme_file.write(TITANIUM_THEME)
theme_file.close()
# create our background image which acts as splash screen during load
resources_dir = os.path.join(self.top_dir, 'Resources')
android_images_dir = os.path.join(resources_dir, 'android', 'images')
# look for density-specific default.png's first
if os.path.exists(android_images_dir):
pattern = r'/android/images/(high|medium|low|res-[^/]+)/default.png'
for root, dirs, files in os.walk(android_images_dir):
remove_ignored_dirs(dirs)
for f in files:
if f in ignoreFiles:
continue
path = os.path.join(root, f)
if re.search(pattern, path.replace(os.sep, "/")):
res_folder = resource_drawable_folder(path)
debug('found %s splash screen at %s' % (res_folder, path))
dest_path = os.path.join(self.res_dir, res_folder)
dest_file = os.path.join(dest_path, 'background.png')
if not os.path.exists(dest_path):
os.makedirs(dest_path)
if Deltafy.needs_update(path, dest_file):
self.res_changed = True
debug('copying %s splash screen to %s' % (path, dest_file))
shutil.copy(path, dest_file)
default_png = os.path.join(self.assets_resources_dir, 'default.png')
support_default_png = os.path.join(self.support_resources_dir, 'default.png')
background_png = os.path.join(self.project_dir, 'res','drawable','background.png')
if os.path.exists(default_png) and Deltafy.needs_update(default_png, background_png):
self.res_changed = True
debug("found splash screen at %s" % os.path.abspath(default_png))
shutil.copy(default_png, background_png)
elif Deltafy.needs_update(support_default_png, background_png):
self.res_changed = True
debug("copying default splash screen")
shutil.copy(support_default_png, background_png)
android_manifest = os.path.join(self.project_dir, 'AndroidManifest.xml')
android_manifest_to_read = android_manifest
# NOTE: allow the user to use their own custom AndroidManifest if they put a file named
# AndroidManifest.xml in platform/android, in which case all bets are off
is_custom = False
# Catch people who may have it in project root (un-released 1.4.x android_native_refactor branch users)
if os.path.exists(os.path.join(self.top_dir, 'AndroidManifest.xml')):
warn('AndroidManifest.xml file in the project root is ignored. Move it to platform/android if you want it to be your custom manifest.')
android_custom_manifest = os.path.join(self.project_dir, 'AndroidManifest.custom.xml')
if not os.path.exists(android_custom_manifest):
android_custom_manifest = os.path.join(self.platform_dir, 'AndroidManifest.xml')
else:
warn('Use of AndroidManifest.custom.xml is deprecated. Please put your custom manifest as "AndroidManifest.xml" in the "platform/android" directory if you do not need to compile for versions < 1.5')
if os.path.exists(android_custom_manifest):
android_manifest_to_read = android_custom_manifest
is_custom = True
info("Detected custom ApplicationManifest.xml -- no Titanium version migration supported")
default_manifest_contents = self.android.render_android_manifest()
if self.sdk.api_level >= HONEYCOMB_MR2_LEVEL:
# Need to add "screenSize" in our default "configChanges" attribute on
# <activity> elements, else changes in orientation will cause the app
# to restart. cf. TIMOB-10863.
default_manifest_contents = default_manifest_contents.replace('|orientation"', '|orientation|screenSize"')
debug("Added 'screenSize' to <activity android:configChanges> because targeted api level %s is >= %s" % (self.sdk.api_level, HONEYCOMB_MR2_LEVEL))
custom_manifest_contents = None
if is_custom:
custom_manifest_contents = open(android_manifest_to_read,'r').read()
manifest_xml = ''
def get_manifest_xml(tiapp, template_obj=None):
xml = ''
if 'manifest' in tiapp.android_manifest:
for manifest_el in tiapp.android_manifest['manifest']:
# since we already track permissions in another way, go ahead and us e that
if manifest_el.nodeName == 'uses-permission' and manifest_el.hasAttribute('android:name'):
if manifest_el.getAttribute('android:name').split('.')[-1] not in permissions_required:
perm_val = manifest_el.getAttribute('android:name')
if template_obj is not None and "${" in perm_val:
perm_val = render_template_with_tiapp(perm_val, template_obj)
permissions_required.append(perm_val)
elif manifest_el.nodeName not in ('supports-screens', 'uses-sdk'):
this_xml = manifest_el.toprettyxml()
if template_obj is not None and "${" in this_xml:
this_xml = render_template_with_tiapp(this_xml, template_obj)
xml += this_xml
return xml
application_xml = ''
def get_application_xml(tiapp, template_obj=None):
xml = ''
if 'application' in tiapp.android_manifest:
for app_el in tiapp.android_manifest['application']:
this_xml = app_el.toxml()
if template_obj is not None and "${" in this_xml:
this_xml = render_template_with_tiapp(this_xml, template_obj)
xml += this_xml
return xml
# add manifest / application entries from tiapp.xml
manifest_xml += get_manifest_xml(self.tiapp)
application_xml += get_application_xml(self.tiapp)
# add manifest / application entries from modules
for module in self.modules:
if module.xml == None: continue
manifest_xml += get_manifest_xml(module.xml, self.tiapp)
application_xml += get_application_xml(module.xml, self.tiapp)
# build the permissions XML based on the permissions detected
permissions_required = set(permissions_required)
permissions_required_xml = ""
for p in permissions_required:
if '.' not in p:
permissions_required_xml+="<uses-permission android:name=\"android.permission.%s\"/>\n\t" % p
else:
permissions_required_xml+="<uses-permission android:name=\"%s\"/>\n\t" % p
def fill_manifest(manifest_source):
ti_activities = '<!-- TI_ACTIVITIES -->'
ti_permissions = '<!-- TI_PERMISSIONS -->'
ti_manifest = '<!-- TI_MANIFEST -->'
ti_application = '<!-- TI_APPLICATION -->'
ti_services = '<!-- TI_SERVICES -->'
manifest_source = manifest_source.replace(ti_activities,"\n\n\t\t".join(activities))
manifest_source = manifest_source.replace(ti_services,"\n\n\t\t".join(services))
manifest_source = manifest_source.replace(ti_permissions,permissions_required_xml)
if len(manifest_xml) > 0:
manifest_source = manifest_source.replace(ti_manifest, manifest_xml)
if len(application_xml) > 0:
manifest_source = manifest_source.replace(ti_application, application_xml)
return manifest_source
default_manifest_contents = fill_manifest(default_manifest_contents)
# if a custom uses-sdk or supports-screens has been specified via tiapp.xml
# <android><manifest>..., we need to replace the ones in the generated
# default manifest
supports_screens_node = None
uses_sdk_node = None
if 'manifest' in self.tiapp.android_manifest:
for node in self.tiapp.android_manifest['manifest']:
if node.nodeName == 'uses-sdk':
uses_sdk_node = node
elif node.nodeName == 'supports-screens':
supports_screens_node = node
if supports_screens_node or uses_sdk_node or ('manifest-attributes' in self.tiapp.android_manifest and self.tiapp.android_manifest['manifest-attributes'].length) or ('application-attributes' in self.tiapp.android_manifest and self.tiapp.android_manifest['application-attributes'].length):
dom = parseString(default_manifest_contents)
def replace_node(olddom, newnode):
nodes = olddom.getElementsByTagName(newnode.nodeName)
retval = False
if nodes:
olddom.documentElement.replaceChild(newnode, nodes[0])
retval = True
return retval
if supports_screens_node:
if not replace_node(dom, supports_screens_node):
dom.documentElement.insertBefore(supports_screens_node, dom.documentElement.firstChild.nextSibling)
if uses_sdk_node:
replace_node(dom, uses_sdk_node)
def set_attrs(element, new_attr_set):
for k in new_attr_set.keys():
if element.hasAttribute(k):
element.removeAttribute(k)
element.setAttribute(k, new_attr_set.get(k).value)
if 'manifest-attributes' in self.tiapp.android_manifest and self.tiapp.android_manifest['manifest-attributes'].length:
set_attrs(dom.documentElement, self.tiapp.android_manifest['manifest-attributes'])
if 'application-attributes' in self.tiapp.android_manifest and self.tiapp.android_manifest['application-attributes'].length:
set_attrs(dom.getElementsByTagName('application')[0], self.tiapp.android_manifest['application-attributes'])
default_manifest_contents = dom.toxml()
if application_xml:
# If the tiapp.xml <manifest><application> section was not empty, it could be
# that user put in <activity> entries that duplicate our own,
# such as if they want a custom theme on TiActivity. So we should delete any dupes.
dom = parseString(default_manifest_contents)
package_name = dom.documentElement.getAttribute('package')
manifest_activities = dom.getElementsByTagName('activity')
activity_names = []
nodes_to_delete = []
for manifest_activity in manifest_activities:
if manifest_activity.hasAttribute('android:name'):
activity_name = manifest_activity.getAttribute('android:name')
if activity_name.startswith('.'):
activity_name = package_name + activity_name
if activity_name in activity_names:
nodes_to_delete.append(manifest_activity)
else:
activity_names.append(activity_name)
if nodes_to_delete:
for node_to_delete in nodes_to_delete:
node_to_delete.parentNode.removeChild(node_to_delete)
default_manifest_contents = dom.toxml()
if custom_manifest_contents:
custom_manifest_contents = fill_manifest(custom_manifest_contents)
new_manifest_contents = None
android_manifest_gen = android_manifest + '.gen'
if custom_manifest_contents:
new_manifest_contents = custom_manifest_contents
# Write the would-be default as well so user can see
# some of the auto-gen'd insides of it if they need/want.
amf = open(android_manifest + '.gen', 'w')
amf.write(default_manifest_contents)
amf.close()
else:
new_manifest_contents = default_manifest_contents
if os.path.exists(android_manifest_gen):
os.remove(android_manifest_gen)
manifest_changed = False
old_contents = None
if os.path.exists(android_manifest):
old_contents = open(android_manifest, 'r').read()
if new_manifest_contents != old_contents:
trace("Writing out AndroidManifest.xml")
amf = open(android_manifest,'w')
amf.write(new_manifest_contents)
amf.close()
manifest_changed = True
if self.res_changed or manifest_changed:
res_dir = os.path.join(self.project_dir, 'res')
output = run.run([self.aapt, 'package', '-m',
'-J', self.project_gen_dir,
'-M', android_manifest,
'-S', res_dir,
'-I', self.android_jar], warning_regex=r'skipping')
r_file = os.path.join(self.project_gen_dir, self.app_id.replace('.', os.sep), 'R.java')
if not os.path.exists(r_file) or (self.res_changed and output == None):
error("Error generating R.java from manifest")
sys.exit(1)
return manifest_changed
def generate_stylesheet(self):
    """Generate gen/<app package>/ApplicationStylesheet.java from the project's
    .jss files, skipping the work when no .jss file is newer than the
    previously generated stylesheet.
    """
    update_stylesheet = False
    resources_dir = os.path.join(self.top_dir, 'Resources')
    project_gen_pkg_dir = os.path.join(self.project_gen_dir, self.app_id.replace('.', os.sep))
    app_stylesheet = os.path.join(project_gen_pkg_dir, 'ApplicationStylesheet.java')
    if not os.path.exists(app_stylesheet):
        # Never generated before -- must build it.
        update_stylesheet = True
    else:
        # Regenerate if any .jss under Resources is newer than the output.
        for root, dirs, files in os.walk(resources_dir, True, None, True):
            remove_ignored_dirs(dirs)
            for f in files:
                if f in ignoreFiles:
                    continue
                if f.endswith(".jss"):
                    absolute_path = os.path.join(root, f)
                    if Deltafy.needs_update(absolute_path, app_stylesheet):
                        update_stylesheet = True
                        # NOTE: only breaks the inner file loop; the walk
                        # continues, but update_stylesheet stays True.
                        break
    if not update_stylesheet:
        return
    cssc = csscompiler.CSSCompiler(resources_dir, 'android', self.app_id)
    if not os.path.exists(project_gen_pkg_dir):
        os.makedirs(project_gen_pkg_dir)
    debug("app stylesheet => %s" % app_stylesheet)
    # The compiler exposes the generated Java source as .code.
    asf = codecs.open(app_stylesheet, 'w', 'utf-8')
    asf.write(cssc.code)
    asf.close()
def generate_localizations(self):
    """Compile i18n locale files for Android, then sanitize every res XML file
    that contains <string> nodes: escape un-escaped quotes inside string
    values and drop <string> entries whose name duplicates an earlier one.
    """
    # compile localization files
    localecompiler.LocaleCompiler(self.name,self.top_dir,'android',sys.argv[1]).compile()
    # fix un-escaped single-quotes and full-quotes
    # remove duplicate strings since we merge strings.xml from /i18n/ and /platform/android/res/values (TIMOB-12663)
    # Matches a quote that is not preceded by a backslash.
    offending_pattern = '[^\\\\][\'"]'
    for root, dirs, files in os.walk(self.res_dir):
        remove_ignored_dirs(dirs)
        for filename in files:
            if filename in ignoreFiles or not filename.endswith('.xml'):
                continue
            string_name_list = [] #keeps track of the string names
            full_path = os.path.join(root, filename)
            f = codecs.open(full_path, 'r', 'utf-8')
            contents = f.read()
            f.close()
            # Cheap pre-check before paying for a DOM parse.
            if not re.search(r"<string ", contents):
                continue
            doc = parseString(contents.encode("utf-8"))
            string_nodes = doc.getElementsByTagName('string')
            resources_node = doc.getElementsByTagName('resources')[0]
            if len(string_nodes) == 0:
                continue
            made_change = False
            for string_node in string_nodes:
                name = string_node.getAttribute('name')
                # Remove the string node with the duplicate names
                if name in string_name_list:
                    resources_node.removeChild(string_node)
                    made_change = True
                    debug('Removed duplicate string [%s] from %s' %(name, full_path))
                else:
                    string_name_list.append(name)
                if not string_node.hasChildNodes():
                    continue
                string_child = string_node.firstChild
                if string_child.nodeType == string_child.CDATA_SECTION_NODE or string_child.nodeType == string_child.TEXT_NODE:
                    string_value = string_child.nodeValue
                    if not re.search(offending_pattern, string_value):
                        continue
                    offenders = re.findall(offending_pattern, string_value)
                    if offenders:
                        # Re-insert each match with a backslash before the quote
                        # (the match includes the preceding character).
                        for offender in offenders:
                            string_value = string_value.replace(offender, offender[0] + "\\" + offender[-1:])
                            made_change = True
                        string_child.nodeValue = string_value
            # Only rewrite the file when something actually changed.
            if made_change:
                new_contents = doc.toxml()
                f = codecs.open(full_path, 'w', 'utf-8')
                f.write(new_contents)
                f.close()
def remove_duplicate_res(self):
    """Strip duplicate value nodes from every known Android res XML file under
    self.res_dir. strings.xml is skipped here because it is already
    de-duplicated by generate_localizations().
    """
    checked_node_names = ["string", "bool", "color", "dimen", "item", "integer",
        "array", "integer-array", "string-array", "declare-styleable", "attr", "style"]
    for root, dirs, files in os.walk(self.res_dir):
        remove_ignored_dirs(dirs)
        for res_name in files:
            # "strings.xml" is checked in generate_localizations()
            if res_name in resourceFiles and res_name != "strings.xml":
                remove_duplicate_nodes_in_res_file(os.path.join(root, res_name), checked_node_names)
def recurse(self, paths, file_glob=None):
    """Yield every non-ignored file under each directory in *paths*.

    paths may be a single directory or a list of directories. When
    file_glob is given, only filenames matching that fnmatch pattern are
    yielded. A None *paths* yields a single None (historical contract).
    """
    if paths == None:
        # Preserve the historical "yield None" behavior, but stop there:
        # previously execution fell through, so a caller iterating past the
        # first item would end up calling os.walk(None).
        yield None
        return
    if not isinstance(paths, list):
        paths = [paths]
    for path in paths:
        for root, dirs, files in os.walk(path):
            remove_ignored_dirs(dirs)
            for filename in files:
                if filename in ignoreFiles:
                    continue
                if file_glob != None and not fnmatch.fnmatch(filename, file_glob):
                    continue
                yield os.path.join(root, filename)
def generate_aidl(self):
    """Compile every *.aidl file under the project src dir with the Android
    aidl tool, emitting the generated Java into the project gen dir.

    Supports android remote interfaces in platform/android/src.
    """
    tool = self.sdk.get_aidl()
    framework_aidl = self.sdk.platform_path('framework.aidl')
    for aidl_file in self.recurse(self.project_src_dir, '*.aidl'):
        # -p preprocessed system types, -I import path, -o output dir
        run.run([tool, '-p' + framework_aidl, '-I' + self.project_src_dir,
            '-o' + self.project_gen_dir, aidl_file])
def build_generated_classes(self):
    """Compile all project .java sources (src + gen) that are newer than their
    .class counterparts.

    Side effect: populates self.module_jars from the detected modules.
    Returns True if javac ran, False if everything was up to date.
    Exits the process when javac reports errors.
    """
    src_list = []
    self.module_jars = []
    # Classpath starts with android.jar plus the internal Titanium jars.
    classpath = os.pathsep.join([self.android_jar, os.pathsep.join(self.android_jars)])
    project_module_dir = os.path.join(self.top_dir,'modules','android')
    for module in self.modules:
        if module.jar == None: continue
        self.module_jars.append(module.jar)
        classpath = os.pathsep.join([classpath, module.jar])
        # Modules may bundle extra jars under their lib/ folder.
        module_lib = module.get_resource('lib')
        for jar in glob.glob(os.path.join(module_lib, '*.jar')):
            self.module_jars.append(jar)
            classpath = os.pathsep.join([classpath, jar])
    if len(self.module_jars) > 0:
        # kroll-apt.jar is needed for modules
        classpath = os.pathsep.join([classpath, self.kroll_apt_jar])
    classpath = os.pathsep.join([classpath, os.path.join(self.support_dir, 'lib', 'titanium-verify.jar')])
    if self.deploy_type != 'production':
        # Debug/profiler support is only compiled in for non-production builds.
        classpath = os.pathsep.join([classpath, os.path.join(self.support_dir, 'lib', 'titanium-debug.jar')])
        classpath = os.pathsep.join([classpath, os.path.join(self.support_dir, 'lib', 'titanium-profiler.jar')])
    # Collect only sources newer than their compiled class files.
    for java_file in self.recurse([self.project_src_dir, self.project_gen_dir], '*.java'):
        if self.project_src_dir in java_file:
            relative_path = java_file[len(self.project_src_dir)+1:]
        else:
            relative_path = java_file[len(self.project_gen_dir)+1:]
        class_file = os.path.join(self.classes_dir, relative_path.replace('.java', '.class'))
        if Deltafy.needs_update(java_file, class_file) > 0:
            # the file list file still needs each file escaped apparently
            debug("adding %s to javac build list" % java_file)
            src_list.append('"%s"' % java_file.replace("\\", "\\\\"))
    if len(src_list) == 0:
        # No sources are older than their classfile counterparts, we can skip javac / dex
        return False
    debug("Building Java Sources: " + " ".join(src_list))
    javac_command = [self.javac, '-encoding', 'utf8',
        '-classpath', classpath, '-d', self.classes_dir, '-proc:none',
        '-sourcepath', self.project_src_dir,
        '-sourcepath', self.project_gen_dir, '-target', '1.6', '-source', '1.6']
    # Pass the (potentially huge) source list via an @-file to avoid
    # command-line length limits.
    (src_list_osfile, src_list_filename) = tempfile.mkstemp()
    src_list_file = os.fdopen(src_list_osfile, 'w')
    src_list_file.write("\n".join(src_list))
    src_list_file.close()
    javac_command.append('@' + src_list_filename)
    (out, err, javac_process) = run.run(javac_command, ignore_error=True, return_error=True, return_process=True)
    os.remove(src_list_filename)
    if javac_process.returncode != 0:
        error("Error(s) compiling generated Java code")
        error(str(err))
        sys.exit(1)
    return True
def create_unsigned_apk(self, resources_zip_file, webview_js_files=None):
    """Assemble bin/app-unsigned.apk from the aapt resource zip, classes.dex,
    project resource files, module/internal jar resources and native libs.

    webview_js_files lists .js files that must be packaged even when JS is
    being compiled in (they are referenced from HTML). Sets self.apk_updated
    when anything newer than the previous APK was written. Returns the path
    to the unsigned APK.
    """
    unsigned_apk = os.path.join(self.project_dir, 'bin', 'app-unsigned.apk')
    self.apk_updated = False
    apk_modified = None
    if os.path.exists(unsigned_apk):
        apk_modified = Deltafy.get_modified_datetime(unsigned_apk)
    debug("creating unsigned apk: " + unsigned_apk)
    # copy existing resources into the APK
    apk_zip = zipfile.ZipFile(unsigned_apk, 'w', zipfile.ZIP_DEFLATED)
    def skip_jar_path(path):
        # True for entries that must not be copied from jars/zips into the
        # APK (directories, signatures, hidden files, compiled classes,
        # binding metadata, tiapp xml). Falls through returning None
        # (falsy) for entries that should be kept.
        ext = os.path.splitext(path)[1]
        if path.endswith('/'): return True
        if path.startswith('META-INF/'): return True
        if path.split('/')[-1].startswith('.'): return True
        if ext == '.class': return True
        if 'org/appcelerator/titanium/bindings' in path and ext == '.json': return True
        if 'tiapp' in path and ext =='.xml': return True
    def skip_js_file(path):
        # When JS is compiled into the app, omit .js assets unless they are
        # needed by a webview (present in webview_js_files).
        return self.compile_js is True and \
            os.path.splitext(path)[1] == '.js' and \
            os.path.join(self.project_dir, "bin", path) not in webview_js_files
    def compression_type(path):
        ext = os.path.splitext(path)[1]
        if ext in uncompressed_types:
            return zipfile.ZIP_STORED
        return zipfile.ZIP_DEFLATED
    def zipinfo(path):
        info = zipfile.ZipInfo(path)
        info.compress_type = compression_type(path)
        return info
    def is_modified(path):
        # Anything is "modified" when there is no previous APK to compare to.
        return apk_modified is None or Deltafy.needs_update_timestamp(path, apk_modified)
    def zip_contains(zip, entry):
        try:
            zip.getinfo(entry)
        except:
            return False
        return True
    if is_modified(resources_zip_file):
        self.apk_updated = True
        resources_zip = zipfile.ZipFile(resources_zip_file)
        for path in resources_zip.namelist():
            if skip_jar_path(path) or skip_js_file(path): continue
            debug("from resource zip => " + path)
            apk_zip.writestr(zipinfo(path), resources_zip.read(path))
        resources_zip.close()
    # add classes.dex
    if is_modified(self.classes_dex) or not zip_contains(apk_zip, 'classes.dex'):
        apk_zip.write(self.classes_dex, 'classes.dex')
    # add all resource files from the project
    for root, dirs, files in os.walk(self.project_src_dir, True, None, True):
        remove_ignored_dirs(dirs)
        for f in files:
            if f in ignoreFiles:
                continue
            if os.path.splitext(f)[1] != '.java':
                absolute_path = os.path.join(root, f)
                relative_path = os.path.join(root[len(self.project_src_dir)+1:], f)
                if is_modified(absolute_path) or not zip_contains(apk_zip, relative_path):
                    self.apk_updated = True
                    debug("resource file => " + relative_path)
                    apk_zip.write(os.path.join(root, f), relative_path, compression_type(f))
    def add_resource_jar(jar_file):
        # Copy a jar's non-class resources into the APK.
        jar = zipfile.ZipFile(jar_file)
        for path in jar.namelist():
            if skip_jar_path(path): continue
            debug("from JAR %s => %s" % (jar_file, path))
            apk_zip.writestr(zipinfo(path), jar.read(path))
        jar.close()
    for jar_file in self.module_jars:
        add_resource_jar(jar_file)
    for jar_file in self.android_jars:
        add_resource_jar(jar_file)
    def add_native_libs(libs_dir, exclude=[]):
        # Package <libs_dir>/<abi>/*.so as lib/<abi>/*.so for each
        # supported ABI, skipping names listed in exclude.
        # NOTE(review): mutable default arg -- safe here only because the
        # function never mutates `exclude`.
        if os.path.exists(libs_dir):
            for abi_dir in os.listdir(libs_dir):
                if abi_dir not in self.abis:
                    continue
                libs_abi_dir = os.path.join(libs_dir, abi_dir)
                if not os.path.isdir(libs_abi_dir): continue
                for file in os.listdir(libs_abi_dir):
                    if file.endswith('.so') and file not in exclude:
                        native_lib = os.path.join(libs_abi_dir, file)
                        path_in_zip = '/'.join(['lib', abi_dir, file])
                        if is_modified(native_lib) or not zip_contains(apk_zip, path_in_zip):
                            self.apk_updated = True
                            debug("installing native lib: %s" % native_lib)
                            apk_zip.write(native_lib, path_in_zip)
    # add module native libraries
    for module in self.modules:
        exclude_libs = []
        add_native_libs(module.get_resource('libs'), exclude_libs)
    # add any native libraries : libs/**/*.so -> lib/**/*.so
    add_native_libs(os.path.join(self.project_dir, 'libs'))
    # add sdk runtime native libraries
    debug("installing native SDK libs")
    sdk_native_libs = os.path.join(template_dir, 'native', 'libs')
    for abi in self.abis:
        lib_source_dir = os.path.join(sdk_native_libs, abi)
        lib_dest_dir = 'lib/%s/' % abi
        # libtiverify is always included
        apk_zip.write(os.path.join(lib_source_dir, 'libtiverify.so'), lib_dest_dir + 'libtiverify.so')
        # profiler
        apk_zip.write(os.path.join(lib_source_dir, 'libtiprofiler.so'), lib_dest_dir + 'libtiprofiler.so')
        for fname in ('libkroll-v8.so', 'libstlport_shared.so'):
            apk_zip.write(os.path.join(lib_source_dir, fname), lib_dest_dir + fname)
    # SDK native libs are unconditionally rewritten, so the APK changed.
    self.apk_updated = True
    apk_zip.close()
    return unsigned_apk
def run_adb(self, *args):
    """Run adb with the configured device/emulator arguments followed by
    *args*; return the command's output (None on failure, per run.run)."""
    command = [self.sdk.get_adb()] + list(self.device_args) + list(args)
    return run.run(command)
def get_sigalg(self):
    """Return the signature algorithm name of the configured keystore alias,
    as reported by keytool, falling back to "MD5withRSA" when keytool does
    not print one. Bails out (via check_output_for_error) on invalid
    keychain info."""
    keytool_command = [self.keytool,
        '-v',
        '-list',
        '-keystore', self.keystore,
        '-storepass', self.keystore_pass,
        '-alias', self.keystore_alias
    ]
    # Argument 6 is the store password -- keep it out of the logs.
    output = run.run(keytool_command, protect_arg_positions=(6,))
    # If the keytool encounters an error, that means some of the provided
    # keychain info is invalid and we should bail anyway
    run.check_output_for_error(output, r'RuntimeException: (.*)', True)
    run.check_output_for_error(output, r'^keytool: (.*)', True)
    match = re.search(r'Signature algorithm name: (.*)', output)
    if match is None:
        # Return the default:
        return "MD5withRSA"
    return match.group(1)
def package_and_deploy(self):
    """Package resources with aapt, build and sign the APK, zipalign it and
    (unless building only / distributing) install it on the device or
    emulator.

    Returns (launched, launch_failed). Exits the process after packaging
    when self.dist_dir is set (distribution build), or on signing/zipalign
    errors.
    """
    # If in production mode and compiling JS, we do not package the JS
    # files as assets (we protect them from prying eyes). But if a JS
    # file is referenced in an html <script> tag, we DO need to package it.
    def get_js_referenced_in_html():
        # Scan every .html asset and collect absolute paths of local .js
        # files it references via <script> tags.
        js_files = []
        for root, dirs, files in os.walk(self.assets_dir):
            for one_file in files:
                if one_file.lower().endswith(".html"):
                    full_path = os.path.join(root, one_file)
                    html_source = None
                    file_stream = None
                    try:
                        file_stream = open(full_path, "r")
                        html_source = file_stream.read()
                    except:
                        error("Unable to read html file '%s'" % full_path)
                    finally:
                        file_stream.close()
                    if html_source:
                        parser = HTMLParser()
                        parser.parse(html_source)
                        relative_js_files = parser.get_referenced_js_files()
                        if relative_js_files:
                            for one_rel_js_file in relative_js_files:
                                # Remote scripts are not packaged.
                                if one_rel_js_file.startswith("http:") or one_rel_js_file.startswith("https:"):
                                    continue
                                if one_rel_js_file.startswith("app://"):
                                    one_rel_js_file = one_rel_js_file[6:]
                                js_files.append(os.path.abspath(os.path.join(os.path.dirname(full_path), one_rel_js_file)))
        return js_files
    ap_ = os.path.join(self.project_dir, 'bin', 'app.ap_')
    # This is only to check if this has been overridden in production
    has_compile_js = self.tiappxml.has_app_property("ti.android.compilejs")
    compile_js = not has_compile_js or (has_compile_js and \
        self.tiappxml.to_bool(self.tiappxml.get_app_property('ti.android.compilejs')))
    # JS files referenced in html files and thus likely needed for webviews.
    webview_js_files = []
    pkg_assets_dir = self.assets_dir
    if self.deploy_type == "test":
        compile_js = False
    if compile_js and os.environ.has_key('SKIP_JS_MINIFY'):
        compile_js = False
        info("Disabling JavaScript minification")
    if self.deploy_type == "production" and compile_js:
        webview_js_files = get_js_referenced_in_html()
        # Mirror the assets without .js files; those are compiled in.
        non_js_assets = os.path.join(self.project_dir, 'bin', 'non-js-assets')
        if not os.path.exists(non_js_assets):
            os.mkdir(non_js_assets)
        copy_all(self.assets_dir, non_js_assets, ignore_exts=['.js'])
        # if we have any js files referenced in html, we *do* need
        # to package them as if they are non-js assets.
        if webview_js_files:
            for one_js_file in webview_js_files:
                if os.path.exists(one_js_file):
                    dest_file = one_js_file.replace(self.assets_dir, non_js_assets, 1)
                    if not os.path.exists(os.path.dirname(dest_file)):
                        os.makedirs(os.path.dirname(dest_file))
                    shutil.copyfile(one_js_file, dest_file)
        pkg_assets_dir = non_js_assets
    # Package manifest, assets and res into the intermediate .ap_ file.
    run.run([self.aapt, 'package', '-f', '-M', 'AndroidManifest.xml', '-A', pkg_assets_dir,
        '-S', 'res', '-I', self.android_jar, '-I', self.titanium_jar, '-F', ap_], warning_regex=r'skipping')
    unsigned_apk = self.create_unsigned_apk(ap_, webview_js_files)
    if self.dist_dir:
        app_apk = os.path.join(self.dist_dir, self.name + '.apk')
    else:
        app_apk = os.path.join(self.project_dir, 'bin', 'app.apk')
    # Sign the APK; argument 6 is the store password (kept out of logs).
    output = run.run([self.jarsigner,
        '-sigalg', self.get_sigalg(),
        '-digestalg', 'SHA1',
        '-storepass', self.keystore_pass,
        '-keystore', self.keystore,
        '-signedjar', app_apk,
        unsigned_apk,
        self.keystore_alias], protect_arg_positions=(6,))
    run.check_output_for_error(output, r'RuntimeException: (.*)', True)
    run.check_output_for_error(output, r'^jarsigner: (.*)', True)
    # TODO Document Exit message
    #success = re.findall(r'RuntimeException: (.*)', output)
    #if len(success) > 0:
    #	error(success[0])
    #	sys.exit(1)
    # zipalign to align byte boundaries
    zipalign = self.sdk.get_zipalign()
    if os.path.exists(app_apk+'z'):
        os.remove(app_apk+'z')
    ALIGN_32_BIT = 4
    output = run.run([zipalign, '-v', str(ALIGN_32_BIT), app_apk, app_apk+'z'])
    # TODO - Document Exit message
    if output == None:
        error("System Error while compiling Android classes.dex")
        sys.exit(1)
    else:
        # Replace the unaligned APK with the aligned one.
        os.unlink(app_apk)
        os.rename(app_apk+'z',app_apk)
    if self.dist_dir:
        # Distribution builds stop here -- nothing to install.
        self.post_build()
        sys.exit()
    if self.build_only:
        return (False, False)
    out = self.run_adb('get-state')
    #out = subprocess.Popen([self.sdk.get_adb(), self.device_type_arg, 'get-state'], stderr=subprocess.PIPE, stdout=subprocess.PIPE).communicate()[0]
    out = str(out).strip()
    # try a few times as sometimes it fails waiting on boot
    attempts = 0
    launched = False
    launch_failed = False
    while attempts < 5:
        try:
            if self.install:
                self.wait_for_device('d')
                info("Installing application on device")
            else:
                self.wait_for_device('e')
                info("Installing application on emulator")
            output = self.run_adb('install', '-r', app_apk)
            #output = run.run(cmd)
            if output == None:
                launch_failed = True
            elif "Failure" in output:
                error("Failed installing %s: %s" % (self.app_id, output))
                launch_failed = True
            elif not self.install:
                launched = True
            break
        except Exception, e:
            error(e)
            time.sleep(3)
            attempts+=1
    return (launched, launch_failed)
def run_app(self):
    """Launch the installed application's main activity via adb am start."""
    info("Launching application ... %s" % self.name)
    activity = '%s/.%sActivity' % (self.app_id , self.classname)
    # 0x10200000 = FLAG_ACTIVITY_NEW_TASK | FLAG_ACTIVITY_RESET_TASK_IF_NEEDED
    # -- presumably; verify against android.content.Intent constants.
    output = self.run_adb('shell', 'am', 'start',
        '-a', 'android.intent.action.MAIN',
        '-c','android.intent.category.LAUNCHER',
        '-n', activity,
        '-f', '0x10200000')
    trace("Launch output: %s" % output)
def wait_for_sdcard(self):
    """Wait (up to 60s) for the device/emulator sdcard to become available.

    Returns True when the sdcard is mounted, False on timeout or when adb
    itself fails.
    """
    # Quick check: the existence of /sdcard/Android,
    # which really should be there on all phones and emulators.
    output = self.run_adb('shell', 'cd /sdcard/Android && echo SDCARD READY')
    # run_adb returns None on failure (handled as such below for 'mount'),
    # so guard before the substring test to avoid a TypeError.
    if output and 'SDCARD READY' in output:
        return True
    # Our old way of checking in case the above
    # didn't succeed:
    mount_points_check = ['/sdcard', '/mnt/sdcard']
    # Check the symlink that is typically in root.
    # If you find it, add its target to the mount points to check.
    output = self.run_adb('shell', 'ls', '-l', '/sdcard')
    if output:
        target_pattern = r"\-\> (\S+)\s*$"
        mount_points_check.extend(re.findall(target_pattern, output))
    info("Waiting for SDCard to become available..")
    waited = 0
    max_wait = 60
    while waited < max_wait:
        output = self.run_adb('shell', 'mount')
        if output != None:
            mount_points = output.splitlines()
            for mount_point in mount_points:
                tokens = mount_point.split()
                if len(tokens) < 2: continue
                mount_path = tokens[1]
                if mount_path in mount_points_check:
                    return True
        else:
            error("Error checking for SDCard using 'mount'")
            return False
        time.sleep(1)
        waited += 1
    error("Timed out waiting for SDCard to become available (%ds)" % max_wait)
    return False
def push_deploy_json(self):
    """Write bin/deploy.json (debugger/profiler/fastdev endpoints) and push
    it to /sdcard/<app id>/deploy.json on the device, then remove the
    local copy. Skips the push when the sdcard never becomes available.
    """
    deploy_data = {
        "debuggerEnabled": self.debugger_host != None,
        "debuggerPort": self.debugger_port,
        "profilerEnabled": self.profiler_host != None,
        "profilerPort": self.profiler_port,
        "fastdevPort": self.fastdev_port
    }
    deploy_json = os.path.join(self.project_dir, 'bin', 'deploy.json')
    # Close the handle explicitly instead of relying on refcounting
    # (the original open(...).write(...) never closed it).
    df = open(deploy_json, 'w+')
    try:
        df.write(simplejson.dumps(deploy_data))
    finally:
        df.close()
    sdcard_available = self.wait_for_sdcard()
    if sdcard_available:
        # "|| echo" keeps mkdir from failing when the dir already exists.
        self.run_adb('shell', 'mkdir /sdcard/%s || echo' % self.app_id)
        self.run_adb('push', deploy_json, '/sdcard/%s/deploy.json' % self.app_id)
    os.unlink(deploy_json)
def verify_fastdev(self):
    """Return True (and record self.fastdev_port) when a fastdev server is
    running for this project; otherwise remove any stale lock file and
    return False.
    """
    lock_file = os.path.join(self.top_dir, '.fastdev.lock')
    if not fastdev.is_running(self.top_dir):
        # Lock file left behind by a dead server -- clean it up.
        if os.path.exists(lock_file):
            os.unlink(lock_file)
        return False
    else:
        # Close the lock file explicitly instead of relying on refcounting.
        lf = open(lock_file, 'r')
        try:
            data = simplejson.loads(lf.read())
        finally:
            lf.close()
        self.fastdev_port = data["port"]
        return True
def fastdev_kill_app(self):
lock_file = os.path.join(self.top_dir, ".fastdev.lock")
if os.path.exists(lock_file):
class Options(object): pass
options = Options()
options.lock_file = lock_file
try:
return fastdev.kill_app(self.top_dir, options)
except Exception, e:
return False
def merge_internal_module_resources(self):
    """Extract each internal module's bundled <jar>.res.zip (when present)
    over the project directory."""
    if not self.android_jars:
        return
    for jar in self.android_jars:
        res_zip = jar[:-4] + '.res.zip'
        # Both the jar and its companion res zip must exist.
        if not (os.path.exists(jar) and os.path.exists(res_zip)):
            continue
        archive = zipfile.ZipFile(res_zip, "r")
        try:
            zip_extractall(archive, self.project_dir)
        finally:
            archive.close()
def build_and_run(self, install, avd_id, keystore=None, keystore_pass='tirocks', keystore_alias='tidev', dist_dir=None, build_only=False, device_args=None, debugger_host=None, profiler_host=None):
deploy_type = 'development'
self.build_only = build_only
self.device_args = device_args
self.postbuild_modules = []
self.finalize_modules = []
self.non_orphans = []
if install:
if self.device_args == None:
self.device_args = ['-d']
if keystore == None:
deploy_type = 'test'
else:
deploy_type = 'production'
if self.device_args == None:
self.device_args = ['-e']
self.deploy_type = deploy_type
(java_failed, java_status) = prereq.check_java()
if java_failed:
error(java_status)
sys.exit(1)
# attempt to load any compiler plugins
if len(self.tiappxml.properties['plugins']) > 0:
titanium_dir = os.path.abspath(os.path.join(template_dir,'..','..','..','..'))
local_compiler_dir = os.path.abspath(os.path.join(self.top_dir,'plugins'))
tp_compiler_dir = os.path.abspath(os.path.join(titanium_dir,'plugins'))
if not os.path.exists(tp_compiler_dir) and not os.path.exists(local_compiler_dir):
error("Build Failed (Missing plugins directory)")
sys.exit(1)
compiler_config = {
'platform':'android',
'tiapp':self.tiappxml,
'project_dir':self.top_dir,
'titanium_dir':titanium_dir,
'appid':self.app_id,
'template_dir':template_dir,
'project_name':self.name,
'command':self.command,
'build_dir':self.project_dir,
'app_name':self.name,
'android_builder':self,
'deploy_type':deploy_type,
'dist_dir':dist_dir,
'logger':log
}
for plugin in self.tiappxml.properties['plugins']:
local_plugin_file = os.path.join(local_compiler_dir,plugin['name'],'plugin.py')
plugin_file = os.path.join(tp_compiler_dir,plugin['name'],plugin['version'],'plugin.py')
info("plugin=%s" % plugin_file)
if not os.path.exists(local_plugin_file) and not os.path.exists(plugin_file):
error("Build Failed (Missing plugin for %s)" % plugin['name'])
sys.exit(1)
info("Detected compiler plugin: %s/%s" % (plugin['name'],plugin['version']))
code_path = plugin_file
if os.path.exists(local_plugin_file):
code_path = local_plugin_file
compiler_config['plugin']=plugin
fin = open(code_path, 'rb')
m = hashlib.md5()
m.update(open(code_path,'rb').read())
code_hash = m.hexdigest()
p = imp.load_source(code_hash, code_path, fin)
module_functions = dict(inspect.getmembers(p, inspect.isfunction))
if module_functions.has_key('postbuild'):
debug("plugin contains a postbuild function. Will execute after project is built and packaged")
self.postbuild_modules.append((plugin['name'], p))
if module_functions.has_key('finalize'):
debug("plugin contains a finalize function. Will execute before script exits")
self.finalize_modules.append((plugin['name'], p))
p.compile(compiler_config)
fin.close()
# in Windows, if the adb server isn't running, calling "adb devices"
# will fork off a new adb server, and cause a lock-up when we
# try to pipe the process' stdout/stderr. the workaround is
# to simply call adb start-server here, and not care about
# the return code / pipes. (this is harmless if adb is already running)
# -- thanks to Bill Dawson for the workaround
if platform.system() == "Windows" and not build_only:
run.run([self.sdk.get_adb(), "start-server"], True, ignore_output=True)
ti_version_file = os.path.join(self.support_dir, '..', 'version.txt')
if os.path.exists(ti_version_file):
ti_version_info = read_properties(open(ti_version_file, 'r'), '=')
if not ti_version_info is None and 'version' in ti_version_info:
ti_version_string = 'Titanium SDK version: %s' % ti_version_info['version']
if 'timestamp' in ti_version_info or 'githash' in ti_version_info:
ti_version_string += ' ('
if 'timestamp' in ti_version_info:
ti_version_string += '%s' % ti_version_info['timestamp']
if 'githash' in ti_version_info:
ti_version_string += ' %s' % ti_version_info['githash']
ti_version_string += ')'
info(ti_version_string)
if not build_only:
if deploy_type == 'development':
self.wait_for_device('e')
elif deploy_type == 'test':
self.wait_for_device('d')
self.install = install
self.dist_dir = dist_dir
self.aapt = self.sdk.get_aapt()
self.android_jar = self.sdk.get_android_jar()
self.titanium_jar = os.path.join(self.support_dir,'titanium.jar')
self.kroll_apt_jar = os.path.join(self.support_dir, 'kroll-apt.jar')
dx = self.sdk.get_dx()
self.apkbuilder = self.sdk.get_apkbuilder()
self.sdcard_resources = '/sdcard/Ti.debug/%s/Resources' % self.app_id
self.resources_installed = False
if deploy_type == "production":
self.app_installed = False
else:
self.app_installed = not build_only and self.is_app_installed()
debug("%s installed? %s" % (self.app_id, self.app_installed))
#self.resources_installed = not build_only and self.are_resources_installed()
#debug("%s resources installed? %s" % (self.app_id, self.resources_installed))
if keystore == None:
keystore = os.path.join(self.support_dir,'dev_keystore')
self.keystore = keystore
self.keystore_pass = keystore_pass
self.keystore_alias = keystore_alias
curdir = os.getcwd()
self.support_resources_dir = os.path.join(self.support_dir, 'resources')
try:
os.chdir(self.project_dir)
self.android = Android(self.name, self.app_id, self.sdk, deploy_type, self.java)
if not os.path.exists('bin'):
os.makedirs('bin')
resources_dir = os.path.join(self.top_dir,'Resources')
self.assets_dir = os.path.join(self.project_dir,'bin','assets')
self.assets_resources_dir = os.path.join(self.assets_dir,'Resources')
if not os.path.exists(self.assets_resources_dir):
os.makedirs(self.assets_resources_dir)
shutil.copy(self.project_tiappxml, self.assets_dir)
finalxml = os.path.join(self.assets_dir,'tiapp.xml')
self.tiapp = TiAppXML(finalxml)
self.tiapp.setDeployType(deploy_type)
self.sdcard_copy = False
sdcard_property = "ti.android.loadfromsdcard"
if self.tiapp.has_app_property(sdcard_property):
self.sdcard_copy = self.tiapp.to_bool(self.tiapp.get_app_property(sdcard_property))
fastdev_property = "ti.android.fastdev"
fastdev_enabled = (self.deploy_type == 'development' and not self.build_only)
if self.tiapp.has_app_property(fastdev_property) and self.deploy_type == 'development':
fastdev_enabled = self.tiapp.to_bool(self.tiapp.get_app_property(fastdev_property))
if fastdev_enabled:
if self.verify_fastdev():
info("Fastdev server running, deploying in Fastdev mode")
self.fastdev = True
else:
warn("Fastdev enabled, but server isn't running, deploying normally")
self.classes_dir = os.path.join(self.project_dir, 'bin', 'classes')
if not os.path.exists(self.classes_dir):
os.makedirs(self.classes_dir)
if (not debugger_host is None) and len(debugger_host) > 0:
hostport = debugger_host.split(":")
self.debugger_host = hostport[0]
self.debugger_port = int(hostport[1])
debugger_enabled = self.debugger_host != None and len(self.debugger_host) > 0
if (not profiler_host is None) and len(profiler_host) > 0:
hostport = profiler_host.split(":")
self.profiler_host = hostport[0]
self.profiler_port = int(hostport[1])
profiler_enabled = self.profiler_host != None and len(self.profiler_host) > 0
# Detect which modules are being used.
# We need to know this info in a few places, so the info is saved
# in self.missing_modules and self.modules
detector = ModuleDetector(self.top_dir)
self.missing_modules, self.modules = detector.find_app_modules(self.tiapp, 'android', deploy_type)
self.copy_commonjs_modules()
self.copy_project_resources()
last_build_info = None
built_all_modules = False
build_info_path = os.path.join(self.project_dir, 'bin', 'build_info.json')
if os.path.exists(build_info_path):
last_build_info = simplejson.loads(open(build_info_path, 'r').read())
built_all_modules = last_build_info["include_all_modules"]
if self.tiapp.has_app_property("ti.android.compilejs"):
if self.tiapp.to_bool(self.tiapp.get_app_property('ti.android.compilejs')):
self.compile_js = True
elif self.tiapp.has_app_property('ti.deploytype'):
if self.tiapp.get_app_property('ti.deploytype') == 'production':
self.compile_js = True
if self.compile_js and os.environ.has_key('SKIP_JS_MINIFY'):
self.compile_js = False
info("Disabling JavaScript minification")
include_all_ti_modules = self.fastdev
if (self.tiapp.has_app_property('ti.android.include_all_modules')):
if self.tiapp.to_bool(self.tiapp.get_app_property('ti.android.include_all_modules')):
include_all_ti_modules = True
if self.tiapp_changed or (self.js_changed and not self.fastdev) or \
self.force_rebuild or self.deploy_type == "production" or \
(self.fastdev and not built_all_modules) or \
(not self.fastdev and built_all_modules):
self.android.config['compile_js'] = self.compile_js
trace("Generating Java Classes")
self.android.create(os.path.abspath(os.path.join(self.top_dir,'..')),
True, project_dir = self.top_dir, include_all_ti_modules=include_all_ti_modules)
open(build_info_path, 'w').write(simplejson.dumps({
"include_all_modules": include_all_ti_modules
}))
else:
info("Tiapp.xml unchanged, skipping class generation")
# compile resources
full_resource_dir = os.path.join(self.project_dir, self.assets_resources_dir)
compiler = Compiler(self.tiapp,
full_resource_dir,
self.java,
self.classes_dir,
self.project_gen_dir,
self.project_dir,
include_all_modules=include_all_ti_modules)
compiler.compile(compile_bytecode=self.compile_js, external_modules=self.modules)
self.compiled_files = compiler.compiled_files
self.android_jars = compiler.jar_libraries
self.merge_internal_module_resources()
if not os.path.exists(self.assets_dir):
os.makedirs(self.assets_dir)
self.resource_drawables_changed = self.copy_resource_drawables()
self.warn_dupe_drawable_folders()
self.copy_module_platform_folders()
special_resources_dir = os.path.join(self.top_dir,'platform','android')
if os.path.exists(special_resources_dir):
debug("found special platform files dir = %s" % special_resources_dir)
ignore_files = ignoreFiles
ignore_files.extend(['AndroidManifest.xml']) # don't want to overwrite build/android/AndroidManifest.xml yet
self.copy_project_platform_folder(ignoreDirs, ignore_files)
self.generate_stylesheet()
self.generate_aidl()
self.manifest_changed = self.generate_android_manifest(compiler)
my_avd = None
self.google_apis_supported = False
# find the AVD we've selected and determine if we support Google APIs
if avd_id is not None:
for avd_props in avd.get_avds(self.sdk):
if avd_props['id'] == avd_id:
my_avd = avd_props
self.google_apis_supported = (my_avd['name'].find('Google')!=-1 or my_avd['name'].find('APIs')!=-1)
break
if build_only or avd_id is None:
self.google_apis_supported = True
remove_orphaned_files(resources_dir, self.assets_resources_dir, self.non_orphans)
generated_classes_built = self.build_generated_classes()
# TODO: enable for "test" / device mode for debugger / fastdev
if not self.build_only and (self.deploy_type == "development" or self.deploy_type == "test"):
self.push_deploy_json()
self.classes_dex = os.path.join(self.project_dir, 'bin', 'classes.dex')
def jar_includer(path, isfile):
if isfile and path.endswith(".jar"): return True
return False
support_deltafy = Deltafy(self.support_dir, jar_includer)
self.support_deltas = support_deltafy.scan()
dex_built = False
if len(self.support_deltas) > 0 or generated_classes_built or self.deploy_type == "production":
# the dx.bat that ships with android in windows doesn't allow command line
# overriding of the java heap space, so we call the jar directly
if platform.system() == 'Windows':
dex_args = [self.java, '-Xmx1024M', '-Djava.ext.dirs=%s' % self.sdk.get_platform_tools_dir(), '-jar', self.sdk.get_dx_jar()]
else:
dex_args = [dx, '-JXmx1536M', '-JXX:-UseGCOverheadLimit']
# Look for New Relic module
newrelic_module = None
for module in self.modules:
if module.path.find("newrelic") > 0:
newrelic_module = module
break
# If New Relic is present, add its Java agent to the dex arguments.
if newrelic_module:
info("Adding New Relic support.")
# Copy the dexer java agent jar to a tempfile. Eliminates white space from
# the module path which causes problems with the dex -Jjavaagent argument.
temp_jar = tempfile.NamedTemporaryFile(suffix='.jar', delete=True)
shutil.copyfile(os.path.join(newrelic_module.path, 'class.rewriter.jar'), temp_jar.name)
dex_args += ['-Jjavaagent:' + os.path.join(temp_jar.name)]
dex_args += ['--dex', '--output='+self.classes_dex, self.classes_dir]
dex_args += self.android_jars
dex_args += self.module_jars
dex_args.append(os.path.join(self.support_dir, 'lib', 'titanium-verify.jar'))
if self.deploy_type != 'production':
dex_args.append(os.path.join(self.support_dir, 'lib', 'titanium-debug.jar'))
dex_args.append(os.path.join(self.support_dir, 'lib', 'titanium-profiler.jar'))
# the verifier depends on Ti.Network classes, so we may need to inject it
has_network_jar = False
for jar in self.android_jars:
if jar.endswith('titanium-network.jar'):
has_network_jar = True
break
if not has_network_jar:
dex_args.append(os.path.join(self.support_dir, 'modules', 'titanium-network.jar'))
info("Compiling Android Resources... This could take some time")
# TODO - Document Exit message
run_result = run.run(dex_args, warning_regex=r'warning: ')
if (run_result == None):
dex_built = False
error("System Error while compiling Android classes.dex")
sys.exit(1)
else:
dex_built = True
debug("Android classes.dex built")
if dex_built or generated_classes_built or self.tiapp_changed or self.manifest_changed or not self.app_installed or not self.fastdev:
# metadata has changed, we need to do a full re-deploy
launched, launch_failed = self.package_and_deploy()
if launched:
self.run_app()
info("Deployed %s ... Application should be running." % self.name)
elif launch_failed==False and not build_only:
info("Application installed. Launch from drawer on Home Screen")
elif not build_only:
# Relaunch app if nothing was built
info("Re-launching application ... %s" % self.name)
relaunched = False
killed = False
if self.fastdev:
killed = self.fastdev_kill_app()
if not killed:
processes = self.run_adb('shell', 'ps')
for line in processes.splitlines():
columns = line.split()
if len(columns) > 1:
pid = columns[1]
id = columns[len(columns)-1]
if id == self.app_id:
self.run_adb('shell', 'kill', pid)
relaunched = True
self.run_app()
if relaunched:
info("Relaunched %s ... Application should be running." % self.name)
self.post_build()
# Enable port forwarding for debugger if application
# acts as the server.
if debugger_enabled:
info('Forwarding host port %s to device for debugging.' % self.debugger_port)
forwardPort = 'tcp:%s' % self.debugger_port
self.sdk.run_adb(['forward', forwardPort, forwardPort])
# Enable port forwarding for profiler
if profiler_enabled:
info('Forwarding host port %s to device for profiling.' % self.profiler_port)
forwardPort = 'tcp:%s' % self.profiler_port
self.sdk.run_adb(['forward', forwardPort, forwardPort])
#intermediary code for on-device debugging (later)
#if debugger_host != None:
#import debugger
#debug("connecting to debugger: %s, debugger=%s" % (debugger_host, str(debugger)))
#debugger.run(debugger_host, '127.0.0.1:5999')
finally:
os.chdir(curdir)
sys.stdout.flush()
def post_build(self):
    """Run the postbuild() hook of every registered build plugin.

    A plugin failure is logged via error() but never aborts the build.
    """
    try:
        if self.postbuild_modules:
            # Each entry p is a pair: p[0] is the plugin name (used for
            # logging), p[1] is the plugin module exposing postbuild().
            for p in self.postbuild_modules:
                info("Running postbuild function in %s plugin" % p[0])
                p[1].postbuild()
    except Exception,e:
        error("Error performing post-build steps: %s" % e)
def finalize(self):
    """Run the finalize() hook of every registered build plugin.

    Mirrors post_build(): plugin failures are logged but not fatal.
    """
    try:
        if self.finalize_modules:
            # Each entry p is a pair: p[0] is the plugin name (used for
            # logging), p[1] is the plugin module exposing finalize().
            for p in self.finalize_modules:
                info("Running finalize function in %s plugin" % p[0])
                p[1].finalize()
    except Exception,e:
        error("Error performing finalize steps: %s" % e)
if __name__ == "__main__":
    # Command-line entry point: parse the command word, build positional
    # arguments into Builder parameters, then dispatch to the matching
    # Builder method.  Argument layout varies per command (see usage()).
    def usage():
        print "%s <command> <project_name> <sdk_dir> <project_dir> <app_id> [key] [password] [alias] [dir] [avdid] [avdskin] [avdabi] [emulator options]" % os.path.basename(sys.argv[0])
        print
        print "available commands: "
        print
        print "  emulator      build and run the emulator"
        print "  simulator     build and run the app on the simulator"
        print "  install       build and install the app on the device"
        print "  distribute    build final distribution package for upload to marketplace"
        print "  run           build and run the project using values from tiapp.xml"
        print "  run-emulator  run the emulator with a default AVD ID and skin"
        sys.exit(1)

    argc = len(sys.argv)
    if argc < 2:
        usage()

    command = sys.argv[1]

    if command == 'logcat':
        launch_logcat()

    # Directory containing this script; used to locate build templates.
    template_dir = os.path.abspath(os.path.dirname(sys._getframe(0).f_code.co_filename))
    get_values_from_tiapp = False
    is_emulator = False

    # The 'run'/'run-emulator' commands take project settings from
    # tiapp.xml; all other commands receive them as CLI positionals.
    if command == 'run':
        if argc < 4:
            print 'Usage: %s run <project_dir> <android_sdk>' % sys.argv[0]
            sys.exit(1)
        get_values_from_tiapp = True
        project_dir = sys.argv[2]
        sdk_dir = sys.argv[3]
        avd_id = "7"
    elif command == 'run-emulator':
        if argc < 4:
            print 'Usage: %s run-emulator <project_dir> <android_sdk>' % sys.argv[0]
            sys.exit(1)
        get_values_from_tiapp = True
        project_dir = sys.argv[2]
        sdk_dir = sys.argv[3]
        # sensible defaults?
        avd_id = "7"
        avd_skin = "HVGA"
    else:
        if command == 'emulator':
            is_emulator = True
        if argc < 6 or command == '--help' or (command=='distribute' and argc < 10):
            usage()

    if get_values_from_tiapp:
        tiappxml = TiAppXML(os.path.join(project_dir, 'tiapp.xml'))
        app_id = tiappxml.properties['id']
        project_name = tiappxml.properties['name']
    else:
        project_name = dequote(sys.argv[2])
        sdk_dir = os.path.abspath(os.path.expanduser(dequote(sys.argv[3])))
        project_dir = os.path.abspath(os.path.expanduser(dequote(sys.argv[4])))
        app_id = dequote(sys.argv[5])

    # All build output is additionally logged to <project_dir>/build.log.
    log = TiLogger(os.path.join(os.path.abspath(os.path.expanduser(dequote(project_dir))), 'build.log'))
    log.debug(" ".join(sys.argv))

    builder = Builder(project_name,sdk_dir,project_dir,template_dir,app_id,is_emulator)
    builder.command = command
    try:
        if command == 'run-emulator':
            builder.run_emulator(avd_id, avd_skin, None, None, [])
        elif command == 'run':
            builder.build_and_run(False, avd_id)
        elif command == 'emulator':
            avd_id = dequote(sys.argv[6])
            add_args = None
            avd_abi = None
            avd_skin = None
            avd_name = None
            # A numeric 6th argument is an AVD id (skin follows);
            # otherwise it names an existing AVD directly.
            if avd_id.isdigit():
                avd_name = None
                avd_skin = dequote(sys.argv[7])
                if argc > 8:
                    # The first of the remaining args
                    # could either be an abi or an additional argument for
                    # the emulator. Compare to known abis.
                    next_index = 8
                    test_arg = sys.argv[next_index]
                    if test_arg in KNOWN_ABIS:
                        avd_abi = test_arg
                        next_index += 1
                    # Whatever remains (if anything) is an additional
                    # argument to pass to the emulator.
                    if argc > next_index:
                        add_args = sys.argv[next_index:]
            else:
                avd_name = sys.argv[6]
                # If the avd is known by name, then the skin and abi shouldn't be passed,
                # because the avd already has the skin and abi "in it".
                avd_id = None
                avd_skin = None
                avd_abi = None
                if argc > 7:
                    add_args = sys.argv[7:]
            builder.run_emulator(avd_id, avd_skin, avd_name, avd_abi, add_args)
        elif command == 'simulator':
            info("Building %s for Android ... one moment" % project_name)
            avd_id = dequote(sys.argv[6])
            debugger_host = None
            profiler_host = None
            # argv[9] == 'profiler' selects profiling; otherwise argv[8]
            # (when present) is the debugger host.
            if len(sys.argv) > 9 and sys.argv[9] == 'profiler':
                profiler_host = dequote(sys.argv[8])
            elif len(sys.argv) > 8:
                debugger_host = dequote(sys.argv[8])
            builder.build_and_run(False, avd_id, debugger_host=debugger_host, profiler_host=profiler_host)
        elif command == 'install':
            avd_id = dequote(sys.argv[6])
            device_args = ['-d']
            # We have to be careful here because Windows can't handle an empty argument
            # on the command line, so if a device serial number is not passed in, but
            # a debugger_host (the argument after device serial number) _is_ passed in,
            # to Windows it just looks like a serial number is passed in (the debugger_host
            # argument shifts left to take over the empty argument.)
            debugger_host = None
            profiler_host = None
            if len(sys.argv) >= 10 and sys.argv[9] == 'profiler':
                profiler_host = dequote(sys.argv[8])
                if len(sys.argv[7]) > 0:
                    device_args = ['-s', sys.argv[7]]
            elif len(sys.argv) >= 9 and len(sys.argv[8]) > 0:
                debugger_host = dequote(sys.argv[8])
                if len(sys.argv[7]) > 0:
                    device_args = ['-s', sys.argv[7]]
            elif len(sys.argv) >= 8 and len(sys.argv[7]) > 0:
                arg7 = dequote(sys.argv[7])
                # Disambiguate: an 'adb:' prefix marks a debugger host,
                # anything else is treated as a device serial number.
                if 'adb:' in arg7:
                    debugger_host = arg7
                else:
                    device_args = ['-s', arg7]
            builder.build_and_run(True, avd_id, device_args=device_args, debugger_host=debugger_host, profiler_host=profiler_host)
        elif command == 'distribute':
            key = os.path.abspath(os.path.expanduser(dequote(sys.argv[6])))
            password = dequote(sys.argv[7])
            alias = dequote(sys.argv[8])
            output_dir = dequote(sys.argv[9])
            builder.build_and_run(True, None, key, password, alias, output_dir)
        elif command == 'build':
            builder.build_and_run(False, 1, build_only=True)
        else:
            error("Unknown command: %s" % command)
            usage()
    except SystemExit, n:
        sys.exit(n)
    except:
        e = traceback.format_exc()
        error("Exception occured while building Android project:")
        for line in e.splitlines():
            error(line)
        sys.exit(1)
    finally:
        # Don't run plugin finalizer functions if all we were doing is
        # starting up the emulator.
        if builder and command not in ("emulator", "run-emulator"):
            builder.finalize()
deeplearning4j/libnd4j | tests_cpu/lib/googletest-release-1.8.0/googlemock/scripts/fuse_gmock_files.py | 242 | 8631 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""fuse_gmock_files.py v0.1.0
Fuses Google Mock and Google Test source code into two .h files and a .cc file.
SYNOPSIS
fuse_gmock_files.py [GMOCK_ROOT_DIR] OUTPUT_DIR
Scans GMOCK_ROOT_DIR for Google Mock and Google Test source
code, assuming Google Test is in the GMOCK_ROOT_DIR/../googletest
directory, and generates three files:
OUTPUT_DIR/gtest/gtest.h, OUTPUT_DIR/gmock/gmock.h, and
OUTPUT_DIR/gmock-gtest-all.cc. Then you can build your tests
by adding OUTPUT_DIR to the include search path and linking
with OUTPUT_DIR/gmock-gtest-all.cc. These three files contain
everything you need to use Google Mock. Hence you can
"install" Google Mock by copying them to wherever you want.
GMOCK_ROOT_DIR can be omitted and defaults to the parent
directory of the directory holding this script.
EXAMPLES
./fuse_gmock_files.py fused_gmock
./fuse_gmock_files.py path/to/unpacked/gmock fused_gmock
This tool is experimental. In particular, it assumes that there is no
conditional inclusion of Google Mock or Google Test headers. Please
report any problems to googlemock@googlegroups.com. You can read
http://code.google.com/p/googlemock/wiki/CookBook for more
information.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
import sets
import sys

# We assume that this file is in the scripts/ directory in the Google
# Mock root directory.
DEFAULT_GMOCK_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')

# We need to call into googletest/scripts/fuse_gtest_files.py.
sys.path.append(os.path.join(DEFAULT_GMOCK_ROOT_DIR, '../googletest/scripts'))
import fuse_gtest_files
# Short alias used throughout this script for the gtest fusing helpers.
gtest = fuse_gtest_files

# Regex for matching '#include "gmock/..."'.
INCLUDE_GMOCK_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gmock/.+)"')

# Where to find the source seed files (relative to the gmock root).
GMOCK_H_SEED = 'include/gmock/gmock.h'
GMOCK_ALL_CC_SEED = 'src/gmock-all.cc'

# Where to put the generated files (relative to the output directory).
GTEST_H_OUTPUT = 'gtest/gtest.h'
GMOCK_H_OUTPUT = 'gmock/gmock.h'
GMOCK_GTEST_ALL_CC_OUTPUT = 'gmock-gtest-all.cc'
def GetGTestRootDir(gmock_root):
  """Returns the root directory of the bundled Google Test sources.

  Google Test is expected to live in the ../googletest sibling of the
  Google Mock root directory.
  """
  gtest_root = os.path.join(gmock_root, '../googletest')
  return gtest_root
def ValidateGMockRootDir(gmock_root):
  """Makes sure gmock_root points to a valid gmock root directory.

  The function aborts the program on failure.
  """

  # Validate the sibling Google Test tree first, then the two gmock
  # seed files this script fuses from.
  gtest.ValidateGTestRootDir(GetGTestRootDir(gmock_root))
  gtest.VerifyFileExists(gmock_root, GMOCK_H_SEED)
  gtest.VerifyFileExists(gmock_root, GMOCK_ALL_CC_SEED)
def ValidateOutputDir(output_dir):
  """Makes sure output_dir points to a valid output directory.

  The function aborts the program on failure.
  """

  # Each generated file location must be writable (or creatable).
  gtest.VerifyOutputFile(output_dir, gtest.GTEST_H_OUTPUT)
  gtest.VerifyOutputFile(output_dir, GMOCK_H_OUTPUT)
  gtest.VerifyOutputFile(output_dir, GMOCK_GTEST_ALL_CC_OUTPUT)
def FuseGMockH(gmock_root, output_dir):
  """Scans folder gmock_root to generate gmock/gmock.h in output_dir.

  Recursively inlines every '#include "gmock/..."' header exactly once;
  '#include "gtest/..."' lines are rewritten to the single fused
  gtest/gtest.h include.
  """

  output_file = file(os.path.join(output_dir, GMOCK_H_OUTPUT), 'w')
  processed_files = sets.Set()  # Holds all gmock headers we've processed.

  def ProcessFile(gmock_header_path):
    """Processes the given gmock header file."""

    # We don't process the same header twice.
    if gmock_header_path in processed_files:
      return

    processed_files.add(gmock_header_path)

    # Reads each line in the given gmock header.
    for line in file(os.path.join(gmock_root, gmock_header_path), 'r'):
      m = INCLUDE_GMOCK_FILE_REGEX.match(line)
      if m:
        # It's '#include "gmock/..."' - let's process it recursively.
        ProcessFile('include/' + m.group(1))
      else:
        m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
        if m:
          # It's '#include "gtest/foo.h"'. We translate it to
          # "gtest/gtest.h", regardless of what foo is, since all
          # gtest headers are fused into gtest/gtest.h.

          # There is no need to #include gtest.h twice.
          if not gtest.GTEST_H_SEED in processed_files:
            processed_files.add(gtest.GTEST_H_SEED)
            output_file.write('#include "%s"\n' % (gtest.GTEST_H_OUTPUT,))
        else:
          # Otherwise we copy the line unchanged to the output file.
          output_file.write(line)

  ProcessFile(GMOCK_H_SEED)
  output_file.close()
def FuseGMockAllCcToFile(gmock_root, output_file):
  """Scans folder gmock_root to fuse gmock-all.cc into output_file.

  output_file is an already-open file object; it is appended to and is
  NOT closed here (the caller owns it).
  """

  processed_files = sets.Set()

  def ProcessFile(gmock_source_file):
    """Processes the given gmock source file."""

    # We don't process the same #included file twice.
    if gmock_source_file in processed_files:
      return

    processed_files.add(gmock_source_file)

    # Reads each line in the given gmock source file.
    for line in file(os.path.join(gmock_root, gmock_source_file), 'r'):
      m = INCLUDE_GMOCK_FILE_REGEX.match(line)
      if m:
        # It's '#include "gmock/foo.h"'. We treat it as '#include
        # "gmock/gmock.h"', as all other gmock headers are being fused
        # into gmock.h and cannot be #included directly.

        # There is no need to #include "gmock/gmock.h" more than once.
        if not GMOCK_H_SEED in processed_files:
          processed_files.add(GMOCK_H_SEED)
          output_file.write('#include "%s"\n' % (GMOCK_H_OUTPUT,))
      else:
        m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
        if m:
          # It's '#include "gtest/..."'.
          # There is no need to #include gtest.h as it has been
          # #included by gtest-all.cc.
          pass
        else:
          m = gtest.INCLUDE_SRC_FILE_REGEX.match(line)
          if m:
            # It's '#include "src/foo"' - let's process it recursively.
            ProcessFile(m.group(1))
          else:
            # Otherwise we copy the line unchanged to the output file.
            output_file.write(line)

  ProcessFile(GMOCK_ALL_CC_SEED)
def FuseGMockGTestAllCc(gmock_root, output_dir):
  """Scans folder gmock_root to generate gmock-gtest-all.cc in output_dir."""

  output_file = file(os.path.join(output_dir, GMOCK_GTEST_ALL_CC_OUTPUT), 'w')
  # First, fuse gtest-all.cc into gmock-gtest-all.cc.
  gtest.FuseGTestAllCcToFile(GetGTestRootDir(gmock_root), output_file)
  # Next, append fused gmock-all.cc to gmock-gtest-all.cc.
  FuseGMockAllCcToFile(gmock_root, output_file)
  output_file.close()
def FuseGMock(gmock_root, output_dir):
  """Fuses gtest.h, gmock.h, and gmock-gtest-all.h.

  Validates both directories (aborting on failure), then writes the
  three fused output files into output_dir.
  """

  ValidateGMockRootDir(gmock_root)
  ValidateOutputDir(output_dir)

  gtest.FuseGTestH(GetGTestRootDir(gmock_root), output_dir)
  FuseGMockH(gmock_root, output_dir)
  FuseGMockGTestAllCc(gmock_root, output_dir)
def main():
  """Command-line entry point; see the module docstring for usage."""
  argc = len(sys.argv)
  if argc == 2:
    # fuse_gmock_files.py OUTPUT_DIR
    FuseGMock(DEFAULT_GMOCK_ROOT_DIR, sys.argv[1])
  elif argc == 3:
    # fuse_gmock_files.py GMOCK_ROOT_DIR OUTPUT_DIR
    FuseGMock(sys.argv[1], sys.argv[2])
  else:
    print __doc__
    sys.exit(1)


if __name__ == '__main__':
  main()
| apache-2.0 |
pschmitt/home-assistant | tests/components/openerz/test_sensor.py | 13 | 1108 | """Tests for OpenERZ component."""
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.setup import async_setup_component
from tests.async_mock import MagicMock, patch
# Minimal sensor-platform configuration fed to async_setup_component:
# an OpenERZ sensor named "test_name" for ZIP 1234 collecting "glass".
MOCK_CONFIG = {
    "sensor": {
        "platform": "openerz",
        "name": "test_name",
        "zip": 1234,
        "waste_type": "glass",
    }
}
async def test_sensor_state(hass):
    """Verify the sensor exposes the pickup date reported by the connector."""
    with patch(
        "homeassistant.components.openerz.sensor.OpenERZConnector"
    ) as connector_cls:
        # The platform instantiates OpenERZConnector; hand it a mock whose
        # find_next_pickup() returns a fixed date.
        connector = MagicMock()
        connector.find_next_pickup.return_value = "2020-12-12"
        connector_cls.return_value = connector

        await async_setup_component(hass, SENSOR_DOMAIN, MOCK_CONFIG)
        await hass.async_block_till_done()

        state = hass.states.get("sensor.test_name")

        assert state.state == "2020-12-12"
        assert state.name == "test_name"
        connector.find_next_pickup.assert_called_once()
| apache-2.0 |
75651/kbengine_cloud | kbe/res/scripts/common/Lib/xml/dom/expatbuilder.py | 91 | 35755 | """Facility to use the Expat parser to load a minidom instance
from a string or file.
This avoids all the overhead of SAX and pulldom to gain performance.
"""
# Warning!
#
# This module is tightly bound to the implementation details of the
# minidom DOM and can't be used with other DOM implementations. This
# is due, in part, to a lack of appropriate methods in the DOM (there is
# no way to create Entity and Notation nodes via the DOM Level 2
# interface), and for performance. The later is the cause of some fairly
# cryptic code.
#
# Performance hacks:
#
# - .character_data_handler() has an extra case in which continuing
# data is appended to an existing Text node; this can be a
# speedup since pyexpat can break up character data into multiple
# callbacks even though we set the buffer_text attribute on the
# parser. This also gives us the advantage that we don't need a
# separate normalization pass.
#
# - Determining that a node exists is done using an identity comparison
# with None rather than a truth test; this avoids searching for and
# calling any methods on the node object if it exists. (A rather
# nice speedup is achieved this way as well!)
from xml.dom import xmlbuilder, minidom, Node
from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX, XMLNS_NAMESPACE
from xml.parsers import expat
from xml.dom.minidom import _append_child, _set_attribute_node
from xml.dom.NodeFilter import NodeFilter
# Module-level copies of frequently used node-type constants; local name
# lookups are cheaper than attribute access in the hot parser callbacks.
TEXT_NODE = Node.TEXT_NODE
CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE
DOCUMENT_NODE = Node.DOCUMENT_NODE

# Short aliases for the DOMBuilderFilter verdict constants.
FILTER_ACCEPT = xmlbuilder.DOMBuilderFilter.FILTER_ACCEPT
FILTER_REJECT = xmlbuilder.DOMBuilderFilter.FILTER_REJECT
FILTER_SKIP = xmlbuilder.DOMBuilderFilter.FILTER_SKIP
FILTER_INTERRUPT = xmlbuilder.DOMBuilderFilter.FILTER_INTERRUPT

theDOMImplementation = minidom.getDOMImplementation()
# Expat typename -> TypeInfo
_typeinfo_map = {
"CDATA": minidom.TypeInfo(None, "cdata"),
"ENUM": minidom.TypeInfo(None, "enumeration"),
"ENTITY": minidom.TypeInfo(None, "entity"),
"ENTITIES": minidom.TypeInfo(None, "entities"),
"ID": minidom.TypeInfo(None, "id"),
"IDREF": minidom.TypeInfo(None, "idref"),
"IDREFS": minidom.TypeInfo(None, "idrefs"),
"NMTOKEN": minidom.TypeInfo(None, "nmtoken"),
"NMTOKENS": minidom.TypeInfo(None, "nmtokens"),
}
class ElementInfo(object):
    """Per-element-type information gathered from DTD declarations.

    Records the attribute declarations and content model Expat reports
    for one element type, and answers the attribute-type and ID queries
    the DOM needs while building the tree.
    """

    __slots__ = '_attr_info', '_model', 'tagName'

    def __init__(self, tagName, model=None):
        self.tagName = tagName
        self._attr_info = []
        self._model = model

    def __getstate__(self):
        return self._attr_info, self._model, self.tagName

    def __setstate__(self, state):
        self._attr_info, self._model, self.tagName = state

    def getAttributeType(self, aname):
        """Return the TypeInfo for attribute *aname* (no-type if undeclared)."""
        for decl in self._attr_info:
            if decl[1] != aname:
                continue
            type_name = decl[-2]
            # Enumerated types are reported as "(a|b|...)".
            if type_name[0] == "(":
                return _typeinfo_map["ENUM"]
            return _typeinfo_map[type_name]
        return minidom._no_type

    def getAttributeTypeNS(self, namespaceURI, localName):
        return minidom._no_type

    def isElementContent(self):
        """True if the content model permits only element children."""
        model = self._model
        if not model:
            return False
        return model[0] not in (expat.model.XML_CTYPE_ANY,
                                expat.model.XML_CTYPE_MIXED)

    def isEmpty(self):
        """True if the element type was declared EMPTY."""
        return bool(self._model) and self._model[0] == expat.model.XML_CTYPE_EMPTY

    def isId(self, aname):
        """True if attribute *aname* was declared with type ID."""
        for decl in self._attr_info:
            if decl[1] == aname:
                return decl[-2] == "ID"
        return False

    def isIdNS(self, euri, ename, auri, aname):
        # not sure this is meaningful
        return self.isId((auri, aname))
def _intern(builder, s):
    """Intern string *s* through *builder*'s shared intern dictionary."""
    interned = builder._intern_setdefault(s, s)
    return interned
def _parse_ns_name(builder, name):
    """Split an Expat namespace-qualified name into DOM components.

    *name* is a space-separated string of either 'uri localname' or
    'uri localname prefix' (three parts only when a prefix was used).
    Returns (uri, localname, prefix, qname), with each string interned
    through the builder so repeated names share one object.
    """
    assert ' ' in name
    parts = name.split(' ')
    intern = builder._intern_setdefault
    if len(parts) == 3:
        uri, localname, prefix = parts
        prefix = intern(prefix, prefix)
        qname = "%s:%s" % (prefix, localname)
        qname = intern(qname, qname)
        localname = intern(localname, localname)
    elif len(parts) == 2:
        # No prefix: the qualified name is just the local name.
        uri, localname = parts
        prefix = EMPTY_PREFIX
        qname = localname = intern(localname, localname)
    else:
        raise ValueError("Unsupported syntax: spaces in URIs not supported: %r" % name)
    return intern(uri, uri), localname, prefix, qname
class ExpatBuilder:
"""Document builder that uses Expat to build a ParsedXML.DOM document
instance."""
def __init__(self, options=None):
if options is None:
options = xmlbuilder.Options()
self._options = options
if self._options.filter is not None:
self._filter = FilterVisibilityController(self._options.filter)
else:
self._filter = None
# This *really* doesn't do anything in this case, so
# override it with something fast & minimal.
self._finish_start_element = id
self._parser = None
self.reset()
def createParser(self):
"""Create a new parser object."""
return expat.ParserCreate()
def getParser(self):
"""Return the parser object, creating a new one if needed."""
if not self._parser:
self._parser = self.createParser()
self._intern_setdefault = self._parser.intern.setdefault
self._parser.buffer_text = True
self._parser.ordered_attributes = True
self._parser.specified_attributes = True
self.install(self._parser)
return self._parser
def reset(self):
"""Free all data structures used during DOM construction."""
self.document = theDOMImplementation.createDocument(
EMPTY_NAMESPACE, None, None)
self.curNode = self.document
self._elem_info = self.document._elem_info
self._cdata = False
def install(self, parser):
"""Install the callbacks needed to build the DOM into the parser."""
# This creates circular references!
parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
parser.StartElementHandler = self.first_element_handler
parser.EndElementHandler = self.end_element_handler
parser.ProcessingInstructionHandler = self.pi_handler
if self._options.entities:
parser.EntityDeclHandler = self.entity_decl_handler
parser.NotationDeclHandler = self.notation_decl_handler
if self._options.comments:
parser.CommentHandler = self.comment_handler
if self._options.cdata_sections:
parser.StartCdataSectionHandler = self.start_cdata_section_handler
parser.EndCdataSectionHandler = self.end_cdata_section_handler
parser.CharacterDataHandler = self.character_data_handler_cdata
else:
parser.CharacterDataHandler = self.character_data_handler
parser.ExternalEntityRefHandler = self.external_entity_ref_handler
parser.XmlDeclHandler = self.xml_decl_handler
parser.ElementDeclHandler = self.element_decl_handler
parser.AttlistDeclHandler = self.attlist_decl_handler
def parseFile(self, file):
"""Parse a document from a file object, returning the document
node."""
parser = self.getParser()
first_buffer = True
try:
while 1:
buffer = file.read(16*1024)
if not buffer:
break
parser.Parse(buffer, 0)
if first_buffer and self.document.documentElement:
self._setup_subset(buffer)
first_buffer = False
parser.Parse("", True)
except ParseEscape:
pass
doc = self.document
self.reset()
self._parser = None
return doc
def parseString(self, string):
"""Parse a document from a string, returning the document node."""
parser = self.getParser()
try:
parser.Parse(string, True)
self._setup_subset(string)
except ParseEscape:
pass
doc = self.document
self.reset()
self._parser = None
return doc
def _setup_subset(self, buffer):
"""Load the internal subset if there might be one."""
if self.document.doctype:
extractor = InternalSubsetExtractor()
extractor.parseString(buffer)
subset = extractor.getSubset()
self.document.doctype.internalSubset = subset
def start_doctype_decl_handler(self, doctypeName, systemId, publicId,
has_internal_subset):
doctype = self.document.implementation.createDocumentType(
doctypeName, publicId, systemId)
doctype.ownerDocument = self.document
_append_child(self.document, doctype)
self.document.doctype = doctype
if self._filter and self._filter.acceptNode(doctype) == FILTER_REJECT:
self.document.doctype = None
del self.document.childNodes[-1]
doctype = None
self._parser.EntityDeclHandler = None
self._parser.NotationDeclHandler = None
if has_internal_subset:
if doctype is not None:
doctype.entities._seq = []
doctype.notations._seq = []
self._parser.CommentHandler = None
self._parser.ProcessingInstructionHandler = None
self._parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
def end_doctype_decl_handler(self):
if self._options.comments:
self._parser.CommentHandler = self.comment_handler
self._parser.ProcessingInstructionHandler = self.pi_handler
if not (self._elem_info or self._filter):
self._finish_end_element = id
def pi_handler(self, target, data):
node = self.document.createProcessingInstruction(target, data)
_append_child(self.curNode, node)
if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
self.curNode.removeChild(node)
def character_data_handler_cdata(self, data):
childNodes = self.curNode.childNodes
if self._cdata:
if ( self._cdata_continue
and childNodes[-1].nodeType == CDATA_SECTION_NODE):
childNodes[-1].appendData(data)
return
node = self.document.createCDATASection(data)
self._cdata_continue = True
elif childNodes and childNodes[-1].nodeType == TEXT_NODE:
node = childNodes[-1]
value = node.data + data
node.data = value
return
else:
node = minidom.Text()
node.data = data
node.ownerDocument = self.document
_append_child(self.curNode, node)
def character_data_handler(self, data):
childNodes = self.curNode.childNodes
if childNodes and childNodes[-1].nodeType == TEXT_NODE:
node = childNodes[-1]
node.data = node.data + data
return
node = minidom.Text()
node.data = node.data + data
node.ownerDocument = self.document
_append_child(self.curNode, node)
def entity_decl_handler(self, entityName, is_parameter_entity, value,
base, systemId, publicId, notationName):
if is_parameter_entity:
# we don't care about parameter entities for the DOM
return
if not self._options.entities:
return
node = self.document._create_entity(entityName, publicId,
systemId, notationName)
if value is not None:
# internal entity
# node *should* be readonly, but we'll cheat
child = self.document.createTextNode(value)
node.childNodes.append(child)
self.document.doctype.entities._seq.append(node)
if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
del self.document.doctype.entities._seq[-1]
def notation_decl_handler(self, notationName, base, systemId, publicId):
node = self.document._create_notation(notationName, publicId, systemId)
self.document.doctype.notations._seq.append(node)
if self._filter and self._filter.acceptNode(node) == FILTER_ACCEPT:
del self.document.doctype.notations._seq[-1]
def comment_handler(self, data):
node = self.document.createComment(data)
_append_child(self.curNode, node)
if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
self.curNode.removeChild(node)
def start_cdata_section_handler(self):
self._cdata = True
self._cdata_continue = False
def end_cdata_section_handler(self):
self._cdata = False
self._cdata_continue = False
def external_entity_ref_handler(self, context, base, systemId, publicId):
return 1
def first_element_handler(self, name, attributes):
if self._filter is None and not self._elem_info:
self._finish_end_element = id
self.getParser().StartElementHandler = self.start_element_handler
self.start_element_handler(name, attributes)
def start_element_handler(self, name, attributes):
node = self.document.createElement(name)
_append_child(self.curNode, node)
self.curNode = node
if attributes:
for i in range(0, len(attributes), 2):
a = minidom.Attr(attributes[i], EMPTY_NAMESPACE,
None, EMPTY_PREFIX)
value = attributes[i+1]
a.value = value
a.ownerDocument = self.document
_set_attribute_node(node, a)
if node is not self.document.documentElement:
self._finish_start_element(node)
def _finish_start_element(self, node):
if self._filter:
# To be general, we'd have to call isSameNode(), but this
# is sufficient for minidom:
if node is self.document.documentElement:
return
filt = self._filter.startContainer(node)
if filt == FILTER_REJECT:
# ignore this node & all descendents
Rejecter(self)
elif filt == FILTER_SKIP:
# ignore this node, but make it's children become
# children of the parent node
Skipper(self)
else:
return
self.curNode = node.parentNode
node.parentNode.removeChild(node)
node.unlink()
# If this ever changes, Namespaces.end_element_handler() needs to
# be changed to match.
#
def end_element_handler(self, name):
curNode = self.curNode
self.curNode = curNode.parentNode
self._finish_end_element(curNode)
def _finish_end_element(self, curNode):
info = self._elem_info.get(curNode.tagName)
if info:
self._handle_white_text_nodes(curNode, info)
if self._filter:
if curNode is self.document.documentElement:
return
if self._filter.acceptNode(curNode) == FILTER_REJECT:
self.curNode.removeChild(curNode)
curNode.unlink()
def _handle_white_text_nodes(self, node, info):
if (self._options.whitespace_in_element_content
or not info.isElementContent()):
return
# We have element type information and should remove ignorable
# whitespace; identify for text nodes which contain only
# whitespace.
L = []
for child in node.childNodes:
if child.nodeType == TEXT_NODE and not child.data.strip():
L.append(child)
# Remove ignorable whitespace from the tree.
for child in L:
node.removeChild(child)
def element_decl_handler(self, name, model):
info = self._elem_info.get(name)
if info is None:
self._elem_info[name] = ElementInfo(name, model)
else:
assert info._model is None
info._model = model
    def attlist_decl_handler(self, elem, name, type, default, required):
        """expat AttlistDeclHandler: record one attribute declaration.

        Creates the ElementInfo entry on demand (attlist decls may precede
        the element decl; element_decl_handler fills in the model later).
        """
        info = self._elem_info.get(elem)
        if info is None:
            info = ElementInfo(elem)
            self._elem_info[elem] = info
        # 8-slot record consumed by ElementInfo -- field layout assumed to
        # match ElementInfo's expectations; TODO confirm slot meanings.
        info._attr_info.append(
            [None, name, None, None, default, 0, type, required])
def xml_decl_handler(self, version, encoding, standalone):
self.document.version = version
self.document.encoding = encoding
# This is still a little ugly, thanks to the pyexpat API. ;-(
if standalone >= 0:
if standalone:
self.document.standalone = True
else:
self.document.standalone = False
# Don't include FILTER_INTERRUPT, since that's checked separately
# where allowed (FilterVisibilityController raises ParseEscape for it
# instead of returning it to the caller).
_ALLOWED_FILTER_RETURNS = (FILTER_ACCEPT, FILTER_REJECT, FILTER_SKIP)
class FilterVisibilityController(object):
    """Wrapper around a DOMBuilderFilter which implements the checks
    to make the whatToShow filter attribute work."""

    __slots__ = 'filter',

    def __init__(self, filter):
        self.filter = filter

    def startContainer(self, node):
        """Forward to the filter's startContainer() when the node type is
        covered by whatToShow; otherwise accept implicitly."""
        if not (self.filter.whatToShow & self._nodetype_mask[node.nodeType]):
            return FILTER_ACCEPT
        decision = self.filter.startContainer(node)
        if decision == FILTER_INTERRUPT:
            raise ParseEscape
        if decision not in _ALLOWED_FILTER_RETURNS:
            raise ValueError(
                "startContainer() returned illegal value: " + repr(decision))
        return decision

    def acceptNode(self, node):
        """Forward to the filter's acceptNode(); SKIP is translated into
        "splice children into the parent, then reject the node"."""
        if not (self.filter.whatToShow & self._nodetype_mask[node.nodeType]):
            return FILTER_ACCEPT
        decision = self.filter.acceptNode(node)
        if decision == FILTER_INTERRUPT:
            raise ParseEscape
        if decision == FILTER_SKIP:
            # Move all child nodes to the parent; iterate over a copy since
            # appendChild() mutates node.childNodes.
            parent = node.parentNode
            for child in node.childNodes[:]:
                parent.appendChild(child)
            # node is handled by the caller
            return FILTER_REJECT
        if decision not in _ALLOWED_FILTER_RETURNS:
            raise ValueError(
                "acceptNode() returned illegal value: " + repr(decision))
        return decision

    # Map each DOM node type to the NodeFilter.SHOW_* bit that governs it.
    _nodetype_mask = {
        Node.ELEMENT_NODE: NodeFilter.SHOW_ELEMENT,
        Node.ATTRIBUTE_NODE: NodeFilter.SHOW_ATTRIBUTE,
        Node.TEXT_NODE: NodeFilter.SHOW_TEXT,
        Node.CDATA_SECTION_NODE: NodeFilter.SHOW_CDATA_SECTION,
        Node.ENTITY_REFERENCE_NODE: NodeFilter.SHOW_ENTITY_REFERENCE,
        Node.ENTITY_NODE: NodeFilter.SHOW_ENTITY,
        Node.PROCESSING_INSTRUCTION_NODE: NodeFilter.SHOW_PROCESSING_INSTRUCTION,
        Node.COMMENT_NODE: NodeFilter.SHOW_COMMENT,
        Node.DOCUMENT_NODE: NodeFilter.SHOW_DOCUMENT,
        Node.DOCUMENT_TYPE_NODE: NodeFilter.SHOW_DOCUMENT_TYPE,
        Node.DOCUMENT_FRAGMENT_NODE: NodeFilter.SHOW_DOCUMENT_FRAGMENT,
        Node.NOTATION_NODE: NodeFilter.SHOW_NOTATION,
        }
class FilterCrutch(object):
    """Base for temporary expat handler hijackers (Rejecter, Skipper).

    On construction it saves the parser's current start/end element
    handlers and installs its own.  Subclasses must define
    start_element_handler() and end_element_handler(); this base class
    itself does not, so it is never instantiated directly.
    """
    __slots__ = '_builder', '_level', '_old_start', '_old_end'

    def __init__(self, builder):
        self._level = 0
        self._builder = builder
        parser = builder._parser
        self._old_start = parser.StartElementHandler
        self._old_end = parser.EndElementHandler
        parser.StartElementHandler = self.start_element_handler
        parser.EndElementHandler = self.end_element_handler
class Rejecter(FilterCrutch):
    """Consume and discard everything inside a filter-rejected element.

    _level counts nested open elements; when the matching close of the
    rejected element arrives (level back at 0), the original handlers are
    restored.
    """
    __slots__ = ()

    def __init__(self, builder):
        FilterCrutch.__init__(self, builder)
        parser = builder._parser
        # Silence every other callback so rejected content produces no
        # DOM nodes at all.
        for name in ("ProcessingInstructionHandler",
                     "CommentHandler",
                     "CharacterDataHandler",
                     "StartCdataSectionHandler",
                     "EndCdataSectionHandler",
                     "ExternalEntityRefHandler",
                     ):
            setattr(parser, name, None)

    def start_element_handler(self, *args):
        self._level = self._level + 1

    def end_element_handler(self, *args):
        if self._level == 0:
            # restore the old handlers
            parser = self._builder._parser
            self._builder.install(parser)
            parser.StartElementHandler = self._old_start
            parser.EndElementHandler = self._old_end
        else:
            self._level = self._level - 1
class Skipper(FilterCrutch):
    """Re-parent the content of a filter-skipped element.

    Children are still built normally (the saved handlers keep running);
    only the skipped element itself is dropped once its end tag arrives.
    """
    __slots__ = ()

    def start_element_handler(self, *args):
        node = self._builder.curNode
        self._old_start(*args)
        # Only count the start tag if it actually opened a new node
        # (the builder's own filter may have discarded it).
        if self._builder.curNode is not node:
            self._level = self._level + 1

    def end_element_handler(self, *args):
        if self._level == 0:
            # We're popping back out of the node we're skipping, so we
            # shouldn't need to do anything but reset the handlers.
            self._builder._parser.StartElementHandler = self._old_start
            self._builder._parser.EndElementHandler = self._old_end
            self._builder = None
        else:
            self._level = self._level - 1
            self._old_end(*args)
# framework document used by the fragment builder.
# Takes a string for the doctype, subset string, and namespace attrs string.
_FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID = \
"http://xml.python.org/entities/fragment-builder/internal"
_FRAGMENT_BUILDER_TEMPLATE = (
'''\
<!DOCTYPE wrapper
%%s [
<!ENTITY fragment-builder-internal
SYSTEM "%s">
%%s
]>
<wrapper %%s
>&fragment-builder-internal;</wrapper>'''
% _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID)
class FragmentBuilder(ExpatBuilder):
    """Builder which constructs document fragments given XML source
    text and a context node.

    The context node is expected to provide information about the
    namespace declarations which are in scope at the start of the
    fragment.
    """

    def __init__(self, context, options=None):
        # Keep both the context node and its owning document; the latter
        # supplies the doctype and becomes the fragment's ownerDocument.
        if context.nodeType == DOCUMENT_NODE:
            self.originalDocument = context
            self.context = context
        else:
            self.originalDocument = context.ownerDocument
            self.context = context
        ExpatBuilder.__init__(self, options)

    def reset(self):
        ExpatBuilder.reset(self)
        self.fragment = None

    def parseFile(self, file):
        """Parse a document fragment from a file object, returning the
        fragment node."""
        return self.parseString(file.read())

    def parseString(self, string):
        """Parse a document fragment from a string, returning the
        fragment node."""
        # The fragment source is not parsed directly: it is spliced into a
        # wrapper document (via an external entity reference) so that the
        # context's doctype and namespace declarations are in scope.
        self._source = string
        parser = self.getParser()
        doctype = self.originalDocument.doctype
        ident = ""
        if doctype:
            subset = doctype.internalSubset or self._getDeclarations()
            if doctype.publicId:
                ident = ('PUBLIC "%s" "%s"'
                         % (doctype.publicId, doctype.systemId))
            elif doctype.systemId:
                ident = 'SYSTEM "%s"' % doctype.systemId
        else:
            subset = ""
        nsattrs = self._getNSattrs() # get ns decls from node's ancestors
        document = _FRAGMENT_BUILDER_TEMPLATE % (ident, subset, nsattrs)
        try:
            parser.Parse(document, 1)
        except:
            self.reset()
            raise
        fragment = self.fragment
        self.reset()
##         self._parser = None
        return fragment

    def _getDeclarations(self):
        """Re-create the internal subset from the DocumentType node.

        This is only needed if we don't already have the
        internalSubset as a string.
        """
        doctype = self.context.ownerDocument.doctype
        s = ""
        if doctype:
            # Serialize notation declarations first, then entities, in the
            # form expected inside a DOCTYPE internal subset.
            for i in range(doctype.notations.length):
                notation = doctype.notations.item(i)
                if s:
                    s = s + "\n  "
                s = "%s<!NOTATION %s" % (s, notation.nodeName)
                if notation.publicId:
                    s = '%s PUBLIC "%s"\n             "%s">' \
                        % (s, notation.publicId, notation.systemId)
                else:
                    s = '%s SYSTEM "%s">' % (s, notation.systemId)
            for i in range(doctype.entities.length):
                entity = doctype.entities.item(i)
                if s:
                    s = s + "\n  "
                s = "%s<!ENTITY %s" % (s, entity.nodeName)
                if entity.publicId:
                    s = '%s PUBLIC "%s"\n             "%s"' \
                        % (s, entity.publicId, entity.systemId)
                elif entity.systemId:
                    s = '%s SYSTEM "%s"' % (s, entity.systemId)
                else:
                    s = '%s "%s"' % (s, entity.firstChild.data)
                if entity.notationName:
                    s = "%s NOTATION %s" % (s, entity.notationName)
                s = s + ">"
        return s

    def _getNSattrs(self):
        # Non-namespace builder: no namespace attributes to re-declare.
        return ""

    def external_entity_ref_handler(self, context, base, systemId, publicId):
        if systemId == _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID:
            # this entref is the one that we made to put the subtree
            # in; all of our given input is parsed in here.
            old_document = self.document
            old_cur_node = self.curNode
            parser = self._parser.ExternalEntityParserCreate(context)
            # put the real document back, parse into the fragment to return
            self.document = self.originalDocument
            self.fragment = self.document.createDocumentFragment()
            self.curNode = self.fragment
            try:
                parser.Parse(self._source, 1)
            finally:
                self.curNode = old_cur_node
                self.document = old_document
                self._source = None
            return -1
        else:
            return ExpatBuilder.external_entity_ref_handler(
                self, context, base, systemId, publicId)
class Namespaces:
    """Mix-in class for builders; adds support for namespaces."""

    def _initNamespaces(self):
        # list of (prefix, uri) ns declarations.  Namespace attrs are
        # constructed from this and added to the element's attrs.
        self._ns_ordered_prefixes = []

    def createParser(self):
        """Create a new namespace-handling parser."""
        # The single-space separator makes expat report qualified names as
        # "uri localname prefix", which _parse_ns_name() splits apart.
        parser = expat.ParserCreate(namespace_separator=" ")
        parser.namespace_prefixes = True
        return parser

    def install(self, parser):
        """Insert the namespace-handlers onto the parser."""
        ExpatBuilder.install(self, parser)
        if self._options.namespace_declarations:
            parser.StartNamespaceDeclHandler = (
                self.start_namespace_decl_handler)

    def start_namespace_decl_handler(self, prefix, uri):
        """Push this namespace declaration on our storage."""
        self._ns_ordered_prefixes.append((prefix, uri))

    def start_element_handler(self, name, attributes):
        """Namespace-aware StartElementHandler (see createParser for the
        expat name encoding this relies on)."""
        if ' ' in name:
            uri, localname, prefix, qname = _parse_ns_name(self, name)
        else:
            uri = EMPTY_NAMESPACE
            qname = name
            localname = None
            prefix = EMPTY_PREFIX
        node = minidom.Element(qname, uri, prefix, localname)
        node.ownerDocument = self.document
        _append_child(self.curNode, node)
        self.curNode = node

        # Materialize any pending xmlns declarations as attributes of
        # this element, then clear the pending list.
        if self._ns_ordered_prefixes:
            for prefix, uri in self._ns_ordered_prefixes:
                if prefix:
                    a = minidom.Attr(_intern(self, 'xmlns:' + prefix),
                                     XMLNS_NAMESPACE, prefix, "xmlns")
                else:
                    a = minidom.Attr("xmlns", XMLNS_NAMESPACE,
                                     "xmlns", EMPTY_PREFIX)
                a.value = uri
                a.ownerDocument = self.document
                _set_attribute_node(node, a)
            del self._ns_ordered_prefixes[:]

        if attributes:
            # Populate the element's attribute maps directly (faster than
            # going through setAttributeNode for each attribute).
            node._ensure_attributes()
            _attrs = node._attrs
            _attrsNS = node._attrsNS
            for i in range(0, len(attributes), 2):
                aname = attributes[i]
                value = attributes[i+1]
                if ' ' in aname:
                    uri, localname, prefix, qname = _parse_ns_name(self, aname)
                    a = minidom.Attr(qname, uri, localname, prefix)
                    _attrs[qname] = a
                    _attrsNS[(uri, localname)] = a
                else:
                    a = minidom.Attr(aname, EMPTY_NAMESPACE,
                                     aname, EMPTY_PREFIX)
                    _attrs[aname] = a
                    _attrsNS[(EMPTY_NAMESPACE, aname)] = a
                a.ownerDocument = self.document
                a.value = value
                a.ownerElement = node

    if __debug__:
        # This only adds some asserts to the original
        # end_element_handler(), so we only define this when -O is not
        # used.  If changing one, be sure to check the other to see if
        # it needs to be changed as well.
        #
        def end_element_handler(self, name):
            curNode = self.curNode
            if ' ' in name:
                uri, localname, prefix, qname = _parse_ns_name(self, name)
                assert (curNode.namespaceURI == uri
                        and curNode.localName == localname
                        and curNode.prefix == prefix), \
                        "element stack messed up! (namespace)"
            else:
                assert curNode.nodeName == name, \
                       "element stack messed up - bad nodeName"
                assert curNode.namespaceURI == EMPTY_NAMESPACE, \
                       "element stack messed up - bad namespaceURI"
            self.curNode = curNode.parentNode
            self._finish_end_element(curNode)
class ExpatBuilderNS(Namespaces, ExpatBuilder):
    """Document builder that supports namespaces."""

    def reset(self):
        ExpatBuilder.reset(self)
        # Also reset the pending xmlns declaration list from the mix-in.
        self._initNamespaces()
class FragmentBuilderNS(Namespaces, FragmentBuilder):
    """Fragment builder that supports namespaces."""

    def reset(self):
        FragmentBuilder.reset(self)
        self._initNamespaces()

    def _getNSattrs(self):
        """Return string of namespace attributes from this element and
        ancestors."""
        # XXX This needs to be re-written to walk the ancestors of the
        # context to build up the namespace information from
        # declarations, elements, and attributes found in context.
        # Otherwise we have to store a bunch more data on the DOM
        # (though that *might* be more reliable -- not clear).
        attrs = ""
        context = self.context
        # L tracks prefixes already emitted so inner declarations shadow
        # outer ones (closest ancestor wins).
        L = []
        while context:
            if hasattr(context, '_ns_prefix_uri'):
                for prefix, uri in context._ns_prefix_uri.items():
                    # add every new NS decl from context to L and attrs string
                    if prefix in L:
                        continue
                    L.append(prefix)
                    if prefix:
                        declname = "xmlns:" + prefix
                    else:
                        declname = "xmlns"
                    if attrs:
                        attrs = "%s\n    %s='%s'" % (attrs, declname, uri)
                    else:
                        attrs = " %s='%s'" % (declname, uri)
            context = context.parentNode
        return attrs
class ParseEscape(Exception):
    """Exception raised to short-circuit parsing in InternalSubsetExtractor.

    Also raised by FilterVisibilityController when a filter returns
    FILTER_INTERRUPT.
    """
    pass
class InternalSubsetExtractor(ExpatBuilder):
    """XML processor which can rip out the internal document type subset."""

    # Holds the extracted subset text; stays None when the document has no
    # internal subset.
    subset = None

    def getSubset(self):
        """Return the internal subset as a string."""
        return self.subset

    def parseFile(self, file):
        # ParseEscape is the normal early-exit signal here, not an error.
        try:
            ExpatBuilder.parseFile(self, file)
        except ParseEscape:
            pass

    def parseString(self, string):
        try:
            ExpatBuilder.parseString(self, string)
        except ParseEscape:
            pass

    def install(self, parser):
        # Only two handlers are needed: everything interesting happens
        # before (or instead of) the first element.
        parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
        parser.StartElementHandler = self.start_element_handler

    def start_doctype_decl_handler(self, name, publicId, systemId,
                                   has_internal_subset):
        if has_internal_subset:
            # Collect the raw subset text through the DefaultHandler until
            # the doctype declaration ends.
            parser = self.getParser()
            self.subset = []
            parser.DefaultHandler = self.subset.append
            parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
        else:
            raise ParseEscape()

    def end_doctype_decl_handler(self):
        # Join the collected pieces and normalize line endings to '\n'.
        s = ''.join(self.subset).replace('\r\n', '\n').replace('\r', '\n')
        self.subset = s
        raise ParseEscape()

    def start_element_handler(self, name, attrs):
        # Reaching the document element means there was no doctype at all.
        raise ParseEscape()
def parse(file, namespaces=True):
    """Parse a document, returning the resulting Document node.

    'file' may be either a file name or an open file object.
    When *namespaces* is true (the default) a namespace-aware builder
    is used.
    """
    builder = ExpatBuilderNS() if namespaces else ExpatBuilder()
    if isinstance(file, str):
        with open(file, 'rb') as fp:
            return builder.parseFile(fp)
    return builder.parseFile(file)
def parseString(string, namespaces=True):
    """Parse a document from a string, returning the resulting
    Document node.

    When *namespaces* is true (the default) a namespace-aware builder
    is used.
    """
    builder_class = ExpatBuilderNS if namespaces else ExpatBuilder
    return builder_class().parseString(string)
def parseFragment(file, context, namespaces=True):
    """Parse a fragment of a document, given the context from which it
    was originally extracted.  context should be the parent of the
    node(s) which are in the fragment.

    'file' may be either a file name or an open file object.
    """
    builder = (FragmentBuilderNS(context) if namespaces
               else FragmentBuilder(context))
    if isinstance(file, str):
        with open(file, 'rb') as fp:
            return builder.parseFile(fp)
    return builder.parseFile(file)
def parseFragmentString(string, context, namespaces=True):
    """Parse a fragment of a document from a string, given the context
    from which it was originally extracted.  context should be the
    parent of the node(s) which are in the fragment.
    """
    builder = (FragmentBuilderNS(context) if namespaces
               else FragmentBuilder(context))
    return builder.parseString(string)
def makeBuilder(options):
    """Create a builder based on an Options object.

    A truthy options.namespaces selects the namespace-aware builder.
    """
    builder_class = ExpatBuilderNS if options.namespaces else ExpatBuilder
    return builder_class(options)
| lgpl-3.0 |
stefanaspect/browserresearch | cookietesting.py | 2 | 1175 | #!/usr/bin/env python
""" A simple web server to test how IE deals with cookie paths.
Usage:
`python cookietesting.py`
and then, from within your browser go to: http://localhost:8082/gateway
and the other URLs.
"""
from bottle import route, run, response, request
tmpl = None
@route('/gateway')
def gateway():
    """Echo cookie "foo"; on first visit set it, scoped to path /gateway.

    NOTE(review): the next two handlers reuse the name `gateway`.  Bottle
    captures each function at decoration time so routing still works, but
    distinct names would help tracebacks and debugging.
    """
    foo = request.get_cookie("foo")
    print "in /gateway, foo = {0}".format(foo)
    if foo is None:
        response.set_cookie("foo", "foo-gateway", path="/gateway")
    return "{0}".format(foo)
@route('/gateway-backend')
def gateway():
    """Echo cookie "foo"; set it scoped to /gateway-backend on first visit.

    NOTE(review): reuses the function name `gateway` (see the /gateway
    handler); harmless for routing but worth renaming.
    """
    foo = request.get_cookie("foo")
    print "in /gateway-backend, foo = {0}".format(foo)
    if foo is None:
        response.set_cookie("foo", "foo-gateway-backend", path="/gateway-backend")
    return "{0}".format(foo)
@route('/g')
def gateway():
    """Echo cookie "foo"; set it scoped to /g on first visit.

    Exists to test how the browser matches cookie paths against the
    shorter prefix /g vs. /gateway.
    NOTE(review): reuses the function name `gateway` as well.
    """
    foo = request.get_cookie("foo")
    print "in /g, foo = {0}".format(foo)
    if foo is None:
        response.set_cookie("foo", "foo", path="/g")
    return "{0}".format(foo)
@route('/')
def index():
    """Serve cookietesting.html, reading it from disk once and caching it."""
    global tmpl
    if tmpl is None:
        # `file()` is the Python 2 built-in alias of open().
        with file('cookietesting.html') as fh:
            tmpl = fh.read()
    return tmpl
run(port=8082)
| isc |
mitchsmith/qd_screener | questionnaire/admin.py | 1 | 7839 | from django.contrib import admin
from django.forms import ModelChoiceField, ModelForm, Textarea
from questionnaire.models import *
from django.core.exceptions import ValidationError
from django.forms.models import BaseInlineFormSet
from questionnaire import admin_helper
############################################### HELPER METHODS ###########################################
def is_strictly_monotonically_increasing(sequence):
    """Return True when every element of *sequence* is smaller than its
    successor (an empty or single-element sequence qualifies trivially).

    Used to validate the display order specified for Questions and Answers.
    """
    for previous, current in zip(sequence, sequence[1:]):
        if previous >= current:
            return False
    return True
############################################### Questionnaire Manager ##############################################
class CustomQuestionnaireModelField(ModelChoiceField):
    """Choice field that labels each Questionnaire option by its title."""
    def label_from_instance(self, obj):
        return "%s" % obj.title
class QuestionnaireManagerAdminForm(ModelForm):
    """Admin form using title-based labels for the questionnaire chooser."""
    current_questionnaire = CustomQuestionnaireModelField(queryset=Questionnaire.objects.all())
    class Meta:
        model = QuestionnaireManager
class QuestionnaireManagerAdmin(admin.ModelAdmin):
    """Admin configuration for the QuestionnaireManager model."""
    form = QuestionnaireManagerAdminForm
admin.site.register(QuestionnaireManager, QuestionnaireManagerAdmin)
################################################ Answer ###################################################
class CustomQuestionField(ModelChoiceField):
    """Choice field that labels each Question option by its question text."""
    def label_from_instance(self, obj):
        return "%s" % obj.question
class AnswerAdminForm(ModelForm):
    # NOTE(review): no Meta/model is declared and this form is not referenced
    # anywhere else in this module -- confirm it is still needed.
    question = CustomQuestionField(queryset=Question.objects.all())
def Question_Text(obj):
    """Admin list-display helper: the text of *obj*'s related question."""
    related_question = obj.question
    return related_question.question
class AnswerFormSet(BaseInlineFormSet):
    def clean(self):
        """
        Check that:
        1. There's only one correct answer (this must be checked to be eligible)
        2. A valid order in which to display answers has been specified
        3. There are at least 2 answers
        """
        super(AnswerFormSet, self).clean()
        # Check #1
        specified_sequence = []
        num_correct_answers = 0
        num_answers = 0
        for form in self.forms:
            if not hasattr(form, 'cleaned_data'):
                continue
            data = form.cleaned_data
            if data.get('is_correct', False):
                num_correct_answers += 1
            # NOTE(review): `data` is rebound here to the sequence value.
            data = data.get('sequence_order', -1)
            if data > -1:
                specified_sequence.append(data)
            num_answers += 1
        if num_correct_answers != 1:
            raise ValidationError('Need to choose one "correct" answer')
        # Check #2
        # NOTE(review): after sorted(), strict increase only guarantees the
        # values are distinct; a sequence with gaps (e.g. 1, 3) still passes
        # even though the error message promises "without skipping".
        specified_sequence = sorted(specified_sequence)
        if not is_strictly_monotonically_increasing(specified_sequence):
            message = """ The order you've specified in which to display answers doens't make sense.
                      Please enter a sequence starting with 1, without skipping or repeating numbers. """
            raise ValidationError(message)
        # Check #3
        if num_answers < 2:
            message = 'There should be at least 2 answers'
            raise ValidationError(message)
class AnswerInline(admin.TabularInline):
    """Inline editor for a Question's Answers, validated by AnswerFormSet."""
    model = Answer
    formset = AnswerFormSet
    ordering = ('-created_on',)
################################################# Question ##############################################
def Questionnaire_Title(obj):
    """Admin list-display helper: the title of *obj*'s questionnaire."""
    related_questionnaire = obj.questionnaire
    return related_questionnaire.title
class QuestionAdminForm(ModelForm):
    # questionnaire = CustomQuestionnaireModelField(queryset=Questionnaire.objects.all())
    def check_url(self, url):
        """Loosely validate *url*; an empty string is considered valid.

        NOTE(review): startswith('http') also accepts strings such as
        "httpfoo"; a stricter check ("http://"/"https://" or urlparse)
        may be intended.
        """
        if len(url) > 0:
            return url.startswith('http')
        return True
    def clean(self):
        """Require related_content_link and related_content_text to be set
        together (both filled or both blank), and the link to look like a URL."""
        cleaned_data = super(QuestionAdminForm, self).clean()
        related_content_link = cleaned_data.get('related_content_link')
        related_content_text = cleaned_data.get('related_content_text')
        # Sum > 0: at least one non-blank; product > 0: both non-blank.
        at_least_one_field_has_text = (len(related_content_link.strip()) + len(related_content_text.strip())) > 0
        both_fields_have_text = (len(related_content_link.strip()) * len(related_content_text.strip())) > 0
        if at_least_one_field_has_text and not both_fields_have_text:
            raise ValidationError('Both related_content_link and related_content_text need to be either set or empty')
        if not self.check_url(related_content_link):
            raise ValidationError('%s does not seem to be a valid url' % related_content_link)
        return cleaned_data
    class Meta:
        model = Question
        widgets = {
            'question': Textarea
        }
class QuestionAdmin(admin.ModelAdmin):
    """Admin for Questions with inline Answers and custom list columns."""
    inlines = [ AnswerInline, ]
    search_fields = ['questionnaire__title', 'question' ]
    list_display = ['question', Questionnaire_Title, 'sequence_order', 'created_on']
    list_filter = ['created_on', admin_helper.QuestionListFilter]
    ordering = ('-created_on',)
    form = QuestionAdminForm
admin.site.register(Question, QuestionAdmin)
class QuestionFormSet(BaseInlineFormSet):
    def clean(self):
        """
        1. Check that all answers have been assigned a sequence (1..k)
        in order, without skipping indices, and unique!
        2. Check that related_content_link and related_content_text are both either
        specified or blank
        """
        super(QuestionFormSet, self).clean()
        # Check #1
        specified_sequence = []
        for form in self.forms:
            if not hasattr(form, 'cleaned_data'):
                continue
            data = form.cleaned_data
            # NOTE(review): `data` is rebound here to the sequence value.
            data = data.get('sequence_order', -1)
            if data > -1:
                specified_sequence.append(data)
        # NOTE(review): as in AnswerFormSet, this only rejects duplicates,
        # not gaps, despite the message wording.
        specified_sequence = sorted(specified_sequence)
        if not is_strictly_monotonically_increasing(specified_sequence):
            message = """ The order you've specified in which to display questions doens't make sense.
                      Please enter a sequence starting with 1, without skipping or repeating numbers. """
            raise ValidationError(message)
        # Check #2
        for form in self.forms:
            if not hasattr(form, 'cleaned_data'):
                continue
            data = form.cleaned_data
            related_content_link = data.get('related_content_link', '').strip()
            related_content_text = data.get('related_content_text', '').strip()
            # Sum > 0: at least one non-blank; product > 0: both non-blank.
            at_least_one_field_has_text = (len(related_content_link.strip()) + len(related_content_text.strip())) > 0
            both_fields_have_text = (len(related_content_link.strip()) * len(related_content_text.strip())) > 0
            if at_least_one_field_has_text and not both_fields_have_text:
                raise ValidationError('Both related_content_link and related_content_text need to be either set or empty')
class QuestionInline(admin.TabularInline):
    """Inline editor for a Questionnaire's Questions."""
    model = Question
    formset = QuestionFormSet
    ordering = ('-created_on',)
    def formfield_for_dbfield(self, db_field, **kwargs):
        # Use a multi-line widget for the question text column.
        if db_field.name == 'question':
            kwargs['widget'] = Textarea()
        return super(QuestionInline,self).formfield_for_dbfield(db_field,**kwargs)
############################################## Questionnaire ###############################################
class QuestionnaireAdmin(admin.ModelAdmin):
    """Admin for Questionnaires with inline Questions."""
    inlines = [ QuestionInline ]
    list_display = ['title', 'description', 'created_on']
    list_filter = ['created_on']
    search_fields = ['title', 'sub_title']
    ordering = ('-created_on',)
admin.site.register(Questionnaire, QuestionnaireAdmin)
class StudyAdmin(admin.ModelAdmin):
    """Admin for Studies, keyed by protocol number."""
    list_display = ['protocol_number', 'created_on']
    list_filter = ['created_on']
    search_fields = ['protocol_number']
    ordering = ('-created_on',)
admin.site.register(Study, StudyAdmin)
| mit |
mattnenterprise/servo | tests/wpt/web-platform-tests/css/tools/travis/vcs.py | 46 | 1115 | import subprocess
import sys
from functools import partial
def vcs(bin_name):
    """Build a generic command runner for the VCS binary *bin_name*.

    (Python 2 module: uses `print >>` and `raise TypeError, ...` syntax.)
    """
    def inner(command, *args, **kwargs):
        """Run `<bin_name> <command> <args...>` and return its combined output.

        Recognized keyword options (popped from kwargs):
          repo      -- directory to run the command in (passed as cwd)
          log_error -- echo the command's output to stderr on failure (default True)
          quiet     -- suppress echoing of the command line (default False)
        Any other keyword argument is rejected.
        """
        repo = kwargs.pop("repo", None)
        log_error = kwargs.pop("log_error", True)
        quiet = kwargs.pop("quiet", False)
        if kwargs:
            raise TypeError, kwargs
        args = list(args)

        proc_kwargs = {}
        if repo is not None:
            proc_kwargs["cwd"] = repo

        command_line = [bin_name, command] + args
        if not quiet:
            # Echo at most the first 10 tokens to keep the log readable.
            print >> sys.stderr, " ".join(command_line[:10])

        try:
            return subprocess.check_output(command_line, stderr=subprocess.STDOUT, **proc_kwargs)
        except subprocess.CalledProcessError as e:
            if log_error:
                print >> sys.stderr, e.output
            raise

    return inner
# Pre-built runners for the two supported VCS binaries.
git = vcs("git")
hg = vcs("hg")
def bind_to_repo(vcs_func, repo):
    """Return *vcs_func* with its ``repo`` keyword argument pre-bound."""
    bound_runner = partial(vcs_func, repo=repo)
    return bound_runner
def is_git_root(path):
    """Return True when *path* is the top level of a git working tree.

    `git rev-parse --show-cdup` prints an empty relative path (just a
    newline) at the repository root; any CalledProcessError means *path*
    is not inside a git repository at all.
    """
    try:
        rv = git("rev-parse", "--show-cdup", repo=path)
    except subprocess.CalledProcessError:
        return False
    return rv == "\n"
| mpl-2.0 |
DanteOnline/free-art | venv/lib/python3.4/site-packages/pip/_vendor/progress/bar.py | 510 | 2685 | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from . import Progress
from .helpers import WritelnMixin
class Bar(WritelnMixin, Progress):
    """Classic progress bar: a fixed-width gauge filled in proportion to
    the overall progress, framed by prefix/suffix decorations."""
    width = 32
    message = ''
    suffix = '%(index)d/%(max)d'
    bar_prefix = ' |'
    bar_suffix = '| '
    empty_fill = ' '
    fill = '#'
    hide_cursor = True

    def update(self):
        # Cells to fill grow linearly with self.progress (0.0 .. 1.0).
        done_cells = int(self.width * self.progress)
        empty_cells = self.width - done_cells
        pieces = [
            self.message % self,
            self.bar_prefix,
            self.fill * done_cells,
            self.empty_fill * empty_cells,
            self.bar_suffix,
            self.suffix % self,
        ]
        self.writeln(''.join(pieces))
class ChargingBar(Bar):
    """Bar drawn with "charging battery" glyphs and a percentage suffix."""
    suffix = '%(percent)d%%'
    bar_prefix = ' '
    bar_suffix = ' '
    empty_fill = u'∙'
    fill = u'█'
class FillingSquaresBar(ChargingBar):
    """ChargingBar variant drawn with hollow/filled square glyphs."""
    empty_fill = u'▢'
    fill = u'▣'
class FillingCirclesBar(ChargingBar):
    """ChargingBar variant drawn with hollow/filled circle glyphs."""
    empty_fill = u'◯'
    fill = u'◉'
class IncrementalBar(Bar):
    """Bar whose partially-filled last cell steps through a sequence of
    increasingly-filled glyphs for sub-cell resolution."""
    phases = (u' ', u'▏', u'▎', u'▍', u'▌', u'▋', u'▊', u'▉', u'█')

    def update(self):
        phase_count = len(self.phases)
        # Progress measured in sub-cell units (phase_count units per cell).
        total_units = int(phase_count * self.width * self.progress)
        done_cells = int(self.width * self.progress)
        empty_cells = self.width - done_cells
        phase = total_units - (done_cells * phase_count)
        partial = self.phases[phase] if phase > 0 else ''
        pieces = [
            self.message % self,
            self.bar_prefix,
            self.phases[-1] * done_cells,
            partial,
            self.empty_fill * max(0, empty_cells - len(partial)),
            self.bar_suffix,
            self.suffix % self,
        ]
        self.writeln(''.join(pieces))
class ShadyBar(IncrementalBar):
    """IncrementalBar variant using shade-block glyphs for the phases."""
    phases = (u' ', u'░', u'▒', u'▓', u'█')
| gpl-3.0 |
mcenirm/sorl-thumbnail | sorl/thumbnail/management/commands/thumbnail.py | 15 | 2272 | # encoding=utf-8
from __future__ import unicode_literals, print_function
import sys
from django.core.management.base import LabelCommand, CommandError
from sorl.thumbnail import default
from sorl.thumbnail.images import delete_all_thumbnails
class Command(LabelCommand):
    """Management command maintaining sorl-thumbnail's files and key-value store.

    Accepts exactly one action label:
      cleanup                 -- drop stale key-value-store entries
      clear                   -- empty the key-value store
      clear_delete_referenced -- also delete the thumbnail files it references
      clear_delete_all        -- also delete every file under THUMBNAIL_PREFIX
    """
    help = (
        'Handles thumbnails and key value store'
    )
    missing_args_message = 'Enter one of [cleanup, clear clear_delete_referenced clear_delete_all]'

    def _report(self, stream, verbosity, message, end='\n'):
        # All progress output is gated on verbosity >= 1.
        if verbosity >= 1:
            print(message, end=end, file=stream)

    def handle(self, *labels, **options):
        verbosity = int(options.get('verbosity'))

        # Django 1.4 compatibility fix
        stdout = options.get('stdout', None)
        stdout = stdout if stdout else sys.stdout

        stderr = options.get('stderr', None)
        stderr = stderr if stderr else sys.stderr

        if not labels:
            # BUGFIX: print_help() writes the usage text itself and returns
            # None; the original `print(self.print_help(...), file=stderr)`
            # therefore also printed a stray "None" to stderr.
            self.print_help('thumbnail', '')
            sys.exit(1)

        if len(labels) != 1:
            raise CommandError('`%s` is not a valid argument' % labels)

        label = labels[0]

        if label not in ['cleanup', 'clear', 'clear_delete_referenced', 'clear_delete_all']:
            raise CommandError('`%s` unknown action' % label)

        if label == 'cleanup':
            self._report(stdout, verbosity, "Cleanup thumbnails", end=' ... ')
            default.kvstore.cleanup()
            self._report(stdout, verbosity, "[Done]")
            return

        if label == 'clear_delete_referenced':
            self._report(stdout, verbosity,
                         "Delete all thumbnail files referenced in "
                         "Key Value Store", end=' ... ')
            default.kvstore.delete_all_thumbnail_files()
            self._report(stdout, verbosity, '[Done]')

        # Every `clear*` action empties the key-value store.
        self._report(stdout, verbosity, "Clear the Key Value Store", end=' ... ')
        default.kvstore.clear()
        self._report(stdout, verbosity, '[Done]')

        if label == 'clear_delete_all':
            self._report(stdout, verbosity,
                         "Delete all thumbnail files in THUMBNAIL_PREFIX", end=' ... ')
            delete_all_thumbnails()
            self._report(stdout, verbosity, '[Done]')
| bsd-3-clause |
mtrgroup/django-mtr-utils | tests/app/settings.py | 1 | 3204 | import os
import django
# placeholder for gettext: lets translatable strings be marked in settings
# without importing Django's translation machinery at settings-import time.
def _(s):
    """Identity stand-in for gettext; returns *s* unchanged."""
    return s
# Project root: two levels up from this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# NOTE(review): hard-coded SECRET_KEY and DEBUG=True are acceptable only
# because these are test-app settings; never reuse in production.
SECRET_KEY = 'j#zwt2c!*(7(jz!m(tr$+jq^1d(+)e(^059f^nd_(*zj!gv0x)'

DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = ['*']

INSTALLED_APPS = (
    'modeltranslation',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',

    'debug_toolbar',
    'celery',
    'django_rq',

    'app',

    'mtr.utils'
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)
# BUGFIX: compare against the canonical django.VERSION tuple instead of
# the version *string*: lexicographic comparison breaks for double-digit
# releases ('1.10' < '1.7' as strings).
if django.VERSION >= (1, 7):
    MIDDLEWARE_CLASSES += (
        'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    )
else:
    # Pre-1.7 projects use South for migrations.
    INSTALLED_APPS += (
        'south',
    )

    SOUTH_MIGRATION_MODULES = {
        'app': 'app.south_migrations',
        'sync': 'mtr.sync.south_migrations',
        'utils': 'mtr.utils.south_migrations',
    }

MIDDLEWARE_CLASSES += (
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

# Context processors moved from django.core to django.template in 1.8;
# same tuple-based version check as above.
if django.VERSION >= (1, 8):
    TEMPLATE_CONTEXT_PROCESSORS = (
        "django.contrib.auth.context_processors.auth",
        "django.template.context_processors.debug",
        "django.template.context_processors.i18n",
        "django.template.context_processors.media",
        "django.template.context_processors.static",
        "django.template.context_processors.tz",
        'django.template.context_processors.request',
        "django.contrib.messages.context_processors.messages",
    )
else:
    TEMPLATE_CONTEXT_PROCESSORS = (
        "django.contrib.auth.context_processors.auth",
        "django.core.context_processors.debug",
        "django.core.context_processors.i18n",
        "django.core.context_processors.media",
        "django.core.context_processors.static",
        "django.core.context_processors.tz",
        'django.core.context_processors.request',
        "django.contrib.messages.context_processors.messages",
    )
ROOT_URLCONF = 'app.urls'

WSGI_APPLICATION = 'app.wsgi.application'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'olddb.sqlite3'),
    }
}

# BUGFIX: use the canonical django.VERSION tuple rather than comparing
# version strings ('1.10' < '1.7' lexicographically).
if django.VERSION >= (1, 7):
    DATABASES['default']['NAME'] = os.path.join(BASE_DIR, 'db.sqlite3')

LANGUAGE_CODE = 'en'

LANGUAGES = (
    ('de', _('German')),
    ('en', _('English')),
)

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

STATIC_URL = '/static/'

MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')

# Celery configured for in-process, eager test execution (no real broker).
BROKER_BACKEND = 'memory'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

RQ_QUEUES = {
    'default': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 0,
        'DEFAULT_TIMEOUT': 360,
    },
}
stefanseefeld/numba | numba/typing/setdecl.py | 2 | 5933 | from __future__ import absolute_import, print_function
from .. import types
from .templates import (ConcreteTemplate, AbstractTemplate, AttributeTemplate,
CallableTemplate, Registry, signature, bound_function,
make_callable_template)
# Ensure set is typed as a collection as well
from . import collections
# Shorthand registration decorators for this module's typing templates.
registry = Registry()
infer = registry.register
infer_global = registry.register_global
infer_getattr = registry.register_attr
@infer_global(set)
class SetBuiltin(AbstractTemplate):
    """Typing for calls to the set() constructor."""
    def generic(self, args, kws):
        assert not kws
        if args:
            # set(iterable)
            iterable, = args
            if isinstance(iterable, types.IterableType):
                dtype = iterable.iterator_type.yield_type
                # Only hashable element types may become set members.
                if isinstance(dtype, types.Hashable):
                    return signature(types.Set(dtype), iterable)
        else:
            # set() -- element type unknown until first insertion.
            return signature(types.Set(types.undefined))
@infer_getattr
class SetAttribute(AttributeTemplate):
key = types.Set
    @bound_function("set.add")
    def resolve_add(self, set, args, kws):
        # NOTE: parameter `set` shadows the builtin (kept for signature
        # compatibility with the other resolve_* methods).
        item, = args
        assert not kws
        # add(x) types only if x unifies with the element type; the
        # receiver is retyped to a set of the unified element type.
        unified = self.context.unify_pairs(set.dtype, item)
        if unified is not None:
            sig = signature(types.none, unified)
            sig.recvr = set.copy(dtype=unified)
            return sig
    @bound_function("set.clear")
    def resolve_clear(self, set, args, kws):
        assert not kws
        # clear() takes no arguments and returns None.
        if not args:
            return signature(types.none)
@bound_function("set.copy")
def resolve_copy(self, set, args, kws):
assert not kws
if not args:
return signature(set)
@bound_function("set.discard")
def resolve_discard(self, set, args, kws):
item, = args
assert not kws
return signature(types.none, set.dtype)
@bound_function("set.pop")
def resolve_pop(self, set, args, kws):
assert not kws
if not args:
return signature(set.dtype)
@bound_function("set.remove")
def resolve_remove(self, set, args, kws):
item, = args
assert not kws
return signature(types.none, set.dtype)
@bound_function("set.update")
def resolve_update(self, set, args, kws):
iterable, = args
assert not kws
if not isinstance(iterable, types.IterableType):
return
dtype = iterable.iterator_type.yield_type
unified = self.context.unify_pairs(set.dtype, dtype)
if unified is not None:
sig = signature(types.none, iterable)
sig.recvr = set.copy(dtype=unified)
return sig
def _resolve_xxx_update(self, set, args, kws):
assert not kws
iterable, = args
# Set arguments only supported for now
# (note we can mix non-reflected and reflected arguments)
if isinstance(iterable, types.Set) and iterable.dtype == set.dtype:
return signature(types.none, iterable)
@bound_function("set.difference_update")
def resolve_difference_update(self, set, args, kws):
return self._resolve_xxx_update(set, args, kws)
@bound_function("set.intersection_update")
def resolve_intersection_update(self, set, args, kws):
return self._resolve_xxx_update(set, args, kws)
@bound_function("set.symmetric_difference_update")
def resolve_symmetric_difference_update(self, set, args, kws):
return self._resolve_xxx_update(set, args, kws)
def _resolve_operator(self, set, args, kws):
assert not kws
iterable, = args
# Set arguments only supported for now
# (note we can mix non-reflected and reflected arguments)
if isinstance(iterable, types.Set) and iterable.dtype == set.dtype:
return signature(set, iterable)
@bound_function("set.difference")
def resolve_difference(self, set, args, kws):
return self._resolve_operator(set, args, kws)
@bound_function("set.intersection")
def resolve_intersection(self, set, args, kws):
return self._resolve_operator(set, args, kws)
@bound_function("set.symmetric_difference")
def resolve_symmetric_difference(self, set, args, kws):
return self._resolve_operator(set, args, kws)
@bound_function("set.union")
def resolve_union(self, set, args, kws):
return self._resolve_operator(set, args, kws)
def _resolve_comparator(self, set, args, kws):
assert not kws
arg, = args
if arg == set:
return signature(types.boolean, arg)
@bound_function("set.isdisjoint")
def resolve_isdisjoint(self, set, args, kws):
return self._resolve_comparator(set, args, kws)
@bound_function("set.issubset")
def resolve_issubset(self, set, args, kws):
return self._resolve_comparator(set, args, kws)
@bound_function("set.issuperset")
def resolve_issuperset(self, set, args, kws):
return self._resolve_comparator(set, args, kws)
class SetOperator(AbstractTemplate):
    """Typing for binary set operators; both operands must be sets of the
    same dtype, and the result has the left operand's type."""

    def generic(self, args, kws):
        if len(args) != 2:
            return
        lhs, rhs = args
        both_sets = isinstance(lhs, types.Set) and isinstance(rhs, types.Set)
        if both_sets and lhs.dtype == rhs.dtype:
            return signature(lhs, *args)
class SetComparison(AbstractTemplate):
    """Typing for rich comparisons between two sets of identical type;
    the result is a boolean."""

    def generic(self, args, kws):
        if len(args) == 2:
            lhs, rhs = args
            if isinstance(lhs, types.Set) and isinstance(rhs, types.Set) and lhs == rhs:
                return signature(types.boolean, *args)
# Register one plain and one in-place operator template per set operator.
# The class-level `key = op_key` binds the current loop value at class
# creation time, so each iteration registers a distinct key.
for op_key in '&|^-':
    @infer
    class ConcreteSetOperator(SetOperator):
        key = op_key

    @infer
    class ConcreteInplaceSetOperator(SetOperator):
        key = op_key + '='

# Register the rich comparison operators between sets.
for op_key in ('==', '!=', '<', '<=', '>=', '>'):
    @infer
    class ConcreteSetComparison(SetComparison):
        key = op_key
| bsd-2-clause |
lsk112233/Clone-test-repo | events/search_indexes.py | 14 | 2169 | from django.template.defaultfilters import truncatewords_html, striptags
from haystack import indexes
from .models import Event, Calendar
class CalendarIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index for Calendar objects."""
    text = indexes.CharField(document=True, use_template=True)
    name = indexes.CharField(model_attr='name')
    description = indexes.CharField(null=True)
    path = indexes.CharField()
    rss = indexes.CharField(model_attr='rss', null=True)
    twitter = indexes.CharField(model_attr='twitter', null=True)
    ical_url = indexes.CharField(model_attr='url', null=True)
    include_template = indexes.CharField()

    def get_model(self):
        """Return the model this index covers."""
        return Calendar

    def prepare_path(self, obj):
        """Index the calendar's canonical URL."""
        return obj.get_absolute_url()

    def prepare_description(self, obj):
        # Index a plain-text summary instead of the full HTML description.
        return striptags(truncatewords_html(obj.description, 50))

    def prepare_include_template(self, obj):
        """Template used to render this result in search listings."""
        return "search/includes/events.calendar.html"

    def prepare(self, obj):
        data = super().prepare(obj)
        # Boost calendars above events (events use boosts <= 1.2 below).
        data['boost'] = 4
        return data
class EventIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index for Event objects."""
    text = indexes.CharField(document=True, use_template=True)
    name = indexes.CharField(model_attr='title')
    description = indexes.CharField(null=True)
    venue = indexes.CharField(null=True)
    path = indexes.CharField()
    include_template = indexes.CharField()

    def get_model(self):
        """Return the model this index covers."""
        return Event

    def prepare_include_template(self, obj):
        """Template used to render this result in search listings."""
        return "search/includes/events.event.html"

    def prepare_path(self, obj):
        """Index the event's canonical URL."""
        return obj.get_absolute_url()

    def prepare_description(self, obj):
        # Index a plain-text summary of the rendered (markup) description.
        return striptags(truncatewords_html(obj.description.rendered, 50))

    def prepare_venue(self, obj):
        # Venue is optional on the model.
        if obj.venue:
            return obj.venue.name
        else:
            return None

    def prepare(self, obj):
        """ Boost events """
        data = super().prepare(obj)
        # Reduce boost of past events
        if obj.is_past:
            data['boost'] = 0.9
        elif obj.featured:
            data['boost'] = 1.2
        else:
            data['boost'] = 1.1
        return data
| apache-2.0 |
huangkuan/hack | lib/oauth2client/contrib/devshell.py | 27 | 5011 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utitilies for Google Developer Shell environment."""
import datetime
import json
import os
import socket
from oauth2client._helpers import _to_bytes
from oauth2client import client
# Expose utcnow() at module level to allow for
# easier testing (by replacing with a stub).
_UTCNOW = datetime.datetime.utcnow
DEVSHELL_ENV = 'DEVSHELL_CLIENT_PORT'
# Root of this module's exception hierarchy.
class Error(Exception):
    """Errors for this module."""
    pass
# Raised by _SendRecv() when the server response is malformed.
class CommunicationError(Error):
    """Errors for communication with the Developer Shell server."""
# Raised by _SendRecv() when DEVSHELL_CLIENT_PORT is unset or zero.
class NoDevshellServer(Error):
    """Error when no Developer Shell server can be contacted."""
# The request for credential information to the Developer Shell client socket
# is always an empty PBLite-formatted JSON object, so just define it as a
# constant.
CREDENTIAL_INFO_REQUEST_JSON = '[]'
class CredentialInfoResponse(object):
    """Credential information response from the Developer Shell server.

    The response is a PBLite-formatted JSON array whose fields are encoded
    positionally:

    * Index 0 - user email
    * Index 1 - default project ID. None if the project context is not known.
    * Index 2 - OAuth2 access token. None if there is no valid auth context.
    * Index 3 - Seconds until the access token expires. None if not present.
    """

    def __init__(self, json_string):
        """Decode the PBLite JSON array into named attributes."""
        fields = json.loads(json_string)
        if not isinstance(fields, list):
            raise ValueError('Not a list: ' + str(fields))
        # Pad so that absent trailing entries come out as None.
        head = list(fields[:4])
        padded = head + [None] * (4 - len(head))
        self.user_email, self.project_id, self.access_token, self.expires_in = padded
def _SendRecv():
    """Communicate with the Developer Shell server socket.

    Sends the fixed credential-info request and returns the parsed
    CredentialInfoResponse. Raises NoDevshellServer when the devshell
    port environment variable is unset, CommunicationError on a
    malformed response header.
    """
    port = int(os.getenv(DEVSHELL_ENV, 0))
    if port == 0:
        raise NoDevshellServer()

    sock = socket.socket()
    sock.connect(('localhost', port))

    # Wire format in both directions is "<payload length>\n<payload>".
    data = CREDENTIAL_INFO_REQUEST_JSON
    msg = '%s\n%s' % (len(data), data)
    sock.sendall(_to_bytes(msg, encoding='utf-8'))

    # Read enough bytes to contain the length prefix and its newline,
    # then fetch whatever part of the payload is still outstanding.
    header = sock.recv(6).decode()
    if '\n' not in header:
        raise CommunicationError('saw no newline in the first 6 bytes')
    len_str, json_str = header.split('\n', 1)
    to_read = int(len_str) - len(json_str)
    if to_read > 0:
        # NOTE(review): socket.MSG_WAITALL is not available on every
        # platform (e.g. Windows) -- confirm the supported targets.
        json_str += sock.recv(to_read, socket.MSG_WAITALL).decode()

    return CredentialInfoResponse(json_str)
class DevshellCredentials(client.GoogleCredentials):
    """Credentials object for Google Developer Shell environment.

    This object will allow a Google Developer Shell session to identify its
    user to Google and other OAuth 2.0 servers that can verify assertions. It
    can be used for the purpose of accessing data stored under the user
    account.

    This credential does not require a flow to instantiate because it
    represents a two legged flow, and therefore has all of the required
    information to generate and refresh its own access tokens.
    """

    def __init__(self, user_agent=None):
        super(DevshellCredentials, self).__init__(
            None,  # access_token, initialized below
            None,  # client_id
            None,  # client_secret
            None,  # refresh_token
            None,  # token_expiry
            None,  # token_uri
            user_agent)
        # Populate access_token/token_expiry from the devshell server now.
        self._refresh(None)

    def _refresh(self, http_request):
        # The devshell server is the source of truth; http_request is
        # accepted for interface compatibility but not used.
        self.devshell_response = _SendRecv()
        self.access_token = self.devshell_response.access_token
        expires_in = self.devshell_response.expires_in
        if expires_in is not None:
            delta = datetime.timedelta(seconds=expires_in)
            self.token_expiry = _UTCNOW() + delta
        else:
            self.token_expiry = None

    @property
    def user_email(self):
        # Email reported by the most recent devshell response.
        return self.devshell_response.user_email

    @property
    def project_id(self):
        # Default project reported by the most recent devshell response.
        return self.devshell_response.project_id

    @classmethod
    def from_json(cls, json_data):
        raise NotImplementedError(
            'Cannot load Developer Shell credentials from JSON.')

    @property
    def serialization_data(self):
        raise NotImplementedError(
            'Cannot serialize Developer Shell credentials.')
| apache-2.0 |
hamzehd/edx-platform | openedx/core/djangoapps/course_groups/management/commands/post_cohort_membership_fix.py | 18 | 3740 | """
Intended to fix any inconsistencies that may arise during the rollout of the CohortMembership model.
Illustration: https://gist.github.com/efischer19/d62f8ee42b7fbfbc6c9a
"""
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from openedx.core.djangoapps.course_groups.models import CourseUserGroup, CohortMembership
class Command(BaseCommand):
    """
    Repair any inconsistencies between CourseUserGroup and CohortMembership. To be run after migration 0006.
    """
    help = '''
    Repairs any potential inconsistencies made in the window between running migrations 0005 and 0006, and deploying
    the code changes to enforce use of CohortMembership that go with said migrations.
    |commit|: optional argument. If not provided, will dry-run and list number of operations that would be made.
    '''

    def handle(self, *args, **options):
        """
        Execute the command. Since this is designed to fix any issues cause by running pre-CohortMembership code
        with the database already migrated to post-CohortMembership state, we will use the pre-CohortMembership
        table CourseUserGroup as the canonical source of truth. This way, changes made in the window are persisted.
        """
        # NOTE(review): with old-style management commands a positional
        # 'commit' argument normally arrives in *args*, not *options* --
        # confirm how this command is invoked before relying on this check.
        commit = 'commit' in options

        memberships_to_delete = 0
        memberships_to_add = 0

        # Begin by removing any data in CohortMemberships that does not match CourseUserGroups data
        for membership in CohortMembership.objects.all():
            try:
                CourseUserGroup.objects.get(
                    group_type=CourseUserGroup.COHORT,
                    users__id=membership.user.id,
                    course_id=membership.course_id,
                    id=membership.course_user_group.id
                )
            except CourseUserGroup.DoesNotExist:
                memberships_to_delete += 1
                if commit:
                    membership.delete()

        # Now we can add any CourseUserGroup data that is missing a backing CohortMembership
        for course_group in CourseUserGroup.objects.filter(group_type=CourseUserGroup.COHORT):
            for user in course_group.users.all():
                try:
                    CohortMembership.objects.get(
                        user=user,
                        course_id=course_group.course_id,
                        course_user_group_id=course_group.id
                    )
                except CohortMembership.DoesNotExist:
                    memberships_to_add += 1
                    if commit:
                        membership = CohortMembership(
                            course_user_group=course_group,
                            user=user,
                            course_id=course_group.course_id
                        )
                        try:
                            membership.save()
                        except IntegrityError:  # If the user is in multiple cohorts, we arbitrarily choose between them
                            # In this case, allow the pre-existing entry to be "correct"
                            course_group.users.remove(user)
                            user.course_groups.remove(course_group)

        print '{} CohortMemberships did not match the CourseUserGroup table and will be deleted'.format(
            memberships_to_delete
        )
        print '{} CourseUserGroup users do not have a CohortMembership; one will be added if it is valid'.format(
            memberships_to_add
        )
        if commit:
            print 'Changes have been made and saved.'
        else:
            print 'Dry run, changes have not been saved. Run again with "commit" argument to save changes'
| agpl-3.0 |
USGSDenverPychron/pychron | pychron/hardware/actuators/iactuator.py | 1 | 1276 | # ===============================================================================
# Copyright 2014 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import Interface
# ============= standard library imports ========================
# ============= local library imports ==========================
class IActuator(Interface):
    """Interface implemented by actuator hardware drivers."""

    def close_channel(self, obj):
        """Close the channel identified by *obj*."""
        pass

    def open_channel(self, obj):
        """Open the channel identified by *obj*."""
        pass

    def get_channel_state(self, obj, verbose=False):
        """Return the current state of the channel identified by *obj*."""
        pass

    def get_lock_state(self, obj):
        """Return the lock state of the channel identified by *obj*."""
        pass
# ============= EOF =============================================
| apache-2.0 |
fatihzkaratana/AutobahnPython | autobahn/autobahn/pbkdf2.py | 18 | 4869 | # -*- coding: utf-8 -*-
"""
pbkdf2
~~~~~~
This module implements pbkdf2 for Python. It also has some basic
tests that ensure that it works. The implementation is straightforward
and uses stdlib only stuff and can be easily be copy/pasted into
your favourite application.
Use this as replacement for bcrypt that does not need a c implementation
of a modified blowfish crypto algo.
Example usage:
>>> pbkdf2_hex('what i want to hash', 'the random salt')
'fa7cc8a2b0a932f8e6ea42f9787e9d36e592e0c222ada6a9'
How to use this:
1. Use a constant time string compare function to compare the stored hash
with the one you're generating::
def safe_str_cmp(a, b):
if len(a) != len(b):
return False
rv = 0
for x, y in izip(a, b):
rv |= ord(x) ^ ord(y)
return rv == 0
2. Use `os.urandom` to generate a proper salt of at least 8 byte.
Use a unique salt per hashed password.
3. Store ``algorithm$salt:costfactor$hash`` in the database so that
you can upgrade later easily to a different algorithm if you need
one. For instance ``PBKDF2-256$thesalt:10000$deadbeef...``.
:copyright: (c) Copyright 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
__all__ = ("pbkdf2_hex",
"pbkdf2_bin",)
import hmac
import hashlib
from struct import Struct
from operator import xor
from itertools import izip, starmap
_pack_int = Struct('>I').pack
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Like :func:`pbkdf2_bin` but returns a hex encoded string.

    NOTE: ``str.encode('hex')`` only exists on Python 2; this module
    predates Python 3.
    """
    return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).encode('hex')
def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
    with the given `salt`. It iterates `iterations` time and produces a
    key of `keylen` bytes. By default SHA-1 is used as hash function,
    a different hashlib `hashfunc` can be provided.

    NOTE: Python 2 only (uses ``xrange``, ``izip`` and byte strings).
    """
    hashfunc = hashfunc or hashlib.sha1
    mac = hmac.new(data, None, hashfunc)

    def _pseudorandom(x, mac=mac):
        # Copy the outer HMAC so the password-keyed state is reused
        # without re-deriving it for every PRF invocation.
        h = mac.copy()
        h.update(x)
        return map(ord, h.digest())

    buf = []
    # -(-keylen // digest_size) is ceiling division: the number of
    # PRF output blocks required to cover keylen bytes.
    for block in xrange(1, -(-keylen // mac.digest_size) + 1):
        # U1 = PRF(salt || INT(block)); subsequent Ui feed back, and the
        # block result is the XOR of all Ui.
        rv = u = _pseudorandom(salt + _pack_int(block))
        for i in xrange(iterations - 1):
            u = _pseudorandom(''.join(map(chr, u)))
            rv = starmap(xor, izip(rv, u))
        buf.extend(rv)
    return ''.join(map(chr, buf))[:keylen]
def test():
    # Self-test against published vectors; exits with status 1 (True) if
    # any vector fails. Python 2 print statements throughout.
    failed = []

    def check(data, salt, iterations, keylen, expected):
        # Record failures instead of raising so every vector still runs.
        rv = pbkdf2_hex(data, salt, iterations, keylen)
        if rv != expected:
            print 'Test failed:'
            print ' Expected: %s' % expected
            print ' Got: %s' % rv
            print ' Parameters:'
            print ' data=%s' % data
            print ' salt=%s' % salt
            print ' iterations=%d' % iterations
            print
            failed.append(1)

    # From RFC 6070
    check('password', 'salt', 1, 20,
          '0c60c80f961f0e71f3a9b524af6012062fe037a6')
    check('password', 'salt', 2, 20,
          'ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957')
    check('password', 'salt', 4096, 20,
          '4b007901b765489abead49d926f721d065a429c1')
    check('passwordPASSWORDpassword', 'saltSALTsaltSALTsaltSALTsaltSALTsalt',
          4096, 25, '3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038')
    check('pass\x00word', 'sa\x00lt', 4096, 16,
          '56fa6aa75548099dcc37d7f03425e0c3')
    # This one is from the RFC but it just takes for ages
    ##check('password', 'salt', 16777216, 20,
    ##      'eefe3d61cd4da4e4e9945b3d6ba2158c2634e984')

    # From Crypt-PBKDF2
    check('password', 'ATHENA.MIT.EDUraeburn', 1, 16,
          'cdedb5281bb2f801565a1122b2563515')
    check('password', 'ATHENA.MIT.EDUraeburn', 1, 32,
          'cdedb5281bb2f801565a1122b25635150ad1f7a04bb9f3a333ecc0e2e1f70837')
    check('password', 'ATHENA.MIT.EDUraeburn', 2, 16,
          '01dbee7f4a9e243e988b62c73cda935d')
    check('password', 'ATHENA.MIT.EDUraeburn', 2, 32,
          '01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86')
    check('password', 'ATHENA.MIT.EDUraeburn', 1200, 32,
          '5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13')
    check('X' * 64, 'pass phrase equals block size', 1200, 32,
          '139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1')
    check('X' * 65, 'pass phrase exceeds block size', 1200, 32,
          '9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a')

    raise SystemExit(bool(failed))
if __name__ == '__main__':
test()
| apache-2.0 |
sestrella/ansible | lib/ansible/modules/cloud/amazon/aws_inspector_target.py | 7 | 7966 | #!/usr/bin/python
# Copyright (c) 2018 Dennis Conrad for Sainsbury's
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aws_inspector_target
short_description: Create, Update and Delete Amazon Inspector Assessment
Targets
description: Creates, updates, or deletes Amazon Inspector Assessment Targets
and manages the required Resource Groups.
version_added: "2.6"
author: "Dennis Conrad (@dennisconrad)"
options:
name:
description:
- The user-defined name that identifies the assessment target. The name
must be unique within the AWS account.
required: true
type: str
state:
description:
- The state of the assessment target.
choices:
- absent
- present
default: present
type: str
tags:
description:
- Tags of the EC2 instances to be added to the assessment target.
- Required if C(state=present).
type: dict
extends_documentation_fragment:
- aws
- ec2
requirements:
- boto3
- botocore
'''
EXAMPLES = '''
- name: Create my_target Assessment Target
aws_inspector_target:
name: my_target
tags:
role: scan_target
- name: Update Existing my_target Assessment Target with Additional Tags
aws_inspector_target:
name: my_target
tags:
env: dev
role: scan_target
- name: Delete my_target Assessment Target
aws_inspector_target:
name: my_target
state: absent
'''
RETURN = '''
arn:
description: The ARN that specifies the Amazon Inspector assessment target.
returned: success
type: str
sample: "arn:aws:inspector:eu-west-1:123456789012:target/0-O4LnL7n1"
created_at:
description: The time at which the assessment target was created.
returned: success
type: str
sample: "2018-01-29T13:48:51.958000+00:00"
name:
description: The name of the Amazon Inspector assessment target.
returned: success
type: str
sample: "my_target"
resource_group_arn:
description: The ARN that specifies the resource group that is associated
with the assessment target.
returned: success
type: str
sample: "arn:aws:inspector:eu-west-1:123456789012:resourcegroup/0-qY4gDel8"
tags:
description: The tags of the resource group that is associated with the
assessment target.
returned: success
type: list
sample: {"role": "scan_target", "env": "dev"}
updated_at:
description: The time at which the assessment target was last updated.
returned: success
type: str
sample: "2018-01-29T13:48:51.958000+00:00"
'''
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import AWSRetry
from ansible.module_utils.ec2 import (
HAS_BOTO3,
ansible_dict_to_boto3_tag_list,
boto3_tag_list_to_ansible_dict,
camel_dict_to_snake_dict,
compare_aws_tags,
)
try:
import botocore
except ImportError:
pass # caught by imported HAS_BOTO3
# NOTE(review): applying the retry decorator to main() re-runs the whole
# module logic on retryable AWS errors, including the non-idempotent
# create/update/delete paths -- confirm this is intended rather than
# decorating the individual client calls.
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def main():
    """Create, update or delete an Inspector assessment target (and its
    backing resource group) according to the module parameters."""
    argument_spec = dict(
        name=dict(required=True),
        state=dict(choices=['absent', 'present'], default='present'),
        tags=dict(type='dict'),
    )

    # tags is only mandatory when creating/updating a target.
    required_if = [['state', 'present', ['tags']]]

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        supports_check_mode=False,
        required_if=required_if,
    )

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 and botocore are required for this module')

    name = module.params.get('name')
    state = module.params.get('state').lower()
    tags = module.params.get('tags')

    if tags:
        # Convert {k: v} into boto3's [{'key': k, 'value': v}] form.
        tags = ansible_dict_to_boto3_tag_list(tags, 'key', 'value')

    client = module.client('inspector')

    try:
        # An IndexError from the empty ARN list below signals "no such
        # target" and is handled separately from AWS API failures.
        existing_target_arn = client.list_assessment_targets(
            filter={'assessmentTargetNamePattern': name},
        ).get('assessmentTargetArns')[0]

        existing_target = camel_dict_to_snake_dict(
            client.describe_assessment_targets(
                assessmentTargetArns=[existing_target_arn],
            ).get('assessmentTargets')[0]
        )

        existing_resource_group_arn = existing_target.get('resource_group_arn')
        existing_resource_group_tags = client.describe_resource_groups(
            resourceGroupArns=[existing_resource_group_arn],
        ).get('resourceGroups')[0].get('tags')

        target_exists = True
    except (
        botocore.exceptions.BotoCoreError,
        botocore.exceptions.ClientError,
    ) as e:
        module.fail_json_aws(e, msg="trying to retrieve targets")
    except IndexError:
        target_exists = False

    if state == 'present' and target_exists:
        ansible_dict_tags = boto3_tag_list_to_ansible_dict(tags)
        ansible_dict_existing_tags = boto3_tag_list_to_ansible_dict(
            existing_resource_group_tags
        )
        tags_to_add, tags_to_remove = compare_aws_tags(
            ansible_dict_tags,
            ansible_dict_existing_tags
        )
        if not (tags_to_add or tags_to_remove):
            # Tags already match: nothing to do.
            existing_target.update({'tags': ansible_dict_existing_tags})
            module.exit_json(changed=False, **existing_target)
        else:
            try:
                # Resource groups are immutable: create a new one with the
                # desired tags and point the target at it.
                updated_resource_group_arn = client.create_resource_group(
                    resourceGroupTags=tags,
                ).get('resourceGroupArn')
                client.update_assessment_target(
                    assessmentTargetArn=existing_target_arn,
                    assessmentTargetName=name,
                    resourceGroupArn=updated_resource_group_arn,
                )
                updated_target = camel_dict_to_snake_dict(
                    client.describe_assessment_targets(
                        assessmentTargetArns=[existing_target_arn],
                    ).get('assessmentTargets')[0]
                )
                updated_target.update({'tags': ansible_dict_tags})
                # NOTE(review): stray trailing comma makes this statement a
                # 1-tuple expression; harmless, but should be removed.
                module.exit_json(changed=True, **updated_target),
            except (
                botocore.exceptions.BotoCoreError,
                botocore.exceptions.ClientError,
            ) as e:
                module.fail_json_aws(e, msg="trying to update target")

    elif state == 'present' and not target_exists:
        try:
            new_resource_group_arn = client.create_resource_group(
                resourceGroupTags=tags,
            ).get('resourceGroupArn')
            new_target_arn = client.create_assessment_target(
                assessmentTargetName=name,
                resourceGroupArn=new_resource_group_arn,
            ).get('assessmentTargetArn')
            new_target = camel_dict_to_snake_dict(
                client.describe_assessment_targets(
                    assessmentTargetArns=[new_target_arn],
                ).get('assessmentTargets')[0]
            )
            new_target.update({'tags': boto3_tag_list_to_ansible_dict(tags)})
            module.exit_json(changed=True, **new_target)
        except (
            botocore.exceptions.BotoCoreError,
            botocore.exceptions.ClientError,
        ) as e:
            module.fail_json_aws(e, msg="trying to create target")

    elif state == 'absent' and target_exists:
        try:
            client.delete_assessment_target(
                assessmentTargetArn=existing_target_arn,
            )
            module.exit_json(changed=True)
        except (
            botocore.exceptions.BotoCoreError,
            botocore.exceptions.ClientError,
        ) as e:
            module.fail_json_aws(e, msg="trying to delete target")

    elif state == 'absent' and not target_exists:
        module.exit_json(changed=False)
if __name__ == '__main__':
main()
| gpl-3.0 |
ojengwa/sympy | sympy/functions/special/tests/test_bsplines.py | 27 | 3059 | from sympy.functions import bspline_basis, bspline_basis_set
from sympy import Piecewise, Interval
from sympy import symbols, Rational
x, y = symbols('x,y')
def test_basic_degree_0():
    """Degree-0 B-splines on knots 0..4 are indicators of [i, i + 1]."""
    d = 0
    knots = range(5)
    splines = bspline_basis_set(d, knots, x)
    for i in range(len(splines)):
        assert splines[i] == Piecewise((1, Interval(i, i + 1)
                                        .contains(x)), (0, True))
def test_basic_degree_1():
    """Degree-1 (hat-function) B-splines on knots 0..4."""
    d = 1
    knots = range(5)
    splines = bspline_basis_set(d, knots, x)
    assert splines[0] == Piecewise(
        (x, Interval(0, 1, False, True).contains(x)),
        (2 - x, Interval(1, 2).contains(x)), (0, True))
    assert splines[1] == Piecewise(
        (-1 + x, Interval(1, 2, False, True).contains(x)),
        (3 - x, Interval(2, 3).contains(x)), (0, True))
    assert splines[2] == Piecewise(
        (-2 + x, Interval(2, 3, False, True).contains(x)),
        (4 - x, Interval(3, 4).contains(x)), (0, True))
def test_basic_degree_2():
    """Degree-2 (quadratic) B-splines on knots 0..4."""
    d = 2
    knots = range(5)
    splines = bspline_basis_set(d, knots, x)
    b0 = Piecewise((x**2/2, Interval(0, 1, False, True).contains(x)),
                   (Rational(
                       -3, 2) + 3*x - x**2, Interval(1, 2, False, True).contains(x)),
                   (Rational(9, 2) - 3*x + x**2/2, Interval(2, 3).contains(x)), (0, True))
    b1 = Piecewise(
        (Rational(1, 2) - x + x**2/2, Interval(1, 2, False, True).contains(x)),
        (Rational(
            -11, 2) + 5*x - x**2, Interval(2, 3, False, True).contains(x)),
        (8 - 4*x + x**2/2, Interval(3, 4).contains(x)), (0, True))
    assert splines[0] == b0
    assert splines[1] == b1
def test_basic_degree_3():
    """Degree-3 (cubic) B-spline on knots 0..4; checks the first basis member."""
    d = 3
    knots = range(5)
    splines = bspline_basis_set(d, knots, x)
    b0 = Piecewise(
        (x**3/6, Interval(0, 1, False, True).contains(x)),
        (Rational(2, 3) - 2*x + 2*x**2 - x**3/2, Interval(1, 2,
         False, True).contains(x)),
        (Rational(-22, 3) + 10*x - 4*x**2 + x**3/2, Interval(2, 3,
         False, True).contains(x)),
        (Rational(32, 3) - 8*x + 2*x**2 - x**3/6, Interval(3, 4).contains(x)),
        (0, True)
    )
    assert splines[0] == b0
def test_repeated_degree_1():
    """Degree-1 B-splines with repeated knots (multiplicity 2 at 0, 2 and 4)."""
    d = 1
    knots = [0, 0, 1, 2, 2, 3, 4, 4]
    splines = bspline_basis_set(d, knots, x)
    assert splines[0] == Piecewise((1 - x, Interval(0, 1).contains(x)),
                                   (0, True))
    assert splines[1] == Piecewise(
        (x, Interval(0, 1, False, True).contains(x)),
        (2 - x, Interval(1, 2).contains(x)), (0, True))
    assert splines[2] == Piecewise((-1 + x, Interval(1, 2).contains(x)
                                    ), (0, True))
    assert splines[3] == Piecewise((3 - x, Interval(2, 3).contains(x)),
                                   (0, True))
    assert splines[4] == Piecewise(
        (-2 + x, Interval(2, 3, False, True).contains(x)),
        (4 - x, Interval(3, 4).contains(x)), (0, True))
    assert splines[5] == Piecewise((-3 + x, Interval(3, 4).contains(x)
                                    ), (0, True))
| bsd-3-clause |
MaxWayne/Beginning-Game-Development-with-Python-and-Pygame | Chapter 10/10-4.py | 6 | 4150 | import pygame
from pygame.locals import *
from random import randint
from gameobjects.vector2 import Vector2
SCREEN_SIZE = (640, 480)
# In pixels per second, per second
GRAVITY = 250.0
# Increase for more bounciness, but don't go over 1!
BOUNCINESS = 0.7
def stero_pan(x_coord, screen_width):
    """Return (left, right) channel volumes panning by horizontal position.

    A sound at x == 0 plays fully left, x == screen_width fully right.
    (The name is a typo for "stereo_pan"; kept for existing callers.)
    """
    right = x_coord / float(screen_width)
    return (1.0 - right, right)
class Ball(object):
    """A bouncing sprite with simple gravity and edge-collision sound."""

    def __init__(self, position, speed, image, bounce_sound):
        self.position = Vector2(position)
        self.speed = Vector2(speed)
        self.image = image
        self.bounce_sound = bounce_sound
        self.age = 0.0  # seconds alive; used by the caller to expire balls

    def update(self, time_passed):
        w, h = self.image.get_size()
        screen_width, screen_height = SCREEN_SIZE

        # Top-left corner of the sprite (position is the sprite center).
        x, y = self.position
        x -= w/2
        y -= h/2

        # Has the ball bounced this frame?
        bounce = False

        # NOTE(review): collisions are tested against the pre-move position
        # and the ball is clamped just inside the edge before moving; a fast
        # ball can still overshoot within a single frame.

        # Has the ball hit the bottom of the screen?
        if y + h >= screen_height:
            self.speed.y = -self.speed.y * BOUNCINESS
            self.position.y = screen_height - h / 2.0 - 1.0
            bounce = True

        # Has the ball hit the left of the screen?
        if x <= 0:
            self.speed.x = -self.speed.x * BOUNCINESS
            self.position.x = w / 2.0 + 1
            bounce = True

        # Has the ball hit the right of the screen?
        elif x + w >= screen_width:
            self.speed.x = -self.speed.x * BOUNCINESS
            self.position.x = screen_width - w / 2.0 - 1
            bounce = True

        # Do time based movement
        self.position += self.speed * time_passed

        # Add gravity
        self.speed.y += time_passed * GRAVITY

        if bounce:
            self.play_bounce_sound()

        self.age += time_passed

    def play_bounce_sound(self):
        # play() returns None when no mixer channel is free.
        channel = self.bounce_sound.play()

        if channel is not None:
            # Pan the sound according to the ball's horizontal position.
            left, right = stero_pan(self.position.x, SCREEN_SIZE[0])
            channel.set_volume(left, right)

    def render(self, surface):
        # Draw the sprite centered on self.position.
        w, h = self.image.get_size()
        x, y = self.position
        x -= w/2
        y -= h/2
        surface.blit(self.image, (x, y))
def run():
    """Main loop: spawn a bouncing ball on each mouse click."""
    # Initialise 44KHz 16-bit stereo sound
    pygame.mixer.pre_init(44100, 16, 2, 1024*4)
    pygame.init()
    pygame.mixer.set_num_channels(8)
    screen = pygame.display.set_mode(SCREEN_SIZE, 0)

    # NOTE(review): leftover debugging output and unused variables
    # (hwnd, x, y) below.
    print(pygame.display.get_wm_info())
    hwnd = pygame.display.get_wm_info()["window"]

    x, y = (200, 200)

    pygame.mouse.set_visible(False)

    clock = pygame.time.Clock()

    ball_image = pygame.image.load("ball.png").convert_alpha()
    mouse_image = pygame.image.load("mousecursor.png").convert_alpha()

    # Load the sound file
    bounce_sound = pygame.mixer.Sound("bounce.wav")

    balls = []

    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                quit()
            if event.type == MOUSEBUTTONDOWN:
                # Create a new ball at the mouse position with random speed
                random_speed = ( randint(-400, 400), randint(-300, 0) )
                new_ball = Ball( event.pos,
                                 random_speed,
                                 ball_image,
                                 bounce_sound )
                balls.append(new_ball)

        # tick() with no argument runs uncapped; returns elapsed ms.
        time_passed_seconds = clock.tick() / 1000.

        screen.fill((255, 255, 255))

        dead_balls = []
        for ball in balls:
            ball.update(time_passed_seconds)
            ball.render(screen)
            # Make note of any balls that are older than 10 seconds
            if ball.age > 10.0:
                dead_balls.append(ball)

        # Remove any 'dead' balls from the main list
        for ball in dead_balls:
            balls.remove(ball)

        # Draw the mouse cursor
        mouse_pos = pygame.mouse.get_pos()
        screen.blit(mouse_image, mouse_pos)

        pygame.display.update()
if __name__ == "__main__":
run()
| mit |
ms-iot/python | cpython/Lib/smtplib.py | 3 | 42361 | #! /usr/bin/env python3
'''SMTP/ESMTP client class.
This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP
Authentication) and RFC 2487 (Secure SMTP over TLS).
Notes:
Please remember, when doing ESMTP, that the names of the SMTP service
extensions are NOT the same thing as the option keywords for the RCPT
and MAIL commands!
Example:
>>> import smtplib
>>> s=smtplib.SMTP("localhost")
>>> print(s.help())
This is Sendmail version 8.8.4
Topics:
HELO EHLO MAIL RCPT DATA
RSET NOOP QUIT HELP VRFY
EXPN VERB ETRN DSN
For more info use "HELP <topic>".
To report bugs in the implementation send email to
sendmail-bugs@sendmail.org.
For local information send email to Postmaster at your site.
End of HELP info
>>> s.putcmd("vrfy","someone@here")
>>> s.getreply()
(250, "Somebody OverHere <somebody@here.my.org>")
>>> s.quit()
'''
# Author: The Dragon De Monsyne <dragondm@integral.org>
# ESMTP support, test code and doc fixes added by
# Eric S. Raymond <esr@thyrsus.com>
# Better RFC 821 compliance (MAIL and RCPT, and CRLF in data)
# by Carey Evans <c.evans@clear.net.nz>, for picky mail servers.
# RFC 2554 (authentication) support by Gerhard Haering <gerhard@bigfoot.de>.
#
# This was modified from the Python 1.5 library HTTP lib.
import socket
import io
import re
import email.utils
import email.message
import email.generator
import base64
import hmac
import copy
import datetime
import sys
from email.base64mime import body_encode as encode_base64
# Public API.  NOTE: SMTPNotSupportedError is part of the public exception
# hierarchy (raised by mail(), login(), starttls() and send_message()) and
# was missing from __all__, so "from smtplib import *" did not pick it up.
# SMTP_SSL is appended conditionally further down, when ssl is available.
__all__ = ["SMTPException", "SMTPNotSupportedError", "SMTPServerDisconnected",
           "SMTPResponseException", "SMTPSenderRefused",
           "SMTPRecipientsRefused", "SMTPDataError", "SMTPConnectError",
           "SMTPHeloError", "SMTPAuthenticationError",
           "quoteaddr", "quotedata", "SMTP"]
# Well-known ports: 25 for cleartext SMTP, 465 for implicit SMTP-over-SSL.
SMTP_PORT = 25
SMTP_SSL_PORT = 465
# Protocol line terminator, as both str (commands) and bytes (payload).
CRLF = "\r\n"
bCRLF = b"\r\n"
_MAXLINE = 8192 # more than 8 times larger than RFC 821, 4.5.3
# Matches the non-standard "AUTH=..." capability line some older servers
# send instead of "AUTH ..."; consumed by ehlo().
OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I)
# Exception classes used by this module.
class SMTPException(OSError):
    """Root of the exception hierarchy used by this module.

    Every error smtplib raises derives from this class, which itself
    derives from OSError.
    """
# Raised e.g. by mail(), login(), starttls() and send_message() when the
# needed ESMTP extension (SMTPUTF8, AUTH, STARTTLS) is not advertised.
class SMTPNotSupportedError(SMTPException):
    """The command or option is not supported by the SMTP server.
    This exception is raised when an attempt is made to run a command or a
    command with an option which is not supported by the server.
    """
# Raised both for unexpected disconnects (send()/getreply()) and for using
# an SMTP instance before connect() has been called.
class SMTPServerDisconnected(SMTPException):
    """Not connected to any SMTP server.
    This exception is raised when the server unexpectedly disconnects,
    or when an attempt is made to use the SMTP instance before
    connecting it to a server.
    """
class SMTPResponseException(SMTPException):
    """Base class for all exceptions that include an SMTP error code.
    These exceptions are generated in some instances when the SMTP
    server returns an error code.  The error code is stored in the
    `smtp_code' attribute of the error, and the `smtp_error' attribute
    is set to the error message.
    """
    def __init__(self, code, msg):
        # code: int reply code (or -1 when the code was unparseable);
        # msg: the server's reply text (bytes when raised from getreply()).
        self.smtp_code = code
        self.smtp_error = msg
        self.args = (code, msg)
class SMTPSenderRefused(SMTPResponseException):
    """Sender address refused.
    In addition to the attributes set by on all SMTPResponseException
    exceptions, this sets `sender' to the string that the SMTP refused.
    """
    def __init__(self, code, msg, sender):
        # Raised by sendmail() when the MAIL FROM command is rejected.
        self.smtp_code = code
        self.smtp_error = msg
        self.sender = sender
        self.args = (code, msg, sender)
class SMTPRecipientsRefused(SMTPException):
    """All recipient addresses refused.
    The errors for each recipient are accessible through the attribute
    'recipients', which is a dictionary of exactly the same sort as
    SMTP.sendmail() returns.
    """
    def __init__(self, recipients):
        # recipients: {address: (code, reply_text)} for each refusal.
        self.recipients = recipients
        self.args = (recipients,)
# Raised by data() (and hence sendmail()) on an unexpected DATA reply.
class SMTPDataError(SMTPResponseException):
    """The SMTP server didn't accept the data."""
# Raised by __init__ when the initial greeting is not a 220 reply.
class SMTPConnectError(SMTPResponseException):
    """Error during connection establishment."""
# Raised by ehlo_or_helo_if_needed() when neither EHLO nor HELO succeeds.
class SMTPHeloError(SMTPResponseException):
    """The server refused our HELO reply."""
# Raised by auth()/login() when the server rejects authentication.
class SMTPAuthenticationError(SMTPResponseException):
    """Authentication error.
    Most probably the server didn't accept the username/password
    combination provided.
    """
def quoteaddr(addrstring):
    """Quote a subset of the email addresses defined by RFC 821.

    Should be able to handle anything email.utils.parseaddr can handle.
    """
    displayname, addr = email.utils.parseaddr(addrstring)
    if (displayname, addr) != ('', ''):
        # parseaddr succeeded; wrap the bare addr-spec in angle brackets.
        return "<%s>" % addr
    # parseaddr couldn't parse it: pass the raw string through, adding
    # angle brackets only when they are not already present.
    if addrstring.strip().startswith('<'):
        return addrstring
    return "<%s>" % addrstring
def _addr_only(addrstring):
displayname, addr = email.utils.parseaddr(addrstring)
if (displayname, addr) == ('', ''):
# parseaddr couldn't parse it, so use it as is.
return addrstring
return addr
# Legacy method kept for backward compatibility.
def quotedata(data):
    """Quote data for email.

    Double leading '.', and change Unix newline '\\n', or Mac '\\r' into
    Internet CRLF end-of-line.
    """
    # First normalize every line ending to CRLF, then dot-stuff lines
    # that start with a period (RFC 821 transparency rule).
    normalized = re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data)
    return re.sub(r'(?m)^\.', '..', normalized)
def _quote_periods(bindata):
return re.sub(br'(?m)^\.', b'..', bindata)
def _fix_eols(data):
    # Normalize bare LF and bare CR to CRLF, leaving existing CRLF pairs
    # untouched (the negative lookahead keeps "\r\n" from doubling).
    eol_pattern = r'(?:\r\n|\n|\r(?!\n))'
    return re.sub(eol_pattern, CRLF, data)
# Optional TLS support: the ssl module may be missing on minimal builds,
# so probe for it once and remember the result.  Used by starttls() and
# to decide whether the SMTP_SSL class is defined at all.
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True
class SMTP:
"""This class manages a connection to an SMTP or ESMTP server.
SMTP Objects:
SMTP objects have the following attributes:
helo_resp
This is the message given by the server in response to the
most recent HELO command.
ehlo_resp
This is the message given by the server in response to the
most recent EHLO command. This is usually multiline.
does_esmtp
This is a True value _after you do an EHLO command_, if the
server supports ESMTP.
esmtp_features
This is a dictionary, which, if the server supports ESMTP,
will _after you do an EHLO command_, contain the names of the
SMTP service extensions this server supports, and their
parameters (if any).
Note, all extension names are mapped to lower case in the
dictionary.
See each method's docstrings for details. In general, there is a
method of the same name to perform each SMTP command. There is also a
method called 'sendmail' that will do an entire mail transaction.
"""
debuglevel = 0
file = None
helo_resp = None
ehlo_msg = "ehlo"
ehlo_resp = None
does_esmtp = 0
default_port = SMTP_PORT
    def __init__(self, host='', port=0, local_hostname=None,
                 timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                 source_address=None):
        """Initialize a new instance.
        If specified, `host' is the name of the remote host to which to
        connect. If specified, `port' specifies the port to which to connect.
        By default, smtplib.SMTP_PORT is used. If a host is specified the
        connect method is called, and if it returns anything other than a
        success code an SMTPConnectError is raised. If specified,
        `local_hostname` is used as the FQDN of the local host in the HELO/EHLO
        command. Otherwise, the local hostname is found using
        socket.getfqdn(). The `source_address` parameter takes a 2-tuple (host,
        port) for the socket to bind to as its source address before
        connecting. If the host is '' and port is 0, the OS default behavior
        will be used.
        """
        # Remembered so starttls()/SMTP_SSL can pass the server name for
        # certificate matching (server_hostname).
        self._host = host
        self.timeout = timeout
        self.esmtp_features = {}
        # Commands are ASCII until the SMTPUTF8 option is negotiated in
        # mail(); rset() restores ASCII.
        self.command_encoding = 'ascii'
        self.source_address = source_address
        if host:
            (code, msg) = self.connect(host, port)
            if code != 220:
                raise SMTPConnectError(code, msg)
        if local_hostname is not None:
            self.local_hostname = local_hostname
        else:
            # RFC 2821 says we should use the fqdn in the EHLO/HELO verb, and
            # if that can't be calculated, that we should use a domain literal
            # instead (essentially an encoded IP address like [A.B.C.D]).
            fqdn = socket.getfqdn()
            if '.' in fqdn:
                self.local_hostname = fqdn
            else:
                # We can't find an fqdn hostname, so use a domain literal
                addr = '127.0.0.1'
                try:
                    addr = socket.gethostbyname(socket.gethostname())
                except socket.gaierror:
                    pass
                self.local_hostname = '[%s]' % addr
    def __enter__(self):
        # Context-manager support: "with SMTP(host) as s: ...".
        return self
    def __exit__(self, *args):
        # Leaving a "with" block: try to QUIT politely.  A server that has
        # already dropped the connection is not an error; the socket is
        # closed no matter what.
        try:
            code, message = self.docmd("QUIT")
            if code != 221:
                raise SMTPResponseException(code, message)
        except SMTPServerDisconnected:
            pass
        finally:
            self.close()
    def set_debuglevel(self, debuglevel):
        """Set the debug output level.

        A non-false value results in debug messages for connection and for all
        messages sent to and received from the server.  A value greater than 1
        additionally timestamps each line (see _print_debug).
        """
        self.debuglevel = debuglevel
def _print_debug(self, *args):
if self.debuglevel > 1:
print(datetime.datetime.now().time(), *args, file=sys.stderr)
else:
print(*args, file=sys.stderr)
    def _get_socket(self, host, port, timeout):
        # This makes it simpler for SMTP_SSL to use the SMTP connect code
        # and just alter the socket connection bit.
        if self.debuglevel > 0:
            self._print_debug('connect: to', (host, port), self.source_address)
        # Plain TCP connection; SMTP_SSL overrides this to wrap in TLS.
        return socket.create_connection((host, port), timeout,
                                        self.source_address)
    def connect(self, host='localhost', port=0, source_address=None):
        """Connect to a host on a given port.
        If the hostname ends with a colon (`:') followed by a number, and
        there is no port specified, that suffix will be stripped off and the
        number interpreted as the port number to use.
        Note: This method is automatically invoked by __init__, if a host is
        specified during instantiation.
        """
        if source_address:
            self.source_address = source_address
        # Accept "host:port" -- but only when there is exactly one colon,
        # so an IPv6 literal is not misparsed.
        if not port and (host.find(':') == host.rfind(':')):
            i = host.rfind(':')
            if i >= 0:
                host, port = host[:i], host[i + 1:]
                try:
                    port = int(port)
                except ValueError:
                    raise OSError("nonnumeric port")
        if not port:
            port = self.default_port
        if self.debuglevel > 0:
            self._print_debug('connect:', (host, port))
        self.sock = self._get_socket(host, port, self.timeout)
        self.file = None
        # The server speaks first: read its greeting (220 on success).
        (code, msg) = self.getreply()
        if self.debuglevel > 0:
            self._print_debug('connect:', repr(msg))
        return (code, msg)
    def send(self, s):
        """Send `s' (str or bytes) to the server.

        Raises SMTPServerDisconnected if not connected or if the peer has
        gone away.
        """
        if self.debuglevel > 0:
            self._print_debug('send:', repr(s))
        if hasattr(self, 'sock') and self.sock:
            if isinstance(s, str):
                # send is used by the 'data' command, where command_encoding
                # should not be used, but 'data' needs to convert the string to
                # binary itself anyway, so that's not a problem.
                s = s.encode(self.command_encoding)
            try:
                self.sock.sendall(s)
            except OSError:
                # Peer dropped the connection: invalidate our state so the
                # next command fails fast.
                self.close()
                raise SMTPServerDisconnected('Server not connected')
        else:
            raise SMTPServerDisconnected('please run connect() first')
def putcmd(self, cmd, args=""):
"""Send a command to the server."""
if args == "":
str = '%s%s' % (cmd, CRLF)
else:
str = '%s %s%s' % (cmd, args, CRLF)
self.send(str)
    def getreply(self):
        """Get a reply from the server.
        Returns a tuple consisting of:
          - server response code (e.g. '250', or such, if all goes well)
            Note: returns -1 if it can't read response code.
          - server response string corresponding to response code (multiline
            responses are converted to a single, multiline string).
        Raises SMTPServerDisconnected if end-of-file is reached.
        """
        resp = []
        if self.file is None:
            self.file = self.sock.makefile('rb')
        # Reply lines look like b"250-..." (continuation) or b"250 ..."
        # (final line); accumulate the text parts until the final line.
        while 1:
            try:
                line = self.file.readline(_MAXLINE + 1)
            except OSError as e:
                self.close()
                raise SMTPServerDisconnected("Connection unexpectedly closed: "
                                             + str(e))
            if not line:
                self.close()
                raise SMTPServerDisconnected("Connection unexpectedly closed")
            if self.debuglevel > 0:
                self._print_debug('reply:', repr(line))
            if len(line) > _MAXLINE:
                self.close()
                raise SMTPResponseException(500, "Line too long.")
            resp.append(line[4:].strip(b' \t\r\n'))
            code = line[:3]
            # Check that the error code is syntactically correct.
            # Don't attempt to read a continuation line if it is broken.
            try:
                errcode = int(code)
            except ValueError:
                errcode = -1
                break
            # Check if multiline response.
            if line[3:4] != b"-":
                break
        errmsg = b"\n".join(resp)
        if self.debuglevel > 0:
            self._print_debug('reply: retcode (%s); Msg: %a' % (errcode, errmsg))
        return errcode, errmsg
    def docmd(self, cmd, args=""):
        """Send a command, and return its (code, message) response pair."""
        self.putcmd(cmd, args)
        return self.getreply()
# std smtp commands
    def helo(self, name=''):
        """SMTP 'helo' command.
        Hostname to send for this command defaults to the FQDN of the local
        host.
        """
        self.putcmd("helo", name or self.local_hostname)
        (code, msg) = self.getreply()
        # Cache the reply so ehlo_or_helo_if_needed() knows HELO was done.
        self.helo_resp = msg
        return (code, msg)
    def ehlo(self, name=''):
        """ SMTP 'ehlo' command.
        Hostname to send for this command defaults to the FQDN of the local
        host.

        On a 250 reply this also fills self.esmtp_features, a dict mapping
        each lower-cased extension keyword to its parameter string.
        """
        self.esmtp_features = {}
        self.putcmd(self.ehlo_msg, name or self.local_hostname)
        (code, msg) = self.getreply()
        # According to RFC1869 some (badly written)
        # MTA's will disconnect on an ehlo. Toss an exception if
        # that happens -ddm
        if code == -1 and len(msg) == 0:
            self.close()
            raise SMTPServerDisconnected("Server not connected")
        self.ehlo_resp = msg
        if code != 250:
            return (code, msg)
        self.does_esmtp = 1
        #parse the ehlo response -ddm
        assert isinstance(self.ehlo_resp, bytes), repr(self.ehlo_resp)
        resp = self.ehlo_resp.decode("latin-1").split('\n')
        # Drop the first line: it is the server's greeting, not a feature.
        del resp[0]
        for each in resp:
            # To be able to communicate with as many SMTP servers as possible,
            # we have to take the old-style auth advertisement into account,
            # because:
            # 1) Else our SMTP feature parser gets confused.
            # 2) There are some servers that only advertise the auth methods we
            #    support using the old style.
            auth_match = OLDSTYLE_AUTH.match(each)
            if auth_match:
                # This doesn't remove duplicates, but that's no problem
                self.esmtp_features["auth"] = self.esmtp_features.get("auth", "") \
                        + " " + auth_match.groups(0)[0]
                continue
            # RFC 1869 requires a space between ehlo keyword and parameters.
            # It's actually stricter, in that only spaces are allowed between
            # parameters, but were not going to check for that here.  Note
            # that the space isn't present if there are no parameters.
            m = re.match(r'(?P<feature>[A-Za-z0-9][A-Za-z0-9\-]*) ?', each)
            if m:
                feature = m.group("feature").lower()
                params = m.string[m.end("feature"):].strip()
                if feature == "auth":
                    self.esmtp_features[feature] = self.esmtp_features.get(feature, "") \
                            + " " + params
                else:
                    self.esmtp_features[feature] = params
        return (code, msg)
    def has_extn(self, opt):
        """Does the server support a given SMTP service extension?"""
        # Only meaningful after ehlo(); keys are stored lower-cased.
        return opt.lower() in self.esmtp_features
    def help(self, args=''):
        """SMTP 'help' command.
        Returns help text from server."""
        self.putcmd("help", args)
        # Only the message text is returned, not the reply code.
        return self.getreply()[1]
    def rset(self):
        """SMTP 'rset' command -- resets session."""
        # A reset also reverts any SMTPUTF8 negotiation done by mail().
        self.command_encoding = 'ascii'
        return self.docmd("rset")
    def _rset(self):
        """Internal 'rset' command which ignores any SMTPServerDisconnected error.
        Used internally in the library, since the server disconnected error
        should appear to the application when the *next* command is issued, if
        we are doing an internal "safety" reset.
        """
        try:
            self.rset()
        except SMTPServerDisconnected:
            # Swallowed on purpose; see docstring.
            pass
    def noop(self):
        """SMTP 'noop' command -- doesn't do anything :>"""
        # Useful as a keep-alive / connection probe.
        return self.docmd("noop")
def mail(self, sender, options=[]):
"""SMTP 'mail' command -- begins mail xfer session.
This method may raise the following exceptions:
SMTPNotSupportedError The options parameter includes 'SMTPUTF8'
but the SMTPUTF8 extension is not supported by
the server.
"""
optionlist = ''
if options and self.does_esmtp:
if any(x.lower()=='smtputf8' for x in options):
if self.has_extn('smtputf8'):
self.command_encoding = 'utf-8'
else:
raise SMTPNotSupportedError(
'SMTPUTF8 not supported by server')
optionlist = ' ' + ' '.join(options)
self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender), optionlist))
return self.getreply()
def rcpt(self, recip, options=[]):
"""SMTP 'rcpt' command -- indicates 1 recipient for this mail."""
optionlist = ''
if options and self.does_esmtp:
optionlist = ' ' + ' '.join(options)
self.putcmd("rcpt", "TO:%s%s" % (quoteaddr(recip), optionlist))
return self.getreply()
    def data(self, msg):
        """SMTP 'DATA' command -- sends message data to server.
        Automatically quotes lines beginning with a period per rfc821.
        Raises SMTPDataError if there is an unexpected reply to the
        DATA command; the return value from this method is the final
        response code received when the all data is sent. If msg
        is a string, lone '\\r' and '\\n' characters are converted to
        '\\r\\n' characters. If msg is bytes, it is transmitted as is.
        """
        self.putcmd("data")
        (code, repl) = self.getreply()
        if self.debuglevel > 0:
            self._print_debug('data:', (code, repl))
        # 354 == "Start mail input; end with <CRLF>.<CRLF>"
        if code != 354:
            raise SMTPDataError(code, repl)
        else:
            if isinstance(msg, str):
                msg = _fix_eols(msg).encode('ascii')
            # Dot-stuff the body, make sure it ends with CRLF, and append
            # the lone "." terminator line.
            q = _quote_periods(msg)
            if q[-2:] != bCRLF:
                q = q + bCRLF
            q = q + b"." + bCRLF
            self.send(q)
            (code, msg) = self.getreply()
            if self.debuglevel > 0:
                self._print_debug('data:', (code, msg))
            return (code, msg)
    def verify(self, address):
        """SMTP 'verify' command -- checks for address validity."""
        # Strip the address down to the bare addr-spec before sending.
        self.putcmd("vrfy", _addr_only(address))
        return self.getreply()
    # a.k.a.
    vrfy = verify
    def expn(self, address):
        """SMTP 'expn' command -- expands a mailing list."""
        # Strip the address down to the bare addr-spec before sending.
        self.putcmd("expn", _addr_only(address))
        return self.getreply()
# some useful methods
    def ehlo_or_helo_if_needed(self):
        """Call self.ehlo() and/or self.helo() if needed.
        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.
        This method may raise the following exceptions:
         SMTPHeloError            The server didn't reply properly to
                                  the helo greeting.
        """
        # Both caches empty means no greeting has been sent yet.
        if self.helo_resp is None and self.ehlo_resp is None:
            if not (200 <= self.ehlo()[0] <= 299):
                # EHLO failed; fall back to plain HELO.
                (code, resp) = self.helo()
                if not (200 <= code <= 299):
                    raise SMTPHeloError(code, resp)
    def auth(self, mechanism, authobject):
        """Authentication command - requires response processing.
        'mechanism' specifies which authentication mechanism is to
        be used - the valid values are those listed in the 'auth'
        element of 'esmtp_features'.
        'authobject' must be a callable object taking a single argument:
                data = authobject(challenge)
        It will be called to process the server's challenge response; the
        challenge argument it is passed will be a bytes.  It should return
        bytes data that will be base64 encoded and sent to the server.
        """
        mechanism = mechanism.upper()
        (code, resp) = self.docmd("AUTH", mechanism)
        # Server replies with 334 (challenge) or 535 (not supported)
        if code == 334:
            # The challenge arrives base64-encoded; decode it before handing
            # it to the authobject, then base64-encode the answer.
            challenge = base64.decodebytes(resp)
            response = encode_base64(
                authobject(challenge).encode('ascii'), eol='')
            (code, resp) = self.docmd(response)
        # 235 == auth successful, 503 == already authenticated.
        if code in (235, 503):
            return (code, resp)
        raise SMTPAuthenticationError(code, resp)
def auth_cram_md5(self, challenge):
""" Authobject to use with CRAM-MD5 authentication. Requires self.user
and self.password to be set."""
return self.user + " " + hmac.HMAC(
self.password.encode('ascii'), challenge, 'md5').hexdigest()
def auth_plain(self, challenge):
""" Authobject to use with PLAIN authentication. Requires self.user and
self.password to be set."""
return "\0%s\0%s" % (self.user, self.password)
    def auth_login(self, challenge):
        """ Authobject to use with LOGIN authentication. Requires self.user and
        self.password to be set."""
        # LOGIN sends the username immediately; the server's 334 reply then
        # asks for the password, which auth() will encode and send.
        (code, resp) = self.docmd(
            encode_base64(self.user.encode('ascii'), eol=''))
        if code == 334:
            return self.password
        raise SMTPAuthenticationError(code, resp)
    def login(self, user, password):
        """Log in on an SMTP server that requires authentication.
        The arguments are:
            - user:     The user name to authenticate with.
            - password: The password for the authentication.
        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.
        This method will return normally if the authentication was successful.
        This method may raise the following exceptions:
         SMTPHeloError            The server didn't reply properly to
                                  the helo greeting.
         SMTPAuthenticationError  The server didn't accept the username/
                                  password combination.
         SMTPNotSupportedError    The AUTH command is not supported by the
                                  server.
         SMTPException            No suitable authentication method was
                                  found.
        """
        self.ehlo_or_helo_if_needed()
        if not self.has_extn("auth"):
            raise SMTPNotSupportedError(
                "SMTP AUTH extension not supported by server.")
        # Authentication methods the server claims to support
        advertised_authlist = self.esmtp_features["auth"].split()
        # Authentication methods we can handle in our preferred order:
        preferred_auths = ['CRAM-MD5', 'PLAIN', 'LOGIN']
        # We try the supported authentications in our preferred order, if
        # the server supports them.
        authlist = [auth for auth in preferred_auths
                    if auth in advertised_authlist]
        if not authlist:
            raise SMTPException("No suitable authentication method found.")
        # Some servers advertise authentication methods they don't really
        # support, so if authentication fails, we continue until we've tried
        # all methods.
        self.user, self.password = user, password
        for authmethod in authlist:
            # e.g. 'CRAM-MD5' -> self.auth_cram_md5
            method_name = 'auth_' + authmethod.lower().replace('-', '_')
            try:
                (code, resp) = self.auth(authmethod, getattr(self, method_name))
                # 235 == 'Authentication successful'
                # 503 == 'Error: already authenticated'
                if code in (235, 503):
                    return (code, resp)
            except SMTPAuthenticationError as e:
                last_exception = e
        # We could not login successfully.  Return result of last attempt.
        raise last_exception
def starttls(self, keyfile=None, certfile=None, context=None):
"""Puts the connection to the SMTP server into TLS mode.
If there has been no previous EHLO or HELO command this session, this
method tries ESMTP EHLO first.
If the server supports TLS, this will encrypt the rest of the SMTP
session. If you provide the keyfile and certfile parameters,
the identity of the SMTP server and client can be checked. This,
however, depends on whether the socket module really checks the
certificates.
This method may raise the following exceptions:
SMTPHeloError The server didn't reply properly to
the helo greeting.
"""
self.ehlo_or_helo_if_needed()
if not self.has_extn("starttls"):
raise SMTPNotSupportedError(
"STARTTLS extension not supported by server.")
(resp, reply) = self.docmd("STARTTLS")
if resp == 220:
if not _have_ssl:
raise RuntimeError("No SSL support included in this Python")
if context is not None and keyfile is not None:
raise ValueError("context and keyfile arguments are mutually "
"exclusive")
if context is not None and certfile is not None:
raise ValueError("context and certfile arguments are mutually "
"exclusive")
if context is None:
context = ssl._create_stdlib_context(certfile=certfile,
keyfile=keyfile)
self.sock = context.wrap_socket(self.sock,
server_hostname=self._host)
self.file = None
# RFC 3207:
# The client MUST discard any knowledge obtained from
# the server, such as the list of SMTP service extensions,
# which was not obtained from the TLS negotiation itself.
self.helo_resp = None
self.ehlo_resp = None
self.esmtp_features = {}
self.does_esmtp = 0
return (resp, reply)
def sendmail(self, from_addr, to_addrs, msg, mail_options=[],
rcpt_options=[]):
"""This command performs an entire mail transaction.
The arguments are:
- from_addr : The address sending this mail.
- to_addrs : A list of addresses to send this mail to. A bare
string will be treated as a list with 1 address.
- msg : The message to send.
- mail_options : List of ESMTP options (such as 8bitmime) for the
mail command.
- rcpt_options : List of ESMTP options (such as DSN commands) for
all the rcpt commands.
msg may be a string containing characters in the ASCII range, or a byte
string. A string is encoded to bytes using the ascii codec, and lone
\\r and \\n characters are converted to \\r\\n characters.
If there has been no previous EHLO or HELO command this session, this
method tries ESMTP EHLO first. If the server does ESMTP, message size
and each of the specified options will be passed to it. If EHLO
fails, HELO will be tried and ESMTP options suppressed.
This method will return normally if the mail is accepted for at least
one recipient. It returns a dictionary, with one entry for each
recipient that was refused. Each entry contains a tuple of the SMTP
error code and the accompanying error message sent by the server.
This method may raise the following exceptions:
SMTPHeloError The server didn't reply properly to
the helo greeting.
SMTPRecipientsRefused The server rejected ALL recipients
(no mail was sent).
SMTPSenderRefused The server didn't accept the from_addr.
SMTPDataError The server replied with an unexpected
error code (other than a refusal of
a recipient).
SMTPNotSupportedError The mail_options parameter includes 'SMTPUTF8'
but the SMTPUTF8 extension is not supported by
the server.
Note: the connection will be open even after an exception is raised.
Example:
>>> import smtplib
>>> s=smtplib.SMTP("localhost")
>>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"]
>>> msg = '''\\
... From: Me@my.org
... Subject: testin'...
...
... This is a test '''
>>> s.sendmail("me@my.org",tolist,msg)
{ "three@three.org" : ( 550 ,"User unknown" ) }
>>> s.quit()
In the above example, the message was accepted for delivery to three
of the four addresses, and one was rejected, with the error code
550. If all addresses are accepted, then the method will return an
empty dictionary.
"""
self.ehlo_or_helo_if_needed()
esmtp_opts = []
if isinstance(msg, str):
msg = _fix_eols(msg).encode('ascii')
if self.does_esmtp:
if self.has_extn('size'):
esmtp_opts.append("size=%d" % len(msg))
for option in mail_options:
esmtp_opts.append(option)
(code, resp) = self.mail(from_addr, esmtp_opts)
if code != 250:
if code == 421:
self.close()
else:
self._rset()
raise SMTPSenderRefused(code, resp, from_addr)
senderrs = {}
if isinstance(to_addrs, str):
to_addrs = [to_addrs]
for each in to_addrs:
(code, resp) = self.rcpt(each, rcpt_options)
if (code != 250) and (code != 251):
senderrs[each] = (code, resp)
if code == 421:
self.close()
raise SMTPRecipientsRefused(senderrs)
if len(senderrs) == len(to_addrs):
# the server refused all our recipients
self._rset()
raise SMTPRecipientsRefused(senderrs)
(code, resp) = self.data(msg)
if code != 250:
if code == 421:
self.close()
else:
self._rset()
raise SMTPDataError(code, resp)
#if we got here then somebody got our mail
return senderrs
def send_message(self, msg, from_addr=None, to_addrs=None,
mail_options=[], rcpt_options={}):
"""Converts message to a bytestring and passes it to sendmail.
The arguments are as for sendmail, except that msg is an
email.message.Message object. If from_addr is None or to_addrs is
None, these arguments are taken from the headers of the Message as
described in RFC 2822 (a ValueError is raised if there is more than
one set of 'Resent-' headers). Regardless of the values of from_addr and
to_addr, any Bcc field (or Resent-Bcc field, when the Message is a
resent) of the Message object won't be transmitted. The Message
object is then serialized using email.generator.BytesGenerator and
sendmail is called to transmit the message. If the sender or any of
the recipient addresses contain non-ASCII and the server advertises the
SMTPUTF8 capability, the policy is cloned with utf8 set to True for the
serialization, and SMTPUTF8 and BODY=8BITMIME are asserted on the send.
If the server does not support SMTPUTF8, an SMPTNotSupported error is
raised. Otherwise the generator is called without modifying the
policy.
"""
# 'Resent-Date' is a mandatory field if the Message is resent (RFC 2822
# Section 3.6.6). In such a case, we use the 'Resent-*' fields. However,
# if there is more than one 'Resent-' block there's no way to
# unambiguously determine which one is the most recent in all cases,
# so rather than guess we raise a ValueError in that case.
#
# TODO implement heuristics to guess the correct Resent-* block with an
# option allowing the user to enable the heuristics. (It should be
# possible to guess correctly almost all of the time.)
self.ehlo_or_helo_if_needed()
resent = msg.get_all('Resent-Date')
if resent is None:
header_prefix = ''
elif len(resent) == 1:
header_prefix = 'Resent-'
else:
raise ValueError("message has more than one 'Resent-' header block")
if from_addr is None:
# Prefer the sender field per RFC 2822:3.6.2.
from_addr = (msg[header_prefix + 'Sender']
if (header_prefix + 'Sender') in msg
else msg[header_prefix + 'From'])
if to_addrs is None:
addr_fields = [f for f in (msg[header_prefix + 'To'],
msg[header_prefix + 'Bcc'],
msg[header_prefix + 'Cc'])
if f is not None]
to_addrs = [a[1] for a in email.utils.getaddresses(addr_fields)]
# Make a local copy so we can delete the bcc headers.
msg_copy = copy.copy(msg)
del msg_copy['Bcc']
del msg_copy['Resent-Bcc']
international = False
try:
''.join([from_addr, *to_addrs]).encode('ascii')
except UnicodeEncodeError:
if not self.has_extn('smtputf8'):
raise SMTPNotSupportedError(
"One or more source or delivery addresses require"
" internationalized email support, but the server"
" does not advertise the required SMTPUTF8 capability")
international = True
with io.BytesIO() as bytesmsg:
if international:
g = email.generator.BytesGenerator(
bytesmsg, policy=msg.policy.clone(utf8=True))
mail_options += ['SMTPUTF8', 'BODY=8BITMIME']
else:
g = email.generator.BytesGenerator(bytesmsg)
g.flatten(msg_copy, linesep='\r\n')
flatmsg = bytesmsg.getvalue()
return self.sendmail(from_addr, to_addrs, flatmsg, mail_options,
rcpt_options)
def close(self):
"""Close the connection to the SMTP server."""
try:
file = self.file
self.file = None
if file:
file.close()
finally:
sock = self.sock
self.sock = None
if sock:
sock.close()
    def quit(self):
        """Terminate the SMTP session."""
        res = self.docmd("quit")
        # A new EHLO is required after reconnecting with connect()
        self.ehlo_resp = self.helo_resp = None
        self.esmtp_features = {}
        self.does_esmtp = False
        self.close()
        return res
# SMTP_SSL only exists when the ssl module is importable (see _have_ssl).
if _have_ssl:
    class SMTP_SSL(SMTP):
        """ This is a subclass derived from SMTP that connects over an SSL
        encrypted socket (to use this class you need a socket module that was
        compiled with SSL support). If host is not specified, '' (the local
        host) is used. If port is omitted, the standard SMTP-over-SSL port
        (465) is used.  local_hostname and source_address have the same meaning
        as they do in the SMTP class.  keyfile and certfile are also optional -
        they can contain a PEM formatted private key and certificate chain file
        for the SSL connection. context also optional, can contain a
        SSLContext, and is an alternative to keyfile and certfile; If it is
        specified both keyfile and certfile must be None.
        """
        default_port = SMTP_SSL_PORT
        def __init__(self, host='', port=0, local_hostname=None,
                     keyfile=None, certfile=None,
                     timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                     source_address=None, context=None):
            # keyfile/certfile and context are mutually exclusive ways to
            # configure TLS; reject ambiguous combinations up front.
            if context is not None and keyfile is not None:
                raise ValueError("context and keyfile arguments are mutually "
                                 "exclusive")
            if context is not None and certfile is not None:
                raise ValueError("context and certfile arguments are mutually "
                                 "exclusive")
            self.keyfile = keyfile
            self.certfile = certfile
            if context is None:
                context = ssl._create_stdlib_context(certfile=certfile,
                                                     keyfile=keyfile)
            self.context = context
            SMTP.__init__(self, host, port, local_hostname, timeout,
                          source_address)
        def _get_socket(self, host, port, timeout):
            if self.debuglevel > 0:
                self._print_debug('connect:', (host, port))
            # Same as the base class, but the socket is wrapped in TLS
            # immediately (implicit SSL, as opposed to starttls()).
            new_socket = socket.create_connection((host, port), timeout,
                                                  self.source_address)
            new_socket = self.context.wrap_socket(new_socket,
                                                  server_hostname=self._host)
            return new_socket
    __all__.append("SMTP_SSL")
#
# LMTP extension
#
LMTP_PORT = 2003
class LMTP(SMTP):
    """LMTP - Local Mail Transfer Protocol

    The LMTP protocol, which is very similar to ESMTP, is heavily based
    on the standard SMTP client. It's common to use Unix sockets for
    LMTP, so our connect() method must support that as well as a regular
    host:port server.  local_hostname and source_address have the same
    meaning as they do in the SMTP class.  To specify a Unix socket,
    you must use an absolute path as the host, starting with a '/'.

    Authentication is supported, using the regular SMTP mechanism. When
    using a Unix socket, LMTP generally don't support or require any
    authentication, but your mileage might vary."""
    # LMTP uses LHLO instead of EHLO (RFC 2033); the base class sends
    # whatever verb is stored here.
    ehlo_msg = "lhlo"
    def __init__(self, host='', port=LMTP_PORT, local_hostname=None,
                 source_address=None):
        """Initialize a new instance."""
        SMTP.__init__(self, host, port, local_hostname=local_hostname,
                      source_address=source_address)
    def connect(self, host='localhost', port=0, source_address=None):
        """Connect to the LMTP daemon, on either a Unix or a TCP socket."""
        # An absolute path selects a Unix-domain socket; anything else is
        # treated as a TCP host.  Using startswith() instead of host[0]
        # avoids an IndexError when an empty host string is passed.
        if not host.startswith('/'):
            return SMTP.connect(self, host, port, source_address=source_address)
        # Handle Unix-domain sockets.
        try:
            self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.file = None
            self.sock.connect(host)
        except OSError:
            if self.debuglevel > 0:
                self._print_debug('connect fail:', host)
            if self.sock:
                self.sock.close()
            self.sock = None
            raise
        # Read the server's greeting, as with a TCP connection.
        (code, msg) = self.getreply()
        if self.debuglevel > 0:
            self._print_debug('connect:', msg)
        return (code, msg)
# Test the sendmail method, which tests most of the others.
# Note: This always sends to localhost.
if __name__ == '__main__':
    import sys
    def prompt(label):
        # Ask on stdout, read one stripped line from stdin.
        sys.stdout.write(label + ": ")
        sys.stdout.flush()
        return sys.stdin.readline().strip()
    fromaddr = prompt("From")
    toaddrs = prompt("To").split(',')
    print("Enter message, end with ^D:")
    # Read the body line by line until EOF on stdin.
    msg = ''.join(iter(sys.stdin.readline, ''))
    print("Message length is %d" % len(msg))
    server = SMTP('localhost')
    server.set_debuglevel(1)
    server.sendmail(fromaddr, toaddrs, msg)
    server.quit()
| bsd-3-clause |
projectbuendia/server-status | libraries/Adafruit_Python_GPIO/Adafruit_GPIO/PCF8574.py | 15 | 3412 | '''
Adafruit compatible using BaseGPIO class to represent a PCF8574/A IO expander
Copyright (C) 2015 Sylvan Butler
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.'''
import Adafruit_GPIO as GPIO
import Adafruit_GPIO.I2C as I2C
IN = GPIO.IN
OUT = GPIO.OUT
HIGH = GPIO.HIGH
LOW = GPIO.LOW
class PCF8574(GPIO.BaseGPIO):
    """Class to represent a PCF8574 or PCF8574A GPIO extender.  Compatible
    with the Adafruit_GPIO BaseGPIO class so it can be used as a custom GPIO
    class for interacting with device.
    """
    NUM_GPIO = 8  # the PCF8574(A) has 8 quasi-bidirectional pins

    def __init__(self, address=0x27, busnum=None, i2c=None, **kwargs):
        # Derive the chip variant from the I2C address; PCF8574 lives at
        # 0x20-0x27, PCF8574A at 0x38-0x3F.  Anything else is rejected.
        address = int(address)
        self.__name__ = \
            "PCF8574" if address in range(0x20, 0x28) else \
            "PCF8574A" if address in range(0x38, 0x40) else \
            "Bad address for PCF8574(A): 0x%02X not in range [0x20..0x27, 0x38..0x3F]" % address
        if self.__name__[0] != 'P':
            raise ValueError(self.__name__)
        # Create I2C device.
        i2c = i2c or I2C
        busnum = busnum or i2c.get_default_bus()
        self._device = i2c.get_i2c_device(address, busnum, **kwargs)
        # Buffer register values so they can be changed without reading.
        self.iodir = 0xFF  # Default direction: all pins are inputs.
        self.gpio = 0x00   # Buffered output levels.
        self._write_pins()

    def _write_pins(self):
        # Input pins must be written high on this chip (quasi-bidirectional),
        # so OR the direction mask into the output byte.
        self._device.writeRaw8(self.gpio | self.iodir)

    def _read_pins(self):
        # Mask the read byte so only input pins are reported.
        return self._device.readRaw8() & self.iodir

    def setup(self, pin, mode):
        # Configure a single pin as IN or OUT.
        self.setup_pins({pin: mode})

    def setup_pins(self, pins):
        # Validate every pin number and mode before touching state.
        if False in [y for x,y in [(self._validate_pin(pin),mode in (IN,OUT)) for pin,mode in pins.iteritems()]]:
            raise ValueError('Invalid MODE, IN or OUT')
        for pin,mode in pins.iteritems():
            self.iodir = self._bit2(self.iodir, pin, mode)
        self._write_pins()

    def output(self, pin, value):
        # Set a single output pin high/low.
        self.output_pins({pin: value})

    def output_pins(self, pins):
        # Validate all pins, update the buffered GPIO byte, then write once.
        [self._validate_pin(pin) for pin in pins.keys()]
        for pin,value in pins.iteritems():
            self.gpio = self._bit2(self.gpio, pin, bool(value))
        self._write_pins()

    def input(self, pin):
        # Read a single pin; returns True for high.
        return self.input_pins([pin])[0]

    def input_pins(self, pins):
        # One bus read serves all requested pins.
        [self._validate_pin(pin) for pin in pins]
        inp = self._read_pins()
        return [bool(inp & (1<<pin)) for pin in pins]
| apache-2.0 |
aravindk1992/Data-Structure-Zoo | 1-Algorithm Analysis/algorithms.py | 6 | 2193 | """ 2: Algorithms
thomas moll 2015
"""
import time, random
def find_sequentially(arr, item):
    """ Sequential Search
    Complexity: O(n)

    Returns True if `item` occurs in `arr`, else False.
    """
    # BUG FIX: the original wrote `for value, i in enumerate(arr)`, but
    # enumerate() yields (index, value) pairs, so `value` was bound to the
    # index and the item was compared against 0, 1, 2, ... instead of the
    # actual elements.
    for value in arr:
        # Check each item in the list (runs up to N times).
        if item == value:
            return True
    return False
def binary_search(arr, item):
    """ Binary Search
    Complexity: O(log(n))
    Only works on sorted arrays
    """
    lo = 0
    hi = len(arr) - 1
    # The [lo, hi] window shrinks by half each iteration.
    while lo <= hi:
        mid = (lo + hi) // 2
        if arr[mid] == item:
            return True
        # Keep whichever half can still contain the item.
        if item < arr[mid]:
            hi = mid - 1
        else:
            lo = mid + 1
    return False
def array_equals(a, b):
    """ Checks to see that two arrays
    are completely equal, regardless of order
    Complexity: O(n log n)
    """
    # BUG FIX: the original only verified that every element of `a` occurs
    # somewhere in `b`.  It ignored element counts and extra elements of
    # `b`, so array_equals([1], [1, 2]) and array_equals([1, 1, 2],
    # [1, 2, 2]) both returned True, contradicting the docstring.
    # Comparing sorted copies checks true multiset equality, and is
    # O(n log n) instead of the original O(n^2).
    if len(a) != len(b):
        return False
    return sorted(a) == sorted(b)
# Below are some speed tests comparing sequential to binary search
if __name__ == '__main__':
print 'Given an array of a million ordered ints...'
big_o_list = list(xrange(1000000))
item = random.randint(0, 1000000)
print 'Finding',item,'using sequential search'
t0 = time.time()
find_sequentially(big_o_list, item)
t1 = time.time()
total = t1-t0
print 'Found',item,'in',total,'MS'
item = random.randint(0, 1000000)
print 'Finding',item,'using binary search'
t2 = time.time()
binary_search(big_o_list, item)
t3 = time.time()
total = t2-t3
print 'Found',item,'in',total,'MS'
| mit |
htcondor/htcondor | src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/sendAll.py | 10 | 1950 | #! /usr/bin/python
#
# Starts a Skype File Transfer.
# The open file dialog is set to the current directory.
# The users to send to are searched through their Skype name and their
# full name (display name actually).
#
# Requires at least Skype 2.0.0.27.
#
# (c) Copyright 2007, Vincent Oberle, vincent@oberle.org
#
# This software may be used and distributed according to the terms
# of the GNU Public License, incorporated herein by reference.
import os
import sys
import re
from optparse import OptionParser
import Skype4Py
# Command line: first argument is a search term matched against contacts;
# the remaining arguments are joined into the message text.
parser = OptionParser('%prog user [user*]', version="%prog 0.1")
options, args = parser.parse_args()
if len(args) < 1:
    parser.print_help()
    sys.exit(0)
dest = args[1:]
d= args[0]
# Build the message from every argument after the first.
msg = ''
for m in dest:
    msg +=m+' '
path = os.getcwd() # use current working directory
# Attach to the locally running Skype client.
skype = Skype4Py.Skype()
skype.FriendlyName = 'file_sender'
skype.Attach() # attach to Skype client
ids = []
print dest
# quick-filter type of search
# first look for exact Skype ID match
# Substring match against both the Skype handle and the display name.
for user in skype.Friends:
    #print 'Found exact match'
    if user.Handle.lower().find(d.lower()) != -1: ids.append(user)
    elif user.DisplayName.lower().find(d.lower()) != -1: ids.append(user)
    #ids.append(user)
    # NOTE(review): `found` is set on every iteration regardless of a
    # match, and is never used after the loop (the break/continue below
    # are commented out) -- looks like leftover debris; confirm.
    found = True
#   break
# if found: continue
# for user in skype.Friends:
# print who will send to, no need to ask for confirmation as user can still cancel the open file window
#msg = "Hi Condor Messaging Integration Test..good morning :)"
#msg = dest[0]
print 'Sending to:'
# Send the chat message to every matched contact.
for i in ids:
    if i.FullName: print ' ' + i.FullName + ' (' + i.Handle + ')'
    else: print ' ' + i.Handle
    skype.SendMessage(i.Handle, msg)
if ids: # ids is a list of Skype IDs
    # Build the OPEN FILETRANSFER command string; the actual SendCommand
    # call is commented out below, so no transfer is started here.
    in_str = ''
    if path: in_str = ' IN ' + path
    ids_str = ids[0].Handle
    for i in ids[1:]: ids_str = ids_str + ', ' + i.Handle
    # OPEN FILETRANSFER echo123 IN C:\temp
    cmd = 'OPEN FILETRANSFER ' + ids_str + in_str
    # print cmd
    # skype.SendCommand(skype.Command(cmd))
| apache-2.0 |
commtrack/temp-aquatest | reportlab/graphics/charts/lineplots.py | 1 | 46569 | #Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/lineplots.py
__version__=''' $Id: lineplots.py 3631 2010-01-13 10:54:24Z meitham $ '''
__doc__="""This module defines a very preliminary Line Plot example."""
import string, time
from reportlab.lib import colors
from reportlab.lib.validators import *
from reportlab.lib.attrmap import *
from reportlab.graphics.shapes import Drawing, Group, Rect, Line, PolyLine, Polygon, _SetKeyWordArgs
from reportlab.graphics.widgetbase import Widget, TypedPropertyCollection, PropHolder
from reportlab.graphics.charts.textlabels import Label
from reportlab.graphics.charts.axes import XValueAxis, YValueAxis, AdjYValueAxis, NormalDateXValueAxis
from reportlab.graphics.charts.utils import *
from reportlab.graphics.widgets.markers import uSymbol2Symbol, isSymbol, makeMarker
from reportlab.graphics.widgets.grids import Grid, DoubleGrid, ShadedRect, ShadedPolygon
from reportlab.pdfbase.pdfmetrics import stringWidth, getFont
from reportlab.graphics.charts.areas import PlotArea
# This might be moved again from here...
class LinePlotProperties(PropHolder):
    """Per-series style holder used by LinePlot.lines (one per data row)."""
    _attrMap = AttrMap(
        strokeWidth = AttrMapValue(isNumber, desc='Width of a line.'),
        strokeColor = AttrMapValue(isColorOrNone, desc='Color of a line.'),
        strokeDashArray = AttrMapValue(isListOfNumbersOrNone, desc='Dash array of a line.'),
        symbol = AttrMapValue(None, desc='Widget placed at data points.',advancedUsage=1),
        shader = AttrMapValue(None, desc='Shader Class.',advancedUsage=1),
        filler = AttrMapValue(None, desc='Filler Class.',advancedUsage=1),
        name = AttrMapValue(isStringOrNone, desc='Name of the line.'),
        inFill = AttrMapValue(isBoolean, desc='If true flood fill to x axis',advancedUsage=1),
        )
class Shader(_SetKeyWordArgs):
    """Base class for shading the region between a plotted line and the axis."""
    _attrMap = AttrMap(BASE=PlotArea,
        vertical = AttrMapValue(isBoolean, desc='If true shade to x axis'),
        colors = AttrMapValue(SequenceOf(isColorOrNone,lo=2,hi=2), desc='(AxisColor, LineColor)'),
        )

    def shade(self, lp, g, rowNo, rowColor, row):
        # Resolve the (axis, line) colour pair, defaulting to the plot's
        # fill colour and the row's stroke colour respectively.
        # NOTE(review): this base implementation computes `c` but adds
        # nothing to `g` -- presumably subclasses override shade() and
        # reuse this resolution logic; confirm before relying on it.
        c = [None,None]
        c = getattr(self,'colors',c) or c
        if not c[0]: c[0] = getattr(lp,'fillColor',colors.white)
        if not c[1]: c[1] = rowColor
class NoFiller:
    """Filler that deliberately draws nothing below the line."""
    def fill(self, lp, g, rowNo, rowColor, points):
        # Intentional no-op: leave the chart group untouched.
        return None
class Filler:
    '''mixin providing simple polygon fill'''
    _attrMap = AttrMap(
        fillColor = AttrMapValue(isColorOrNone, desc='filler interior color'),
        strokeColor = AttrMapValue(isColorOrNone, desc='filler edge color'),
        strokeWidth = AttrMapValue(isNumberOrNone, desc='filler edge width'),
        )

    def __init__(self,**kw):
        # Keyword arguments become instance attributes directly.
        self.__dict__ = kw

    def fill(self, lp, g, rowNo, rowColor, points):
        # Add a closed polygon over `points`; unset style attributes fall
        # back to the row colour (and a hairline stroke width).
        g.add(Polygon(points,
            fillColor=getattr(self,'fillColor',rowColor),
            strokeColor=getattr(self,'strokeColor',rowColor),
            strokeWidth=getattr(self,'strokeWidth',0.1)))
class ShadedPolyFiller(Filler,ShadedPolygon):
    # Filler variant whose polygon is rendered with ShadedPolygon shading.
    pass
class PolyFiller(Filler,Polygon):
    # Filler variant based on a plain Polygon.
    pass
from linecharts import AbstractLineChart
class LinePlot(AbstractLineChart):
    """Line plot with multiple lines.

    Both x- and y-axis are value axis (so there are no separate
    X and Y versions of this class).
    """
    _attrMap = AttrMap(BASE=PlotArea,
        reversePlotOrder = AttrMapValue(isBoolean, desc='If true reverse plot order.',advancedUsage=1),
        lineLabelNudge = AttrMapValue(isNumber, desc='Distance between a data point and its label.',advancedUsage=1),
        lineLabels = AttrMapValue(None, desc='Handle to the list of data point labels.'),
        lineLabelFormat = AttrMapValue(None, desc='Formatting string or function used for data point labels.'),
        lineLabelArray = AttrMapValue(None, desc='explicit array of line label values, must match size of data if present.'),
        joinedLines = AttrMapValue(isNumber, desc='Display data points joined with lines if true.'),
        strokeColor = AttrMapValue(isColorOrNone, desc='Color used for background border of plot area.'),
        fillColor = AttrMapValue(isColorOrNone, desc='Color used for background interior of plot area.'),
        lines = AttrMapValue(None, desc='Handle of the lines.'),
        xValueAxis = AttrMapValue(None, desc='Handle of the x axis.'),
        yValueAxis = AttrMapValue(None, desc='Handle of the y axis.'),
        data = AttrMapValue(None, desc='Data to be plotted, list of (lists of) x/y tuples.'),
        annotations = AttrMapValue(None, desc='list of callables, will be called with self, xscale, yscale.',advancedUsage=1),
        behindAxes = AttrMapValue(isBoolean, desc='If true use separate line group.',advancedUsage=1),
        gridFirst = AttrMapValue(isBoolean, desc='If true use draw grids before axes.',advancedUsage=1),
        )

    def __init__(self):
        PlotArea.__init__(self)
        self.reversePlotOrder = 0

        self.xValueAxis = XValueAxis()
        self.yValueAxis = YValueAxis()

        # this defines two series of 3 points. Just an example.
        self.data = [
            ((1,1), (2,2), (2.5,1), (3,3), (4,5)),
            ((1,2), (2,3), (2.5,2), (3,4), (4,6))
            ]

        self.lines = TypedPropertyCollection(LinePlotProperties)
        self.lines.strokeWidth = 1
        self.lines[0].strokeColor = colors.red
        self.lines[1].strokeColor = colors.blue

        self.lineLabels = TypedPropertyCollection(Label)
        self.lineLabelFormat = None
        self.lineLabelArray = None

        # this says whether the origin is inside or outside
        # the bar - +10 means put the origin ten points
        # above the tip of the bar if value > 0, or ten
        # points inside if bar value < 0. This is different
        # to label dx/dy which are not dependent on the
        # sign of the data.
        self.lineLabelNudge = 10
        # if you have multiple series, by default they butt
        # together.

        # New line chart attributes.
        self.joinedLines = 1 # Connect items with straight lines.

        #private attributes
        self._inFill = None
        self.annotations = []
        self.behindAxes = 0
        self.gridFirst = 0

    def demo(self):
        """Shows basic use of a line chart."""

        drawing = Drawing(400, 200)

        data = [
            ((1,1), (2,2), (2.5,1), (3,3), (4,5)),
            ((1,2), (2,3), (2.5,2), (3.5,5), (4,6))
            ]

        lp = LinePlot()
        lp.x = 50
        lp.y = 50
        lp.height = 125
        lp.width = 300
        lp.data = data
        lp.joinedLines = 1
        lp.lineLabelFormat = '%2.0f'
        lp.strokeColor = colors.black
        lp.lines[0].strokeColor = colors.red
        lp.lines[0].symbol = makeMarker('FilledCircle')
        lp.lines[1].strokeColor = colors.blue
        lp.lines[1].symbol = makeMarker('FilledDiamond')
        lp.xValueAxis.valueMin = 0
        lp.xValueAxis.valueMax = 5
        lp.xValueAxis.valueStep = 1
        lp.yValueAxis.valueMin = 0
        lp.yValueAxis.valueMax = 7
        lp.yValueAxis.valueStep = 1

        drawing.add(lp)

        return drawing

    def calcPositions(self):
        """Works out where they go.

        Sets an attribute _positions which is a list of
        lists of (x, y) matching the data.
        """
        self._seriesCount = len(self.data)
        self._rowLength = max(map(len,self.data))

        self._positions = []
        for rowNo in range(len(self.data)):
            line = []
            for colNo in range(len(self.data[rowNo])):
                datum = self.data[rowNo][colNo] # x,y value
                # String x-values are treated as dates and converted to
                # timestamps before scaling.
                if type(datum[0]) == type(''):
                    x = self.xValueAxis.scale(mktime(mkTimeTuple(datum[0])))
                else:
                    x = self.xValueAxis.scale(datum[0])
                y = self.yValueAxis.scale(datum[1])
                line.append((x, y))
            self._positions.append(line)

    def _innerDrawLabel(self, rowNo, colNo, x, y):
        "Draw a label for a given item in the list."
        # Build (but do not add) the Label for one data point; returns
        # None when no label should be shown.
        labelFmt = self.lineLabelFormat
        labelValue = self.data[rowNo][colNo][1] ###

        if labelFmt is None:
            labelText = None
        elif type(labelFmt) is StringType:
            # 'values' selects explicit texts from lineLabelArray;
            # any other string is used as a % format for the y value.
            # NOTE(review): StringType is expected to come from the
            # star-import of charts.utils -- confirm.
            if labelFmt == 'values':
                labelText = self.lineLabelArray[rowNo][colNo]
            else:
                labelText = labelFmt % labelValue
        elif callable(labelFmt):
            labelText = labelFmt(labelValue)
        else:
            raise ValueError("Unknown formatter type %s, expected string or function"%labelFmt)

        if labelText:
            label = self.lineLabels[(rowNo, colNo)]
            if not label.visible: return
            #hack to make sure labels are outside the bar
            if y > 0:
                label.setOrigin(x, y + self.lineLabelNudge)
            else:
                label.setOrigin(x, y - self.lineLabelNudge)
            label.setText(labelText)
        else:
            label = None
        return label

    def drawLabel(self, G, rowNo, colNo, x, y):
        '''Draw a label for a given item in the list.
        G must have an add method'''
        # NOTE(review): _innerDrawLabel can return None, which is then
        # passed to G.add -- verify G.add tolerates None.
        G.add(self._innerDrawLabel(rowNo,colNo,x,y))

    def makeLines(self):
        # Build the Group containing all series: flood fills, polylines,
        # point symbols, data labels and optional shaders.
        g = Group()
        bubblePlot = getattr(self,'_bubblePlot',None)
        if bubblePlot:
            yA = self.yValueAxis
            xA = self.xValueAxis
            bubbleR = min(yA._bubbleRadius,xA._bubbleRadius)
            bubbleMax = xA._bubbleMax

        labelFmt = self.lineLabelFormat

        P = range(len(self._positions))
        if self.reversePlotOrder: P.reverse()

        inFill = getattr(self,'_inFill',None)
        styleCount = len(self.lines)
        # Precompute flood-fill geometry if any row (or the chart) fills
        # down to the x axis.
        if inFill or [rowNo for rowNo in P if getattr(self.lines[rowNo%styleCount],'inFill',False)]:
            inFillY = self.xValueAxis._y
            inFillX0 = self.yValueAxis._x
            inFillX1 = inFillX0 + self.xValueAxis._length
            inFillG = getattr(self,'_inFillG',g)
        lG = getattr(self,'_lineG',g)
        # Iterate over data rows.
        for rowNo in P:
            row = self._positions[rowNo]
            rowStyle = self.lines[rowNo % styleCount]
            rowColor = getattr(rowStyle,'strokeColor',None)
            dash = getattr(rowStyle, 'strokeDashArray', None)

            if hasattr(rowStyle, 'strokeWidth'):
                width = rowStyle.strokeWidth
            elif hasattr(self.lines, 'strokeWidth'):
                width = self.lines.strokeWidth
            else:
                width = None

            # Iterate over data columns.
            if self.joinedLines:
                points = []
                for xy in row:
                    points = points + [xy[0], xy[1]]
                if inFill or getattr(rowStyle,'inFill',False):
                    # Close the series polygon down to the x axis.
                    fpoints = [inFillX0,inFillY] + points + [inFillX1,inFillY]
                    filler = getattr(rowStyle, 'filler', None)
                    if filler:
                        filler.fill(self,inFillG,rowNo,rowColor,fpoints)
                    else:
                        inFillG.add(Polygon(fpoints,fillColor=rowColor,strokeColor=rowColor,strokeWidth=width or 0.1))
                if inFill in (None,0,2):
                    line = PolyLine(points,strokeColor=rowColor,strokeLineCap=0,strokeLineJoin=1)
                    if width:
                        line.strokeWidth = width
                    if dash:
                        line.strokeDashArray = dash
                    lG.add(line)

            if hasattr(rowStyle, 'symbol'):
                uSymbol = rowStyle.symbol
            elif hasattr(self.lines, 'symbol'):
                uSymbol = self.lines.symbol
            else:
                uSymbol = None

            if uSymbol:
                j = -1
                if bubblePlot: drow = self.data[rowNo]
                for xy in row:
                    j += 1
                    symbol = uSymbol2Symbol(uSymbol,xy[0],xy[1],rowColor)
                    if symbol:
                        if bubblePlot:
                            # Bubble area scales with the third data value.
                            symbol.size = bubbleR*(drow[j][2]/bubbleMax)**0.5
                        g.add(symbol)

            # Draw data labels.
            for colNo in range(len(row)):
                x1, y1 = row[colNo]
                self.drawLabel(g, rowNo, colNo, x1, y1)

            shader = getattr(rowStyle, 'shader', None)
            if shader: shader.shade(self,g,rowNo,rowColor,row)

        return g

    def draw(self):
        # Configure both axes, compute point positions, then assemble
        # background, grids, axes, lines and annotations in z-order.
        yA = self.yValueAxis
        xA = self.xValueAxis
        if getattr(self,'_bubblePlot',None):
            yA._bubblePlot = xA._bubblePlot = 1
        yA.setPosition(self.x, self.y, self.height)
        if yA: yA.joinAxis = xA
        if xA: xA.joinAxis = yA
        yA.configure(self.data)

        # if zero is in chart, put x axis there, otherwise use bottom.
        xAxisCrossesAt = yA.scale(0)
        if ((xAxisCrossesAt > self.y + self.height) or (xAxisCrossesAt < self.y)):
            y = self.y
        else:
            y = xAxisCrossesAt

        xA.setPosition(self.x, y, self.width)
        xA.configure(self.data)
        self.calcPositions()
        g = Group()
        g.add(self.makeBackground())
        if self._inFill or self.behindAxes:
            # Sub-groups allow fills/lines to be layered behind the axes.
            xA._joinToAxis()
            if self._inFill:
                self._inFillG = Group()
                g.add(self._inFillG)
            if self.behindAxes:
                self._lineG = Group()
                g.add(self._lineG)
        if self.gridFirst:
            xA.makeGrid(g,parent=self,dim=yA.getGridDims)
            yA.makeGrid(g,parent=self,dim=xA.getGridDims)
        g.add(xA)
        g.add(yA)
        if not self.gridFirst:
            xAdgl = getattr(xA,'drawGridLast',False)
            yAdgl = getattr(yA,'drawGridLast',False)
            if not xAdgl: xA.makeGrid(g,parent=self,dim=yA.getGridDims)
            if not yAdgl: yA.makeGrid(g,parent=self,dim=xA.getGridDims)
        annotations = getattr(self,'annotations',[])
        for a in annotations:
            if getattr(a,'beforeLines',None):
                g.add(a(self,xA.scale,yA.scale))
        g.add(self.makeLines())
        if not self.gridFirst:
            if xAdgl: xA.makeGrid(g,parent=self,dim=yA.getGridDims)
            if yAdgl: yA.makeGrid(g,parent=self,dim=xA.getGridDims)
        for a in annotations:
            if not getattr(a,'beforeLines',None):
                g.add(a(self,xA.scale,yA.scale))
        return g

    def addCrossHair(self,name,xv,yv,strokeColor=colors.black,strokeWidth=1,beforeLines=True):
        # Register (or replace, by name) an annotation drawing a cross
        # hair through data point (xv, yv).
        from reportlab.graphics.shapes import Group, Line
        annotations = [a for a in getattr(self,'annotations',[]) if getattr(a,'name',None)!=name]
        def annotation(self,xScale,yScale):
            x = xScale(xv)
            y = yScale(yv)
            g = Group()
            xA = xScale.im_self #the x axis
            g.add(Line(xA._x,y,xA._x+xA._length,y,strokeColor=strokeColor,strokeWidth=strokeWidth))
            yA = yScale.im_self #the y axis
            g.add(Line(x,yA._y,x,yA._y+yA._length,strokeColor=strokeColor,strokeWidth=strokeWidth))
            return g
        annotation.beforeLines = beforeLines
        annotations.append(annotation)
        self.annotations = annotations
class LinePlot3D(LinePlot):
    """LinePlot rendered with a pseudo-3D depth effect.

    Points are sheared by (theta_x, theta_y) per unit of depth z; each
    series occupies a slab of thickness zDepth separated by zSpace.
    """
    _attrMap = AttrMap(BASE=LinePlot,
        theta_x = AttrMapValue(isNumber, desc='dx/dz'),
        theta_y = AttrMapValue(isNumber, desc='dy/dz'),
        zDepth = AttrMapValue(isNumber, desc='depth of an individual series'),
        zSpace = AttrMapValue(isNumber, desc='z gap around series'),
        )
    theta_x = .5
    theta_y = .5
    zDepth = 10
    zSpace = 3

    def calcPositions(self):
        # Compute 2D positions, then the total x/y shear of the 3D box.
        LinePlot.calcPositions(self)
        nSeries = self._seriesCount
        zSpace = self.zSpace
        zDepth = self.zDepth
        if self.xValueAxis.style=='parallel_3d':
            # Each series gets its own slab plus surrounding gaps.
            _3d_depth = nSeries*zDepth+(nSeries+1)*zSpace
        else:
            _3d_depth = zDepth + 2*zSpace
        self._3d_dx = self.theta_x*_3d_depth
        self._3d_dy = self.theta_y*_3d_depth

    def _calc_z0(self,rowNo):
        # Depth at which series rowNo starts.
        zSpace = self.zSpace
        if self.xValueAxis.style=='parallel_3d':
            z0 = rowNo*(self.zDepth+zSpace)+zSpace
        else:
            z0 = zSpace
        return z0

    def _zadjust(self,x,y,z):
        # Shear an (x, y) point by depth z.
        return x+z*self.theta_x, y+z*self.theta_y

    def makeLines(self):
        bubblePlot = getattr(self,'_bubblePlot',None)
        assert not bubblePlot, "_bubblePlot not supported for 3d yet"
        #if bubblePlot:
        #   yA = self.yValueAxis
        #   xA = self.xValueAxis
        #   bubbleR = min(yA._bubbleRadius,xA._bubbleRadius)
        #   bubbleMax = xA._bubbleMax
        labelFmt = self.lineLabelFormat
        positions = self._positions

        P = range(len(positions))
        if self.reversePlotOrder: P.reverse()

        inFill = getattr(self,'_inFill',None)
        assert not inFill, "inFill not supported for 3d yet"
        #if inFill:
        #   inFillY = self.xValueAxis._y
        #   inFillX0 = self.yValueAxis._x
        #   inFillX1 = inFillX0 + self.xValueAxis._length
        #   inFillG = getattr(self,'_inFillG',g)
        zDepth = self.zDepth
        _zadjust = self._zadjust
        theta_x = self.theta_x
        theta_y = self.theta_y

        # _FakeGroup collects (priority, z, z, x, y, shape) tuples so all
        # pieces can be depth-sorted before being drawn.
        from linecharts import _FakeGroup
        F = _FakeGroup()

        from utils3d import _make_3d_line_info, find_intersections
        if self.xValueAxis.style!='parallel_3d':
            tileWidth = getattr(self,'_3d_tilewidth',1)
            if getattr(self,'_find_intersections',None):
                # Insert computed intersection points into copies of the
                # series so overlapping lines tile correctly.
                from copy import copy
                fpositions = map(copy,positions)
                I = find_intersections(fpositions,small=tileWidth)
                ic = None
                for i,j,x,y in I:
                    if ic!=i:
                        ic = i
                        jc = 0
                    else:
                        jc+=1
                    fpositions[i].insert(j+jc,(x,y))
                tileWidth = None
            else:
                fpositions = positions
        else:
            tileWidth = None
            fpositions = positions

        # Iterate over data rows.
        styleCount = len(self.lines)
        for rowNo in P:
            row = positions[rowNo]
            n = len(row)
            rowStyle = self.lines[rowNo % styleCount]
            rowColor = rowStyle.strokeColor
            dash = getattr(rowStyle, 'strokeDashArray', None)
            z0 = self._calc_z0(rowNo)
            z1 = z0 + zDepth

            if hasattr(rowStyle, 'strokeWidth'):
                width = rowStyle.strokeWidth
            elif hasattr(self.lines, 'strokeWidth'):
                width = self.lines.strokeWidth
            else:
                width = None

            # Iterate over data columns.
            if self.joinedLines:
                if n:
                    frow = fpositions[rowNo]
                    x0, y0 = frow[0]
                    for colNo in xrange(1,len(frow)):
                        x1, y1 = frow[colNo]
                        _make_3d_line_info( F, x0, x1, y0, y1, z0, z1,
                                theta_x, theta_y,
                                rowColor, fillColorShaded=None, tileWidth=tileWidth,
                                strokeColor=None, strokeWidth=None, strokeDashArray=None,
                                shading=0.1)
                        x0, y0 = x1, y1

            if hasattr(rowStyle, 'symbol'):
                uSymbol = rowStyle.symbol
            elif hasattr(self.lines, 'symbol'):
                uSymbol = self.lines.symbol
            else:
                uSymbol = None

            if uSymbol:
                for xy in row:
                    # NOTE(review): this reads row[colNo] (the loop variable
                    # left over from the joinedLines loop) instead of xy --
                    # looks like a latent bug; confirm against upstream.
                    x1, y1 = row[colNo]
                    x1, y1 = _zadjust(x1,y1,z0)
                    symbol = uSymbol2Symbol(uSymbol,xy[0],xy[1],rowColor)
                    if symbol: F.add((1,z0,z0,x1,y1,symbol))

            # Draw data labels.
            for colNo in xrange(n):
                x1, y1 = row[colNo]
                x1, y1 = _zadjust(x1,y1,z0)
                L = self._innerDrawLabel(rowNo, colNo, x1, y1)
                if L: F.add((2,z0,z0,x1,y1,L))

        F.sort()
        g = Group()
        # Emit the depth-sorted shapes into the output group.
        map(lambda x,a=g.add: a(x[-1]),F.value())
        return g
_monthlyIndexData = [[(19971202, 100.0),
(19971231, 100.1704367),
(19980131, 101.5639577),
(19980228, 102.1879927),
(19980331, 101.6337257),
(19980430, 102.7640446),
(19980531, 102.9198038),
(19980630, 103.25938789999999),
(19980731, 103.2516421),
(19980831, 105.4744329),
(19980930, 109.3242705),
(19981031, 111.9859291),
(19981130, 110.9184642),
(19981231, 110.9184642),
(19990131, 111.9882532),
(19990228, 109.7912614),
(19990331, 110.24189629999999),
(19990430, 110.4279321),
(19990531, 109.33955469999999),
(19990630, 108.2341748),
(19990731, 110.21294469999999),
(19990831, 110.9683062),
(19990930, 112.4425371),
(19991031, 112.7314032),
(19991130, 112.3509645),
(19991231, 112.3660659),
(20000131, 110.9255248),
(20000229, 110.5266306),
(20000331, 113.3116101),
(20000430, 111.0449133),
(20000531, 111.702717),
(20000630, 113.5832178)],
[(19971202, 100.0),
(19971231, 100.0),
(19980131, 100.8),
(19980228, 102.0),
(19980331, 101.9),
(19980430, 103.0),
(19980531, 103.0),
(19980630, 103.1),
(19980731, 103.1),
(19980831, 102.8),
(19980930, 105.6),
(19981031, 108.3),
(19981130, 108.1),
(19981231, 111.9),
(19990131, 113.1),
(19990228, 110.2),
(19990331, 111.8),
(19990430, 112.3),
(19990531, 110.1),
(19990630, 109.3),
(19990731, 111.2),
(19990831, 111.7),
(19990930, 112.6),
(19991031, 113.2),
(19991130, 113.9),
(19991231, 115.4),
(20000131, 112.7),
(20000229, 113.9),
(20000331, 115.8),
(20000430, 112.2),
(20000531, 112.6),
(20000630, 114.6)]]
class GridLinePlot(LinePlot):
    """A customized version of LinePlot.
    It uses NormalDateXValueAxis() and AdjYValueAxis() for the X and Y axes.
    The chart has a default grid background with thin horizontal lines
    aligned with the tickmarks (and labels). You can change the back-
    ground to be any Grid or ShadedRect, or scale the whole chart.
    If you do provide a background, you can specify the colours of the
    stripes with 'background.stripeColors'.
    """

    _attrMap = AttrMap(BASE=LinePlot,
        background = AttrMapValue(None, desc='Background for chart area (now Grid or ShadedRect).'),
        scaleFactor = AttrMapValue(isNumberOrNone, desc='Scalefactor to apply to whole drawing.'),
        )

    def __init__(self):
        from reportlab.lib import colors
        LinePlot.__init__(self)
        self.xValueAxis = NormalDateXValueAxis()
        self.yValueAxis = AdjYValueAxis()
        self.scaleFactor = None
        # Default background: thin horizontal lines aligned with y ticks.
        self.background = Grid()
        self.background.orientation = 'horizontal'
        self.background.useRects = 0
        self.background.useLines = 1
        self.background.strokeWidth = 0.5
        self.background.strokeColor = colors.black
        self.data = _monthlyIndexData

    def demo(self,drawing=None):
        from reportlab.lib import colors
        if not drawing:
            drawing = Drawing(400, 200)
        # BUG FIX: the demo previously instantiated the undefined name
        # AdjLinePlot, raising NameError; this class is its successor.
        lp = GridLinePlot()
        lp.x = 50
        lp.y = 50
        lp.height = 125
        lp.width = 300
        lp.data = _monthlyIndexData
        lp.joinedLines = 1
        lp.strokeColor = colors.black
        c0 = colors.PCMYKColor(100,65,0,30, spotName='PANTONE 288 CV', density=100)
        lp.lines[0].strokeColor = c0
        lp.lines[0].strokeWidth = 2
        lp.lines[0].strokeDashArray = None
        c1 = colors.PCMYKColor(0,79,91,0, spotName='PANTONE Wm Red CV', density=100)
        lp.lines[1].strokeColor = c1
        lp.lines[1].strokeWidth = 1
        lp.lines[1].strokeDashArray = [3,1]
        lp.xValueAxis.labels.fontSize = 10
        lp.xValueAxis.labels.textAnchor = 'start'
        lp.xValueAxis.labels.boxAnchor = 'w'
        lp.xValueAxis.labels.angle = -45
        lp.xValueAxis.labels.dx = 0
        lp.xValueAxis.labels.dy = -8
        lp.xValueAxis.xLabelFormat = '{mm}/{yy}'
        lp.yValueAxis.labelTextFormat = '%5d%% '
        lp.yValueAxis.tickLeft = 5
        lp.yValueAxis.labels.fontSize = 10
        lp.background = Grid()
        lp.background.stripeColors = [colors.pink, colors.lightblue]
        lp.background.orientation = 'vertical'
        drawing.add(lp,'plot')
        return drawing

    def _deltaSteps(self, axis):
        """Return the gaps between consecutive tick positions of *axis*.

        Used to align Grid/DoubleGrid stripes with the axis tickmarks.
        """
        pos = map(axis.scale, [axis._valueMin] + axis._tickValues)
        return [pos[i+1] - pos[i] for i in range(len(pos)-1)]

    def draw(self):
        xva, yva = self.xValueAxis, self.yValueAxis
        if xva: xva.joinAxis = yva
        if yva: yva.joinAxis = xva

        yva.setPosition(self.x, self.y, self.height)
        yva.configure(self.data)

        # if zero is in chart, put x axis there, otherwise
        # use bottom.
        xAxisCrossesAt = yva.scale(0)
        if ((xAxisCrossesAt > self.y + self.height) or (xAxisCrossesAt < self.y)):
            y = self.y
        else:
            y = xAxisCrossesAt

        xva.setPosition(self.x, y, self.width)
        xva.configure(self.data)

        # Align the background stripes with the axis ticks.  The four
        # near-identical copies of this computation were factored into
        # _deltaSteps().
        back = self.background
        if isinstance(back, Grid):
            if back.orientation == 'vertical' and xva._tickValues:
                back.deltaSteps = self._deltaSteps(xva)
            elif back.orientation == 'horizontal' and yva._tickValues:
                back.deltaSteps = self._deltaSteps(yva)
        elif isinstance(back, DoubleGrid):
            # Ideally, these lines would not be needed...
            for grid in (back.grid0, back.grid1):
                grid.x = self.x
                grid.y = self.y
                grid.width = self.width
                grid.height = self.height
                if grid.orientation == 'vertical' and xva._tickValues:
                    grid.deltaSteps = self._deltaSteps(xva)
                elif grid.orientation == 'horizontal' and yva._tickValues:
                    grid.deltaSteps = self._deltaSteps(yva)

        self.calcPositions()

        scaleFactor = self.scaleFactor
        if scaleFactor and scaleFactor != 1:
            # BUG FIX: previously `g` was never created in this branch
            # (only a commented-out Drawing remained), so any non-unit
            # scaleFactor raised NameError.  Scale the whole chart by
            # putting it in a transformed Group instead.
            g = Group()
            g.transform = (scaleFactor, 0, 0, scaleFactor, 0, 0)
        else:
            g = Group()

        g.add(self.makeBackground())
        g.add(self.xValueAxis)
        g.add(self.yValueAxis)
        g.add(self.makeLines())

        return g
class AreaLinePlot(LinePlot):
    '''we're given data in the form [(X1,Y11,..Y1M)....(Xn,Yn1,...YnM)]'''#'
    def __init__(self):
        LinePlot.__init__(self)
        # Flood-fill each series to the x axis and paint back-to-front so
        # the stacked areas occlude correctly.
        self._inFill = 1
        self.reversePlotOrder = 1
        self.data = [(1,20,100,30),(2,11,50,15),(3,15,70,40)]

    def draw(self):
        """Stack the y-columns cumulatively, delegate to LinePlot.draw,
        then restore the caller's raw data."""
        source = self.data
        try:
            nrows = len(source)
            ncols = len(source[0])
            running = nrows * [0]
            stacked = []
            # Each y-column is added onto the running totals, producing
            # one (x, cumulative-y) series per original y-column.
            for col in range(1, ncols):
                series = []
                for r in range(nrows):
                    running[r] = running[r] + source[r][col]
                    series.append((source[r][0], running[r]))
                stacked.append(series)
            self.data = stacked
            return LinePlot.draw(self)
        finally:
            # Always hand the original data back, even if drawing failed.
            self.data = source
class SplitLinePlot(AreaLinePlot):
    """AreaLinePlot preset: three stacked fractions sampled monthly on a
    normal-date x axis."""
    def __init__(self):
        AreaLinePlot.__init__(self)
        self.xValueAxis = NormalDateXValueAxis()
        self.yValueAxis = AdjYValueAxis()
        # Monthly samples from 2003-06-01 to 2020-01-01.  The three
        # fractions are constant within each segment and change every
        # February, so the table is encoded as
        # (last yyyymmdd stamp of segment, fractions) instead of spelling
        # out all ~200 tuples literally.
        segments = [
            (20090101, (0.95, 0.05, 0.0)),
            (20100101, (0.91, 0.09, 0.0)),
            (20110101, (0.81, 0.19, 0.0)),
            (20120101, (0.72, 0.28, 0.0)),
            (20130101, (0.53, 0.47, 0.0)),
            (20140101, (0.44, 0.56, 0.0)),
            (20150101, (0.36, 0.5, 0.14)),
            (20160101, (0.3, 0.41, 0.29)),
            (20170101, (0.26, 0.36, 0.38)),
            (20180101, (0.2, 0.3, 0.5)),
            (20190101, (0.13, 0.37, 0.5)),
            (20200101, (0.1, 0.4, 0.5)),
        ]
        data = []
        year, month = 2003, 6
        for last_stamp, (a, b, c) in segments:
            while True:
                stamp = year * 10000 + month * 100 + 1
                data.append((stamp, a, b, c))
                month += 1
                if month > 12:
                    year, month = year + 1, 1
                if stamp == last_stamp:
                    break
        self.data = data
        self.yValueAxis.requiredRange = None
        self.yValueAxis.leftAxisPercent = 0
        self.yValueAxis.leftAxisOrigShiftMin = 0
        self.yValueAxis.leftAxisOrigShiftIPC = 0
        for idx, rgb in enumerate((0x0033cc, 0x99c3ff, 0xCC0033)):
            self.lines[idx].strokeColor = colors.toColor(rgb)
def _maxWidth(T, fontName, fontSize):
    '''Return the largest stringWidth among the string(s) in T (0 if none).'''
    # exact type check (not isinstance) kept from the original semantics
    if type(T) not in (tuple, list):
        T = (T,)
    candidates = [t for t in T if t]
    if not candidates:
        return 0
    widths = [stringWidth(t, fontName, fontSize) for t in candidates]
    return max(widths) or 0
class ScatterPlot(LinePlot):
    """A scatter plot widget.

    A LinePlot specialisation: unjoined points with per-series markers,
    whole-axis titles (xLabel/yLabel) and an optional outer border.
    Several layout constants (paddings, tick lengths, stroke widths) are
    fixed inside __init__.
    """
    _attrMap = AttrMap(BASE=LinePlot,
        width = AttrMapValue(isNumber, desc="Width of the area inside the axes"),
        height = AttrMapValue(isNumber, desc="Height of the area inside the axes"),
        outerBorderOn = AttrMapValue(isBoolean, desc="Is there an outer border (continuation of axes)"),
        outerBorderColor = AttrMapValue(isColorOrNone, desc="Color of outer border (if any)"),
        background = AttrMapValue(isColorOrNone, desc="Background color (if any)"),
        labelOffset = AttrMapValue(isNumber, desc="Space between label and Axis (or other labels)",advancedUsage=1),
        axisTickLengths = AttrMapValue(isNumber, desc="Lenth of the ticks on both axes"),
        axisStrokeWidth = AttrMapValue(isNumber, desc="Stroke width for both axes"),
        xLabel = AttrMapValue(isString, desc="Label for the whole X-Axis"),
        yLabel = AttrMapValue(isString, desc="Label for the whole Y-Axis"),
        data = AttrMapValue(isAnything, desc='Data points - a list of x/y tuples.'),
        strokeColor = AttrMapValue(isColorOrNone, desc='Color used for border of plot area.'),
        fillColor = AttrMapValue(isColorOrNone, desc='Color used for background interior of plot area.'),
        leftPadding = AttrMapValue(isNumber, desc='Padding on left of drawing'),
        rightPadding = AttrMapValue(isNumber, desc='Padding on right of drawing'),
        topPadding = AttrMapValue(isNumber, desc='Padding at top of drawing'),
        bottomPadding = AttrMapValue(isNumber, desc='Padding at bottom of drawing'),
        )
    def __init__(self):
        LinePlot.__init__(self)
        self.width = 142
        self.height = 77
        self.outerBorderOn = 1
        self.outerBorderColor = colors.black
        self.background = None
        # local layout constants used to derive the axis settings below
        _labelOffset = 3
        _axisTickLengths = 2
        _axisStrokeWidth = 0.5
        self.yValueAxis.valueMin = None
        self.yValueAxis.valueMax = None
        self.yValueAxis.valueStep = None
        self.yValueAxis.labelTextFormat = '%s'
        # default axis titles (typo "Lable" kept: it is a runtime default
        # that callers/tests may rely on)
        self.xLabel="X Lable"
        self.xValueAxis.labels.fontSize = 6
        self.yLabel="Y Lable"
        self.yValueAxis.labels.fontSize = 6
        # demo data: three series of (x, y) points
        self.data =[((0.030, 62.73),
                     (0.074, 54.363),
                     (1.216, 17.964)),
                    ((1.360, 11.621),
                     (1.387, 50.011),
                     (1.428, 68.953)),
                    ((1.444, 86.888),
                     (1.754, 35.58),
                     (1.766, 36.05))]
        #values for lineplot
        self.joinedLines = 0
        self.fillColor = self.background
        self.leftPadding=5
        self.rightPadding=10
        self.topPadding=5
        self.bottomPadding=5
        # x/y origin leaves room for ticks and labels
        self.x = self.leftPadding+_axisTickLengths+(_labelOffset*2)
        # NOTE(review): valueMax is still None here, so this measures the
        # width of the literal string "None" — confirm this is intended.
        self.x=self.x+_maxWidth(str(self.yValueAxis.valueMax), self.yValueAxis.labels.fontName, self.yValueAxis.labels.fontSize)
        self.y = self.bottomPadding+_axisTickLengths+_labelOffset+self.xValueAxis.labels.fontSize
        self.xValueAxis.labels.dy = -_labelOffset
        self.xValueAxis.tickDown = _axisTickLengths
        self.xValueAxis.strokeWidth = _axisStrokeWidth
        self.xValueAxis.rangeRound='both'
        self.yValueAxis.labels.dx = -_labelOffset
        self.yValueAxis.tickLeft = _axisTickLengths
        self.yValueAxis.strokeWidth = _axisStrokeWidth
        self.yValueAxis.rangeRound='both'
        self.lineLabelFormat="%.2f"
        self.lineLabels.fontSize = 5
        self.lineLabels.boxAnchor = 'e'
        self.lineLabels.dx = -2
        self.lineLabelNudge = 0
        # per-series markers; series 0 uses the shared default symbol
        self.lines.symbol=makeMarker('FilledCircle',size=3)
        self.lines[1].symbol=makeMarker('FilledDiamond',size=3)
        self.lines[2].symbol=makeMarker('FilledSquare',size=3)
        self.lines[2].strokeColor = colors.green
    def _getDrawingDimensions(self):
        # Estimate total drawing size: plot area plus paddings, ticks and
        # worst-case label widths/heights.
        tx = self.leftPadding+self.yValueAxis.tickLeft+(self.yValueAxis.labels.dx*2)+self.xValueAxis.labels.fontSize
        # NOTE(review): the factor 5 and str(valueMax) (possibly "None")
        # look heuristic — confirm against actual label rendering.
        tx=tx+(5*_maxWidth(str(self.yValueAxis.valueMax), self.yValueAxis.labels.fontName, self.yValueAxis.labels.fontSize))
        tx=tx+self.width+self.rightPadding
        t=('%.2f%%'%self.xValueAxis.valueMax)
        tx=tx+(_maxWidth(t, self.yValueAxis.labels.fontName, self.yValueAxis.labels.fontSize))
        ty = self.bottomPadding+self.xValueAxis.tickDown+(self.xValueAxis.labels.dy*2)+(self.xValueAxis.labels.fontSize*2)
        ty=ty+self.yValueAxis.labels.fontSize+self.height+self.topPadding
        #print (tx, ty)
        return (tx,ty)
    def demo(self,drawing=None):
        """Return a demo drawing; creates one sized to fit if none given."""
        if not drawing:
            tx,ty=self._getDrawingDimensions()
            drawing = Drawing(tx,ty)
        drawing.add(self.draw())
        return drawing
    def draw(self):
        # NOTE(review): `ascent` is computed below but never used in this
        # method — candidate for removal.
        ascent=getFont(self.xValueAxis.labels.fontName).face.ascent
        if ascent==0:
            ascent=0.718 # default (from helvetica)
        ascent=ascent*self.xValueAxis.labels.fontSize # normalize
        #basic LinePlot - does the Axes, Ticks etc
        lp = LinePlot.draw(self)
        xLabel = self.xLabel
        if xLabel: #Overall label for the X-axis
            xl=Label()
            xl.x = (self.x+self.width)/2.0
            xl.y = 0
            xl.fontName = self.xValueAxis.labels.fontName
            xl.fontSize = self.xValueAxis.labels.fontSize
            xl.setText(xLabel)
            lp.add(xl)
        yLabel = self.yLabel
        if yLabel: #Overall label for the Y-axis
            yl=Label()
            yl.angle = 90
            yl.x = 0
            yl.y = (self.y+self.height/2.0)
            yl.fontName = self.yValueAxis.labels.fontName
            yl.fontSize = self.yValueAxis.labels.fontSize
            yl.setText(yLabel)
            lp.add(yl)
        # do a bounding box - in the same style as the axes
        if self.outerBorderOn:
            lp.add(Rect(self.x, self.y, self.width, self.height,
                       strokeColor = self.outerBorderColor,
                       strokeWidth = self.yValueAxis.strokeWidth,
                       fillColor = None))
        lp.shift(self.leftPadding, self.bottomPadding)
        return lp
def sample1a():
    "A line plot with non-equidistant points in x-axis."
    d = Drawing(400, 200)
    series = [
        ((1, 1), (2, 2), (2.5, 1), (3, 3), (4, 5)),
        ((1, 2), (2, 3), (2.5, 2), (3.5, 5), (4, 6)),
    ]
    plot = LinePlot()
    plot.x = 50
    plot.y = 50
    plot.height = 125
    plot.width = 300
    plot.data = series
    plot.joinedLines = 1
    plot.strokeColor = colors.black
    # same marker for all series, thicker stroke for the second one
    plot.lines.symbol = makeMarker('UK_Flag')
    plot.lines[0].strokeWidth = 2
    plot.lines[1].strokeWidth = 4
    plot.xValueAxis.valueMin = 0
    plot.xValueAxis.valueMax = 5
    plot.xValueAxis.valueStep = 1
    plot.yValueAxis.valueMin = 0
    plot.yValueAxis.valueMax = 7
    plot.yValueAxis.valueStep = 1
    d.add(plot)
    return d
def sample1b():
    "A line plot with non-equidistant points in x-axis."
    d = Drawing(400, 200)
    series = [
        ((1, 1), (2, 2), (2.5, 1), (3, 3), (4, 5)),
        ((1, 2), (2, 3), (2.5, 2), (3.5, 5), (4, 6)),
    ]
    plot = LinePlot()
    plot.x = 50
    plot.y = 50
    plot.height = 125
    plot.width = 300
    plot.data = series
    plot.joinedLines = 1
    plot.lines.symbol = makeMarker('Circle')
    plot.lineLabelFormat = '%2.0f'
    plot.strokeColor = colors.black
    # x axis: explicit tick positions matching the data points
    plot.xValueAxis.valueMin = 0
    plot.xValueAxis.valueMax = 5
    plot.xValueAxis.valueSteps = [1, 2, 2.5, 3, 4, 5]
    plot.xValueAxis.labelTextFormat = '%2.1f'
    # y axis: regular ticks
    plot.yValueAxis.valueMin = 0
    plot.yValueAxis.valueMax = 7
    plot.yValueAxis.valueStep = 1
    d.add(plot)
    return d
def sample1c():
    "A line plot with non-equidistant points in x-axis."
    d = Drawing(400, 200)
    series = [
        ((1, 1), (2, 2), (2.5, 1), (3, 3), (4, 5)),
        ((1, 2), (2, 3), (2.5, 2), (3.5, 5), (4, 6)),
    ]
    plot = LinePlot()
    plot.x = 50
    plot.y = 50
    plot.height = 125
    plot.width = 300
    plot.data = series
    plot.joinedLines = 1
    # distinct markers per series
    plot.lines[0].symbol = makeMarker('FilledCircle')
    plot.lines[1].symbol = makeMarker('Circle')
    plot.lineLabelFormat = '%2.0f'
    plot.strokeColor = colors.black
    # explicit, non-uniform tick positions on both axes
    plot.xValueAxis.valueMin = 0
    plot.xValueAxis.valueMax = 5
    plot.xValueAxis.valueSteps = [1, 2, 2.5, 3, 4, 5]
    plot.xValueAxis.labelTextFormat = '%2.1f'
    plot.yValueAxis.valueMin = 0
    plot.yValueAxis.valueMax = 7
    plot.yValueAxis.valueSteps = [1, 2, 3, 5, 6]
    d.add(plot)
    return d
def preprocessData(series):
    "Convert date strings into seconds and multiply values by 100."
    return [(str2seconds(point[0]), point[1] * 100) for point in series]
def sample2():
    "A line plot with non-equidistant points in x-axis."
    drawing = Drawing(400, 200)
    # (date-string, index-value) pairs; preprocessData() below converts the
    # dates to epoch seconds and scales the values by 100.
    data = [
            (('25/11/1991',1),
             ('30/11/1991',1.000933333),
             ('31/12/1991',1.0062),
             ('31/01/1992',1.0112),
             ('29/02/1992',1.0158),
             ('31/03/1992',1.020733333),
             ('30/04/1992',1.026133333),
             ('31/05/1992',1.030266667),
             ('30/06/1992',1.034466667),
             ('31/07/1992',1.038733333),
             ('31/08/1992',1.0422),
             ('30/09/1992',1.045533333),
             ('31/10/1992',1.049866667),
             ('30/11/1992',1.054733333),
             ('31/12/1992',1.061),
             ),
            ]
    data[0] = preprocessData(data[0])
    lp = LinePlot()
    lp.x = 50
    lp.y = 50
    lp.height = 125
    lp.width = 300
    lp.data = data
    lp.joinedLines = 1
    lp.lines.symbol = makeMarker('FilledDiamond')
    lp.strokeColor = colors.black
    # x ticks at selected month-end dates, expressed in epoch seconds
    start = mktime(mkTimeTuple('25/11/1991'))
    t0 = mktime(mkTimeTuple('30/11/1991'))
    t1 = mktime(mkTimeTuple('31/12/1991'))
    t2 = mktime(mkTimeTuple('31/03/1992'))
    t3 = mktime(mkTimeTuple('30/06/1992'))
    t4 = mktime(mkTimeTuple('30/09/1992'))
    end = mktime(mkTimeTuple('31/12/1992'))
    lp.xValueAxis.valueMin = start
    lp.xValueAxis.valueMax = end
    lp.xValueAxis.valueSteps = [start, t0, t1, t2, t3, t4, end]
    lp.xValueAxis.labelTextFormat = seconds2str
    # stagger two early tick labels downward so they do not overlap
    lp.xValueAxis.labels[1].dy = -20
    lp.xValueAxis.labels[2].dy = -35
    lp.yValueAxis.labelTextFormat = '%4.2f'
    lp.yValueAxis.valueMin = 100
    lp.yValueAxis.valueMax = 110
    lp.yValueAxis.valueStep = 2
    drawing.add(lp)
    return drawing
| bsd-3-clause |
ddico/odoomrp-wip | mrp_bom_sale_pack/models/mrp_bom_sale_pack.py | 16 | 4558 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api, _
class MrpBomSaleOrder(models.Model):
    """Link between a sale order line and one BoM line of the (phantom)
    bill of materials attached to the ordered product."""
    _name = 'mrp.bom.sale.order'
    # BoM component line this record mirrors.
    bom_line = fields.Many2one(comodel_name='mrp.bom.line')
    # Product and UoM are read through the BoM line (related fields).
    product_id = fields.Many2one(related='bom_line.product_id',
                                 string='Product')
    product_uom = fields.Many2one(related='bom_line.product_uom',
                                  string='Unit of Measure ')
    # Component quantity required for the ordered quantity.
    product_uom_qty = fields.Float(string='Quantity (UoS)')
    # Sale order line owning this record.
    sale_order = fields.Many2one(comodel_name='sale.order.line')
class SaleOrderLine(models.Model):
    _inherit = 'sale.order.line'
    mrp_boms = fields.One2many(comodel_name='mrp.bom.sale.order',
                               inverse_name='sale_order', string='Mrp BoM',
                               copy=True)

    @api.one
    @api.depends('product_uom_qty', 'product_id')
    def _calc_stock(self):
        """Compute the pack's available stock.

        For every component the stock expressed in "number of packs" is
        the component stock divided by the component quantity per pack;
        the pack stock is the minimum over all components (0 if there are
        no components).
        """
        for line in self:
            v_stock = []
            r_stock = []
            for oline in line.mrp_boms:
                qty = oline.product_uom_qty
                if not qty:
                    # Skip zero-quantity components instead of raising
                    # ZeroDivisionError (bug in the previous version).
                    continue
                v_stock.append(
                    oline.bom_line.product_id.virtual_available / qty)
                r_stock.append(oline.bom_line.product_id.qty_available / qty)
            line.virtual_stock = min(v_stock or [0])
            line.real_stock = min(r_stock or [0])

    virtual_stock = fields.Float(string='Virtual stock',
                                 compute='_calc_stock')
    real_stock = fields.Float(string='Stock', compute='_calc_stock')

    @api.multi
    def product_id_change(
            self, pricelist, product, qty=0, uom=False, qty_uos=0,
            uos=False, name='', partner_id=False, lang=False, update_tax=True,
            date_order=False, packaging=False, fiscal_position=False,
            flag=False):
        """Extend the standard onchange: pre-load the phantom-BoM
        components of the selected product into ``mrp_boms``.

        :returns: the standard onchange dict with ``mrp_boms`` added to
            its ``value`` key (a list of (0, 0, vals) commands).
        """
        res = super(SaleOrderLine, self).product_id_change(
            pricelist, product, qty=qty, uom=uom, qty_uos=qty_uos, uos=uos,
            name=name, partner_id=partner_id, lang=lang, update_tax=update_tax,
            date_order=date_order, packaging=packaging,
            fiscal_position=fiscal_position, flag=flag)
        product_id = self.env['product.product'].search(
            [('id', '=', product)])
        # limit=1 keeps the .bom_line_ids access below a singleton even
        # when several phantom BoMs exist for the same product template
        # (without it a multi-record result would raise).
        mrp_bom = self.env['mrp.bom'].search(
            [('product_tmpl_id', '=', product_id.product_tmpl_id.id),
             ('type', '=', 'phantom')], limit=1)
        order_lines = []
        for line in mrp_bom.bom_line_ids:
            order_lines.append({
                'bom_line': line.id,
                # component quantity scaled by the ordered quantity
                'product_uom_qty': line.product_qty * qty,
            })
        res['value'].update(
            {'mrp_boms': [(0, 0, oline) for oline in order_lines]})
        return res
class StockMove(models.Model):
    _inherit = 'stock.move'
    @api.model
    def _action_explode(self, move):
        """Explode a move via the standard mechanism (super) and, for
        products that have a phantom BoM, record the originating
        procurement's name in each resulting move's ``note``."""
        res = super(StockMove, self)._action_explode(move)
        bom_obj = self.env['mrp.bom']
        product_obj = self.env['product.product']
        for new_move in self.env['stock.move'].browse(res):
            # NOTE(review): this search-by-id appears equivalent to using
            # new_move.procurement_id.product_id directly — confirm.
            product = product_obj.search([(
                'id', '=', new_move.procurement_id.product_id.id)])
            if bom_obj.search([
                    '&', ('product_tmpl_id', '=', product.product_tmpl_id.id),
                    ('type', '=', 'phantom')]):
                new_move.note = (
                    _('Product: "%s" \n') % (new_move.procurement_id.name))
        return res
    @api.multi
    def _picking_assign(self, procurement_group, location_from,
                        location_to):
        """After standard picking assignment, append each annotated move's
        ``note`` (set in _action_explode) to its picking's note; each
        procurement is processed at most once via the ``notes`` list."""
        res = super(StockMove, self)._picking_assign(
            procurement_group, location_from, location_to)
        pick_obj = self.env['stock.picking']
        notes = []
        for move in self:
            for procurement in move.procurement_id:
                if procurement not in notes and move.note:
                    notes.append(procurement)
                    # NOTE(review): search-by-id looks equivalent to using
                    # move.picking_id directly — confirm before simplifying.
                    picking = pick_obj.search(
                        [('id', '=', move.picking_id.id)])
                    picking.note = (picking.note or '') + move.note
        return res
| agpl-3.0 |
OshynSong/scikit-learn | sklearn/utils/tests/test_fixes.py | 281 | 1829 | # Authors: Gael Varoquaux <gael.varoquaux@normalesup.org>
# Justin Vincent
# Lars Buitinck
# License: BSD 3 clause
import numpy as np
from nose.tools import assert_equal
from nose.tools import assert_false
from nose.tools import assert_true
from numpy.testing import (assert_almost_equal,
assert_array_almost_equal)
from sklearn.utils.fixes import divide, expit
from sklearn.utils.fixes import astype
def test_expit():
    # expit (logistic function) must be numerically stable at extreme
    # arguments; mirrors the former Cython implementation, see
    # http://fa.bianp.net/blog/2013/numerical-optimizers-for-logistic-regression
    for arg in (1000., -1000.):
        expected = np.exp(min(arg, 0.)) / (1. + np.exp(-abs(arg)))
        assert_almost_equal(expit(arg), expected, decimal=16)

    # The out= variant must agree with the plain call.
    values = np.arange(10)
    buf = np.zeros_like(values, dtype=np.float32)
    assert_array_almost_equal(expit(values), expit(values, out=buf))
def test_divide():
    # divide() must perform true (float) division with an int denominator.
    quotient = divide(.6, 1)
    assert_equal(quotient, .600000000000)
def test_astype_copy_memory():
    ints = np.ones(3, np.int32)

    # dtype conversion works, and forces a copy even with copy=False
    as_floats = astype(ints, dtype=np.float32, copy=False)
    assert_equal(as_floats.dtype, np.float32)
    assert_false(np.may_share_memory(as_floats, ints))

    # matching dtype with copy=False returns the very same array
    same = astype(ints, dtype=np.int32, copy=False)
    assert_true(same is ints)

    # an explicit copy=True always copies...
    forced = astype(ints, dtype=np.int32, copy=True)
    assert_false(np.may_share_memory(forced, ints))

    # ...and copying is the default behaviour
    default = astype(ints, dtype=np.int32)
    assert_false(np.may_share_memory(default, ints))
| bsd-3-clause |
TinajaLabs/tinajagate | downloads/pyserial-2.5/serial/rfc2217.py | 10 | 58059 | #! python
#
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# see __init__.py
#
# This module implements a RFC2217 compatible client. RF2217 descibes a
# protocol to access serial ports over TCP/IP and allows setting the baud rate,
# modem control lines etc.
#
# (C) 2001-2009 Chris Liechti <cliechti@gmx.net>
# this is distributed under a free software license, see license.txt
# TODO:
# - setting control line -> answer is not checked (had problems with one of the
# severs). consider implementing a compatibility mode flag to make check
# conditional
# - write timeout not implemented at all
##############################################################################
# observations and issues with servers
#=============================================================================
# sredird V2.2.1
# - http://www.ibiblio.org/pub/Linux/system/serial/ sredird-2.2.2.tar.gz
# - does not acknowledge SET_CONTROL (RTS/DTR) correctly, always responding
# [105 1] instead of the actual value.
# - SET_BAUDRATE answer contains 4 extra null bytes -> probably for larger
# numbers than 2**32?
# - To get the signature [COM_PORT_OPTION 0] has to be sent.
# - run a server: while true; do nc -l -p 7000 -c "sredird debug /dev/ttyUSB0 /var/lock/sredir"; done
#=============================================================================
# telnetcpcd (untested)
# - http://ftp.wayne.edu/kermit/sredird/telnetcpcd-1.09.tar.gz
# - To get the signature [COM_PORT_OPTION] w/o data has to be sent.
#=============================================================================
# ser2net
# - does not negotiate BINARY or COM_PORT_OPTION for his side but at least
# acknowledges that the client activates these options
# - The configuration may be that the server prints a banner. As this client
# implementation does a flushInput on connect, this banner is hidden from
# the user application.
# - NOTIFY_MODEMSTATE: the poll interval of the server seems to be one
# second.
# - To get the signature [COM_PORT_OPTION 0] has to be sent.
# - run a server: run ser2net daemon, in /etc/ser2net.conf:
# 2000:telnet:0:/dev/ttyS0:9600 remctl banner
##############################################################################
# How to identify ports? pySerial might want to support other protocols in the
# future, so lets use an URL scheme.
# for RFC2217 compliant servers we will use this:
# rfc2217://<host>:<port>[/option[/option...]]
#
# options:
# - "debug" print diagnostic messages
# - "ign_set_control": do not look at the answers to SET_CONTROL
# - "poll_modem": issue NOTIFY_MODEMSTATE requests when CTS/DTR/RI/CD is read.
# Without this option it expects that the server sends notifications
# automatically on change (which most servers do and is according to the
# RFC).
# the order of the options is not relevant
from serialutil import *
import time
import struct
import socket
import threading
import Queue
import logging
# port string is expected to be something like this:
# rfc2217://host:port
# host may be an IP or including domain, whatever.
# port is 0...65535
# map log level names to constants. used in fromURL()
LOGGER_LEVELS = {
'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
}
# telnet protocol characters
IAC = to_bytes([255]) # Interpret As Command
DONT = to_bytes([254])
DO = to_bytes([253])
WONT = to_bytes([252])
WILL = to_bytes([251])
IAC_DOUBLED = to_bytes([IAC, IAC])
SE = to_bytes([240]) # Subnegotiation End
NOP = to_bytes([241]) # No Operation
DM = to_bytes([242]) # Data Mark
BRK = to_bytes([243]) # Break
IP = to_bytes([244]) # Interrupt process
AO = to_bytes([245]) # Abort output
AYT = to_bytes([246]) # Are You There
EC = to_bytes([247]) # Erase Character
EL = to_bytes([248]) # Erase Line
GA = to_bytes([249]) # Go Ahead
SB = to_bytes([250]) # Subnegotiation Begin
# selected telnet options
BINARY = to_bytes([0]) # 8-bit data path
ECHO = to_bytes([1]) # echo
SGA = to_bytes([3]) # suppress go ahead
# RFC2217
COM_PORT_OPTION = to_bytes([44])
# Client to Access Server
SET_BAUDRATE = to_bytes([1])
SET_DATASIZE = to_bytes([2])
SET_PARITY = to_bytes([3])
SET_STOPSIZE = to_bytes([4])
SET_CONTROL = to_bytes([5])
NOTIFY_LINESTATE = to_bytes([6])
NOTIFY_MODEMSTATE = to_bytes([7])
FLOWCONTROL_SUSPEND = to_bytes([8])
FLOWCONTROL_RESUME = to_bytes([9])
SET_LINESTATE_MASK = to_bytes([10])
SET_MODEMSTATE_MASK = to_bytes([11])
PURGE_DATA = to_bytes([12])
SERVER_SET_BAUDRATE = to_bytes([101])
SERVER_SET_DATASIZE = to_bytes([102])
SERVER_SET_PARITY = to_bytes([103])
SERVER_SET_STOPSIZE = to_bytes([104])
SERVER_SET_CONTROL = to_bytes([105])
SERVER_NOTIFY_LINESTATE = to_bytes([106])
SERVER_NOTIFY_MODEMSTATE = to_bytes([107])
SERVER_FLOWCONTROL_SUSPEND = to_bytes([108])
SERVER_FLOWCONTROL_RESUME = to_bytes([109])
SERVER_SET_LINESTATE_MASK = to_bytes([110])
SERVER_SET_MODEMSTATE_MASK = to_bytes([111])
SERVER_PURGE_DATA = to_bytes([112])
RFC2217_ANSWER_MAP = {
SET_BAUDRATE: SERVER_SET_BAUDRATE,
SET_DATASIZE: SERVER_SET_DATASIZE,
SET_PARITY: SERVER_SET_PARITY,
SET_STOPSIZE: SERVER_SET_STOPSIZE,
SET_CONTROL: SERVER_SET_CONTROL,
NOTIFY_LINESTATE: SERVER_NOTIFY_LINESTATE,
NOTIFY_MODEMSTATE: SERVER_NOTIFY_MODEMSTATE,
FLOWCONTROL_SUSPEND: SERVER_FLOWCONTROL_SUSPEND,
FLOWCONTROL_RESUME: SERVER_FLOWCONTROL_RESUME,
SET_LINESTATE_MASK: SERVER_SET_LINESTATE_MASK,
SET_MODEMSTATE_MASK: SERVER_SET_MODEMSTATE_MASK,
PURGE_DATA: SERVER_PURGE_DATA,
}
SET_CONTROL_REQ_FLOW_SETTING = to_bytes([0]) # Request Com Port Flow Control Setting (outbound/both)
SET_CONTROL_USE_NO_FLOW_CONTROL = to_bytes([1]) # Use No Flow Control (outbound/both)
SET_CONTROL_USE_SW_FLOW_CONTROL = to_bytes([2]) # Use XON/XOFF Flow Control (outbound/both)
SET_CONTROL_USE_HW_FLOW_CONTROL = to_bytes([3]) # Use HARDWARE Flow Control (outbound/both)
SET_CONTROL_REQ_BREAK_STATE = to_bytes([4]) # Request BREAK State
SET_CONTROL_BREAK_ON = to_bytes([5]) # Set BREAK State ON
SET_CONTROL_BREAK_OFF = to_bytes([6]) # Set BREAK State OFF
SET_CONTROL_REQ_DTR = to_bytes([7]) # Request DTR Signal State
SET_CONTROL_DTR_ON = to_bytes([8]) # Set DTR Signal State ON
SET_CONTROL_DTR_OFF = to_bytes([9]) # Set DTR Signal State OFF
SET_CONTROL_REQ_RTS = to_bytes([10]) # Request RTS Signal State
SET_CONTROL_RTS_ON = to_bytes([11]) # Set RTS Signal State ON
SET_CONTROL_RTS_OFF = to_bytes([12]) # Set RTS Signal State OFF
SET_CONTROL_REQ_FLOW_SETTING_IN = to_bytes([13]) # Request Com Port Flow Control Setting (inbound)
SET_CONTROL_USE_NO_FLOW_CONTROL_IN = to_bytes([14]) # Use No Flow Control (inbound)
SET_CONTROL_USE_SW_FLOW_CONTOL_IN = to_bytes([15]) # Use XON/XOFF Flow Control (inbound)
SET_CONTROL_USE_HW_FLOW_CONTOL_IN = to_bytes([16]) # Use HARDWARE Flow Control (inbound)
SET_CONTROL_USE_DCD_FLOW_CONTROL = to_bytes([17]) # Use DCD Flow Control (outbound/both)
SET_CONTROL_USE_DTR_FLOW_CONTROL = to_bytes([18]) # Use DTR Flow Control (inbound)
SET_CONTROL_USE_DSR_FLOW_CONTROL = to_bytes([19]) # Use DSR Flow Control (outbound/both)
LINESTATE_MASK_TIMEOUT = 128 # Time-out Error
LINESTATE_MASK_SHIFTREG_EMPTY = 64 # Transfer Shift Register Empty
LINESTATE_MASK_TRANSREG_EMPTY = 32 # Transfer Holding Register Empty
LINESTATE_MASK_BREAK_DETECT = 16 # Break-detect Error
LINESTATE_MASK_FRAMING_ERROR = 8 # Framing Error
LINESTATE_MASK_PARTIY_ERROR = 4 # Parity Error
LINESTATE_MASK_OVERRUN_ERROR = 2 # Overrun Error
LINESTATE_MASK_DATA_READY = 1 # Data Ready
MODEMSTATE_MASK_CD = 128 # Receive Line Signal Detect (also known as Carrier Detect)
MODEMSTATE_MASK_RI = 64 # Ring Indicator
MODEMSTATE_MASK_DSR = 32 # Data-Set-Ready Signal State
MODEMSTATE_MASK_CTS = 16 # Clear-To-Send Signal State
MODEMSTATE_MASK_CD_CHANGE = 8 # Delta Receive Line Signal Detect
MODEMSTATE_MASK_RI_CHANGE = 4 # Trailing-edge Ring Detector
MODEMSTATE_MASK_DSR_CHANGE = 2 # Delta Data-Set-Ready
MODEMSTATE_MASK_CTS_CHANGE = 1 # Delta Clear-To-Send
PURGE_RECEIVE_BUFFER = to_bytes([1]) # Purge access server receive data buffer
PURGE_TRANSMIT_BUFFER = to_bytes([2]) # Purge access server transmit data buffer
PURGE_BOTH_BUFFERS = to_bytes([3]) # Purge both the access server receive data buffer and the access server transmit data buffer
RFC2217_PARITY_MAP = {
PARITY_NONE: 1,
PARITY_ODD: 2,
PARITY_EVEN: 3,
PARITY_MARK: 4,
PARITY_SPACE: 5,
}
RFC2217_REVERSE_PARITY_MAP = dict((v,k) for k,v in RFC2217_PARITY_MAP.items())
RFC2217_STOPBIT_MAP = {
STOPBITS_ONE: 1,
STOPBITS_ONE_POINT_FIVE: 3,
STOPBITS_TWO: 2,
}
RFC2217_REVERSE_STOPBIT_MAP = dict((v,k) for k,v in RFC2217_STOPBIT_MAP.items())
# Telnet filter states
M_NORMAL = 0
M_IAC_SEEN = 1
M_NEGOTIATE = 2
# TelnetOption and TelnetSubnegotiation states
REQUESTED = 'REQUESTED'
ACTIVE = 'ACTIVE'
INACTIVE = 'INACTIVE'
REALLY_INACTIVE = 'REALLY_INACTIVE'
class TelnetOption(object):
    """Track the negotiation state (DO/DONT/WILL/WONT) of a single telnet
    option and send the appropriate answers."""
    def __init__(self, connection, name, option, send_yes, send_no, ack_yes, ack_no, initial_state, activation_callback=None):
        """Init option.
        :param connection: connection used to transmit answers
        :param name: a readable name for debug outputs
        :param send_yes: what to send when option is to be enabled.
        :param send_no: what to send when option is to be disabled.
        :param ack_yes: what to expect when remote agrees on option.
        :param ack_no: what to expect when remote disagrees on option.
        :param initial_state: options initialized with REQUESTED are tried to
            be enabled on startup. use INACTIVE for all others.
        """
        self.connection = connection
        self.name = name
        self.option = option
        self.send_yes = send_yes
        self.send_no = send_no
        self.ack_yes = ack_yes
        self.ack_no = ack_no
        self.state = initial_state
        self.active = False
        self.activation_callback = activation_callback

    def __repr__(self):
        """String for debug outputs"""
        return "%s:%s(%s)" % (self.name, self.active, self.state)

    def process_incoming(self, command):
        """A DO/DONT/WILL/WONT was received for this option, update state and
        answer when needed."""
        if command == self.ack_yes:
            self._remote_agreed()
        elif command == self.ack_no:
            self._remote_refused()

    def _activate(self):
        """Mark the option active and fire the optional callback."""
        self.active = True
        if self.activation_callback is not None:
            self.activation_callback()

    def _remote_agreed(self):
        """The remote acknowledged the option (ack_yes received)."""
        if self.state is REQUESTED:
            self.state = ACTIVE
            self._activate()
        elif self.state is ACTIVE:
            pass                            # already enabled, nothing to do
        elif self.state is INACTIVE:
            # remote initiated the option: acknowledge and switch on
            self.state = ACTIVE
            self.connection.telnetSendOption(self.send_yes, self.option)
            self._activate()
        elif self.state is REALLY_INACTIVE:
            # permanently refused: repeat the refusal to the remote
            self.connection.telnetSendOption(self.send_no, self.option)
        else:
            raise ValueError('option in illegal state %r' % self)

    def _remote_refused(self):
        """The remote refused the option (ack_no received)."""
        if self.state is REQUESTED:
            self.state = INACTIVE
            self.active = False
        elif self.state is ACTIVE:
            self.state = INACTIVE
            self.connection.telnetSendOption(self.send_no, self.option)
            self.active = False
        elif self.state is INACTIVE or self.state is REALLY_INACTIVE:
            pass                            # already disabled
        else:
            raise ValueError('option in illegal state %r' % self)
class TelnetSubnegotiation(object):
    """A object to handle subnegotiation of options. In this case actually
    sub-sub options for RFC 2217. It is used to track com port options."""
    def __init__(self, connection, name, option, ack_option=None):
        # ack_option: suboption code expected in the server's answer;
        # defaults to the request's own code.
        if ack_option is None: ack_option = option
        self.connection = connection
        self.name = name
        self.option = option
        self.value = None
        self.ack_option = ack_option
        self.state = INACTIVE
    def __repr__(self):
        """String for debug outputs."""
        return "%s:%s" % (self.name, self.state)
    def set(self, value):
        """request a change of the value. a request is sent to the server. if
        the client needs to know if the change is performed he has to check the
        state of this object."""
        self.value = value
        self.state = REQUESTED
        self.connection.rfc2217SendSubnegotiation(self.option, self.value)
        if self.connection.logger:
            self.connection.logger.debug("SB Requesting %s -> %r" % (self.name, self.value))
    def isReady(self):
        """check if answer from server has been received. when server rejects
        the change, raise a ValueError."""
        if self.state == REALLY_INACTIVE:
            raise ValueError("remote rejected value for option %r" % (self.name))
        return self.state == ACTIVE
    # add property to have a similar interface as TelnetOption
    active = property(isReady)
    def wait(self, timeout=3):
        """wait until the subnegotiation has been acknowledged or timeout. It
        can also throw a value error when the answer from the server does not
        match the value sent."""
        timeout_time = time.time() + timeout
        while time.time() < timeout_time:
            time.sleep(0.05)    # poll ~20x/s; prevents 100% CPU load
            if self.isReady():
                break
        else:
            raise SerialException("timeout while waiting for option %r" % (self.name))
    def checkAnswer(self, suboption):
        """check an incoming subnegotiation block. the parameter already has
        cut off the header like sub option number and com port option value."""
        # Prefix comparison: the server must echo back at least the value we
        # sent; extra trailing bytes (some servers append nulls) are ignored.
        if self.value == suboption[:len(self.value)]:
            self.state = ACTIVE
        else:
            # error propagation done in isReady
            self.state = REALLY_INACTIVE
        if self.connection.logger:
            self.connection.logger.debug("SB Answer %s -> %r -> %s" % (self.name, suboption, self.state))
class RFC2217Serial(SerialBase):
"""Serial port implementation for RFC 2217 remote serial ports."""
BAUDRATES = (50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800, 2400, 4800,
9600, 19200, 38400, 57600, 115200)
def open(self):
"""Open port with current settings. This may throw a SerialException
if the port cannot be opened."""
self.logger = None
self._ignore_set_control_answer = False
self._poll_modem_state = False
self._network_timeout = 3
if self._port is None:
raise SerialException("Port must be configured before it can be used.")
try:
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.connect(self.fromURL(self.portstr))
self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
except Exception, msg:
self._socket = None
raise SerialException("Could not open port %s: %s" % (self.portstr, msg))
self._socket.settimeout(5) # XXX good value?
# use a thread save queue as buffer. it also simplifies implementing
# the read timeout
self._read_buffer = Queue.Queue()
# to ensure that user writes does not interfere with internal
# telnet/rfc2217 options establish a lock
self._write_lock = threading.Lock()
# name the following separately so that, below, a check can be easily done
mandadory_options = [
TelnetOption(self, 'we-BINARY', BINARY, WILL, WONT, DO, DONT, INACTIVE),
TelnetOption(self, 'we-RFC2217', COM_PORT_OPTION, WILL, WONT, DO, DONT, REQUESTED),
]
# all supported telnet options
self._telnet_options = [
TelnetOption(self, 'ECHO', ECHO, DO, DONT, WILL, WONT, REQUESTED),
TelnetOption(self, 'we-SGA', SGA, WILL, WONT, DO, DONT, REQUESTED),
TelnetOption(self, 'they-SGA', SGA, DO, DONT, WILL, WONT, REQUESTED),
TelnetOption(self, 'they-BINARY', BINARY, DO, DONT, WILL, WONT, INACTIVE),
TelnetOption(self, 'they-RFC2217', COM_PORT_OPTION, DO, DONT, WILL, WONT, REQUESTED),
] + mandadory_options
# RFC 2217 specific states
# COM port settings
self._rfc2217_port_settings = {
'baudrate': TelnetSubnegotiation(self, 'baudrate', SET_BAUDRATE, SERVER_SET_BAUDRATE),
'datasize': TelnetSubnegotiation(self, 'datasize', SET_DATASIZE, SERVER_SET_DATASIZE),
'parity': TelnetSubnegotiation(self, 'parity', SET_PARITY, SERVER_SET_PARITY),
'stopsize': TelnetSubnegotiation(self, 'stopsize', SET_STOPSIZE, SERVER_SET_STOPSIZE),
}
# There are more subnegotiation object, combine all in one dictionary
# for easy access
self._rfc2217_options = {
'purge': TelnetSubnegotiation(self, 'purge', PURGE_DATA, SERVER_PURGE_DATA),
'control': TelnetSubnegotiation(self, 'control', SET_CONTROL, SERVER_SET_CONTROL),
}
self._rfc2217_options.update(self._rfc2217_port_settings)
# cache for line and modem states that the server sends to us
self._linestate = 0
self._modemstate = None
self._modemstate_expires = 0
# RFC 2217 flow control between server and client
self._remote_suspend_flow = False
self._thread = threading.Thread(target=self._telnetReadLoop)
self._thread.setDaemon(True)
self._thread.setName('pySerial RFC 2217 reader thread for %s' % (self._port,))
self._thread.start()
# negotiate Telnet/RFC 2217 -> send initial requests
for option in self._telnet_options:
if option.state is REQUESTED:
self.telnetSendOption(option.send_yes, option.option)
# now wait until important options are negotiated
timeout_time = time.time() + self._network_timeout
while time.time() < timeout_time:
time.sleep(0.05) # prevent 100% CPU load
if sum(o.active for o in mandadory_options) == len(mandadory_options):
break
else:
raise SerialException("Remote does not seem to support RFC2217 or BINARY mode %r" % mandadory_options)
if self.logger:
self.logger.info("Negotiated options: %s" % self._telnet_options)
# fine, go on, set RFC 2271 specific things
self._reconfigurePort()
# all things set up get, now a clean start
self._isOpen = True
if not self._rtscts:
self.setRTS(True)
self.setDTR(True)
self.flushInput()
self.flushOutput()
def _reconfigurePort(self):
"""Set communication parameters on opened port."""
if self._socket is None:
raise SerialException("Can only operate on open ports")
# if self._timeout != 0 and self._interCharTimeout is not None:
# XXX
if self._writeTimeout is not None:
raise NotImplementedError('writeTimeout is currently not supported')
# XXX
# Setup the connection
# to get good performance, all parameter changes are sent first...
if not isinstance(self._baudrate, (int, long)) or not 0 < self._baudrate < 2**32:
raise ValueError("invalid baudrate: %r" % (self._baudrate))
self._rfc2217_port_settings['baudrate'].set(struct.pack('!I', self._baudrate))
self._rfc2217_port_settings['datasize'].set(struct.pack('!B', self._bytesize))
self._rfc2217_port_settings['parity'].set(struct.pack('!B', RFC2217_PARITY_MAP[self._parity]))
self._rfc2217_port_settings['stopsize'].set(struct.pack('!B', RFC2217_STOPBIT_MAP[self._stopbits]))
# and now wait until parameters are active
items = self._rfc2217_port_settings.values()
if self.logger:
self.logger.debug("Negotiating settings: %s" % (items,))
timeout_time = time.time() + self._network_timeout
while time.time() < timeout_time:
time.sleep(0.05) # prevent 100% CPU load
if sum(o.active for o in items) == len(items):
break
else:
raise SerialException("Remote does not accept parameter change (RFC2217): %r" % items)
if self.logger:
self.logger.info("Negotiated settings: %s" % (items,))
if self._rtscts and self._xonxoff:
raise ValueError('xonxoff and rtscts together are not supported')
elif self._rtscts:
self.rfc2217SetControl(SET_CONTROL_USE_HW_FLOW_CONTROL)
elif self._xonxoff:
self.rfc2217SetControl(SET_CONTROL_USE_SW_FLOW_CONTROL)
else:
self.rfc2217SetControl(SET_CONTROL_USE_NO_FLOW_CONTROL)
def close(self):
"""Close port"""
if self._isOpen:
if self._socket:
try:
self._socket.shutdown(socket.SHUT_RDWR)
self._socket.close()
except:
# ignore errors.
pass
self._socket = None
if self._thread:
self._thread.join()
self._isOpen = False
# in case of quick reconnects, give the server some time
time.sleep(0.3)
def makeDeviceName(self, port):
raise SerialException("there is no sensible way to turn numbers into URLs")
def fromURL(self, url):
"""extract host and port from an URL string"""
if url.lower().startswith("rfc2217://"): url = url[10:]
try:
# is there a "path" (our options)?
if '/' in url:
# cut away options
url, options = url.split('/', 1)
# process options now, directly altering self
for option in options.split('/'):
if '=' in option:
option, value = option.split('=', 1)
else:
value = None
if option == 'logging':
logging.basicConfig() # XXX is that good to call it here?
self.logger = logging.getLogger('pySerial.rfc2217')
self.logger.setLevel(LOGGER_LEVELS[value])
self.logger.debug('enabled logging')
elif option == 'ign_set_control':
self._ignore_set_control_answer = True
elif option == 'poll_modem':
self._poll_modem_state = True
elif option == 'timeout':
self._network_timeout = float(value)
else:
raise ValueError('unknown option: %r' % (option,))
# get host and port
host, port = url.split(':', 1) # may raise ValueError because of unpacking
port = int(port) # and this if it's not a number
if not 0 <= port < 65536: raise ValueError("port not in range 0...65535")
except ValueError, e:
raise SerialException('expected a string in the form "[rfc2217://]<host>:<port>[/option[/option...]]": %s' % e)
return (host, port)
# - - - - - - - - - - - - - - - - - - - - - - - -
def inWaiting(self):
"""Return the number of characters currently in the input buffer."""
if not self._isOpen: raise portNotOpenError
return self._read_buffer.qsize()
def read(self, size=1):
"""Read size bytes from the serial port. If a timeout is set it may
return less characters as requested. With no timeout it will block
until the requested number of bytes is read."""
if not self._isOpen: raise portNotOpenError
data = bytearray()
try:
while len(data) < size:
if self._thread is None:
raise SerialException('connection failed (reader thread died)')
data.append(self._read_buffer.get(True, self._timeout))
except Queue.Empty: # -> timeout
pass
return bytes(data)
def write(self, data):
"""Output the given string over the serial port. Can block if the
connection is blocked. May raise SerialException if the connection is
closed."""
if not self._isOpen: raise portNotOpenError
self._write_lock.acquire()
try:
try:
self._socket.sendall(data.replace(IAC, IAC_DOUBLED))
except socket.error, e:
raise SerialException("socket connection failed: %s" % e) # XXX what exception if socket connection fails
finally:
self._write_lock.release()
return len(data)
def flushInput(self):
"""Clear input buffer, discarding all that is in the buffer."""
if not self._isOpen: raise portNotOpenError
self.rfc2217SendPurge(PURGE_RECEIVE_BUFFER)
# empty read buffer
while self._read_buffer.qsize():
self._read_buffer.get(False)
def flushOutput(self):
"""Clear output buffer, aborting the current output and
discarding all that is in the buffer."""
if not self._isOpen: raise portNotOpenError
self.rfc2217SendPurge(PURGE_TRANSMIT_BUFFER)
def sendBreak(self, duration=0.25):
"""Send break condition. Timed, returns to idle state after given
duration."""
if not self._isOpen: raise portNotOpenError
self.setBreak(True)
time.sleep(duration)
self.setBreak(False)
def setBreak(self, level=True):
"""Set break: Controls TXD. When active, to transmitting is
possible."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('set BREAK to %s' % ('inactive', 'active')[bool(level)])
if level:
self.rfc2217SetControl(SET_CONTROL_BREAK_ON)
else:
self.rfc2217SetControl(SET_CONTROL_BREAK_OFF)
def setRTS(self, level=True):
"""Set terminal status line: Request To Send."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('set RTS to %s' % ('inactive', 'active')[bool(level)])
if level:
self.rfc2217SetControl(SET_CONTROL_RTS_ON)
else:
self.rfc2217SetControl(SET_CONTROL_RTS_OFF)
def setDTR(self, level=True):
"""Set terminal status line: Data Terminal Ready."""
if not self._isOpen: raise portNotOpenError
if self.logger:
self.logger.info('set DTR to %s' % ('inactive', 'active')[bool(level)])
if level:
self.rfc2217SetControl(SET_CONTROL_DTR_ON)
else:
self.rfc2217SetControl(SET_CONTROL_DTR_OFF)
def getCTS(self):
"""Read terminal status line: Clear To Send."""
if not self._isOpen: raise portNotOpenError
return bool(self.getModemState() & MODEMSTATE_MASK_CTS)
def getDSR(self):
"""Read terminal status line: Data Set Ready."""
if not self._isOpen: raise portNotOpenError
return bool(self.getModemState() & MODEMSTATE_MASK_DSR)
def getRI(self):
"""Read terminal status line: Ring Indicator."""
if not self._isOpen: raise portNotOpenError
return bool(self.getModemState() & MODEMSTATE_MASK_RI)
def getCD(self):
"""Read terminal status line: Carrier Detect."""
if not self._isOpen: raise portNotOpenError
return bool(self.getModemState() & MODEMSTATE_MASK_CD)
# - - - platform specific - - -
# None so far
# - - - RFC2217 specific - - -
def _telnetReadLoop(self):
"""read loop for the socket."""
mode = M_NORMAL
suboption = None
try:
while self._socket is not None:
try:
data = self._socket.recv(1024)
except socket.timeout:
# just need to get out of recv form time to time to check if
# still alive
continue
except socket.error:
# connection fails -> terminate loop
break
for byte in data:
if mode == M_NORMAL:
# interpret as command or as data
if byte == IAC:
mode = M_IAC_SEEN
else:
# store data in read buffer or sub option buffer
# depending on state
if suboption is not None:
suboption.append(byte)
else:
self._read_buffer.put(byte)
elif mode == M_IAC_SEEN:
if byte == IAC:
# interpret as command doubled -> insert character
# itself
if suboption is not None:
suboption.append(IAC)
else:
self._read_buffer.put(IAC)
mode = M_NORMAL
elif byte == SB:
# sub option start
suboption = bytearray()
mode = M_NORMAL
elif byte == SE:
# sub option end -> process it now
self._telnetProcessSubnegotiation(bytes(suboption))
suboption = None
mode = M_NORMAL
elif byte in (DO, DONT, WILL, WONT):
# negotiation
telnet_command = byte
mode = M_NEGOTIATE
else:
# other telnet commands
self._telnetProcessCommand(byte)
mode = M_NORMAL
elif mode == M_NEGOTIATE: # DO, DONT, WILL, WONT was received, option now following
self._telnetNegotiateOption(telnet_command, byte)
mode = M_NORMAL
finally:
self._thread = None
if self.logger:
self.logger.debug("read thread terminated")
# - incoming telnet commands and options
def _telnetProcessCommand(self, command):
"""Process commands other than DO, DONT, WILL, WONT."""
# Currently none. RFC2217 only uses negotiation and subnegotiation.
if self.logger:
self.logger.warning("ignoring Telnet command: %r" % (command,))
def _telnetNegotiateOption(self, command, option):
"""Process incoming DO, DONT, WILL, WONT."""
# check our registered telnet options and forward command to them
# they know themselves if they have to answer or not
known = False
for item in self._telnet_options:
# can have more than one match! as some options are duplicated for
# 'us' and 'them'
if item.option == option:
item.process_incoming(command)
known = True
if not known:
# handle unknown options
# only answer to positive requests and deny them
if command == WILL or command == DO:
self.telnetSendOption((command == WILL and DONT or WONT), option)
if self.logger:
self.logger.warning("rejected Telnet option: %r" % (option,))
def _telnetProcessSubnegotiation(self, suboption):
"""Process subnegotiation, the data between IAC SB and IAC SE."""
if suboption[0:1] == COM_PORT_OPTION:
if suboption[1:2] == SERVER_NOTIFY_LINESTATE and len(suboption) >= 3:
self._linestate = ord(suboption[2:3]) # ensure it is a number
if self.logger:
self.logger.info("NOTIFY_LINESTATE: %s" % self._linestate)
elif suboption[1:2] == SERVER_NOTIFY_MODEMSTATE and len(suboption) >= 3:
self._modemstate = ord(suboption[2:3]) # ensure it is a number
if self.logger:
self.logger.info("NOTIFY_MODEMSTATE: %s" % self._modemstate)
# update time when we think that a poll would make sense
self._modemstate_expires = time.time() + 0.3
elif suboption[1:2] == FLOWCONTROL_SUSPEND:
self._remote_suspend_flow = True
elif suboption[1:2] == FLOWCONTROL_RESUME:
self._remote_suspend_flow = False
else:
for item in self._rfc2217_options.values():
if item.ack_option == suboption[1:2]:
#~ print "processing COM_PORT_OPTION: %r" % list(suboption[1:])
item.checkAnswer(bytes(suboption[2:]))
break
else:
if self.logger:
self.logger.warning("ignoring COM_PORT_OPTION: %r" % (suboption,))
else:
if self.logger:
self.logger.warning("ignoring subnegotiation: %r" % (suboption,))
# - outgoing telnet commands and options
def _internal_raw_write(self, data):
"""internal socket write with no data escaping. used to send telnet stuff."""
self._write_lock.acquire()
try:
self._socket.sendall(data)
finally:
self._write_lock.release()
def telnetSendOption(self, action, option):
"""Send DO, DONT, WILL, WONT."""
self._internal_raw_write(to_bytes([IAC, action, option]))
def rfc2217SendSubnegotiation(self, option, value=''):
"""Subnegotiation of RFC2217 parameters."""
value = value.replace(IAC, IAC_DOUBLED)
self._internal_raw_write(to_bytes([IAC, SB, COM_PORT_OPTION, option] + list(value) + [IAC, SE]))
def rfc2217SendPurge(self, value):
item = self._rfc2217_options['purge']
item.set(value) # transmit desired purge type
item.wait(self._network_timeout) # wait for acknowledge from the server
def rfc2217SetControl(self, value):
item = self._rfc2217_options['control']
item.set(value) # transmit desired purge type
if self._ignore_set_control_answer:
# answers are ignored when option is set. compatibility mode for
# servers that answers, but not the expected ones... (or no answer
# at all) i.e. sredird
time.sleep(0.1) # this helps getting the unit tests passed
else:
item.wait(self._network_timeout) # wait for acknowledge from the server
def rfc2217FlowServerReady(self):
"""check if server is ready to receive data. block for some time when
not."""
#~ if self._remote_suspend_flow:
#~ wait---
def getModemState(self):
"""get last modem state (cached value. if value is "old", request a new
one. this cache helps that we don't issue to many requests when e.g. all
status lines, one after the other is queried by te user (getCTS, getDSR
etc.)"""
# active modem state polling enabled? is the value fresh enough?
if self._poll_modem_state and self._modemstate_expires < time.time():
if self.logger:
self.logger.debug('polling modem state')
# when it is older, request an update
self.rfc2217SendSubnegotiation(NOTIFY_MODEMSTATE)
timeout_time = time.time() + self._network_timeout
while time.time() < timeout_time:
time.sleep(0.05) # prevent 100% CPU load
# when expiration time is updated, it means that there is a new
# value
if self._modemstate_expires > time.time():
if self.logger:
self.logger.warning('poll for modem state failed')
break
# even when there is a timeout, do not generate an error just
# return the last known value. this way we can support buggy
# servers that do not respond to polls, but send automatic
# updates.
if self._modemstate is not None:
if self.logger:
self.logger.debug('using cached modem state')
return self._modemstate
else:
# never received a notification from the server
raise SerialException("remote sends no NOTIFY_MODEMSTATE")
# assemble Serial class with the platform specific implementation and the base
# for file-like behavior. for Python 2.6 and newer, that provide the new I/O
# library, derive from io.RawIOBase
# (the choice is made once, at import time, based on io availability)
try:
    import io
except ImportError:
    # classic version with our own file-like emulation
    class Serial(RFC2217Serial, FileLike):
        pass
else:
    # io library present
    class Serial(RFC2217Serial, io.RawIOBase):
        pass
#############################################################################
# The following is code that helps implementing an RFC 2217 server.
class PortManager(object):
    """This class manages the state of Telnet and RFC 2217. It needs a serial
    instance and a connection to work with. connection is expected to implement
    a (thread safe) write function, that writes the string to the network."""
    def __init__(self, serial_port, connection, logger=None):
        self.serial = serial_port
        self.connection = connection
        self.logger = logger
        # becomes True once the client positively answers the RFC 2217 option
        self._client_is_rfc2217 = False
        # filter state machine
        self.mode = M_NORMAL
        self.suboption = None
        self.telnet_command = None
        # states for modem/line control events
        # 255 -> by default report all modem state changes to the client
        self.modemstate_mask = 255
        self.last_modemstate = None
        # NOTE(review): historical spelling "linstate" kept; renaming this
        # public attribute could break external users
        self.linstate_mask = 0
        # all supported telnet options
        self._telnet_options = [
            TelnetOption(self, 'ECHO', ECHO, WILL, WONT, DO, DONT, REQUESTED),
            TelnetOption(self, 'we-SGA', SGA, WILL, WONT, DO, DONT, REQUESTED),
            TelnetOption(self, 'they-SGA', SGA, DO, DONT, WILL, WONT, INACTIVE),
            TelnetOption(self, 'we-BINARY', BINARY, WILL, WONT, DO, DONT, INACTIVE),
            TelnetOption(self, 'they-BINARY', BINARY, DO, DONT, WILL, WONT, REQUESTED),
            TelnetOption(self, 'we-RFC2217', COM_PORT_OPTION, WILL, WONT, DO, DONT, REQUESTED, self._client_ok),
            TelnetOption(self, 'they-RFC2217', COM_PORT_OPTION, DO, DONT, WILL, WONT, INACTIVE, self._client_ok),
        ]
        # negotiate Telnet/RFC2217 -> send initial requests
        if self.logger:
            self.logger.debug("requesting initial Telnet/RFC 2217 options")
        for option in self._telnet_options:
            if option.state is REQUESTED:
                self.telnetSendOption(option.send_yes, option.option)
        # issue 1st modem state notification
    def _client_ok(self):
        """callback of telnet option. it gets called when option is activated.
        this one here is used to detect when the client agrees on RFC 2217. a
        flag is set so that other functions like check_modem_lines know if the
        client is ok."""
        # The callback is used for we and they so if one party agrees, we're
        # already happy. it seems not all servers do the negotiation correctly
        # and i guess there are incorrect clients too.. so be happy if client
        # answers one or the other positively.
        self._client_is_rfc2217 = True
        if self.logger:
            self.logger.info("client accepts RFC 2217")
        # this is to ensure that the client gets a notification, even if there
        # was no change
        self.check_modem_lines(force_notification=True)
    # - outgoing telnet commands and options
    def telnetSendOption(self, action, option):
        """Send DO, DONT, WILL, WONT."""
        self.connection.write(to_bytes([IAC, action, option]))
    def rfc2217SendSubnegotiation(self, option, value=''):
        """Subnegotiation of RFC 2217 parameters."""
        # IAC bytes inside the value must be doubled per the Telnet protocol
        value = value.replace(IAC, IAC_DOUBLED)
        self.connection.write(to_bytes([IAC, SB, COM_PORT_OPTION, option] + list(value) + [IAC, SE]))
    # - check modem lines, needs to be called periodically from user to
    # establish polling
    def check_modem_lines(self, force_notification=False):
        """Poll the serial port's modem lines and send a NOTIFY_MODEMSTATE
        subnegotiation to the client when something within the client's
        modemstate mask changed (or when force_notification is set)."""
        # bool & int relies on True == 1 / False == 0 to build the bitmask
        modemstate = (
            (self.serial.getCTS() and MODEMSTATE_MASK_CTS) |
            (self.serial.getDSR() and MODEMSTATE_MASK_DSR) |
            (self.serial.getRI() and MODEMSTATE_MASK_RI) |
            (self.serial.getCD() and MODEMSTATE_MASK_CD)
        )
        # check what has changed
        deltas = modemstate ^ (self.last_modemstate or 0) # when last is None -> 0
        if deltas & MODEMSTATE_MASK_CTS:
            modemstate |= MODEMSTATE_MASK_CTS_CHANGE
        if deltas & MODEMSTATE_MASK_DSR:
            modemstate |= MODEMSTATE_MASK_DSR_CHANGE
        if deltas & MODEMSTATE_MASK_RI:
            modemstate |= MODEMSTATE_MASK_RI_CHANGE
        if deltas & MODEMSTATE_MASK_CD:
            modemstate |= MODEMSTATE_MASK_CD_CHANGE
        # if new state is different and the mask allows this change, send
        # notification. suppress notifications when client is not rfc2217
        if modemstate != self.last_modemstate or force_notification:
            if (self._client_is_rfc2217 and (modemstate & self.modemstate_mask)) or force_notification:
                self.rfc2217SendSubnegotiation(
                    SERVER_NOTIFY_MODEMSTATE,
                    to_bytes([modemstate & self.modemstate_mask])
                    )
                if self.logger:
                    self.logger.info("NOTIFY_MODEMSTATE: %s" % (modemstate,))
            # save last state, but forget about deltas.
            # otherwise it would also notify about changing deltas which is
            # probably not very useful
            self.last_modemstate = modemstate & 0xf0
    # - outgoing data escaping
    def escape(self, data):
        """this function is for the user. all outgoing data has to be properly
        escaped, so that no IAC character in the data stream messes up the
        Telnet state machine in the server.
        socket.sendall(escape(data))
        """
        # generator: yields IAC twice for each IAC byte, everything else as-is
        for byte in data:
            if byte == IAC:
                yield IAC
                yield IAC
            else:
                yield byte
    # - incoming data filter
    def filter(self, data):
        """handle a bunch of incoming bytes. this is a generator. it will yield
        all characters not of interest for Telnet/RFC 2217.
        The idea is that the reader thread pushes data from the socket through
        this filter:
        for byte in filter(socket.recv(1024)):
            # do things like CR/LF conversion/whatever
            # and write data to the serial port
            serial.write(byte)
        (socket error handling code left as exercise for the reader)
        """
        for byte in data:
            if self.mode == M_NORMAL:
                # interpret as command or as data
                if byte == IAC:
                    self.mode = M_IAC_SEEN
                else:
                    # store data in sub option buffer or pass it to our
                    # consumer depending on state
                    if self.suboption is not None:
                        self.suboption.append(byte)
                    else:
                        yield byte
            elif self.mode == M_IAC_SEEN:
                if byte == IAC:
                    # interpret as command doubled -> insert character
                    # itself
                    if self.suboption is not None:
                        self.suboption.append(byte)
                    else:
                        yield byte
                    self.mode = M_NORMAL
                elif byte == SB:
                    # sub option start
                    self.suboption = bytearray()
                    self.mode = M_NORMAL
                elif byte == SE:
                    # sub option end -> process it now
                    self._telnetProcessSubnegotiation(bytes(self.suboption))
                    self.suboption = None
                    self.mode = M_NORMAL
                elif byte in (DO, DONT, WILL, WONT):
                    # negotiation
                    self.telnet_command = byte
                    self.mode = M_NEGOTIATE
                else:
                    # other telnet commands
                    self._telnetProcessCommand(byte)
                    self.mode = M_NORMAL
            elif self.mode == M_NEGOTIATE: # DO, DONT, WILL, WONT was received, option now following
                self._telnetNegotiateOption(self.telnet_command, byte)
                self.mode = M_NORMAL
    # - incoming telnet commands and options
    def _telnetProcessCommand(self, command):
        """Process commands other than DO, DONT, WILL, WONT."""
        # Currently none. RFC2217 only uses negotiation and subnegotiation.
        if self.logger:
            self.logger.warning("ignoring Telnet command: %r" % (command,))
    def _telnetNegotiateOption(self, command, option):
        """Process incoming DO, DONT, WILL, WONT."""
        # check our registered telnet options and forward command to them
        # they know themselves if they have to answer or not
        known = False
        for item in self._telnet_options:
            # can have more than one match! as some options are duplicated for
            # 'us' and 'them'
            if item.option == option:
                item.process_incoming(command)
                known = True
        if not known:
            # handle unknown options
            # only answer to positive requests and deny them
            if command == WILL or command == DO:
                self.telnetSendOption((command == WILL and DONT or WONT), option)
                if self.logger:
                    self.logger.warning("rejected Telnet option: %r" % (option,))
    def _telnetProcessSubnegotiation(self, suboption):
        """Process subnegotiation, the data between IAC SB and IAC SE.
        Dispatches COM_PORT_OPTION requests (baud rate, data size, parity,
        stop bits, control lines, notifications, masks, purge) and answers
        each one with the matching SERVER_* subnegotiation."""
        if suboption[0:1] == COM_PORT_OPTION:
            if self.logger:
                self.logger.debug('received COM_PORT_OPTION: %r' % (suboption,))
            if suboption[1:2] == SET_BAUDRATE:
                backup = self.serial.baudrate
                try:
                    (self.serial.baudrate,) = struct.unpack("!I", suboption[2:6])
                except ValueError, e:
                    if self.logger:
                        self.logger.error("failed to set baud rate: %s" % (e,))
                    self.serial.baudrate = backup
                else:
                    if self.logger:
                        self.logger.info("changed baud rate: %s" % (self.serial.baudrate,))
                # answer with the value actually in effect (restored on error)
                self.rfc2217SendSubnegotiation(SERVER_SET_BAUDRATE, struct.pack("!I", self.serial.baudrate))
            elif suboption[1:2] == SET_DATASIZE:
                backup = self.serial.bytesize
                try:
                    (self.serial.bytesize,) = struct.unpack("!B", suboption[2:3])
                except ValueError, e:
                    if self.logger:
                        self.logger.error("failed to set data size: %s" % (e,))
                    self.serial.bytesize = backup
                else:
                    if self.logger:
                        self.logger.info("changed data size: %s" % (self.serial.bytesize,))
                self.rfc2217SendSubnegotiation(SERVER_SET_DATASIZE, struct.pack("!B", self.serial.bytesize))
            elif suboption[1:2] == SET_PARITY:
                backup = self.serial.parity
                try:
                    self.serial.parity = RFC2217_REVERSE_PARITY_MAP[struct.unpack("!B", suboption[2:3])[0]]
                except ValueError, e:
                    if self.logger:
                        self.logger.error("failed to set parity: %s" % (e,))
                    self.serial.parity = backup
                else:
                    if self.logger:
                        self.logger.info("changed parity: %s" % (self.serial.parity,))
                self.rfc2217SendSubnegotiation(
                    SERVER_SET_PARITY,
                    struct.pack("!B", RFC2217_PARITY_MAP[self.serial.parity])
                    )
            elif suboption[1:2] == SET_STOPSIZE:
                backup = self.serial.stopbits
                try:
                    self.serial.stopbits = RFC2217_REVERSE_STOPBIT_MAP[struct.unpack("!B", suboption[2:3])[0]]
                except ValueError, e:
                    if self.logger:
                        self.logger.error("failed to set stop bits: %s" % (e,))
                    self.serial.stopbits = backup
                else:
                    if self.logger:
                        self.logger.info("changed stop bits: %s" % (self.serial.stopbits,))
                self.rfc2217SendSubnegotiation(
                    SERVER_SET_STOPSIZE,
                    struct.pack("!B", RFC2217_STOPBIT_MAP[self.serial.stopbits])
                    )
            elif suboption[1:2] == SET_CONTROL:
                if suboption[2:3] == SET_CONTROL_REQ_FLOW_SETTING:
                    if self.serial.xonxoff:
                        self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_USE_SW_FLOW_CONTROL)
                    elif self.serial.rtscts:
                        self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_USE_HW_FLOW_CONTROL)
                    else:
                        self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_USE_NO_FLOW_CONTROL)
                elif suboption[2:3] == SET_CONTROL_USE_NO_FLOW_CONTROL:
                    self.serial.xonxoff = False
                    self.serial.rtscts = False
                    if self.logger:
                        self.logger.info("changed flow control to None")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_USE_NO_FLOW_CONTROL)
                elif suboption[2:3] == SET_CONTROL_USE_SW_FLOW_CONTROL:
                    self.serial.xonxoff = True
                    if self.logger:
                        self.logger.info("changed flow control to XON/XOFF")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_USE_SW_FLOW_CONTROL)
                elif suboption[2:3] == SET_CONTROL_USE_HW_FLOW_CONTROL:
                    self.serial.rtscts = True
                    if self.logger:
                        self.logger.info("changed flow control to RTS/CTS")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_USE_HW_FLOW_CONTROL)
                elif suboption[2:3] == SET_CONTROL_REQ_BREAK_STATE:
                    if self.logger:
                        self.logger.warning("requested break state - not implemented")
                    pass # XXX needs cached value
                elif suboption[2:3] == SET_CONTROL_BREAK_ON:
                    self.serial.setBreak(True)
                    if self.logger:
                        self.logger.info("changed BREAK to active")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_BREAK_ON)
                elif suboption[2:3] == SET_CONTROL_BREAK_OFF:
                    self.serial.setBreak(False)
                    if self.logger:
                        self.logger.info("changed BREAK to inactive")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_BREAK_OFF)
                elif suboption[2:3] == SET_CONTROL_REQ_DTR:
                    if self.logger:
                        self.logger.warning("requested DTR state - not implemented")
                    pass # XXX needs cached value
                elif suboption[2:3] == SET_CONTROL_DTR_ON:
                    self.serial.setDTR(True)
                    if self.logger:
                        self.logger.info("changed DTR to active")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_DTR_ON)
                elif suboption[2:3] == SET_CONTROL_DTR_OFF:
                    self.serial.setDTR(False)
                    if self.logger:
                        self.logger.info("changed DTR to inactive")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_DTR_OFF)
                elif suboption[2:3] == SET_CONTROL_REQ_RTS:
                    if self.logger:
                        self.logger.warning("requested RTS state - not implemented")
                    pass # XXX needs cached value
                    #~ self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_RTS_ON)
                elif suboption[2:3] == SET_CONTROL_RTS_ON:
                    self.serial.setRTS(True)
                    if self.logger:
                        self.logger.info("changed RTS to active")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_RTS_ON)
                elif suboption[2:3] == SET_CONTROL_RTS_OFF:
                    self.serial.setRTS(False)
                    if self.logger:
                        self.logger.info("changed RTS to inactive")
                    self.rfc2217SendSubnegotiation(SERVER_SET_CONTROL, SET_CONTROL_RTS_OFF)
                #~ elif suboption[2:3] == SET_CONTROL_REQ_FLOW_SETTING_IN:
                #~ elif suboption[2:3] == SET_CONTROL_USE_NO_FLOW_CONTROL_IN:
                #~ elif suboption[2:3] == SET_CONTROL_USE_SW_FLOW_CONTOL_IN:
                #~ elif suboption[2:3] == SET_CONTROL_USE_HW_FLOW_CONTOL_IN:
                #~ elif suboption[2:3] == SET_CONTROL_USE_DCD_FLOW_CONTROL:
                #~ elif suboption[2:3] == SET_CONTROL_USE_DTR_FLOW_CONTROL:
                #~ elif suboption[2:3] == SET_CONTROL_USE_DSR_FLOW_CONTROL:
            elif suboption[1:2] == NOTIFY_LINESTATE:
                # client polls for current state
                self.rfc2217SendSubnegotiation(
                    SERVER_NOTIFY_LINESTATE,
                    to_bytes([0])   # sorry, nothing like that implemented
                    )
            elif suboption[1:2] == NOTIFY_MODEMSTATE:
                if self.logger:
                    self.logger.info("request for modem state")
                # client polls for current state
                self.check_modem_lines(force_notification=True)
            elif suboption[1:2] == FLOWCONTROL_SUSPEND:
                if self.logger:
                    self.logger.info("suspend")
                # NOTE(review): _remote_suspend_flow is only created here, not
                # in __init__; readers must tolerate its absence -- confirm
                self._remote_suspend_flow = True
            elif suboption[1:2] == FLOWCONTROL_RESUME:
                if self.logger:
                    self.logger.info("resume")
                self._remote_suspend_flow = False
            elif suboption[1:2] == SET_LINESTATE_MASK:
                self.linstate_mask = ord(suboption[2:3]) # ensure it is a number
                if self.logger:
                    self.logger.info("line state mask: 0x%02x" % (self.linstate_mask,))
            elif suboption[1:2] == SET_MODEMSTATE_MASK:
                self.modemstate_mask = ord(suboption[2:3]) # ensure it is a number
                if self.logger:
                    self.logger.info("modem state mask: 0x%02x" % (self.modemstate_mask,))
            elif suboption[1:2] == PURGE_DATA:
                if suboption[2:3] == PURGE_RECEIVE_BUFFER:
                    self.serial.flushInput()
                    if self.logger:
                        self.logger.info("purge in")
                    self.rfc2217SendSubnegotiation(SERVER_PURGE_DATA, PURGE_RECEIVE_BUFFER)
                elif suboption[2:3] == PURGE_TRANSMIT_BUFFER:
                    self.serial.flushOutput()
                    if self.logger:
                        self.logger.info("purge out")
                    self.rfc2217SendSubnegotiation(SERVER_PURGE_DATA, PURGE_TRANSMIT_BUFFER)
                elif suboption[2:3] == PURGE_BOTH_BUFFERS:
                    self.serial.flushInput()
                    self.serial.flushOutput()
                    if self.logger:
                        self.logger.info("purge both")
                    self.rfc2217SendSubnegotiation(SERVER_PURGE_DATA, PURGE_BOTH_BUFFERS)
                else:
                    if self.logger:
                        self.logger.error("undefined PURGE_DATA: %r" % list(suboption[2:]))
            else:
                if self.logger:
                    self.logger.error("undefined COM_PORT_OPTION: %r" % list(suboption[1:]))
        else:
            if self.logger:
                self.logger.warning("unknown subnegotiation: %r" % (suboption,))
# simple manual smoke test: talk to a local RFC 2217 server on port 7000
if __name__ == '__main__':
    import sys
    ser = Serial('rfc2217://localhost:7000', 115200)
    sys.stdout.write('%s\n' % ser)
    #~ ser.baudrate = 1898
    sys.stdout.write("write...\n")
    ser.write("hello\n")
    ser.flush()
    sys.stdout.write("read: %s\n" % ser.read(5))
    #~ ser.baudrate = 19200
    #~ ser.databits = 7
    ser.close()
| mit |
pinterb/st2 | contrib/packs/actions/pack_mgmt/delete.py | 2 | 1549 | import os
import shutil
from oslo.config import cfg
from st2actions.runners.pythonrunner import Action
from st2common.constants.pack import SYSTEM_PACK_NAMES
from st2common.util.shell import quote_unix
BLOCKED_PACKS = frozenset(SYSTEM_PACK_NAMES)
class UninstallPackAction(Action):
    """Delete installed packs (content directory plus virtualenv) from disk."""

    def __init__(self, config=None):
        super(UninstallPackAction, self).__init__(config=config)
        # Every pack virtualenv lives under <system base path>/virtualenvs/.
        self._base_virtualenvs_path = os.path.join(cfg.CONF.system.base_path,
                                                   'virtualenvs/')

    def run(self, packs, abs_repo_base):
        """Remove the given packs from abs_repo_base and delete their virtualenvs.

        Raises ValueError if any requested pack is a protected system pack.
        """
        # Refuse to uninstall packs that are part of the system itself.
        intersection = BLOCKED_PACKS & frozenset(packs)
        if len(intersection) > 0:
            names = ', '.join(list(intersection))
            raise ValueError('Uninstall includes an uninstallable pack - %s.' % (names))

        # 1. Delete pack content
        for fp in os.listdir(abs_repo_base):
            abs_fp = os.path.join(abs_repo_base, fp)
            if fp in packs and os.path.isdir(abs_fp):
                self.logger.debug('Deleting pack directory "%s"' % (abs_fp))
                shutil.rmtree(abs_fp)

        # 2. Delete pack virtual environment
        for pack_name in packs:
            # quote_unix guards against shell-special characters in the name.
            pack_name = quote_unix(pack_name)
            virtualenv_path = os.path.join(self._base_virtualenvs_path, pack_name)

            if os.path.isdir(virtualenv_path):
                self.logger.debug('Deleting virtualenv "%s" for pack "%s"' %
                                  (virtualenv_path, pack_name))
                shutil.rmtree(virtualenv_path)
| apache-2.0 |
dhalleine/tensorflow | tensorflow/tools/docs/gen_cc_md.py | 5 | 8299 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Convert Doxygen .xml files to MarkDown (.md files)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
from BeautifulSoup import BeautifulStoneSoup
import tensorflow as tf
ANCHOR_RE = re.compile(r'\W+')
PAGE_TEMPLATE = '''# `{0} {1}`
{2}
###Member Details
{3}'''
INDEX_TEMPLATE = '''# TensorFlow C++ Session API reference documentation
TensorFlow's public C++ API includes only the API for executing graphs, as of
version 0.5. To control the execution of a graph from C++:
1. Build the computation graph using the [Python API](../python/).
1. Use [`tf.train.write_graph()`](../python/train.md#write_graph) to
write the graph to a file.
1. Load the graph using the C++ Session API. For example:
```c++
// Reads a model graph definition from disk, and creates a session object you
// can use to run it.
Status LoadGraph(string graph_file_name, Session** session) {
GraphDef graph_def;
TF_RETURN_IF_ERROR(
ReadBinaryProto(Env::Default(), graph_file_name, &graph_def));
TF_RETURN_IF_ERROR(NewSession(SessionOptions(), session));
TF_RETURN_IF_ERROR((*session)->Create(graph_def));
return Status::OK();
}
```
1. Run the graph with a call to `session->Run()`
## Env
@@Env
@@RandomAccessFile
@@WritableFile
@@EnvWrapper
## Session
@@Session
@@SessionOptions
## Status
@@Status
@@Status::State
## Tensor
@@Tensor
@@TensorShape
@@TensorShapeDim
@@TensorShapeUtils
@@PartialTensorShape
@@PartialTensorShapeUtils
@@TF_Buffer
## Thread
@@Thread
@@ThreadOptions
'''
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string('src_dir', None,
'Directory containing the doxygen output.')
tf.flags.DEFINE_string('out_dir', None,
'Directory to which docs should be written.')
def member_definition(member_elt):
    """Return the text of *member_elt*'s `definition` child, or '' if absent."""
    definition = member_elt.find('definition')
    return definition.text if definition else ''
def member_sig(member_elt):
    """Return the member's full signature: definition text plus argument string."""
    sig = member_definition(member_elt)
    argstring = member_elt.find('argsstring')
    if argstring:
        sig += argstring.text
    return sig
def anchorize(name):
    # Turn an arbitrary C++ identifier/signature into a safe anchor/filename
    # fragment by replacing each run of non-word characters with '_'.
    return ANCHOR_RE.sub('_', name)
def element_text(member_elt, elt_name):
    """Extract all `para` text from (`elt_name` in) `member_elt`.

    If elt_name is falsy, member_elt itself is searched for paragraphs.
    Paragraphs are stripped and joined with blank lines.
    """
    target = member_elt.find(elt_name) if elt_name else member_elt
    if not target:
        return ''
    return '\n\n'.join(
        p.getText(separator=u' ').strip() for p in target.findAll('para'))
def full_member_entry(member_elt):
    """Generate the description of `member_elt` for "Member Details".

    Returns a '#### `signature` {#anchor}' heading followed by the brief and
    detailed descriptions when they are present.
    """
    anchor = '{#' + anchorize(member_definition(member_elt)) + '}'
    full_entry = '#### `%s` %s' % (member_sig(member_elt), anchor)
    brief = element_text(member_elt, 'briefdescription')
    detailed = element_text(member_elt, 'detaileddescription')
    # BUG FIX: the original unconditionally embedded '\n\n' in complete_descr,
    # so the emptiness check below never failed and empty descriptions still
    # appended stray blank lines to the output.
    complete_descr = '\n\n'.join(part for part in (brief, detailed) if part)
    if complete_descr:
        full_entry += '\n\n' + complete_descr
    return full_entry
def brief_member_entry(member_elt):
    """Generate the description of `member_elt` for the "Member Summary"."""
    brief_descr = element_text(member_elt, 'briefdescription')
    suffix = '\n * ' + brief_descr if brief_descr else ''
    linkified_sig = '[`{0}`](#{1})'.format(
        member_sig(member_elt), anchorize(member_definition(member_elt)))
    return '* ' + linkified_sig + suffix
def all_briefs(members):
    """Join the one-line summaries of all *members*, one per line."""
    return '\n'.join(brief_member_entry(m) for m in members)
def all_fulls(members):
    """Join the detailed entries of all *members*, separated by blank lines."""
    return '\n\n'.join(full_member_entry(m) for m in members)
def page_overview(class_elt):
    """Returns the overview text of the .md file for `class_elt`."""
    def direct_child_text(tag):
        # Only direct children count; nested descriptions belong to members.
        matches = class_elt.findAll(tag, recursive=False)
        return element_text(matches[0], None) if matches else ''

    return (direct_child_text('briefdescription') + '\n\n' +
            direct_child_text('detaileddescription'))
def page_with_name(pages, name):
    """Return the index in *pages* of the page named *name*.

    The name is also tried with the 'tensorflow::' namespace prefix.
    Returns None when no page matches.
    """
    def match(n):
        for i, page in enumerate(pages):
            if page.get_name() == n:
                return i
        return None
    # BUG FIX: the original used `match(name) or match('tensorflow::' + name)`,
    # which discards a valid match at index 0 because 0 is falsy.
    index = match(name)
    if index is None:
        index = match('tensorflow::' + name)
    return index
def get_all_indexed_pages():
    """Return the set of page names marked with '@@' in INDEX_TEMPLATE."""
    return set(line[2:] for line in INDEX_TEMPLATE.split('\n')
               if line.startswith('@@'))
def index_page(pages):
    """Create the index page linking to `pages` using INDEX_TEMPLATE.

    Each '@@Name' line in the template is replaced by a MarkDown link to the
    matching page.  Raises ValueError if a referenced page is missing.
    """
    pages = pages[:]  # work on a copy; entries are removed once used
    lines = INDEX_TEMPLATE.split('\n')
    for i in range(len(lines)):
        if lines[i].startswith('@@'):
            name = lines[i][2:]
            page_index = page_with_name(pages, name)
            if page_index is None:
                raise ValueError('Missing page with name: ' + name)
            lines[i] = '* [{0}]({1})'.format(
                pages[page_index].get_name(), pages[page_index].get_md_filename())
            # Each page may be referenced at most once in the index.
            # (The original also collected the .md filenames in a local list
            # that was never used; that dead code has been removed.)
            pages.pop(page_index)
    return '\n'.join(lines)
def page_in_name_list(page, names):
    """Return True if *page*'s name equals an entry in *names*, with or
    without the 'tensorflow::' namespace prefix."""
    page_name = page.get_name()
    return any(page_name == n or page_name == 'tensorflow::' + n
               for n in names)
class Page(object):
  """Holds the MarkDown converted contents of a .xml page."""

  def __init__(self, xml_path, deftype):
    # deftype is 'class' or 'struct'; it prefixes the page title and the
    # generated .md filename.
    self.type = deftype
    xml_file = open(xml_path)
    xml = xml_file.read()
    # Doxygen's <computeroutput> corresponds to MarkDown inline code.
    xml = xml.replace('<computeroutput>', '`').replace('</computeroutput>', '`')
    # TODO(josh11b): Should not use HTML entities inside ```...```.
    soup = BeautifulStoneSoup(
        xml, convertEntities=BeautifulStoneSoup.HTML_ENTITIES)
    self.name = soup.find('compoundname').text
    print('Making page with name ' + self.name + ' (from ' + xml_path + ')')
    members = soup('memberdef', prot='public')  # public members only
    fulls = all_fulls(members)
    self.overview = page_overview(soup.find('compounddef'))
    self.page_text = PAGE_TEMPLATE.format(
        self.type, self.name, self.overview, fulls)

  def get_text(self):
    return self.page_text

  def get_name(self):
    return self.name

  def get_short_name(self):
    # Strip namespace qualifiers, e.g. 'tensorflow::Env' -> 'Env'.
    parse = self.get_name().split('::')
    return parse[len(parse)-1]

  def get_type(self):
    return self.type

  def get_md_filename(self):
    # E.g. type 'class', name 'tensorflow::Env' -> 'ClassEnv.md'.
    capitalized_type = self.get_type()[0].upper() + self.get_type()[1:]
    return capitalized_type + anchorize(self.get_short_name()) + '.md'
def main(unused_argv):
  """Convert the Doxygen class/struct .xml files in FLAGS.src_dir to .md."""
  print('Converting in ' + FLAGS.src_dir)
  pages = []
  all_pages = get_all_indexed_pages()
  xml_files = os.listdir(FLAGS.src_dir)
  for fname in xml_files:
    if len(fname) < 6: continue
    newpage = None
    # Doxygen names its output 'classXXX.xml' / 'structXXX.xml'.
    if fname[0:5] == 'class':
      newpage = Page(os.path.join(FLAGS.src_dir, fname), 'class')
    elif fname[0:6] == 'struct':
      newpage = Page(os.path.join(FLAGS.src_dir, fname), 'struct')
    # Only pages referenced from INDEX_TEMPLATE are written out.
    # NOTE(review): the opened output files are never explicitly closed;
    # presumably relying on interpreter exit — confirm.
    if newpage is not None and page_in_name_list(newpage, all_pages):
      pages.append(newpage)
      md_filename = newpage.get_md_filename()
      print('Writing ' + md_filename)
      md_file = open(os.path.join(FLAGS.out_dir, md_filename), 'w')
      print(newpage.get_text(), file=md_file)

  index_text = index_page(pages)
  index_md_file = open(os.path.join(FLAGS.out_dir, 'index.md'), 'w')
  print(index_text, file=index_md_file)
  return 0
if __name__ == '__main__':
  tf.app.run()  # parses the command-line flags, then invokes main()
| apache-2.0 |
ctuning/ck-env | package/imagenet-2012-val-lmdb-256/custom.py | 5 | 5860 | #!/usr/bin/python
#
# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net
#
import os
import sys
import json
##############################################################################
# customize installation
def pre_path(i):
    """Customize installation paths before installation (CK hook).

    Interactively asks how many ImageNet validation images to process and,
    if a number is entered, records it as customize['first_images'] and
    appends an '-imgN' suffix to the suggested installation path and to the
    package name.

    Input keys actually used here:
      ck_kernel     - CK kernel module (reused for I/O helpers)
      customize     - customization vars (updated in place)
      cfg           - meta of this soft entry (updated in place)

    Returns:
      {'return': 0, 'install_env': {...}} on success;
      {'return': >0, 'error': ...} on failure.
    """

    # Get variables
    ck=i['ck_kernel']
    s=''

    hosd=i['host_os_dict']
    tosd=i['target_os_dict']

    # Check platform
    # NOTE(review): most of the variables below are CK template boilerplate
    # and are not referenced later in this function.
    hplat=hosd.get('ck_name','')

    hproc=hosd.get('processor','')
    tproc=tosd.get('processor','')

    phosd=hosd.get('ck_name','')

    ie={}

    svarb=hosd.get('env_var_start','')
    svarb1=hosd.get('env_var_extra1','')
    svare=hosd.get('env_var_stop','')
    svare1=hosd.get('env_var_extra2','')

    iv=i.get('interactive','')

    cus=i.get('customize',{})
    cfg=i.get('cfg',{})

    deps=i.get('deps',{})

    p=i.get('path','')
    pi=i.get('install_path','')

    fi=cus.get('first_images','')
    if fi=='':
        # Ask only when no image count was preset; empty input keeps default.
        r=ck.inp({'text':'Input number of images to process (or press Enter for default): '})
        if r['return']>0: return r

        s=r['string'].strip()
        if s!='':
            s=int(s)
            cus['first_images']=s

            # Make the chosen count visible in the install path ...
            esp=cus.get('extra_suggested_path','')
            x='-img'+str(s)
            cus['extra_suggested_path']=esp+x

            # ... and in the package name shown to the user.
            extra_name = cfg.get('package_extra_name') + ' ('+str(s)+' images)'
            cus['package_extra_name'] = extra_name
            cfg['package_extra_name'] = extra_name

    return {'return':0, 'install_env':ie}
##############################################################################
# customize installation
def post_deps(i):
    """Customize installation after dependencies are resolved (CK hook).

    If an image count was chosen in pre_path(), prunes the ImageNet
    validation ground-truth file (val.txt, provided by the
    'dataset-imagenet-aux' dependency) down to that many entries, writes the
    result to a temporary file and exports it via CK_CAFFE_IMAGENET_VAL_TXT.

    Returns:
      {'return': 0, 'install_env': {...}} on success;
      {'return': >0, 'error': ...} on failure.
    """

    # Get variables
    ck=i['ck_kernel']
    s=''

    hosd=i['host_os_dict']
    tosd=i['target_os_dict']

    # Check platform
    # NOTE(review): template boilerplate; mostly unused below.
    hplat=hosd.get('ck_name','')

    hproc=hosd.get('processor','')
    tproc=tosd.get('processor','')

    phosd=hosd.get('ck_name','')

    ie={}

    svarb=hosd.get('env_var_start','')
    svarb1=hosd.get('env_var_extra1','')
    svare=hosd.get('env_var_stop','')
    svare1=hosd.get('env_var_extra2','')

    iv=i.get('interactive','')

    cus=i.get('customize',{})
    cfg=i.get('cfg',{})

    deps=i.get('deps',{})

    p=i.get('path','')
    pi=i.get('install_path','')

    fi=cus.get('first_images','')
    if fi!='':
        fi=int(fi)

        # Get original val txt and produce new one with selected number of images ...
        vt=deps.get('dataset-imagenet-aux',{}).get('dict',{}).get('env',{}).get('CK_CAFFE_IMAGENET_VAL_TXT','')
        if vt!='':
            r=ck.gen_tmp_file({'prefix':'tmp-', 'suffix':'.tmp', 'remove_dir':'no'})
            if r['return']>0: return r
            fn=r['file_name']

            ck.out('Pruning file '+vt+' and recording to '+fn+' ...')

            ss=''

            r=ck.load_text_file({'text_file':vt, 'split_to_list':'yes'})
            if r['return']==0:
                lst=r['lst']

                # Keep only the first `fi` lines of the ground-truth list.
                n=0
                for s in lst:
                    n+=1
                    if n>fi:
                        break
                    ss+=s+'\n'

            # NOTE(review): if loading val.txt failed above, an empty pruned
            # file is still written and exported — presumably a deliberate
            # best-effort fallback; confirm.
            r=ck.save_text_file({'text_file':fn, 'string':ss})
            if r['return']>0: return r

            ie['CK_CAFFE_IMAGENET_VAL_TXT']=fn

    return {'return':0, 'install_env':ie}
| bsd-3-clause |
wistoch/meego-app-browser | tools/grit/grit/shortcuts_unittests.py | 7 | 3061 | #!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for grit.shortcuts
'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '..'))
import unittest
from grit import shortcuts
from grit import clique
from grit import tclib
from grit.gather import rc
class ShortcutsUnittest(unittest.TestCase):
    """Tests for duplicate keyboard-shortcut (mnemonic) detection."""

    def setUp(self):
        self.uq = clique.UberClique()

    def testFunctionality(self):
        # Two messages in the same shortcut group both use '&t' as their
        # mnemonic, so a duplicate-shortcut warning must be produced.
        c = self.uq.MakeClique(tclib.Message(text="Hello &there"))
        c.AddToShortcutGroup('group_name')
        c = self.uq.MakeClique(tclib.Message(text="Howdie &there partner"))
        c.AddToShortcutGroup('group_name')

        warnings = shortcuts.GenerateDuplicateShortcutsWarnings(self.uq, 'PROJECT')
        self.failUnless(warnings)

    def testAmpersandEscaping(self):
        # '&&' is an escaped literal ampersand, not a mnemonic marker, so the
        # mnemonics here are 't' and 'l' — no clash, no warnings expected.
        c = self.uq.MakeClique(tclib.Message(text="Hello &there"))
        c.AddToShortcutGroup('group_name')
        c = self.uq.MakeClique(tclib.Message(text="S&&T are the &letters S and T"))
        c.AddToShortcutGroup('group_name')

        warnings = shortcuts.GenerateDuplicateShortcutsWarnings(self.uq, 'PROJECT')
        self.failUnless(len(warnings) == 0)

    def testDialog(self):
        # A realistic RC dialog whose controls all use distinct mnemonics;
        # parsing it must not generate duplicate-shortcut warnings.
        dlg = rc.Dialog('''\
IDD_SIDEBAR_RSS_PANEL_PROPPAGE DIALOGEX 0, 0, 239, 221
STYLE DS_SETFONT | DS_FIXEDSYS | WS_CHILD
FONT 8, "MS Shell Dlg", 400, 0, 0x1
BEGIN
PUSHBUTTON "Add &URL",IDC_SIDEBAR_RSS_ADD_URL,182,53,57,14
EDITTEXT IDC_SIDEBAR_RSS_NEW_URL,0,53,178,15,ES_AUTOHSCROLL
PUSHBUTTON "&Remove",IDC_SIDEBAR_RSS_REMOVE,183,200,56,14
PUSHBUTTON "&Edit",IDC_SIDEBAR_RSS_EDIT,123,200,56,14
CONTROL "&Automatically add commonly viewed clips",
IDC_SIDEBAR_RSS_AUTO_ADD,"Button",BS_AUTOCHECKBOX |
BS_MULTILINE | WS_TABSTOP,0,200,120,17
PUSHBUTTON "",IDC_SIDEBAR_RSS_HIDDEN,179,208,6,6,NOT WS_VISIBLE
LTEXT "You can display clips from blogs, news sites, and other online sources.",
IDC_STATIC,0,0,239,10
LISTBOX IDC_SIDEBAR_DISPLAYED_FEED_LIST,0,69,239,127,LBS_SORT |
LBS_OWNERDRAWFIXED | LBS_HASSTRINGS |
LBS_NOINTEGRALHEIGHT | WS_VSCROLL | WS_HSCROLL |
WS_TABSTOP
LTEXT "Add a clip from a recently viewed website by clicking Add Recent Clips.",
IDC_STATIC,0,13,141,19
LTEXT "Or, if you know a site supports RSS or Atom, you can enter the RSS or Atom URL below and add it to your list of Web Clips.",
IDC_STATIC,0,33,239,18
PUSHBUTTON "Add Recent &Clips (10)...",
IDC_SIDEBAR_RSS_ADD_RECENT_CLIPS,146,14,93,14
END''')
        dlg.SetUberClique(self.uq)
        dlg.Parse()

        warnings = shortcuts.GenerateDuplicateShortcutsWarnings(self.uq, 'PROJECT')
        self.failUnless(len(warnings) == 0)
if __name__ == '__main__':
    unittest.main()  # run this module's tests when executed directly
| bsd-3-clause |
dhenyjarasandy/scrapy | tests/test_http_cookies.py | 38 | 2178 | from six.moves.urllib.parse import urlparse
from unittest import TestCase
from scrapy.http import Request, Response
from scrapy.http.cookies import WrappedRequest, WrappedResponse
class WrappedRequestTest(TestCase):
    """Tests the cookielib-compatible interface exposed by WrappedRequest."""

    def setUp(self):
        self.request = Request("http://www.example.com/page.html", \
                headers={"Content-Type": "text/html"})
        self.wrapped = WrappedRequest(self.request)

    def test_get_full_url(self):
        self.assertEqual(self.wrapped.get_full_url(), self.request.url)

    def test_get_host(self):
        self.assertEqual(self.wrapped.get_host(), urlparse(self.request.url).netloc)

    def test_get_type(self):
        # "type" in cookielib terms is the URL scheme (http/https).
        self.assertEqual(self.wrapped.get_type(), urlparse(self.request.url).scheme)

    def test_is_unverifiable(self):
        self.assertFalse(self.wrapped.is_unverifiable())

    def test_is_unverifiable2(self):
        # The unverifiable flag is read from the request's meta dict.
        self.request.meta['is_unverifiable'] = True
        self.assertTrue(self.wrapped.is_unverifiable())

    def test_get_origin_req_host(self):
        self.assertEqual(self.wrapped.get_origin_req_host(), 'www.example.com')

    def test_has_header(self):
        # Header lookups are case-insensitive.
        self.assertTrue(self.wrapped.has_header('content-type'))
        self.assertFalse(self.wrapped.has_header('xxxxx'))

    def test_get_header(self):
        self.assertEqual(self.wrapped.get_header('content-type'), 'text/html')
        self.assertEqual(self.wrapped.get_header('xxxxx', 'def'), 'def')

    def test_header_items(self):
        # Values come back as lists, matching Scrapy's multi-valued headers.
        self.assertEqual(self.wrapped.header_items(), [('Content-Type', ['text/html'])])

    def test_add_unredirected_header(self):
        # Writes through to the underlying Request's headers.
        self.wrapped.add_unredirected_header('hello', 'world')
        self.assertEqual(self.request.headers['hello'], 'world')
class WrappedResponseTest(TestCase):
    """Tests the cookielib-compatible interface exposed by WrappedResponse."""

    def setUp(self):
        self.response = Response("http://www.example.com/page.html",
                headers={"Content-TYpe": "text/html"})
        self.wrapped = WrappedResponse(self.response)

    def test_info(self):
        # cookielib expects info() to return an object providing getheaders().
        self.assert_(self.wrapped.info() is self.wrapped)

    def test_getheaders(self):
        # Lookup is case-insensitive (note the odd "Content-TYpe" in setUp).
        self.assertEqual(self.wrapped.getheaders('content-type'), ['text/html'])
| bsd-3-clause |
leeon/annotated-django | django/contrib/gis/db/backends/oracle/models.py | 6 | 2204 | """
The GeometryColumns and SpatialRefSys models for the Oracle spatial
backend.
It should be noted that Oracle Spatial does not have database tables
named according to the OGC standard, so the closest analogs are used.
For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns
model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model.
"""
from django.contrib.gis.db import models
from django.contrib.gis.db.backends.base import SpatialRefSysMixin
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class GeometryColumns(models.Model):
    "Maps to the Oracle USER_SDO_GEOM_METADATA table."
    table_name = models.CharField(max_length=32)
    column_name = models.CharField(max_length=1024)
    srid = models.IntegerField(primary_key=True)
    # TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY).

    class Meta:
        # Unmanaged: this maps an existing Oracle system view; Django must
        # never create/alter/drop it.
        db_table = 'USER_SDO_GEOM_METADATA'
        managed = False

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the feature table
        name.
        """
        return 'table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the feature
        geometry column.
        """
        return 'column_name'

    def __str__(self):
        return '%s - %s (SRID: %s)' % (self.table_name, self.column_name, self.srid)
class SpatialRefSys(models.Model, SpatialRefSysMixin):
    "Maps to the Oracle MDSYS.CS_SRS table."
    cs_name = models.CharField(max_length=68)
    srid = models.IntegerField(primary_key=True)
    auth_srid = models.IntegerField()
    auth_name = models.CharField(max_length=256)
    wktext = models.CharField(max_length=2046)
    # Optional geometry representing the bounds of this coordinate
    # system. By default, all are NULL in the table.
    cs_bounds = models.PolygonField(null=True)
    objects = models.GeoManager()

    class Meta:
        # Unmanaged mapping of an existing Oracle system table.
        db_table = 'CS_SRS'
        managed = False

    @property
    def wkt(self):
        # SpatialRefSysMixin expects a `wkt` attribute; Oracle stores it in
        # the `wktext` column.
        return self.wktext

    @classmethod
    def wkt_col(cls):
        return 'wktext'
| bsd-3-clause |
joelpinheiro/safebox-smartcard-auth | Server/veserver/lib/python2.7/site-packages/cherrypy/test/test_httpauth.py | 17 | 6334 | import cherrypy
from cherrypy._cpcompat import md5, sha, ntob
from cherrypy.lib import httpauth
from cherrypy.test import helper
class HTTPAuthTest(helper.CPWebCase):
    """Functional tests for the basic_auth and digest_auth CherryPy tools."""

    def setup_server():
        class Root:
            def index(self):
                return "This is public."
            index.exposed = True

        class DigestProtected:
            def index(self):
                return "Hello %s, you've been authorized." % (
                    cherrypy.request.login)
            index.exposed = True

        class BasicProtected:
            def index(self):
                return "Hello %s, you've been authorized." % (
                    cherrypy.request.login)
            index.exposed = True

        class BasicProtected2:
            def index(self):
                return "Hello %s, you've been authorized." % (
                    cherrypy.request.login)
            index.exposed = True

        def fetch_users():
            return {'test': 'test'}

        def sha_password_encrypter(password):
            return sha(ntob(password)).hexdigest()

        def fetch_password(username):
            return sha(ntob('test')).hexdigest()

        # /basic stores an md5 hash directly; /basic2 supplies a fetch
        # callable plus a custom (sha1) encrypter; /digest uses a user dict
        # of plain passwords.
        conf = {
            '/digest': {
                'tools.digest_auth.on': True,
                'tools.digest_auth.realm': 'localhost',
                'tools.digest_auth.users': fetch_users
            },
            '/basic': {
                'tools.basic_auth.on': True,
                'tools.basic_auth.realm': 'localhost',
                'tools.basic_auth.users': {
                    'test': md5(ntob('test')).hexdigest()
                }
            },
            '/basic2': {
                'tools.basic_auth.on': True,
                'tools.basic_auth.realm': 'localhost',
                'tools.basic_auth.users': fetch_password,
                'tools.basic_auth.encrypt': sha_password_encrypter
            }
        }

        root = Root()
        root.digest = DigestProtected()
        root.basic = BasicProtected()
        root.basic2 = BasicProtected2()
        cherrypy.tree.mount(root, config=conf)
    setup_server = staticmethod(setup_server)

    def testPublic(self):
        self.getPage("/")
        self.assertStatus('200 OK')
        self.assertHeader('Content-Type', 'text/html;charset=utf-8')
        self.assertBody('This is public.')

    def testBasic(self):
        self.getPage("/basic/")
        self.assertStatus(401)
        self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"')

        # Wrong credentials (corrupted base64 of test:test) -> still 401.
        self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZX60')])
        self.assertStatus(401)

        # 'dGVzdDp0ZXN0' is base64 of 'test:test'.
        self.getPage('/basic/', [('Authorization', 'Basic dGVzdDp0ZXN0')])
        self.assertStatus('200 OK')
        self.assertBody("Hello test, you've been authorized.")

    def testBasic2(self):
        self.getPage("/basic2/")
        self.assertStatus(401)
        self.assertHeader('WWW-Authenticate', 'Basic realm="localhost"')

        self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZX60')])
        self.assertStatus(401)

        self.getPage('/basic2/', [('Authorization', 'Basic dGVzdDp0ZXN0')])
        self.assertStatus('200 OK')
        self.assertBody("Hello test, you've been authorized.")

    def testDigest(self):
        self.getPage("/digest/")
        self.assertStatus(401)

        # Find the Digest challenge among the response headers.
        value = None
        for k, v in self.headers:
            if k.lower() == "www-authenticate":
                if v.startswith("Digest"):
                    value = v
                    break

        if value is None:
            self._handlewebError(
                "Digest authentification scheme was not found")

        # Parse the challenge parameters into a dict.
        value = value[7:]
        items = value.split(', ')
        tokens = {}
        for item in items:
            key, value = item.split('=')
            tokens[key.lower()] = value

        missing_msg = "%s is missing"
        bad_value_msg = "'%s' was expecting '%s' but found '%s'"
        nonce = None
        if 'realm' not in tokens:
            self._handlewebError(missing_msg % 'realm')
        elif tokens['realm'] != '"localhost"':
            self._handlewebError(bad_value_msg %
                                 ('realm', '"localhost"', tokens['realm']))
        if 'nonce' not in tokens:
            self._handlewebError(missing_msg % 'nonce')
        else:
            nonce = tokens['nonce'].strip('"')
        if 'algorithm' not in tokens:
            self._handlewebError(missing_msg % 'algorithm')
        elif tokens['algorithm'] != '"MD5"':
            self._handlewebError(bad_value_msg %
                                 ('algorithm', '"MD5"', tokens['algorithm']))
        if 'qop' not in tokens:
            self._handlewebError(missing_msg % 'qop')
        elif tokens['qop'] != '"auth"':
            self._handlewebError(bad_value_msg %
                                 ('qop', '"auth"', tokens['qop']))

        # Test a wrong 'realm' value
        base_auth = (
            'Digest '
            'username="test", '
            'realm="wrong realm", '
            'nonce="%s", '
            'uri="/digest/", '
            'algorithm=MD5, '
            'response="%s", '
            'qop=auth, '
            'nc=%s, '
            'cnonce="1522e61005789929"'
        )

        # Compute the digest response locally, then resend the header with it
        # filled in; the mismatched realm must still be rejected.
        auth = base_auth % (nonce, '', '00000001')
        params = httpauth.parseAuthorization(auth)
        response = httpauth._computeDigestResponse(params, 'test')

        auth = base_auth % (nonce, response, '00000001')
        self.getPage('/digest/', [('Authorization', auth)])
        self.assertStatus(401)

        # Test that must pass
        base_auth = (
            'Digest '
            'username="test", '
            'realm="localhost", '
            'nonce="%s", '
            'uri="/digest/", '
            'algorithm=MD5, '
            'response="%s", '
            'qop=auth, '
            'nc=%s, '
            'cnonce="1522e61005789929"'
        )

        auth = base_auth % (nonce, '', '00000001')
        params = httpauth.parseAuthorization(auth)
        response = httpauth._computeDigestResponse(params, 'test')

        auth = base_auth % (nonce, response, '00000001')
        self.getPage('/digest/', [('Authorization', auth)])
        self.assertStatus('200 OK')
        self.assertBody("Hello test, you've been authorized.")
| gpl-2.0 |
glennyonemitsu/MarkupHiveSDK | flask/module.py | 850 | 1363 | # -*- coding: utf-8 -*-
"""
flask.module
~~~~~~~~~~~~
Implements a class that represents module blueprints.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
from .blueprints import Blueprint
def blueprint_is_module(bp):
    """Used to figure out if something is actually a module"""
    # True only for the deprecated Module subclass, not for plain Blueprints.
    return isinstance(bp, Module)
class Module(Blueprint):
    """Deprecated module support.  Until Flask 0.6 modules were a different
    name of the concept now available as blueprints in Flask.  They are
    essentially doing the same but have some bad semantics for templates and
    static files that were fixed with blueprints.

    .. versionchanged:: 0.7
       Modules were deprecated in favor for blueprints.
    """

    def __init__(self, import_name, name=None, url_prefix=None,
                 static_path=None, subdomain=None):
        # NOTE(review): static_path is accepted for backward compatibility but
        # never referenced below.
        if name is None:
            # Derive the module name from the last dotted component.
            assert '.' in import_name, 'name required if package name ' \
                'does not point to a submodule'
            name = import_name.rsplit('.', 1)[1]
        Blueprint.__init__(self, name, import_name, url_prefix=url_prefix,
                           subdomain=subdomain, template_folder='templates')

        # Old module semantics: a 'static' directory is picked up implicitly.
        if os.path.isdir(os.path.join(self.root_path, 'static')):
            self._static_folder = 'static'
| mit |
CasparLi/calibre | src/calibre/ebooks/pdf/render/serialize.py | 13 | 18336 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import hashlib
from future_builtins import map
from PyQt5.Qt import QBuffer, QByteArray, QImage, Qt, QColor, qRgba, QPainter
from calibre.constants import (__appname__, __version__)
from calibre.ebooks.pdf.render.common import (
Reference, EOL, serialize, Stream, Dictionary, String, Name, Array,
fmtnum)
from calibre.ebooks.pdf.render.fonts import FontManager
from calibre.ebooks.pdf.render.links import Links
from calibre.utils.date import utcnow
PDFVER = b'%PDF-1.4' # 1.4 is needed for XMP metadata
class IndirectObjects(object):
    """Tracks PDF indirect objects, their references and byte offsets.

    Objects are numbered sequentially from 1.  Offsets are recorded as each
    object is serialized so the xref table can be emitted at the end.
    """

    def __init__(self):
        self._list = []       # objects in insertion order (number = index+1)
        self._map = {}        # id(obj) -> Reference
        self._offsets = []    # byte offset of each serialized object, or None

    def __len__(self):
        return len(self._list)

    def add(self, o):
        # Register an object and return a Reference to it.
        self._list.append(o)
        ref = Reference(len(self._list), o)
        self._map[id(o)] = ref
        self._offsets.append(None)
        return ref

    def commit(self, ref, stream):
        # Serialize an already-registered object to the stream immediately.
        self.write_obj(stream, ref.num, ref.obj)

    def write_obj(self, stream, num, obj):
        # Emit `num 0 obj ... endobj`, recording the offset for the xref.
        stream.write(EOL)
        self._offsets[num-1] = stream.tell()
        stream.write('%d 0 obj'%num)
        stream.write(EOL)
        serialize(obj, stream)
        if stream.last_char != EOL:
            stream.write(EOL)
        stream.write('endobj')
        stream.write(EOL)

    def __getitem__(self, o):
        # Accepts either an index into the object list or the object itself.
        try:
            return self._map[id(self._list[o] if isinstance(o, int) else o)]
        except (KeyError, IndexError):
            raise KeyError('The object %r was not found'%o)

    def pdf_serialize(self, stream):
        # Write out any objects that were not explicitly committed earlier.
        for i, obj in enumerate(self._list):
            offset = self._offsets[i]
            if offset is None:
                self.write_obj(stream, i+1, obj)

    def write_xref(self, stream):
        # Emit the cross-reference table; entry 0 is the standard free entry.
        self.xref_offset = stream.tell()
        stream.write(b'xref'+EOL)
        stream.write('0 %d'%(1+len(self._offsets)))
        stream.write(EOL)
        stream.write('%010d 65535 f '%0)
        stream.write(EOL)
        for offset in self._offsets:
            line = '%010d 00000 n '%offset
            stream.write(line.encode('ascii') + EOL)
        return self.xref_offset
class Page(Stream):
    """A single PDF page: its content stream plus its /Page dictionary.

    Registers the named resources (opacities, fonts, image XObjects,
    patterns) used while painting so a /Resources dictionary can be built
    when the page ends.
    """

    def __init__(self, parentref, *args, **kwargs):
        super(Page, self).__init__(*args, **kwargs)
        self.page_dict = Dictionary({
            'Type': Name('Page'),
            'Parent': parentref,
        })
        self.opacities = {}
        self.fonts = {}
        self.xobjects = {}
        self.patterns = {}

    def set_opacity(self, opref):
        # Emit '/OpaN gs', registering the ExtGState resource on first use.
        if opref not in self.opacities:
            self.opacities[opref] = 'Opa%d'%len(self.opacities)
        name = self.opacities[opref]
        serialize(Name(name), self)
        self.write(b' gs ')

    def add_font(self, fontref):
        # Return the page-local resource name (F0, F1, ...) for this font.
        if fontref not in self.fonts:
            self.fonts[fontref] = 'F%d'%len(self.fonts)
        return self.fonts[fontref]

    def add_image(self, imgref):
        if imgref not in self.xobjects:
            self.xobjects[imgref] = 'Image%d'%len(self.xobjects)
        return self.xobjects[imgref]

    def add_pattern(self, patternref):
        if patternref not in self.patterns:
            self.patterns[patternref] = 'Pat%d'%len(self.patterns)
        return self.patterns[patternref]

    def add_resources(self):
        # Build the /Resources dictionary from everything registered above.
        r = Dictionary()
        if self.opacities:
            extgs = Dictionary()
            for opref, name in self.opacities.iteritems():
                extgs[name] = opref
            r['ExtGState'] = extgs
        if self.fonts:
            fonts = Dictionary()
            for ref, name in self.fonts.iteritems():
                fonts[name] = ref
            r['Font'] = fonts
        if self.xobjects:
            xobjects = Dictionary()
            for ref, name in self.xobjects.iteritems():
                xobjects[name] = ref
            r['XObject'] = xobjects
        if self.patterns:
            # Patterns need a pattern color space alongside the resources.
            r['ColorSpace'] = Dictionary({'PCSp':Array(
                [Name('Pattern'), Name('DeviceRGB')])})
            patterns = Dictionary()
            for ref, name in self.patterns.iteritems():
                patterns[name] = ref
            r['Pattern'] = patterns
        if r:
            self.page_dict['Resources'] = r

    def end(self, objects, stream):
        # Serialize the content stream now; return a reference to the page
        # dictionary, which is committed later with the remaining objects.
        contents = objects.add(self)
        objects.commit(contents, stream)
        self.page_dict['Contents'] = contents
        self.add_resources()
        ret = objects.add(self.page_dict)
        # objects.commit(ret, stream)
        return ret
class Path(object):
    """Accumulates PDF path-construction operators as tuples in `ops`.

    Each tuple holds the operator's operands followed by its one-letter PDF
    operator name ('m', 'l', 'c', 'h').
    """

    def __init__(self):
        self.ops = []

    def _record(self, *op):
        # All operators share the same append-a-tuple shape.
        self.ops.append(op)

    def move_to(self, x, y):
        self._record(x, y, 'm')

    def line_to(self, x, y):
        self._record(x, y, 'l')

    def curve_to(self, x1, y1, x2, y2, x, y):
        self._record(x1, y1, x2, y2, x, y, 'c')

    def close(self):
        self._record('h')
class Catalog(Dictionary):
    """The PDF document catalog (root object), pointing at the page tree."""

    def __init__(self, pagetree):
        super(Catalog, self).__init__({'Type':Name('Catalog'),
            'Pages': pagetree})
class PageTree(Dictionary):
    """The PDF /Pages tree; every page shares the single MediaBox set here."""

    def __init__(self, page_size):
        super(PageTree, self).__init__({'Type':Name('Pages'),
            'MediaBox':Array([0, 0, page_size[0], page_size[1]]),
            'Kids':Array(), 'Count':0,
        })

    def add_page(self, pageref):
        self['Kids'].append(pageref)
        self['Count'] += 1

    def get_ref(self, num):
        # num is 1-based.
        return self['Kids'][num-1]

    def get_num(self, pageref):
        # Returns the 1-based page number, or -1 if the page is not present.
        try:
            return self['Kids'].index(pageref) + 1
        except ValueError:
            return -1
class HashingStream(object):
    """Wraps a writable file object, maintaining a SHA-256 digest and the
    last byte of everything written through it."""

    def __init__(self, f):
        self.f = f
        self.tell = f.tell
        self.hashobj = hashlib.sha256()
        self.last_char = b''

    def write(self, raw):
        # Text is encoded as ASCII; bytes pass straight through.
        if not isinstance(raw, bytes):
            raw = raw.encode('ascii')
        self.write_raw(raw)

    def write_raw(self, raw):
        self.f.write(raw)
        self.hashobj.update(raw)
        if raw:
            self.last_char = raw[-1]
class Image(Stream):
    """An image XObject stream.

    `depth` is bits per pixel: 1 produces a bilevel /ImageMask, 32 a
    DeviceRGB image, anything else DeviceGray at 8 bits/component.  `dct`
    means the data is already JPEG (DCTDecode) encoded; otherwise the stream
    is Flate-compressed.
    """

    def __init__(self, data, w, h, depth, mask, soft_mask, dct):
        Stream.__init__(self)
        self.width, self.height, self.depth = w, h, depth
        self.mask, self.soft_mask = mask, soft_mask
        if dct:
            self.filters.append(Name('DCTDecode'))
        else:
            self.compress = True
        self.write(data)

    def add_extra_keys(self, d):
        d['Type'] = Name('XObject')
        d['Subtype']= Name('Image')
        d['Width'] = self.width
        d['Height'] = self.height
        if self.depth == 1:
            d['ImageMask'] = True
            # Decode [1 0] inverts the mask sense.
            d['Decode'] = Array([1, 0])
        else:
            d['BitsPerComponent'] = 8
            d['ColorSpace'] = Name('Device' + ('RGB' if self.depth == 32 else
                                               'Gray'))
        if self.mask is not None:
            d['Mask'] = self.mask
        if self.soft_mask is not None:
            d['SMask'] = self.soft_mask
class Metadata(Stream):

    """An XMP metadata stream generated from a calibre metadata object."""

    def __init__(self, mi):
        Stream.__init__(self)
        # Function-level import: the XMP serializer is only needed when
        # metadata is actually embedded.
        from calibre.ebooks.metadata.xmp import metadata_to_xmp_packet
        self.write(metadata_to_xmp_packet(mi))

    def add_extra_keys(self, d):
        d['Type'] = Name('Metadata')
        d['Subtype'] = Name('XML')
class PDFStream(object):

    """Serializes a complete PDF document to a file-like object.

    Owns the indirect-object table, the page tree/catalog, the font and
    image caches and the current (in-progress) page; content-stream
    operators are written through ``current_page``.
    """

    # Map (stroke?, fill?, fill-rule) -> PDF path-painting operator.
    PATH_OPS = {
        # stroke fill   fill-rule
        (False, False, 'winding')  : 'n',
        (False, False, 'evenodd')  : 'n',
        (False, True,  'winding')  : 'f',
        (False, True,  'evenodd')  : 'f*',
        (True,  False, 'winding')  : 'S',
        (True,  False, 'evenodd')  : 'S',
        (True,  True,  'winding')  : 'B',
        (True,  True,  'evenodd')  : 'B*',
    }

    def __init__(self, stream, page_size, compress=False, mark_links=False,
                 debug=print):
        self.stream = HashingStream(stream)
        self.compress = compress
        self.write_line(PDFVER)
        # Binary marker comment: high-bit bytes right after the header so
        # transfer tools treat the file as binary.
        self.write_line(b'%íì¦"')
        creator = ('%s %s [http://calibre-ebook.com]'%(__appname__,
                                    __version__))
        self.write_line('%% Created by %s'%creator)
        self.objects = IndirectObjects()
        # Object 0 is the page tree, object 1 the catalog (see the
        # page_tree/catalog properties below).
        self.objects.add(PageTree(page_size))
        self.objects.add(Catalog(self.page_tree))
        self.current_page = Page(self.page_tree, compress=self.compress)
        self.info = Dictionary({
            'Creator':String(creator),
            'Producer':String(creator),
            'CreationDate': utcnow(),
        })
        self.stroke_opacities, self.fill_opacities = {}, {}
        self.font_manager = FontManager(self.objects, self.compress)
        self.image_cache = {}
        self.pattern_cache, self.shader_cache = {}, {}
        self.debug = debug
        self.links = Links(self, mark_links, page_size)
        # Probe a 1x1 ARGB image to find which byte of a pixel holds the
        # alpha channel (endianness-dependent); used when extracting the
        # soft mask in add_image().
        i = QImage(1, 1, QImage.Format_ARGB32)
        i.fill(qRgba(0, 0, 0, 255))
        self.alpha_bit = i.constBits().asstring(4).find(b'\xff')

    @property
    def page_tree(self):
        # The page tree is always the first indirect object.
        return self.objects[0]

    @property
    def catalog(self):
        # The catalog is always the second indirect object.
        return self.objects[1]

    def get_pageref(self, pagenum):
        """Return the indirect reference for the given 1-based page number."""
        return self.page_tree.obj.get_ref(pagenum)

    def set_metadata(self, title=None, author=None, tags=None, mi=None):
        """Populate the Info dictionary and, when *mi* is given, attach an
        XMP metadata stream to the catalog."""
        if title:
            self.info['Title'] = String(title)
        if author:
            self.info['Author'] = String(author)
        if tags:
            self.info['Keywords'] = String(tags)
        if mi is not None:
            self.metadata = self.objects.add(Metadata(mi))
            self.catalog.obj['Metadata'] = self.metadata

    def write_line(self, byts=b''):
        # Text is ASCII-encoded; an EOL is always appended.
        byts = byts if isinstance(byts, bytes) else byts.encode('ascii')
        self.stream.write(byts + EOL)

    def transform(self, *args):
        """Emit a 'cm' (concat matrix) operator. Accepts either a single
        Qt-style matrix object or the six matrix components directly."""
        if len(args) == 1:
            m = args[0]
            vals = [m.m11(), m.m12(), m.m21(), m.m22(), m.dx(), m.dy()]
        else:
            vals = args
        cm = ' '.join(map(fmtnum, vals))
        self.current_page.write_line(cm + ' cm')

    def save_stack(self):
        # q: push the graphics state.
        self.current_page.write_line('q')

    def restore_stack(self):
        # Q: pop the graphics state.
        self.current_page.write_line('Q')

    def reset_stack(self):
        # Pop then immediately re-push, restoring the base state.
        self.current_page.write_line('Q q')

    def draw_rect(self, x, y, width, height, stroke=True, fill=False):
        """Draw a rectangle, painted per PATH_OPS (winding rule)."""
        self.current_page.write('%s re '%' '.join(map(fmtnum, (x, y, width, height))))
        self.current_page.write_line(self.PATH_OPS[(stroke, fill, 'winding')])

    def write_path(self, path):
        """Emit the accumulated construction operators of a Path object."""
        for i, op in enumerate(path.ops):
            if i != 0:
                self.current_page.write_line()
            for x in op:
                self.current_page.write(
                    (fmtnum(x) if isinstance(x, (int, long, float)) else x) + ' ')

    def draw_path(self, path, stroke=True, fill=False, fill_rule='winding'):
        """Emit *path* followed by the matching painting operator."""
        if not path.ops:
            return
        self.write_path(path)
        self.current_page.write_line(self.PATH_OPS[(stroke, fill, fill_rule)])

    def add_clip(self, path, fill_rule='winding'):
        """Intersect the clip region with *path* (W/W* followed by n)."""
        if not path.ops:
            return
        self.write_path(path)
        op = 'W' if fill_rule == 'winding' else 'W*'
        self.current_page.write_line(op + ' ' + 'n')

    def serialize(self, o):
        serialize(o, self.current_page)

    def set_stroke_opacity(self, opacity):
        # ExtGState dictionaries are cached per opacity value (CA = stroke).
        if opacity not in self.stroke_opacities:
            op = Dictionary({'Type':Name('ExtGState'), 'CA': opacity})
            self.stroke_opacities[opacity] = self.objects.add(op)
        self.current_page.set_opacity(self.stroke_opacities[opacity])

    def set_fill_opacity(self, opacity):
        # NOTE(review): fill coerces to float while stroke does not —
        # presumably deliberate cache normalization; confirm before unifying.
        opacity = float(opacity)
        if opacity not in self.fill_opacities:
            op = Dictionary({'Type':Name('ExtGState'), 'ca': opacity})
            self.fill_opacities[opacity] = self.objects.add(op)
        self.current_page.set_opacity(self.fill_opacities[opacity])

    def end_page(self):
        """Commit the current page and start a fresh one."""
        pageref = self.current_page.end(self.objects, self.stream)
        self.page_tree.obj.add_page(pageref)
        self.current_page = Page(self.page_tree, compress=self.compress)

    def draw_glyph_run(self, transform, size, font_metrics, glyphs):
        """Render a run of glyphs (sequence of (x, y, glyph_id)) with the
        given font, size and text matrix."""
        glyph_ids = {x[-1] for x in glyphs}
        fontref = self.font_manager.add_font(font_metrics, glyph_ids)
        name = self.current_page.add_font(fontref)
        self.current_page.write(b'BT ')
        serialize(Name(name), self.current_page)
        self.current_page.write(' %s Tf '%fmtnum(size))
        self.current_page.write('%s Tm '%' '.join(map(fmtnum, transform)))
        for x, y, glyph_id in glyphs:
            # Glyph ids are written as 4-digit hex strings (<....> Tj).
            self.current_page.write_raw(('%s %s Td <%04X> Tj '%(
                fmtnum(x), fmtnum(y), glyph_id)).encode('ascii'))
        self.current_page.write_line(b' ET')

    def get_image(self, cache_key):
        """Return a previously written image reference, or None."""
        return self.image_cache.get(cache_key, None)

    def write_image(self, data, w, h, depth, dct=False, mask=None,
                    soft_mask=None, cache_key=None):
        """Create an Image XObject, cache its reference and commit it."""
        imgobj = Image(data, w, h, depth, mask, soft_mask, dct)
        self.image_cache[cache_key] = r = self.objects.add(imgobj)
        self.objects.commit(r, self.stream)
        return r

    def add_image(self, img, cache_key):
        """Convert a QImage into a PDF image XObject (cached by key).

        Pure black/white 1-bit images become stencil masks; everything
        else is JPEG-encoded, with a separate 8-bit soft mask when the
        source has non-trivial alpha.
        """
        ref = self.get_image(cache_key)
        if ref is not None:
            return ref

        fmt = img.format()
        image = QImage(img)
        if (image.depth() == 1 and img.colorTable().size() == 2 and
            img.colorTable().at(0) == QColor(Qt.black).rgba() and
            img.colorTable().at(1) == QColor(Qt.white).rgba()):
            # Strictly black&white palette: keep as 1-bit, normalized to
            # MSB-first bit order.
            if fmt == QImage.Format_MonoLSB:
                image = image.convertToFormat(QImage.Format_Mono)
            fmt = QImage.Format_Mono
        else:
            if (fmt != QImage.Format_RGB32 and fmt != QImage.Format_ARGB32):
                image = image.convertToFormat(QImage.Format_ARGB32)
                fmt = QImage.Format_ARGB32

        w = image.width()
        h = image.height()
        d = image.depth()

        if fmt == QImage.Format_Mono:
            bytes_per_line = (w + 7) >> 3
            data = image.constBits().asstring(bytes_per_line * h)
            return self.write_image(data, w, h, d, cache_key=cache_key)

        has_alpha = False
        soft_mask = None

        if fmt == QImage.Format_ARGB32:
            # Extract the alpha channel (every 4th byte, offset found in
            # __init__ via alpha_bit).
            tmask = image.constBits().asstring(4*w*h)[self.alpha_bit::4]
            sdata = bytearray(tmask)
            vals = set(sdata)
            vals.discard(255)  # discard opaque pixels
            has_alpha = bool(vals)

        if has_alpha:
            # Blend image onto a white background as otherwise Qt will render
            # transparent pixels as black
            background = QImage(image.size(), QImage.Format_ARGB32_Premultiplied)
            background.fill(Qt.white)
            painter = QPainter(background)
            painter.drawImage(0, 0, image)
            painter.end()
            image = background

        ba = QByteArray()
        buf = QBuffer(ba)
        image.save(buf, 'jpeg', 94)  # JPEG quality 94
        data = bytes(ba.data())
        if has_alpha:
            soft_mask = self.write_image(tmask, w, h, 8)

        return self.write_image(data, w, h, 32, dct=True,
                                soft_mask=soft_mask, cache_key=cache_key)

    def add_pattern(self, pattern):
        # Cache the pattern object globally, then register it on the page.
        if pattern.cache_key not in self.pattern_cache:
            self.pattern_cache[pattern.cache_key] = self.objects.add(pattern)
        return self.current_page.add_pattern(self.pattern_cache[pattern.cache_key])

    def add_shader(self, shader):
        if shader.cache_key not in self.shader_cache:
            self.shader_cache[shader.cache_key] = self.objects.add(shader)
        return self.shader_cache[shader.cache_key]

    def draw_image(self, x, y, width, height, imgref):
        """Paint an image XObject into the given rectangle (the negative
        height flips Qt's top-down coordinates into PDF's bottom-up)."""
        name = self.current_page.add_image(imgref)
        self.current_page.write('q %s 0 0 %s %s %s cm '%(fmtnum(width),
            fmtnum(-height), fmtnum(x), fmtnum(y+height)))
        serialize(Name(name), self.current_page)
        self.current_page.write_line(' Do Q')

    def apply_color_space(self, color, pattern, stroke=False):
        """Select the color/pattern for stroking or filling: plain RGB,
        a pure pattern, or a colored pattern in the PCSp color space."""
        wl = self.current_page.write_line
        if color is not None and pattern is None:
            wl(' '.join(map(fmtnum, color)) + (' RG' if stroke else ' rg'))
        elif color is None and pattern is not None:
            wl('/Pattern %s /%s %s'%('CS' if stroke else 'cs', pattern,
                                     'SCN' if stroke else 'scn'))
        elif color is not None and pattern is not None:
            col = ' '.join(map(fmtnum, color))
            wl('/PCSp %s %s /%s %s'%('CS' if stroke else 'cs', col, pattern,
                                     'SCN' if stroke else 'scn'))

    def apply_fill(self, color=None, pattern=None, opacity=None):
        if opacity is not None:
            self.set_fill_opacity(opacity)
        self.apply_color_space(color, pattern)

    def apply_stroke(self, color=None, pattern=None, opacity=None):
        if opacity is not None:
            self.set_stroke_opacity(opacity)
        self.apply_color_space(color, pattern, stroke=True)

    def end(self):
        """Finish the document: flush the last page, embed fonts, write all
        objects, the xref table and the trailer."""
        if self.current_page.getvalue():
            self.end_page()
        self.font_manager.embed_fonts(self.debug)
        inforef = self.objects.add(self.info)
        self.links.add_links()
        self.objects.pdf_serialize(self.stream)
        self.write_line()
        startxref = self.objects.write_xref(self.stream)
        # NOTE(review): hexdigest() returns text, so .decode('ascii') only
        # works on Python 2 — confirm intended interpreter before porting.
        file_id = String(self.stream.hashobj.hexdigest().decode('ascii'))
        self.write_line('trailer')
        trailer = Dictionary({'Root':self.catalog, 'Size':len(self.objects)+1,
                              'ID':Array([file_id, file_id]), 'Info':inforef})
        serialize(trailer, self.stream)
        self.write_line('startxref')
        self.write_line('%d'%startxref)
        self.stream.write('%%EOF')
| gpl-3.0 |
dmitry-sobolev/ansible | lib/ansible/plugins/strategy/__init__.py | 13 | 44724 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import threading
import time
from collections import deque
from multiprocessing import Lock
from jinja2.exceptions import UndefinedError
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.executor import action_write_locks
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.module_utils.six.moves import queue as Queue
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_text
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
from ansible.template import Templar
from ansible.vars import combine_vars, strip_internal_keys
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['StrategyBase']
class StrategySentinel:
    # Marker type: an instance (_sentinel, below) is pushed onto the final
    # results queue by StrategyBase.cleanup() to tell results_thread_main
    # to exit its read loop.
    pass
# TODO: this should probably be in the plugins/__init__.py, with
# a smarter mechanism to set all of the attributes based on
# the loaders created there
class SharedPluginLoaderObj:
'''
A simple object to make pass the various plugin loaders to
the forked processes over the queue easier
'''
def __init__(self):
self.action_loader = action_loader
self.connection_loader = connection_loader
self.filter_loader = filter_loader
self.test_loader = test_loader
self.lookup_loader = lookup_loader
self.module_loader = module_loader
# The single shared sentinel instance used to shut down the results thread.
_sentinel = StrategySentinel()
def results_thread_main(strategy):
    """Background-thread target: drain the strategy's final results queue
    into strategy._results.

    Runs until a StrategySentinel is received (normal shutdown, see
    StrategyBase.cleanup) or the queue connection dies (IOError/EOFError).
    """
    while True:
        try:
            result = strategy._final_q.get()
            if isinstance(result, StrategySentinel):
                break
            else:
                # Use the Condition as a context manager so the lock is
                # released even if append raises (the original manual
                # acquire/release would leak the lock on an exception).
                with strategy._results_lock:
                    strategy._results.append(result)
        except (IOError, EOFError):
            # Queue/pipe was torn down underneath us; treat as shutdown.
            break
        except Queue.Empty:
            # A blocking get() should not raise Empty; ignore defensively.
            pass
class StrategyBase:
'''
This is the base class for strategy plugins, which contains some common
code useful to all strategies like running handlers, cleanup actions, etc.
'''
    def __init__(self, tqm):
        """Wire this strategy up to the TaskQueueManager *tqm*, caching the
        collaborators it needs and starting the background results thread."""
        self._tqm = tqm
        self._inventory = tqm.get_inventory()
        self._workers = tqm.get_workers()
        self._notified_handlers = tqm._notified_handlers
        self._listening_handlers = tqm._listening_handlers
        self._variable_manager = tqm.get_variable_manager()
        self._loader = tqm.get_loader()
        self._final_q = tqm._final_q
        # CLI options may not carry these flags; default both to False.
        self._step = getattr(tqm._options, 'step', False)
        self._diff = getattr(tqm._options, 'diff', False)

        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        self._display = display

        # internal counters
        self._pending_results = 0   # tasks queued but not yet processed
        self._cur_worker = 0        # round-robin index into self._workers

        # this dictionary is used to keep track of hosts that have
        # outstanding tasks still in queue
        self._blocked_hosts = dict()

        # Shared deque filled by results_thread_main; guarded by the
        # Condition below.
        self._results = deque()
        self._results_lock = threading.Condition(threading.Lock())

        # create the result processing thread for reading results in the background
        self._results_thread = threading.Thread(target=results_thread_main, args=(self,))
        self._results_thread.daemon = True
        self._results_thread.start()
    def cleanup(self):
        """Signal the results thread to exit (via the sentinel) and wait
        for it to finish."""
        self._final_q.put(_sentinel)
        self._results_thread.join()
    def run(self, iterator, play_context, result=0):
        """Finish a strategy run: advance all hosts to completion, run any
        notified handlers, and translate the final host states into one of
        the TQM RUN_* result codes."""
        # execute one more pass through the iterator without peeking, to
        # make sure that all of the hosts are advanced to their final task.
        # This should be safe, as everything should be ITERATING_COMPLETE by
        # this point, though the strategy may not advance the hosts itself.
        [iterator.get_next_task_for_host(host) for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]

        # save the failed/unreachable hosts, as the run_handlers()
        # method will clear that information during its execution
        failed_hosts = iterator.get_failed_hosts()
        unreachable_hosts = self._tqm._unreachable_hosts.keys()

        display.debug("running handlers")
        handler_result = self.run_handlers(iterator, play_context)
        # run_handlers returns either a bool (False == error) or a
        # bit-flag result code that gets OR-ed into ours.
        if isinstance(handler_result, bool) and not handler_result:
            result |= self._tqm.RUN_ERROR
        elif not handler_result:
            result |= handler_result

        # now update with the hosts (if any) that failed or were
        # unreachable during the handler execution phase
        failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts())
        unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())

        # return the appropriate code, depending on the status hosts after the run
        if not isinstance(result, bool) and result != self._tqm.RUN_OK:
            return result
        elif len(unreachable_hosts) > 0:
            return self._tqm.RUN_UNREACHABLE_HOSTS
        elif len(failed_hosts) > 0:
            return self._tqm.RUN_FAILED_HOSTS
        else:
            return self._tqm.RUN_OK
def get_hosts_remaining(self, play):
return [host for host in self._inventory.get_hosts(play.hosts)
if host.name not in self._tqm._failed_hosts and host.name not in self._tqm._unreachable_hosts]
def get_failed_hosts(self, play):
return [host for host in self._inventory.get_hosts(play.hosts) if host.name in self._tqm._failed_hosts]
def add_tqm_variables(self, vars, play):
'''
Base class method to add extra variables/information to the list of task
vars sent through the executor engine regarding the task queue manager state.
'''
vars['ansible_current_hosts'] = [h.name for h in self.get_hosts_remaining(play)]
vars['ansible_failed_hosts'] = [h.name for h in self.get_failed_hosts(play)]
    def _queue_task(self, host, task, task_vars, play_context):
        ''' handles queueing the task up to be sent to a worker '''

        display.debug("entering _queue_task() for %s/%s" % (host.name, task.action))

        # Add a write lock for tasks.
        # Maybe this should be added somewhere further up the call stack but
        # this is the earliest in the code where we have task (1) extracted
        # into its own variable and (2) there's only a single code path
        # leading to the module being run.  This is called by three
        # functions: __init__.py::_do_handler_run(), linear.py::run(), and
        # free.py::run() so we'd have to add to all three to do it there.
        # The next common higher level is __init__.py::run() and that has
        # tasks inside of play_iterator so we'd have to extract them to do it
        # there.
        if task.action not in action_write_locks.action_write_locks:
            display.debug('Creating lock for %s' % task.action)
            action_write_locks.action_write_locks[task.action] = Lock()

        # and then queue the new task
        try:
            # create a dummy object with plugin loaders set as an easier
            # way to share them with the forked processes
            shared_loader_obj = SharedPluginLoaderObj()

            # Round-robin over worker slots starting at _cur_worker; a slot
            # is free when its process is None or no longer alive.
            queued = False
            starting_worker = self._cur_worker
            while True:
                (worker_prc, rslt_q) = self._workers[self._cur_worker]
                if worker_prc is None or not worker_prc.is_alive():
                    worker_prc = WorkerProcess(self._final_q, task_vars, host, task, play_context, self._loader, self._variable_manager, shared_loader_obj)
                    self._workers[self._cur_worker][0] = worker_prc
                    worker_prc.start()
                    display.debug("worker is %d (out of %d available)" % (self._cur_worker+1, len(self._workers)))
                    queued = True
                self._cur_worker += 1
                if self._cur_worker >= len(self._workers):
                    self._cur_worker = 0
                if queued:
                    break
                elif self._cur_worker == starting_worker:
                    # Went all the way around with no free slot; back off
                    # briefly before scanning again.
                    time.sleep(0.0001)

            self._pending_results += 1
        except (EOFError, IOError, AssertionError) as e:
            # most likely an abort
            display.debug("got an error while queuing: %s" % e)
            return
        display.debug("exiting _queue_task() for %s/%s" % (host.name, task.action))
def get_task_hosts(self, iterator, task_host, task):
if task.run_once:
host_list = [host for host in self._inventory.get_hosts(iterator._play.hosts) if host.name not in self._tqm._unreachable_hosts]
else:
host_list = [task_host]
return host_list
def get_delegated_hosts(self, result, task):
host_name = task.delegate_to
actual_host = self._inventory.get_host(host_name)
if actual_host is None:
actual_host = Host(name=host_name)
return [actual_host]
    def _process_pending_results(self, iterator, one_pass=False, max_passes=None):
        '''
        Reads results off the final queue and takes appropriate action
        based on the result (executing callbacks, updating state, etc.).
        '''

        ret_results = []

        def get_original_host(host_name):
            # Prefer the master inventory's host cache; fall back to a
            # pattern-based lookup.
            host_name = to_text(host_name)
            if host_name in self._inventory._hosts_cache:
                return self._inventory._hosts_cache[host_name]
            else:
                return self._inventory.get_host(host_name)

        def search_handler_blocks_by_name(handler_name, handler_blocks):
            # Find a handler whose (templated) name matches handler_name.
            for handler_block in handler_blocks:
                for handler_task in handler_block.block:
                    if handler_task.name:
                        handler_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, task=handler_task)
                        templar = Templar(loader=self._loader, variables=handler_vars)
                        try:
                            # first we check with the full result of get_name(), which may
                            # include the role name (if the handler is from a role). If that
                            # is not found, we resort to the simple name field, which doesn't
                            # have anything extra added to it.
                            target_handler_name = templar.template(handler_task.name)
                            if target_handler_name == handler_name:
                                return handler_task
                            else:
                                target_handler_name = templar.template(handler_task.get_name())
                                if target_handler_name == handler_name:
                                    return handler_task
                        except (UndefinedError, AnsibleUndefinedVariable):
                            # We skip this handler due to the fact that it may be using
                            # a variable in the name that was conditionally included via
                            # set_fact or some other method, and we don't want to error
                            # out unnecessarily
                            continue
            return None

        def search_handler_blocks_by_uuid(handler_uuid, handler_blocks):
            # Exact-match lookup by the handler task's internal uuid.
            for handler_block in handler_blocks:
                for handler_task in handler_block.block:
                    if handler_uuid == handler_task._uuid:
                        return handler_task
            return None

        def parent_handler_match(target_handler, handler_name):
            # Walk up the include chain checking whether any (templated)
            # include name matches handler_name.
            if target_handler:
                if isinstance(target_handler, (TaskInclude, IncludeRole)):
                    try:
                        handler_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, task=target_handler)
                        templar = Templar(loader=self._loader, variables=handler_vars)

                        target_handler_name = templar.template(target_handler.name)
                        if target_handler_name == handler_name:
                            return True
                        else:
                            target_handler_name = templar.template(target_handler.get_name())
                            if target_handler_name == handler_name:
                                return True
                    except (UndefinedError, AnsibleUndefinedVariable):
                        pass
                return parent_handler_match(target_handler._parent, handler_name)
            else:
                return False

        cur_pass = 0
        while True:
            try:
                self._results_lock.acquire()
                task_result = self._results.pop()
            except IndexError:
                # Nothing queued right now; stop this pass.
                break
            finally:
                self._results_lock.release()

            # get the original host and task. We then assign them to the TaskResult for use in callbacks/etc.
            original_host = get_original_host(task_result._host)
            found_task = iterator.get_original_task(original_host, task_result._task)
            # Work on a copy of the task, overlaying the per-result task
            # fields serialized back from the worker.
            original_task = found_task.copy(exclude_parent=True, exclude_tasks=True)
            original_task._parent = found_task._parent
            for (attr, val) in iteritems(task_result._task_fields):
                setattr(original_task, attr, val)
            task_result._host = original_host
            task_result._task = original_task

            # get the correct loop var for use later
            # NOTE(review): loop_var is computed but not referenced again in
            # this method — confirm whether it can be removed.
            if original_task.loop_control:
                loop_var = original_task.loop_control.loop_var or 'item'
            else:
                loop_var = 'item'

            # send callbacks for 'non final' results
            if '_ansible_retry' in task_result._result:
                self._tqm.send_callback('v2_runner_retry', task_result)
                continue
            elif '_ansible_item_result' in task_result._result:
                if task_result.is_failed() or task_result.is_unreachable():
                    self._tqm.send_callback('v2_runner_item_on_failed', task_result)
                elif task_result.is_skipped():
                    self._tqm.send_callback('v2_runner_item_on_skipped', task_result)
                else:
                    if 'diff' in task_result._result:
                        if self._diff:
                            self._tqm.send_callback('v2_on_file_diff', task_result)
                    self._tqm.send_callback('v2_runner_item_on_ok', task_result)
                continue

            if original_task.register:
                # Store the (cleaned) result under the register name for all
                # hosts the task applies to.
                host_list = self.get_task_hosts(iterator, original_host, original_task)

                clean_copy = strip_internal_keys(task_result._result)
                if 'invocation' in clean_copy:
                    del clean_copy['invocation']

                for target_host in host_list:
                    self._variable_manager.set_nonpersistent_facts(target_host, {original_task.register: clean_copy})

            # all host status messages contain 2 entries: (msg, task_result)
            role_ran = False
            if task_result.is_failed():
                role_ran = True
                ignore_errors = original_task.ignore_errors
                if not ignore_errors:
                    display.debug("marking %s as failed" % original_host.name)
                    if original_task.run_once:
                        # if we're using run_once, we have to fail every host here
                        for h in self._inventory.get_hosts(iterator._play.hosts):
                            if h.name not in self._tqm._unreachable_hosts:
                                state, _ = iterator.get_next_task_for_host(h, peek=True)
                                iterator.mark_host_failed(h)
                                state, new_task = iterator.get_next_task_for_host(h, peek=True)
                    else:
                        iterator.mark_host_failed(original_host)

                    # increment the failed count for this host
                    self._tqm._stats.increment('failures', original_host.name)

                    # grab the current state and if we're iterating on the rescue portion
                    # of a block then we save the failed task in a special var for use
                    # within the rescue/always
                    state, _ = iterator.get_next_task_for_host(original_host, peek=True)

                    if iterator.is_failed(original_host) and state and state.run_state == iterator.ITERATING_COMPLETE:
                        self._tqm._failed_hosts[original_host.name] = True

                    if state and state.run_state == iterator.ITERATING_RESCUE:
                        self._variable_manager.set_nonpersistent_facts(
                            original_host,
                            dict(
                                ansible_failed_task=original_task.serialize(),
                                ansible_failed_result=task_result._result,
                            ),
                        )
                else:
                    # Failure ignored: count it as ok (and changed if so).
                    self._tqm._stats.increment('ok', original_host.name)
                    if 'changed' in task_result._result and task_result._result['changed']:
                        self._tqm._stats.increment('changed', original_host.name)
                self._tqm.send_callback('v2_runner_on_failed', task_result, ignore_errors=ignore_errors)
            elif task_result.is_unreachable():
                self._tqm._unreachable_hosts[original_host.name] = True
                iterator._play._removed_hosts.append(original_host.name)
                self._tqm._stats.increment('dark', original_host.name)
                self._tqm.send_callback('v2_runner_on_unreachable', task_result)
            elif task_result.is_skipped():
                self._tqm._stats.increment('skipped', original_host.name)
                self._tqm.send_callback('v2_runner_on_skipped', task_result)
            else:
                role_ran = True

                if original_task.loop:
                    # this task had a loop, and has more than one result, so
                    # loop over all of them instead of a single result
                    result_items = task_result._result.get('results', [])
                else:
                    result_items = [task_result._result]

                for result_item in result_items:
                    if '_ansible_notify' in result_item:
                        if task_result.is_changed():
                            # The shared dictionary for notified handlers is a proxy, which
                            # does not detect when sub-objects within the proxy are modified.
                            # So, per the docs, we reassign the list so the proxy picks up and
                            # notifies all other threads
                            for handler_name in result_item['_ansible_notify']:
                                found = False
                                # Find the handler using the above helper. First we look up the
                                # dependency chain of the current task (if it's from a role), otherwise
                                # we just look through the list of handlers in the current play/all
                                # roles and use the first one that matches the notify name
                                target_handler = search_handler_blocks_by_name(handler_name, iterator._play.handlers)
                                if target_handler is not None:
                                    found = True
                                    if original_host not in self._notified_handlers[target_handler._uuid]:
                                        self._notified_handlers[target_handler._uuid].append(original_host)
                                        # FIXME: should this be a callback?
                                        display.vv("NOTIFIED HANDLER %s" % (handler_name,))
                                else:
                                    # As there may be more than one handler with the notified name as the
                                    # parent, so we just keep track of whether or not we found one at all
                                    for target_handler_uuid in self._notified_handlers:
                                        target_handler = search_handler_blocks_by_uuid(target_handler_uuid, iterator._play.handlers)
                                        if target_handler and parent_handler_match(target_handler, handler_name):
                                            found = True
                                            if original_host not in self._notified_handlers[target_handler._uuid]:
                                                self._notified_handlers[target_handler._uuid].append(original_host)
                                                display.vv("NOTIFIED HANDLER %s" % (target_handler.get_name(),))

                                if handler_name in self._listening_handlers:
                                    for listening_handler_uuid in self._listening_handlers[handler_name]:
                                        listening_handler = search_handler_blocks_by_uuid(listening_handler_uuid, iterator._play.handlers)
                                        if listening_handler is not None:
                                            found = True
                                        else:
                                            continue
                                        if original_host not in self._notified_handlers[listening_handler._uuid]:
                                            self._notified_handlers[listening_handler._uuid].append(original_host)
                                            display.vv("NOTIFIED HANDLER %s" % (listening_handler.get_name(),))

                                # and if none were found, then we raise an error
                                if not found:
                                    msg = ("The requested handler '%s' was not found in either the main handlers list nor in the listening "
                                           "handlers list" % handler_name)
                                    if C.ERROR_ON_MISSING_HANDLER:
                                        raise AnsibleError(msg)
                                    else:
                                        display.warning(msg)

                    if 'add_host' in result_item:
                        # this task added a new host (add_host module)
                        new_host_info = result_item.get('add_host', dict())
                        self._add_host(new_host_info, iterator)

                    elif 'add_group' in result_item:
                        # this task added a new group (group_by module)
                        self._add_group(original_host, result_item)

                    if 'ansible_facts' in result_item:
                        # include_vars sets individual host variables;
                        # everything else sets facts (persistent, or
                        # non-persistent for set_fact).
                        if original_task.action == 'include_vars':
                            if original_task.delegate_to is not None:
                                host_list = self.get_delegated_hosts(result_item, original_task)
                            else:
                                host_list = self.get_task_hosts(iterator, original_host, original_task)

                            for (var_name, var_value) in iteritems(result_item['ansible_facts']):
                                # find the host we're actually referring too here, which may
                                # be a host that is not really in inventory at all
                                for target_host in host_list:
                                    self._variable_manager.set_host_variable(target_host, var_name, var_value)
                        else:
                            # if delegated fact and we are delegating facts, we need to change target host for them
                            if original_task.delegate_to is not None and original_task.delegate_facts:
                                host_list = self.get_delegated_hosts(result_item, original_task)
                            else:
                                host_list = self.get_task_hosts(iterator, original_host, original_task)

                            for target_host in host_list:
                                if original_task.action == 'set_fact':
                                    self._variable_manager.set_nonpersistent_facts(target_host, result_item['ansible_facts'].copy())
                                else:
                                    self._variable_manager.set_host_facts(target_host, result_item['ansible_facts'].copy())

                    if 'ansible_stats' in result_item and 'data' in result_item['ansible_stats'] and result_item['ansible_stats']['data']:
                        # Custom per-run statistics (set_stats module); may
                        # be recorded per host or globally (myhost=None).
                        if 'per_host' not in result_item['ansible_stats'] or result_item['ansible_stats']['per_host']:
                            host_list = self.get_task_hosts(iterator, original_host, original_task)
                        else:
                            host_list = [None]

                        data = result_item['ansible_stats']['data']
                        aggregate = 'aggregate' in result_item['ansible_stats'] and result_item['ansible_stats']['aggregate']
                        for myhost in host_list:
                            for k in data.keys():
                                if aggregate:
                                    self._tqm._stats.update_custom_stats(k, data[k], myhost)
                                else:
                                    self._tqm._stats.set_custom_stats(k, data[k], myhost)

                if 'diff' in task_result._result:
                    if self._diff:
                        self._tqm.send_callback('v2_on_file_diff', task_result)

                if original_task.action not in ['include', 'include_role']:
                    self._tqm._stats.increment('ok', original_host.name)
                    if 'changed' in task_result._result and task_result._result['changed']:
                        self._tqm._stats.increment('changed', original_host.name)

                # finally, send the ok for this task
                self._tqm.send_callback('v2_runner_on_ok', task_result)

            self._pending_results -= 1
            if original_host.name in self._blocked_hosts:
                del self._blocked_hosts[original_host.name]

            # If this is a role task, mark the parent role as being run (if
            # the task was ok or failed, but not skipped or unreachable)
            if original_task._role is not None and role_ran:  # TODO: and original_task.action != 'include_role':?
                # lookup the role in the ROLE_CACHE to make sure we're dealing
                # with the correct object and mark it as executed
                for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[original_task._role._role_name]):
                    if role_obj._uuid == original_task._role._uuid:
                        role_obj._had_task_run[original_host.name] = True

            ret_results.append(task_result)

            # 'and' binds tighter than 'or': one_pass alone forces a single
            # pass; otherwise max_passes bounds the loop when set.
            if one_pass or max_passes is not None and (cur_pass+1) >= max_passes:
                break

            cur_pass += 1

        return ret_results
def _wait_on_pending_results(self, iterator):
'''
Wait for the shared counter to drop to zero, using a short sleep
between checks to ensure we don't spin lock
'''
ret_results = []
display.debug("waiting for pending results...")
while self._pending_results > 0 and not self._tqm._terminated:
if self._tqm.has_dead_workers():
raise AnsibleError("A worker was found in a dead state")
results = self._process_pending_results(iterator)
ret_results.extend(results)
if self._pending_results > 0:
time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)
display.debug("no more pending results, returning what we have")
return ret_results
    def _add_host(self, host_info, iterator):
        '''
        Helper function to add a new host to inventory based on a task result.
        '''

        host_name = host_info.get('host_name')

        # Check if host in inventory, add if not
        new_host = self._inventory.get_host(host_name)
        if not new_host:
            new_host = Host(name=host_name)
            self._inventory._hosts_cache[host_name] = new_host
            self._inventory.get_host_vars(new_host)

            # Every host belongs to the implicit 'all' group.
            allgroup = self._inventory.get_group('all')
            allgroup.add_host(new_host)

        # Set/update the vars for this host (inventory host_vars first,
        # then the task-supplied vars on top)
        new_host.vars = combine_vars(new_host.vars, self._inventory.get_host_vars(new_host))
        new_host.vars = combine_vars(new_host.vars, host_info.get('host_vars', dict()))

        new_groups = host_info.get('groups', [])
        for group_name in new_groups:
            if not self._inventory.get_group(group_name):
                # Group does not exist yet: create and register it.
                new_group = Group(group_name)
                self._inventory.add_group(new_group)
                self._inventory.get_group_vars(new_group)
                new_group.vars = self._inventory.get_group_variables(group_name)
            else:
                new_group = self._inventory.get_group(group_name)

            new_group.add_host(new_host)

            # add this host to the group cache
            if self._inventory.groups is not None:
                if group_name in self._inventory.groups:
                    if new_host not in self._inventory.get_group(group_name).hosts:
                        # NOTE(review): this appends the host *name* while the
                        # membership test above checks for the Host object —
                        # confirm the intended element type of Group.hosts.
                        self._inventory.get_group(group_name).hosts.append(new_host.name)

        # clear pattern caching completely since it's unpredictable what
        # patterns may have referenced the group
        self._inventory.clear_pattern_cache()

        # clear cache of group dict, which is used in magic host variables
        self._inventory.clear_group_dict_cache()

        # also clear the hostvar cache entry for the given play, so that
        # the new hosts are available if hostvars are referenced
        self._variable_manager.invalidate_hostvars_cache(play=iterator._play)
def _add_group(self, host, result_item):
    '''
    Helper function to add a group (if it does not exist), and to assign the
    specified host to that group.

    Returns True when inventory was modified (group created and/or host
    added), which the caller uses to report a "changed" result.
    '''
    changed = False
    # the host here is from the executor side, which means it was a
    # serialized/cloned copy and we'll need to look up the proper
    # host object from the master inventory
    real_host = self._inventory.get_host(host.name)
    group_name = result_item.get('add_group')
    new_group = self._inventory.get_group(group_name)
    if not new_group:
        # create the new group and add it to inventory
        new_group = Group(name=group_name)
        self._inventory.add_group(new_group)
        new_group.vars = self._inventory.get_group_vars(new_group)
        # and add the group to the proper hierarchy
        allgroup = self._inventory.get_group('all')
        allgroup.add_child_group(new_group)
        changed = True
    # membership test uses the executor-side copy's group list; the add
    # itself targets the real inventory host object
    if group_name not in host.get_groups():
        new_group.add_host(real_host)
        changed = True
    if changed:
        # clear cache of group dict, which is used in magic host variables
        self._inventory.clear_group_dict_cache()
    return changed
def _load_included_file(self, included_file, iterator, is_handler=False):
    '''
    Loads an included YAML file of tasks, applying the optional set of variables.

    Returns the list of Block objects parsed from the file. On any
    AnsibleError every host that included the file is marked failed and
    an empty list is returned instead of raising.
    '''
    display.debug("loading included file: %s" % included_file._filename)
    try:
        data = self._loader.load_from_file(included_file._filename)
        if data is None:
            return []
        elif not isinstance(data, list):
            raise AnsibleError("included task files must contain a list of tasks")
        # work on a copy of the include task so the original is not mutated
        ti_copy = included_file._task.copy()
        temp_vars = ti_copy.vars.copy()
        temp_vars.update(included_file._args)
        # pop tags out of the include args, if they were specified there, and assign
        # them to the include. If the include already had tags specified, we raise an
        # error so that users know not to specify them both ways
        tags = included_file._task.vars.pop('tags', [])
        if isinstance(tags, string_types):
            tags = tags.split(',')
        if len(tags) > 0:
            if len(included_file._task.tags) > 0:
                raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task). "
                                         "Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
                                         obj=included_file._task._ds)
            display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
            included_file._task.tags = tags
        ti_copy.vars = temp_vars
        block_list = load_list_of_blocks(
            data,
            play=iterator._play,
            parent_block=None,
            task_include=ti_copy,
            role=included_file._task._role,
            use_handlers=is_handler,
            loader=self._loader,
            variable_manager=self._variable_manager,
        )
        # since we skip incrementing the stats when the task result is
        # first processed, we do so now for each host in the list
        for host in included_file._hosts:
            self._tqm._stats.increment('ok', host.name)
    except AnsibleError as e:
        # mark all of the hosts including this file as failed, send callbacks,
        # and increment the stats for this host
        for host in included_file._hosts:
            tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=to_text(e)))
            iterator.mark_host_failed(host)
            self._tqm._failed_hosts[host.name] = True
            self._tqm._stats.increment('failures', host.name)
            self._tqm.send_callback('v2_runner_on_failed', tr)
        return []
    # finally, send the callback and return the list of blocks loaded
    self._tqm.send_callback('v2_playbook_on_include', included_file)
    display.debug("done processing included file")
    return block_list
def run_handlers(self, iterator, play_context):
    '''
    Runs handlers on those hosts which have been notified.

    Iterates every handler of the play and runs those that have at least
    one notification pending in self._notified_handlers. Returns the
    result of the last handler run (initially self._tqm.RUN_OK).
    '''
    result = self._tqm.RUN_OK
    for handler_block in iterator._play.handlers:
        # FIXME: handlers need to support the rescue/always portions of blocks too,
        #        but this may take some work in the iterator and gets tricky when
        #        we consider the ability of meta tasks to flush handlers
        for handler in handler_block.block:
            if handler._uuid in self._notified_handlers and len(self._notified_handlers[handler._uuid]):
                result = self._do_handler_run(handler, handler.get_name(), iterator=iterator, play_context=play_context)
                if not result:
                    # NOTE(review): this break only exits the inner loop,
                    # so handlers of subsequent blocks still run — confirm
                    # whether that is intended.
                    break
    return result
def _do_handler_run(self, handler, handler_name, iterator, play_context, notified_hosts=None):
    '''
    Queue one handler on every notified host, wait for the results, and
    recursively run any handlers brought in via includes. Returns False
    as soon as a (recursive) handler run fails, True otherwise.
    '''
    # FIXME: need to use iterator.get_failed_hosts() instead?
    #if not len(self.get_hosts_remaining(iterator._play)):
    #    self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
    #    result = False
    #    break
    # temporarily swap in the display name for the start callback only
    saved_name = handler.name
    handler.name = handler_name
    self._tqm.send_callback('v2_playbook_on_handler_task_start', handler)
    handler.name = saved_name
    if notified_hosts is None:
        notified_hosts = self._notified_handlers[handler._uuid]
    run_once = False
    try:
        action = action_loader.get(handler.action, class_only=True)
        if handler.run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
            run_once = True
    except KeyError:
        # we don't care here, because the action may simply not have a
        # corresponding action plugin
        pass
    host_results = []
    for host in notified_hosts:
        # skip hosts where the handler already ran, and failed hosts
        # unless force_handlers is in effect
        if not handler.has_triggered(host) and (not iterator.is_failed(host) or play_context.force_handlers):
            task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=handler)
            self.add_tqm_variables(task_vars, play=iterator._play)
            self._queue_task(host, handler, task_vars, play_context)
            if run_once:
                break
    # collect the results from the handler run
    host_results = self._wait_on_pending_results(iterator)
    try:
        included_files = IncludedFile.process_include_results(
            host_results,
            self._tqm,
            iterator=iterator,
            inventory=self._inventory,
            loader=self._loader,
            variable_manager=self._variable_manager
        )
    except AnsibleError as e:
        return False
    result = True
    if len(included_files) > 0:
        for included_file in included_files:
            try:
                new_blocks = self._load_included_file(included_file, iterator=iterator, is_handler=True)
                # for every task in each block brought in by the include, add the list
                # of hosts which included the file to the notified_handlers dict
                for block in new_blocks:
                    iterator._play.handlers.append(block)
                    iterator.cache_block_tasks(block)
                    for task in block.block:
                        # recurse: each included task is run as a handler
                        # on the hosts that included the file
                        result = self._do_handler_run(
                            handler=task,
                            handler_name=None,
                            iterator=iterator,
                            play_context=play_context,
                            notified_hosts=included_file._hosts[:],
                        )
                        if not result:
                            break
            except AnsibleError as e:
                for host in included_file._hosts:
                    iterator.mark_host_failed(host)
                    self._tqm._failed_hosts[host.name] = True
                display.warning(str(e))
                continue
    # wipe the notification list
    self._notified_handlers[handler._uuid] = []
    display.debug("done running handlers, result is: %s" % result)
    return result
def _take_step(self, task, host=None):
    '''
    Interactive "step" mode: ask the user whether the given task should
    run (optionally for a specific host). Answering "c"/"continue" also
    disables step mode for the rest of the run. Returns True when the
    task should be executed.
    '''
    prompt = u'Perform task: %s ' % task
    if host:
        prompt += u'on %s ' % host
    prompt += u'(N)o/(y)es/(c)ontinue: '
    answer = display.prompt(prompt).lower()
    if answer in ('y', 'yes'):
        display.debug("User ran task")
        proceed = True
    elif answer in ('c', 'continue'):
        display.debug("User ran task and canceled step mode")
        self._step = False
        proceed = True
    else:
        display.debug("User skipped task")
        proceed = False
    # echo the prompt text as a banner regardless of the answer
    display.banner(prompt)
    return proceed
def _execute_meta(self, task, play_context, iterator, target_host):
    '''
    Execute a ``meta:`` task inline on the strategy side (meta tasks are
    never queued to workers). Returns a single-element list containing
    the TaskResult for ``target_host``.
    '''
    # meta tasks store their args in the _raw_params field of args,
    # since they do not use k=v pairs, so get that
    meta_action = task.args.get('_raw_params')
    # FIXME(s):
    # * raise an error or show a warning when a conditional is used
    #   on a meta task that doesn't support them
    def _evaluate_conditional(h):
        # evaluate the task's when: clause in the context of host ``h``
        all_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=h, task=task)
        templar = Templar(loader=self._loader, variables=all_vars)
        return task.evaluate_conditional(templar, all_vars)
    skipped = False
    msg = ''
    if meta_action == 'noop':
        # FIXME: issue a callback for the noop here?
        msg="noop"
    elif meta_action == 'flush_handlers':
        self.run_handlers(iterator, play_context)
        msg = "ran handlers"
    elif meta_action == 'refresh_inventory':
        self._inventory.refresh_inventory()
        msg = "inventory successfully refreshed"
    elif meta_action == 'clear_facts':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                self._variable_manager.clear_facts(host)
            msg = "facts cleared"
        else:
            skipped = True
    elif meta_action == 'clear_host_errors':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                self._tqm._failed_hosts.pop(host.name, False)
                self._tqm._unreachable_hosts.pop(host.name, False)
                iterator._host_states[host.name].fail_state = iterator.FAILED_NONE
            msg="cleared host errors"
        else:
            skipped = True
    elif meta_action == 'end_play':
        if _evaluate_conditional(target_host):
            for host in self._inventory.get_hosts(iterator._play.hosts):
                if not host.name in self._tqm._unreachable_hosts:
                    iterator._host_states[host.name].run_state = iterator.ITERATING_COMPLETE
            msg="ending play"
    elif meta_action == 'reset_connection':
        connection = connection_loader.get(play_context.connection, play_context, '/dev/null')
        connection.reset()
        msg= 'reset connection'
    else:
        raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
    result = { 'msg': msg }
    if skipped:
        result['skipped'] = True
    else:
        result['changed'] = False
    display.vv("META: %s" % msg)
    return [TaskResult(target_host, task, result)]
def get_hosts_left(self, iterator):
    ''' returns list of available hosts for this iterator by filtering out unreachables '''
    unreachable = self._tqm._unreachable_hosts
    return [host
            for host in self._inventory.get_hosts(iterator._play.hosts,
                                                  order=iterator._play.order)
            if host.name not in unreachable]
| gpl-3.0 |
Thor77/youtube-dl | youtube_dl/extractor/streetvoice.py | 23 | 1614 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import unified_strdate
class StreetVoiceIE(InfoExtractor):
    # Extractor for songs hosted on streetvoice.com, including regional
    # subdomains such as tw.streetvoice.com.
    _VALID_URL = r'https?://(?:.+?\.)?streetvoice\.com/[^/]+/songs/(?P<id>[0-9]+)'
    _TESTS = [{
        'url': 'http://streetvoice.com/skippylu/songs/94440/',
        'md5': '15974627fc01a29e492c98593c2fd472',
        'info_dict': {
            'id': '94440',
            'ext': 'mp3',
            'title': '輸',
            'description': 'Crispy脆樂團 - 輸',
            'thumbnail': 're:^https?://.*\.jpg$',
            'duration': 260,
            'upload_date': '20091018',
            'uploader': 'Crispy脆樂團',
            'uploader_id': '627810',
        }
    }, {
        'url': 'http://tw.streetvoice.com/skippylu/songs/94440/',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        """Fetch song metadata from the public JSON API and build the info dict."""
        song_id = self._match_id(url)
        # data=b'' forces a POST request against the API endpoint
        song = self._download_json(
            'https://streetvoice.com/api/v1/public/song/%s/' % song_id, song_id, data=b'')
        title = song['name']
        author = song['user']['nickname']
        return {
            'id': song_id,
            'url': song['file'],
            'title': title,
            'description': '%s - %s' % (author, title),
            'thumbnail': self._proto_relative_url(song.get('image'), 'http:'),
            'duration': song.get('length'),
            'upload_date': unified_strdate(song.get('created_at')),
            'uploader': author,
            'uploader_id': compat_str(song['user']['id']),
        }
| unlicense |
mdanielwork/intellij-community | python/lib/Lib/site-packages/django/utils/_os.py | 71 | 2011 | import os
from os.path import join, normcase, normpath, abspath, isabs, sep
from django.utils.encoding import force_unicode
# Define our own abspath function that can handle joining
# unicode paths to a current working directory that has non-ASCII
# characters in it. This isn't necessary on Windows since the
# Windows version of abspath handles this correctly. The Windows
# abspath also handles drive letters differently than the pure
# Python implementation, so it's best not to replace it.
if os.name == 'nt':
    # On Windows the built-in abspath already handles unicode CWDs and
    # drive letters correctly, so it is used unchanged.
    abspathu = abspath
else:
    def abspathu(path):
        """
        Version of os.path.abspath that uses the unicode representation
        of the current working directory, thus avoiding a UnicodeDecodeError
        in join when the cwd has non-ASCII characters.
        """
        if not isabs(path):
            path = join(os.getcwdu(), path)
        return normpath(path)
def safe_join(base, *paths):
    """
    Joins one or more path components to the base path component intelligently.
    Returns a normalized, absolute version of the final path.

    The final path must be located inside of the base path component (otherwise
    a ValueError is raised).
    """
    # normcase avoids false negatives on case-insensitive filesystems
    # (like Windows).
    base = force_unicode(base)
    components = [force_unicode(p) for p in paths]
    final_path = normcase(abspathu(join(base, *components)))
    base_path = normcase(abspathu(base))
    prefix_len = len(base_path)
    # final_path must start with base_path, and the character right after
    # the prefix must be a separator (or nothing, when the two are equal)
    # so that e.g. /foo/bar does not wrongly match /foo/barbaz.
    next_char = final_path[prefix_len:prefix_len + 1]
    inside = final_path.startswith(base_path) and next_char in ('', sep)
    if not inside:
        raise ValueError('The joined path (%s) is located outside of the base '
                         'path component (%s)' % (final_path, base_path))
    return final_path
| apache-2.0 |
kevclarx/ansible | lib/ansible/modules/cloud/amazon/ec2_vpc_nacl_facts.py | 10 | 6644 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: ec2_vpc_nacl_facts
short_description: Gather facts about Network ACLs in an AWS VPC
description:
- Gather facts about Network ACLs in an AWS VPC
version_added: "2.2"
author: "Brad Davidson (@brandond)"
requirements: [ boto3 ]
options:
nacl_ids:
description:
- A list of Network ACL IDs to retrieve facts about.
required: false
default: []
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value. See \
U(http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeNetworkAcls.html) for possible filters. Filter \
names and values are case sensitive.
required: false
default: {}
notes:
- By default, the module will return all Network ACLs.
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Gather facts about all Network ACLs:
- name: Get All NACLs
register: all_nacls
ec2_vpc_nacl_facts:
region: us-west-2
# Retrieve default Network ACLs:
- name: Get Default NACLs
register: default_nacls
ec2_vpc_nacl_facts:
region: us-west-2
filters:
'default': 'true'
'''
RETURN = '''
nacl:
description: Returns an array of complex objects as described below.
returned: success
type: complex
contains:
nacl_id:
description: The ID of the Network Access Control List.
returned: always
type: string
vpc_id:
description: The ID of the VPC that the NACL is attached to.
returned: always
type: string
is_default:
description: True if the NACL is the default for its VPC.
returned: always
type: boolean
tags:
description: A dict of tags associated with the NACL.
returned: always
type: dict
subnets:
description: A list of subnet IDs that are associated with the NACL.
returned: always
type: list of string
ingress:
description: A list of NACL ingress rules.
returned: always
type: list of list
egress:
description: A list of NACL egress rules.
returned: always
type: list of list
'''
try:
import boto3
from botocore.exceptions import ClientError, NoCredentialsError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
# VPC-supported IANA protocol numbers
# http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml
PROTOCOL_NAMES = {'-1': 'all', '1': 'icmp', '6': 'tcp', '17': 'udp'}
def list_ec2_vpc_nacls(connection, module):
    """Describe the requested NACLs and exit the module with the results.

    ``connection`` is a boto3 EC2 client, ``module`` the AnsibleModule.
    Ends with module.exit_json() (or fail_json() on API errors), so this
    function does not return to the caller.
    """
    nacl_ids = module.params.get("nacl_ids")
    filters = ansible_dict_to_boto3_filter_list(module.params.get("filters"))
    try:
        nacls = connection.describe_network_acls(NetworkAclIds=nacl_ids, Filters=filters)
    except (ClientError, NoCredentialsError) as e:
        module.fail_json(msg=e.message, **camel_dict_to_snake_dict(e.response))
    # Turn the boto3 result in to ansible_friendly_snaked_names
    snaked_nacls = []
    for nacl in nacls['NetworkAcls']:
        snaked_nacls.append(camel_dict_to_snake_dict(nacl))
    # Turn the boto3 result in to ansible friendly tag dictionary
    for nacl in snaked_nacls:
        if 'tags' in nacl:
            nacl['tags'] = boto3_tag_list_to_ansible_dict(nacl['tags'], 'key', 'value')
        if 'entries' in nacl:
            # split entries into egress/ingress lists; rule 32767 is the
            # implicit catch-all default rule and is hidden from the output
            nacl['egress'] = [nacl_entry_to_list(e) for e in nacl['entries']
                              if e['rule_number'] != 32767 and e['egress']]
            nacl['ingress'] = [nacl_entry_to_list(e) for e in nacl['entries']
                               if e['rule_number'] != 32767 and not e['egress']]
            del nacl['entries']
        if 'associations' in nacl:
            nacl['subnets'] = [a['subnet_id'] for a in nacl['associations']]
            del nacl['associations']
        if 'network_acl_id' in nacl:
            nacl['nacl_id'] = nacl['network_acl_id']
            del nacl['network_acl_id']
    module.exit_json(nacls=snaked_nacls)
def nacl_entry_to_list(entry):
    """Flatten a snake-cased NACL entry dict into the module's list format:
    [rule_number, protocol_name, action, cidr, icmp_type, icmp_code]
    for ICMP rules, or
    [rule_number, protocol_name, action, cidr, None, None, port_from, port_to]
    for everything else."""
    row = [
        entry['rule_number'],
        PROTOCOL_NAMES[entry['protocol']],
        entry['rule_action'],
        entry['cidr_block'],
    ]
    # ICMP rows get type/code defaults; other rows get four placeholders
    # (icmp type, icmp code, port-from, port-to).
    row.extend([-1, -1] if entry['protocol'] == '1' else [None] * 4)
    if 'icmp_type_code' in entry:
        row[4] = entry['icmp_type_code']['type']
        row[5] = entry['icmp_type_code']['code']
    if 'port_range' in entry:
        row[6] = entry['port_range']['from']
        row[7] = entry['port_range']['to']
    return row
def main():
    """Module entry point: validate arguments, build the boto3 client and
    delegate to list_ec2_vpc_nacls()."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            nacl_ids=dict(default=[], type='list'),
            filters=dict(default={}, type='dict')
        )
    )
    # nacl_ids and filters cannot be combined in a single API call
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=[
                               ['nacl_ids', 'filters']
                           ]
                           )
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 required for this module')
    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
    if region:
        connection = boto3_conn(module, conn_type='client', resource='ec2',
                                region=region, endpoint=ec2_url, **aws_connect_params)
    else:
        module.fail_json(msg="region must be specified")
    list_ec2_vpc_nacls(connection, module)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 |
18padx08/PPTex | PPTexEnv_x86_64/lib/python2.7/site-packages/sympy/physics/tests/test_qho_1d.py | 28 | 1172 | from sympy import exp, integrate, oo, Rational, pi, S, simplify, sqrt
from sympy.abc import omega, m, x
from sympy.physics.qho_1d import psi_n, E_n
from sympy.physics.quantum.constants import hbar
nu = m * omega / hbar
def test_wavefunction():
    """Compare psi_n against the known closed forms for n = 0..3."""
    Psi = {
        0: (nu/pi)**(S(1)/4) * exp(-nu * x**2 /2),
        1: (nu/pi)**(S(1)/4) * sqrt(2*nu) * x * exp(-nu * x**2 /2),
        2: (nu/pi)**(S(1)/4) * (2 * nu * x**2 - 1)/sqrt(2) * exp(-nu * x**2 /2),
        3: (nu/pi)**(S(1)/4) * sqrt(nu/3) * (2 * nu * x**3 - 3 * x) * exp(-nu * x**2 /2)
    }
    for n in Psi:
        # the difference must simplify to exactly zero
        assert simplify(psi_n(n, x, m, omega) - Psi[n]) == 0
def test_norm(n=1):
    """Each eigenstate must be normalised: integral of |psi_n|^2 equals 1."""
    # Maximum "n" which is tested:
    for i in range(n + 1):
        assert integrate(psi_n(i, x, 1, 1)**2, (x, -oo, oo)) == 1
def test_orthogonality(n=1):
    """Distinct eigenstates must be orthogonal: <psi_i|psi_j> = 0 for i != j."""
    # Maximum "n" which is tested:
    for i in range(n + 1):
        for j in range(i + 1, n + 1):
            assert integrate(
                psi_n(i, x, 1, 1)*psi_n(j, x, 1, 1), (x, -oo, oo)) == 0
def test_energies(n=1):
    """Energy levels must follow E_n = hbar*omega*(n + 1/2)."""
    # Maximum "n" which is tested:
    for i in range(n + 1):
        assert E_n(i, omega) == hbar * omega * (i + Rational(1, 2))
| mit |
Telestream/telestream-cloud-python-sdk | telestream_cloud_qc_sdk/test/test_container_type.py | 1 | 1326 | # coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: cloudsupport@telestream.net
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import telestream_cloud_qc
from telestream_cloud_qc.models.container_type import ContainerType # noqa: E501
from telestream_cloud_qc.rest import ApiException
class TestContainerType(unittest.TestCase):
    """ContainerType unit test stubs (auto-generated by openapi-generator)."""

    def setUp(self):
        # no shared fixtures are needed for this model
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Test ContainerType
        include_option is a boolean, when False only required
        params are included, when True both required and
        optional params are included """
        # model = telestream_cloud_qc.models.container_type.ContainerType()  # noqa: E501
        # ContainerType has no parameters, so both branches construct it empty.
        if include_optional :
            return ContainerType(
            )
        else :
            return ContainerType(
            )

    def testContainerType(self):
        """Test ContainerType"""
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| mit |
daniel-j/lutris | lutris/services/service_game.py | 1 | 1672 | """Communicates between third party services games and Lutris games"""
from lutris import pga
class ServiceGame:
    """Representation of a game from a 3rd party service"""
    # Subclasses must override these two class attributes.
    store = NotImplemented
    installer_slug = NotImplemented

    def __init__(self):
        self.appid = None  # External ID of the game on the 3rd party service
        self.game_id = None  # Internal Lutris ID
        self.runner = None  # Name of the runner
        self.name = None  # Name
        self.slug = None  # Game slug
        self.icon = None  # Game icon / logo
        self.details = None  # Additional details for the game

    @classmethod
    def new_from_lutris_id(cls, game_id):
        """Create a ServiceGame from its Lutris ID"""
        service_game = cls()
        service_game.game_id = game_id
        return service_game

    @property
    def config_id(self):
        """Returns the ID to use for the lutris config file"""
        return self.slug + "-" + self.installer_slug

    def install(self):
        """Add an installed game to the library.

        Persists the game via pga.add_or_update, creates its config and
        returns the (possibly new) Lutris game id.
        """
        self.game_id = pga.add_or_update(
            id=self.game_id,
            name=self.name,
            runner=self.runner,
            slug=self.slug,
            installed=1,
            configpath=self.config_id,
            installer_slug=self.installer_slug,
        )
        self.create_config()
        return self.game_id

    def uninstall(self):
        """Uninstall a game from Lutris (keeps the library entry, flags it uninstalled)"""
        return pga.add_or_update(id=self.game_id, installed=0)

    def create_config(self):
        """Implement this in subclasses to properly create the game config"""
        raise NotImplementedError
| gpl-3.0 |
credativUK/OCB | addons/account/project/project.py | 38 | 2477 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_analytic_journal(osv.osv):
    # Journal dedicated to analytic (cost accounting) entries.
    _name = 'account.analytic.journal'
    _description = 'Analytic Journal'
    _columns = {
        'name': fields.char('Journal Name', size=64, required=True),
        'code': fields.char('Journal Code', size=8),
        'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the analytic journal without removing it."),
        'type': fields.selection([('sale','Sale'), ('purchase','Purchase'), ('cash','Cash'), ('general','General'), ('situation','Situation')], 'Type', size=32, required=True, help="Gives the type of the analytic journal. When it needs for a document (eg: an invoice) to create analytic entries, OpenERP will look for a matching journal of the same type."),
        'line_ids': fields.one2many('account.analytic.line', 'journal_id', 'Lines'),
        'company_id': fields.many2one('res.company', 'Company', required=True),
    }
    _defaults = {
        'active': True,
        'type': 'general',
        # default to the current user's company
        'company_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.id,
    }
# old-style OpenERP model registration
account_analytic_journal()
class account_journal(osv.osv):
    # Extends the core accounting journal with a link to its analytic journal.
    _inherit="account.journal"
    _columns = {
        'analytic_journal_id':fields.many2one('account.analytic.journal','Analytic Journal', help="Journal for analytic entries"),
    }
# old-style OpenERP model registration
account_journal()
| agpl-3.0 |
Mindwerks/worldengine | worldengine/drawing_functions.py | 2 | 18176 | """
This file should contain only functions that operates on pixels, not on images,
so no references to PIL are necessary and the module can be used also through
Jython
"""
import numpy
import sys
import time
from worldengine.common import get_verbose, count_neighbours
from worldengine.common import anti_alias as anti_alias_channel
from worldengine.biome import BiomeGroup, _un_camelize
# -------------------
# Reusable functions
# -------------------
def gradient(value, low, high, low_color, high_color):
    """Linearly interpolate between two RGB colors.

    ``value`` is positioned inside [low, high]; the result is an RGBA
    tuple with full opacity. When the range is degenerate (high == low)
    the low color is returned.
    """
    lr, lg, lb = low_color
    if high == low:
        return lr, lg, lb, 255
    hr, hg, hb = high_color
    t = float(value - low) / float(high - low)
    s = 1.0 - t
    return (int(lr * s + hr * t),
            int(lg * s + hg * t),
            int(lb * s + hb * t),
            255)
def rgba_to_rgb(rgba):
    """Drop the alpha channel from an RGBA tuple, returning (r, g, b)."""
    return rgba[0], rgba[1], rgba[2]
def draw_rivers_on_image(world, target, factor=1):
    """Draw only the rivers, it expect the background to be in place.

    Rivers are drawn dark blue, lakes a lighter blue. ``factor`` is the
    integer upscaling applied to the target image: every world cell maps
    to a factor x factor pixel square.
    """
    for y in range(world.height):
        for x in range(world.width):
            if world.is_land((x, y)) and (world.layers['river_map'].data[y, x] > 0.0):
                for dx in range(factor):
                    for dy in range(factor):
                        target.set_pixel(x * factor + dx, y * factor + dy, (0, 0, 128, 255))
            if world.is_land((x, y)) and (world.layers['lake_map'].data[y, x] != 0):
                for dx in range(factor):
                    for dy in range(factor):
                        target.set_pixel(x * factor + dx, y * factor + dy, (0, 100, 128, 255))
# -------------------
# Drawing ancient map
# -------------------
def _find_mountains_mask(world, factor):
    """Return a float mask (upscaled by ``factor``) of cells where mountain
    glyphs should be drawn: land cells above the mountain level whose
    radius-3 neighbourhood is predominantly mountainous."""
    _mask = numpy.zeros((world.height, world.width), float)
    _mask[world.elevation>world.get_mountain_level()] = 1.0
    # disregard elevated oceans
    _mask[world.ocean] = 0.0
    # this is fast but not 100% precise
    # subsequent steps are fiendishly sensitive to these precision errors
    # therefore the rounding
    _mask[_mask>0] = numpy.around(count_neighbours(_mask, 3)[_mask>0], 6)
    # keep only cells with more than 32 mountainous neighbours in radius 3
    _mask[_mask<32.000000001] = 0.0
    _mask /= 4.0
    _mask = _mask.repeat(factor, 0).repeat(factor, 1)
    return _mask
def _build_biome_group_masks(world, factor):
    """Build one upscaled mask per biome group.

    Returns a dict mapping the un-camelized group name to a float mask in
    which a cell is non-zero only when it belongs to the group and has
    more than 5 neighbouring cells of the same group (filters out
    isolated specks before glyphs are stamped).
    """
    biome_groups = BiomeGroup.__subclasses__()
    biome_masks = {}
    for group in biome_groups:
        group_mask = numpy.zeros((world.height, world.width), float)
        # union of every concrete biome belonging to this group
        for biome in group.__subclasses__():
            group_mask[world.biome==_un_camelize(biome.__name__)] += 1.0
        group_mask[group_mask>0] = count_neighbours(group_mask)[group_mask>0]
        group_mask[group_mask<5.000000001] = 0.0
        group_mask = group_mask.repeat(factor, 0).repeat(factor, 1)
        biome_masks[_un_camelize(group.__name__)] = group_mask
    return biome_masks
def _draw_shaded_pixel(pixels, x, y, r, g, b):
nb = (x ** int(y / 5) + x * 23 + y * 37 + (x * y) * 13) % 75
nr = r - nb
ng = g - nb
nb = b - nb
pixels[y, x] = (nr, ng, nb, 255)
def _draw_forest_pattern1(pixels, x, y, c, c2):
pixels[y - 4, x + 0] = c
pixels[y - 3, x + 0] = c
pixels[y - 2, x - 1] = c
pixels[y - 2, x + 1] = c
pixels[y - 1, x - 1] = c
pixels[y - 1, x + 1] = c
pixels[y + 0, x - 2] = c
pixels[y + 0, x + 1] = c
pixels[y + 0, x + 2] = c
pixels[y + 1, x - 2] = c
pixels[y + 1, x + 2] = c
pixels[y + 2, x - 3] = c
pixels[y + 2, x - 1] = c
pixels[y + 2, x + 3] = c
pixels[y + 3, x - 3] = c
pixels[y + 3, x - 2] = c
pixels[y + 3, x - 1] = c
pixels[y + 3, x - 0] = c
pixels[y + 3, x + 1] = c
pixels[y + 3, x + 2] = c
pixels[y + 3, x + 3] = c
pixels[y + 4, x - 0] = c
pixels[y - 2, x + 0] = c2
pixels[y - 1, x + 0] = c2
pixels[y - 0, x - 1] = c2
pixels[y - 0, x - 0] = c2
pixels[y + 1, x - 1] = c2
pixels[y + 1, x - 0] = c2
pixels[y + 1, x + 1] = c2
pixels[y + 2, x - 2] = c2
pixels[y + 2, x - 0] = c2
pixels[y + 2, x + 1] = c2
pixels[y + 2, x + 2] = c2
def _draw_forest_pattern2(pixels, x, y, c, c2):
pixels[y - 4, x - 1] = c
pixels[y - 4, x - 0] = c
pixels[y - 4, x + 1] = c
pixels[y - 3, x - 2] = c
pixels[y - 3, x - 1] = c
pixels[y - 3, x + 2] = c
pixels[y - 2, x - 2] = c
pixels[y - 2, x + 1] = c
pixels[y - 2, x + 2] = c
pixels[y - 1, x - 2] = c
pixels[y - 1, x + 2] = c
pixels[y - 0, x - 2] = c
pixels[y - 0, x - 1] = c
pixels[y - 0, x + 2] = c
pixels[y + 1, x - 2] = c
pixels[y + 1, x + 1] = c
pixels[y + 1, x + 2] = c
pixels[y + 2, x - 1] = c
pixels[y + 2, x - 0] = c
pixels[y + 2, x + 1] = c
pixels[y + 3, x - 0] = c
pixels[y + 4, x - 0] = c
pixels[y - 3, x + 0] = c2
pixels[y - 3, x + 1] = c2
pixels[y - 2, x - 1] = c2
pixels[y - 2, x - 0] = c2
pixels[y - 1, x - 1] = c2
pixels[y - 1, x - 0] = c2
pixels[y - 1, x + 1] = c2
pixels[y - 0, x - 0] = c2
pixels[y - 0, x + 1] = c2
pixels[y + 1, x - 1] = c2
pixels[y + 1, x - 0] = c2
def _draw_desert_pattern(pixels, x, y, c):
pixels[y - 2, x - 1] = c
pixels[y - 2, x - 0] = c
pixels[y - 2, x + 1] = c
pixels[y - 2, x + 1] = c
pixels[y - 2, x + 2] = c
pixels[y - 1, x - 2] = c
pixels[y - 1, x - 1] = c
pixels[y - 1, x - 0] = c
pixels[y - 1, x + 4] = c
pixels[y - 0, x - 4] = c
pixels[y - 0, x - 3] = c
pixels[y - 0, x - 2] = c
pixels[y - 0, x - 1] = c
pixels[y - 0, x + 1] = c
pixels[y - 0, x + 2] = c
pixels[y - 0, x + 6] = c
pixels[y + 1, x - 5] = c
pixels[y + 1, x - 0] = c
pixels[y + 1, x + 7] = c
pixels[y + 1, x + 8] = c
pixels[y + 2, x - 8] = c
pixels[y + 2, x - 7] = c
def _draw_glacier(pixels, x, y):
rg = 255 - (x ** int(y / 5) + x * 23 + y * 37 + (x * y) * 13) % 75
pixels[y, x] = (rg, rg, 255, 255)
def _draw_cold_parklands(pixels, x, y, w, h):
b = (x ** int(y / 5) + x * 23 + y * 37 + (x * y) * 13) % 75
r = 105 - b
g = 96 - b
b = 38 - int(b / 2)
pixels[y, x] = (r, g, b, 255)
def _draw_boreal_forest(pixels, x, y, w, h):
    """Boreal forest: conifer glyph in a dark green palette (w, h unused)."""
    _draw_forest_pattern1(pixels, x, y, (0, 32, 0, 255), (0, 64, 0, 255))
def _draw_warm_temperate_forest(pixels, x, y, w, h):
    """Warm temperate forest: round-canopy glyph, bright greens (w, h unused)."""
    _draw_forest_pattern2(pixels, x, y, (0, 96, 0, 255), (0, 192, 0, 255))
def _draw_temperate_forest1(pixels, x, y, w, h):
    """Temperate forest (variant 1): conifer glyph, mid greens (w, h unused)."""
    _draw_forest_pattern1(pixels, x, y, (0, 64, 0, 255), (0, 96, 0, 255))
def _draw_temperate_forest2(pixels, x, y, w, h):
    """Temperate forest (variant 2): round-canopy glyph, mid greens (w, h unused)."""
    _draw_forest_pattern2(pixels, x, y, (0, 64, 0, 255), (0, 112, 0, 255))
def _draw_tropical_dry_forest(pixels, x, y, w, h):
    """Tropical dry forest: round-canopy glyph, brown/yellow-green (w, h unused)."""
    _draw_forest_pattern2(pixels, x, y, (51, 36, 3, 255), (139, 204, 58, 255))
def _draw_jungle(pixels, x, y, w, h):
    """Jungle: round-canopy glyph in vivid greens (w, h unused)."""
    _draw_forest_pattern2(pixels, x, y, (0, 128, 0, 255), (0, 255, 0, 255))
def _draw_cool_desert(pixels, x, y, w, h):
    """Cool desert: dune glyph in a muted khaki (w, h unused)."""
    _draw_desert_pattern(pixels, x, y, (72, 72, 53, 255))
def _draw_hot_desert(pixels, x, y, w, h):
    """Hot desert: dune glyph, same khaki as the cool variant (w, h unused)."""
    _draw_desert_pattern(pixels, x, y, (72, 72, 53, 255))
def _draw_tundra(pixels, x, y, w, h):
    """Tundra: single dithered ochre pixel (w, h unused)."""
    _draw_shaded_pixel(pixels, x, y, 166, 148, 75)
def _draw_steppe(pixels, x, y, w, h):
    """Steppe: single dithered pale-green pixel (w, h unused)."""
    _draw_shaded_pixel(pixels, x, y, 96, 192, 96)
def _draw_chaparral(pixels, x, y, w, h):
    """Chaparral: single dithered olive pixel (w, h unused)."""
    _draw_shaded_pixel(pixels, x, y, 180, 171, 113)
def _draw_savanna(pixels, x, y, w, h):
    """Savanna: single dithered pale-yellow pixel (w, h unused)."""
    _draw_shaded_pixel(pixels, x, y, 255, 246, 188)
# TODO: complete and enable this one
def _dynamic_draw_a_mountain(pixels, rng, x, y, w=3, h=3):
    """Draw a mountain of half-width *w* and half-height *h* centred on
    (x, y), jittering the slope borders with *rng* so repeated mountains
    differ.

    Mirrors _draw_a_mountain() but randomizes the border position of each
    row; the jitter is clamped to +/-1 pixel relative to the previous row
    so the silhouette stays connected.
    """
    # mcl = (0, 0, 0, 255) # TODO: No longer used?
    # mcll = (128, 128, 128, 255)
    mcr = (75, 75, 75, 255)  # color of the right-edge ridge line
    # left edge
    last_leftborder = None
    for mody in range(-h, h + 1):
        bottomness = (float(mody + h) / 2.0) / w
        min_leftborder = int(bottomness * w * 0.66)
        if last_leftborder is not None:
            min_leftborder = max(min_leftborder, last_leftborder - 1)
        max_leftborder = int(bottomness * w * 1.33)
        if last_leftborder is not None:
            max_leftborder = min(max_leftborder, last_leftborder + 1)
        # jitter the border; int(...) keeps it usable as a range() bound
        # (the original added a float half-step, which broke range())
        leftborder = int(bottomness * w + rng.randint(-2, 2) / 2)
        if leftborder < min_leftborder:
            leftborder = min_leftborder
        if leftborder > max_leftborder:
            leftborder = max_leftborder
        last_leftborder = leftborder
        darkarea = int(bottomness * w / 2)
        lightarea = int(bottomness * w / 2)
        for itx in range(darkarea, leftborder + 1):
            pixels[y + mody, x - itx] = gradient(itx, darkarea, leftborder,
                                                 (0, 0, 0), (64, 64, 64))
        # mirror _draw_a_mountain(): the light gradient and the land strip
        # extend to the right of the centre (x + itx, not x - itx)
        for itx in range(-darkarea, lightarea + 1):
            pixels[y + mody, x + itx] = gradient(itx, -darkarea, lightarea,
                                                 (64, 64, 64), (128, 128, 128))
        for itx in range(lightarea, leftborder):
            pixels[y + mody, x + itx] = (181, 166, 127, 255)  # land_color
    # right edge
    last_modx = None
    for mody in range(-h, h + 1):
        bottomness = (float(mody + h) / 2.0) / w
        min_modx = int(bottomness * w * 0.66)
        if last_modx is not None:
            min_modx = max(min_modx, last_modx - 1)
        max_modx = int(bottomness * w * 1.33)
        if last_modx is not None:
            max_modx = min(max_modx, last_modx + 1)
        # use the seeded rng (not the global numpy state) for reproducibility
        modx = int(bottomness * w + rng.randint(-2, 2) / 2)
        if modx < min_modx:
            modx = min_modx
        if modx > max_modx:
            modx = max_modx
        last_modx = modx
        # write the ridge pixel of this row (the original reused a stale
        # `itx` from the left-edge loops here)
        pixels[y + mody, x + modx] = mcr
def _draw_a_mountain(pixels, x, y, w=3, h=3):
    """Draw a stylized mountain of half-width *w* and half-height *h*
    centred on (x, y): a dark-to-grey left slope, a land-colored strip,
    and a single ridge pixel per row on the right edge."""
    ridge_color = (75, 75, 75, 255)
    # left slope: black-to-grey gradient, then grey-to-light gradient,
    # then a strip of land color
    for dy in range(-h, h + 1):
        bottomness = (float(dy + h) / 2.0) / w
        leftborder = int(bottomness * w)
        darkarea = int(bottomness * w / 2)
        lightarea = int(bottomness * w / 2)
        for dx in range(darkarea, leftborder + 1):
            pixels[y + dy, x - dx] = gradient(dx, darkarea, leftborder,
                                              (0, 0, 0), (64, 64, 64))
        for dx in range(-darkarea, lightarea + 1):
            pixels[y + dy, x + dx] = gradient(dx, -darkarea, lightarea,
                                              (64, 64, 64), (128, 128, 128))
        for dx in range(lightarea, leftborder):
            pixels[y + dy, x + dx] = (181, 166, 127, 255)  # land_color
    # right edge: one ridge pixel per row, widening towards the base
    for dy in range(-h, h + 1):
        bottomness = (float(dy + h) / 2.0) / w
        pixels[y + dy, x + int(bottomness * w)] = ridge_color
def draw_ancientmap(world, target, resize_factor=1,
                    sea_color=(212, 198, 169, 255),
                    draw_biome = True, draw_rivers = True, draw_mountains = True,
                    draw_outer_land_border = False, verbose=get_verbose()):
    """Render *world* as an "ancient parchment" style map into *target*.

    target is indexed as target[y, x, channel] (RGBA); it must be
    resize_factor * world.height by resize_factor * world.width.
    Land/ocean are filled first, then coast borders, anti-aliasing,
    per-biome glyphs, rivers and mountains, in that order.

    NOTE(review): the ``verbose=get_verbose()`` default is evaluated once
    at import time, not per call — confirm this is intended.
    """
    rng = numpy.random.RandomState(world.seed) # create our own random generator
    if verbose:
        start_time = time.time()
    land_color = (
        181, 166, 127, 255) # TODO: Put this in the argument list too??
    # scale the ocean mask up to the output resolution
    scaled_ocean = world.ocean.repeat(resize_factor, 0).repeat(resize_factor, 1)
    # coast border: land pixels that touch at least one ocean pixel
    borders = numpy.zeros((resize_factor * world.height, resize_factor * world.width), bool)
    borders[count_neighbours(scaled_ocean) > 0] = True
    borders[scaled_ocean] = False
    # cache neighbours count at different radii
    border_neighbours = {}
    border_neighbours[6] = numpy.rint(count_neighbours(borders, 6))
    border_neighbours[9] = numpy.rint(count_neighbours(borders, 9))
    if draw_outer_land_border:
        # grow the border outward into the ocean (two dilation passes)
        inner_borders = borders
        outer_borders = None
        for i in range(2):
            _outer_borders = numpy.zeros((resize_factor * world.height, resize_factor * world.width), bool)
            _outer_borders[count_neighbours(inner_borders) > 0] = True
            _outer_borders[inner_borders] = False
            _outer_borders[numpy.logical_not(scaled_ocean)] = False
            outer_borders = _outer_borders
            inner_borders = outer_borders
    if draw_mountains:
        mountains_mask = _find_mountains_mask(world, resize_factor)
    if draw_biome:
        biome_masks = _build_biome_group_masks(world, resize_factor)
    def _draw_biome(name, _func, w, h, r, _alt_func = None):
        # Stamp glyphs of size (w, h) for biome group *name*; skip pixels
        # within radius r of a coast, and clear the mask around each stamp
        # so glyphs don't overlap. _alt_func, when given, is used for
        # roughly half of the stamps.
        if verbose:
            start_time = time.time()
        for y in range(resize_factor * world.height):
            for x in range(resize_factor * world.width):
                if biome_masks[name][y, x] > 0:
                    if r == 0 or border_neighbours[r][y,x] <= 2:
                        if _alt_func is not None and rng.random_sample() > .5:
                            _alt_func(target, x, y, w, h)
                        else:
                            _func(target, x, y, w, h)
                        biome_masks[name][y-r:y+r+1,x-r:x+r+1] = 0.0
        if verbose:
            elapsed_time = time.time() - start_time
            print(
                "...drawing_functions.draw_ancientmap: " + name +
                " Elapsed time " + str(elapsed_time) + " seconds.")
    if verbose:
        elapsed_time = time.time() - start_time
        print(
            "...drawing_functions.draw_oldmap_on_pixel: init Elapsed time " +
            str(elapsed_time) + " seconds.")
        sys.stdout.flush()
    if verbose:
        start_time = time.time()
    border_color = (0, 0, 0, 255)
    outer_border_color = gradient(0.5, 0, 1.0, rgba_to_rgb(border_color), rgba_to_rgb(sea_color))
    # start in low resolution
    num_channels = 4
    channels = numpy.zeros((num_channels, world.height, world.width), int)
    for c in range(num_channels):
        channels[c] = land_color[c]
        channels[c][world.ocean] = sea_color[c]
    # now go full resolution
    channels = channels.repeat(resize_factor, 1).repeat(resize_factor, 2)
    if draw_outer_land_border:
        for c in range(num_channels):
            channels[c][outer_borders] = outer_border_color[c]
    for c in range(num_channels):
        channels[c][borders] = border_color[c]
    if verbose:
        elapsed_time = time.time() - start_time
        print(
            "...drawing_functions.draw_oldmap_on_pixel: color ocean " +
            "Elapsed time " + str(elapsed_time) + " seconds.")
    if verbose:
        start_time = time.time()
    # don't anti-alias the alpha channel
    for c in range(num_channels-1):
        channels[c] = anti_alias_channel(channels[c], 1)
    # switch from channel major storage to pixel major storage
    for c in range(num_channels):
        target[:,:,c] = channels[c,:,:]
    if verbose:
        elapsed_time = time.time() - start_time
        print(
            "...drawing_functions.draw_oldmap_on_pixel: anti alias " +
            "Elapsed time " + str(elapsed_time) + " seconds.")
    if draw_biome:
        # Draw glacier
        if verbose:
            start_time = time.time()
        for y in range(resize_factor * world.height):
            for x in range(resize_factor * world.width):
                if not borders[y, x] and world.is_iceland(
                        (int(x / resize_factor), int(y / resize_factor))):
                    _draw_glacier(target, x, y)
        if verbose:
            elapsed_time = time.time() - start_time
            print(
                "...drawing_functions.draw_oldmap_on_pixel: draw glacier " +
                "Elapsed time " + str(elapsed_time) + " seconds.")
        # single-pixel biomes first (r == 0), then larger glyphs
        _draw_biome('tundra', _draw_tundra, 0, 0, 0)
        _draw_biome('cold parklands', _draw_cold_parklands, 0, 0, 0)
        _draw_biome('steppe', _draw_steppe, 0, 0, 0)
        _draw_biome('chaparral', _draw_chaparral, 0, 0, 0)
        _draw_biome('savanna', _draw_savanna, 0, 0, 0)
        _draw_biome('cool desert', _draw_cool_desert, 8, 2, 9)
        _draw_biome('hot desert', _draw_hot_desert, 8, 2, 9)
        _draw_biome('boreal forest', _draw_boreal_forest, 4, 5, 6)
        _draw_biome('cool temperate forest', _draw_temperate_forest1, 4, 5, 6,
                    _draw_temperate_forest2)
        _draw_biome('warm temperate forest', _draw_warm_temperate_forest, 4, 5, 6)
        _draw_biome('tropical dry forest group', _draw_tropical_dry_forest, 4, 5, 6)
        _draw_biome('jungle', _draw_jungle, 4, 5, 6)
    # TODO: there was a stub for a rock desert biome group
    # it should be super easy to introduce that group with the new
    # biome group concept but since it did nothing I removed the stub
    if draw_rivers:
        draw_rivers_on_image(world, target, resize_factor)
    # Draw mountains
    if draw_mountains:
        if verbose:
            start_time = time.time()
        for y in range(resize_factor * world.height):
            for x in range(resize_factor * world.width):
                if mountains_mask[y, x] > 0:
                    w = mountains_mask[y, x]
                    h = 3 + int(world.level_of_mountain(
                        (int(x / resize_factor), int(y / resize_factor))))
                    r = max(int(w / 3 * 2), h)
                    # lazily extend the neighbour-count cache for this radius
                    if r not in border_neighbours:
                        border_neighbours[r] = numpy.rint(count_neighbours(borders, r))
                    if border_neighbours[r][y,x] <= 2:
                        _draw_a_mountain(target, x, y, w=w, h=h)
                        mountains_mask[y-r:y+r+1,x-r:x+r+1] = 0.0
        if verbose:
            elapsed_time = time.time() - start_time
            print(
                "...drawing_functions.draw_oldmap_on_pixel: draw mountains " +
                "Elapsed time " + str(elapsed_time) + " seconds.")
| mit |
kazemakase/scikit-learn | sklearn/feature_extraction/hashing.py | 183 | 6155 | # Author: Lars Buitinck <L.J.Buitinck@uva.nl>
# License: BSD 3 clause
import numbers
import numpy as np
import scipy.sparse as sp
from . import _hashing
from ..base import BaseEstimator, TransformerMixin
def _iteritems(d):
"""Like d.iteritems, but accepts any collections.Mapping."""
return d.iteritems() if hasattr(d, "iteritems") else d.items()
class FeatureHasher(BaseEstimator, TransformerMixin):
    """Implements feature hashing, aka the hashing trick.
    This class turns sequences of symbolic feature names (strings) into
    scipy.sparse matrices, using a hash function to compute the matrix column
    corresponding to a name. The hash function employed is the signed 32-bit
    version of Murmurhash3.
    Feature names of type byte string are used as-is. Unicode strings are
    converted to UTF-8 first, but no Unicode normalization is done.
    Feature values must be (finite) numbers.
    This class is a low-memory alternative to DictVectorizer and
    CountVectorizer, intended for large-scale (online) learning and situations
    where memory is tight, e.g. when running prediction code on embedded
    devices.
    Read more in the :ref:`User Guide <feature_hashing>`.
    Parameters
    ----------
    n_features : integer, optional
        The number of features (columns) in the output matrices. Small numbers
        of features are likely to cause hash collisions, but large numbers
        will cause larger coefficient dimensions in linear learners.
    dtype : numpy type, optional
        The type of feature values. Passed to scipy.sparse matrix constructors
        as the dtype argument. Do not set this to bool, np.boolean or any
        unsigned integer type.
    input_type : string, optional
        Either "dict" (the default) to accept dictionaries over
        (feature_name, value); "pair" to accept pairs of (feature_name, value);
        or "string" to accept single strings.
        feature_name should be a string, while value should be a number.
        In the case of "string", a value of 1 is implied.
        The feature_name is hashed to find the appropriate column for the
        feature. The value's sign might be flipped in the output (but see
        non_negative, below).
    non_negative : boolean, optional, default False
        Whether output matrices should contain non-negative values only;
        effectively calls abs on the matrix prior to returning it.
        When True, output values can be interpreted as frequencies.
        When False, output values will have expected value zero.
    Examples
    --------
    >>> from sklearn.feature_extraction import FeatureHasher
    >>> h = FeatureHasher(n_features=10)
    >>> D = [{'dog': 1, 'cat':2, 'elephant':4},{'dog': 2, 'run': 5}]
    >>> f = h.transform(D)
    >>> f.toarray()
    array([[ 0., 0., -4., -1., 0., 0., 0., 0., 0., 2.],
    [ 0., 0., 0., -2., -5., 0., 0., 0., 0., 0.]])
    See also
    --------
    DictVectorizer : vectorizes string-valued features using a hash table.
    sklearn.preprocessing.OneHotEncoder : handles nominal/categorical features
    encoded as columns of integers.
    """
    def __init__(self, n_features=(2 ** 20), input_type="dict",
                 dtype=np.float64, non_negative=False):
        # validate eagerly so bad parameters fail at construction time
        self._validate_params(n_features, input_type)
        self.dtype = dtype
        self.input_type = input_type
        self.n_features = n_features
        self.non_negative = non_negative
    @staticmethod
    def _validate_params(n_features, input_type):
        """Raise TypeError/ValueError for invalid constructor arguments."""
        # strangely, np.int16 instances are not instances of Integral,
        # while np.int64 instances are...
        if not isinstance(n_features, (numbers.Integral, np.integer)):
            raise TypeError("n_features must be integral, got %r (%s)."
                            % (n_features, type(n_features)))
        elif n_features < 1 or n_features >= 2 ** 31:
            raise ValueError("Invalid number of features (%d)." % n_features)
        if input_type not in ("dict", "pair", "string"):
            raise ValueError("input_type must be 'dict', 'pair' or 'string',"
                             " got %r." % input_type)
    def fit(self, X=None, y=None):
        """No-op.
        This method doesn't do anything. It exists purely for compatibility
        with the scikit-learn transformer API.
        Returns
        -------
        self : FeatureHasher
        """
        # repeat input validation for grid search (which calls set_params)
        self._validate_params(self.n_features, self.input_type)
        return self
    def transform(self, raw_X, y=None):
        """Transform a sequence of instances to a scipy.sparse matrix.
        Parameters
        ----------
        raw_X : iterable over iterable over raw features, length = n_samples
            Samples. Each sample must be iterable an (e.g., a list or tuple)
            containing/generating feature names (and optionally values, see
            the input_type constructor argument) which will be hashed.
            raw_X need not support the len function, so it can be the result
            of a generator; n_samples is determined on the fly.
        y : (ignored)
        Returns
        -------
        X : scipy.sparse matrix, shape = (n_samples, self.n_features)
            Feature matrix, for use with estimators or further transformers.
        """
        raw_X = iter(raw_X)
        # normalize every input type into an iterable of (name, value) pairs
        if self.input_type == "dict":
            raw_X = (_iteritems(d) for d in raw_X)
        elif self.input_type == "string":
            # "string" input implies a value of 1 for every feature
            raw_X = (((f, 1) for f in x) for x in raw_X)
        indices, indptr, values = \
            _hashing.transform(raw_X, self.n_features, self.dtype)
        n_samples = indptr.shape[0] - 1
        if n_samples == 0:
            raise ValueError("Cannot vectorize empty sequence.")
        X = sp.csr_matrix((values, indices, indptr), dtype=self.dtype,
                          shape=(n_samples, self.n_features))
        X.sum_duplicates()  # also sorts the indices
        if self.non_negative:
            # in-place absolute value of the sparse data array
            np.abs(X.data, X.data)
        return X
| bsd-3-clause |
nginx/unit | test/test_asgi_websockets.py | 1 | 44158 | import struct
import time
from distutils.version import LooseVersion
import pytest
from unit.applications.lang.python import TestApplicationPython
from unit.applications.websockets import TestApplicationWebsocket
from unit.option import option
class TestASGIWebsockets(TestApplicationPython):
    """WebSocket protocol tests against Unit's ASGI Python module."""
    # ASGI requires async support, hence Python >= 3.5
    prerequisites = {
        'modules': {'python': lambda v: LooseVersion(v) >= LooseVersion('3.5')}
    }
    load_module = 'asgi'
    # shared helper for building/parsing WebSocket frames and handshakes
    ws = TestApplicationWebsocket()
    @pytest.fixture(autouse=True)
    def setup_method_fixture(self, request, skip_alert):
        """Disable keepalive pings and ignore socket-close alerts per test."""
        assert 'success' in self.conf(
            {'http': {'websocket': {'keepalive_interval': 0}}}, 'settings'
        ), 'clear keepalive_interval'
        skip_alert(r'socket close\(\d+\) failed')
    def close_connection(self, sock):
        """Start a clean close handshake on *sock* and verify the reply."""
        # nothing should be pending before we initiate the close
        assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'
        self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
        self.check_close(sock)
def check_close(self, sock, code=1000, no_close=False, frame=None):
if frame == None:
frame = self.ws.frame_read(sock)
assert frame['fin'] == True, 'close fin'
assert frame['opcode'] == self.ws.OP_CLOSE, 'close opcode'
assert frame['code'] == code, 'close code'
if not no_close:
sock.close()
def check_frame(self, frame, fin, opcode, payload, decode=True):
if opcode == self.ws.OP_BINARY or not decode:
data = frame['data']
else:
data = frame['data'].decode('utf-8')
assert frame['fin'] == fin, 'fin'
assert frame['opcode'] == opcode, 'opcode'
assert data == payload, 'payload'
    def test_asgi_websockets_handshake(self):
        """Successful upgrade: 101 plus Upgrade/Connection/Accept headers."""
        self.load('websockets/mirror')
        resp, sock, key = self.ws.upgrade()
        sock.close()
        assert resp['status'] == 101, 'status'
        assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
        assert resp['headers']['Connection'] == 'Upgrade', 'connection'
        assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
            key
        ), 'key'
        # remove "mirror" application
        self.load('websockets/subprotocol')
    def test_asgi_websockets_subprotocol(self):
        """Offered subprotocols reach the app; the chosen one is echoed."""
        self.load('websockets/subprotocol')
        resp, sock, key = self.ws.upgrade()
        sock.close()
        assert resp['status'] == 101, 'status'
        assert (
            resp['headers']['x-subprotocols'] == "('chat', 'phone', 'video')"
        ), 'subprotocols'
        assert resp['headers']['sec-websocket-protocol'] == 'chat', 'key'
    def test_asgi_websockets_mirror(self):
        """Text frames are echoed back by the mirror application."""
        self.load('websockets/mirror')
        message = 'blah'
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, message)
        frame = self.ws.frame_read(sock)
        assert message == frame['data'].decode('utf-8'), 'mirror'
        self.ws.frame_write(sock, self.ws.OP_TEXT, message)
        frame = self.ws.frame_read(sock)
        assert message == frame['data'].decode('utf-8'), 'mirror 2'
        sock.close()
    def test_asgi_websockets_mirror_app_change(self):
        """An open connection keeps working across app reconfiguration."""
        self.load('websockets/mirror')
        message = 'blah'
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, message)
        frame = self.ws.frame_read(sock)
        assert message == frame['data'].decode('utf-8'), 'mirror'
        self.load('websockets/subprotocol')
        self.ws.frame_write(sock, self.ws.OP_TEXT, message)
        frame = self.ws.frame_read(sock)
        assert message == frame['data'].decode('utf-8'), 'mirror 2'
        sock.close()
    def test_asgi_websockets_no_mask(self):
        """Unmasked client frames must be rejected with close code 1002."""
        self.load('websockets/mirror')
        message = 'blah'
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, message, mask=False)
        frame = self.ws.frame_read(sock)
        assert frame['opcode'] == self.ws.OP_CLOSE, 'no mask opcode'
        assert frame['code'] == 1002, 'no mask close code'
        sock.close()
    def test_asgi_websockets_fragmentation(self):
        """Fragmented messages (TEXT + CONT frames) are reassembled."""
        self.load('websockets/mirror')
        message = 'blah'
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, message, fin=False)
        self.ws.frame_write(sock, self.ws.OP_CONT, ' ', fin=False)
        self.ws.frame_write(sock, self.ws.OP_CONT, message)
        frame = self.ws.frame_read(sock)
        assert message + ' ' + message == frame['data'].decode(
            'utf-8'
        ), 'mirror framing'
        sock.close()
    def test_asgi_websockets_length_long(self):
        """A 2**64-1 declared payload length triggers close 1009."""
        self.load('websockets/mirror')
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
        self.ws.frame_write(
            sock, self.ws.OP_CONT, 'fragment2', length=2 ** 64 - 1
        )
        self.check_close(sock, 1009) # 1009 - CLOSE_TOO_LARGE
    def test_asgi_websockets_frame_fragmentation_invalid(self):
        """Fragmented control frames are a protocol error (close 1002)."""
        self.load('websockets/mirror')
        message = 'blah'
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_PING, message, fin=False)
        frame = self.ws.frame_read(sock)
        frame.pop('data')
        assert frame == {
            'fin': True,
            'rsv1': False,
            'rsv2': False,
            'rsv3': False,
            'opcode': self.ws.OP_CLOSE,
            'mask': 0,
            'code': 1002,
            'reason': 'Fragmented control frame',
        }, 'close frame'
        sock.close()
    def test_asgi_websockets_large(self):
        """A large message may be echoed back split over two frames."""
        self.load('websockets/mirror')
        message = '0123456789' * 300
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, message)
        frame = self.ws.frame_read(sock)
        data = frame['data'].decode('utf-8')
        frame = self.ws.frame_read(sock)
        data += frame['data'].decode('utf-8')
        assert message == data, 'large'
        sock.close()
    def test_asgi_websockets_two_clients(self):
        """Two concurrent connections are echoed independently."""
        self.load('websockets/mirror')
        message1 = 'blah1'
        message2 = 'blah2'
        _, sock1, _ = self.ws.upgrade()
        _, sock2, _ = self.ws.upgrade()
        self.ws.frame_write(sock1, self.ws.OP_TEXT, message1)
        self.ws.frame_write(sock2, self.ws.OP_TEXT, message2)
        frame1 = self.ws.frame_read(sock1)
        frame2 = self.ws.frame_read(sock2)
        assert message1 == frame1['data'].decode('utf-8'), 'client 1'
        assert message2 == frame2['data'].decode('utf-8'), 'client 2'
        sock1.close()
        sock2.close()
    @pytest.mark.skip('not yet')
    def test_asgi_websockets_handshake_upgrade_absent(
        self,
    ): # FAIL https://tools.ietf.org/html/rfc6455#section-4.2.1
        """A handshake without the Upgrade header should get 400."""
        self.load('websockets/mirror')
        resp = self.get(
            headers={
                'Host': 'localhost',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': self.ws.key(),
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            },
        )
        assert resp['status'] == 400, 'upgrade absent'
    def test_asgi_websockets_handshake_case_insensitive(self):
        """Upgrade/Connection header values match case-insensitively."""
        self.load('websockets/mirror')
        resp, sock, _ = self.ws.upgrade(
            headers={
                'Host': 'localhost',
                'Upgrade': 'WEBSOCKET',
                'Connection': 'UPGRADE',
                'Sec-WebSocket-Key': self.ws.key(),
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            }
        )
        sock.close()
        assert resp['status'] == 101, 'status'
    @pytest.mark.skip('not yet')
    def test_asgi_websockets_handshake_connection_absent(self): # FAIL
        """A handshake without the Connection header should get 400."""
        self.load('websockets/mirror')
        resp = self.get(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Sec-WebSocket-Key': self.ws.key(),
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            },
        )
        assert resp['status'] == 400, 'status'
    def test_asgi_websockets_handshake_version_absent(self):
        """A handshake without Sec-WebSocket-Version gets 426."""
        self.load('websockets/mirror')
        resp = self.get(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': self.ws.key(),
                'Sec-WebSocket-Protocol': 'chat',
            },
        )
        assert resp['status'] == 426, 'status'
    @pytest.mark.skip('not yet')
    def test_asgi_websockets_handshake_key_invalid(self):
        """Malformed or duplicated Sec-WebSocket-Key should get 400."""
        self.load('websockets/mirror')
        resp = self.get(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': '!',
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            },
        )
        assert resp['status'] == 400, 'key length'
        key = self.ws.key()
        resp = self.get(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': [key, key],
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            },
        )
        assert (
            resp['status'] == 400
        ), 'key double' # FAIL https://tools.ietf.org/html/rfc6455#section-11.3.1
    def test_asgi_websockets_handshake_method_invalid(self):
        """Only GET is a valid handshake method; POST gets 400."""
        resp = self.post(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': self.ws.key(),
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            },
        )
        assert resp['status'] == 400, 'status'
    def test_asgi_websockets_handshake_http_10(self):
        """HTTP/1.0 cannot carry a WebSocket handshake; expect 400."""
        self.load('websockets/mirror')
        resp = self.get(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': self.ws.key(),
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            },
            http_10=True,
        )
        assert resp['status'] == 400, 'status'
    def test_asgi_websockets_handshake_uri_invalid(self):
        """A malformed request URI is rejected with 400."""
        self.load('websockets/mirror')
        resp = self.get(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': self.ws.key(),
                'Sec-WebSocket-Protocol': 'chat',
                'Sec-WebSocket-Version': 13,
            },
            url='!',
        )
        assert resp['status'] == 400, 'status'
    def test_asgi_websockets_protocol_absent(self):
        """The handshake succeeds without Sec-WebSocket-Protocol."""
        self.load('websockets/mirror')
        key = self.ws.key()
        resp, sock, _ = self.ws.upgrade(
            headers={
                'Host': 'localhost',
                'Upgrade': 'websocket',
                'Connection': 'Upgrade',
                'Sec-WebSocket-Key': key,
                'Sec-WebSocket-Version': 13,
            }
        )
        sock.close()
        assert resp['status'] == 101, 'status'
        assert resp['headers']['Upgrade'] == 'websocket', 'upgrade'
        assert resp['headers']['Connection'] == 'Upgrade', 'connection'
        assert resp['headers']['Sec-WebSocket-Accept'] == self.ws.accept(
            key
        ), 'key'
    # autobahn-testsuite
    #
    # Some of the following tests fail because Unit does not support UTF-8
    # validation for websocket frames. It should be implemented
    # by the application, if necessary.
    def test_asgi_websockets_1_1_1__1_1_8(self):
        """Autobahn 1.1.x: text frames of various lengths are mirrored."""
        self.load('websockets/mirror')
        opcode = self.ws.OP_TEXT
        _, sock, _ = self.ws.upgrade()
        def check_length(length, chopsize=None):
            # send a text frame of the given length and expect it mirrored
            payload = '*' * length
            self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
            frame = self.ws.frame_read(sock)
            self.check_frame(frame, True, opcode, payload)
        check_length(0) # 1_1_1
        check_length(125) # 1_1_2
        check_length(126) # 1_1_3
        check_length(127) # 1_1_4
        check_length(128) # 1_1_5
        check_length(65535) # 1_1_6
        check_length(65536) # 1_1_7
        check_length(65536, chopsize=997) # 1_1_8
        self.close_connection(sock)
    def test_asgi_websockets_1_2_1__1_2_8(self):
        """Autobahn 1.2.x: binary frames of various lengths are mirrored."""
        self.load('websockets/mirror')
        opcode = self.ws.OP_BINARY
        _, sock, _ = self.ws.upgrade()
        def check_length(length, chopsize=None):
            # send a binary frame of the given length and expect it mirrored
            payload = b'\xfe' * length
            self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
            frame = self.ws.frame_read(sock)
            self.check_frame(frame, True, opcode, payload)
        check_length(0) # 1_2_1
        check_length(125) # 1_2_2
        check_length(126) # 1_2_3
        check_length(127) # 1_2_4
        check_length(128) # 1_2_5
        check_length(65535) # 1_2_6
        check_length(65536) # 1_2_7
        check_length(65536, chopsize=997) # 1_2_8
        self.close_connection(sock)
    def test_asgi_websockets_2_1__2_6(self):
        """Autobahn 2.1-2.6: PING frames are answered with matching PONGs."""
        self.load('websockets/mirror')
        op_ping = self.ws.OP_PING
        op_pong = self.ws.OP_PONG
        _, sock, _ = self.ws.upgrade()
        def check_ping(payload, chopsize=None, decode=True):
            # a PING must be answered by a PONG with an identical payload
            self.ws.frame_write(sock, op_ping, payload, chopsize=chopsize)
            frame = self.ws.frame_read(sock)
            self.check_frame(frame, True, op_pong, payload, decode=decode)
        check_ping('') # 2_1
        check_ping('Hello, world!') # 2_2
        check_ping(b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff', decode=False) # 2_3
        check_ping(b'\xfe' * 125, decode=False) # 2_4
        check_ping(b'\xfe' * 125, chopsize=1, decode=False) # 2_6
        self.close_connection(sock)
        # 2_5: a control frame payload over 125 bytes is a protocol error
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_PING, b'\xfe' * 126)
        self.check_close(sock, 1002)
    def test_asgi_websockets_2_7__2_9(self):
        """Autobahn 2.7-2.9: unsolicited PONGs are silently ignored."""
        self.load('websockets/mirror')
        # 2_7
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_PONG, '')
        assert self.recvall(sock, read_timeout=0.1) == b'', '2_7'
        # 2_8
        self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
        assert self.recvall(sock, read_timeout=0.1) == b'', '2_8'
        # 2_9: a PING after an unsolicited PONG is still answered
        payload = 'ping payload'
        self.ws.frame_write(sock, self.ws.OP_PONG, 'unsolicited pong payload')
        self.ws.frame_write(sock, self.ws.OP_PING, payload)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_PONG, payload)
        self.close_connection(sock)
    def test_asgi_websockets_2_10__2_11(self):
        """Autobahn 2.10-2.11: a burst of PINGs gets PONGs in order."""
        self.load('websockets/mirror')
        # 2_10
        _, sock, _ = self.ws.upgrade()
        for i in range(0, 10):
            self.ws.frame_write(sock, self.ws.OP_PING, 'payload-%d' % i)
        for i in range(0, 10):
            frame = self.ws.frame_read(sock)
            self.check_frame(frame, True, self.ws.OP_PONG, 'payload-%d' % i)
        # 2_11: same, but each PING is sent one byte at a time
        for i in range(0, 10):
            opcode = self.ws.OP_PING
            self.ws.frame_write(sock, opcode, 'payload-%d' % i, chopsize=1)
        for i in range(0, 10):
            frame = self.ws.frame_read(sock)
            self.check_frame(frame, True, self.ws.OP_PONG, 'payload-%d' % i)
        self.close_connection(sock)
    @pytest.mark.skip('not yet')
    def test_asgi_websockets_3_1__3_7(self):
        """Autobahn 3.x: frames with RSV bits set must close with 1002."""
        self.load('websockets/mirror')
        payload = 'Hello, world!'
        # 3_1: rsv1 set
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv1=True)
        self.check_close(sock, 1002)
        # 3_2: clean frame mirrored, then rsv2 frame closes the connection
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload, rsv2=True)
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.check_close(sock, 1002, no_close=True)
        assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_2'
        sock.close()
        # 3_3: rsv1 + rsv2
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.ws.frame_write(
            sock, self.ws.OP_TEXT, payload, rsv1=True, rsv2=True
        )
        self.check_close(sock, 1002, no_close=True)
        assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_3'
        sock.close()
        # 3_4: rsv3, frames sent one byte at a time
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
        self.ws.frame_write(
            sock, self.ws.OP_TEXT, payload, rsv3=True, chopsize=1
        )
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.check_close(sock, 1002, no_close=True)
        assert self.recvall(sock, read_timeout=0.1) == b'', 'empty 3_4'
        sock.close()
        # 3_5: rsv1 + rsv3 on a binary frame
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(
            sock,
            self.ws.OP_BINARY,
            b'\x00\xff\xfe\xfd\xfc\xfb\x00\xff',
            rsv1=True,
            rsv3=True,
        )
        self.check_close(sock, 1002)
        # 3_6: rsv2 + rsv3 on a PING
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(
            sock, self.ws.OP_PING, payload, rsv2=True, rsv3=True
        )
        self.check_close(sock, 1002)
        # 3_7: all rsv bits on a CLOSE
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(
            sock, self.ws.OP_CLOSE, payload, rsv1=True, rsv2=True, rsv3=True
        )
        self.check_close(sock, 1002)
    def test_asgi_websockets_4_1_1__4_2_5(self):
        """Autobahn 4.x: reserved opcodes 0x03-0x0F must close with 1002."""
        self.load('websockets/mirror')
        payload = 'Hello, world!'
        # 4_1_1
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, 0x03, '')
        self.check_close(sock, 1002)
        # 4_1_2
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, 0x04, 'reserved opcode payload')
        self.check_close(sock, 1002)
        # 4_1_3
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.ws.frame_write(sock, 0x05, '')
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        self.check_close(sock, 1002)
        # 4_1_4
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.ws.frame_write(sock, 0x06, payload)
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        self.check_close(sock, 1002)
        # 4_1_5
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.ws.frame_write(sock, 0x07, payload, chopsize=1)
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        self.check_close(sock, 1002)
        # 4_2_1
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, 0x0B, '')
        self.check_close(sock, 1002)
        # 4_2_2
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, 0x0C, 'reserved opcode payload')
        self.check_close(sock, 1002)
        # 4_2_3
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.ws.frame_write(sock, 0x0D, '')
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        self.check_close(sock, 1002)
        # 4_2_4
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.ws.frame_write(sock, 0x0E, payload)
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        self.check_close(sock, 1002)
        # 4_2_5
        _, sock, _ = self.ws.upgrade()
        self.ws.frame_write(sock, self.ws.OP_TEXT, payload, chopsize=1)
        frame = self.ws.frame_read(sock)
        self.check_frame(frame, True, self.ws.OP_TEXT, payload)
        self.ws.frame_write(sock, 0x0F, payload, chopsize=1)
        self.ws.frame_write(sock, self.ws.OP_PING, '')
        self.check_close(sock, 1002)
def test_asgi_websockets_5_1__5_20(self):
    """Autobahn cases 5.1-5.20: message fragmentation rules.

    Control frames (PING/PONG) must never be fragmented (expect close
    1002, protocol error).  Data messages may be split into TEXT +
    CONT fragments, and control frames may interleave between
    fragments; a CONT without a preceding unfinished message is an
    error.  The 'mirror' app echoes complete messages back.
    """
    self.load('websockets/mirror')

    # 5_1: fragmented PING is a protocol error.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_PING, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
    self.check_close(sock, 1002)

    # 5_2: fragmented PONG is a protocol error.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_PONG, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
    self.check_close(sock, 1002)

    # 5_3: a two-fragment text message is reassembled and mirrored.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')

    # 5_4: nothing is echoed until the message is complete.
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    assert self.recvall(sock, read_timeout=0.1) == b'', '5_4'
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')

    # 5_5: same, sent one octet at a time (chopsize=1).
    self.ws.frame_write(
        sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
    )
    self.ws.frame_write(
        sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
    )
    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')

    # 5_6: PING injected between fragments is answered first.
    ping_payload = 'ping payload'

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')

    # 5_7: PONG for the interleaved PING arrives before the message.
    ping_payload = 'ping payload'

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    assert self.recvall(sock, read_timeout=0.1) == b'', '5_7'

    self.ws.frame_write(sock, self.ws.OP_PING, ping_payload)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)

    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')

    # 5_8: interleaved PING, all frames chopped to single octets.
    ping_payload = 'ping payload'

    self.ws.frame_write(
        sock, self.ws.OP_TEXT, 'fragment1', fin=False, chopsize=1
    )
    self.ws.frame_write(sock, self.ws.OP_PING, ping_payload, chopsize=1)
    self.ws.frame_write(
        sock, self.ws.OP_CONT, 'fragment2', fin=True, chopsize=1
    )

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PONG, ping_payload)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, 'fragment1fragment2')

    # 5_9: CONT with no message in progress is a protocol error.
    self.ws.frame_write(
        sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
    )
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
    self.check_close(sock, 1002)

    # 5_10: same on a fresh connection.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(
        sock, self.ws.OP_CONT, 'non-continuation payload', fin=True
    )
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
    self.check_close(sock, 1002)

    # 5_11: stray finished CONT, chopped into single octets.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(
        sock,
        self.ws.OP_CONT,
        'non-continuation payload',
        fin=True,
        chopsize=1,
    )
    self.ws.frame_write(
        sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
    )
    self.check_close(sock, 1002)

    # 5_12: stray unfinished CONT is also a protocol error.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(
        sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
    )
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
    self.check_close(sock, 1002)

    # 5_13: repeat of 5_12 on a fresh connection.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(
        sock, self.ws.OP_CONT, 'non-continuation payload', fin=False
    )
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'Hello, world!', fin=True)
    self.check_close(sock, 1002)

    # 5_14: stray unfinished CONT, chopped into single octets.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(
        sock,
        self.ws.OP_CONT,
        'non-continuation payload',
        fin=False,
        chopsize=1,
    )
    self.ws.frame_write(
        sock, self.ws.OP_TEXT, 'Hello, world!', fin=True, chopsize=1
    )
    self.check_close(sock, 1002)

    # 5_15: valid message followed by CONT-then-TEXT; the first message
    # may or may not be echoed before the close.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=True)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment4', fin=True)

    frame = self.ws.frame_read(sock)

    if frame['opcode'] == self.ws.OP_TEXT:
        self.check_frame(
            frame, True, self.ws.OP_TEXT, 'fragment1fragment2'
        )
        frame = None

    self.check_close(sock, 1002, frame=frame)

    # 5_16: CONT before TEXT within a fragmented sequence, twice.
    _, sock, _ = self.ws.upgrade()

    for i in range(0, 2):
        self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=False)
        self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
        self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
    self.check_close(sock, 1002)

    # 5_17: finished stray CONT, then a fragmented sequence, twice.
    _, sock, _ = self.ws.upgrade()

    for i in range(0, 2):
        self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment1', fin=True)
        self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2', fin=False)
        self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=True)
    self.check_close(sock, 1002)

    # 5_18: a second TEXT while a message is unfinished is an error.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment2')
    self.check_close(sock, 1002)

    # 5_19: five fragments with two interleaved PINGs and a pause;
    # both PONGs plus the reassembled message come back.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
    self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')

    time.sleep(1)

    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
    self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')

    self.check_frame(
        self.ws.frame_read(sock),
        True,
        self.ws.OP_TEXT,
        'fragment1fragment2fragment3fragment4fragment5',
    )

    # 5_20: as 5_19, but each PONG is read immediately and nothing
    # else is echoed until the final fragment arrives.
    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2', fin=False)
    self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 1!')

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 1!')

    time.sleep(1)

    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment3', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment4', fin=False)
    self.ws.frame_write(sock, self.ws.OP_PING, 'pongme 2!')

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PONG, 'pongme 2!')

    assert self.recvall(sock, read_timeout=0.1) == b'', '5_20'
    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment5')

    self.check_frame(
        self.ws.frame_read(sock),
        True,
        self.ws.OP_TEXT,
        'fragment1fragment2fragment3fragment4fragment5',
    )

    self.close_connection(sock)
def test_asgi_websockets_6_1_1__6_4_4(self):
    """Autobahn cases 6.1-6.2: empty payloads and UTF-8 text.

    Empty messages (whole or fragmented) must round-trip, and UTF-8
    text must survive fragmentation down to one byte per frame.  The
    6.3/6.4 invalid-UTF-8 cases are skipped: Unit does not validate
    UTF-8 payloads.
    """
    self.load('websockets/mirror')

    # 6_1_1: empty text message is mirrored back empty.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_TEXT, '')
    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, '')

    # 6_1_2: three empty fragments still form one empty message.
    self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, '', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, '')

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, '')

    # 6_1_3: only the middle fragment carries payload.
    payload = 'middle frame payload'

    self.ws.frame_write(sock, self.ws.OP_TEXT, '', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, payload, fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, '')

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, payload)

    # 6_2_1: valid UTF-8 text in a single frame.
    payload = 'Hello-µ@ßöäüàá-UTF-8!!'

    self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, payload)

    # 6_2_2: same text split into two fragments.
    self.ws.frame_write(sock, self.ws.OP_TEXT, payload[:12], fin=False)
    self.ws.frame_write(sock, self.ws.OP_CONT, payload[12:])

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, payload)

    # 6_2_3: same text fragmented one octet per frame.
    self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, payload)

    # 6_2_4: multi-byte UTF-8 sequence fragmented per octet.
    payload = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'

    self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, payload)

    self.close_connection(sock)

    # Unit does not support UTF-8 validation
    #
    # # 6_3_1 FAIL
    #
    # payload_1 = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
    # payload_2 = '\xed\xa0\x80'
    # payload_3 = '\x65\x64\x69\x74\x65\x64'
    #
    # payload = payload_1 + payload_2 + payload_3
    #
    # self.ws.message(sock, self.ws.OP_TEXT, payload)
    # self.check_close(sock, 1007)
    #
    # # 6_3_2 FAIL
    #
    # _, sock, _ = self.ws.upgrade()
    #
    # self.ws.message(sock, self.ws.OP_TEXT, payload, fragmention_size=1)
    # self.check_close(sock, 1007)
    #
    # # 6_4_1 ... 6_4_4 FAIL
def test_asgi_websockets_7_1_1__7_5_1(self):
    """Autobahn cases 7.1 and 7.3: closing-handshake behavior.

    After a CLOSE is exchanged no further frames may be processed;
    the close payload must be empty or a 2-byte code optionally
    followed by a reason of at most 123 bytes.  The 7.5.1 invalid-
    UTF-8-reason case is skipped (Unit does not validate UTF-8).
    """
    self.load('websockets/mirror')

    # 7_1_1: sanity echo before a clean close.
    _, sock, _ = self.ws.upgrade()

    payload = "Hello World!"

    self.ws.frame_write(sock, self.ws.OP_TEXT, payload)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_TEXT, payload)

    self.close_connection(sock)

    # 7_1_2: a second CLOSE after the first is ignored.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
    self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
    self.check_close(sock)

    # 7_1_3: no PONG is sent for a PING after the close handshake.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
    self.check_close(sock, no_close=True)

    self.ws.frame_write(sock, self.ws.OP_PING, '')
    assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'

    sock.close()

    # 7_1_4: text after the close handshake is not echoed.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
    self.check_close(sock, no_close=True)

    self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
    assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'

    sock.close()

    # 7_1_5: a fragment started before CLOSE is discarded.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'fragment1', fin=False)
    self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
    self.check_close(sock, no_close=True)

    self.ws.frame_write(sock, self.ws.OP_CONT, 'fragment2')
    assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'

    sock.close()

    # 7_1_6: 256K of queued data followed by CLOSE; nothing is
    # processed after the handshake completes.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_TEXT, 'BAsd7&jh23' * 26 * 2 ** 10)
    self.ws.frame_write(sock, self.ws.OP_TEXT, payload)
    self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())

    self.recvall(sock, read_timeout=1)

    self.ws.frame_write(sock, self.ws.OP_PING, '')
    assert self.recvall(sock, read_timeout=0.1) == b'', 'empty soc'

    sock.close()

    # 7_3_1: close with empty payload is valid.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_CLOSE, '')
    self.check_close(sock)

    # 7_3_2: close with a 1-byte payload is a protocol error.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_CLOSE, 'a')
    self.check_close(sock, 1002)

    # 7_3_3: close with a valid 2-byte code.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(sock, self.ws.OP_CLOSE, self.ws.serialize_close())
    self.check_close(sock)

    # 7_3_4: close with code and short reason.
    _, sock, _ = self.ws.upgrade()

    payload = self.ws.serialize_close(reason='Hello World!')

    self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
    self.check_close(sock)

    # 7_3_5: 123-byte reason is the maximum allowed.
    _, sock, _ = self.ws.upgrade()

    payload = self.ws.serialize_close(reason='*' * 123)

    self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
    self.check_close(sock)

    # 7_3_6: 124-byte reason overflows the control frame limit.
    _, sock, _ = self.ws.upgrade()

    payload = self.ws.serialize_close(reason='*' * 124)

    self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
    self.check_close(sock, 1002)

    # # 7_5_1 FAIL Unit does not support UTF-8 validation
    #
    # _, sock, _ = self.ws.upgrade()
    #
    # payload = self.ws.serialize_close(reason = '\xce\xba\xe1\xbd\xb9\xcf' \
    #         '\x83\xce\xbc\xce\xb5\xed\xa0\x80\x65\x64\x69\x74\x65\x64')
    #
    # self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
    # self.check_close(sock, 1007)
def test_asgi_websockets_7_7_X__7_9_X(self):
    """Autobahn cases 7.7 and 7.9: close-code validation.

    A close frame carrying a valid code is answered with a normal
    close; an invalid code must be answered with 1002 (protocol
    error).  The two original copy-pasted loops differed only in the
    expected close arguments, so they are folded into one loop.
    """
    self.load('websockets/mirror')

    valid_codes = [
        1000,
        1001,
        1002,
        1003,
        1007,
        1008,
        1009,
        1010,
        1011,
        3000,
        3999,
        4000,
        4999,
    ]

    invalid_codes = [0, 999, 1004, 1005, 1006, 1016, 1100, 2000, 2999]

    # Valid codes: check_close() with default args; invalid codes:
    # expect close code 1002.
    for codes, close_args in ((valid_codes, ()), (invalid_codes, (1002,))):
        for code in codes:
            _, sock, _ = self.ws.upgrade()

            payload = self.ws.serialize_close(code=code)
            self.ws.frame_write(sock, self.ws.OP_CLOSE, payload)
            self.check_close(sock, *close_args)
def test_asgi_websockets_7_13_1__7_13_2(self):
    """Autobahn cases 7.13: out-of-range and malformed close codes."""
    self.load('websockets/mirror')

    # 7_13_1: 5000 lies outside every defined close-code range.
    _, sock, _ = self.ws.upgrade()

    self.ws.frame_write(
        sock, self.ws.OP_CLOSE, self.ws.serialize_close(code=5000)
    )
    self.check_close(sock, 1002)

    # 7_13_2: a 4-byte "code" is malformed — close codes are 2 bytes.
    _, sock, _ = self.ws.upgrade()

    bogus_close = struct.pack('!I', 65536) + ''.encode('utf-8')
    self.ws.frame_write(sock, self.ws.OP_CLOSE, bogus_close)
    self.check_close(sock, 1002)
def test_asgi_websockets_9_1_1__9_6_6(self, is_unsafe):
    """Autobahn cases 9.1-9.6: large payloads (up to 16 MiB).

    Long-running and memory-heavy, so it only runs with the
    ``is_unsafe`` option.  Raises the frame-size limit and disables
    keepalive first, then mirrors large single frames, fragmented
    messages, and chopped frames for both text and binary opcodes.
    """
    if not is_unsafe:
        pytest.skip('unsafe, long run')

    self.load('websockets/mirror')

    assert 'success' in self.conf(
        {
            'http': {
                'websocket': {
                    'max_frame_size': 33554432,
                    'keepalive_interval': 0,
                }
            }
        },
        'settings',
    ), 'increase max_frame_size and keepalive_interval'

    _, sock, _ = self.ws.upgrade()

    op_text = self.ws.OP_TEXT
    op_binary = self.ws.OP_BINARY

    def check_payload(opcode, length, chopsize=None):
        # Mirror a single frame of `length` payload bytes/chars.
        if opcode == self.ws.OP_TEXT:
            payload = '*' * length
        else:
            payload = b'*' * length

        self.ws.frame_write(sock, opcode, payload, chopsize=chopsize)
        frame = self.ws.frame_read(sock, read_timeout=5)
        self.check_frame(frame, True, opcode, payload)

    def check_message(opcode, f_size):
        # Mirror a fixed 4 MiB message fragmented into f_size pieces.
        if opcode == self.ws.OP_TEXT:
            payload = '*' * 4 * 2 ** 20
        else:
            payload = b'*' * 4 * 2 ** 20

        self.ws.message(sock, opcode, payload, fragmention_size=f_size)
        frame = self.ws.frame_read(sock, read_timeout=5)
        self.check_frame(frame, True, opcode, payload)

    check_payload(op_text, 64 * 2 ** 10)  # 9_1_1
    check_payload(op_text, 256 * 2 ** 10)  # 9_1_2
    check_payload(op_text, 2 ** 20)  # 9_1_3
    check_payload(op_text, 4 * 2 ** 20)  # 9_1_4
    check_payload(op_text, 8 * 2 ** 20)  # 9_1_5
    check_payload(op_text, 16 * 2 ** 20)  # 9_1_6

    check_payload(op_binary, 64 * 2 ** 10)  # 9_2_1
    check_payload(op_binary, 256 * 2 ** 10)  # 9_2_2
    check_payload(op_binary, 2 ** 20)  # 9_2_3
    check_payload(op_binary, 4 * 2 ** 20)  # 9_2_4
    check_payload(op_binary, 8 * 2 ** 20)  # 9_2_5
    check_payload(op_binary, 16 * 2 ** 20)  # 9_2_6

    # Fragmented-message cases are skipped on Darwin/FreeBSD
    # (platform-specific socket behavior).
    if option.system != 'Darwin' and option.system != 'FreeBSD':
        check_message(op_text, 64)  # 9_3_1
        check_message(op_text, 256)  # 9_3_2
        check_message(op_text, 2 ** 10)  # 9_3_3
        check_message(op_text, 4 * 2 ** 10)  # 9_3_4
        check_message(op_text, 16 * 2 ** 10)  # 9_3_5
        check_message(op_text, 64 * 2 ** 10)  # 9_3_6
        check_message(op_text, 256 * 2 ** 10)  # 9_3_7
        check_message(op_text, 2 ** 20)  # 9_3_8
        check_message(op_text, 4 * 2 ** 20)  # 9_3_9

        check_message(op_binary, 64)  # 9_4_1
        check_message(op_binary, 256)  # 9_4_2
        check_message(op_binary, 2 ** 10)  # 9_4_3
        check_message(op_binary, 4 * 2 ** 10)  # 9_4_4
        check_message(op_binary, 16 * 2 ** 10)  # 9_4_5
        check_message(op_binary, 64 * 2 ** 10)  # 9_4_6
        check_message(op_binary, 256 * 2 ** 10)  # 9_4_7
        check_message(op_binary, 2 ** 20)  # 9_4_8
        check_message(op_binary, 4 * 2 ** 20)  # 9_4_9

    check_payload(op_text, 2 ** 20, chopsize=64)  # 9_5_1
    check_payload(op_text, 2 ** 20, chopsize=128)  # 9_5_2
    check_payload(op_text, 2 ** 20, chopsize=256)  # 9_5_3
    check_payload(op_text, 2 ** 20, chopsize=512)  # 9_5_4
    check_payload(op_text, 2 ** 20, chopsize=1024)  # 9_5_5
    check_payload(op_text, 2 ** 20, chopsize=2048)  # 9_5_6

    check_payload(op_binary, 2 ** 20, chopsize=64)  # 9_6_1
    check_payload(op_binary, 2 ** 20, chopsize=128)  # 9_6_2
    check_payload(op_binary, 2 ** 20, chopsize=256)  # 9_6_3
    check_payload(op_binary, 2 ** 20, chopsize=512)  # 9_6_4
    check_payload(op_binary, 2 ** 20, chopsize=1024)  # 9_6_5
    check_payload(op_binary, 2 ** 20, chopsize=2048)  # 9_6_6

    self.close_connection(sock)
def test_asgi_websockets_10_1_1(self):
    """Autobahn case 10.1.1: a 64 KiB text message, auto-fragmented
    into 1300-byte frames, is reassembled and mirrored back whole."""
    self.load('websockets/mirror')

    _, sock, _ = self.ws.upgrade()

    message = '*' * 65536
    self.ws.message(sock, self.ws.OP_TEXT, message, fragmention_size=1300)

    echoed = self.ws.frame_read(sock)
    self.check_frame(echoed, True, self.ws.OP_TEXT, message)

    self.close_connection(sock)
# settings
def test_asgi_websockets_max_frame_size(self):
    """The ``max_frame_size`` setting bounds the whole frame (header +
    payload): a 100-byte frame passes, a 101-byte frame is rejected
    with close code 1009 (message too big)."""
    self.load('websockets/mirror')

    assert 'success' in self.conf(
        {'http': {'websocket': {'max_frame_size': 100}}}, 'settings'
    ), 'configure max_frame_size'

    _, sock, _ = self.ws.upgrade()

    # 94 payload bytes + 6 bytes of masked-frame header = exactly 100.
    payload = '*' * 94
    opcode = self.ws.OP_TEXT

    self.ws.frame_write(sock, opcode, payload)  # frame length is 100

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, opcode, payload)

    payload = '*' * 95

    self.ws.frame_write(sock, opcode, payload)  # frame length is 101
    self.check_close(sock, 1009)  # 1009 - CLOSE_TOO_LARGE
def test_asgi_websockets_read_timeout(self):
    """A frame left incomplete longer than ``read_timeout`` gets the
    connection closed with 1001 (going away)."""
    self.load('websockets/mirror')

    assert 'success' in self.conf(
        {'http': {'websocket': {'read_timeout': 5}}}, 'settings'
    ), 'configure read_timeout'

    _, sock, _ = self.ws.upgrade()

    # Send only the first two bytes of a frame, then stall.
    partial = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')[:2]
    sock.sendall(partial)

    time.sleep(2)

    self.check_close(sock, 1001)  # 1001 - CLOSE_GOING_AWAY
def test_asgi_websockets_keepalive_interval(self):
    """While waiting out ``keepalive_interval``, the server sends a
    PING on the idle connection instead of closing it."""
    self.load('websockets/mirror')

    assert 'success' in self.conf(
        {'http': {'websocket': {'keepalive_interval': 5}}}, 'settings'
    ), 'configure keepalive_interval'

    _, sock, _ = self.ws.upgrade()

    # Start a frame but never finish it, then wait for the keepalive.
    frame = self.ws.frame_to_send(self.ws.OP_TEXT, 'blah')
    sock.sendall(frame[:2])

    time.sleep(2)

    frame = self.ws.frame_read(sock)
    self.check_frame(frame, True, self.ws.OP_PING, '')  # PING frame

    sock.close()
| apache-2.0 |
OpenPTV/postptv | flowtracks/scene.py | 1 | 17778 | # -*- coding: utf-8 -*-
#Created on Sun Aug 10 11:28:42 2014
#
# Private references:
# [1] https://docs.python.org/2/library/itertools.html
"""
A module for manipulating PTV analyses saved as HDF5 files in the flowtracks
format. Allows reading the data by iterating over frames or over trajectories.
Main design goals:
1. Keep as little as possible in memory.
2. Minimize separate file accesses by allowing reading by frames instead of \
only by trajectories as in the old code.
"""
import itertools as it, tables, numpy as np
from configparser import ConfigParser
from .trajectory import Trajectory, ParticleSnapshot
from .particle import Particle
class Frame(object):
    """
    Plain attribute container.  ``DualScene.iter_frames()`` and
    ``iter_segments()`` attach ``tracers`` and ``particles``
    ParticleSnapshot attributes to instances of this class.
    """
    pass
def pairwise(iterable):
    """
    Iterate over overlapping pairs, as in the itertools recipes, [1]

        s -> (s0,s1), (s1,s2), (s2, s3), ...

    Bug fix: ``itertools.izip`` does not exist in Python 3 (this module
    already imports :mod:`configparser`, so it targets Python 3); the
    lazy builtin ``zip`` is the direct replacement.
    """
    a, b = it.tee(iterable)
    next(b, None)
    return zip(a, b)
def gen_query_string(key, range_spec):
    """
    A small utility to create query string suitable for PyTables'
    ``read_where()`` from a range specification.

    Arguments:
    key - name of search field.
    range_spec - a tuple (min, max, invert). If ``invert`` is false, the search
        range is between min and max. Otherwise it is anywhere except that.
        In regular ranges, the max boundary is excluded as usual in Python. In
        inverted range, consequently, it is the min boundary that's excluded.

    Returns:
    A string representing all boolean conditions necessary for representing the
    given range.

    Example:
    >>> gen_query_string('example', (-1, 1, False))
    '((example >= -1) & (example < 1))'

    >>> gen_query_string('example', (-1, 1, True))
    '((example < -1) | (example >= 1))'
    """
    low, high, invert = range_spec

    # An inverted range is the complement of the regular one, so the
    # comparison operators flip and the conjunction becomes a disjunction.
    if invert:
        return "((%s < %g) | (%s >= %g))" % (key, low, key, high)
    return "((%s >= %g) & (%s < %g))" % (key, low, key, high)
class Scene(object):
    """
    This class is the programmer's interface to an HDF file containing
    particle trajectory data. It manages access by frames or trajectories,
    as well as by segments.
    """
    def __init__(self, file_name, frame_range=None):
        """
        Arguments:
        file_name - path to the HDF file holding the data.
        frame_range - use only frames in this range for iterating the data.
            the default is None, meaning to use all present frames.
        """
        self._file = tables.open_file(file_name)
        self._table = self._file.get_node('/particles')

        # Prefer the precomputed trajectory IDs in /bounds; fall back to
        # scanning the particles table when that node (or column) is absent.
        # The original bare ``except:`` is narrowed to the exceptions the
        # lookup can actually raise.
        try:
            traj_tags = self._file.get_node('/bounds')
            self._trids = traj_tags.col('trajid')
        except (tables.NoSuchNodeError, KeyError):
            self._trids = np.unique(self._table.col('trajid'))

        self.set_frame_range(frame_range)

        # Cache data on user-visible columns (everything except the
        # bookkeeping columns 'trajid' and 'time'):
        filt = ('trajid', 'time')
        self._keys = []
        self._shapes = []

        desc = self._table.coldescrs
        for name in self._table.colnames:
            if name in filt:
                continue
            self._keys.append(name)
            shape = desc[name].shape
            self._shapes.append(1 if len(shape) == 0 else shape[0])

    def trajectory_tags(self):
        """
        Return the per-trajectory bounds stored in the '/bounds' node as an
        (n, 3) array whose columns are (trajid, first frame, last frame).

        Note: the original implementation had a second, unreachable return
        statement using the long-deprecated ``np.int`` alias; it has been
        removed.
        """
        tags = self._file.get_node('/bounds')
        return np.hstack(
            [tags.col(name)[:, None] for name in ['trajid', 'first', 'last']])

    def set_frame_range(self, frame_range):
        """
        Prepare a query part that limits the frame numbers if needed.

        Arguments:
        frame_range - a tuple (first, last) frame number, with the usual
            pythonic convention that first <= i < last. Any element may be
            None, in which case no limit is generated for it, and for no
            limits at all, passing None instead of a tuple is acceptable.
        """
        self._frame_limit = ""

        # No range at all: derive the bounds from the data and return.
        if frame_range is None:
            t = self._table.col('time')
            self._first = int(t.min())
            self._last = int(t.max()) + 1
            return

        first, last = frame_range
        rng_exprs = []
        if first is None:
            t = self._table.col('time')
            self._first = int(t.min())
        else:
            self._first = first
            rng_exprs.append("(time >= %d)" % first)

        if last is None:
            t = self._table.col('time')
            self._last = int(t.max()) + 1
        else:
            self._last = last
            rng_exprs.append("(time < %d)" % last)

        self._frame_limit = ' & '.join(rng_exprs)

    def frame_range(self):
        """Return the (first, last) frame numbers currently in effect."""
        return self._first, self._last

    def __del__(self):
        # Close the HDF file; guard against __init__ having failed before
        # the file attribute was set.
        if hasattr(self, '_file'):
            self._file.close()

    def keys(self):
        """
        Return all the possible trajectory properties that may be queried as
        a data series (i.e. not the scalar property trajid), as a list of
        strings.
        """
        return self._keys

    def shapes(self):
        """
        Return the number of components per item of each key in the order
        returned by ``keys()``.
        """
        return self._shapes

    def trajectory_ids(self):
        """
        Returns all trajectory IDs in the scene as an array.
        """
        return self._trids

    def trajectory_by_id(self, trid):
        """
        Get trajectory data by trajectory ID.

        Arguments:
        trid - trajectory ID, a unique number assigned to each trajectory
            when the scene file was written.

        Returns:
        a Trajectory object.
        """
        # NOTE: ``trid`` is resolved by PyTables from this frame's locals.
        query_string = '(trajid == trid)'
        if self._frame_limit != '':
            query_string += ' & ' + self._frame_limit

        arr = self._table.read_where(query_string)
        kwds = dict((field, arr[field]) for field in arr.dtype.fields \
            if field != 'trajid')
        kwds['trajid'] = trid

        return Trajectory(**kwds)

    def iter_trajectories(self):
        """
        Iterator over trajectories. Generates a Trajectory object for each
        trajectory in the file (in no particular order, but the same order
        every time on the same PyTables version) and yields it.
        """
        query_string = '(trajid == trid)'
        if self._frame_limit != '':
            query_string += ' & ' + self._frame_limit

        # ``trid`` is looked up in the local namespace by read_where() on
        # each call, so rebinding it per iteration changes the query.
        for trid in self._trids:
            arr = self._table.read_where(query_string)
            kwds = dict((field, arr[field]) for field in arr.dtype.fields \
                if field != 'trajid')
            kwds['trajid'] = trid
            yield Trajectory(**kwds)

    def iter_frames(self):
        """
        Iterator over frames. Generates a ParticleSnapshot object for each
        frame in the file, ordered by frame number, and yields it.
        """
        for t, arr in self._iter_frame_arrays():
            kwds = dict((field, arr[field]) for field in arr.dtype.fields \
                if field != 'time')
            kwds['time'] = t
            yield ParticleSnapshot(**kwds)

    def _iter_frame_arrays(self, cond=None):
        """
        Private. Like iter_frames but does not create a ParticleSnapshot
        object, leaving the raw array. Also allows heavier filtering.

        Arguments:
        cond - an optional PyTables condition string to apply to each frame.
        """
        query_string = '(time == t)'
        if cond is not None:
            # Bug fix: str.join() takes a single iterable argument; the
            # original ``'&'.join(query_string, cond)`` raised TypeError.
            query_string = ' & '.join((query_string, cond))

        for t in range(self._first, self._last):
            yield t, self._table.read_where(query_string)

    def frame_by_time(self, t):
        """
        Get a Frame object for data occuring at time t. Assumes that the
        time exists in the data, and does not check range.

        Arguments:
        t - the frame count at the requested frame.

        Returns:
        a ParticleSnapshot object.
        """
        query_string = '(time == t)'
        arr = self._table.read_where(query_string)

        kwds = dict((field, arr[field]) for field in arr.dtype.fields \
            if field != 'time')
        kwds['time'] = t
        return ParticleSnapshot(**kwds)

    def iter_segments(self):
        """
        Iterates over frames, taking out only the particles whose trajectory
        continues in the next frame.

        Yields:
        frame - a ParticleSnapshot object representing the current frame
            with the particles that have continuing trajectories.
        next_frame - same object, for the same particles in the next frame
            (the time attribute is obviously +1 from ``frame``).
        """
        for arr, next_arr in pairwise(self._iter_frame_arrays()):
            t, arr = arr
            tn, next_arr = next_arr

            # find continuing trajectories:
            arr_trids = arr['trajid']
            next_arr_trids = next_arr['trajid']
            trajids = set(arr_trids) & set(next_arr_trids)

            # select only those from the two frames:
            in_arr = np.array([True if tr in trajids else False \
                for tr in arr_trids])
            in_next_arr = np.array([True if tr in trajids else False \
                for tr in next_arr_trids])

            # Empty boolean masks cannot index a structured array row set,
            # so only filter when there is something to filter with.
            if len(in_arr) > 0:
                arr = arr[in_arr]
            if len(in_next_arr) > 0:
                next_arr = next_arr[in_next_arr]

            # format as ParticleSnapshot.
            kwds = dict((field, arr[field]) for field in arr.dtype.fields \
                if field != 'time')
            kwds['time'] = t
            frame = ParticleSnapshot(**kwds)

            kwds = dict((field, next_arr[field]) for field in arr.dtype.fields \
                if field != 'time')
            kwds['time'] = tn
            next_frame = ParticleSnapshot(**kwds)

            yield frame, next_frame

    def collect(self, keys, where=None):
        """
        Get values of given keys, either all of them or the ones
        corresponding to a selection given by 'where'.

        Arguments:
        keys - a list of keys to take from the data
        where - a dictionary of particle property names, with a tuple
            (min,max,invert) as values. If ``invert`` is false, the search
            range is between min and max. Otherwise it is anywhere except
            that.

        Returns:
        a list of arrays, in the order of ``keys``.
        """
        # Compose query to PyTables engine:
        conds = [self._frame_limit]
        if where is not None:
            for key, rng in where.items():
                conds.append(gen_query_string(key, rng))

        cond_string = ' & '.join(conds)

        # No frame range or user-defined conditions:
        if cond_string == '':
            return [self._table.col(k) for k in keys]

        # Single key is natively handled in PyTables.
        if len(keys) == 1:
            return [self._table.read_where(cond_string, field=keys[0])]

        # Otherwise do the extraction manually.
        ret = []
        raw = self._table.read_where(cond_string)

        for k in keys:
            ret.append(raw[k])

        return ret

    def bounding_box(self):
        """
        Gets the min and max positions in the data - either from file
        attributes if present, or from a brute-force collect().

        Returns:
        min_pos, max_pos - each a (3,) array.
        """
        if 'min_pos' in self._table._v_attrs._f_list():
            min_pos = self._table._v_attrs.min_pos
            max_pos = self._table._v_attrs.max_pos
        else:
            poses = self.collect(['pos'])[0]
            min_pos, max_pos = poses.min(axis=0), poses.max(axis=0)

        return min_pos, max_pos
class DualScene(object):
    """
    Holds a scene corresponding to the dual-PTV systems, which shoot
    separate but coordinated streams for the tracers data and inertial
    particles data.
    """
    def __init__(self, tracers_path, particles_path, frate, particle,
        frame_range=None):
        """
        Arguments:
        tracers_path, particles_path - respectively the path to the tracers
            and particles HDF files.
        frate - frame rate at which the scene was shot, [1/s].
        particle - a Particle object describing the inertial particles'
            diameter and density.
        frame_range - a uniform frame range to set to both of them. The
            default is None, meaning to use all frames (assuming
            equal-length data streams)
        """
        self.frate = frate
        self.part = particle

        self._paths = (tracers_path, particles_path)
        self._tracers = Scene(tracers_path, frame_range)
        self._particles = Scene(particles_path, frame_range)
        self._rng = frame_range  # for restoring it after iteration.

    def get_particles_path(self):
        """
        Returns the path to the HDF file holding inertial particle data
        """
        return self._paths[1]

    def get_particles(self):
        """
        Returns the :class:`Scene` that manages inertial particles' data.
        """
        return self._particles

    def get_tracers(self):
        """
        Returns the :class:`Scene` that manages tracer data.
        """
        return self._tracers

    def get_range(self):
        """
        Returns the frame range set for the dual scene.
        """
        return self._rng

    def iter_frames(self, frame_range=-1):
        """
        Iterates over a scene represented by two HDF files (one for inertial
        particles, one for tracers), and returns a Frame object whose two
        attributes (.tracers, .particles) hold a corresponding
        ParticleSnapshot object.

        Arguments:
        frame_range - tuple (first, last) sets the frame range of both
            scenes to an identical frame range. Argument format as in
            Scene.set_frame_range(). Default is (-1) meaning to skip this.
            Then the object's initialization range is used, so initialize
            to a coordinated range if you use the default.

        Yields:
        the Frame object for each frame in turn.
        """
        if frame_range != -1:
            self._particles.set_frame_range(frame_range)
            self._tracers.set_frame_range(frame_range)

        # Bug fix: ``it.izip`` does not exist in Python 3; the builtin
        # ``zip`` is lazy and equivalent.
        for particles, tracers in zip(
            self._particles.iter_frames(), self._tracers.iter_frames()):
            frame = Frame()
            frame.tracers = tracers
            frame.particles = particles
            yield frame

        # restore original frame range.
        if frame_range != -1:
            self._particles.set_frame_range(self._rng)
            self._tracers.set_frame_range(self._rng)

    def iter_segments(self, frame_range=-1):
        """
        Like iter_frames, but returns two consecutive frames, both having
        the same trajids set (in other words, both contain only particles
        from the first frame whose trajectory continues to the next frame).

        Arguments:
        frame_range - tuple (first, last) sets the frame range of both
            scenes to an identical frame range. Argument format as in
            Scene.set_frame_range(). Default is (-1) meaning to skip this.
            Then the object's initialization range is used, so initialize
            to a coordinated range if you use the default.

        Yields:
        two Frame objects, representing the consecutive selective frames.
        """
        if frame_range != -1:
            self._particles.set_frame_range(frame_range)
            self._tracers.set_frame_range(frame_range)

        # Bug fix: ``it.izip`` -> builtin ``zip`` (Python 3).
        for part_frames, tracer_frames in zip(
            self._particles.iter_segments(), self._tracers.iter_segments()):
            frame = Frame()
            frame.tracers = tracer_frames[0]
            frame.particles = part_frames[0]

            next_frame = Frame()
            next_frame.tracers = tracer_frames[1]
            next_frame.particles = part_frames[1]

            yield frame, next_frame

        # restore original frame range.
        if frame_range != -1:
            self._particles.set_frame_range(self._rng)
            self._tracers.set_frame_range(self._rng)
def read_dual_scene(conf_fname):
    """
    Read dual-scene parameters, such as unchanging particle properties and
    frame range, from an INI-format configuration file.

    Arguments:
    conf_fname - name of the config file

    Returns:
    a DualScene object initialized with the configuration values found.
    """
    cfg = ConfigParser()
    cfg.read(conf_fname)

    particle = Particle(
        cfg.getfloat("Particle", "diameter"),
        cfg.getfloat("Particle", "density"))

    # The "last frame" key is inclusive in the file; make it exclusive.
    first = cfg.getint("Scene", "first frame")
    last = cfg.getint("Scene", "last frame")

    return DualScene(
        cfg.get("Scene", "tracers file"),
        cfg.get("Scene", "particles file"),
        cfg.getfloat("Scene", "frame rate"),
        particle, (first, last + 1))
| gpl-3.0 |
espadrine/opera | chromium/src/third_party/trace-viewer/third_party/closure_linter/closure_linter/full_test.py | 135 | 3464 | #!/usr/bin/env python
#
# Copyright 2007 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Full regression-type (Medium) tests for gjslint.
Tests every error that can be thrown by gjslint. Based heavily on
devtools/javascript/gpylint/full_test.py
"""
__author__ = ('robbyw@google.com (Robert Walker)',
'ajp@google.com (Andy Perelson)')
import re
import os
import sys
import unittest
import gflags as flags
import unittest as googletest
from closure_linter import checker
from closure_linter import errors
from closure_linter import error_check
from closure_linter.common import filetestcase
_RESOURCE_PREFIX = 'closure_linter/testdata'
flags.FLAGS.strict = True
flags.FLAGS.custom_jsdoc_tags = ('customtag', 'requires')
flags.FLAGS.closurized_namespaces = ('goog', 'dummy')
flags.FLAGS.limited_doc_files = ('externs.js', 'dummy.js',
'limited_doc_checks.js')
flags.FLAGS.jslint_error = error_check.Rule.ALL
# List of files under testdata to test.
# We need to list files explicitly since pyglib can't list directories.
# TODO(user): Figure out how to list the directory.
_TEST_FILES = [
'all_js_wrapped.js',
'blank_lines.js',
'ends_with_block.js',
'externs.js',
'externs_jsdoc.js',
'goog_scope.js',
'html_parse_error.html',
'indentation.js',
'interface.js',
'jsdoc.js',
'limited_doc_checks.js',
'minimal.js',
'other.js',
'provide_blank.js',
'provide_extra.js',
'provide_missing.js',
'require_all_caps.js',
'require_blank.js',
'require_extra.js',
'require_function.js',
'require_function_missing.js',
'require_function_through_both.js',
'require_function_through_namespace.js',
'require_interface.js',
'require_interface_base.js',
'require_lower_case.js',
'require_missing.js',
'require_numeric.js',
'require_provide_blank.js',
'require_provide_ok.js',
'require_provide_missing.js',
'simple.html',
'spaces.js',
'tokenizer.js',
'unparseable.js',
'unused_private_members.js',
'utf8.html'
]
class GJsLintTestSuite(unittest.TestSuite):
  """Suite that runs one annotated-file lint test per input file.

  File names given on the command line (sys.argv[1:]) are linted when
  present; otherwise the _TEST_FILES list under testdata is used.
  """

  def __init__(self, tests=()):
    unittest.TestSuite.__init__(self, tests)
    cli_files = sys.argv[1:] if sys.argv else []
    test_files = cli_files or _TEST_FILES
    for file_name in test_files:
      resource_path = os.path.join(_RESOURCE_PREFIX, file_name)
      self.addTest(
          filetestcase.AnnotatedFileTestCase(
              resource_path, checker.GJsLintRunner(), errors.ByName))
if __name__ == '__main__':
# Don't let main parse args; it happens in the TestSuite.
googletest.main(argv=sys.argv[0:1], defaultTest='GJsLintTestSuite')
| bsd-3-clause |
nathanielvarona/airflow | airflow/sensors/s3_prefix_sensor.py | 1 | 1173 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.amazon.aws.sensors.s3_prefix`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.amazon.aws.sensors.s3_prefix import S3PrefixSensor # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.amazon.aws.sensors.s3_prefix`.",
DeprecationWarning,
stacklevel=2,
)
| apache-2.0 |
crossbario/crossbarexamples | database/postgresql/caller/adder/__init__.py | 3 | 3892 | ###############################################################################
##
## Copyright (C) 2015, Tavendo GmbH and/or collaborators. All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## 1. Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
##
## 2. Redistributions in binary form must reproduce the above copyright notice,
## this list of conditions and the following disclaimer in the documentation
## and/or other materials provided with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
## ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
## LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
## SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
## INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
## CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
## POSSIBILITY OF SUCH DAMAGE.
##
###############################################################################
import math
from twisted.internet.defer import inlineCallbacks
from autobahn.twisted.util import sleep
from autobahn.twisted.wamp import ApplicationSession
from autobahn.wamp.exception import ApplicationError
from autobahn.wamp.types import CallResult
class AppSession(ApplicationSession):
    """WAMP session that registers example RPC endpoints under com.example.*."""

    @inlineCallbacks
    def onJoin(self, details):
        """Register all demo procedures once the session has joined the realm."""
        # A simple procedure: positional arguments, scalar result.
        def add2(x, y):
            return x + y
        yield self.register(add2, 'com.example.add2')
        # A procedure returning a positional (multi-value) result.
        def split_name(fullname):
            print("split_name() called with '{}'".format(fullname))
            parts = fullname.split()
            return CallResult(*parts)
        yield self.register(split_name, 'com.example.split_name')
        # A procedure returning a keyword-based result.
        def add_complex(a, ai, b, bi):
            print("add_complex() called with {}".format((a, ai, b, bi)))
            return CallResult(c=a + b, ci=ai + bi)
        yield self.register(add_complex, 'com.example.add_complex')
        # Raising standard (non-WAMP) exceptions from a procedure.
        def sqrt(x):
            if x == 0:
                raise Exception("don't ask foolish questions;)")
            else:
                # this also will raise, if x < 0
                return math.sqrt(x)
        yield self.register(sqrt, 'com.example.sqrt')
        # Raising WAMP application exceptions with custom error URIs.
        def checkname(name):
            if name in ['foo', 'bar']:
                raise ApplicationError('com.example.error.reserved')
            if name.lower() != name and name.upper() != name:
                # forward positional arguments in exceptions
                raise ApplicationError('com.example.error.mixed_case', name.lower(), name, name.upper())
            if len(name) < 3 or len(name) > 10:
                # forward keyword arguments in exceptions
                raise ApplicationError('com.example.error.invalid_length', min=3, max=10)
        yield self.register(checkname, 'com.example.checkname')
        print("all procedures registered")
| apache-2.0 |
aflaxman/scikit-learn | examples/linear_model/plot_bayesian_ridge.py | 33 | 3875 | """
=========================
Bayesian Ridge Regression
=========================
Computes a Bayesian Ridge Regression on a synthetic dataset.
See :ref:`bayesian_ridge_regression` for more information on the regressor.
Compared to the OLS (ordinary least squares) estimator, the coefficient
weights are slightly shifted toward zeros, which stabilises them.
As the prior on the weights is a Gaussian prior, the histogram of the
estimated weights is Gaussian.
The estimation of the model is done by iteratively maximizing the
marginal log-likelihood of the observations.
We also plot predictions and uncertainties for Bayesian Ridge Regression
for one dimensional regression using polynomial feature expansion.
Note the uncertainty starts going up on the right side of the plot.
This is because these test samples are outside of the range of the training
samples.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from sklearn.linear_model import BayesianRidge, LinearRegression
# #############################################################################
# Generating simulated data with Gaussian weights
np.random.seed(0)
n_samples, n_features = 100, 100
X = np.random.randn(n_samples, n_features) # Create Gaussian data
# Create weights with a precision lambda_ of 4.
lambda_ = 4.
w = np.zeros(n_features)
# Only keep 10 weights of interest
relevant_features = np.random.randint(0, n_features, 10)
for i in relevant_features:
w[i] = stats.norm.rvs(loc=0, scale=1. / np.sqrt(lambda_))
# Create noise with a precision alpha of 50.
alpha_ = 50.
noise = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_), size=n_samples)
# Create the target
y = np.dot(X, w) + noise
# #############################################################################
# Fit the Bayesian Ridge Regression and an OLS for comparison
clf = BayesianRidge(compute_score=True)
clf.fit(X, y)
ols = LinearRegression()
ols.fit(X, y)
# #############################################################################
# Plot true weights, estimated weights, histogram of the weights, and
# predictions with standard deviations
lw = 2
plt.figure(figsize=(6, 5))
plt.title("Weights of the model")
plt.plot(clf.coef_, color='lightgreen', linewidth=lw,
label="Bayesian Ridge estimate")
plt.plot(w, color='gold', linewidth=lw, label="Ground truth")
plt.plot(ols.coef_, color='navy', linestyle='--', label="OLS estimate")
plt.xlabel("Features")
plt.ylabel("Values of the weights")
plt.legend(loc="best", prop=dict(size=12))
plt.figure(figsize=(6, 5))
plt.title("Histogram of the weights")
plt.hist(clf.coef_, bins=n_features, color='gold', log=True,
edgecolor='black')
plt.scatter(clf.coef_[relevant_features], 5 * np.ones(len(relevant_features)),
color='navy', label="Relevant features")
plt.ylabel("Features")
plt.xlabel("Values of the weights")
plt.legend(loc="upper left")
plt.figure(figsize=(6, 5))
plt.title("Marginal log-likelihood")
plt.plot(clf.scores_, color='navy', linewidth=lw)
plt.ylabel("Score")
plt.xlabel("Iterations")
# Plotting some predictions for polynomial regression
def f(x, noise_amount):
    """Return sqrt(x)*sin(x) plus Gaussian noise scaled by *noise_amount*."""
    signal = np.sqrt(x) * np.sin(x)
    # The noise is drawn unconditionally so the global RNG state advances
    # the same way regardless of noise_amount.
    jitter = np.random.normal(0, 1, len(x))
    return signal + noise_amount * jitter
degree = 10
X = np.linspace(0, 10, 100)
y = f(X, noise_amount=0.1)
clf_poly = BayesianRidge()
clf_poly.fit(np.vander(X, degree), y)
X_plot = np.linspace(0, 11, 25)
y_plot = f(X_plot, noise_amount=0)
y_mean, y_std = clf_poly.predict(np.vander(X_plot, degree), return_std=True)
plt.figure(figsize=(6, 5))
plt.errorbar(X_plot, y_mean, y_std, color='navy',
label="Polynomial Bayesian Ridge Regression", linewidth=lw)
plt.plot(X_plot, y_plot, color='gold', linewidth=lw,
label="Ground Truth")
plt.ylabel("Output y")
plt.xlabel("Feature X")
plt.legend(loc="lower left")
plt.show()
| bsd-3-clause |
Peetz0r/micropython-esp32 | tests/basics/exceptpoly.py | 63 | 1769 | try:
raise ArithmeticError
except Exception:
print("Caught ArithmeticError via Exception")
try:
raise ArithmeticError
except ArithmeticError:
print("Caught ArithmeticError")
try:
raise AssertionError
except Exception:
print("Caught AssertionError via Exception")
try:
raise AssertionError
except AssertionError:
print("Caught AssertionError")
try:
raise AttributeError
except Exception:
print("Caught AttributeError via Exception")
try:
raise AttributeError
except AttributeError:
print("Caught AttributeError")
try:
raise EOFError
except Exception:
print("Caught EOFError via Exception")
try:
raise EOFError
except EOFError:
print("Caught EOFError")
try:
raise Exception
except BaseException:
print("Caught Exception via BaseException")
try:
raise Exception
except Exception:
print("Caught Exception")
try:
raise ImportError
except Exception:
print("Caught ImportError via Exception")
try:
raise ImportError
except ImportError:
print("Caught ImportError")
try:
raise IndentationError
except SyntaxError:
print("Caught IndentationError via SyntaxError")
try:
raise IndentationError
except IndentationError:
print("Caught IndentationError")
try:
raise IndexError
except LookupError:
print("Caught IndexError via LookupError")
try:
raise IndexError
except IndexError:
print("Caught IndexError")
try:
raise KeyError
except LookupError:
print("Caught KeyError via LookupError")
try:
raise KeyError
except KeyError:
print("Caught KeyError")
try:
raise LookupError
except Exception:
print("Caught LookupError via Exception")
try:
raise LookupError
except LookupError:
print("Caught LookupError")
| mit |
egelmex/ajenti | ajenti/daemon.py | 8 | 3705 | import sys
import os
import time
import atexit
from signal import SIGTERM
class Daemon:
    """
    A generic UNIX daemon class (Python 2 style).
    Usage: subclass the Daemon class and override the run() method, then
    call start()/stop()/restart(). The daemon's PID is tracked in pidfile.
    """
    def __init__(self, pidfile, stdin='/dev/null',
                 stdout='/dev/null', stderr='/dev/null'):
        # Paths that the daemon's standard streams are redirected to/from.
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.pidfile = pidfile
    def daemonize(self):
        """
        do the UNIX double-fork magic, see Stevens' "Advanced
        Programming in the UNIX Environment" for details (ISBN 0201563177)
        http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
        """
        try:
            pid = os.fork()
            if pid > 0:
                # exit first parent
                sys.exit(0)
        except OSError, e:
            sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # decouple from parent environment: become session leader with no
        # controlling terminal and a permissive umask
        #os.chdir("/")
        os.setsid()
        os.umask(0)
        # do second fork so the daemon can never re-acquire a terminal
        try:
            pid = os.fork()
            if pid > 0:
                # exit from second parent
                sys.exit(0)
        except OSError, e:
            sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # redirect standard file descriptors
        # NOTE: file() is Python 2 only (use open() under Python 3)
        sys.stdout.flush()
        sys.stderr.flush()
        si = file(self.stdin, 'r')
        so = file(self.stdout, 'a+')
        se = file(self.stderr, 'a+', 0)
        os.dup2(si.fileno(), sys.stdin.fileno())
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(se.fileno(), sys.stderr.fileno())
        # write pidfile; best-effort — failure to record the PID is ignored
        atexit.register(self.delpid)
        pid = str(os.getpid())
        try:
            open(self.pidfile,'w+').write("%s\n" % pid)
        except:
            pass
    def delpid(self):
        # atexit hook: remove the pidfile when the daemonized process exits.
        os.remove(self.pidfile)
    def start(self):
        """
        Start the daemon
        """
        # Check for a pidfile to see if the daemon already runs
        try:
            pf = file(self.pidfile,'r')
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        if pid:
            message = "pidfile %s already exist. Daemon already running?\n"
            sys.stderr.write(message % self.pidfile)
            sys.exit(1)
        # Start the daemon
        self.daemonize()
        self.run()
    def stop(self):
        """
        Stop the daemon
        """
        # Get the pid from the pidfile
        try:
            pf = file(self.pidfile,'r')
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        if not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return # not an error in a restart
        # Try killing the daemon process: send SIGTERM repeatedly until it
        # disappears, then clean up the pidfile
        try:
            while 1:
                os.kill(pid, SIGTERM)
                time.sleep(0.1)
        except OSError, err:
            err = str(err)
            if err.find("No such process") > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                print str(err)
                sys.exit(1)
    def restart(self):
        """
        Restart the daemon
        """
        self.stop()
        self.start()
    def run(self):
        """
        You should override this method when you subclass Daemon.
        It will be called after the process has been
        daemonized by start() or restart().
        """
| lgpl-3.0 |
pivanof/vitess | test/cluster/vtctl_helper.py | 12 | 3923 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper module for running vtctl commands.
This module allows for retry logic to ensure that vtctl commands are properly
executed. This should help reduce flakiness in the sandbox.
"""
import logging
import subprocess
import time
from vtctl import vtctl_client
class VtctlClientError(Exception):
  """Raised when a vtctl command cannot be executed successfully."""
  pass
class VtctlHelper(object):
  """Various functions for running vtctl commands."""

  def __init__(self, protocol, vtctl_addr):
    """Stores connection parameters and connects a vtctl client.

    For the 'grpc' protocol no client is created here: commands are
    shelled out to the `vtctlclient` binary in execute_vtctl_command.
    """
    self.protocol = protocol
    self.client = None
    self.vtctl_addr = vtctl_addr
    if vtctl_addr and protocol != 'grpc':
      self.client = vtctl_client.connect(protocol, vtctl_addr, 30)

  def execute_vtctl_command(self, args, action_timeout=60.0, expect_fail=False,
                            max_wait_s=180.0):
    """Executes a vtctl command on a running vtctl job.
    This function attempts to execute on any running vtctl job, returning
    immediately when a call to execute_vtctl_command completes successfully.
    Args:
      args: args to pass to vtctl_client's execute_vtctl_command function
      action_timeout: total timeout for the action (float, in seconds)
      expect_fail: whether or not the vtctl command should fail (bool)
      max_wait_s: maximum amount of time to wait for success (float, in seconds)
    Returns:
      Result of executing vtctl command
    Raises:
      VtctlClientError: Could not successfully call execute_vtctl_command
    """
    start_time = time.time()
    while time.time() - start_time < max_wait_s:
      try:
        if self.protocol == 'grpc':
          # grpc: shell out to the vtctlclient binary instead of using
          # the in-process client.
          results = subprocess.check_output(
              ['vtctlclient', '-vtctl_client_protocol', self.protocol,
               '-server', self.vtctl_addr] + args, stderr=subprocess.STDOUT)
        else:
          results = vtctl_client.execute_vtctl_command(
              self.client, args, action_timeout=action_timeout)
        return results
      except Exception as e:
        # NOTE(review): `e.message` is Python 2 era and `e.output` only
        # exists on CalledProcessError; other exception types may have
        # neither attribute — verify before porting.
        if expect_fail:
          logging.info('Expected vtctl error, got: %s', e.message or e.output)
          raise VtctlClientError('Caught an expected vtctl error')
        logging.info('Vtctl error (vtctl %s): %s',
                     ' '.join(args), e.message or e.output)
        # Retry after a short pause until max_wait_s elapses.
        time.sleep(5)
    raise VtctlClientError('Timed out on vtctl_client execute_vtctl_command')

  def execute_vtctl_command_until_success(
      self, args, max_wait_s=180.0, retry_wait_s=5.0):
    """Executes a vtctl command on a running vtctl job.
    This function attempts to execute on any running vtctl job, returning
    immediately when a call to execute_vtctl_command returns nothing. Do not
    use this if you expect execute_vtctl_client to return data.
    Args:
      args: args to pass to vtctl_client's execute_vtctl_command function
      max_wait_s: maximum amount of time to wait for success (float, in seconds)
      retry_wait_s: time between vtctl calls to wait (float, in seconds)
    Raises:
      VtctlClientError: execute_vtctl_command never returned empty data
    """
    start_time = time.time()
    while time.time() - start_time < max_wait_s:
      try:
        # Empty/falsy result counts as success for this helper.
        if not self.execute_vtctl_command(args):
          return
      except VtctlClientError:
        # Individual attempt failed; keep retrying until max_wait_s.
        pass
      time.sleep(retry_wait_s)
    raise VtctlClientError(
        'Timed out on vtctl_client execute_vtctl_command_until_success')
'Timed out on vtctl_client execute_vtctl_command_until_success')
| apache-2.0 |
vdel/CoMFoRT | src/modules/themodule_Coord.py | 2 | 19824 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# CoMFoRT: a COntent Management system FOr Researchers and Teachers!
#
# Copyright (C) 2008 Projet2-L3IF ENS Lyon.
#
# Contributors:
# * Jean-Alexandre Angles d'Auriac
# * Gabriel Beaulieu
# * Valentin Blot
# * Pierre Boutillier
# * Nicolas Brunie
# * Aloïs Brunel
# * Vincent Delaitre
# * Antoine Frénoy
# * Mathias Gaunard
# * Guilhem Jaber
# * Timo Jolivet
# * Jonas Lefèvre
# * Bastien Le Gloannec
# * Anne-Laure Mouly
# * Kevin Perrot
# * Jonathan Protzenko
# * Gabriel Renault
# * Philippe Robert
# * Pierre Roux
# * Abdallah Saffidine
# * David Salinas
# * Félix Sipma
# * Alexandra Sourisseau
# * Samuel Vaiter
# * Guillaume Vors
#
# Contact us with : comfort@listes.ens-lyon.fr
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>."
#
## TODO:
##
#
# TODO:
# MISE EN FORME
# Ajout de photo
from module_interfaces import *
from conf import confstatic
import conf_general, conf_private
from interface import class_forms
from db import db_manager
import re
class TheModule(ComfortModule, IModuleContentProvider, IModuleDB, IModuleAdminPage):
mode = "both"
def module_init(self):
self.table_prefix = "coord"
pass
    def module_admin_info(self):
        """Return the short (French) description shown in the admin panel."""
        return "Permet d'ajouter vos coordonnées."
    def module_title(self):
        """Return the title displayed above the module's content (none here)."""
        return ""
    def module_name(self):
        """Return the localized, human-readable module name."""
        return _("Coordonnées")
    def module_id(self):
        """Return the unique module identifier used in admin/handler URLs."""
        return "Coord"
    def init_content(self):
        """No content initialization is needed for this module."""
        pass
    def handle_admin_request(self, params, fields):
        """Process an admin-form submission and return the redirect URL.

        params - query-string parameters; "form" selects the action
        fields - submitted form fields (cgi.FieldStorage-like, .value access)
        Returns "admin.py?id=Coord" on success, "handler.py" when a required
        field is missing, or "admin.py" for unknown/no form.
        """
        # NOTE(review): bare except — presumably guards a missing "form"
        # key (KeyError); narrowing it would be safer.
        try:
            form = params["form"]
        except:
            form=""
        ## Adding a coord set in the database
        if form == "add_item":
            if fields.has_key("item_title"):
                record = {'f_name' : fields['item_f_name'].value, \
                          'l_name' : fields['item_l_name'].value, \
                          'tel1' : fields['item_tel1'].value, \
                          'tel2' : fields['item_tel2'].value, \
                          'tel1_text' : fields['item_tel1_text'].value, \
                          'tel2_text' : fields['item_tel2_text'].value, \
                          'addr_perso' : fields['item_addr_perso'].value, \
                          'addr_work' : fields['item_addr_work'].value, \
                          'mail' : fields['item_mail'].value, \
                          'fax' : fields['item_fax'].value, \
                          'labo' : fields['item_labo'].value, \
                          'title' : fields['item_title'].value, \
                          'labo_url' : fields['item_labo_url'].value}
                table_db = self.db.table(self.table_prefix+"_items")
                table_db.insert(record)
                return "admin.py?id=Coord"
            else:
                return "handler.py"
        ## Deletion of some coords
        elif form == "del_item":
            if fields.has_key("item_id"):
                table = self.db.table(self.table_prefix+"_items")
                table.delete([("id", "=", fields["item_id"].value)])
                return "admin.py?id=Coord"
            else:
                return "handler.py"
        ## Coords edition: same record layout as add_item, but updates
        ## the row identified by item_id instead of inserting.
        elif form == "edit_item":
            if fields.has_key("item_id"):
                record = {'f_name' : fields['item_f_name'].value, \
                          'l_name' : fields['item_l_name'].value, \
                          'tel1' : fields['item_tel1'].value, \
                          'tel2' : fields['item_tel2'].value, \
                          'tel1_text' : fields['item_tel1_text'].value, \
                          'tel2_text' : fields['item_tel2_text'].value, \
                          'addr_perso' : fields['item_addr_perso'].value, \
                          'addr_work' : fields['item_addr_work'].value, \
                          'mail' : fields['item_mail'].value, \
                          'fax' : fields['item_fax'].value, \
                          'labo' : fields['item_labo'].value, \
                          'title' : fields['item_title'].value, \
                          'labo_url' : fields['item_labo_url'].value}
                table_db = self.db.table(self.table_prefix+"_items")
                table_db.update(record, [("id", "=", fields["item_id"].value)])
                return "admin.py?id=Coord"
            else:
                return "handler.py"
        return "admin.py"
## Form to administrate the coords
# Le formulaire d'administration des coordonnées
    def generate_admin_xhtml(self, form_page):
        """Build the admin page for the module into form_page.page.

        Renders one of three views depending on form_page.params["form"]:
        the list of stored coordinate sets (default), the "add" form, or
        the "edit" form for one set.
        """
        # main == 1 selects the add/edit sub-forms instead of the list view.
        main = 0
        ## Options list
        try:
            if form_page.params["form"] == "add_item":
                main = 1
            elif form_page.params["form"] == "edit_item":
                main = 1
        except:
            pass
        title=_("Module coordonnées")
        form_page.page = class_forms.Page(title,conf_general.style)
        title_content = form_page.page.add_complex_option_tag("div",[("id","title")])
        title_content.add_text(title)
        form_page.gen_nav()
        ## The whole module menu (shown on every view):
        body_content = form_page.page.add_complex_option_tag("div",[("class","body_class")])
        body = body_content.add_complex_tag("fieldset")
        body.add_simple_tag("legend",_("Opérations sur les jeux de coordonnées"))
        table_f = body.add_table(0,0,5)
        ## "General management" button (back to the list view)
        form_main = (class_forms.Content("", "")).add_form( "admin.py?id=Coord","post")
        form_main.add_input("submit", "submit", _("Gestion générale"))
        ## "Add a coordinate set" button
        form_add = (class_forms.Content("", "")).add_form( "admin.py?id=Coord&form=add_item","post")
        form_add.add_input("submit", "submit", _("Ajouter un jeu de coordonnées"))
        table_f.add_line([("td", [], form_main), ("td", [], form_add)])
        p = form_page.page.add_complex_tag("p")
        p.add_br()
        # ------------------------------------------------------------------
        # Main (list) view: table of all stored coordinate sets
        # ------------------------------------------------------------------
        if main == 0:
            body_content = form_page.page.add_complex_option_tag("div",[("class","body_class")])
            body = body_content.add_complex_tag("fieldset")
            body.add_simple_tag("legend",_("Coordonnées enregistrées"))
            ## Retrieve all the coord sets
            table_db = self.db.table(self.table_prefix + "_items")
            coord_db = table_db.select(cond = [], order = [("id",True)])
            if len(coord_db) == 0:
                body.add_simple_tag("h3", _("Il n'y a aucun jeu de coordonnées"))
            else:
                if len(coord_db) == 1:
                    body.add_simple_tag("h3", _("Il y a ")+"1"+ _(" jeu de coordonnées"))
                else:
                    body.add_simple_tag("h3", _("Il y a ")+str(len(coord_db))+ _(" jeux de coordonnées"))
                table_f = body.add_table(2,0,4)
                table_f.add_line([("th",[("align","center")], class_forms.Text(_("Titre"))), \
                                  ("th",[("align","center")], class_forms.Text(_("Tel1"))), \
                                  ("th",[("align","center")], class_forms.Text(_("Tel2"))),\
                                  ("th",[("align","center")], class_forms.Text(_("mail"))),\
                                  ("th",[("align","center")], class_forms.Text(_("fax"))),\
                                  ("th",[("align","center")], class_forms.Text(_("labo")))\
                                  ])
                for coord in coord_db:
                    ## Per-row delete/edit buttons on the right
                    commands = (class_forms.Content("", "")).add_table(0, 0, 2)
                    form_del = (class_forms.Content("", "")).add_form( "adminvalid.py?id=Coord&form=del_item","post")
                    form_del.add_input("hidden", "item_id", str(coord["id"]))
                    form_del.add_input("submit", "submit", _("Supprimer"))
                    form_edit = (class_forms.Content("", "")).add_form( "admin.py?id=Coord&form=edit_item&item_id=" +str(coord["id"]),"post")
                    form_edit.add_input("submit", "submit", _("Éditer"))
                    commands.add_line([("td", [], form_del), ("td", [], form_edit)])
                    table_f.add_line([("td",[("align","left")], class_forms.Text(coord['title'])), \
                                      ("td",[("align","left")], class_forms.Text(coord['tel1'])),\
                                      ("td",[("align","left")], class_forms.Text(coord['tel2'])),\
                                      ("td",[("align","left")], class_forms.Text(coord['mail'])),\
                                      ("td",[("align","left")], class_forms.Text(coord['fax'])),\
                                      ("td",[("align","left")], class_forms.Text(coord['labo'])),\
                                      ("td",[("align","left")], commands) ])
            body2 = body.add_complex_tag("p");
            body2.add_br()
        # ------------------------------------------------------------------
        # "Add a coordinate set" form
        # ------------------------------------------------------------------
        elif form_page.params['form'] == "add_item":
            body_content = form_page.page.add_complex_option_tag("div",[("class","body_class")])
            body = body_content.add_complex_tag("fieldset")
            body.add_simple_tag("legend",_("Ajouter un jeu de coordonnées"))
            ## The add form itself
            form = body.add_form("adminvalid.py?id=Coord&form=add_item", "post")
            p = form.add_complex_tag("p")
            addr_work_t = class_forms.Content("","")
            addr_work_t = addr_work_t.add_textarea("item_addr_work",10,50)
            addr_perso_t = class_forms.Content("","")
            addr_perso_t = addr_perso_t.add_textarea("item_addr_perso",10,50)
            # The phone-label fields are not editable from this form but are
            # still required by handle_admin_request, hence the hidden inputs.
            p.add_input("hidden","item_tel1_text", "")
            p.add_input("hidden","item_tel2_text", "")
            table_f = p.add_table(0,0,3)
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Titre du jeu de coordonnées")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_title", "")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Nom")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_f_name", "")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Prenom")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_l_name", "")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Telephone 1")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_tel1", "")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Telephone 2")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_tel2","")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Fax")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_fax", "")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Adresse perso")+" : ")), \
                              ("td",[("align","left")], addr_perso_t) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Adresse travail")+" : ")), \
                              ("td",[("align","left")], addr_work_t) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Mail")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_mail", "")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Laboratoire")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_labo", "")) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("URL du laboratoire")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_labo_url", "")) ])
            p.add_br()
            p.add_input("submit", "submit", _("Ajouter le jeu de coordonnées"))
            p.add_br()
        # ------------------------------------------------------------------
        # "Edit a coordinate set" form (same layout, pre-filled values)
        # ------------------------------------------------------------------
        elif form_page.params['form'] == "edit_item":
            body_content = form_page.page.add_complex_option_tag("div",[("class","body_class")])
            body = body_content.add_complex_tag("fieldset")
            body.add_simple_tag("legend",_("Édition d'un jeu de coordonnées"))
            table_db = self.db.table(self.table_prefix+"_items")
            coord_db = table_db.select(cond=[("id", "=", form_page.params["item_id"])], order=[("id",True)]).next()
            form = body.add_form("adminvalid.py?id=Coord&form=edit_item", "post")
            p = form.add_complex_tag("p")
            p.add_input("hidden","item_id",form_page.params["item_id"])
            p.add_input("hidden","item_tel1_text", coord_db['tel1_text'])
            p.add_input("hidden","item_tel2_text", coord_db['tel2_text'])
            addr_work_t = class_forms.Content("","")
            addr_work_t = addr_work_t.add_textarea("item_addr_work",10,50)
            addr_work_t.add_text(coord_db['addr_work'])
            addr_perso_t = class_forms.Content("","")
            addr_perso_t = addr_perso_t.add_textarea("item_addr_perso",10,50)
            addr_perso_t.add_text(coord_db['addr_perso'])
            table_f = p.add_table(0,0,3)
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Titre du jeu de coordonnées")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_title", coord_db["title"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Nom")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_f_name", coord_db["f_name"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Prenom")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_l_name", coord_db["l_name"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Telephone 1")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_tel1", coord_db["tel1"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Telephone 2")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_tel2", coord_db["tel2"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Fax")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_fax", coord_db["fax"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Adresse perso")+" : ")), \
                              ("td",[("align","left")], addr_perso_t) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Adresse travail")+" : ")), \
                              ("td",[("align","left")], addr_work_t) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Mail")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_mail", coord_db["mail"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("Laboratoire")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_labo", coord_db["labo"])) ])
            table_f.add_line([("td",[("align","left")], class_forms.Text(_("URL du laboratoire")+" : ")), \
                              ("td",[("align","left")], class_forms.Input("text", "item_labo_url", coord_db["labo_url"])) ])
            p.add_br()
            p.add_input("submit", "submit", _("Éditer le jeu de coordonnées"))
        ## __ End of the page __
        form_page.gen_nav()
# __________________________________________________________________________#
def generate_content_xml(self, args):
    """Render the requested (or most recent) coordinate set as DocBook XML.

    args: request-parameter dict; 'coord_id' selects a specific row
          (otherwise the newest row is used).
    Returns the XML fragment as a string, or "" when no row is available
    or a row is missing an expected field.
    """
    ch = ""
    # Fetch all fields from the coordinate items table.
    table_db = self.db.table(self.table_prefix+"_items")
    # A specific coordinate set may be requested via 'coord_id'.
    if args.has_key('coord_id'):
        coord_db = table_db.select(cond=[("id", "=", args['coord_id']) ], order=[("id", True)])
    else:
        coord_db = table_db.select(cond=[], order=[("id", True)])
    try:
        coord = coord_db.next()
        ch += " <para>\n"
        ch += " <emphasis role='strong'>"+coord['title']+"</emphasis><sbr/>\n"
        ch += " "+coord['f_name']+" "+coord['l_name']+"<sbr/>\n"
        # Phone numbers: use the custom label when one is configured,
        # otherwise fall back to a plain "Tel:" prefix.
        if coord['tel1'] != "" and coord['tel1_text'] != "":
            ch += " <emphasis role='strong'>"+coord['tel1_text']+"</emphasis> "+coord['tel1']+"<sbr/>\n"
        elif coord['tel1'] != "":
            ch += " <emphasis role='strong'>Tel:</emphasis> "+coord['tel1']+"<sbr/>\n"
        if coord['tel2'] != "" and coord['tel2_text'] != "":
            ch += " <emphasis role='strong'>"+coord['tel2_text']+"</emphasis> "+coord['tel2']+"\n"
        elif coord['tel2'] != "":
            ch += " <emphasis role='strong'>Tel:</emphasis> "+coord['tel2']+"<sbr/>\n"
        ch += " <para><ulink url='"+coord['labo_url']+"'><emphasis role='strong'>"+coord['labo']+"</emphasis></ulink></para>\n"
        if coord['addr_work'] != "":
            ch += " <para><emphasis role='strong'>- Travail -</emphasis><sbr/>"+coord['addr_work'].replace('\n', '<sbr/>\n')+"</para>\n"
        if coord['addr_perso'] != "":
            ch += " <para><emphasis role='strong'>- Personnel -</emphasis><sbr/>"+coord['addr_perso'].replace('\n', '<sbr/>\n')+"</para>\n"
        ch += " <para></para><para><emphasis role='strong'>"+coord['mail']+"</emphasis></para>\n"
        ch += " </para>\n"
    except Exception:
        # BUG FIX: narrowed from a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. Any failure (no rows, missing
        # field) deliberately falls back to an empty fragment.
        ch = ""
    return ch
def setup_db(self, db):
    """Bind the database handle and create the coordinate table if missing."""
    self.db = db
    # Schema of the coordinate items table: every field is free-form text.
    schema = {'f_name': 'text',
              'l_name': 'text',
              'tel1': 'text',
              'tel2': 'text',
              'tel1_text': 'text',
              'tel2_text': 'text',
              'addr_perso': 'text',
              'addr_work': 'text',
              'mail': 'text',
              'title': 'text',
              'fax': 'text',
              'labo': 'text',
              'labo_url': 'text'}
    try:
        self.db.table(self.table_prefix+"_items")
    except Exception:
        # BUG FIX: narrowed from a bare `except:`. A failed table lookup
        # means the table does not exist yet, so create it.
        self.db.create(self.table_prefix+"_items", schema)
| gpl-3.0 |
cekk/fantacalcio-react | fantacalcio/public/views.py | 1 | 7080 | # -*- coding: utf-8 -*-
'''Public section, including homepage and signup.'''
from flask import (Blueprint, request, render_template, flash, url_for,
redirect, session, make_response, jsonify, current_app)
from flask.ext.login import login_user, login_required, logout_user
from fantacalcio.extensions import login_manager
from fantacalcio.user.models import User
from fantacalcio.user.views import user_can_access
from fantacalcio.public.forms import LoginForm
from fantacalcio.public.forms import PlayersForm
from fantacalcio.user.forms import RegisterForm
from fantacalcio.players.models import Player
from fantacalcio.utils import flash_errors
from fantacalcio.database import db
from sqlalchemy.sql.expression import func
import json
import os
import csv
from sqlalchemy import desc
from werkzeug import secure_filename
blueprint = Blueprint('public', __name__, static_folder="../static")
auction_blueprint = Blueprint("auction", __name__, url_prefix='/auction',
static_folder="../static")
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: look up a user by primary key.

    The argument is renamed from `id` (which shadowed the builtin); the
    callback is invoked positionally, so callers are unaffected.
    """
    return User.get_by_id(int(user_id))
@blueprint.route("/", methods=["GET", "POST"])
def home():
form = LoginForm(request.form)
# Handle logging in
if request.method == 'POST':
if form.validate_on_submit():
login_user(form.user)
flash("You are logged in.", 'success')
redirect_url = request.args.get("next") or url_for("user.user", user=form.user.username)
return redirect(redirect_url)
else:
flash_errors(form)
return render_template("public/home.html", form=form)
@blueprint.route('/logout/')
@login_required
def logout():
    """End the current session and return to the landing page."""
    home_url = url_for('public.home')
    logout_user()
    flash('You are logged out.', 'info')
    return redirect(home_url)
@blueprint.route("/register/", methods=['GET', 'POST'])
def register():
form = RegisterForm(request.form, csrf_enabled=False)
if form.validate_on_submit():
picture = request.files.get('picture')
picture_name = ""
if picture:
avatar_path = current_app.config['AVATAR_UPLOAD_FOLDER']
app_path = current_app.config['APP_FOLDER']
picture_name = secure_filename(picture.filename)
picture.save(os.path.join(app_path, avatar_path, picture_name))
new_user = User.create(username=form.username.data,
picture=picture_name,
password=form.password.data,
active=True)
flash("Thank you for registering. You can now log in.", 'success')
return redirect(url_for('public.home'))
else:
flash_errors(form)
return render_template('public/register.html', form=form)
@blueprint.route("/about/")
def about():
form = LoginForm(request.form)
return render_template("public/about.html", form=form)
@blueprint.route('/manage/', methods=["GET", "POST"])
@login_required
@user_can_access
def manage():
    """Import the player list from an uploaded CSV file.

    Expected columns: role, name, team, price. The price column appears to
    use ',' as decimal separator -- TODO confirm against the source sheet.
    """
    form = PlayersForm()
    if form.validate_on_submit():
        csvfile = request.files.get('file')
        if csvfile:
            csvreader = csv.reader(csvfile.stream, delimiter=',', quotechar='"')
            for i, row in enumerate(csvreader):
                if i == 0:
                    continue  # skip the header row
                player = Player(name=row[1],
                                team=row[2],
                                role=row[0],
                                original_price=int(round(float(row[3].replace(',', '.')))))
                db.session.add(player)
                # BUG FIX: debug `print` replaced with the app logger.
                current_app.logger.info("%s) Added %s to db", i, row[1])
            # Single commit for the whole import instead of per-row work.
            db.session.commit()
            flash("Giocatori importati correttamente", 'success')
        # The original `else: filename = None` branch was dead code (the
        # variable was never read) and has been removed; a missing file is
        # still a silent no-op, as before.
    return render_template('public/manage.html', form=form)
#ASTA
@auction_blueprint.route("/")
@login_required
@user_can_access
def auction():
return render_template("public/auction.html")
@auction_blueprint.route("/extract")
@login_required
@user_can_access
def extract():
cleanSelection()
session = db.session()
random_players = session.query(Player).filter_by(extracted=False, auction_price=0).order_by(func.random())
print "%s Players left to extract." % random_players.count()
player = random_players.first()
if not player:
return {}
response = make_response()
data = player.to_json()
data['users'] = [x.to_json() for x in User.query.order_by('username')]
data['statistics'] = {'players_left': random_players.count() - 1,
'extracted_players': session.query(Player).filter_by(extracted=True).count() + 1}
response.data = json.dumps(data)
player.currently_selected = True
player.extracted = True
db.session.add(player)
db.session.commit()
return response
@auction_blueprint.route("/selected")
@login_required
def selected():
session = db.session()
player = session.query(Player).filter_by(currently_selected=True).first()
response = make_response()
if not player:
response.data= {}
else:
data = player.to_json()
data['users'] = [x.to_json() for x in User.query.order_by('username')]
data['statistics'] = {'players_left': session.query(Player).filter_by(extracted=False, auction_price=0).count(),
'extracted_players': session.query(Player).filter_by(extracted=True).count()}
response.data = json.dumps(data)
return response
@auction_blueprint.route("/buy", methods=['POST'])
@login_required
@user_can_access
def buy():
errors = {}
player_id = request.form.get('player')
price = request.form.get('price')
if price:
price = int(price)
else:
price = 0
team = request.form.get('team')
if not player_id:
errors['player'] = True
if not price:
errors['price'] = True
if not team:
errors['team'] = True
user = User.query.filter_by(username=team).first()
player = Player.query.filter_by(id=player_id).first()
if user and user.auction_budget < price:
errors['price'] = True
errors['team'] = True
errors['msg'] = "La squadra %s non ha abbastanza crediti per acquistare %s" % (team, player.name)
max_players = current_app.config['%s_LIMIT' % player.role]
if user and user.players.filter_by(role=player.role).count() == max_players:
errors['team'] = True
errors['msg'] = u"La squadra %s ha già tutti i giocatori per questo ruolo." % team
if errors:
return make_response(jsonify(errors), 400)
user.players.append(player)
user.auction_budget = user.auction_budget - price
player.auction_price = price
db.session.add(user)
db.session.add(player)
db.session.commit()
return extract()
def cleanSelection():
    """Clear the 'currently_selected' flag from whichever player holds it."""
    current = db.session().query(Player).filter_by(currently_selected=True).first()
    if current is None:
        return
    current.currently_selected = False
    db.session.add(current)
    db.session.commit()
| bsd-3-clause |
dheesbeen/Mage2Gen | mage2gen/snippets/plugin.py | 1 | 4340 | # A Magento 2 module generator library
# Copyright (C) 2016 Derrick Heesbeen
# Copyright (C) 2016 Maikel Martens Changed add API and refactor code
#
# This file is part of Mage2Gen.
#
# Mage2Gen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from .. import Module, Phpclass, Phpmethod, Xmlnode, StaticFile, Snippet, SnippetParam
class PluginSnippet(Snippet):
    # Snippet that generates a Magento 2 plugin (interceptor): the PHP
    # plugin class, a before/after/around method stub, and the di.xml
    # registration in the chosen scope.
    description = """Creates a Plugin
    Plugins are designed to overwrite core Magento methods or methods from other 3rd party modules.
    You can choose to change it before, after, or around the original method is called.
    Example
    -------
    Change the product name to show pipes before and after the name.
    Input for the plugin form
    - **classname:** Magento\Catalog\Model\Product
    - **methodname:** getName
    - **plugintype:** After
    .. code::
    public function afterGetName(
    Magento\Catalog\Model\Product $subject,
    $result
    ){
    return '|'.$result.'|';
    }
    """

    # Interception point relative to the original method.
    TYPE_BEFORE = 'before'
    TYPE_AFTER = 'after'
    TYPE_AROUND = 'around'

    TYPE_CHOISES = [
        (TYPE_BEFORE, 'Before'),
        (TYPE_AFTER, 'After'),
        (TYPE_AROUND, 'Around'),
    ]

    # Magento area in which the plugin is registered.
    SCOPE_ALL = 'all'
    SCOPE_FRONTEND = 'frontend'
    SCOPE_ADMINHTML = 'backend'
    SCOPE_WEBAPI = 'webapi'

    SCOPE_CHOISES = [
        (SCOPE_ALL, 'All'),
        (SCOPE_FRONTEND, 'Frontend'),
        (SCOPE_ADMINHTML, 'Backend'),
        (SCOPE_WEBAPI, 'Webapi'),
    ]

    def add(self, classname, methodname, plugintype=TYPE_AFTER, scope=SCOPE_ALL, sortorder=10, disabled=False, extra_params=None):
        # Generate the PHP plugin class and its di.xml registration.
        # `extra_params` is accepted for interface compatibility with other
        # snippets but is not used here.
        # Add class
        plugin = Phpclass('Plugin\\{}'.format(classname))
        # Second parameter of the generated PHP method depends on the
        # interception type: after -> $result, around -> \Closure $proceed,
        # before -> placeholder comment for the original arguments.
        variable = '$result'
        if plugintype == self.TYPE_BEFORE:
            variable = '//$functionvariables'
        elif plugintype == self.TYPE_AROUND:
            variable = '\Closure $proceed'
        # Method name follows Magento convention: <type><MethodName>.
        plugin.add_method(Phpmethod(
            plugintype + methodname[0].capitalize() + methodname[1:],
            body="//Your plugin code",
            params=[
                '\\' + classname + ' $subject',
                variable
            ]
        ))
        # Add plug first will add the module namespace to PhpClass
        self.add_class(plugin)
        # Plugin XML
        config = Xmlnode('config', attributes={'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance', 'xsi:noNamespaceSchemaLocation': "urn:magento:framework:ObjectManager/etc/config.xsd"}, nodes=[
            Xmlnode('type', attributes={'name': classname}, nodes=[
                Xmlnode('plugin', attributes={
                    'name': plugin.class_namespace.replace('\\', '_'),
                    'type': plugin.class_namespace,
                    'sortOrder': sortorder,
                    'disabled': 'true' if disabled else 'false'
                })
            ])
        ])
        # Scope decides which etc/ subdirectory receives the di.xml; the
        # webapi scope writes both a webapi_rest and a webapi_soap file.
        xml_path = ['etc']
        if scope == self.SCOPE_FRONTEND:
            xml_path.append('frontend')
        elif scope == self.SCOPE_ADMINHTML:
            xml_path.append('adminhtml')
        elif scope == self.SCOPE_WEBAPI:
            soap_xml_path = ['etc']
            xml_path.append('webapi_rest')
            soap_xml_path.append('webapi_soap')
            soap_xml_path.append('di.xml')
            self.add_xml(os.path.join(*soap_xml_path), config)
        xml_path.append('di.xml')
        self.add_xml(os.path.join(*xml_path), config)

    @classmethod
    def params(cls):
        # Form-field definitions consumed by the Mage2Gen UI.
        return [
            SnippetParam(name='classname', required=True,
                description='Example: Magento\Catalog\Model\Product',
                regex_validator=r'^[\w\\]+$',
                error_message='Only alphanumeric, underscore and backslash characters are allowed'),
            SnippetParam(name='methodname', required=True,
                description='Example: getPrice',
                regex_validator= r'^\w+$',
                error_message='Only alphanumeric and underscore characters are allowed'),
            SnippetParam(name='plugintype', choises=cls.TYPE_CHOISES, default=cls.TYPE_AFTER),
            SnippetParam(name='scope', choises=cls.SCOPE_CHOISES, default=cls.SCOPE_ALL),
            SnippetParam(name='sortorder', default=10,
                regex_validator=r'^\d*$',
                error_message='Must be numeric'),
            SnippetParam(name='disabled', yes_no=True),
        ]
| gpl-3.0 |
xiaoyuanW/gem5 | src/mem/slicc/ast/StatementAST.py | 92 | 1756 | # Copyright (c) 2009 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from slicc.ast.AST import AST
class StatementAST(AST):
    """Base class for statement nodes in the SLICC AST."""

    def __init__(self, slicc, pairs=None):
        super(StatementAST, self).__init__(slicc, pairs)

    def findResources(self, resources):
        # Default: a statement claims no resources; subclasses override
        # when they need to record resource usage.
        pass
| bsd-3-clause |
rwatson/chromium-capsicum | third_party/scons/scons-local/SCons/Tool/packaging/targz.py | 3 | 1683 | """SCons.Tool.Packaging.targz
The targz SRC packager.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/packaging/targz.py 3897 2009/01/13 06:45:54 scons"
from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot
def package(env, target, source, PACKAGEROOT, **kw):
    """Build a gzip-compressed tar source package using the Tar builder."""
    tar_builder = env['BUILDERS']['Tar']
    tar_builder.set_suffix('.tar.gz')
    # Strip install builders from sources, then relocate under PACKAGEROOT.
    target, source = stripinstallbuilder(target, source, env)
    target, source = putintopackageroot(target, source, env, PACKAGEROOT)
    return tar_builder(env, target, source, TARFLAGS='-zc')
| bsd-3-clause |
Zhongqilong/kbengine | kbe/src/lib/python/Tools/scripts/fixdiv.py | 94 | 13938 | #! /usr/bin/env python3
"""fixdiv - tool to fix division operators.
To use this tool, first run `python -Qwarnall yourscript.py 2>warnings'.
This runs the script `yourscript.py' while writing warning messages
about all uses of the classic division operator to the file
`warnings'. The warnings look like this:
<file>:<line>: DeprecationWarning: classic <type> division
The warnings are written to stderr, so you must use `2>' for the I/O
redirect. I know of no way to redirect stderr on Windows in a DOS
box, so you will have to modify the script to set sys.stderr to some
kind of log file if you want to do this on Windows.
The warnings are not limited to the script; modules imported by the
script may also trigger warnings. In fact a useful technique is to
write a test script specifically intended to exercise all code in a
particular module or set of modules.
Then run `python fixdiv.py warnings'. This first reads the warnings,
looking for classic division warnings, and sorts them by file name and
line number. Then, for each file that received at least one warning,
it parses the file and tries to match the warnings up to the division
operators found in the source code. If it is successful, it writes
its findings to stdout, preceded by a line of dashes and a line of the
form:
Index: <file>
If the only findings found are suggestions to change a / operator into
a // operator, the output is acceptable input for the Unix 'patch'
program.
Here are the possible messages on stdout (N stands for a line number):
- A plain-diff-style change ('NcN', a line marked by '<', a line
containing '---', and a line marked by '>'):
A / operator was found that should be changed to //. This is the
recommendation when only int and/or long arguments were seen.
- 'True division / operator at line N' and a line marked by '=':
A / operator was found that can remain unchanged. This is the
recommendation when only float and/or complex arguments were seen.
- 'Ambiguous / operator (..., ...) at line N', line marked by '?':
A / operator was found for which int or long as well as float or
complex arguments were seen. This is highly unlikely; if it occurs,
you may have to restructure the code to keep the classic semantics,
or maybe you don't care about the classic semantics.
- 'No conclusive evidence on line N', line marked by '*':
A / operator was found for which no warnings were seen. This could
be code that was never executed, or code that was only executed
with user-defined objects as arguments. You will have to
investigate further. Note that // can be overloaded separately from
/, using __floordiv__. True division can also be separately
overloaded, using __truediv__. Classic division should be the same
as either of those. (XXX should I add a warning for division on
user-defined objects, to disambiguate this case from code that was
never executed?)
- 'Phantom ... warnings for line N', line marked by '*':
A warning was seen for a line not containing a / operator. The most
likely cause is a warning about code executed by 'exec' or eval()
(see note below), or an indirect invocation of the / operator, for
example via the div() function in the operator module. It could
also be caused by a change to the file between the time the test
script was run to collect warnings and the time fixdiv was run.
- 'More than one / operator in line N'; or
'More than one / operator per statement in lines N-N':
The scanner found more than one / operator on a single line, or in a
statement split across multiple lines. Because the warnings
framework doesn't (and can't) show the offset within the line, and
the code generator doesn't always give the correct line number for
operations in a multi-line statement, we can't be sure whether all
operators in the statement were executed. To be on the safe side,
by default a warning is issued about this case. In practice, these
cases are usually safe, and the -m option suppresses these warning.
- 'Can't find the / operator in line N', line marked by '*':
This really shouldn't happen. It means that the tokenize module
reported a '/' operator but the line it returns didn't contain a '/'
character at the indicated position.
- 'Bad warning for line N: XYZ', line marked by '*':
This really shouldn't happen. It means that a 'classic XYZ
division' warning was read with XYZ being something other than
'int', 'long', 'float', or 'complex'.
Notes:
- The augmented assignment operator /= is handled the same way as the
/ operator.
- This tool never looks at the // operator; no warnings are ever
generated for use of this operator.
- This tool never looks at the / operator when a future division
statement is in effect; no warnings are generated in this case, and
because the tool only looks at files for which at least one classic
division warning was seen, it will never look at files containing a
future division statement.
- Warnings may be issued for code not read from a file, but executed
using the exec() or eval() functions. These may have
<string> in the filename position, in which case the fixdiv script
will attempt and fail to open a file named '<string>' and issue a
warning about this failure; or these may be reported as 'Phantom'
warnings (see above). You're on your own to deal with these. You
could make all recommended changes and add a future division
statement to all affected files, and then re-run the test script; it
should not issue any warnings. If there are any, and you have a
hard time tracking down where they are generated, you can use the
-Werror option to force an error instead of a first warning,
generating a traceback.
- The tool should be run from the same directory as that from which
the original script was run, otherwise it won't be able to open
files given by relative pathnames.
"""
import sys
import getopt
import re
import tokenize
multi_ok = 0
def main():
    """Command-line entry point; returns the process exit status (or None)."""
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hm")
    except getopt.error as msg:
        usage(msg)
        return 2
    for o, _ in opts:
        if o == "-h":
            print(__doc__)
            return
        if o == "-m":
            global multi_ok
            multi_ok = 1
    if not args:
        usage("at least one file argument is required")
        return 2
    if args[1:]:
        # BUG FIX: file.write() takes a single string; the original passed
        # the format and the argument as two parameters, raising TypeError.
        sys.stderr.write("%s: extra file arguments ignored\n" % sys.argv[0])
    warnings = readwarnings(args[0])
    if warnings is None:
        return 1
    files = sorted(warnings.keys())
    if not files:
        print("No classic division warnings read from", args[0])
        return
    # `status` renamed from `exit`, which shadowed the builtin.
    status = None
    for filename in files:
        x = process(filename, warnings[filename])
        status = status or x
    return status
def usage(msg):
    """Write an error message plus a short usage hint to stderr."""
    prog = sys.argv[0]
    err = sys.stderr
    err.write("%s: %s\n" % (prog, msg))
    err.write("Usage: %s [-m] warnings\n" % prog)
    err.write("Try `%s -h' for more information.\n" % prog)
PATTERN = ("^(.+?):(\d+): DeprecationWarning: "
"classic (int|long|float|complex) division$")
def readwarnings(warningsfile):
prog = re.compile(PATTERN)
try:
f = open(warningsfile)
except IOError as msg:
sys.stderr.write("can't open: %s\n" % msg)
return
warnings = {}
while 1:
line = f.readline()
if not line:
break
m = prog.match(line)
if not m:
if line.find("division") >= 0:
sys.stderr.write("Warning: ignored input " + line)
continue
filename, lineno, what = m.groups()
list = warnings.get(filename)
if list is None:
warnings[filename] = list = []
list.append((int(lineno), sys.intern(what)))
f.close()
return warnings
def process(filename, list):
    # NOTE(review): `list` shadows the builtin; kept to preserve the
    # established interface.
    """Match the warnings for one file against its '/' operators and print
    patch-style recommendations (see the module docstring for the formats)."""
    print("-"*70)
    assert list # if this fails, readwarnings() is broken
    try:
        fp = open(filename)
    except IOError as msg:
        sys.stderr.write("can't open: %s\n" % msg)
        return 1
    print("Index:", filename)
    f = FileContext(fp)
    list.sort()
    index = 0 # list[:index] has been processed, list[index:] is still to do
    g = tokenize.generate_tokens(f.readline)
    # Walk the file one logical statement at a time, consuming the sorted
    # warning list in lock-step with the statement line ranges.
    while 1:
        startlineno, endlineno, slashes = lineinfo = scanline(g)
        if startlineno is None:
            break
        # Chained comparison: startlineno <= endlineno AND endlineno is not None.
        assert startlineno <= endlineno is not None
        # Warnings before this statement have no matching source line.
        orphans = []
        while index < len(list) and list[index][0] < startlineno:
            orphans.append(list[index])
            index += 1
        if orphans:
            reportphantomwarnings(orphans, f)
        # Warnings that fall inside this statement's line range.
        warnings = []
        while index < len(list) and list[index][0] <= endlineno:
            warnings.append(list[index])
            index += 1
        if not slashes and not warnings:
            pass
        elif slashes and not warnings:
            report(slashes, "No conclusive evidence")
        elif warnings and not slashes:
            reportphantomwarnings(warnings, f)
        else:
            # Both '/' operators and warnings present: classify and report.
            if len(slashes) > 1:
                if not multi_ok:
                    # Multiple operators in one statement: line attribution
                    # is ambiguous, so warn (suppressed by the -m option).
                    rows = []
                    lastrow = None
                    for (row, col), line in slashes:
                        if row == lastrow:
                            continue
                        rows.append(row)
                        lastrow = row
                    assert rows
                    if len(rows) == 1:
                        print("*** More than one / operator in line", rows[0])
                    else:
                        print("*** More than one / operator per statement", end=' ')
                        print("in lines %d-%d" % (rows[0], rows[-1]))
            # Bucket the warnings by operand category.
            intlong = []
            floatcomplex = []
            bad = []
            for lineno, what in warnings:
                if what in ("int", "long"):
                    intlong.append(what)
                elif what in ("float", "complex"):
                    floatcomplex.append(what)
                else:
                    bad.append(what)
            lastrow = None
            for (row, col), line in slashes:
                if row == lastrow:
                    continue
                lastrow = row
                line = chop(line)
                if line[col:col+1] != "/":
                    print("*** Can't find the / operator in line %d:" % row)
                    print("*", line)
                    continue
                if bad:
                    print("*** Bad warning for line %d:" % row, bad)
                    print("*", line)
                elif intlong and not floatcomplex:
                    # Integer-only evidence: recommend // (plain-diff format).
                    print("%dc%d" % (row, row))
                    print("<", line)
                    print("---")
                    print(">", line[:col] + "/" + line[col:])
                elif floatcomplex and not intlong:
                    # Float/complex-only evidence: the / can stay.
                    print("True division / operator at line %d:" % row)
                    print("=", line)
                elif intlong and floatcomplex:
                    print("*** Ambiguous / operator (%s, %s) at line %d:" % (
                        "|".join(intlong), "|".join(floatcomplex), row))
                    print("?", line)
    fp.close()
def reportphantomwarnings(warnings, f):
    """Report warnings whose line contains no '/' operator, grouped by line.

    `warnings` is a list of (row, what) pairs sorted by row; `f` is the
    FileContext used to echo the offending source line.
    """
    blocks = []
    lastrow = None
    lastblock = None
    for row, what in warnings:
        if row != lastrow:
            lastblock = [row]
            blocks.append(lastblock)
            # BUG FIX: remember the row; previously `lastrow` was never
            # updated, so every warning started a new block and warnings on
            # the same line were never merged into one "int/long" report.
            lastrow = row
        lastblock.append(what)
    for block in blocks:
        row = block[0]
        whats = "/".join(block[1:])
        print("*** Phantom %s warnings for line %d:" % (whats, row))
        f.report(row, mark="*")
def report(slashes, message):
    """Print *message* once for each distinct source row in *slashes*."""
    seen_row = None
    for (row, col), line in slashes:
        if row == seen_row:
            # Only one report per physical line, even with several slashes.
            continue
        seen_row = row
        print("*** %s on line %d:" % (message, row))
        print("*", chop(line))
class FileContext:
    """Line buffer over a file object with lookahead and 1-based indexing.

    Lines already consumed via readline() stay in `buffer`; upcoming lines
    are prefetched into `lookahead` (up to `window` lines), so `self[i]`
    can serve any line in that sliding range.
    """

    def __init__(self, fp, window=5, lineno=1):
        self.fp = fp
        # BUG FIX: honor the constructor arguments; previously the window
        # and starting line number were hard-coded to 5 and 1, silently
        # ignoring whatever the caller passed.
        self.window = window
        self.lineno = lineno
        self.eoflookahead = 0
        self.lookahead = []
        self.buffer = []

    def fill(self):
        # Top up the lookahead list to `window` lines, or until EOF.
        while len(self.lookahead) < self.window and not self.eoflookahead:
            line = self.fp.readline()
            if not line:
                self.eoflookahead = 1
                break
            self.lookahead.append(line)

    def readline(self):
        """Return the next line (moving it into the history buffer)."""
        self.fill()
        if not self.lookahead:
            return ""
        line = self.lookahead.pop(0)
        self.buffer.append(line)
        self.lineno += 1
        return line

    def truncate(self):
        # BUG FIX: `window` was an unbound name here (NameError when
        # called); use the instance attribute.
        del self.buffer[-self.window:]

    def __getitem__(self, index):
        """Return line `index` (1-based) from history or lookahead."""
        self.fill()
        bufstart = self.lineno - len(self.buffer)
        lookend = self.lineno + len(self.lookahead)
        if bufstart <= index < self.lineno:
            return self.buffer[index - bufstart]
        if self.lineno <= index < lookend:
            return self.lookahead[index - self.lineno]
        raise KeyError(index)

    def report(self, first, last=None, mark="*"):
        """Print lines first..last, each prefixed by `mark`."""
        if last is None:
            last = first
        for i in range(first, last + 1):
            try:
                # BUG FIX: index the loop variable; the original indexed
                # `self[first]` and printed the same line repeatedly.
                line = self[i]
            except KeyError:
                line = "<missing line>"
            print(mark, chop(line))
def scanline(g):
    """Consume tokens from generator `g` up to and including the next NEWLINE.

    Returns (first_lineno, last_lineno, slashes) where `slashes` lists
    ((row, col), line) for every '/' or '/=' operator seen; the line
    numbers are None at end of input.
    """
    slashes = []
    first = None
    last = None
    for tok_type, tok_str, start, end, line in g:
        last = end[0]
        if first is None:
            first = last
        if tok_str in ("/", "/="):
            slashes.append((start, line))
        if tok_type == tokenize.NEWLINE:
            break
    return first, last, slashes
def chop(line):
    """Strip a single trailing newline from *line*, if present."""
    return line[:-1] if line.endswith("\n") else line
if __name__ == "__main__":
sys.exit(main())
| lgpl-3.0 |
sendgrid/sendgrid-python | setup.py | 1 | 1257 | import io
import os
from distutils.file_util import copy_file
from setuptools import setup, find_packages
# Load __version__ from the version module without importing the package
# (avoids pulling in its runtime dependencies at build time).
__version__ = None
with open('sendgrid/version.py') as f:
    exec(f.read())
def getRequires():
    """Return the runtime dependency specifiers for this package."""
    return [
        'python_http_client>=3.2.1',
        'starkbank-ecdsa>=1.0.0',
    ]
# Resolve paths relative to this setup.py so builds work from any cwd.
dir_path = os.path.abspath(os.path.dirname(__file__))
# The README doubles as the PyPI long description.
readme = io.open(os.path.join(dir_path, 'README.rst'), encoding='utf-8').read()

setup(
    name='sendgrid',
    version=str(__version__),
    author='Elmer Thomas, Yamil Asusta',
    author_email='help@twilio.com',
    url='https://github.com/sendgrid/sendgrid-python/',
    packages=find_packages(exclude=["temp*.py", "test"]),
    include_package_data=True,
    license='MIT',
    description='Twilio SendGrid library for Python',
    long_description=readme,
    install_requires=getRequires(),
    # Supports Python 2.7 and 3.5+ only.
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
    classifiers=[
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ]
)
| mit |
MSEMJEJME/Get-Dumped | renpy/audio/__init__.py | 1 | 1154 | # Copyright 2004-2012 Tom Rothamel <pytom@bishoujo.us>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# This file intentionally left blank.
| gpl-2.0 |
twc-openstack/jenkins-job-builder | jenkins_jobs/config.py | 2 | 13153 | #!/usr/bin/env python
# Copyright (C) 2015 Wayne Warren
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Manage JJB Configuration sources, defaults, and access.
from collections import defaultdict
import io
import logging
import os
from six.moves import configparser, StringIO
from six import PY2
from jenkins_jobs import builder
from jenkins_jobs.errors import JJBConfigException
from jenkins_jobs.errors import JenkinsJobsException
__all__ = [
"JJBConfig"
]
logger = logging.getLogger(__name__)
DEFAULT_CONF = """
[job_builder]
keep_descriptions=False
ignore_cache=False
recursive=False
exclude=.*
allow_duplicates=False
allow_empty_variables=False
[jenkins]
url=http://localhost:8080/
query_plugins_info=True
[hipchat]
authtoken=dummy
send-as=Jenkins
"""
CONFIG_REQUIRED_MESSAGE = ("A valid configuration file is required. "
"No configuration file passed.")
class JJBConfig(object):

    def __init__(self, config_filename=None, config_file_required=False):
        """
        The JJBConfig class is intended to encapsulate and resolve priority
        between all sources of configuration for the JJB library. This allows
        the various sources of configuration to provide a consistent accessor
        interface regardless of where they are used.

        It also allows users of JJB-as-an-API to create minimally valid
        configuration and easily make minor modifications to default values
        without strictly adhering to the confusing setup (see the _setup
        method, the behavior of which largely lived in the cmd.execute method
        previously) necessary for the jenkins-jobs command line tool.

        :arg str config_filename: Name of configuration file on which to base
            this config object.
        :arg bool config_file_required: Allows users of the JJBConfig class to
            decide whether or not it's really necessary for a config file to be
            passed in when creating an instance. This has two effects on the
            behavior of JJBConfig initialization:
            * It determines whether or not we try "local" and "global" config
              files.
            * It determines whether or not failure to read some config file
              will raise an exception or simply print a warning message
              indicating that no config file was found.
        """
        config_parser = self._init_defaults()

        # Candidate config file locations when none is passed explicitly:
        # a file shipped next to this module wins, then the per-user file,
        # then the system-wide one.
        global_conf = '/etc/jenkins_jobs/jenkins_jobs.ini'
        user_conf = os.path.join(os.path.expanduser('~'), '.config',
                                 'jenkins_jobs', 'jenkins_jobs.ini')
        local_conf = os.path.join(os.path.dirname(__file__),
                                  'jenkins_jobs.ini')
        conf = None
        if config_filename is not None:
            conf = config_filename
        else:
            if os.path.isfile(local_conf):
                conf = local_conf
            elif os.path.isfile(user_conf):
                conf = user_conf
            else:
                conf = global_conf

        # Defensive check: 'conf' is always assigned above, so this only
        # fires if the selection logic ever changes to leave it unset.
        if config_file_required and conf is None:
            raise JJBConfigException(CONFIG_REQUIRED_MESSAGE)

        config_fp = None
        if conf is not None:
            try:
                config_fp = self._read_config_file(conf)
            except JJBConfigException:
                if config_file_required:
                    raise JJBConfigException(CONFIG_REQUIRED_MESSAGE)
                else:
                    # FIX: logger.warn is a deprecated alias; use warning().
                    logger.warning("Config file, {0}, not found. Using "
                                   "default config values.".format(conf))

        if config_fp is not None:
            # readfp() is the only spelling available on some py2
            # ConfigParser versions; read_file() is the py3 replacement.
            if PY2:
                config_parser.readfp(config_fp)
            else:
                config_parser.read_file(config_fp)

        self.config_parser = config_parser

        # Scalar settings resolved by _setup(); initialized to safe defaults.
        self.ignore_cache = False
        self.flush_cache = False
        self.user = None
        self.password = None
        self.plugins_info = None
        self.timeout = builder._DEFAULT_TIMEOUT
        self.allow_empty_variables = None

        # Per-consumer setting dicts populated by _setup().
        self.jenkins = defaultdict(None)
        self.builder = defaultdict(None)
        self.yamlparser = defaultdict(None)
        self.hipchat = defaultdict(None)

        self._setup()

    def _init_defaults(self):
        """ Initialize default configuration values using DEFAULT_CONF
        """
        config = configparser.ConfigParser()
        # Load default config always
        if PY2:
            config.readfp(StringIO(DEFAULT_CONF))
        else:
            config.read_file(StringIO(DEFAULT_CONF))
        return config

    def _read_config_file(self, config_filename):
        """ Given path to configuration file, read it in as a ConfigParser
        object and return that object.

        :arg str config_filename: path to the config file to open.
        :returns: an open text-mode file object (caller-owned).
        :raises JJBConfigException: if the path is not an existing file.
        """
        if os.path.isfile(config_filename):
            self.__config_file = config_filename  # remember file we read from
            logger.debug("Reading config from {0}".format(config_filename))
            config_fp = io.open(config_filename, 'r', encoding='utf-8')
        else:
            raise JJBConfigException(
                "A valid configuration file is required. "
                "\n{0} is not valid.".format(config_filename))

        return config_fp

    def _setup(self):
        """ Resolve effective settings from the parsed configuration.

        Reads the [jenkins] and [job_builder] sections and populates the
        jenkins/builder/yamlparser attribute dicts that the rest of the
        library queries.
        """
        config = self.config_parser

        logger.debug("Config: {0}".format(config))

        # check the ignore_cache setting
        if config.has_option('jenkins', 'ignore_cache'):
            # FIX: was logging.warn() — deprecated alias, and it bypassed
            # the module logger used everywhere else in this file.
            logger.warning("ignore_cache option should be moved to the "
                           "[job_builder] section in the config file, the "
                           "one specified in the [jenkins] section will be "
                           "ignored in the future")
            self.ignore_cache = config.getboolean('jenkins', 'ignore_cache')
        elif config.has_option('job_builder', 'ignore_cache'):
            self.ignore_cache = config.getboolean('job_builder',
                                                  'ignore_cache')

        # check the flush_cache setting
        if config.has_option('job_builder', 'flush_cache'):
            self.flush_cache = config.getboolean('job_builder', 'flush_cache')

        # Jenkins supports access as an anonymous user, which can be used to
        # ensure read-only behaviour when querying the version of plugins
        # installed for test mode to generate XML output matching what will be
        # uploaded. To enable must pass 'None' as the value for user and
        # password to python-jenkins
        #
        # catching 'TypeError' is a workaround for python 2.6 interpolation
        # error
        # https://bugs.launchpad.net/openstack-ci/+bug/1259631
        try:
            self.user = config.get('jenkins', 'user')
        except (TypeError, configparser.NoOptionError):
            pass

        try:
            self.password = config.get('jenkins', 'password')
        except (TypeError, configparser.NoOptionError):
            pass

        # None -- no timeout, blocking mode; same as setblocking(True)
        # 0.0 -- non-blocking mode; same as setblocking(False) <--- default
        # > 0 -- timeout mode; operations time out after timeout seconds
        # < 0 -- illegal; raises an exception
        # to retain the default must use
        # "timeout=jenkins_jobs.builder._DEFAULT_TIMEOUT" or not set timeout at
        # all.
        try:
            self.timeout = config.getfloat('jenkins', 'timeout')
        except ValueError:
            raise JenkinsJobsException("Jenkins timeout config is invalid")
        except (TypeError, configparser.NoOptionError):
            pass

        if not config.getboolean("jenkins", "query_plugins_info"):
            logger.debug("Skipping plugin info retrieval")
            self.plugins_info = []

        self.recursive = config.getboolean('job_builder', 'recursive')
        self.excludes = config.get('job_builder', 'exclude').split(os.pathsep)

        # The way we want to do things moving forward:
        self.jenkins['url'] = config.get('jenkins', 'url')
        self.jenkins['user'] = self.user
        self.jenkins['password'] = self.password
        self.jenkins['timeout'] = self.timeout

        self.builder['ignore_cache'] = self.ignore_cache
        self.builder['flush_cache'] = self.flush_cache
        self.builder['plugins_info'] = self.plugins_info

        # keep descriptions ? (used by yamlparser)
        keep_desc = False
        if (config and config.has_section('job_builder') and
                config.has_option('job_builder', 'keep_descriptions')):
            keep_desc = config.getboolean('job_builder',
                                          'keep_descriptions')
        self.yamlparser['keep_descriptions'] = keep_desc

        # figure out the include path (used by yamlparser)
        path = ["."]
        if (config and config.has_section('job_builder') and
                config.has_option('job_builder', 'include_path')):
            path = config.get('job_builder',
                              'include_path').split(':')
        self.yamlparser['include_path'] = path

        # allow duplicates?
        allow_duplicates = False
        if config and config.has_option('job_builder', 'allow_duplicates'):
            allow_duplicates = config.getboolean('job_builder',
                                                 'allow_duplicates')
        self.yamlparser['allow_duplicates'] = allow_duplicates

        # allow empty variables?
        self.yamlparser['allow_empty_variables'] = (
            self.allow_empty_variables or
            config and config.has_section('job_builder') and
            config.has_option('job_builder', 'allow_empty_variables') and
            config.getboolean('job_builder', 'allow_empty_variables'))

    def validate(self):
        """ Verify that the resolved configuration is internally consistent.

        :raises JenkinsJobsException: if only one of user/password is set,
            or if plugins_info is set but is not a list.
        """
        config = self.config_parser

        # Inform the user as to what is likely to happen, as they may specify
        # a real jenkins instance in test mode to get the plugin info to check
        # the XML generated.
        if self.jenkins['user'] is None and self.jenkins['password'] is None:
            logger.info("Will use anonymous access to Jenkins if needed.")
        elif ((self.jenkins['user'] is not None and
               self.jenkins['password'] is None) or
              (self.jenkins['user'] is None and
               self.jenkins['password'] is not None)):
            raise JenkinsJobsException(
                "Cannot authenticate to Jenkins with only one of User and "
                "Password provided, please check your configuration."
            )

        if (self.builder['plugins_info'] is not None and
                not isinstance(self.builder['plugins_info'], list)):
            raise JenkinsJobsException("plugins_info must contain a list!")

        # Temporary until yamlparser is refactored to query config object
        if self.yamlparser['allow_empty_variables'] is not None:
            config.set('job_builder',
                       'allow_empty_variables',
                       str(self.yamlparser['allow_empty_variables']))

    def get_module_config(self, section, key):
        """ Given a section name and a key value, return the value assigned to
        the key in the JJB .ini file if it exists, otherwise emit a warning
        indicating that the value is not set. Default value returned if no
        value is set in the file will be a blank string.
        """
        result = ''
        try:
            result = self.config_parser.get(
                section, key
            )
        except (configparser.NoSectionError, configparser.NoOptionError,
                JenkinsJobsException) as e:
            logger.warning("You didn't set a " + key +
                           " neither in the yaml job definition nor in" +
                           " the " + section + " section, blank default" +
                           " value will be applied:\n{0}".format(e))
        return result

    def get_plugin_config(self, plugin, key):
        """ Look up a plugin setting, preferring the [plugin "name"] section
        and falling back to the legacy [name] section for compatibility.
        """
        value = self.get_module_config('plugin "{}"'.format(plugin), key)

        # Backwards compatibility for users who have not switched to the new
        # plugin configuration format in their config. This code should be
        # removed in future versions of JJB after 2.0.
        if not value:
            value = self.get_module_config(plugin, key)
            logger.warning(
                "Defining plugin configuration using [" + plugin + "] is"
                " deprecated. The recommended way to define plugins now is by"
                " configuring [plugin \"" + plugin + "\"]")

        return value
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.