repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
|---|---|---|---|---|---|
ChemiKhazi/Sprytile
|
rx/testing/coldobservable.py
|
2
|
1816
|
from rx.core import ObservableBase, Observer, AnonymousObserver, Disposable
from rx.disposables import CompositeDisposable
from .subscription import Subscription
from .reactive_assert import AssertList
class ColdObservable(ObservableBase):
    """Test observable that replays pre-recorded messages on a virtual-time
    scheduler, with timings relative to each subscription."""

    def __init__(self, scheduler, messages):
        """Store the virtual scheduler, the recorded messages, and an empty
        subscription log (an AssertList so tests can assert against it)."""
        super(ColdObservable, self).__init__()
        self.scheduler = scheduler
        self.messages = messages
        self.subscriptions = AssertList()

    def subscribe(self, on_next=None, on_error=None, on_completed=None, observer=None):
        """Subscribe an observer, or build one from the given callbacks."""
        # Be forgiving and accept an un-named observer as first parameter.
        if isinstance(on_next, Observer):
            observer = on_next
        elif not observer:
            observer = AnonymousObserver(on_next, on_error, on_completed)
        return self._subscribe_core(observer)

    def _subscribe_core(self, observer):
        """Log the subscription, schedule every recorded message relative to
        now, and return a disposable that closes the subscription window."""
        subscribe_time = self.scheduler.to_relative(self.scheduler.now)
        self.subscriptions.append(Subscription(subscribe_time))
        subscription_index = len(self.subscriptions) - 1
        group = CompositeDisposable()

        for recorded in self.messages:
            # Bind the notification as a default argument so every scheduled
            # action captures its own message (avoids the late-binding
            # closure-in-a-loop pitfall).
            def replay(scheduler, state, notification=recorded.value):
                notification.accept(observer)
                return Disposable.empty()

            group.add(self.scheduler.schedule_relative(recorded.time, replay))

        def dispose():
            # Replace the open-ended subscription record with one that also
            # carries the unsubscribe time, then cancel pending deliveries.
            opened = self.subscriptions[subscription_index].subscribe
            closed = self.scheduler.to_relative(self.scheduler.now)
            self.subscriptions[subscription_index] = Subscription(opened, closed)
            group.dispose()

        return Disposable.create(dispose)
|
mit
|
bricky/xbmc-addon-tvtumbler
|
resources/lib/unidecode/x097.py
|
252
|
4643
|
# Transliteration table for a 256-code-point Unicode block (file name x097
# suggests U+9700..U+97FF — TODO confirm against the unidecode package).
# Entry k is the ASCII romanization of code point (block_base + k);
# '[?] ' marks code points with no known transliteration.
data = (
    'Xu ',  # 0x00
    'Ji ',  # 0x01
    'Mu ',  # 0x02
    'Chen ',  # 0x03
    'Xiao ',  # 0x04
    'Zha ',  # 0x05
    'Ting ',  # 0x06
    'Zhen ',  # 0x07
    'Pei ',  # 0x08
    'Mei ',  # 0x09
    'Ling ',  # 0x0a
    'Qi ',  # 0x0b
    'Chou ',  # 0x0c
    'Huo ',  # 0x0d
    'Sha ',  # 0x0e
    'Fei ',  # 0x0f
    'Weng ',  # 0x10
    'Zhan ',  # 0x11
    'Yin ',  # 0x12
    'Ni ',  # 0x13
    'Chou ',  # 0x14
    'Tun ',  # 0x15
    'Lin ',  # 0x16
    '[?] ',  # 0x17
    'Dong ',  # 0x18
    'Ying ',  # 0x19
    'Wu ',  # 0x1a
    'Ling ',  # 0x1b
    'Shuang ',  # 0x1c
    'Ling ',  # 0x1d
    'Xia ',  # 0x1e
    'Hong ',  # 0x1f
    'Yin ',  # 0x20
    'Mo ',  # 0x21
    'Mai ',  # 0x22
    'Yun ',  # 0x23
    'Liu ',  # 0x24
    'Meng ',  # 0x25
    'Bin ',  # 0x26
    'Wu ',  # 0x27
    'Wei ',  # 0x28
    'Huo ',  # 0x29
    'Yin ',  # 0x2a
    'Xi ',  # 0x2b
    'Yi ',  # 0x2c
    'Ai ',  # 0x2d
    'Dan ',  # 0x2e
    'Deng ',  # 0x2f
    'Xian ',  # 0x30
    'Yu ',  # 0x31
    'Lu ',  # 0x32
    'Long ',  # 0x33
    'Dai ',  # 0x34
    'Ji ',  # 0x35
    'Pang ',  # 0x36
    'Yang ',  # 0x37
    'Ba ',  # 0x38
    'Pi ',  # 0x39
    'Wei ',  # 0x3a
    '[?] ',  # 0x3b
    'Xi ',  # 0x3c
    'Ji ',  # 0x3d
    'Mai ',  # 0x3e
    'Meng ',  # 0x3f
    'Meng ',  # 0x40
    'Lei ',  # 0x41
    'Li ',  # 0x42
    'Huo ',  # 0x43
    'Ai ',  # 0x44
    'Fei ',  # 0x45
    'Dai ',  # 0x46
    'Long ',  # 0x47
    'Ling ',  # 0x48
    'Ai ',  # 0x49
    'Feng ',  # 0x4a
    'Li ',  # 0x4b
    'Bao ',  # 0x4c
    '[?] ',  # 0x4d
    'He ',  # 0x4e
    'He ',  # 0x4f
    'Bing ',  # 0x50
    'Qing ',  # 0x51
    'Qing ',  # 0x52
    'Jing ',  # 0x53
    'Tian ',  # 0x54
    'Zhen ',  # 0x55
    'Jing ',  # 0x56
    'Cheng ',  # 0x57
    'Qing ',  # 0x58
    'Jing ',  # 0x59
    'Jing ',  # 0x5a
    'Dian ',  # 0x5b
    'Jing ',  # 0x5c
    'Tian ',  # 0x5d
    'Fei ',  # 0x5e
    'Fei ',  # 0x5f
    'Kao ',  # 0x60
    'Mi ',  # 0x61
    'Mian ',  # 0x62
    'Mian ',  # 0x63
    'Pao ',  # 0x64
    'Ye ',  # 0x65
    'Tian ',  # 0x66
    'Hui ',  # 0x67
    'Ye ',  # 0x68
    'Ge ',  # 0x69
    'Ding ',  # 0x6a
    'Cha ',  # 0x6b
    'Jian ',  # 0x6c
    'Ren ',  # 0x6d
    'Di ',  # 0x6e
    'Du ',  # 0x6f
    'Wu ',  # 0x70
    'Ren ',  # 0x71
    'Qin ',  # 0x72
    'Jin ',  # 0x73
    'Xue ',  # 0x74
    'Niu ',  # 0x75
    'Ba ',  # 0x76
    'Yin ',  # 0x77
    'Sa ',  # 0x78
    'Na ',  # 0x79
    'Mo ',  # 0x7a
    'Zu ',  # 0x7b
    'Da ',  # 0x7c
    'Ban ',  # 0x7d
    'Yi ',  # 0x7e
    'Yao ',  # 0x7f
    'Tao ',  # 0x80
    'Tuo ',  # 0x81
    'Jia ',  # 0x82
    'Hong ',  # 0x83
    'Pao ',  # 0x84
    'Yang ',  # 0x85
    'Tomo ',  # 0x86
    'Yin ',  # 0x87
    'Jia ',  # 0x88
    'Tao ',  # 0x89
    'Ji ',  # 0x8a
    'Xie ',  # 0x8b
    'An ',  # 0x8c
    'An ',  # 0x8d
    'Hen ',  # 0x8e
    'Gong ',  # 0x8f
    'Kohaze ',  # 0x90
    'Da ',  # 0x91
    'Qiao ',  # 0x92
    'Ting ',  # 0x93
    'Wan ',  # 0x94
    'Ying ',  # 0x95
    'Sui ',  # 0x96
    'Tiao ',  # 0x97
    'Qiao ',  # 0x98
    'Xuan ',  # 0x99
    'Kong ',  # 0x9a
    'Beng ',  # 0x9b
    'Ta ',  # 0x9c
    'Zhang ',  # 0x9d
    'Bing ',  # 0x9e
    'Kuo ',  # 0x9f
    'Ju ',  # 0xa0
    'La ',  # 0xa1
    'Xie ',  # 0xa2
    'Rou ',  # 0xa3
    'Bang ',  # 0xa4
    'Yi ',  # 0xa5
    'Qiu ',  # 0xa6
    'Qiu ',  # 0xa7
    'He ',  # 0xa8
    'Xiao ',  # 0xa9
    'Mu ',  # 0xaa
    'Ju ',  # 0xab
    'Jian ',  # 0xac
    'Bian ',  # 0xad
    'Di ',  # 0xae
    'Jian ',  # 0xaf
    'On ',  # 0xb0
    'Tao ',  # 0xb1
    'Gou ',  # 0xb2
    'Ta ',  # 0xb3
    'Bei ',  # 0xb4
    'Xie ',  # 0xb5
    'Pan ',  # 0xb6
    'Ge ',  # 0xb7
    'Bi ',  # 0xb8
    'Kuo ',  # 0xb9
    'Tang ',  # 0xba
    'Lou ',  # 0xbb
    'Gui ',  # 0xbc
    'Qiao ',  # 0xbd
    'Xue ',  # 0xbe
    'Ji ',  # 0xbf
    'Jian ',  # 0xc0
    'Jiang ',  # 0xc1
    'Chan ',  # 0xc2
    'Da ',  # 0xc3
    'Huo ',  # 0xc4
    'Xian ',  # 0xc5
    'Qian ',  # 0xc6
    'Du ',  # 0xc7
    'Wa ',  # 0xc8
    'Jian ',  # 0xc9
    'Lan ',  # 0xca
    'Wei ',  # 0xcb
    'Ren ',  # 0xcc
    'Fu ',  # 0xcd
    'Mei ',  # 0xce
    'Juan ',  # 0xcf
    'Ge ',  # 0xd0
    'Wei ',  # 0xd1
    'Qiao ',  # 0xd2
    'Han ',  # 0xd3
    'Chang ',  # 0xd4
    '[?] ',  # 0xd5
    'Rou ',  # 0xd6
    'Xun ',  # 0xd7
    'She ',  # 0xd8
    'Wei ',  # 0xd9
    'Ge ',  # 0xda
    'Bei ',  # 0xdb
    'Tao ',  # 0xdc
    'Gou ',  # 0xdd
    'Yun ',  # 0xde
    '[?] ',  # 0xdf
    'Bi ',  # 0xe0
    'Wei ',  # 0xe1
    'Hui ',  # 0xe2
    'Du ',  # 0xe3
    'Wa ',  # 0xe4
    'Du ',  # 0xe5
    'Wei ',  # 0xe6
    'Ren ',  # 0xe7
    'Fu ',  # 0xe8
    'Han ',  # 0xe9
    'Wei ',  # 0xea
    'Yun ',  # 0xeb
    'Tao ',  # 0xec
    'Jiu ',  # 0xed
    'Jiu ',  # 0xee
    'Xian ',  # 0xef
    'Xie ',  # 0xf0
    'Xian ',  # 0xf1
    'Ji ',  # 0xf2
    'Yin ',  # 0xf3
    'Za ',  # 0xf4
    'Yun ',  # 0xf5
    'Shao ',  # 0xf6
    'Le ',  # 0xf7
    'Peng ',  # 0xf8
    'Heng ',  # 0xf9
    'Ying ',  # 0xfa
    'Yun ',  # 0xfb
    'Peng ',  # 0xfc
    'Yin ',  # 0xfd
    'Yin ',  # 0xfe
    'Xiang ',  # 0xff
)
|
gpl-3.0
|
almarklein/scikit-image
|
doc/examples/plot_gabors_from_lena.py
|
3
|
3341
|
"""
=======================================================
Gabors / Primary Visual Cortex "Simple Cells" from Lena
=======================================================
How to build a (bio-plausible) "sparse" dictionary (or 'codebook', or
'filterbank') for e.g. image classification without any fancy math and
with just standard python scientific libraries?
Please find below a short answer ;-)
This simple example shows how to get Gabor-like filters [1]_ using just
the famous Lena image. Gabor filters are good approximations of the
"Simple Cells" [2]_ receptive fields [3]_ found in the mammalian primary
visual cortex (V1) (for details, see e.g. the Nobel-prize winning work
of Hubel & Wiesel done in the 60s [4]_ [5]_).
Here we use McQueen's 'kmeans' algorithm [6]_, as a simple biologically
plausible hebbian-like learning rule and we apply it (a) to patches of
the original Lena image (retinal projection), and (b) to patches of an
LGN-like [7]_ Lena image using a simple difference of gaussians (DoG)
approximation.
Enjoy ;-) And keep in mind that getting Gabors on natural image patches
is not rocket science.
.. [1] http://en.wikipedia.org/wiki/Gabor_filter
.. [2] http://en.wikipedia.org/wiki/Simple_cell
.. [3] http://en.wikipedia.org/wiki/Receptive_field
.. [4] D. H. Hubel and T. N., Wiesel Receptive Fields of Single Neurones
in the Cat's Striate Cortex, J. Physiol. pp. 574-591 (148) 1959
.. [5] D. H. Hubel and T. N., Wiesel Receptive Fields, Binocular
Interaction, and Functional Architecture in the Cat's Visual Cortex,
J. Physiol. 160 pp. 106-154 1962
.. [6] http://en.wikipedia.org/wiki/K-means_clustering
.. [7] http://en.wikipedia.org/wiki/Lateral_geniculate_nucleus
"""
import numpy as np
from scipy.cluster.vq import kmeans2
from scipy import ndimage as ndi
import matplotlib.pyplot as plt
from skimage import data
from skimage import color
from skimage.util.shape import view_as_windows
from skimage.util.montage import montage2d
np.random.seed(42)

patch_shape = 8, 8
n_filters = 49

lena = color.rgb2gray(data.lena())


def _kmeans_filterbank_montage(image):
    """Cluster every 8th 8x8 patch of *image* into n_filters centroids
    (k-means, 'points' init) and return them as one rescaled montage."""
    patches = view_as_windows(image, patch_shape)
    patches = patches.reshape(-1, patch_shape[0] * patch_shape[1])[::8]
    fb, _ = kmeans2(patches, n_filters, minit='points')
    fb = fb.reshape((-1,) + patch_shape)
    return montage2d(fb, rescale_intensity=True)


# -- filterbank1 on original Lena
fb1_montage = _kmeans_filterbank_montage(lena)

# -- filterbank2 LGN-like Lena: difference-of-gaussians approximation
lena_dog = ndi.gaussian_filter(lena, .5) - ndi.gaussian_filter(lena, 1)
fb2_montage = _kmeans_filterbank_montage(lena_dog)

# -- show both inputs next to the codebooks learned from them
fig, axes = plt.subplots(2, 2, figsize=(7, 6))
ax0, ax1, ax2, ax3 = axes.ravel()

ax0.imshow(lena, cmap=plt.cm.gray)
ax0.set_title("Lena (original)")

ax1.imshow(fb1_montage, cmap=plt.cm.gray, interpolation='nearest')
ax1.set_title("K-means filterbank (codebook)\non Lena (original)")

ax2.imshow(lena_dog, cmap=plt.cm.gray)
ax2.set_title("Lena (LGN-like DoG)")

ax3.imshow(fb2_montage, cmap=plt.cm.gray, interpolation='nearest')
ax3.set_title("K-means filterbank (codebook)\non Lena (LGN-like DoG)")

for ax in axes.ravel():
    ax.axis('off')

fig.subplots_adjust(hspace=0.3)
plt.show()
|
bsd-3-clause
|
smohsinali/HPOlib
|
optimizers/tpe/random_hyperopt_august2013_mod.py
|
5
|
7067
|
#!/usr/bin/env python
##
# wrapping: A program making it easy to use hyperparameter
# optimization software.
# Copyright (C) 2013 Katharina Eggensperger and Matthias Feurer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import cPickle
import logging
import os
import sys
import HPOlib.wrapping_util as wrappingUtil
__authors__ = ["Katharina Eggensperger", "Matthias Feurer"]
__contact__ = "automl.org"
logger = logging.getLogger("HPOlib.optimizers.tpe.randomtpe")
version_info = ("# %76s #" % "https://github.com/hyperopt/hyperopt/tree/486aebec8a4170e4781d99bbd6cca09123b12717")
# noinspection PyUnresolvedReferences
def check_dependencies():
    """Verify that all third-party packages this optimizer needs can be
    imported, logging each found version.

    Raises ImportError with a package-specific message on the first
    missing dependency. Checked in order: nose, networkx, pymongo (plus
    bson), numpy, scipy.
    """
    # (module name, debug format string, attribute holding the version,
    #  message raised when the import fails) — strings kept exactly as the
    # historical per-package try/except blocks produced them.
    requirements = (
        ("nose", "\tNose: %s\n", "__version__",
         "Nose cannot be imported. Are you sure it's installed?"),
        ("networkx", "\tnetworkx: %s\n", "__version__",
         "Networkx cannot be imported. Are you sure it's installed?"),
        ("pymongo", "\tpymongo: %s\n", "version",
         "Pymongo cannot be imported. Are you sure it's installed?"),
        ("numpy", "\tnumpy: %s", "__version__",
         "Numpy cannot be imported. Are you sure that it's installed?"),
        ("scipy", "\tscipy: %s", "__version__",
         "Scipy cannot be imported. Are you sure that it's installed?"),
    )
    for module_name, fmt, version_attr, error_msg in requirements:
        try:
            module = __import__(module_name)
            logger.debug(fmt % str(getattr(module, version_attr)))
            if module_name == "pymongo":
                # bson ships with pymongo; make sure it is importable too.
                from bson.objectid import ObjectId
        except ImportError:
            raise ImportError(error_msg)
def build_random_call(config, options, optimizer_dir):
    """Assemble the shell command that runs tpecall.py in random-search mode.

    config:        loaded .cfg file (TPE section supplies space/number_evals)
    options:       parsed options carrying seed and restore
    optimizer_dir: working directory for this optimizer run
    Returns the full command line as a single string.
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    parts = [
        "python " + script_dir + "/tpecall.py",
        '-p', os.path.join(optimizer_dir, os.path.basename(config.get('TPE', 'space'))),
        "-m", config.get('TPE', 'number_evals'),
        "-s", str(options.seed),
        "--cwd", optimizer_dir,
        "--random",
    ]
    if options.restore:
        # Ask tpecall.py to resume from the saved state.
        parts.append('-r')
    return ' '.join(parts)
# noinspection PyUnusedLocal
# noinspection PyUnusedLocal
def restore(config, optimizer_dir, **kwargs):
    """Count how many runs a previous (interrupted) session completed.

    Loads hyperopt's pickled state from <optimizer_dir>/state.pkl and
    returns completed trials multiplied by the configured number of CV
    folds. Raises Exception when the state file is missing.
    """
    restore_file = os.path.join(optimizer_dir, 'state.pkl')
    if not os.path.exists(restore_file):
        logger.error("Oups, this should have been checked before")
        raise Exception("%s does not exist" % (restore_file,))
    with open(restore_file) as fh:
        state = cPickle.load(fh)
    # hyperopt marks a finished trial with state == 2; crashed trials are
    # assumed not to carry that state.
    # noinspection PyProtectedMember
    complete_runs = sum(1 for trial in state['trials']._trials
                        if trial['state'] == 2)
    return complete_runs * config.getint('HPOLIB', 'number_cv_folds')
# noinspection PyUnusedLocal
# noinspection PyUnusedLocal
def main(config, options, experiment_dir, experiment_directory_prefix, **kwargs):
    """Set up the optimizer run directory and return (cmd, optimizer_dir).

    config: Loaded .cfg file
    options: Options containing seed, restore,
    experiment_dir: Experiment directory/Benchmarkdirectory
    **kwargs: Nothing so far
    """
    time_string = wrappingUtil.get_time_string()
    cmd = ""

    # Add path_to_optimizer to PYTHONPATH and to sys.path so the configured
    # hyperopt checkout is importable (by us and by child processes).
    if not 'PYTHONPATH' in os.environ:
        os.environ['PYTHONPATH'] = config.get('TPE', 'path_to_optimizer')
    else:
        os.environ['PYTHONPATH'] = config.get('TPE', 'path_to_optimizer') + os.pathsep + os.environ['PYTHONPATH']
    sys.path.append(config.get('TPE', 'path_to_optimizer'))

    # Directory name is derived from this module's file name.
    optimizer_str = os.path.splitext(os.path.basename(__file__))[0]

    # Find experiment directory: either resume an existing one or build a
    # fresh "<prefix><optimizer>_<seed>_<timestamp>" path.
    if options.restore:
        if not os.path.exists(options.restore):
            raise Exception("The restore directory does not exist")
        optimizer_dir = options.restore
    else:
        optimizer_dir = os.path.join(experiment_dir,
                                     experiment_directory_prefix
                                     + optimizer_str + "_" +
                                     str(options.seed) + "_" +
                                     time_string)

    # Build call
    cmd = build_random_call(config, options, optimizer_dir)

    # Set up experiment directory (only on first run, not on restore).
    if not os.path.exists(optimizer_dir):
        os.mkdir(optimizer_dir)
        space = config.get('TPE', 'space')
        abs_space = os.path.abspath(space)
        parent_space = os.path.join(experiment_dir, optimizer_str, space)
        if os.path.exists(abs_space):
            space = abs_space
        elif os.path.exists(parent_space):
            space = parent_space
        else:
            raise Exception("TPE search space not found. Searched at %s and "
                            "%s" % (abs_space, parent_space))
        # Copy the hyperopt search space into the run directory via symlink.
        # NOTE(review): joining again works because os.path.join returns the
        # last argument unchanged when it is already absolute — confirm.
        if not os.path.exists(os.path.join(optimizer_dir, os.path.basename(space))):
            os.symlink(os.path.join(experiment_dir, optimizer_str, space),
                       os.path.join(optimizer_dir, os.path.basename(space)))

    # Report which hyperopt installation actually got imported, and warn if
    # it differs from the one named in the configuration.
    import hyperopt
    path_to_loaded_optimizer = os.path.abspath(os.path.dirname(os.path.dirname(hyperopt.__file__)))
    logger.info("### INFORMATION ################################################################")
    logger.info("# You are running: #")
    logger.info("# %76s #" % path_to_loaded_optimizer)
    if not os.path.samefile(path_to_loaded_optimizer, config.get('TPE', 'path_to_optimizer')):
        logger.warning("# BUT random_hyperopt_august2013_modDefault.cfg says:")
        logger.warning("# %76s #" % config.get('TPE', 'path_to_optimizer'))
        logger.warning("# Found a global hyperopt version. This installation will be used! #")
    else:
        logger.info("# To reproduce our results you need version 0.0.3.dev, which can be found here:#")
        logger.info("%s" % version_info)
        logger.info("# A newer version might be available, but not yet built in. #")
    logger.info("################################################################################")
    return cmd, optimizer_dir
|
gpl-3.0
|
pranavtendolkr/horizon
|
openstack_dashboard/dashboards/project/stacks/urls.py
|
56
|
1765
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.project.stacks import views
# URL routes for the project "stacks" panel. Order matters: Django matches
# top-to-bottom, so keep the more specific literal paths before the
# (?P<stack_id>...) catch-alls.
urlpatterns = patterns(
    '',
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^select_template$',
        views.SelectTemplateView.as_view(),
        name='select_template'),
    url(r'^launch$', views.CreateStackView.as_view(), name='launch'),
    url(r'^preview_template$',
        views.PreviewTemplateView.as_view(), name='preview_template'),
    url(r'^preview$', views.PreviewStackView.as_view(), name='preview'),
    url(r'^preview_details$',
        views.PreviewStackDetailsView.as_view(), name='preview_details'),
    # Detail page for a single stack.
    url(r'^stack/(?P<stack_id>[^/]+)/$',
        views.DetailView.as_view(), name='detail'),
    url(r'^(?P<stack_id>[^/]+)/change_template$',
        views.ChangeTemplateView.as_view(), name='change_template'),
    url(r'^(?P<stack_id>[^/]+)/edit_stack$',
        views.EditStackView.as_view(), name='edit_stack'),
    # Detail page for one named resource inside a stack.
    url(r'^stack/(?P<stack_id>[^/]+)/(?P<resource_name>[^/]+)/$',
        views.ResourceView.as_view(), name='resource'),
    # JSON endpoint; presumably feeds a d3 visualization — see views.JSONView.
    url(r'^get_d3_data/(?P<stack_id>[^/]+)/$',
        views.JSONView.as_view(), name='d3_data'),
)
|
apache-2.0
|
UOMx/edx-platform
|
cms/djangoapps/contentstore/tests/test_crud.py
|
15
|
5989
|
from xmodule import templates
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, TEST_DATA_SPLIT_MODULESTORE
from xmodule.course_module import CourseDescriptor
from xmodule.seq_module import SequenceDescriptor
from xmodule.capa_module import CapaDescriptor
from xmodule.html_module import HtmlDescriptor
from xmodule.modulestore.exceptions import DuplicateCourseError
class TemplateTests(ModuleStoreTestCase):
    """
    Test finding and using the templates (boilerplates) for xblocks.
    """
    # Run the whole suite against the split modulestore.
    MODULESTORE = TEST_DATA_SPLIT_MODULESTORE

    def test_get_templates(self):
        """all_templates() returns boilerplates keyed by xblock category."""
        found = templates.all_templates()
        self.assertIsNotNone(found.get('course'))
        self.assertIsNotNone(found.get('about'))
        self.assertIsNotNone(found.get('html'))
        self.assertIsNotNone(found.get('problem'))
        self.assertEqual(len(found.get('course')), 0)
        self.assertEqual(len(found.get('about')), 1)
        self.assertGreaterEqual(len(found.get('html')), 2)
        self.assertGreaterEqual(len(found.get('problem')), 10)
        # Every problem template must carry a display_name; find 'Dropdown'.
        dropdown = None
        for template in found['problem']:
            self.assertIn('metadata', template)
            self.assertIn('display_name', template['metadata'])
            if template['metadata']['display_name'] == 'Dropdown':
                dropdown = template
                break
        self.assertIsNotNone(dropdown)
        self.assertIn('markdown', dropdown['metadata'])
        self.assertIn('data', dropdown)
        self.assertRegexpMatches(dropdown['metadata']['markdown'], r'^Dropdown.*')
        self.assertRegexpMatches(dropdown['data'], r'<problem>\s*<p>Dropdown.*')

    def test_get_some_templates(self):
        """Per-descriptor template listing and lookup by yaml file name."""
        self.assertEqual(len(SequenceDescriptor.templates()), 0)
        self.assertGreater(len(HtmlDescriptor.templates()), 0)
        self.assertIsNone(SequenceDescriptor.get_template('doesntexist.yaml'))
        self.assertIsNone(HtmlDescriptor.get_template('doesntexist.yaml'))
        self.assertIsNotNone(HtmlDescriptor.get_template('announcement.yaml'))

    def test_factories(self):
        """CourseFactory/ItemFactory create persisted, retrievable xblocks."""
        test_course = CourseFactory.create(
            org='testx',
            course='course',
            run='2014',
            display_name='fun test course',
            user_id='testbot'
        )
        self.assertIsInstance(test_course, CourseDescriptor)
        self.assertEqual(test_course.display_name, 'fun test course')
        course_from_store = self.store.get_course(test_course.id)
        self.assertEqual(course_from_store.id.org, 'testx')
        self.assertEqual(course_from_store.id.course, 'course')
        self.assertEqual(course_from_store.id.run, '2014')
        test_chapter = ItemFactory.create(
            parent_location=test_course.location,
            category='chapter',
            display_name='chapter 1'
        )
        self.assertIsInstance(test_chapter, SequenceDescriptor)
        # refetch parent which should now point to child
        test_course = self.store.get_course(test_course.id.version_agnostic())
        self.assertIn(test_chapter.location, test_course.children)
        # Creating the identical course a second time must fail.
        with self.assertRaises(DuplicateCourseError):
            CourseFactory.create(
                org='testx',
                course='course',
                run='2014',
                display_name='fun test course',
                user_id='testbot'
            )

    def test_temporary_xblocks(self):
        """
        Test create_xblock to create non persisted xblocks
        """
        test_course = CourseFactory.create(
            course='course', run='2014', org='testx',
            display_name='fun test course', user_id='testbot'
        )
        test_chapter = self.store.create_xblock(
            test_course.system, test_course.id, 'chapter', fields={'display_name': 'chapter n'},
            parent_xblock=test_course
        )
        self.assertIsInstance(test_chapter, SequenceDescriptor)
        self.assertEqual(test_chapter.display_name, 'chapter n')
        self.assertIn(test_chapter, test_course.get_children())
        # test w/ a definition (e.g., a problem)
        test_def_content = '<problem>boo</problem>'
        test_problem = self.store.create_xblock(
            test_course.system, test_course.id, 'problem', fields={'data': test_def_content},
            parent_xblock=test_chapter
        )
        self.assertIsInstance(test_problem, CapaDescriptor)
        self.assertEqual(test_problem.data, test_def_content)
        self.assertIn(test_problem, test_chapter.get_children())
        test_problem.display_name = 'test problem'
        self.assertEqual(test_problem.display_name, 'test problem')

    def test_delete_course(self):
        """After delete_course, the course is no longer retrievable by id."""
        test_course = CourseFactory.create(
            org='edu.harvard',
            course='history',
            run='doomed',
            display_name='doomed test course',
            user_id='testbot')
        ItemFactory.create(
            parent_location=test_course.location,
            category='chapter',
            display_name='chapter 1'
        )
        id_locator = test_course.id.for_branch(ModuleStoreEnum.BranchName.draft)
        # verify it can be retrieved by id
        self.assertIsInstance(self.store.get_course(id_locator), CourseDescriptor)
        # TODO reenable when split_draft supports getting specific versions
        # guid_locator = test_course.location.course_agnostic()
        # Verify it can be retrieved by guid
        # self.assertIsInstance(self.store.get_item(guid_locator), CourseDescriptor)
        self.store.delete_course(id_locator, 'testbot')
        # Test can no longer retrieve by id.
        self.assertIsNone(self.store.get_course(id_locator))
        # But can retrieve by guid -- same TODO as above
        # self.assertIsInstance(self.store.get_item(guid_locator), CourseDescriptor)
|
agpl-3.0
|
mathLab/RBniCS
|
rbnics/utils/decorators/reduction_method_decorator_for.py
|
1
|
1598
|
# Copyright (C) 2015-2021 by the RBniCS authors
#
# This file is part of RBniCS.
#
# SPDX-License-Identifier: LGPL-3.0-or-later
import inspect
import types
from rbnics.utils.cache import cache
from rbnics.utils.decorators.dispatch import dispatch
def ReductionMethodDecoratorFor(Algorithm, replaces=None, replaces_if=None, exact_decorator_for=None):
    """Decorator factory that registers a reduction method decorator for
    *Algorithm* in the module-level dispatch storage.

    replaces / replaces_if are forwarded to dispatch() so a registration can
    supersede an earlier one; exact_decorator_for is accepted for interface
    compatibility.
    """
    # Normalize *replaces* into a reduction method decorator generator.
    replaces_generator = None
    if replaces is not None:
        assert inspect.isfunction(replaces)
        replaces_generator = _ReductionMethodDecoratorGenerator(replaces)

    def register(ReductionMethodDecorator):
        # Wrap the decorator in a generator and add it to the cache
        # (object is a placeholder for the Problem type).
        assert inspect.isfunction(ReductionMethodDecorator)
        generator = _ReductionMethodDecoratorGenerator(ReductionMethodDecorator)
        dispatch(object, name=Algorithm.__name__, module=_cache, replaces=replaces_generator,
                 replaces_if=replaces_if)(generator)
        # Hand back the decorator unchanged.
        return ReductionMethodDecorator

    return register
@cache
def _ReductionMethodDecoratorGenerator(ReductionMethodDecorator):
    """Wrap *ReductionMethodDecorator* in a generator matching the dispatch
    call signature; the truth problem argument is ignored. Cached so equal
    decorators map to the same generator object."""
    def generator(truth_problem, **kwargs):
        return ReductionMethodDecorator
    return generator
_cache = types.ModuleType("reduction method decorators", "Storage for reduction method decorators")
|
lgpl-3.0
|
markYoungH/chromium.src
|
third_party/typ/typ/stats.py
|
82
|
2958
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Stats(object):
    """Counters for a test run plus a printf-style status formatter.

    Escapes understood by format(): %c tests/sec over the recorded window,
    %e elapsed seconds, %f finished, %o finished/sec overall, %p percent
    started, %r running (started - finished), %s started, %t total,
    %u not yet finished, %% literal percent. Unknown escapes pass through
    verbatim.
    """

    def __init__(self, status_format, time_fn, size):
        self.fmt = status_format
        self.finished = 0
        self.started = 0
        self.total = 0
        self.started_time = time_fn()
        # Window of recent timestamps, seeded with the start time; capped at
        # _size + 1 entries by add_time().
        self._times = [self.started_time]
        self._size = size
        self._time = time_fn

    def add_time(self):
        """Record the current time, discarding the oldest sample when the
        window is full."""
        if len(self._times) > self._size:
            del self._times[0]
        self._times.append(self._time())

    def format(self):
        """Render self.fmt with all %-escapes expanded."""
        pieces = []
        fmt = self.fmt
        pos, end = 0, len(fmt)
        while pos < end:
            ch = fmt[pos]
            # A trailing lone '%' is emitted literally.
            if ch == '%' and pos + 1 < end:
                pieces.append(self._expand(fmt[pos + 1]))
                pos += 2
            else:
                pieces.append(ch)
                pos += 1
        return ''.join(pieces)

    def _expand(self, code):
        """Return the replacement text for one escape character."""
        if code == 'c':
            # Rate over the recorded window; '-' when no time has passed.
            elapsed = self._times[-1] - self._times[0]
            if elapsed > 0:
                return '%5.1f' % ((len(self._times) - 1) / elapsed)
            return '-'
        if code == 'e':
            now = self._time()
            assert now >= self.started_time
            return '%-5.3f' % (now - self.started_time)
        if code == 'f':
            return str(self.finished)
        if code == 'o':
            # Overall finish rate; '-' until any time has elapsed.
            now = self._time()
            if now > self.started_time:
                return '%5.1f' % (self.finished * 1.0 /
                                  (now - self.started_time))
            return '-'
        if code == 'p':
            # Percentage started; '-' while total is still zero.
            if self.total:
                return '%5.1f' % (self.started * 100.0 / self.total)
            return '-'
        if code == 'r':
            return str(self.started - self.finished)
        if code == 's':
            return str(self.started)
        if code == 't':
            return str(self.total)
        if code == 'u':
            return str(self.total - self.finished)
        if code == '%':
            return '%'
        # Unknown escape: emit it unchanged, '%' included.
        return '%' + code
|
bsd-3-clause
|
arnavd96/Cinemiezer
|
myvenv/lib/python3.4/site-packages/boto/sdb/db/test_db.py
|
153
|
5427
|
import logging
import time
from datetime import datetime
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty, IntegerProperty, BooleanProperty
from boto.sdb.db.property import DateTimeProperty, FloatProperty, ReferenceProperty
from boto.sdb.db.property import PasswordProperty, ListProperty, MapProperty
from boto.exception import SDBPersistenceError
logging.basicConfig()
log = logging.getLogger('test_db')
log.setLevel(logging.DEBUG)

# Objects created by the test functions are parked here so they remain
# reachable after each function returns (post-mortem inspection/cleanup).
_objects = {}

#
# This will eventually be moved to the boto.tests module and become a real unit test
# but for now it will live here. It shows examples of each of the Property types in
# use and tests the basic operations.
#

# One minimal model per property type under test.
class TestBasic(Model):
    name = StringProperty()
    size = IntegerProperty()
    foo = BooleanProperty()
    date = DateTimeProperty()

class TestFloat(Model):
    name = StringProperty()
    value = FloatProperty()

class TestRequired(Model):
    # Required property with a default — saving without assignment must work.
    req = StringProperty(required=True, default='foo')

class TestReference(Model):
    # Back-reference collection 'refs' is exposed on TestBasic instances.
    ref = ReferenceProperty(reference_class=TestBasic, collection_name='refs')

class TestSubClass(TestBasic):
    answer = IntegerProperty()

class TestPassword(Model):
    password = PasswordProperty()

class TestList(Model):
    name = StringProperty()
    nums = ListProperty(int)

class TestMap(Model):
    name = StringProperty()
    map = MapProperty()

class TestListReference(Model):
    name = StringProperty()
    basics = ListProperty(TestBasic)

class TestAutoNow(Model):
    create_date = DateTimeProperty(auto_now_add=True)
    modified_date = DateTimeProperty(auto_now=True)

class TestUnique(Model):
    name = StringProperty(unique=True)
def test_basic():
    """Round-trip a TestBasic object: save it, re-read it by id (single and
    batch form), and compare the fields. Returns the saved object."""
    global _objects
    t = TestBasic()
    t.name = 'simple'
    t.size = -42
    t.foo = True
    t.date = datetime.now()
    log.debug('saving object')
    t.put()
    _objects['test_basic_t'] = t
    time.sleep(5)  # give the write time to become visible before reading back
    log.debug('now try retrieving it')
    tt = TestBasic.get_by_id(t.id)
    _objects['test_basic_tt'] = tt
    assert tt.id == t.id
    # Batch lookup: a list of ids yields a list of objects.
    # (Renamed from 'l' — PEP 8 E741 discourages that ambiguous name.)
    batch = TestBasic.get_by_id([t.id])
    assert len(batch) == 1
    assert batch[0].id == t.id
    assert t.size == tt.size
    assert t.foo == tt.foo
    assert t.name == tt.name
    #assert t.date == tt.date
    return t
def test_float():
    """Store and re-read a FloatProperty value. Returns the saved object."""
    global _objects
    t = TestFloat()
    t.name = 'float object'
    t.value = 98.6
    log.debug('saving object')
    t.save()
    _objects['test_float_t'] = t
    time.sleep(5)  # give the write time to become visible before reading back
    log.debug('now try retrieving it')
    tt = TestFloat.get_by_id(t.id)
    _objects['test_float_tt'] = tt
    assert tt.id == t.id
    assert tt.name == t.name
    assert tt.value == t.value
    return t
def test_required():
    """A required property with a default saves without explicit assignment."""
    global _objects
    t = TestRequired()
    _objects['test_required_t'] = t
    t.put()
    return t
def test_reference(t=None):
    """Store an object referencing *t* (created on demand) and follow the
    reference in both directions."""
    global _objects
    if not t:
        t = test_basic()
    tt = TestReference()
    tt.ref = t
    tt.put()
    time.sleep(10)  # wait for the write to become visible
    tt = TestReference.get_by_id(tt.id)
    _objects['test_reference_tt'] = tt
    assert tt.ref.id == t.id
    # Reverse direction: the 'refs' collection enumerates referrers of t.
    for o in t.refs:
        log.debug(o)
def test_subclass():
    """A subclassed model still accepts the parent model's properties."""
    global _objects
    t = TestSubClass()
    _objects['test_subclass_t'] = t
    t.name = 'a subclass'
    t.size = -489
    t.save()
def test_password():
    """A stored password compares equal to its clear text but is not stored
    as that string."""
    global _objects
    t = TestPassword()
    _objects['test_password_t'] = t
    t.password = "foo"
    t.save()
    time.sleep(5)  # wait for the write to become visible
    # Make sure it stored ok
    tt = TestPassword.get_by_id(t.id)
    _objects['test_password_tt'] = tt
    # Testing password equality
    assert tt.password == "foo"
    # Testing password not stored as string
    assert str(tt.password) != "foo"
def test_list():
    """Round-trip a list of ints; elements must come back as ints."""
    global _objects
    t = TestList()
    _objects['test_list_t'] = t
    t.name = 'a list of ints'
    t.nums = [1, 2, 3, 4, 5]
    t.put()
    tt = TestList.get_by_id(t.id)
    _objects['test_list_tt'] = tt
    assert tt.name == t.name
    for n in tt.nums:
        assert isinstance(n, int)
def test_list_reference():
    """A ListProperty of model references keeps retrievable object ids."""
    global _objects
    t = TestBasic()
    t.put()
    _objects['test_list_ref_t'] = t
    tt = TestListReference()
    tt.name = "foo"
    tt.basics = [t]
    tt.put()
    time.sleep(5)  # wait for the write to become visible
    _objects['test_list_ref_tt'] = tt
    ttt = TestListReference.get_by_id(tt.id)
    assert ttt.basics[0].id == t.id
def test_unique():
    """Saving a second object with the same unique name must raise
    SDBPersistenceError."""
    global _objects
    t = TestUnique()
    # Timestamp suffix keeps reruns from colliding with old data.
    name = 'foo' + str(int(time.time()))
    t.name = name
    t.put()
    _objects['test_unique_t'] = t
    time.sleep(10)  # wait for the write to become visible
    tt = TestUnique()
    _objects['test_unique_tt'] = tt
    tt.name = name
    try:
        tt.put()
        assert False
    except(SDBPersistenceError):
        pass
def test_datetime():
    """An auto_now_add timestamp survives a store/fetch round trip (compared
    at timetuple precision)."""
    global _objects
    t = TestAutoNow()
    t.put()
    _objects['test_datetime_t'] = t
    time.sleep(5)  # wait for the write to become visible
    tt = TestAutoNow.get_by_id(t.id)
    assert tt.create_date.timetuple() == t.create_date.timetuple()
def test():
    """Run the whole suite in order; test_reference reuses test_basic's
    object to avoid an extra round trip."""
    log.info('test_basic')
    t1 = test_basic()
    log.info('test_required')
    test_required()
    log.info('test_reference')
    test_reference(t1)
    log.info('test_subclass')
    test_subclass()
    log.info('test_password')
    test_password()
    log.info('test_list')
    test_list()
    log.info('test_list_reference')
    test_list_reference()
    log.info("test_datetime")
    test_datetime()
    log.info('test_unique')
    test_unique()

if __name__ == "__main__":
    test()
|
mit
|
kraj/systemd
|
test/run-unit-tests.py
|
35
|
1710
|
#!/usr/bin/env python3
import argparse
import dataclasses
import glob
import os
import subprocess
import sys
# colorama is optional: with it, results are colorized; without it every
# style escape degrades to the empty string and output is plain.
try:
    import colorama as c
    GREEN = c.Fore.GREEN
    YELLOW = c.Fore.YELLOW
    RED = c.Fore.RED
    RESET_ALL = c.Style.RESET_ALL
    BRIGHT = c.Style.BRIGHT
except ImportError:
    GREEN = YELLOW = RED = RESET_ALL = BRIGHT = ''
@dataclasses.dataclass
class Total:
    """Tally of test outcomes for the final summary line."""
    total: int     # number of discovered tests
    good: int = 0  # exit code 0
    skip: int = 0  # exit code 77 (autotools-style skip)
    fail: int = 0  # any other exit code
def argument_parser():
    """Build the command-line parser: only the -u/--unsafe toggle."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-u', '--unsafe',
        action='store_true',
        help='run "unsafe" tests too',
    )
    return parser
opts = argument_parser().parse_args()

# Collect the installed test binaries; the unsafe subdirectory is opt-in.
tests = glob.glob('/usr/lib/systemd/tests/test-*')
if opts.unsafe:
    tests += glob.glob('/usr/lib/systemd/tests/unsafe/test-*')

total = Total(total=len(tests))
for test in tests:
    name = os.path.basename(test)
    ex = subprocess.run(test, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if ex.returncode == 0:
        print(f'{GREEN}PASS: {name}{RESET_ALL}')
        total.good += 1
    elif ex.returncode == 77:
        # 77 is the conventional "skipped" exit code.
        print(f'{YELLOW}SKIP: {name}{RESET_ALL}')
        total.skip += 1
    else:
        print(f'{RED}FAIL: {name}{RESET_ALL}')
        total.fail += 1
        # stdout/stderr might not be valid unicode, let's just dump it to the terminal.
        # Also let's reset the style afterwards, in case our output sets something.
        sys.stdout.buffer.write(ex.stdout)
        print(f'{RESET_ALL}{BRIGHT}')
        sys.stdout.buffer.write(ex.stderr)
        print(f'{RESET_ALL}')

print(f'{BRIGHT}OK: {total.good} SKIP: {total.skip} FAIL: {total.fail}{RESET_ALL}')
# Process exit status reflects only hard failures, not skips.
sys.exit(total.fail > 0)
|
gpl-2.0
|
peterbrook/assetjet
|
deploy/setup_esky.py
|
1
|
2048
|
import sys, os
from esky.bdist_esky import Executable
from distutils.core import setup
import assetjet
from deploy import exeName, appName
from glob import glob
def get_data_files(dirs):
    """
    Recursively include data directories.

    For every directory in *dirs*, walk its tree and emit one
    ``(target_dir, [file, ...])`` tuple per visited directory, where
    *target_dir* is the visited directory expressed relative to the
    parent of the top-level directory (so the top-level directory name
    itself is preserved in the target path).

    :param dirs: iterable of directory paths to walk.
    :returns: list of ``(relative_dir, [file paths])`` tuples suitable
        for distutils' ``data_files`` option.
    """
    results = []
    for directory in dirs:
        # NOTE: the original code rebound the *dirs* parameter (and a
        # ``files`` name) inside os.walk()'s result tuple while the
        # outer loop was still iterating *dirs*; distinct names avoid
        # that shadowing.
        for root, _subdirs, filenames in os.walk(directory):
            paths = [os.path.join(root, filename) for filename in filenames]
            targetdir = os.path.relpath(root, os.path.join(directory, os.path.pardir))
            results.append((targetdir, paths))
    return results
if sys.platform in ['win32','cygwin','win64']:
    # Add http files
    # Ship the static web assets plus the pre-compiled local server module.
    data_files = get_data_files([r'../app/src/httpdocs']) + [
                  r'../app/src/local_server.pyc']
    # We can customise the executable's creation by passing an instance
    # of Executable() instead of just the script name.
    exe = Executable('../app/src/main.py',
                     icon='../resources/images/Pie-chart.ico',
                     gui_only=True,
                     name=exeName,
                     )
    setup(
        data_files = data_files,
        name = appName,
        version = assetjet.__version__,
        scripts = [exe],
        options = {'bdist_esky':{
            # forcibly include some other modules
            'includes': ['lxml.etree', 'lxml._elementpath',
                         'gzip','numpy',
                         'PySide.QtWebKit', 'PySide.QtNetwork', 'PySide.QtSvg'],
            # forcibly exclude some other modules
            'excludes': ['Tkinter', 'Tkconstants', 'pydoc', 'tcl', 'tk', 'matplotlib', 'PIL', 'nose', 'setuptools', 'xlrd', 'xlwt', 'PyQt4', 'markdown', 'IPython', 'docutils'],
            # force esky to freeze the app using py2exe
            'freezer_module': 'cx_freeze',
            # tweak the options used by cx_freezer
            'freezer_options': {'packages': ['pygments', 'sqlalchemy.dialects.sqlite', 'assetjet']}
            }}
        )
|
gpl-3.0
|
lento/cortex
|
test/IECore/ops/splineInput/splineInput-1.py
|
12
|
2469
|
##########################################################################
#
# Copyright (c) 2009-2010, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
class splineInput( IECore.Op ) :
    """Op whose result is the number of control points of its "spline" parameter."""

    def __init__( self ) :

        IECore.Op.__init__( self,
            "",
            IECore.IntParameter(
                name = "result",
                description = "",
                defaultValue = 0,
            )
        )

        # Build the default value separately for readability: a
        # Catmull-Rom spline with doubled end points.
        defaultSpline = IECore.SplineffData(
            IECore.Splineff(
                IECore.CubicBasisf.catmullRom(),
                (
                    ( 0, 1 ),
                    ( 0, 1 ),
                    ( 1, 0 ),
                    ( 1, 0 ),
                ),
            ),
        )

        self.parameters().addParameter(
            IECore.SplineffParameter(
                name = "spline",
                description = "description",
                defaultValue = defaultSpline,
            )
        )

    def doOperation( self, args ) :
        # Count the control points of the spline the caller supplied.
        return IECore.IntData( len( args["spline"].value.points() ) )

IECore.registerRunTimeTyped( splineInput )
|
bsd-3-clause
|
ltiao/networkx
|
networkx/utils/tests/test_heaps.py
|
64
|
3979
|
from nose.tools import *
import networkx as nx
from networkx.utils import *
class X(object):
    """Hashable key whose comparison operators are booby-trapped.

    The heaps under test must only ever treat keys by hash/identity.
    ``raise self is other`` raises a TypeError at runtime (a bool is
    not an exception), so any code path that actually compares two X
    instances fails loudly.
    """

    def __eq__(self, other):
        # Raising a non-exception (a bool) triggers TypeError on use.
        raise self is other

    def __ne__(self, other):
        raise self is not other

    def __lt__(self, other):
        raise TypeError('cannot compare')

    def __le__(self, other):
        raise TypeError('cannot compare')

    def __ge__(self, other):
        raise TypeError('cannot compare')

    def __gt__(self, other):
        raise TypeError('cannot compare')

    def __hash__(self):
        # Identity-based hash so X can still be used as a dict/heap key.
        return hash(id(self))
x = X()
data = [# min should not invent an element.
('min', nx.NetworkXError),
# Popping an empty heap should fail.
('pop', nx.NetworkXError),
# Getting nonexisting elements should return None.
('get', 0, None),
('get', x, None),
('get', None, None),
# Inserting a new key should succeed.
('insert', x, 1, True),
('get', x, 1),
('min', (x, 1)),
# min should not pop the top element.
('min', (x, 1)),
# Inserting a new key of different type should succeed.
('insert', 1, -2.0, True),
# int and float values should interop.
('min', (1, -2.0)),
# pop removes minimum-valued element.
('insert', 3, -10 ** 100, True),
('insert', 4, 5, True),
('pop', (3, -10 ** 100)),
('pop', (1, -2.0)),
# Decrease-insert should succeed.
('insert', 4, -50, True),
('insert', 4, -60, False, True),
# Decrease-insert should not create duplicate keys.
('pop', (4, -60)),
('pop', (x, 1)),
# Popping all elements should empty the heap.
('min', nx.NetworkXError),
('pop', nx.NetworkXError),
# Non-value-changing insert should fail.
('insert', x, 0, True),
('insert', x, 0, False, False),
('min', (x, 0)),
('insert', x, 0, True, False),
('min', (x, 0)),
# Failed insert should not create duplicate keys.
('pop', (x, 0)),
('pop', nx.NetworkXError),
# Increase-insert should succeed when allowed.
('insert', None, 0, True),
('insert', 2, -1, True),
('min', (2, -1)),
('insert', 2, 1, True, False),
('min', (None, 0)),
# Increase-insert should fail when disallowed.
('insert', None, 2, False, False),
('min', (None, 0)),
# Failed increase-insert should not create duplicate keys.
('pop', (None, 0)),
('pop', (2, 1)),
('min', nx.NetworkXError),
('pop', nx.NetworkXError)]
def _test_heap_class(cls, *args, **kwargs):
    """Run the shared scenario table plus a coverage stress test on *cls*.

    *cls* is a heap class; extra arguments are forwarded to its
    constructor.  The assertions below are strictly order-dependent.
    """
    heap = cls(*args, **kwargs)
    # Basic behavioral test
    # Each entry of the module-level ``data`` table is
    # (method, *args, expected) where *expected* is either a return
    # value or an exception class that must be raised.
    for op in data:
        if op[-1] is not nx.NetworkXError:
            assert_equal(op[-1], getattr(heap, op[0])(*op[1:-1]))
        else:
            assert_raises(op[-1], getattr(heap, op[0]), *op[1:-1])
    # Coverage test.
    # Fill in reverse key order, then drain the smaller half in order.
    for i in range(99, -1, -1):
        assert_true(heap.insert(i, i))
    for i in range(50):
        assert_equal(heap.pop(), (i, i))
    # Re-insert everything: only the popped keys 0..49 are new again.
    for i in range(100):
        assert_equal(heap.insert(i, i), i < 50)
    # Increasing a value without allow_increase must be refused.
    for i in range(100):
        assert_false(heap.insert(i, i + 1))
    for i in range(50):
        assert_equal(heap.pop(), (i, i))
    for i in range(100):
        assert_equal(heap.insert(i, i + 1), i < 50)
    for i in range(49):
        assert_equal(heap.pop(), (i, i + 1))
    # Keys 49 and 50 both carry value 50; their pop order is not fixed.
    assert_equal(sorted([heap.pop(), heap.pop()]), [(49, 50), (50, 50)])
    # allow_increase=True still returns False for already-present keys.
    for i in range(51, 100):
        assert_false(heap.insert(i, i + 1, True))
    for i in range(51, 70):
        assert_equal(heap.pop(), (i, i))
    for i in range(100):
        assert_true(heap.insert(i, i))
    for i in range(100):
        assert_equal(heap.pop(), (i, i))
    # The heap must now be empty again.
    assert_raises(nx.NetworkXError, heap.pop)
def test_PairingHeap():
    # Run the shared scenario/stress suite against the pairing heap.
    _test_heap_class(PairingHeap)

def test_BinaryHeap():
    # Run the shared scenario/stress suite against the binary heap.
    _test_heap_class(BinaryHeap)
|
bsd-3-clause
|
domenicosolazzo/practice-django
|
venv/lib/python2.7/site-packages/django/core/files/utils.py
|
901
|
1230
|
class FileProxyMixin(object):
    """
    A mixin class used to forward file methods to an underlaying file
    object. The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """

    # Each forwarded method is exposed as a property so that attribute
    # access is delegated lazily to the *current* ``self.file`` object,
    # which may be replaced after instantiation.
    encoding = property(lambda self: self.file.encoding)
    fileno = property(lambda self: self.file.fileno)
    flush = property(lambda self: self.file.flush)
    isatty = property(lambda self: self.file.isatty)
    newlines = property(lambda self: self.file.newlines)
    read = property(lambda self: self.file.read)
    readinto = property(lambda self: self.file.readinto)
    readline = property(lambda self: self.file.readline)
    readlines = property(lambda self: self.file.readlines)
    seek = property(lambda self: self.file.seek)
    softspace = property(lambda self: self.file.softspace)
    tell = property(lambda self: self.file.tell)
    truncate = property(lambda self: self.file.truncate)
    write = property(lambda self: self.file.write)
    writelines = property(lambda self: self.file.writelines)
    xreadlines = property(lambda self: self.file.xreadlines)

    def __iter__(self):
        # Iteration (e.g. line-by-line reads) also goes to the wrapped file.
        return iter(self.file)
|
mit
|
sdierauf/laundrytime
|
app/bower_components/bootstrap/test-infra/s3_cache.py
|
1700
|
3523
|
#!/usr/bin/env python2.7
from __future__ import absolute_import, unicode_literals, print_function, division
from sys import argv
from os import environ, stat, remove as _delete_file
from os.path import isfile, dirname, basename, abspath
from hashlib import sha256
from subprocess import check_call as run
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
NEED_TO_UPLOAD_MARKER = '.need-to-upload'
BYTES_PER_MB = 1024 * 1024
try:
BUCKET_NAME = environ['TWBS_S3_BUCKET']
except KeyError:
raise SystemExit("TWBS_S3_BUCKET environment variable not set!")
def _sha256_of_file(filename):
hasher = sha256()
with open(filename, 'rb') as input_file:
hasher.update(input_file.read())
file_hash = hasher.hexdigest()
print('sha256({}) = {}'.format(filename, file_hash))
return file_hash
def _delete_file_quietly(filename):
    """Remove *filename*, silently ignoring a missing file or other OS error."""
    try:
        _delete_file(filename)
    except (OSError, IOError):
        # Best-effort cleanup: absence of the file is the desired state.
        pass
def _tarball_size(directory):
    """Return a human-readable size string for *directory*'s cache tarball.

    BYTES_PER_MB is 1024 * 1024, so the value is in mebibytes; the local
    variable was previously misnamed ``kib``.
    """
    mib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB
    return "{} MiB".format(mib)
def _tarball_filename_for(directory):
return abspath('./{}.tar.gz'.format(basename(directory)))
def _create_tarball(directory):
    """gzip-compress *directory* into <basename>.tar.gz in the CWD."""
    print("Creating tarball of {}...".format(directory))
    # -C parent: archive entries start at the directory's basename, so
    # extraction reproduces the directory in place.
    run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)])

def _extract_tarball(directory):
    """Unpack the cached tarball into *directory*'s parent (recreating it)."""
    print("Extracting tarball of {}...".format(directory))
    run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)])
def download(directory):
    """Fetch the cached tarball from S3 and unpack it over *directory*.

    Relies on the module-level ``key`` (S3 key for the current
    dependencies-file hash) and ``friendly_name`` globals assigned in
    the ``__main__`` block.  On a cache miss the marker file is created
    so a later ``upload`` run knows the cache must be refreshed, and
    the script exits via SystemExit.
    """
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
    try:
        print("Downloading {} tarball from S3...".format(friendly_name))
        key.get_contents_to_filename(_tarball_filename_for(directory))
    except S3ResponseError as err:
        # Leave a marker so the post-build "upload" step repopulates the
        # cache, then abort this step.
        open(NEED_TO_UPLOAD_MARKER, 'a').close()
        print(err)
        raise SystemExit("Cached {} download failed!".format(friendly_name))
    print("Downloaded {}.".format(_tarball_size(directory)))
    _extract_tarball(directory)
    print("{} successfully installed from cache.".format(friendly_name))
def upload(directory):
    """Tar up *directory* and push it to S3 under the current cache key.

    Uses the module-level ``key`` and ``friendly_name`` globals set in
    ``__main__``.  On success the need-to-upload marker is removed.
    """
    _create_tarball(directory)
    print("Uploading {} tarball to S3... ({})".format(friendly_name, _tarball_size(directory)))
    key.set_contents_from_filename(_tarball_filename_for(directory))
    print("{} cache successfully updated.".format(friendly_name))
    _delete_file_quietly(NEED_TO_UPLOAD_MARKER)
if __name__ == '__main__':
    # Uses environment variables:
    # AWS_ACCESS_KEY_ID -- AWS Access Key ID
    # AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key
    argv.pop(0)  # drop the script name itself
    if len(argv) != 4:
        raise SystemExit("USAGE: s3_cache.py <download | upload> <friendly name> <dependencies file> <directory>")
    mode, friendly_name, dependencies_file, directory = argv
    conn = S3Connection()
    bucket = conn.lookup(BUCKET_NAME, validate=False)
    if bucket is None:
        raise SystemExit("Could not access bucket!")
    # The cache key is the hash of the dependencies file, so any change
    # to the declared dependencies automatically invalidates the cache.
    dependencies_file_hash = _sha256_of_file(dependencies_file)
    key = Key(bucket, dependencies_file_hash)
    key.storage_class = 'REDUCED_REDUNDANCY'
    if mode == 'download':
        download(directory)
    elif mode == 'upload':
        # Only re-upload when a previous download left the marker file.
        if isfile(NEED_TO_UPLOAD_MARKER): # FIXME
            upload(directory)
        else:
            print("No need to upload anything.")
    else:
        raise SystemExit("Unrecognized mode {!r}".format(mode))
|
mit
|
joeythesaint/yocto-autobuilder
|
lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/words/tap.py
|
60
|
2422
|
# -*- test-case-name: twisted.words.test.test_tap -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Shiny new words service maker
"""
import sys, socket
from twisted.application import strports
from twisted.application.service import MultiService
from twisted.python import usage
from twisted import plugin
from twisted.words import iwords, service
from twisted.cred import checkers, credentials, portal, strcred
class Options(usage.Options, strcred.AuthOptionMixin):
    # Credential types this service knows how to check.
    supportedInterfaces = [credentials.IUsernamePassword]

    optParameters = [
        ('hostname', None, socket.gethostname(),
         'Name of this server; purely an informative')]

    compData = usage.Completions(multiUse=["group"])

    interfacePlugins = {}
    plg = None
    # Runs at class-definition time: discover every protocol plugin and
    # grow optParameters with one strports "<plugin>-port" option each.
    for plg in plugin.getPlugins(iwords.IProtocolPlugin):
        assert plg.name not in interfacePlugins
        interfacePlugins[plg.name] = plg
        optParameters.append((
            plg.name + '-port',
            None, None,
            'strports description of the port to bind for the ' + plg.name + ' server'))
    del plg  # don't leave the loop variable behind as a class attribute

    def __init__(self, *a, **kw):
        usage.Options.__init__(self, *a, **kw)
        self['groups'] = []

    def opt_group(self, name):
        """Specify a group which should exist
        """
        self['groups'].append(name.decode(sys.stdin.encoding))

    def opt_passwd(self, filename):
        """
        Name of a passwd-style file. (This is for
        backwards-compatibility only; you should use the --auth
        command instead.)
        """
        self.addChecker(checkers.FilePasswordDB(filename))
def makeService(config):
    """Assemble the words service tree described by *config*.

    Builds the realm/portal pair, attaches one listening service per
    protocol plugin that has a port configured, and pre-creates the
    requested groups.
    """
    realm = service.InMemoryWordsRealm(config['hostname'])
    words_portal = portal.Portal(realm, config.get('credCheckers', []))
    parent = MultiService()
    # XXX Attribute lookup on config is kind of bad - hrm.
    for plugin_name, proto_plugin in config.interfacePlugins.items():
        port = config.get(plugin_name + '-port')
        if port is None:
            continue
        factory = proto_plugin.getFactory(realm, words_portal)
        strports.service(port, factory).setServiceParent(parent)
    # This is bogus. createGroup is async. makeService must be
    # allowed to return a Deferred or some crap.
    for group_name in config['groups']:
        realm.createGroup(group_name)
    return parent
|
gpl-2.0
|
VanirAOSP/external_chromium_org
|
native_client_sdk/src/doc/conf.py
|
24
|
5904
|
# -*- coding: utf-8 -*-
#
# Native Client documentation build configuration file, created by
# sphinx-quickstart on Thu Aug 15 11:31:06 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# Where we keep our extensions...
sys.path.insert(0, os.path.abspath('_sphinxext'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['devsite_builder']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Native Client'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '31'
# The full version, including alpha/beta/rc tags.
release = '31'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', '**/.#*']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_translator_class = 'devsite_builder.DevsiteHTMLTranslator'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static', 'images']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'NativeClientdoc'
linkcheck_ignore = [
# General links not worth checking
r'http://localhost.*',
r'about:.*',
r'chrome:.*',
r'.*local_extensions\.css',
#
# Specific known bad cases go here.
#
# linkcheck's anchor checker can't check these because they are
# server-generated and don't actually appear in the HTML of the page.
r'https://code.google.com.*browse#svn.*',
]
|
bsd-3-clause
|
Mercador/python-keystoneclient
|
keystoneclient/auth/cli.py
|
6
|
3244
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import os
from keystoneclient.auth import base
from keystoneclient import utils
@utils.positional()
def register_argparse_arguments(parser, argv, default=None):
    """Register CLI options needed to create a plugin.

    The function inspects the provided arguments so that it can also register
    the options required for that specific plugin if available.

    :param argparse.ArgumentParser parser: the parser to attach argparse
        options to.
    :param list argv: the arguments provided to the application.
    :param str/class default: a default plugin name or a plugin object to use
        if one isn't specified by the CLI. default: None.

    :returns: The plugin class that will be loaded or None if not provided.
    :rtype: :py:class:`keystoneclient.auth.BaseAuthPlugin`

    :raises keystoneclient.exceptions.NoMatchingPlugin: if a plugin cannot be
        created.
    """
    # A throwaway parser pre-scans argv for --os-auth-plugin only, so the
    # real parser can then be extended with that plugin's own options.
    in_parser = argparse.ArgumentParser(add_help=False)
    env_plugin = os.environ.get('OS_AUTH_PLUGIN', default)
    # Register the option on both parsers: the throwaway one for the
    # pre-scan and the caller's so it shows up in --help.
    for p in (in_parser, parser):
        p.add_argument('--os-auth-plugin',
                       metavar='<name>',
                       default=env_plugin,
                       help='The auth plugin to load')
    options, _args = in_parser.parse_known_args(argv)
    if not options.os_auth_plugin:
        return None
    if isinstance(options.os_auth_plugin, type):
        # ``default`` was supplied as an actual plugin class, not a name.
        msg = 'Default Authentication options'
        plugin = options.os_auth_plugin
    else:
        msg = 'Options specific to the %s plugin.' % options.os_auth_plugin
        plugin = base.get_plugin_class(options.os_auth_plugin)
    group = parser.add_argument_group('Authentication Options', msg)
    plugin.register_argparse_arguments(group)
    return plugin
def load_from_argparse_arguments(namespace, **kwargs):
    """Retrieve the created plugin from the completed argparse results.

    Loads and creates the auth plugin from the information parsed from the
    command line by argparse.

    :param Namespace namespace: The result from CLI parsing.

    :returns: An auth plugin, or None if a name is not provided.
    :rtype: :py:class:`keystoneclient.auth.BaseAuthPlugin`

    :raises keystoneclient.exceptions.NoMatchingPlugin: if a plugin cannot be
        created.
    """
    selected = namespace.os_auth_plugin
    if not selected:
        return None
    # --os-auth-plugin may already hold a plugin class (when a class was
    # passed as the default) rather than a plugin name string.
    plugin = selected if isinstance(selected, type) else base.get_plugin_class(selected)
    return plugin.load_from_argparse_arguments(namespace, **kwargs)
|
apache-2.0
|
xantin/qualitybots
|
src/webdriver/chrome_resize.py
|
26
|
4879
|
#!/usr/bin/python2.6
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resize the chrome browser window to the appropriate size."""
import getpass
import os
import platform
import re
import subprocess
import time
import client_logging
CHROME_WINDOWS_USER_DATA = ('C:\\Users\\%s\\AppData\\Local\\Google\\Chrome\\'
'User Data\\')
CHROME_LINUX_USER_DATA = '~/.config/google-chrome/'
CHROME_WINDOWS_EXECUTABLE = ('C:\\Program Files\\Google\\Chrome\\'
'Application\\chrome.exe')
CHROME_LINUX_EXECUTABLE = 'google-chrome'
# This is structured data from the Chrome profile for defining the size.
WINDOW_PLACEMENT = """"window_placement": {
"bottom": %(height)d,
"left": 0,
"maximized": false,
"right": %(width)d,
"top": 0,
"work_area_bottom": 1080,
"work_area_left": 0,
"work_area_right": 1920,
"work_area_top": 0
}"""
BROWSER_PREFERENCE = '\n"browser": {\n%s\n},'
# This is the outer area surrounding the browser viewport (the browser chrome)
CHROME_HEIGHT = 82
CHROME_WIDTH = 20
LOGGER_NAME = 'chrome_resize'
# Initialize the logger for this module
logger = client_logging.GetLogger(LOGGER_NAME)
def GetChromeProfilePath():
  """Return the path to the Chrome user-data directory.

  Returns:
    A string with the profile directory on Windows or Linux; implicitly
    None on any other platform.
  """
  system_name = platform.uname()[0]
  if system_name == 'Linux':
    return CHROME_LINUX_USER_DATA
  if system_name == 'Windows':
    # The Windows template embeds the current user's profile directory.
    return CHROME_WINDOWS_USER_DATA % getpass.getuser()
def GetChromeProfilePreferences():
  """Return the path to the Chrome profile preferences file.

  Returns:
    A string representing the path to the Chrome profile preferences file
    for the current OS.
  """
  # "Default" is the profile Chrome uses when none is selected explicitly.
  return os.path.join(GetChromeProfilePath(), 'Default', 'Preferences')
def _GetChromeExecutable():
  """Return the path to the Chrome executable.

  Returns:
    A string representing the path to the Chrome executable for the
    current OS; implicitly None on any other platform.
  """
  system_name = platform.uname()[0]
  if system_name == 'Linux':
    return CHROME_LINUX_EXECUTABLE
  if system_name == 'Windows':
    return CHROME_WINDOWS_EXECUTABLE
def _SpawnAndKillChrome():
  """Spawn and kill Chrome in order to set up the profile."""
  # Spawn and kill chrome to set up the profile
  chrome_process = subprocess.Popen(_GetChromeExecutable())
  # Give Chrome time to write out its default profile before killing it.
  time.sleep(10)
  chrome_process.terminate()
def SetChromeWindowSize(width, height):
  """Set the default size for the Chrome window.

  The Chrome window size must be set before Chrome is opened.  If the
  Preferences file does not exist yet, Chrome is spawned once (and
  killed) so it creates a default profile that can then be edited.

  Args:
    width: An integer representing the width for the browser.
    height: An integer representing the height for the browser.
  """
  try:
    # Probe for the Preferences file; its contents are not needed here
    # (the previous version read the whole file only to discard it).
    with open(GetChromeProfilePreferences(), 'r'):
      pass
  except IOError:
    logger.info('The Preferences file does not exist, spawning Chrome.')
    _SpawnAndKillChrome()
  _SetChromeWindowPreferences(GetChromeProfilePreferences(),
                              width, height)
def _SetChromeWindowPreferences(preference_filename, width, height):
  """Update the Chrome window size by editing the Preferences file directly.

  Args:
    preference_filename: A string representing the Chrome Preference file to
      open and edit.
    width: An integer representing the width for the browser.
    height: An integer representing the height for the browser.
  """
  # Check if the file exists; bail out (with a log) if it does not.
  try:
    with open(preference_filename, 'r') as f:
      file_contents = f.read()
  except (OSError, IOError):
    logger.error('The Preferences file does not exist, not setting the size.')
    return
  # Pad the requested viewport with the surrounding browser chrome.
  window_placement = WINDOW_PLACEMENT % {'width': width + CHROME_WIDTH,
                                         'height': height + CHROME_HEIGHT}
  # Compile the pattern once; the previous version ran two separate
  # regex passes (re.search then re.sub) over the same contents.
  placement_re = re.compile(r'"window_placement": {[^}]*}')
  with open(preference_filename, 'w') as f:
    if placement_re.search(file_contents):
      # Replace the existing window_placement section in place.
      f.write(placement_re.sub(window_placement, file_contents))
    else:
      # No "browser" section yet: splice one in right after the opening
      # '{' of the JSON document.
      f.write(file_contents[0] +
              (BROWSER_PREFERENCE % window_placement) +
              file_contents[1:])
|
apache-2.0
|
deepinsight/Deformable-ConvNets
|
Cdiscount/symbols/resnet.py
|
56
|
9630
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''
Adapted from https://github.com/tornadomeet/ResNet/blob/master/symbol_resnet.py
Original author Wei Wu
Implemented the following paper:
Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun. "Identity Mappings in Deep Residual Networks"
'''
import mxnet as mx
import numpy as np
def residual_unit(data, num_filter, stride, dim_match, name, bottle_neck=True, bn_mom=0.9, workspace=256, memonger=False):
    """Return ResNet Unit symbol for building ResNet

    Uses the pre-activation ordering (BN -> ReLU -> Conv) from
    "Identity Mappings in Deep Residual Networks".

    Parameters
    ----------
    data : str
        Input data
    num_filter : int
        Number of output channels
    stride : tuple
        Stride used in convolution
    dim_match : Boolean
        True means channel number between input and output is the same, otherwise means differ
    name : str
        Base name of the operators
    bottle_neck : Boolean
        True for the 1x1-3x3-1x1 bottleneck unit, False for the plain 3x3-3x3 unit
    bn_mom : float
        Momentum used by the BatchNorm layers
    workspace : int
        Workspace used in convolution operator
    memonger : Boolean
        True to tag the shortcut as a mirror stage for the memonger memory optimizer
    """
    if bottle_neck:
        # the same as https://github.com/facebook/fb.resnet.torch#notes, a bit difference with origin paper
        bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn1')
        act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
        # 1x1 reduction conv: the bottleneck works on a quarter of num_filter.
        conv1 = mx.sym.Convolution(data=act1, num_filter=int(num_filter*0.25), kernel=(1,1), stride=(1,1), pad=(0,0),
                                   no_bias=True, workspace=workspace, name=name + '_conv1')
        bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn2')
        act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
        # 3x3 conv carries the (possibly strided) spatial work.
        conv2 = mx.sym.Convolution(data=act2, num_filter=int(num_filter*0.25), kernel=(3,3), stride=stride, pad=(1,1),
                                   no_bias=True, workspace=workspace, name=name + '_conv2')
        bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn3')
        act3 = mx.sym.Activation(data=bn3, act_type='relu', name=name + '_relu3')
        # 1x1 expansion back up to num_filter channels.
        conv3 = mx.sym.Convolution(data=act3, num_filter=num_filter, kernel=(1,1), stride=(1,1), pad=(0,0), no_bias=True,
                                   workspace=workspace, name=name + '_conv3')
        if dim_match:
            shortcut = data
        else:
            # Projection shortcut: 1x1 conv on the pre-activated input.
            shortcut = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(1,1), stride=stride, no_bias=True,
                                          workspace=workspace, name=name+'_sc')
        if memonger:
            shortcut._set_attr(mirror_stage='True')
        return conv3 + shortcut
    else:
        bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn1')
        act1 = mx.sym.Activation(data=bn1, act_type='relu', name=name + '_relu1')
        conv1 = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(3,3), stride=stride, pad=(1,1),
                                   no_bias=True, workspace=workspace, name=name + '_conv1')
        bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn2')
        act2 = mx.sym.Activation(data=bn2, act_type='relu', name=name + '_relu2')
        conv2 = mx.sym.Convolution(data=act2, num_filter=num_filter, kernel=(3,3), stride=(1,1), pad=(1,1),
                                   no_bias=True, workspace=workspace, name=name + '_conv2')
        if dim_match:
            shortcut = data
        else:
            shortcut = mx.sym.Convolution(data=act1, num_filter=num_filter, kernel=(1,1), stride=stride, no_bias=True,
                                          workspace=workspace, name=name+'_sc')
        if memonger:
            shortcut._set_attr(mirror_stage='True')
        return conv2 + shortcut
def resnet(units, num_stages, filter_list, num_classes, image_shape, bottle_neck=True, bn_mom=0.9, workspace=256, dtype='float32', memonger=False):
    """Return a ResNet network symbol.

    Parameters
    ----------
    units : list
        Number of units in each stage
    num_stages : int
        Number of stages
    filter_list : list
        Channel size of each stage (entry 0 is the stem convolution)
    num_classes : int
        Output size of symbol (number of classification targets)
    image_shape : tuple
        (channels, height, width) of the input images
    bottle_neck : bool
        Whether the residual units use the 3-conv bottleneck layout
    bn_mom : float
        Momentum for the BatchNorm layers
    workspace : int
        Workspace used in convolution operator
    dtype : str
        Precision (float32 or float16)
    memonger : bool
        Whether mirror attributes are set for memory optimization
    """
    num_unit = len(units)
    # One unit count per stage is required for the loops below.
    assert(num_unit == num_stages)
    data = mx.sym.Variable(name='data')
    if dtype == 'float32':
        # Identity keeps the symbol graph shape identical across dtypes.
        data = mx.sym.identity(data=data, name='id')
    else:
        if dtype == 'float16':
            data = mx.sym.Cast(data=data, dtype=np.float16)
    # Input normalization: fix_gamma=True makes this a pure mean/var whitening.
    data = mx.sym.BatchNorm(data=data, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='bn_data')
    (nchannel, height, width) = image_shape
    # Small inputs (CIFAR-style) use a 3x3 stem; large inputs use the
    # standard 7x7/stride-2 stem followed by max pooling.
    if height <= 32:            # such as cifar10
        body = mx.sym.Convolution(data=data, num_filter=filter_list[0], kernel=(3, 3), stride=(1,1), pad=(1, 1),
                                  no_bias=True, name="conv0", workspace=workspace)
    else:                       # often expected to be 224 such as imagenet
        body = mx.sym.Convolution(data=data, num_filter=filter_list[0], kernel=(7, 7), stride=(2,2), pad=(3, 3),
                                  no_bias=True, name="conv0", workspace=workspace)
        body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn0')
        body = mx.sym.Activation(data=body, act_type='relu', name='relu0')
        body = mx.sym.Pooling(data=body, kernel=(3, 3), stride=(2,2), pad=(1,1), pool_type='max')
    for i in range(num_stages):
        # First unit of every stage but the first downsamples with stride 2.
        body = residual_unit(body, filter_list[i+1], (1 if i==0 else 2, 1 if i==0 else 2), False,
                             name='stage%d_unit%d' % (i + 1, 1), bottle_neck=bottle_neck, workspace=workspace,
                             memonger=memonger)
        # Remaining units keep the spatial size (dim_match=True).
        for j in range(units[i]-1):
            body = residual_unit(body, filter_list[i+1], (1,1), True, name='stage%d_unit%d' % (i + 1, j + 2),
                                 bottle_neck=bottle_neck, workspace=workspace, memonger=memonger)
    bn1 = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1')
    relu1 = mx.sym.Activation(data=bn1, act_type='relu', name='relu1')
    # Although kernel is not used here when global_pool=True, we should put one
    pool1 = mx.sym.Pooling(data=relu1, global_pool=True, kernel=(7, 7), pool_type='avg', name='pool1')
    flat = mx.sym.Flatten(data=pool1)
    fc1 = mx.sym.FullyConnected(data=flat, num_hidden=num_classes, name='fc1')
    if dtype == 'float16':
        # Cast logits back so the softmax runs in float32 for stability.
        fc1 = mx.sym.Cast(data=fc1, dtype=np.float32)
    return mx.sym.SoftmaxOutput(data=fc1, name='softmax')
def get_symbol(num_classes, num_layers, image_shape, conv_workspace=256, dtype='float32', **kwargs):
    """
    Adapted from https://github.com/tornadomeet/ResNet/blob/master/train_resnet.py
    Original author Wei Wu

    Map a requested depth (num_layers) and input size onto the stage/unit
    configuration of the matching published ResNet variant, then build it.
    """
    shape = [int(dim) for dim in image_shape.split(',')]
    (nchannel, height, width) = shape
    if height <= 28:
        # CIFAR-style configuration: 3 stages with equal unit counts.
        num_stages = 3
        depth = num_layers - 2
        if depth % 9 == 0 and num_layers >= 164:
            bottle_neck = True
            filter_list = [16, 64, 128, 256]
            units = [depth // 9] * num_stages
        elif depth % 6 == 0 and num_layers < 164:
            bottle_neck = False
            filter_list = [16, 16, 32, 64]
            units = [depth // 6] * num_stages
        else:
            raise ValueError("no experiments done on num_layers {}, you can do it yourself".format(num_layers))
    else:
        # ImageNet-style configuration: 4 stages, unit counts from the paper.
        num_stages = 4
        bottle_neck = num_layers >= 50
        if bottle_neck:
            filter_list = [64, 256, 512, 1024, 2048]
        else:
            filter_list = [64, 64, 128, 256, 512]
        units_by_depth = {
            18: [2, 2, 2, 2],
            34: [3, 4, 6, 3],
            50: [3, 4, 6, 3],
            101: [3, 4, 23, 3],
            152: [3, 8, 36, 3],
            200: [3, 24, 36, 3],
            269: [3, 30, 48, 8],
        }
        if num_layers not in units_by_depth:
            raise ValueError("no experiments done on num_layers {}, you can do it yourself".format(num_layers))
        units = units_by_depth[num_layers]
    return resnet(units=units,
                  num_stages=num_stages,
                  filter_list=filter_list,
                  num_classes=num_classes,
                  image_shape=shape,
                  bottle_neck=bottle_neck,
                  workspace=conv_workspace,
                  dtype=dtype)
|
apache-2.0
|
HellerCommaA/flask-angular
|
lib/python2.7/site-packages/gunicorn/sock.py
|
31
|
6594
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import errno
import os
import socket
import stat
import sys
import time
from gunicorn import util
from gunicorn.six import string_types
# First file descriptor number handed over by systemd socket activation
# (fds 0-2 are stdin/stdout/stderr).
SD_LISTEN_FDS_START = 3
class BaseSocket(object):
    """Listening-socket wrapper shared by the TCP and Unix variants.

    Subclasses must define ``FAMILY``. When ``fd`` is given, the socket is
    assumed to be already bound (e.g. inherited from a parent process) and
    is re-wrapped instead of being created and bound here.
    """

    def __init__(self, address, conf, log, fd=None):
        self.log = log
        self.conf = conf
        self.cfg_addr = address
        if fd is None:
            sock = socket.socket(self.FAMILY, socket.SOCK_STREAM)
        else:
            # Re-wrap an inherited, already-bound file descriptor.
            sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM)
        self.sock = self.set_options(sock, bound=(fd is not None))

    def __str__(self):
        # BUG FIX: this method previously took a spurious required ``name``
        # argument, which made str(sock) raise TypeError.
        return "<socket %d>" % self.sock.fileno()

    def __getattr__(self, name):
        # Delegate everything else (accept, fileno, ...) to the raw socket.
        return getattr(self.sock, name)

    def set_options(self, sock, bound=False):
        """Apply listener options; bind unless the fd was inherited bound."""
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if not bound:
            self.bind(sock)
        sock.setblocking(0)
        sock.listen(self.conf.backlog)
        return sock

    def bind(self, sock):
        sock.bind(self.cfg_addr)

    def close(self):
        """Close the socket, logging (not raising) any socket error."""
        try:
            self.sock.close()
        except socket.error as e:
            self.log.info("Error while closing socket %s", str(e))
        # Brief pause before dropping the reference, as in the original code.
        time.sleep(0.3)
        del self.sock
class TCPSocket(BaseSocket):
    """IPv4 TCP listening socket."""

    FAMILY = socket.AF_INET

    def __str__(self):
        scheme = "https" if self.conf.is_ssl else "http"
        bound = self.sock.getsockname()
        return "%s://%s:%d" % (scheme, bound[0], bound[1])

    def set_options(self, sock, bound=False):
        # Disable Nagle's algorithm before applying the generic options.
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        return super(TCPSocket, self).set_options(sock, bound=bound)
class TCP6Socket(TCPSocket):
    """IPv6 TCP listening socket."""

    FAMILY = socket.AF_INET6

    def __str__(self):
        # AF_INET6 getsockname() returns (host, port, flowinfo, scope_id).
        host, port = self.sock.getsockname()[:2]
        return "http://[%s]:%d" % (host, port)
class UnixSocket(BaseSocket):
    """Unix-domain listening socket backed by a filesystem path."""

    FAMILY = socket.AF_UNIX

    def __init__(self, addr, conf, log, fd=None):
        # Only clean up the path when we are going to bind it ourselves.
        if fd is None:
            self._remove_stale_socket(addr)
        super(UnixSocket, self).__init__(addr, conf, log, fd=fd)

    def _remove_stale_socket(self, path):
        """Delete a leftover socket file at *path*; reject non-socket files."""
        try:
            st = os.stat(path)
        except OSError as e:
            if e.args[0] != errno.ENOENT:
                raise
            return
        if stat.S_ISSOCK(st.st_mode):
            os.remove(path)
        else:
            raise ValueError("%r is not a socket" % path)

    def __str__(self):
        return "unix:%s" % self.cfg_addr

    def bind(self, sock):
        # Bind under the configured umask, then hand ownership to the
        # configured uid/gid before restoring the previous umask.
        previous_umask = os.umask(self.conf.umask)
        sock.bind(self.cfg_addr)
        util.chown(self.cfg_addr, self.conf.uid, self.conf.gid)
        os.umask(previous_umask)

    def close(self):
        super(UnixSocket, self).close()
        os.unlink(self.cfg_addr)
def _sock_type(addr):
    """Map an address to the socket class that should handle it.

    Tuples are TCP endpoints (IPv6 when the host part is an IPv6 literal),
    strings are Unix socket paths; anything else is a TypeError.
    """
    if isinstance(addr, tuple):
        return TCP6Socket if util.is_ipv6(addr[0]) else TCPSocket
    if isinstance(addr, string_types):
        return UnixSocket
    raise TypeError("Unable to create socket from: %r" % addr)
def create_sockets(conf, log):
    """
    Create a new socket for the given address. If the
    address is a tuple, a TCP socket is created. If it
    is a string, a Unix socket is created. Otherwise
    a TypeError is raised.
    """
    listeners = []

    # systemd socket activation: adopt sockets passed down by the init
    # system via the LISTEN_PID/LISTEN_FDS protocol.
    if 'LISTEN_PID' in os.environ and int(os.environ.get('LISTEN_PID')) == os.getpid():
        for i in range(int(os.environ.get('LISTEN_FDS', 0))):
            fd = i + SD_LISTEN_FDS_START
            try:
                sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
                sockname = sock.getsockname()
                if isinstance(sockname, str) and sockname.startswith('/'):
                    listeners.append(UnixSocket(sockname, conf, log, fd=fd))
                elif len(sockname) == 2 and '.' in sockname[0]:
                    listeners.append(TCPSocket("%s:%s" % sockname, conf, log,
                                               fd=fd))
                elif len(sockname) == 4 and ':' in sockname[0]:
                    listeners.append(TCP6Socket("[%s]:%s" % sockname[:2], conf,
                                                log, fd=fd))
            except socket.error:
                # Not a usable stream socket; skip this fd.
                pass
        del os.environ['LISTEN_PID'], os.environ['LISTEN_FDS']

        if listeners:
            log.debug('Socket activation sockets: %s',
                      ",".join([str(l) for l in listeners]))
            return listeners

    # get it only once
    laddr = conf.address

    # check ssl config early to raise the error on startup
    # only the certfile is needed since it can contains the keyfile
    if conf.certfile and not os.path.exists(conf.certfile):
        raise ValueError('certfile "%s" does not exist' % conf.certfile)
    if conf.keyfile and not os.path.exists(conf.keyfile):
        raise ValueError('keyfile "%s" does not exist' % conf.keyfile)

    # sockets are already bound: inherit them via GUNICORN_FD (reexec path).
    if 'GUNICORN_FD' in os.environ:
        fds = os.environ.pop('GUNICORN_FD').split(',')
        for i, fd in enumerate(fds):
            fd = int(fd)
            addr = laddr[i]
            sock_type = _sock_type(addr)
            try:
                listeners.append(sock_type(addr, conf, log, fd=fd))
            except socket.error as e:
                if e.args[0] == errno.ENOTCONN:
                    log.error("GUNICORN_FD should refer to an open socket.")
                else:
                    raise
        return listeners

    # no sockets is bound, first initialization of gunicorn in this env.
    for addr in laddr:
        sock_type = _sock_type(addr)
        sock = None
        # Retry binding a few times: the address may still be held briefly
        # (e.g. by a previous master that is shutting down).
        for i in range(5):
            try:
                sock = sock_type(addr, conf, log)
            except socket.error as e:
                if e.args[0] == errno.EADDRINUSE:
                    log.error("Connection in use: %s", str(addr))
                if e.args[0] == errno.EADDRNOTAVAIL:
                    log.error("Invalid address: %s", str(addr))
                    sys.exit(1)
                # BUG FIX: was ``i < 5``, which is always true inside
                # range(5), so the final failed attempt also logged a
                # "Retrying" message and slept for nothing.
                if i < 4:
                    log.error("Retrying in 1 second.")
                    time.sleep(1)
            else:
                break

        if sock is None:
            log.error("Can't connect to %s", str(addr))
            sys.exit(1)

        listeners.append(sock)

    return listeners
|
mit
|
Havate/havate-openstack
|
proto-build/gui/horizon/Horizon_GUI/openstack_dashboard/dashboards/project/images_and_snapshots/images/forms.py
|
4
|
9619
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing images.
"""
from django.conf import settings # noqa
from django.forms import ValidationError # noqa
from django.forms.widgets import HiddenInput # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
# Image-backend configuration from the Django settings file (may be absent).
IMAGE_BACKEND_SETTINGS = getattr(settings, 'OPENSTACK_IMAGE_BACKEND', {})
# (value, label) pairs for the image disk-format <select> widget.
IMAGE_FORMAT_CHOICES = IMAGE_BACKEND_SETTINGS.get('image_formats', [])
class CreateImageForm(forms.SelfHandlingForm):
    """Form used to register a new image in Glance.

    The image bits come either from an external HTTP location
    (``copy_from``) or from a file uploaded through the dashboard
    (``image_file``); :meth:`clean` enforces that exactly one is given.
    """
    name = forms.CharField(max_length="255", label=_("Name"), required=True)
    description = forms.CharField(widget=forms.widgets.Textarea(),
                                  label=_("Description"),
                                  required=False)
    source_type = forms.ChoiceField(
        label=_('Image Source'),
        choices=[('url', _('Image Location')),
                 ('file', _('Image File'))],
        widget=forms.Select(attrs={
            'class': 'switchable',
            'data-slug': 'source'}))
    copy_from = forms.CharField(max_length="255",
                                label=_("Image Location"),
                                help_text=_("An external (HTTP) URL to load "
                                            "the image from."),
                                widget=forms.TextInput(attrs={
                                    'class': 'switched',
                                    'data-switch-on': 'source',
                                    'data-source-url': _('Image Location')}),
                                required=False)
    image_file = forms.FileField(label=_("Image File"),
                                 help_text=_("A local image to upload."),
                                 widget=forms.FileInput(attrs={
                                     'class': 'switched',
                                     'data-switch-on': 'source',
                                     'data-source-file': _('Image File')}),
                                 required=False)
    disk_format = forms.ChoiceField(label=_('Format'),
                                    required=True,
                                    choices=[],
                                    widget=forms.Select(attrs={'class':
                                                               'switchable'}))
    minimum_disk = forms.IntegerField(label=_("Minimum Disk (GB)"),
                                      help_text=_('The minimum disk size'
                                                  ' required to boot the'
                                                  ' image. If unspecified, this'
                                                  ' value defaults to 0'
                                                  ' (no minimum).'),
                                      required=False)
    # BUG FIX: help text previously said "minimum disk size" (copy/paste from
    # minimum_disk); this field is the minimum RAM required to boot.
    minimum_ram = forms.IntegerField(label=_("Minimum Ram (MB)"),
                                     help_text=_('The minimum memory size'
                                                 ' required to boot the'
                                                 ' image. If unspecified, this'
                                                 ' value defaults to 0 (no'
                                                 ' minimum).'),
                                     required=False)
    is_public = forms.BooleanField(label=_("Public"), required=False)
    protected = forms.BooleanField(label=_("Protected"), required=False)

    def __init__(self, *args, **kwargs):
        super(CreateImageForm, self).__init__(*args, **kwargs)
        # Hide the upload widget entirely when uploads are disabled site-wide.
        if not settings.HORIZON_IMAGES_ALLOW_UPLOAD:
            self.fields['image_file'].widget = HiddenInput()
        self.fields['disk_format'].choices = IMAGE_FORMAT_CHOICES

    def clean(self):
        """Ensure exactly one image source (URL or uploaded file) was given."""
        data = super(CreateImageForm, self).clean()
        # The image_file key can be missing based on particular upload
        # conditions. Code defensively for it here...
        image_file = data.get('image_file', None)
        if not data['copy_from'] and not image_file:
            # BUG FIX: user-facing grammar ("A image" -> "An image").
            raise ValidationError(
                _("An image or external image location must be specified."))
        elif data['copy_from'] and image_file:
            raise ValidationError(
                _("Can not specify both image and external image location."))
        else:
            return data

    def handle(self, request, data):
        """Queue the image for creation in Glance; return it or None on error."""
        # Glance does not really do anything with container_format at the
        # moment. It requires it is set to the same disk_format for the three
        # Amazon image types, otherwise it just treats them as 'bare.' As such
        # we will just set that to be that here instead of bothering the user
        # with asking them for information we can already determine.
        if data['disk_format'] in ('ami', 'aki', 'ari',):
            container_format = data['disk_format']
        else:
            container_format = 'bare'
        meta = {'is_public': data['is_public'],
                'protected': data['protected'],
                'disk_format': data['disk_format'],
                'container_format': container_format,
                'min_disk': (data['minimum_disk'] or 0),
                'min_ram': (data['minimum_ram'] or 0),
                'name': data['name'],
                'properties': {}}
        if data['description']:
            meta['properties']['description'] = data['description']
        if (settings.HORIZON_IMAGES_ALLOW_UPLOAD and
                data.get('image_file', None)):
            meta['data'] = self.files['image_file']
        else:
            meta['copy_from'] = data['copy_from']
        try:
            image = api.glance.image_create(request, **meta)
            messages.success(request,
                             _('Your image %s has been queued for creation.') %
                             data['name'])
            return image
        except Exception:
            exceptions.handle(request, _('Unable to create new image.'))
class UpdateImageForm(forms.SelfHandlingForm):
    """Form for editing the metadata of an existing Glance image."""
    image_id = forms.CharField(widget=forms.HiddenInput())
    name = forms.CharField(max_length="255", label=_("Name"))
    description = forms.CharField(widget=forms.widgets.Textarea(),
                                  label=_("Description"),
                                  required=False)
    # The fields below are shown read-only: they are informational and not
    # meant to be edited from the dashboard.
    kernel = forms.CharField(max_length="36", label=_("Kernel ID"),
                             required=False,
                             widget=forms.TextInput(
                                 attrs={'readonly': 'readonly'}
                             ))
    ramdisk = forms.CharField(max_length="36", label=_("Ramdisk ID"),
                              required=False,
                              widget=forms.TextInput(
                                  attrs={'readonly': 'readonly'}
                              ))
    architecture = forms.CharField(label=_("Architecture"), required=False,
                                   widget=forms.TextInput(
                                       attrs={'readonly': 'readonly'}
                                   ))
    disk_format = forms.CharField(label=_("Format"),
                                  widget=forms.TextInput(
                                      attrs={'readonly': 'readonly'}
                                  ))
    public = forms.BooleanField(label=_("Public"), required=False)
    protected = forms.BooleanField(label=_("Protected"), required=False)
    def handle(self, request, data):
        """Push the edited metadata to Glance; return the image or None."""
        image_id = data['image_id']
        error_updating = _('Unable to update image "%s".')
        # Mirror the container_format rule used on creation: the Amazon
        # formats must match disk_format, everything else is 'bare'.
        if data['disk_format'] in ['aki', 'ari', 'ami']:
            container_format = data['disk_format']
        else:
            container_format = 'bare'
        meta = {'is_public': data['public'],
                'protected': data['protected'],
                'disk_format': data['disk_format'],
                'container_format': container_format,
                'name': data['name'],
                'properties': {}}
        if data['description']:
            meta['properties']['description'] = data['description']
        if data['kernel']:
            meta['properties']['kernel_id'] = data['kernel']
        if data['ramdisk']:
            meta['properties']['ramdisk_id'] = data['ramdisk']
        if data['architecture']:
            meta['properties']['architecture'] = data['architecture']
        # Ensure we do not delete properties that have already been
        # set on an image.
        meta['purge_props'] = False
        try:
            image = api.glance.image_update(request, image_id, **meta)
            messages.success(request, _('Image was successfully updated.'))
            return image
        except Exception:
            exceptions.handle(request, error_updating % image_id)
|
apache-2.0
|
grumpyjames/buck
|
third-party/py/unittest2/unittest2/loader.py
|
139
|
13421
|
"""Loading unittests."""
import os
import re
import sys
import traceback
import types
import unittest
from fnmatch import fnmatch
from unittest2 import case, suite
try:
from os.path import relpath
except ImportError:
from unittest2.compatibility import relpath
# Marks this module as unittest-internal — presumably consumed by unittest's
# traceback pruning so frames from this file are hidden in failure output;
# confirm against the unittest2 result/traceback code.
__unittest = True
def _CmpToKey(mycmp):
'Convert a cmp= function into a key= function'
class K(object):
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) == -1
return K
# what about .pyc or .pyo (etc)
# we would need to avoid loading the same tests multiple times
# from '.py', '.pyc' *and* '.pyo'
# Matches importable module file names: a Python identifier plus '.py'.
VALID_MODULE_NAME = re.compile(r'[_a-z]\w*\.py$', re.IGNORECASE)
def _make_failed_import_test(name, suiteClass):
    """Build a one-test suite whose single test re-raises an import failure."""
    message = 'Failed to import test module: %s' % name
    # traceback.format_exc is missing on Python 2.3, hence the feature test;
    # its output also includes two frames of discover.py itself.
    if hasattr(traceback, 'format_exc'):
        message = '%s\n%s' % (message, traceback.format_exc())
    return _make_failed_test(
        'ModuleImportFailure', name, ImportError(message), suiteClass)
def _make_failed_load_tests(name, exception, suiteClass):
    """Wrap a load_tests failure as a suite containing one failing test."""
    return _make_failed_test('LoadTestsFailure', name, exception, suiteClass)
def _make_failed_test(classname, methodname, exception, suiteClass):
    """Synthesize a suite with one TestCase method that raises *exception*."""
    def testFailure(self):
        raise exception
    # Build a throwaway TestCase subclass on the fly whose only method
    # re-raises the recorded error when the suite is run.
    TestClass = type(classname, (case.TestCase,), {methodname: testFailure})
    return suiteClass((TestClass(methodname),))
class TestLoader(unittest.TestLoader):
    """
    This class is responsible for loading tests according to various criteria
    and returning them wrapped in a TestSuite
    """
    # Methods whose names start with this prefix are collected as tests.
    testMethodPrefix = 'test'
    # Python 2 ``cmp``-style function used to order test method names.
    sortTestMethodsUsing = cmp
    # Factory that wraps collected tests; replaceable by subclasses/users.
    suiteClass = suite.TestSuite
    # Root directory tests are imported from; set by discover().
    _top_level_dir = None
    def loadTestsFromTestCase(self, testCaseClass):
        """Return a suite of all tests cases contained in testCaseClass"""
        if issubclass(testCaseClass, suite.TestSuite):
            raise TypeError("Test cases should not be derived from TestSuite."
                            " Maybe you meant to derive from TestCase?")
        testCaseNames = self.getTestCaseNames(testCaseClass)
        # Fall back to the historical ``runTest`` convention when no
        # prefixed test methods were found.
        if not testCaseNames and hasattr(testCaseClass, 'runTest'):
            testCaseNames = ['runTest']
        loaded_suite = self.suiteClass(map(testCaseClass, testCaseNames))
        return loaded_suite
    def loadTestsFromModule(self, module, use_load_tests=True):
        """Return a suite of all tests cases contained in the given module"""
        tests = []
        for name in dir(module):
            obj = getattr(module, name)
            if isinstance(obj, type) and issubclass(obj, unittest.TestCase):
                tests.append(self.loadTestsFromTestCase(obj))
        load_tests = getattr(module, 'load_tests', None)
        tests = self.suiteClass(tests)
        # load_tests protocol: a module-level hook that may replace or
        # extend the automatically collected suite.
        if use_load_tests and load_tests is not None:
            try:
                return load_tests(self, tests, None)
            except Exception, e:
                # Surface the failure as a synthetic failing test instead
                # of aborting the whole collection.
                return _make_failed_load_tests(module.__name__, e,
                                               self.suiteClass)
        return tests
    def loadTestsFromName(self, name, module=None):
        """Return a suite of all tests cases given a string specifier.
        The name may resolve either to a module, a test case class, a
        test method within a test case class, or a callable object which
        returns a TestCase or TestSuite instance.
        The method optionally resolves the names relative to a given module.
        """
        parts = name.split('.')
        if module is None:
            # Import the longest importable prefix of the dotted name; the
            # remaining parts are resolved with getattr below.
            parts_copy = parts[:]
            while parts_copy:
                try:
                    module = __import__('.'.join(parts_copy))
                    break
                except ImportError:
                    del parts_copy[-1]
                    if not parts_copy:
                        raise
            parts = parts[1:]
        obj = module
        for part in parts:
            parent, obj = obj, getattr(obj, part)
        if isinstance(obj, types.ModuleType):
            return self.loadTestsFromModule(obj)
        elif isinstance(obj, type) and issubclass(obj, unittest.TestCase):
            return self.loadTestsFromTestCase(obj)
        elif (isinstance(obj, types.UnboundMethodType) and
              isinstance(parent, type) and
              issubclass(parent, case.TestCase)):
            # A single test method: wrap it in a one-test suite.
            return self.suiteClass([parent(obj.__name__)])
        elif isinstance(obj, unittest.TestSuite):
            return obj
        elif hasattr(obj, '__call__'):
            # A callable: it must itself produce a TestCase or TestSuite.
            test = obj()
            if isinstance(test, unittest.TestSuite):
                return test
            elif isinstance(test, unittest.TestCase):
                return self.suiteClass([test])
            else:
                raise TypeError("calling %s returned %s, not a test" %
                                (obj, test))
        else:
            raise TypeError("don't know how to make test from: %s" % obj)
    def loadTestsFromNames(self, names, module=None):
        """Return a suite of all tests cases found using the given sequence
        of string specifiers. See 'loadTestsFromName()'.
        """
        suites = [self.loadTestsFromName(name, module) for name in names]
        return self.suiteClass(suites)
    def getTestCaseNames(self, testCaseClass):
        """Return a sorted sequence of method names found within testCaseClass
        """
        def isTestMethod(attrname, testCaseClass=testCaseClass,
                         prefix=self.testMethodPrefix):
            # Default arguments bind the loop-invariant values at def time.
            return attrname.startswith(prefix) and \
                hasattr(getattr(testCaseClass, attrname), '__call__')
        # Python 2: filter() returns a list here, so in-place sort works.
        testFnNames = filter(isTestMethod, dir(testCaseClass))
        if self.sortTestMethodsUsing:
            testFnNames.sort(key=_CmpToKey(self.sortTestMethodsUsing))
        return testFnNames
    def discover(self, start_dir, pattern='test*.py', top_level_dir=None):
        """Find and return all test modules from the specified start
        directory, recursing into subdirectories to find them. Only test files
        that match the pattern will be loaded. (Using shell style pattern
        matching.)
        All test modules must be importable from the top level of the project.
        If the start directory is not the top level directory then the top
        level directory must be specified separately.
        If a test package name (directory with '__init__.py') matches the
        pattern then the package will be checked for a 'load_tests' function. If
        this exists then it will be called with loader, tests, pattern.
        If load_tests exists then discovery does *not* recurse into the package,
        load_tests is responsible for loading all tests in the package.
        The pattern is deliberately not stored as a loader attribute so that
        packages can continue discovery themselves. top_level_dir is stored so
        load_tests does not need to pass this argument in to loader.discover().
        """
        set_implicit_top = False
        if top_level_dir is None and self._top_level_dir is not None:
            # make top_level_dir optional if called from load_tests in a package
            top_level_dir = self._top_level_dir
        elif top_level_dir is None:
            set_implicit_top = True
            top_level_dir = start_dir
        top_level_dir = os.path.abspath(top_level_dir)
        if not top_level_dir in sys.path:
            # all test modules must be importable from the top level directory
            # should we *unconditionally* put the start directory in first
            # in sys.path to minimise likelihood of conflicts between installed
            # modules and development versions?
            sys.path.insert(0, top_level_dir)
        self._top_level_dir = top_level_dir
        is_not_importable = False
        if os.path.isdir(os.path.abspath(start_dir)):
            start_dir = os.path.abspath(start_dir)
            if start_dir != top_level_dir:
                is_not_importable = not os.path.isfile(os.path.join(start_dir, '__init__.py'))
        else:
            # support for discovery from dotted module names
            try:
                __import__(start_dir)
            except ImportError:
                is_not_importable = True
            else:
                the_module = sys.modules[start_dir]
                top_part = start_dir.split('.')[0]
                start_dir = os.path.abspath(os.path.dirname((the_module.__file__)))
                if set_implicit_top:
                    # Infer the project root from the imported package and
                    # undo the provisional sys.path entry added above.
                    self._top_level_dir = os.path.abspath(os.path.dirname(os.path.dirname(sys.modules[top_part].__file__)))
                    sys.path.remove(top_level_dir)
        if is_not_importable:
            raise ImportError('Start directory is not importable: %r' % start_dir)
        tests = list(self._find_tests(start_dir, pattern))
        return self.suiteClass(tests)
    def _get_name_from_path(self, path):
        # Convert a filesystem path into a dotted module name relative to
        # the top-level directory.
        path = os.path.splitext(os.path.normpath(path))[0]
        _relpath = relpath(path, self._top_level_dir)
        assert not os.path.isabs(_relpath), "Path must be within the project"
        assert not _relpath.startswith('..'), "Path must be within the project"
        name = _relpath.replace(os.path.sep, '.')
        return name
    def _get_module_from_name(self, name):
        # Import and return the named module object.
        __import__(name)
        return sys.modules[name]
    def _match_path(self, path, full_path, pattern):
        # override this method to use alternative matching strategy
        return fnmatch(path, pattern)
    def _find_tests(self, start_dir, pattern):
        """Used by discovery. Yields test suites it loads."""
        paths = os.listdir(start_dir)
        for path in paths:
            full_path = os.path.join(start_dir, path)
            if os.path.isfile(full_path):
                if not VALID_MODULE_NAME.match(path):
                    # valid Python identifiers only
                    continue
                if not self._match_path(path, full_path, pattern):
                    continue
                # if the test file matches, load it
                name = self._get_name_from_path(full_path)
                try:
                    module = self._get_module_from_name(name)
                except:
                    # Any import-time error becomes a synthetic failing test.
                    yield _make_failed_import_test(name, self.suiteClass)
                else:
                    # Guard against having imported a same-named module from a
                    # different location (e.g. a globally installed copy).
                    mod_file = os.path.abspath(getattr(module, '__file__', full_path))
                    realpath = os.path.splitext(mod_file)[0]
                    fullpath_noext = os.path.splitext(full_path)[0]
                    if realpath.lower() != fullpath_noext.lower():
                        module_dir = os.path.dirname(realpath)
                        mod_name = os.path.splitext(os.path.basename(full_path))[0]
                        expected_dir = os.path.dirname(full_path)
                        msg = ("%r module incorrectly imported from %r. Expected %r. "
                               "Is this module globally installed?")
                        raise ImportError(msg % (mod_name, module_dir, expected_dir))
                    yield self.loadTestsFromModule(module)
            elif os.path.isdir(full_path):
                if not os.path.isfile(os.path.join(full_path, '__init__.py')):
                    # Not a package; nothing importable here.
                    continue
                load_tests = None
                tests = None
                if fnmatch(path, pattern):
                    # only check load_tests if the package directory itself matches the filter
                    name = self._get_name_from_path(full_path)
                    package = self._get_module_from_name(name)
                    load_tests = getattr(package, 'load_tests', None)
                    tests = self.loadTestsFromModule(package, use_load_tests=False)
                if load_tests is None:
                    if tests is not None:
                        # tests loaded from package file
                        yield tests
                    # recurse into the package
                    for test in self._find_tests(full_path, pattern):
                        yield test
                else:
                    try:
                        yield load_tests(self, tests, pattern)
                    except Exception, e:
                        yield _make_failed_load_tests(package.__name__, e,
                                                      self.suiteClass)
# Shared module-level loader instance, mirroring unittest.defaultTestLoader.
defaultTestLoader = TestLoader()
def _makeLoader(prefix, sortUsing, suiteClass=None):
    """Return a TestLoader configured with the given prefix/sort/suite class."""
    loader = TestLoader()
    loader.testMethodPrefix = prefix
    loader.sortTestMethodsUsing = sortUsing
    # suiteClass is a class object or None, so a plain truth test suffices.
    if suiteClass is not None:
        loader.suiteClass = suiteClass
    return loader
def getTestCaseNames(testCaseClass, prefix, sortUsing=cmp):
    """Module-level helper: return sorted test method names of testCaseClass."""
    return _makeLoader(prefix, sortUsing).getTestCaseNames(testCaseClass)
def makeSuite(testCaseClass, prefix='test', sortUsing=cmp,
              suiteClass=suite.TestSuite):
    """Module-level helper: return a suite of all tests in testCaseClass."""
    return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromTestCase(testCaseClass)
def findTestCases(module, prefix='test', sortUsing=cmp,
                  suiteClass=suite.TestSuite):
    """Module-level helper: return a suite of all tests found in *module*."""
    return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromModule(module)
|
apache-2.0
|
ElDeveloper/scikit-learn
|
examples/decomposition/plot_pca_iris.py
|
253
|
1801
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=========================================================
PCA example with Iris Data-set
=========================================================
Principal Component Analysis applied to the Iris dataset.
See `here <http://en.wikipedia.org/wiki/Iris_flower_data_set>`_ for more
information on this dataset.
"""
print(__doc__)
# Code source: Gaël Varoquaux
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sklearn import decomposition
from sklearn import datasets
# Fix the RNG for reproducibility of the figure.
np.random.seed(5)
# NOTE(review): `centers` is never used below — looks like leftover from a
# clustering example; confirm before removing.
centers = [[1, 1], [-1, -1], [1, -1]]
iris = datasets.load_iris()
X = iris.data
y = iris.target
fig = plt.figure(1, figsize=(4, 3))
plt.clf()
ax = Axes3D(fig, rect=[0, 0, .95, 1], elev=48, azim=134)
plt.cla()
# Project the 4-D iris measurements onto their first three principal axes.
pca = decomposition.PCA(n_components=3)
pca.fit(X)
X = pca.transform(X)
# Label each species cluster at its centroid (offset on the 2nd axis).
for name, label in [('Setosa', 0), ('Versicolour', 1), ('Virginica', 2)]:
    ax.text3D(X[y == label, 0].mean(),
              X[y == label, 1].mean() + 1.5,
              X[y == label, 2].mean(), name,
              horizontalalignment='center',
              bbox=dict(alpha=.5, edgecolor='w', facecolor='w'))
# Reorder the labels to have colors matching the cluster results
# NOTE(review): np.float was removed in NumPy 1.24; use float/np.float64 on
# modern NumPy. plt.cm.spectral was removed in Matplotlib 2.2 ('Spectral' /
# 'nipy_spectral' are the modern names).
y = np.choose(y, [1, 2, 0]).astype(np.float)
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.spectral)
# x_surf/y_surf and v0/v1 below are computed but never plotted.
x_surf = [X[:, 0].min(), X[:, 0].max(),
          X[:, 0].min(), X[:, 0].max()]
y_surf = [X[:, 0].max(), X[:, 0].max(),
          X[:, 0].min(), X[:, 0].min()]
x_surf = np.array(x_surf)
y_surf = np.array(y_surf)
# NOTE(review): dead code — v0/v1 are unused, and passing a 1-D component to
# pca.transform raises on modern scikit-learn (2-D input expected); confirm
# and consider deleting.
v0 = pca.transform(pca.components_[0])
v0 /= v0[-1]
v1 = pca.transform(pca.components_[1])
v1 /= v1[-1]
# Hide tick labels — PCA axis units carry no physical meaning here.
ax.w_xaxis.set_ticklabels([])
ax.w_yaxis.set_ticklabels([])
ax.w_zaxis.set_ticklabels([])
plt.show()
|
bsd-3-clause
|
FescueFungiShare/hydroshare
|
hs_tracking/views.py
|
2
|
2158
|
import csv
from cStringIO import StringIO
from django.views.generic import TemplateView
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import user_passes_test
from django.http import HttpResponse
from . import models as hs_tracking
class UseTrackingView(TemplateView):
    """Staff-only dashboard page for the use-tracking app."""
    template_name = 'hs_tracking/tracking.html'
    @method_decorator(user_passes_test(lambda u: u.is_staff))
    def dispatch(self, *args, **kwargs):
        # Restrict access to staff accounts before normal view dispatch.
        return super(UseTrackingView, self).dispatch(*args, **kwargs)
    def get_context_data(self, **kwargs):
        # The template needs no extra context.
        return {}
class VisitorProfileReport(TemplateView):
    """Staff-only CSV export of per-visitor profile information."""

    @method_decorator(user_passes_test(lambda u: u.is_staff))
    def dispatch(self, *args, **kwargs):
        return super(VisitorProfileReport, self).dispatch(*args, **kwargs)

    def get(self, request, **kwargs):
        """Download a CSV report of use tracking data."""
        buf = StringIO()
        writer = csv.writer(buf)
        # Header row, then one row per visitor in VISITOR_FIELDS order.
        writer.writerow(hs_tracking.VISITOR_FIELDS)
        rows = (
            [visitor.export_visitor_information()[field]
             for field in hs_tracking.VISITOR_FIELDS]
            for visitor in hs_tracking.Visitor.objects.all()
        )
        writer.writerows(rows)
        return HttpResponse(buf.getvalue(), content_type="text/csv")
class HistoryReport(TemplateView):
    """CSV export of every tracked Variable (the full activity history)."""
    # NOTE(review/security): the staff-only guard is commented out, so this
    # report — which exposes visitor/session tracking data — is reachable by
    # any user, unlike UseTrackingView and VisitorProfileReport in this file.
    # If that is unintentional, re-enable the decorator below.
    # @method_decorator(user_passes_test(lambda u: u.is_staff))
    def dispatch(self, *args, **kwargs):
        return super(HistoryReport, self).dispatch(*args, **kwargs)
    def get(self, request, **kwargs):
        """Download a CSV report of use tracking data."""
        f = StringIO()
        w = csv.writer(f)
        w.writerow(
            ['visitor', 'session', 'session start', 'timestamp', 'variable', 'type', 'value'])
        # One row per recorded variable, oldest first.
        variables = hs_tracking.Variable.objects.all().order_by('timestamp')
        for v in variables:
            row = [v.session.visitor.id, v.session.id, v.session.begin, v.timestamp,
                   v.name, v.get_type_display(), v.value]
            w.writerow(row)
        f.seek(0)
        return HttpResponse(f.read(), content_type="text/csv")
|
bsd-3-clause
|
avidoggy/Pygorithm
|
ArtificialIntelligence/Path_Finding.py
|
1
|
6073
|
#Your code here
#You can import some modules or create additional functions
def checkio(maze_map):
    """Find a route through *maze_map* from (1, 1) to (10, 10).

    The maze is a grid of 0 (open) / 1 (wall) cells. A depth-first search
    with an explicit stack explores open neighbours, marking visited cells
    in a working copy, and backtracks at dead ends.

    Returns a string of moves over 'NSEW', or "" when no route exists.
    """
    MOVE = {"S": (1, 0), "N": (-1, 0), "W": (0, -1), "E": (0, 1)}
    # Work on a copy so the caller's maze is untouched; visited cells are
    # marked with 1 so they are never revisited.
    copy_maze_map = [row[:] for row in maze_map]
    current_pos = (1, 1)
    copy_maze_map[current_pos[0]][current_pos[1]] = 1
    route = [current_pos]
    goal = (10, 10)
    stack = []
    while current_pos[0] != goal[0] or current_pos[1] != goal[1]:
        # Collect unvisited open neighbours of the current cell.
        go_pos = []
        for mx, my in [(current_pos[0] + m[0], current_pos[1] + m[1]) for m in MOVE.values()]:
            if copy_maze_map[mx][my] == 0:
                go_pos.append((mx, my))
        if len(go_pos) == 0:
            if len(stack) == 0:
                print("can't find route")
                return ""
            # Dead end: backtrack route to the cell the next candidate
            # branches from, then continue from that candidate.
            current_pos, back_pos = stack.pop()
            while route[-1] != back_pos:
                route.pop()
        else:
            for pos in go_pos:
                stack.append((pos, current_pos[:]))
            current_pos = stack.pop()[0]
        copy_maze_map[current_pos[0]][current_pos[1]] = 1
        route.append(current_pos)
    result = ""
    # Translate consecutive route positions back into move letters.
    # BUG FIX: was ``xrange`` (Python-2-only builtin); ``range`` behaves
    # identically here on both Python 2 and 3.
    for i in range(0, len(route) - 1):
        value = (route[i + 1][0] - route[i][0], route[i + 1][1] - route[i][1])
        for k, v in MOVE.items():
            if value == v:
                result += k
    return result
if __name__ == '__main__':
    #web page:http://www.checkio.org/mission/open-labyrinth/
    #This code using only for self-checking and not necessary for auto-testing
    def check_route(func, labyrinth):
        """Replay the route returned by ``func`` on ``labyrinth`` and verify
        it reaches the exit at (10, 10) without stepping into a wall cell."""
        MOVE = {"S": (1, 0), "N": (-1, 0), "W": (0, -1), "E": (0, 1)}
        #copy maze
        route = func([row[:] for row in labyrinth])
        pos = (1, 1)
        goal = (10, 10)
        for i, d in enumerate(route):
            move = MOVE.get(d, None)
            if not move:
                print("Wrong symbol in route")
                return False
            pos = pos[0] + move[0], pos[1] + move[1]
            if pos == goal:
                return True
            if labyrinth[pos[0]][pos[1]] == 1:
                print("Player in the pit")
                return False
        print("Player did not reach exit")
        return False
    # These assert are using only for self-testing as examples.
    assert check_route(checkio, [
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1],
        [1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1],
        [1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 1],
        [1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1],
        [1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1],
        [1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1],
        [1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]), "First maze"
    assert check_route(checkio, [
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]), "Empty maze"
    assert check_route(checkio, [
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1],
        [1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1],
        [1, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]), "Up and down maze"
    assert check_route(checkio, [
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1],
        [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1],
        [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1],
        [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 1],
        [1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1],
        [1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]), "Dotted maze"
    assert check_route(checkio, [
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1],
        [1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1],
        [1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1],
        [1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1],
        [1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1],
        [1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1],
        [1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]), "Need left maze"
    assert check_route(checkio, [
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1],
        [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1],
        [1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
        [1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1],
        [1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
        [1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1],
        [1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1],
        [1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 1],
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]), "The big dead end."
    print("The local tests are done.")
|
gpl-2.0
|
zbqf109/goodo
|
openerp/tests/common.py
|
2
|
14850
|
# -*- coding: utf-8 -*-
"""
The module :mod:`openerp.tests.common` provides unittest2 test cases and a few
helpers and classes to write tests.
"""
import errno
import glob
import importlib
import json
import logging
import os
import select
import subprocess
import threading
import time
import itertools
import unittest2
import urllib2
import xmlrpclib
from contextlib import contextmanager
from datetime import datetime, timedelta
from pprint import pformat
import werkzeug
import openerp
from openerp import api
from openerp.modules.registry import RegistryManager
_logger = logging.getLogger(__name__)
# The openerp library is supposed already configured.
ADDONS_PATH = openerp.tools.config['addons_path']
HOST = '127.0.0.1'
PORT = openerp.tools.config['xmlrpc_port']
# Useless constant, tests are aware of the content of demo data
ADMIN_USER_ID = openerp.SUPERUSER_ID
def get_db_name():
    """Return the name of the database under test.

    Prefers the configured ``db_name``; when that is empty, falls back to
    the name attached to the current thread (the case when the database is
    being installed over XML-RPC rather than given on the command line).
    """
    configured = openerp.tools.config['db_name']
    if configured:
        return configured
    thread = threading.current_thread()
    if hasattr(thread, 'dbname'):
        return thread.dbname
    return configured
# For backwards-compatibility - get_db_name() should be used instead
DB = get_db_name()
def at_install(flag):
    """Mark a test's at-install state.

    :param flag: ``True`` if the test must run while its module is being
        installed (the default behaviour), ``False`` to skip it then.
    :returns: a decorator that tags the test object and returns it.
    """
    def mark(test_obj):
        # The test runner later inspects this attribute to decide when to run.
        test_obj.at_install = flag
        return test_obj
    return mark
def post_install(flag):
    """Mark a test's post-install state.

    :param flag: ``True`` if the test must run after the whole installation
        set has been processed; tests default to *not* running then.
    :returns: a decorator that tags the test object and returns it.
    """
    def mark(test_obj):
        # The test runner later inspects this attribute to decide when to run.
        test_obj.post_install = flag
        return test_obj
    return mark
class BaseCase(unittest2.TestCase):
    """
    Subclass of TestCase for common OpenERP-specific code.
    This class is abstract and expects self.registry, self.cr and self.uid to be
    initialized by subclasses.
    """
    def cursor(self):
        # Open a fresh database cursor on the registry under test.
        return self.registry.cursor()
    def ref(self, xid):
        """ Returns database ID for the provided :term:`external identifier`,
        shortcut for ``get_object_reference``
        :param xid: fully-qualified :term:`external identifier`, in the form
                    :samp:`{module}.{identifier}`
        :raise: ValueError if not found
        :returns: registered id
        """
        assert "." in xid, "this method requires a fully qualified parameter, in the following form: 'module.identifier'"
        module, xid = xid.split('.')
        _, id = self.registry('ir.model.data').get_object_reference(self.cr, self.uid, module, xid)
        return id
    def browse_ref(self, xid):
        """ Returns a record object for the provided
        :term:`external identifier`
        :param xid: fully-qualified :term:`external identifier`, in the form
                    :samp:`{module}.{identifier}`
        :raise: ValueError if not found
        :returns: :class:`~openerp.models.BaseModel`
        """
        assert "." in xid, "this method requires a fully qualified parameter, in the following form: 'module.identifier'"
        module, xid = xid.split('.')
        return self.registry('ir.model.data').get_object(self.cr, self.uid, module, xid)
    @contextmanager
    def _assertRaises(self, exception):
        """ Context manager that clears the environment upon failure. """
        # Nest the environment-clearing context inside assertRaises so that
        # the expected exception also rolls back pending environment changes.
        with super(BaseCase, self).assertRaises(exception) as cm:
            with self.env.clear_upon_failure():
                yield cm
    def assertRaises(self, exception, func=None, *args, **kwargs):
        # Mirror unittest's dual API: direct-call form and context-manager form.
        if func:
            with self._assertRaises(exception):
                func(*args, **kwargs)
        else:
            return self._assertRaises(exception)
class TransactionCase(BaseCase):
    """ TestCase in which each test method is run in its own transaction,
    and with its own cursor. The transaction is rolled back and the cursor
    is closed after each test.
    """
    def setUp(self):
        # Fresh registry, cursor and environment per test keeps tests isolated.
        self.registry = RegistryManager.get(get_db_name())
        #: current transaction's cursor
        self.cr = self.cursor()
        self.uid = openerp.SUPERUSER_ID
        #: :class:`~openerp.api.Environment` for the current test case
        self.env = api.Environment(self.cr, self.uid, {})
    def tearDown(self):
        # rollback and close the cursor, and reset the environments
        self.registry.clear_caches()
        self.env.reset()
        self.cr.rollback()
        self.cr.close()
class SingleTransactionCase(BaseCase):
    """ TestCase in which all test methods are run in the same transaction,
    the transaction is started with the first test method and rolled back at
    the end of the last.
    """
    @classmethod
    def setUpClass(cls):
        # One shared cursor/environment for the whole class: state set up by
        # one test method is visible to the next.
        cls.registry = RegistryManager.get(get_db_name())
        cls.cr = cls.registry.cursor()
        cls.uid = openerp.SUPERUSER_ID
        cls.env = api.Environment(cls.cr, cls.uid, {})
    @classmethod
    def tearDownClass(cls):
        # rollback and close the cursor, and reset the environments
        cls.registry.clear_caches()
        cls.env.reset()
        cls.cr.rollback()
        cls.cr.close()
savepoint_seq = itertools.count()
class SavepointCase(SingleTransactionCase):
    """ Similar to :class:`SingleTransactionCase` in that all test methods
    are run in a single transaction *but* each test case is run inside a
    rollbacked savepoint (sub-transaction).
    Useful for test cases containing fast tests but with significant database
    setup common to all cases (complex in-db test data): :meth:`~.setUpClass`
    can be used to generate db test data once, then all test cases use the
    same data without influencing one another but without having to recreate
    the test data either.
    """
    def setUp(self):
        # Globally-unique savepoint name so nested/parallel cases can't clash.
        self._savepoint_id = next(savepoint_seq)
        self.cr.execute('SAVEPOINT test_%d' % self._savepoint_id)
    def tearDown(self):
        # Undo everything this test did inside the shared transaction.
        self.cr.execute('ROLLBACK TO SAVEPOINT test_%d' % self._savepoint_id)
        self.env.clear()
        self.registry.clear_caches()
class RedirectHandler(urllib2.HTTPRedirectHandler):
    """
    HTTPRedirectHandler is predicated upon HTTPErrorProcessor being used and
    works by intercepting 3xy "errors".
    Inherit from it to handle 3xy non-error responses instead, as we're not
    using the error processor
    """
    def http_response(self, request, response):
        status = response.code
        if not (300 <= status < 400):
            return response
        # Route 3xy responses through the opener's error machinery, which
        # is what ultimately triggers the inherited redirect handling.
        return self.parent.error(
            'http', request, response, status, response.msg, response.info())
    https_response = http_response
class HttpCase(TransactionCase):
    """ Transactional HTTP TestCase with url_open and phantomjs helpers.
    """
    def __init__(self, methodName='runTest'):
        super(HttpCase, self).__init__(methodName)
        # v8 api with correct xmlrpc exception handling.
        self.xmlrpc_url = url_8 = 'http://%s:%d/xmlrpc/2/' % (HOST, PORT)
        self.xmlrpc_common = xmlrpclib.ServerProxy(url_8 + 'common')
        self.xmlrpc_db = xmlrpclib.ServerProxy(url_8 + 'db')
        self.xmlrpc_object = xmlrpclib.ServerProxy(url_8 + 'object')
    def setUp(self):
        super(HttpCase, self).setUp()
        self.registry.enter_test_mode()
        # setup a magic session_id that will be rollbacked
        self.session = openerp.http.root.session_store.new()
        self.session_id = self.session.sid
        self.session.db = get_db_name()
        openerp.http.root.session_store.save(self.session)
        # setup an url opener helper that carries the test session cookie
        self.opener = urllib2.OpenerDirector()
        self.opener.add_handler(urllib2.UnknownHandler())
        self.opener.add_handler(urllib2.HTTPHandler())
        self.opener.add_handler(urllib2.HTTPSHandler())
        self.opener.add_handler(urllib2.HTTPCookieProcessor())
        self.opener.add_handler(RedirectHandler())
        self.opener.addheaders.append(('Cookie', 'session_id=%s' % self.session_id))
    def tearDown(self):
        self.registry.leave_test_mode()
        super(HttpCase, self).tearDown()
    def url_open(self, url, data=None, timeout=10):
        # Relative URLs are resolved against the local test server.
        if url.startswith('/'):
            url = "http://%s:%s%s" % (HOST, PORT, url)
        return self.opener.open(url, data, timeout)
    def authenticate(self, user, password):
        """Log in over HTTP so the magic session carries the given user."""
        if user is not None:
            url = '/login?%s' % werkzeug.urls.url_encode({'db': get_db_name(),'login': user, 'key': password})
            auth = self.url_open(url)
            assert auth.getcode() < 400, "Auth failure %d" % auth.getcode()
    def phantom_poll(self, phantom, timeout):
        """ Phantomjs Test protocol.
        Use console.log in phantomjs to output test results:
        - for a success: console.log("ok")
        - for an error: console.log("error")
        Other lines are relayed to the test log.
        """
        t0 = datetime.now()
        td = timedelta(seconds=timeout)
        buf = bytearray()
        while True:
            # timeout
            self.assertLess(datetime.now() - t0, td,
                "PhantomJS tests should take less than %s seconds" % timeout)
            # read a byte (0.5s select timeout keeps the deadline check live)
            try:
                ready, _, _ = select.select([phantom.stdout], [], [], 0.5)
            except select.error, e:
                # In Python 2, select.error has no relation to IOError or
                # OSError, and no errno/strerror/filename, only a pair of
                # unnamed arguments (matching errno and strerror)
                err, _ = e.args
                if err == errno.EINTR:
                    continue
                raise
            if ready:
                s = phantom.stdout.read(1)
                if not s:
                    # EOF: phantomjs exited
                    break
                buf.append(s)
            # process lines; a <phantomLog>...</phantomLog> envelope is
            # buffered whole so embedded newlines don't split the message
            if '\n' in buf and (not buf.startswith('<phantomLog>') or '</phantomLog>' in buf):
                if buf.startswith('<phantomLog>'):
                    line = buf[12:buf.index('</phantomLog>')]
                    buf = bytearray()
                else:
                    line, buf = buf.split('\n', 1)
                line = str(line)
                lline = line.lower()
                if lline.startswith(("error", "server application error")):
                    try:
                        # when errors occur the execution stack may be sent as a JSON
                        prefix = lline.index('error') + 6
                        _logger.error("phantomjs: %s", pformat(json.loads(line[prefix:])))
                    except ValueError:
                        line_ = line.split('\n\n')
                        _logger.error("phantomjs: %s", line_[0])
                        # The second part of the log is for debugging
                        if len(line_) > 1:
                            _logger.info("phantomjs: \n%s", line.split('\n\n', 1)[1])
                        pass
                    break
                elif lline.startswith("warning"):
                    _logger.warn("phantomjs: %s", line)
                else:
                    _logger.info("phantomjs: %s", line)
                if line == "ok":
                    # success sentinel emitted by the test page
                    break
    def phantom_run(self, cmd, timeout):
        """Spawn phantomjs with *cmd* and wait for its ok/error protocol."""
        _logger.info('phantom_run executing %s', ' '.join(cmd))
        # Clear phantomjs's persisted localstorage for this host:port so a
        # previous run can't leak state into this one.
        ls_glob = os.path.expanduser('~/.qws/share/data/Ofi Labs/PhantomJS/http_%s_%s.*' % (HOST, PORT))
        for i in glob.glob(ls_glob):
            _logger.info('phantomjs unlink localstorage %s', i)
            os.unlink(i)
        try:
            phantom = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=None)
        except OSError:
            raise unittest2.SkipTest("PhantomJS not found")
        try:
            self.phantom_poll(phantom, timeout)
        finally:
            # kill phantomjs if phantom.exit() wasn't called in the test
            if phantom.poll() is None:
                phantom.terminate()
                phantom.wait()
            self._wait_remaining_requests()
            # we ignore phantomjs return code as we kill it as soon as we have ok
            _logger.info("phantom_run execution finished")
    def _wait_remaining_requests(self):
        # Block until all in-flight HTTP worker threads have finished, so the
        # transaction can be rolled back safely.
        t0 = int(time.time())
        for thread in threading.enumerate():
            if thread.name.startswith('openerp.service.http.request.'):
                while thread.isAlive():
                    # Need a busyloop here as thread.join() masks signals
                    # and would prevent the forced shutdown.
                    thread.join(0.05)
                    time.sleep(0.05)
                    t1 = int(time.time())
                    if t0 != t1:
                        # log at most once per second while waiting
                        _logger.info('remaining requests')
                        openerp.tools.misc.dumpstacks()
                        t0 = t1
    def phantom_js(self, url_path, code, ready="window", login=None, timeout=60, **kw):
        """ Test js code running in the browser
        - optionnally log as 'login'
        - load page given by url_path
        - wait for ready object to be available
        - eval(code) inside the page
        To signal success test do:
        console.log('ok')
        To signal failure do:
        console.log('error')
        If neither are done before timeout test fails.
        """
        options = {
            'port': PORT,
            'db': get_db_name(),
            'url_path': url_path,
            'code': code,
            'ready': ready,
            'timeout' : timeout,
            'login' : login,
            'session_id': self.session_id,
        }
        options.update(kw)
        # password defaults to the login (demo-data convention)
        options.setdefault('password', options.get('login'))
        phantomtest = os.path.join(os.path.dirname(__file__), 'phantomtest.js')
        cmd = ['phantomjs', phantomtest, json.dumps(options)]
        self.phantom_run(cmd, timeout)
def can_import(module):
    """Return ``True`` when *module* is importable, ``False`` otherwise.

    To use with ``unittest.skipUnless`` for tests conditional on *optional*
    dependencies, which may or may not be present but must still be tested
    if possible.
    """
    try:
        importlib.import_module(module)
        return True
    except ImportError:
        return False
|
gpl-3.0
|
EnriqueIzel2/jogo
|
funcoes.py
|
1
|
6140
|
import pygame
import random
from pygame.locals import*
pygame.init()
# Window dimensions.
x = 800
y = 600
janela = pygame.display.set_mode((x, y))
fundo_verde = (4, 166, 1) # background colour of the screen
cor_letra = (0, 0, 0) # font colour
cor_cursor = (50, 60, 50)
cor_cobra = (255, 55, 255)
red = (255, 0, 0)
# snake
corpo = [(80, 80), (80, 100)] # list of (row, col)-style tuples holding the snake's body segments
cabeca = corpo  # NOTE(review): aliases the whole body list, not a head cell — confirm intent (see grafica.cobra)
# game sounds
som2 = pygame.mixer.Sound("pin.ogg")
som2.set_volume(0.7)
# text fonts
fonte_titulo = pygame.font.SysFont("Noto Sans CJK JP Black", 50)
fonte_menu = pygame.font.SysFont("Verdana", 25)
fonte_texto = pygame.font.SysFont("Times New Roman", 15)
fonte_sobre = pygame.font.SysFont("Times New Roman", 25)
# texts rendered once and blitted onto the screen later
titulo = fonte_titulo.render("Snake", True, (cor_letra))
game_over = fonte_titulo.render("Você morreu", True, (cor_letra))
game_over1 = fonte_titulo.render("Pressione 'q' para sair", True, (cor_letra))
game_over2 = fonte_titulo.render("Pressione 'm' para voltar ao menu", True, (cor_letra))
jogador = fonte_menu.render("Jogar", True, (cor_letra))
sobre = fonte_menu.render("Sobre", True, (cor_letra))
instrucao = fonte_texto.render("Pressione 'ENTER' para selecionar uma opção", True, (cor_letra))
instrucao2 = fonte_texto.render("Pressione 'ESC' para voltar", True, (cor_letra))
sobre1 = fonte_sobre.render("*Desenvolvedores:", True, (cor_letra))
sobre2 = fonte_sobre.render("Nayara Cerdeira", True, (cor_letra))
sobre3 = fonte_sobre.render("Enrique Izel", True, (cor_letra))
sobre4 = fonte_sobre.render("Diogo Roberto", True, (cor_letra))
sobre5 = fonte_sobre.render("Natalia Xavier", True, (cor_letra))
sobre6 = fonte_sobre.render("Carlos Eduardo", True, (cor_letra))
sobre7 = fonte_sobre.render("*Orientador:", True, (cor_letra))
sobre8 = fonte_sobre.render("Dr Jucimar Jr", True, (cor_letra))
sobre9 = fonte_sobre.render("Músicas usadas:", True, (cor_letra))
sobre10 = fonte_sobre.render("Rubel - Ben", True, (cor_letra))
sobre11 = fonte_sobre.render("Universidade do Estado do Amazonas", True, (cor_letra))
sobre12 = fonte_sobre.render("Agradecimentos especiais ao", True, (cor_letra))
sobre13 = fonte_sobre.render("veterano Wilson que nos ajudou", True, (cor_letra))
placar = fonte_sobre.render("Score: ", True, (cor_letra))
class grafica():
    """Drawing helpers for the snake game; every method renders onto the
    module-level ``janela`` surface using the module-level text surfaces."""
    def __init__(self):
        pass
    def fundo(self):
        # Clear the screen with the background colour.
        janela.fill(fundo_verde)
    def titulo(self, opcoes): # "opcoes" selects which menu entry gets the cursor highlight
        if opcoes == 0:
            pygame.draw.rect(janela, cor_cursor, (110, 200, 300, 30))
        elif opcoes == 1:
            pygame.draw.rect(janela, cor_cursor, (110, 230, 300, 30))
        janela.blit(titulo, (80, 50))
        janela.blit(jogador, (120, 200))
        janela.blit(sobre, (120, 230))
        janela.blit(instrucao, (10, 550))
        pass
    def sobre(self):
        # Draw the "about" screen (credits and acknowledgements).
        janela.blit(sobre, (350, 50))
        janela.blit(sobre1, (20, 60))
        janela.blit(sobre2, (50, 300))
        janela.blit(sobre3, (50, 200))
        janela.blit(sobre4, (50, 150))
        janela.blit(sobre5, (50, 250))
        janela.blit(sobre6, (50, 100))
        janela.blit(sobre7, (20, 370))
        janela.blit(sobre8, (50, 410))
        janela.blit(sobre9, (500, 60))
        janela.blit(sobre10, (500, 100))
        janela.blit(sobre11, (190, 10))
        janela.blit(sobre12, (450, 300))
        janela.blit(sobre13, (450, 330))
        janela.blit(instrucao2, (10, 550))
    def cobra(self, orientacao, comidinha, pontuacao):
        # Draw and advance the snake; returns the (possibly new) food
        # position and score. Drawing a game-over screen doubles as the
        # death signal to the caller (same tuple is returned unchanged).
        for i in corpo:
            jiboia = pygame.draw.rect(janela, cor_cobra, (i[0]+1, i[1]+1, 18, 18))
            if jiboia.y > 570: # hit the bottom edge
                janela.blit(game_over, (150, 100))
                janela.blit(game_over1, (150, 250))
                janela.blit(game_over2, (150, 300))
                return (comidinha, pontuacao)
            elif jiboia.x > 770.00: # hit the right edge
                janela.blit(game_over, (150, 100))
                janela.blit(game_over1, (150, 250))
                janela.blit(game_over2, (150, 300))
                return (comidinha, pontuacao)
            elif jiboia.y == 1.00: # hit the top edge
                janela.blit(game_over, (150, 100))
                janela.blit(game_over1, (150, 250))
                janela.blit(game_over2, (150, 300))
                return (comidinha, pontuacao)
            elif jiboia.x == 1.00: # hit the left edge
                janela.blit(game_over, (150, 100))
                janela.blit(game_over1, (150, 250))
                janela.blit(game_over2, (150, 300))
                return (comidinha, pontuacao)
            elif corpo.count(cabeca) > 0:
                # NOTE(review): ``cabeca`` is bound to the ``corpo`` list itself,
                # while ``corpo`` contains tuples, so this count is always 0 —
                # the self-collision branch looks unreachable; confirm intent.
                print("morreu")
                return (comidinha, pontuacao)
            if corpo.index(i) == len(corpo)-1:
                # Last segment reached: append the new head in the direction
                # of travel (orientacao is an angle: 0=E, 90=N, 180=W, 270=S).
                if orientacao == 0:
                    corpo.append((i[0] + 20, i[1]))
                if orientacao == 90:
                    corpo.append((i[0], i[1]-20))
                if orientacao == 270:
                    corpo.append((i[0], i[1]+20))
                if orientacao == 180:
                    corpo.append((i[0] - 20, i[1]))
                if i != comidinha:
                    # No food eaten: drop the tail so length stays constant.
                    del corpo[0]
                    return (comidinha, pontuacao)
                    break  # NOTE(review): unreachable — follows a return
                else:
                    # Food eaten: keep the tail (snake grows) and respawn food.
                    pontuacao += 1
                    som2.play()
                    return ((random.randint(0, 39)*20, random.randint(0, 29)*20), pontuacao)
                    break  # NOTE(review): unreachable — follows a return
    def comida(self, comidinha):
        # Draw the food square.
        pygame.draw.rect(janela, cor_cursor, (comidinha[0], comidinha[1], 18, 18))
    def placar(self, pontuacao):
        # Draw the score label and current score in the top-left corner.
        janela.blit(placar, (0, 0))
        pontos = fonte_sobre.render(str(pontuacao), True, cor_letra)
        janela.blit(pontos, (80, 0))
    def limite(self):
        # Draw the playfield border (zero-width/height rects along the edges).
        pygame.draw.rect(janela, red, [0, 0, 800, 0])
        pygame.draw.rect(janela, red, [0, 0, 0, 600])
        pygame.draw.rect(janela, red, [0, 600, 800, 0])
        pygame.draw.rect(janela, red, [800, 0, 0, 800])
|
apache-2.0
|
matrix-org/synapse
|
tests/storage/test_end_to_end_keys.py
|
1
|
3855
|
# Copyright 2016-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.unittest import HomeserverTestCase
class EndToEndKeyStoreTestCase(HomeserverTestCase):
    """Tests for storing and retrieving end-to-end device keys."""
    def prepare(self, reactor, clock, hs):
        self.store = hs.get_datastore()
    def test_key_without_device_name(self):
        # A key uploaded for a device with no display name is returned as-is.
        now = 1470174257070
        json = {"key": "value"}
        self.get_success(self.store.store_device("user", "device", None))
        self.get_success(self.store.set_e2e_device_keys("user", "device", now, json))
        res = self.get_success(
            self.store.get_e2e_device_keys_for_cs_api((("user", "device"),))
        )
        self.assertIn("user", res)
        self.assertIn("device", res["user"])
        dev = res["user"]["device"]
        self.assertDictContainsSubset(json, dev)
    def test_reupload_key(self):
        # Re-uploading an identical key must report "nothing changed".
        now = 1470174257070
        json = {"key": "value"}
        self.get_success(self.store.store_device("user", "device", None))
        changed = self.get_success(
            self.store.set_e2e_device_keys("user", "device", now, json)
        )
        self.assertTrue(changed)
        # If we try to upload the same key then we should be told nothing
        # changed
        changed = self.get_success(
            self.store.set_e2e_device_keys("user", "device", now, json)
        )
        self.assertFalse(changed)
    def test_get_key_with_device_name(self):
        # The device display name is merged into the key's "unsigned" section.
        now = 1470174257070
        json = {"key": "value"}
        self.get_success(self.store.set_e2e_device_keys("user", "device", now, json))
        self.get_success(self.store.store_device("user", "device", "display_name"))
        res = self.get_success(
            self.store.get_e2e_device_keys_for_cs_api((("user", "device"),))
        )
        self.assertIn("user", res)
        self.assertIn("device", res["user"])
        dev = res["user"]["device"]
        self.assertDictContainsSubset(
            {"key": "value", "unsigned": {"device_display_name": "display_name"}}, dev
        )
    def test_multiple_devices(self):
        # Only the requested (user, device) pairs are returned.
        now = 1470174257070
        self.get_success(self.store.store_device("user1", "device1", None))
        self.get_success(self.store.store_device("user1", "device2", None))
        self.get_success(self.store.store_device("user2", "device1", None))
        self.get_success(self.store.store_device("user2", "device2", None))
        self.get_success(
            self.store.set_e2e_device_keys("user1", "device1", now, {"key": "json11"})
        )
        self.get_success(
            self.store.set_e2e_device_keys("user1", "device2", now, {"key": "json12"})
        )
        self.get_success(
            self.store.set_e2e_device_keys("user2", "device1", now, {"key": "json21"})
        )
        self.get_success(
            self.store.set_e2e_device_keys("user2", "device2", now, {"key": "json22"})
        )
        res = self.get_success(
            self.store.get_e2e_device_keys_for_cs_api(
                (("user1", "device1"), ("user2", "device2"))
            )
        )
        self.assertIn("user1", res)
        self.assertIn("device1", res["user1"])
        self.assertNotIn("device2", res["user1"])
        self.assertIn("user2", res)
        self.assertNotIn("device1", res["user2"])
        self.assertIn("device2", res["user2"])
|
apache-2.0
|
drybjed/ansible-modules-extras
|
system/osx_defaults.py
|
77
|
12888
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, GeekChimp - Franck Nijhof <franck@geekchimp.com>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: osx_defaults
author: Franck Nijhof (@frenck)
short_description: osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible
description:
- osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible scripts.
Mac OS X applications and other programs use the defaults system to record user preferences and other
information that must be maintained when the applications aren't running (such as default font for new
documents, or the position of an Info panel).
version_added: "2.0"
options:
domain:
description:
- The domain is a domain name of the form com.companyname.appname.
required: false
default: NSGlobalDomain
key:
description:
- The key of the user preference
required: true
type:
description:
- The type of value to write.
required: false
default: string
choices: [ "array", "bool", "boolean", "date", "float", "int", "integer", "string" ]
array_add:
description:
- Add new elements to the array for a key which has an array as its value.
required: false
default: false
choices: [ "true", "false" ]
value:
description:
- The value to write. Only required when state = present.
required: false
default: null
state:
description:
- The state of the user defaults
required: false
default: present
choices: [ "present", "absent" ]
notes:
- Apple Mac caches defaults. You may need to logout and login to apply the changes.
'''
EXAMPLES = '''
- osx_defaults: domain=com.apple.Safari key=IncludeInternalDebugMenu type=bool value=true state=present
- osx_defaults: domain=NSGlobalDomain key=AppleMeasurementUnits type=string value=Centimeters state=present
- osx_defaults: key=AppleMeasurementUnits type=string value=Centimeters
- osx_defaults:
key: AppleLanguages
type: array
value: ["en", "nl"]
- osx_defaults: domain=com.geekchimp.macable key=ExampleKeyToRemove state=absent
'''
from datetime import datetime
# exceptions --------------------------------------------------------------- {{{
class OSXDefaultsException(Exception):
    """Raised for any error while reading, writing or deleting OS X defaults."""
    pass
# /exceptions -------------------------------------------------------------- }}}
# class MacDefaults -------------------------------------------------------- {{{
class OSXDefaults(object):
""" Class to manage Mac OS user defaults """
# init ---------------------------------------------------------------- {{{
""" Initialize this module. Finds 'defaults' executable and preps the parameters """
def __init__(self, **kwargs):
# Initial var for storing current defaults value
self.current_value = None
# Just set all given parameters
for key, val in kwargs.iteritems():
setattr(self, key, val)
# Try to find the defaults executable
self.executable = self.module.get_bin_path(
'defaults',
required=False,
opt_dirs=self.path.split(':'),
)
if not self.executable:
raise OSXDefaultsException("Unable to locate defaults executable.")
# When state is present, we require a parameter
if self.state == "present" and self.value is None:
raise OSXDefaultsException("Missing value parameter")
# Ensure the value is the correct type
self.value = self._convert_type(self.type, self.value)
# /init --------------------------------------------------------------- }}}
# tools --------------------------------------------------------------- {{{
""" Converts value to given type """
def _convert_type(self, type, value):
if type == "string":
return str(value)
elif type in ["bool", "boolean"]:
if value.lower() in [True, 1, "true", "1", "yes"]:
return True
elif value.lower() in [False, 0, "false", "0", "no"]:
return False
raise OSXDefaultsException("Invalid boolean value: {0}".format(repr(value)))
elif type == "date":
try:
return datetime.strptime(value.split("+")[0].strip(), "%Y-%m-%d %H:%M:%S")
except ValueError:
raise OSXDefaultsException(
"Invalid date value: {0}. Required format yyy-mm-dd hh:mm:ss.".format(repr(value))
)
elif type in ["int", "integer"]:
if not str(value).isdigit():
raise OSXDefaultsException("Invalid integer value: {0}".format(repr(value)))
return int(value)
elif type == "float":
try:
value = float(value)
except ValueError:
raise OSXDefaultsException("Invalid float value: {0}".format(repr(value)))
return value
elif type == "array":
if not isinstance(value, list):
raise OSXDefaultsException("Invalid value. Expected value to be an array")
return value
raise OSXDefaultsException('Type is not supported: {0}'.format(type))
""" Converts array output from defaults to an list """
@staticmethod
def _convert_defaults_str_to_list(value):
# Split output of defaults. Every line contains a value
value = value.splitlines()
# Remove first and last item, those are not actual values
value.pop(0)
value.pop(-1)
# Remove extra spaces and comma (,) at the end of values
value = [re.sub(',$', '', x.strip(' ')) for x in value]
return value
# /tools -------------------------------------------------------------- }}}
# commands ------------------------------------------------------------ {{{
""" Reads value of this domain & key from defaults """
def read(self):
# First try to find out the type
rc, out, err = self.module.run_command([self.executable, "read-type", self.domain, self.key])
# If RC is 1, the key does not exists
if rc == 1:
return None
# If the RC is not 0, then terrible happened! Ooooh nooo!
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key type from defaults: " + out)
# Ok, lets parse the type from output
type = out.strip().replace('Type is ', '')
# Now get the current value
rc, out, err = self.module.run_command([self.executable, "read", self.domain, self.key])
# Strip output
out = out.strip()
# An non zero RC at this point is kinda strange...
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key value from defaults: " + out)
# Convert string to list when type is array
if type == "array":
out = self._convert_defaults_str_to_list(out)
# Store the current_value
self.current_value = self._convert_type(type, out)
""" Writes value to this domain & key to defaults """
def write(self):
# We need to convert some values so the defaults commandline understands it
if type(self.value) is bool:
if self.value:
value = "TRUE"
else:
value = "FALSE"
elif type(self.value) is int or type(self.value) is float:
value = str(self.value)
elif self.array_add and self.current_value is not None:
value = list(set(self.value) - set(self.current_value))
elif isinstance(self.value, datetime):
value = self.value.strftime('%Y-%m-%d %H:%M:%S')
else:
value = self.value
# When the type is array and array_add is enabled, morph the type :)
if self.type == "array" and self.array_add:
self.type = "array-add"
# All values should be a list, for easy passing it to the command
if not isinstance(value, list):
value = [value]
rc, out, err = self.module.run_command([self.executable, 'write', self.domain, self.key, '-' + self.type] + value)
if rc != 0:
raise OSXDefaultsException('An error occurred while writing value to defaults: ' + out)
""" Deletes defaults key from domain """
def delete(self):
rc, out, err = self.module.run_command([self.executable, 'delete', self.domain, self.key])
if rc != 0:
raise OSXDefaultsException("An error occurred while deleting key from defaults: " + out)
# /commands ----------------------------------------------------------- }}}
# run ----------------------------------------------------------------- {{{
def run(self):
    """Apply the requested state and report whether anything changed.

    :returns: True when a change was made, False when the system already
        matched the requested state.
    :raises OSXDefaultsException: on type mismatch or command failure.

    FIX: moved the docstring inside the ``def`` (it was a no-op string
    statement above it) and removed the stray ``print`` debug statement —
    an Ansible module must not write free text to stdout, as it corrupts
    the JSON the module protocol expects.
    """
    # Get the current value from defaults
    self.read()
    # Handle absent state
    if self.state == "absent":
        if self.current_value is None:
            return False
        self.delete()
        return True
    # There is a type mismatch! Given type does not match the type in defaults
    if self.current_value is not None and type(self.current_value) is not type(self.value):
        raise OSXDefaultsException("Type mismatch. Type in defaults: " + type(self.current_value).__name__)
    # Current value matches the given value. Nothing need to be done. Arrays need extra care
    if self.type == "array" and self.current_value is not None and not self.array_add and \
            set(self.current_value) == set(self.value):
        return False
    elif self.type == "array" and self.current_value is not None and self.array_add and \
            len(list(set(self.value) - set(self.current_value))) == 0:
        return False
    elif self.current_value == self.value:
        return False
    # Change/Create/Set given key/value for domain in defaults
    self.write()
    return True
# /run ---------------------------------------------------------------- }}}
# /class MacDefaults ------------------------------------------------------ }}}
# main -------------------------------------------------------------------- {{{
def main():
    """Ansible entry point: declare the argument spec, build an
    OSXDefaults helper and report the changed/failed result as JSON."""
    module = AnsibleModule(
        argument_spec=dict(
            # Defaults domain to operate on; NSGlobalDomain covers the
            # system-wide preferences.
            domain=dict(
                default="NSGlobalDomain",
                required=False,
            ),
            key=dict(
                default=None,
            ),
            # Value type as understood by defaults(1); both short and long
            # spellings are accepted and normalized elsewhere.
            type=dict(
                default="string",
                required=False,
                choices=[
                    "array",
                    "bool",
                    "boolean",
                    "date",
                    "float",
                    "int",
                    "integer",
                    "string",
                ],
            ),
            # When True and type is "array", append instead of replace.
            array_add=dict(
                default=False,
                required=False,
                choices=BOOLEANS,
            ),
            value=dict(
                default=None,
                required=False,
            ),
            state=dict(
                default="present",
                required=False,
                choices=[
                    "absent", "present"
                ],
            ),
            # Search path used to locate the ``defaults`` executable.
            path=dict(
                default="/usr/bin:/usr/local/bin",
                required=False,
            )
        ),
        supports_check_mode=True,
    )
    domain = module.params['domain']
    key = module.params['key']
    type = module.params['type']  # NOTE(review): shadows the ``type`` builtin
    array_add = module.params['array_add']
    value = module.params['value']
    state = module.params['state']
    path = module.params['path']
    try:
        defaults = OSXDefaults(module=module, domain=domain, key=key, type=type,
                               array_add=array_add, value=value, state=state, path=path)
        changed = defaults.run()
        module.exit_json(changed=changed)
    except OSXDefaultsException, e:
        module.fail_json(msg=e.message)
# /main ------------------------------------------------------------------- }}}
from ansible.module_utils.basic import *
main()
|
gpl-3.0
|
jonathanslenders/pymux-test
|
pymux/rc.py
|
2
|
2237
|
"""
Initial configuration.
"""
from __future__ import unicode_literals
# Public names exported by ``from pymux.rc import *``.
# FIX: the original value was ('STARTUP_COMMANDS') -- a parenthesized
# string, not a 1-tuple, because the trailing comma was missing -- so
# ``__all__`` was effectively a sequence of single characters.
__all__ = (
    'STARTUP_COMMANDS',
)
STARTUP_COMMANDS = """
bind-key '"' split-window -v
bind-key % split-window -h
bind-key c new-window
bind-key Right select-pane -R
bind-key Left select-pane -L
bind-key Up select-pane -U
bind-key Down select-pane -D
bind-key C-l select-pane -R
bind-key C-h select-pane -L
bind-key C-j select-pane -D
bind-key C-k select-pane -U
bind-key ; last-pane
bind-key ! break-pane
bind-key d detach-client
bind-key t clock-mode
bind-key Space next-layout
bind-key C-z suspend-client
bind-key z resize-pane -Z
bind-key k resize-pane -U 2
bind-key j resize-pane -D 2
bind-key h resize-pane -L 2
bind-key l resize-pane -R 2
bind-key q display-panes
bind-key C-Up resize-pane -U 2
bind-key C-Down resize-pane -D 2
bind-key C-Left resize-pane -L 2
bind-key C-Right resize-pane -R 2
bind-key M-Up resize-pane -U 5
bind-key M-Down resize-pane -D 5
bind-key M-Left resize-pane -L 5
bind-key M-Right resize-pane -R 5
bind-key : command-prompt
bind-key 0 select-window -t :0
bind-key 1 select-window -t :1
bind-key 2 select-window -t :2
bind-key 3 select-window -t :3
bind-key 4 select-window -t :4
bind-key 5 select-window -t :5
bind-key 6 select-window -t :6
bind-key 7 select-window -t :7
bind-key 8 select-window -t :8
bind-key 9 select-window -t :9
bind-key n next-window
bind-key p previous-window
bind-key o select-pane -t :.+
bind-key { swap-pane -U
bind-key } swap-pane -D
bind-key x confirm-before -p "kill-pane #P?" kill-pane
bind-key & confirm-before -p "kill-window #W?" kill-window
bind-key C-o rotate-window
bind-key M-o rotate-window -D
bind-key C-b send-prefix
bind-key . command-prompt "move-window -t '%%'"
bind-key [ copy-mode
bind-key ] paste-buffer
bind-key ? list-keys
# Layouts.
bind-key M-1 select-layout even-horizontal
bind-key M-2 select-layout even-vertical
bind-key M-3 select-layout main-horizontal
bind-key M-4 select-layout main-vertical
bind-key M-5 select-layout tiled
# Renaming stuff.
bind-key , command-prompt -I #W "rename-window '%%'"
#bind-key "'" command-prompt -I #W "rename-pane '%%'"
bind-key "'" command-prompt -p index "select-window -t ':%%'"
bind-key . command-prompt "move-window -t '%%'"
"""
|
bsd-3-clause
|
stephane-martin/salt-debian-packaging
|
salt-2016.3.2/salt/runners/asam.py
|
2
|
11014
|
# -*- coding: utf-8 -*-
'''
Novell ASAM Runner
==================
.. versionadded:: Beryllium
Runner to interact with Novell ASAM Fan-Out Driver
:codeauthor: Nitin Madhok <nmadhok@clemson.edu>
To use this runner, set up the Novell Fan-Out Driver URL, username and password in the
master configuration at ``/etc/salt/master`` or ``/etc/salt/master.d/asam.conf``:
.. code-block:: yaml
asam:
prov1.domain.com
username: "testuser"
password: "verybadpass"
prov2.domain.com
username: "testuser"
password: "verybadpass"
.. note::
Optionally, ``protocol`` and ``port`` can be specified if the Fan-Out Driver server
is not using the defaults. Default is ``protocol: https`` and ``port: 3451``.
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import third party libs
# Availability flags for the optional third-party dependencies; both are
# checked by __virtual__() before this runner is loaded.
HAS_LIBS = False
HAS_SIX = False
try:
    import requests
    from salt.ext.six.moves.html_parser import HTMLParser  # pylint: disable=E0611

    try:
        import salt.ext.six as six
        HAS_SIX = True
    except ImportError:
        # Salt version <= 2014.7.0
        try:
            import six
        except ImportError:
            pass

    HAS_LIBS = True

    class ASAMHTMLParser(HTMLParser):  # fix issue #30477
        """HTML parser that collects the href target of every <a> tag fed
        to it in ``self.data``."""

        def __init__(self):
            HTMLParser.__init__(self)
            self.data = []

        def handle_starttag(self, tag, attrs):
            # Only anchor tags are of interest; the first non-href
            # attribute aborts the scan for that tag.
            if tag != "a":
                return
            for attr in attrs:
                if attr[0] != "href":
                    return
                self.data.append(attr[1])
except ImportError:
    pass
log = logging.getLogger(__name__)
def __virtual__():
    '''
    Check for ASAM Fan-Out driver configuration in master config file
    or directory and load runner only if it is specified
    '''
    # Loadable only when the optional libraries imported successfully and
    # the master configuration actually carries an 'asam' section.
    return bool(HAS_LIBS and HAS_SIX and _get_asam_configuration() is not False)
def _get_asam_configuration(driver_url=''):
    '''
    Return the configuration read from the master configuration
    file or directory
    '''
    # The 'asam' key maps each Fan-Out Driver server name to a dict with
    # credentials and optional protocol/port overrides.
    asam_config = __opts__['asam'] if 'asam' in __opts__ else None
    if asam_config:
        try:
            for asam_server, service_config in six.iteritems(asam_config):
                username = service_config.get('username', None)
                password = service_config.get('password', None)
                # Defaults documented in the module header: https / 3451.
                protocol = service_config.get('protocol', 'https')
                port = service_config.get('port', 3451)
                if not username or not password:
                    log.error(
                        "Username or Password has not been specified in the master "
                        "configuration for {0}".format(asam_server)
                    )
                    return False
                # Pre-built endpoint URLs used by the public runner
                # functions below.
                ret = {
                    'platform_edit_url': "{0}://{1}:{2}/config/PlatformEdit.html".format(protocol, asam_server, port),
                    'platform_config_url': "{0}://{1}:{2}/config/PlatformConfig.html".format(protocol, asam_server, port),
                    'platformset_edit_url': "{0}://{1}:{2}/config/PlatformSetEdit.html".format(protocol, asam_server, port),
                    'platformset_config_url': "{0}://{1}:{2}/config/PlatformSetConfig.html".format(protocol, asam_server, port),
                    'username': username,
                    'password': password
                }
                # An empty driver_url means "first configured server wins".
                if (not driver_url) or (driver_url == asam_server):
                    return ret
        except Exception as exc:
            log.error(
                "Exception encountered: {0}".format(exc)
            )
            return False
        if driver_url:
            log.error(
                "Configuration for {0} has not been specified in the master "
                "configuration".format(driver_url)
            )
            return False
    return False
def _make_post_request(url, data, auth, verify=True):
    """POST *data* to *url* with basic-auth *auth* and return the response
    body split into lines; raise for any non-OK HTTP status."""
    response = requests.post(url, data=data, auth=auth, verify=verify)
    if response.status_code == requests.codes.ok:
        return response.text.split('\n')
    response.raise_for_status()
def _parse_html_content(html_content):
    """Feed every non-``<META`` line of *html_content* to an
    ASAMHTMLParser and return the parser (hrefs collected in ``.data``).

    FIX: the original called ``html_content.remove(line)`` while iterating
    over ``html_content``, which silently skips the element that follows
    each removed line.  We simply skip META lines instead; the input list
    is left untouched (no caller relied on the mutation).
    """
    parser = ASAMHTMLParser()
    for line in html_content:
        if not line.startswith("<META"):
            parser.feed(line)
    return parser
def _get_platformset_name(data, platform_name):
for item in data:
if platform_name in item and item.startswith('PlatformEdit.html?'):
parameter_list = item.split('&')
for parameter in parameter_list:
if parameter.startswith("platformSetName"):
return parameter.split('=')[1]
return None
def _get_platforms(data):
platform_list = []
for item in data:
if item.startswith('PlatformEdit.html?'):
parameter_list = item.split('PlatformEdit.html?', 1)[1].split('&')
for parameter in parameter_list:
if parameter.startswith("platformName"):
platform_list.append(parameter.split('=')[1])
return platform_list
def _get_platform_sets(data):
platform_set_list = []
for item in data:
if item.startswith('PlatformSetEdit.html?'):
parameter_list = item.split('PlatformSetEdit.html?', 1)[1].split('&')
for parameter in parameter_list:
if parameter.startswith("platformSetName"):
platform_set_list.append(parameter.split('=')[1].replace('%20', ' '))
return platform_set_list
def remove_platform(name, server_url):
    '''
    To remove specified ASAM platform from the Novell Fan-Out Driver

    CLI Example:

    .. code-block:: bash

        salt-run asam.remove_platform my-test-vm prov1.domain.com
    '''
    config = _get_asam_configuration(server_url)
    if not config:
        return False

    url = config['platform_config_url']
    data = {
        'manual': 'false',
    }
    auth = (
        config['username'],
        config['password']
    )

    # First request: fetch the platform listing to find which platform set
    # the platform belongs to.
    try:
        html_content = _make_post_request(url, data, auth, verify=False)
    except Exception as exc:
        err_msg = "Failed to look up existing platforms on {0}".format(server_url)
        log.error("{0}:\n{1}".format(err_msg, exc))
        return {name: err_msg}

    parser = _parse_html_content(html_content)
    platformset_name = _get_platformset_name(parser.data, name)

    if platformset_name:
        log.debug(platformset_name)
        data['platformName'] = name
        data['platformSetName'] = str(platformset_name)
        data['postType'] = 'platformRemove'
        data['Submit'] = 'Yes'
        # Second request: actually remove the platform.
        try:
            html_content = _make_post_request(url, data, auth, verify=False)
        except Exception as exc:
            # FIX: the original used the positional field {1} with a single
            # format() argument, which raised IndexError on this error path
            # instead of producing the message.
            err_msg = "Failed to delete platform from {0}".format(server_url)
            log.error("{0}:\n{1}".format(err_msg, exc))
            return {name: err_msg}

        # Re-parse the listing: if the platform is still present, the
        # delete did not take effect.
        parser = _parse_html_content(html_content)
        platformset_name = _get_platformset_name(parser.data, name)
        if platformset_name:
            return {name: "Failed to delete platform from {0}".format(server_url)}
        else:
            return {name: "Successfully deleted platform from {0}".format(server_url)}
    else:
        return {name: "Specified platform name does not exist on {0}".format(server_url)}
def list_platforms(server_url):
    '''
    To list all ASAM platforms present on the Novell Fan-Out Driver

    CLI Example:

    .. code-block:: bash

        salt-run asam.list_platforms prov1.domain.com
    '''
    config = _get_asam_configuration(server_url)
    if not config:
        return False

    auth = (config['username'], config['password'])
    request_data = {'manual': 'false'}

    try:
        html_content = _make_post_request(config['platform_config_url'],
                                          request_data, auth, verify=False)
    except Exception as exc:
        err_msg = "Failed to look up existing platforms"
        log.error("{0}:\n{1}".format(err_msg, exc))
        return {server_url: err_msg}

    platform_list = _get_platforms(_parse_html_content(html_content).data)
    if platform_list:
        return {server_url: platform_list}
    return {server_url: "No existing platforms found"}
def list_platform_sets(server_url):
    '''
    To list all ASAM platform sets present on the Novell Fan-Out Driver

    CLI Example:

    .. code-block:: bash

        salt-run asam.list_platform_sets prov1.domain.com
    '''
    config = _get_asam_configuration(server_url)
    if not config:
        return False

    auth = (config['username'], config['password'])
    request_data = {'manual': 'false'}

    try:
        html_content = _make_post_request(config['platformset_config_url'],
                                          request_data, auth, verify=False)
    except Exception as exc:
        err_msg = "Failed to look up existing platform sets"
        log.error("{0}:\n{1}".format(err_msg, exc))
        return {server_url: err_msg}

    platform_set_list = _get_platform_sets(_parse_html_content(html_content).data)
    if platform_set_list:
        return {server_url: platform_set_list}
    return {server_url: "No existing platform sets found"}
def add_platform(name, platform_set, server_url):
    '''
    To add an ASAM platform using the specified ASAM platform set on the Novell
    Fan-Out Driver

    CLI Example:

    .. code-block:: bash

        salt-run asam.add_platform my-test-vm test-platform-set prov1.domain.com
    '''
    config = _get_asam_configuration(server_url)
    if not config:
        return False

    # Refuse duplicates and unknown platform sets up front.
    if name in list_platforms(server_url)[server_url]:
        return {name: "Specified platform already exists on {0}".format(server_url)}
    if platform_set not in list_platform_sets(server_url)[server_url]:
        return {name: "Specified platform set does not exist on {0}".format(server_url)}

    auth = (config['username'], config['password'])
    request_data = {
        'platformName': name,
        'platformSetName': platform_set,
        'manual': 'false',
        'previousURL': '/config/platformAdd.html',
        'postType': 'PlatformAdd',
        'Submit': 'Apply',
    }

    try:
        _make_post_request(config['platform_edit_url'], request_data, auth, verify=False)
    except Exception as exc:
        err_msg = "Failed to add platform on {0}".format(server_url)
        log.error("{0}:\n{1}".format(err_msg, exc))
        return {name: err_msg}

    # Verify by re-listing the platforms.
    if name in list_platforms(server_url)[server_url]:
        return {name: "Successfully added platform on {0}".format(server_url)}
    return {name: "Failed to add platform on {0}".format(server_url)}
|
apache-2.0
|
RobertABT/heightmap
|
build/scipy/scipy/lib/lapack/tests/test_gesv.py
|
13
|
3510
|
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import TestCase, assert_array_almost_equal, dec, \
assert_equal, assert_, run_module_suite
from common import FUNCS_TP, FLAPACK_IS_EMPTY, CLAPACK_IS_EMPTY, FUNCS_FLAPACK, \
FUNCS_CLAPACK, PREC
# Small symmetric test matrices for the generalized eigenproblem tests:
# A is the system matrix; B is symmetric and diagonally dominant (hence
# positive definite, as the *sygv routines require).
A = np.array([[1,2,3],[2,2,3],[3,3,6]])
B = np.array([[10,-1,1],[-1,8,-2],[1,-2,6]])
class TestSygv(TestCase):
    """Tests for the ssygv/dsygv generalized symmetric eigensolvers from
    both the Fortran and C LAPACK wrappers."""

    def _test_base(self, func, lang, itype):
        # Shared driver: run the solver named by ``func`` from the
        # Fortran ('F') or C ('C') wrapper table and check each
        # eigenpair's residual at the precision recorded for its dtype.
        tp = FUNCS_TP[func]
        a = A.astype(tp)
        b = B.astype(tp)
        if lang == 'C':
            f = FUNCS_CLAPACK[func]
        elif lang == 'F':
            f = FUNCS_FLAPACK[func]
        else:
            raise ValueError("Lang %s ??" % lang)

        w, v, info = f(a, b, itype=itype)
        assert_(not info, msg=repr(info))
        for i in range(3):
            # itype selects the generalized problem form:
            #   1: A x = w B x,   2: A B x = w x,   3: B A x = w x
            if itype == 1:
                assert_array_almost_equal(np.dot(a,v[:,i]), w[i]*np.dot(b,v[:,i]),
                                          decimal=PREC[tp])
            elif itype == 2:
                assert_array_almost_equal(np.dot(a,np.dot(b,v[:,i])), w[i]*v[:,i],
                                          decimal=PREC[tp])
            elif itype == 3:
                # One decimal looser for itype 3.
                assert_array_almost_equal(np.dot(b,np.dot(a,v[:,i])),
                                          w[i]*v[:,i], decimal=PREC[tp] - 1)
            else:
                raise ValueError(itype)

    @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test")
    def test_ssygv_1(self):
        self._test_base('ssygv', 'F', 1)

    @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test")
    def test_ssygv_2(self):
        self._test_base('ssygv', 'F', 2)

    @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test")
    def test_ssygv_3(self):
        self._test_base('ssygv', 'F', 3)

    @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test")
    def test_dsygv_1(self):
        self._test_base('dsygv', 'F', 1)

    @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test")
    def test_dsygv_2(self):
        self._test_base('dsygv', 'F', 2)

    @dec.skipif(FLAPACK_IS_EMPTY, "Flapack empty, skip flapack test")
    def test_dsygv_3(self):
        self._test_base('dsygv', 'F', 3)

    @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["ssygv"],
                "Clapack empty, skip flapack test")
    def test_clapack_ssygv_1(self):
        self._test_base('ssygv', 'C', 1)

    @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["ssygv"],
                "Clapack empty, skip flapack test")
    def test_clapack_ssygv_2(self):
        self._test_base('ssygv', 'C', 2)

    @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["ssygv"],
                "Clapack empty, skip flapack test")
    def test_clapack_ssygv_3(self):
        self._test_base('ssygv', 'C', 3)

    @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["dsygv"],
                "Clapack empty, skip flapack test")
    def test_clapack_dsygv_1(self):
        self._test_base('dsygv', 'C', 1)

    @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["dsygv"],
                "Clapack empty, skip flapack test")
    def test_clapack_dsygv_2(self):
        self._test_base('dsygv', 'C', 2)

    @dec.skipif(CLAPACK_IS_EMPTY or not FUNCS_CLAPACK["dsygv"],
                "Clapack empty, skip flapack test")
    def test_clapack_dsygv_3(self):
        self._test_base('dsygv', 'C', 3)
if __name__ == "__main__":
run_module_suite()
|
mit
|
bakerlover/project4
|
lib/jinja2/_compat.py
|
638
|
4042
|
# -*- coding: utf-8 -*-
"""
jinja2._compat
~~~~~~~~~~~~~~
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
:copyright: Copyright 2013 by the Jinja team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
PY2 = sys.version_info[0] == 2
PYPY = hasattr(sys, 'pypy_translation_info')
_identity = lambda x: x
# Version-dependent aliases.  Each name below exists in both branches so
# the rest of the package can use them unconditionally.
if not PY2:
    unichr = chr
    range_type = range
    text_type = str
    string_types = (str,)

    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())

    import pickle
    from io import BytesIO, StringIO
    NativeStringIO = StringIO

    def reraise(tp, value, tb=None):
        # Re-raise preserving (or replacing) the original traceback.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value

    ifilter = filter
    imap = map
    izip = zip
    intern = sys.intern

    # On py3 these class decorators are no-ops.
    implements_iterator = _identity
    implements_to_string = _identity
    encode_filename = _identity

    get_next = lambda x: x.__next__
else:
    unichr = unichr
    text_type = unicode
    range_type = xrange
    string_types = (str, unicode)

    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()

    import cPickle as pickle
    from cStringIO import StringIO as BytesIO, StringIO
    NativeStringIO = BytesIO

    # The three-argument raise is a syntax error on py3, so it must be
    # hidden inside exec().
    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')

    from itertools import imap, izip, ifilter
    intern = intern

    def implements_iterator(cls):
        # Map the py3 __next__ protocol onto py2's next().
        cls.next = cls.__next__
        del cls.__next__
        return cls

    def implements_to_string(cls):
        # Derive py2 __str__ (bytes) from the class's unicode __str__.
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
        return cls

    get_next = lambda x: x.next

    def encode_filename(filename):
        # py2 filesystem APIs want byte strings.
        if isinstance(filename, unicode):
            return filename.encode('utf-8')
        return filename
# ``next()`` builtin shim for very old interpreters that lack it.
try:
    next = next
except NameError:
    def next(it):
        return it.next()
def with_metaclass(meta, *bases):
    """Return a temporary base class that applies *meta* at subclass
    creation time; works identically on Python 2 and 3."""
    # This requires a bit of explanation: the basic idea is to make a
    # dummy metaclass for one level of class instanciation that replaces
    # itself with the actual metaclass.  Because of internal type checks
    # we also need to make sure that we downgrade the custom metaclass
    # for one level to something closer to type (that's why __call__ and
    # __init__ comes back from type etc.).
    #
    # This has the advantage over six.with_metaclass in that it does not
    # introduce dummy classes into the final MRO.
    class metaclass(meta):
        __call__ = type.__call__
        __init__ = type.__init__
        def __new__(cls, name, this_bases, d):
            # this_bases is None only for the temporary_class call below.
            if this_bases is None:
                return type.__new__(cls, name, (), d)
            return meta(name, bases, d)
    return metaclass('temporary_class', None, {})
# Mapping ABC with a fallback tuple for pythons without collections.Mapping.
try:
    from collections import Mapping as mapping_types
except ImportError:
    import UserDict
    mapping_types = (UserDict.UserDict, UserDict.DictMixin, dict)

# common types. These do exist in the special types module too which however
# does not exist in IronPython out of the box. Also that way we don't have
# to deal with implementation specific stuff here
class _C(object):
    def method(self): pass

def _func():
    yield None

function_type = type(_func)
generator_type = type(_func())
method_type = type(_C().method)
code_type = type(_C.method.__code__)

# Capture a live traceback to derive the traceback and frame types.
try:
    raise TypeError()
except TypeError:
    _tb = sys.exc_info()[2]
    traceback_type = type(_tb)
    frame_type = type(_tb.tb_frame)
# URL quoting: py3 location first, falling back to the py2 urllib name.
try:
    from urllib.parse import quote_from_bytes as url_quote
except ImportError:
    from urllib import quote as url_quote

# Lock allocation: low-level ``thread`` (py2) first, then ``threading``,
# finally the single-threaded dummy implementation.
try:
    from thread import allocate_lock
except ImportError:
    try:
        from threading import Lock as allocate_lock
    except ImportError:
        from dummy_thread import allocate_lock
|
apache-2.0
|
the100rabh/Barcamp-Bangalore-Android-App
|
gcm_flask/werkzeug/debug/__init__.py
|
81
|
7867
|
# -*- coding: utf-8 -*-
"""
werkzeug.debug
~~~~~~~~~~~~~~
WSGI application traceback debugger.
:copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import mimetypes
from os.path import join, dirname, basename, isfile
from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response
from werkzeug.debug.tbtools import get_current_traceback, render_console_html
from werkzeug.debug.console import Console
from werkzeug.security import gen_salt
#: import this here because it once was documented as being available
#: from this module. In case there are users left ...
from werkzeug.debug.repr import debug_repr
class _ConsoleFrame(object):
    """Helper class so that we can reuse the frame console code for the
    standalone console.
    """

    def __init__(self, namespace):
        # Interactive console over the given namespace; id 0 because the
        # standalone console is not tied to a traceback frame.
        self.console = Console(namespace)
        self.id = 0
class DebuggedApplication(object):
    """Enables debugging support for a given application::

        from werkzeug.debug import DebuggedApplication
        from myapp import app
        app = DebuggedApplication(app, evalex=True)

    The `evalex` keyword argument allows evaluating expressions in a
    traceback's frame context.

    .. versionadded:: 0.7
       The `lodgeit_url` parameter was added.

    :param app: the WSGI application to run debugged.
    :param evalex: enable exception evaluation feature (interactive
                   debugging).  This requires a non-forking server.
    :param request_key: The key that points to the request object in ths
                        environment.  This parameter is ignored in current
                        versions.
    :param console_path: the URL for a general purpose console.
    :param console_init_func: the function that is executed before starting
                              the general purpose console.  The return value
                              is used as initial namespace.
    :param show_hidden_frames: by default hidden traceback frames are skipped.
                               You can show them by setting this parameter
                               to `True`.
    :param lodgeit_url: the base URL of the LodgeIt instance to use for
                        pasting tracebacks.
    """

    # this class is public
    __module__ = 'werkzeug'

    def __init__(self, app, evalex=False, request_key='werkzeug.request',
                 console_path='/console', console_init_func=None,
                 show_hidden_frames=False,
                 lodgeit_url='http://paste.pocoo.org/'):
        if not console_init_func:
            console_init_func = dict
        self.app = app
        self.evalex = evalex
        # id -> frame / id -> traceback caches populated while rendering
        # tracebacks; the AJAX endpoints look entries up by id.
        self.frames = {}
        self.tracebacks = {}
        self.request_key = request_key
        self.console_path = console_path
        self.console_init_func = console_init_func
        self.show_hidden_frames = show_hidden_frames
        self.lodgeit_url = lodgeit_url
        # Shared secret required by the debugger endpoints so other pages
        # cannot drive the debugger.
        self.secret = gen_salt(20)

    def debug_application(self, environ, start_response):
        """Run the application and conserve the traceback frames."""
        app_iter = None
        try:
            app_iter = self.app(environ, start_response)
            for item in app_iter:
                yield item
            if hasattr(app_iter, 'close'):
                app_iter.close()
        except Exception:
            if hasattr(app_iter, 'close'):
                app_iter.close()
            traceback = get_current_traceback(skip=1, show_hidden_frames=
                                              self.show_hidden_frames,
                                              ignore_system_exceptions=True)
            # Remember every frame for later lookup by the AJAX endpoints.
            for frame in traceback.frames:
                self.frames[frame.id] = frame
            self.tracebacks[traceback.id] = traceback
            try:
                start_response('500 INTERNAL SERVER ERROR', [
                    ('Content-Type', 'text/html; charset=utf-8')
                ])
            except Exception:
                # if we end up here there has been output but an error
                # occurred.  in that situation we can do nothing fancy any
                # more, better log something into the error log and fall
                # back gracefully.
                environ['wsgi.errors'].write(
                    'Debugging middleware caught exception in streamed '
                    'response at a point where response headers were already '
                    'sent.\n')
            else:
                yield traceback.render_full(evalex=self.evalex,
                                            lodgeit_url=self.lodgeit_url,
                                            secret=self.secret) \
                    .encode('utf-8', 'replace')
            traceback.log(environ['wsgi.errors'])

    def execute_command(self, request, command, frame):
        """Execute a command in a console."""
        return Response(frame.console.eval(command), mimetype='text/html')

    def display_console(self, request):
        """Display a standalone shell."""
        if 0 not in self.frames:
            # Frame id 0 is reserved for the standalone console namespace.
            self.frames[0] = _ConsoleFrame(self.console_init_func())
        return Response(render_console_html(secret=self.secret),
                        mimetype='text/html')

    def paste_traceback(self, request, traceback):
        """Paste the traceback and return a JSON response."""
        paste_id = traceback.paste(self.lodgeit_url)
        return Response('{"url": "%sshow/%s/", "id": "%s"}'
                        % (self.lodgeit_url, paste_id, paste_id),
                        mimetype='application/json')

    def get_source(self, request, frame):
        """Render the source viewer."""
        return Response(frame.render_source(), mimetype='text/html')

    def get_resource(self, request, filename):
        """Return a static resource from the shared folder."""
        # basename() strips any directory parts from the requested name,
        # guarding against path traversal.
        filename = join(dirname(__file__), 'shared', basename(filename))
        if isfile(filename):
            mimetype = mimetypes.guess_type(filename)[0] \
                or 'application/octet-stream'
            f = file(filename, 'rb')  # NOTE(review): py2 builtin ``file``
            try:
                return Response(f.read(), mimetype=mimetype)
            finally:
                f.close()
        return Response('Not Found', status=404)

    def __call__(self, environ, start_response):
        """Dispatch the requests."""
        # important: don't ever access a function here that reads the incoming
        # form data!  Otherwise the application won't have access to that data
        # any more!
        request = Request(environ)
        response = self.debug_application
        if request.args.get('__debugger__') == 'yes':
            cmd = request.args.get('cmd')
            arg = request.args.get('f')
            secret = request.args.get('s')
            traceback = self.tracebacks.get(request.args.get('tb', type=int))
            frame = self.frames.get(request.args.get('frm', type=int))
            # Every privileged command requires the per-process secret.
            if cmd == 'resource' and arg:
                response = self.get_resource(request, arg)
            elif cmd == 'paste' and traceback is not None and \
                 secret == self.secret:
                response = self.paste_traceback(request, traceback)
            elif cmd == 'source' and frame and self.secret == secret:
                response = self.get_source(request, frame)
            elif self.evalex and cmd is not None and frame is not None and \
                 self.secret == secret:
                response = self.execute_command(request, cmd, frame)
        elif self.evalex and self.console_path is not None and \
             request.path == self.console_path:
            response = self.display_console(request)
        return response(environ, start_response)
|
apache-2.0
|
mjdarby/RogueDetective
|
tiles.py
|
1
|
2723
|
# Tiles, tiles, tiles! Doors, walls.. If it's not a player but it's
# on the map, it goes here.
# Our imports
from constants import Constants
class Tile(object):
    """Represents a tile in vision.

    Once seen, a tile will show what is currently on it via the game draw
    method.  Once a tile goes out of view, everything that isn't a wall
    disappears during draw.
    """

    def __init__(self):
        self.visible = False
        # When field-of-view is disabled, every tile counts as already
        # seen from the start.
        self.seen = not Constants.FOV_ENABLED
class Decoration(object):
    """Just a decorative tile."""
    def __init__(self):
        # '?' looks like a placeholder glyph; presumably overwritten by
        # map generation -- TODO confirm against the callers.
        self.character = '?'
        self.colour = Constants.COLOUR_GREEN
class Fence(object):
    """Fences are walls that can be jumped over."""
    def __init__(self):
        self.character = '#'
        self.colour = Constants.COLOUR_WHITE
class Door(object):
    """A door tile that players and NPCs can open, close and lock."""

    def __init__(self, game, y, x):
        self.y = y
        self.x = x
        self.character = '+'   # '+' when closed, '-' or '|' when open
        self.closed = True
        self.locked = False
        self.colour = Constants.COLOUR_RED
        self.game = game
        self.timer = -1        # auto-close countdown; -1 means inactive

    def update(self):
        """Tick the auto-close countdown; closes the door when it hits 0."""
        if self.timer == 0:
            # Close the door if it's open.
            if not self.closed:
                self.open()  # open() toggles state, so this closes it
        if self.timer >= 0:
            self.timer -= 1

    def npcOpen(self):  # NPCs share all their house keys, okay?
        self.open()

    def playerOpen(self):
        """Open the door for the player, unless it is locked.

        To prevent the player from being locked in a house, the lock is
        bypassed when the player stands directly above the door (i.e. is
        inside the building).
        """
        playerInside = (self.game.player.y + 1, self.game.player.x) == (self.y, self.x)
        if not playerInside and self.locked:
            self.game.printStatus("The door is locked.")
            return
        # Reset status line
        self.game.printStatus("")
        self.open()

    def open(self):
        """Toggle the door between open and closed states."""
        self.closed = not self.closed
        self.character = '+' if self.closed else '-'
        if not self.closed:
            self.timer = Constants.DOOR_CLOSE_TIME
            # Draw an opened door as '|' when a wall sits directly to its
            # left so it doesn't blend into a horizontal wall run.
            # FIX: the original probed game.walls[(y, x-1)] under a bare
            # ``except:``, which swallowed every possible error.  walls is
            # indexed by (y, x) tuples, so an explicit membership test is
            # the equivalent, non-silencing check.
            if (self.y, self.x - 1) in self.game.walls:
                self.character = '|'
class Wall(object):
    """Wall objects, which the player cannot walk through"""
    def __init__(self):
        """Initialise the wall tile."""
        # NOTE(review): the default coordinates (21, 20) look like
        # copy-paste leftovers; positions appear to be assigned by the
        # caller -- confirm before changing.
        self.x = 21
        self.y = 20
        self.character = '|'
        self.colour = Constants.COLOUR_WHITE
|
gpl-2.0
|
jiachenning/odoo
|
addons/stock_landed_costs/__openerp__.py
|
220
|
1914
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'WMS Landed Costs',
'version': '1.1',
'author': 'OpenERP SA',
'summary': 'Landed Costs',
'description': """
Landed Costs Management
=======================
This module allows you to easily add extra costs on pickings and decide the split of these costs among their stock moves in order to take them into account in your stock valuation.
""",
'website': 'https://www.odoo.com/page/warehouse',
'depends': ['stock_account'],
'category': 'Warehouse Management',
'sequence': 16,
'demo': [
],
'data': [
'security/ir.model.access.csv',
'stock_landed_costs_sequence.xml',
'product_view.xml',
'stock_landed_costs_view.xml',
'stock_landed_costs_data.xml',
],
'test': [
'test/stock_landed_costs.yml'
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
kornicameister/ansible-modules-extras
|
cloud/vmware/vmware_vmkernel.py
|
75
|
7458
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module documentation consumed by ansible-doc (YAML inside a literal
# string).  FIX: the option key was misspelled ``vland_id``; the module's
# EXAMPLES and argument handling use ``vlan_id``.
DOCUMENTATION = '''
---
module: vmware_vmkernel
short_description: Create a VMware VMkernel Interface
description:
    - Create a VMware VMkernel Interface
version_added: 2.0
author: "Joseph Callen (@jcpowermac), Russell Teague (@mtnbikenc)"
notes:
    - Tested on vSphere 5.5
requirements:
    - "python >= 2.6"
    - PyVmomi
options:
    vswitch_name:
        description:
            - The name of the vswitch where to add the VMK interface
        required: True
    portgroup_name:
        description:
            - The name of the portgroup for the VMK interface
        required: True
    ip_address:
        description:
            - The IP Address for the VMK interface
        required: True
    subnet_mask:
        description:
            - The Subnet Mask for the VMK interface
        required: True
    vlan_id:
        description:
            - The VLAN ID for the VMK interface
        required: True
    mtu:
        description:
            - The MTU for the VMK interface
        required: False
    enable_vsan:
        description:
            - Enable the VMK interface for VSAN traffic
        required: False
    enable_vmotion:
        description:
            - Enable the VMK interface for vMotion traffic
        required: False
    enable_mgmt:
        description:
            - Enable the VMK interface for Management traffic
        required: False
    enable_ft:
        description:
            - Enable the VMK interface for Fault Tolerance traffic
        required: False
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Example command from Ansible Playbook
- name: Add Management vmkernel port (vmk1)
local_action:
module: vmware_vmkernel
hostname: esxi_hostname
username: esxi_username
password: esxi_password
vswitch_name: vswitch_name
portgroup_name: portgroup_name
vlan_id: vlan_id
ip_address: ip_address
subnet_mask: subnet_mask
enable_mgmt: True
'''
# pyVmomi is an optional dependency; availability is tracked in
# HAS_PYVMOMI so the module can fail gracefully when it is missing.
try:
    from pyVmomi import vim, vmodl
    HAS_PYVMOMI = True
except ImportError:
    HAS_PYVMOMI = False
def create_vmkernel_adapter(host_system, port_group_name,
                            vlan_id, vswitch_name,
                            ip_address, subnet_mask,
                            mtu, enable_vsan, enable_vmotion, enable_mgmt, enable_ft):
    """Create a port group plus a static-IP VMkernel NIC on the host and tag it
    for the requested traffic types.

    Always returns True; the function does not check whether the interface
    already exists, so the module is not idempotent.
    """
    host_config_manager = host_system.configManager
    host_network_system = host_config_manager.networkSystem
    host_virtual_vic_manager = host_config_manager.virtualNicManager
    # Build one combined network-config change: add a port group and a vnic.
    config = vim.host.NetworkConfig()
    config.portgroup = [vim.host.PortGroup.Config()]
    config.portgroup[0].changeOperation = "add"
    config.portgroup[0].spec = vim.host.PortGroup.Specification()
    config.portgroup[0].spec.name = port_group_name
    config.portgroup[0].spec.vlanId = vlan_id
    config.portgroup[0].spec.vswitchName = vswitch_name
    config.portgroup[0].spec.policy = vim.host.NetworkPolicy()
    config.vnic = [vim.host.VirtualNic.Config()]
    config.vnic[0].changeOperation = "add"
    config.vnic[0].portgroup = port_group_name
    # Static IPv4 configuration (DHCP explicitly disabled).
    config.vnic[0].spec = vim.host.VirtualNic.Specification()
    config.vnic[0].spec.ip = vim.host.IpConfig()
    config.vnic[0].spec.ip.dhcp = False
    config.vnic[0].spec.ip.ipAddress = ip_address
    config.vnic[0].spec.ip.subnetMask = subnet_mask
    if mtu:
        config.vnic[0].spec.mtu = mtu
    host_network_config_result = host_network_system.UpdateNetworkConfig(config, "modify")
    # Enable the requested traffic types on every vnic created above.
    for vnic_device in host_network_config_result.vnicDevice:
        if enable_vsan:
            vsan_system = host_config_manager.vsanSystem
            vsan_config = vim.vsan.host.ConfigInfo()
            vsan_config.networkInfo = vim.vsan.host.ConfigInfo.NetworkInfo()
            vsan_config.networkInfo.port = [vim.vsan.host.ConfigInfo.NetworkInfo.PortConfig()]
            vsan_config.networkInfo.port[0].device = vnic_device
            # NOTE(review): UpdateVsan_Task returns a task that is never
            # awaited, so VSAN enablement may still be in flight on return.
            host_vsan_config_result = vsan_system.UpdateVsan_Task(vsan_config)
        if enable_vmotion:
            host_virtual_vic_manager.SelectVnicForNicType("vmotion", vnic_device)
        if enable_mgmt:
            host_virtual_vic_manager.SelectVnicForNicType("management", vnic_device)
        if enable_ft:
            host_virtual_vic_manager.SelectVnicForNicType("faultToleranceLogging", vnic_device)
    return True
def main():
    """Ansible entry point: validate parameters and create the VMkernel
    interface on the first ESXi host returned by the vCenter inventory.

    Fails the module run when pyVmomi is missing, no host is found, or the
    vSphere API raises a fault.
    """
    argument_spec = vmware_argument_spec()
    argument_spec.update(dict(portgroup_name=dict(required=True, type='str'),
                              ip_address=dict(required=True, type='str'),
                              subnet_mask=dict(required=True, type='str'),
                              mtu=dict(required=False, type='int'),
                              enable_vsan=dict(required=False, type='bool'),
                              enable_vmotion=dict(required=False, type='bool'),
                              enable_mgmt=dict(required=False, type='bool'),
                              enable_ft=dict(required=False, type='bool'),
                              vswitch_name=dict(required=True, type='str'),
                              vlan_id=dict(required=True, type='int')))
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
    if not HAS_PYVMOMI:
        module.fail_json(msg='pyvmomi is required for this module')
    port_group_name = module.params['portgroup_name']
    ip_address = module.params['ip_address']
    subnet_mask = module.params['subnet_mask']
    mtu = module.params['mtu']
    enable_vsan = module.params['enable_vsan']
    enable_vmotion = module.params['enable_vmotion']
    enable_mgmt = module.params['enable_mgmt']
    enable_ft = module.params['enable_ft']
    vswitch_name = module.params['vswitch_name']
    vlan_id = module.params['vlan_id']
    try:
        content = connect_to_api(module)
        host = get_all_objs(content, [vim.HostSystem])
        if not host:
            module.fail_json(msg="Unable to locate Physical Host.")
        # get_all_objs returns a dict keyed by managed objects; take the first
        # host found.  Fix: wrap keys() in list() so indexing also works on
        # Python 3, where dict.keys() is a non-indexable view.
        host_system = list(host.keys())[0]
        changed = create_vmkernel_adapter(host_system, port_group_name,
                                          vlan_id, vswitch_name,
                                          ip_address, subnet_mask,
                                          mtu, enable_vsan, enable_vmotion, enable_mgmt, enable_ft)
        module.exit_json(changed=changed)
    except vmodl.RuntimeFault as runtime_fault:
        module.fail_json(msg=runtime_fault.msg)
    except vmodl.MethodFault as method_fault:
        module.fail_json(msg=method_fault.msg)
    except Exception as e:
        module.fail_json(msg=str(e))
# Wildcard imports at the *bottom* of the file are the historical Ansible 2.0
# module convention: module_utils code is textually appended at build time.
from ansible.module_utils.vmware import *
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
|
gpl-3.0
|
openiitbombayx/edx-platform
|
common/djangoapps/track/backends/mongodb.py
|
41
|
3046
|
"""MongoDB event tracker backend."""
from __future__ import absolute_import
import logging
import pymongo
from pymongo import MongoClient
from pymongo.errors import PyMongoError
from bson.errors import BSONError
from track.backends import BaseBackend
log = logging.getLogger(__name__)
class MongoBackend(BaseBackend):
    """Class for a MongoDB event tracker Backend"""
    def __init__(self, **kwargs):
        """
        Connect to a MongoDB.
        :Parameters:
          - `host`: hostname
          - `port`: port
          - `user`: collection username
          - `password`: collection user password
          - `database`: name of the database
          - `collection`: name of the collection
          - `extra`: parameters to pymongo.MongoClient not listed above
        """
        super(MongoBackend, self).__init__(**kwargs)
        # Extract connection parameters from kwargs
        host = kwargs.get('host', 'localhost')
        port = kwargs.get('port', 27017)
        user = kwargs.get('user', '')
        password = kwargs.get('password', '')
        db_name = kwargs.get('database', 'track')
        collection_name = kwargs.get('collection', 'events')
        # Other mongo connection arguments
        extra = kwargs.get('extra', {})
        # By default disable write acknowledgments, reducing the time
        # blocking during an insert
        extra['w'] = extra.get('w', 0)
        # Make timezone aware by default
        extra['tz_aware'] = extra.get('tz_aware', True)
        # Connect to database and get collection
        self.connection = MongoClient(
            host=host,
            port=port,
            **extra
        )
        database = self.connection[db_name]
        # Authenticate only when credentials were actually supplied.
        if user or password:
            database.authenticate(user, password)
        self.collection = database[collection_name]
        self._create_indexes()
    def _create_indexes(self):
        """Ensures the proper fields are indexed"""
        # WARNING: The collection will be locked during the index
        # creation. If the collection has a large number of
        # documents in it, the operation can take a long time.
        # TODO: The creation of indexes can be moved to a Django
        # management command or equivalent. There is also an option to
        # run the indexing on the background, without locking.
        # NOTE(review): ensure_index/insert below are pymongo 2.x-era APIs,
        # deprecated in pymongo 3.x (create_index/insert_one) -- confirm the
        # pinned pymongo version before upgrading.
        self.collection.ensure_index([('time', pymongo.DESCENDING)])
        self.collection.ensure_index('event_type')
    def send(self, event):
        """Insert the event in to the Mongo collection"""
        try:
            # manipulate=False: do not mutate `event` with a generated _id.
            self.collection.insert(event, manipulate=False)
        except (PyMongoError, BSONError):
            # The event will be lost in case of a connection error or any error
            # that occurs when trying to insert the event into Mongo.
            # pymongo will re-connect/re-authenticate automatically
            # during the next event.
            msg = 'Error inserting to MongoDB event tracker backend'
            log.exception(msg)
|
agpl-3.0
|
defionscode/ansible
|
lib/ansible/module_utils/azure_rm_common_rest.py
|
35
|
2605
|
# Copyright (c) 2018 Zim Kalinowski, <zikalino@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
try:
    from msrestazure.azure_exceptions import CloudError
    from msrestazure.azure_configuration import AzureConfiguration
    from msrest.service_client import ServiceClient
    import json
except ImportError:
    # This is handled in azure_rm_common
    # Fallback keeps this module importable without the Azure SDK; the
    # classes below are unusable until msrest/msrestazure are installed.
    AzureConfiguration = object
class GenericRestClientConfiguration(AzureConfiguration):
    """Holds credentials, subscription and endpoint for GenericRestClient."""

    def __init__(self, credentials, subscription_id, base_url=None):
        # Both identity parameters are mandatory; fail fast with a clear error.
        for arg_name, arg_value in (('credentials', credentials),
                                    ('subscription_id', subscription_id)):
            if arg_value is None:
                raise ValueError("Parameter '{0}' must not be None.".format(arg_name))

        # Default to the public Azure Resource Manager endpoint.
        super(GenericRestClientConfiguration, self).__init__(
            base_url or 'https://management.azure.com')

        self.add_user_agent('genericrestclient/1.0')
        self.add_user_agent('Azure-SDK-For-Python')

        self.credentials = credentials
        self.subscription_id = subscription_id
class GenericRestClient(object):
    """Thin wrapper around msrest's ServiceClient for arbitrary Azure REST calls."""

    def __init__(self, credentials, subscription_id, base_url=None):
        self.config = GenericRestClientConfiguration(credentials, subscription_id, base_url)
        self._client = ServiceClient(self.config.credentials, self.config)
        # Generic client: no model classes are associated.
        self.models = None

    def query(self, url, method, query_parameters, header_parameters, body, expected_status_codes):
        """Build and send one HTTP request; return the raw response.

        :param url: request path relative to the configured base URL.
        :param method: HTTP verb; one of GET, PUT, POST, HEAD, PATCH,
            DELETE or MERGE.
        :param query_parameters: dict of query-string parameters.
        :param header_parameters: dict of request headers.
        :param body: request body passed through to ServiceClient.send.
        :param expected_status_codes: container of status codes considered
            success; any other status raises CloudError.
        :raises ValueError: for an unsupported HTTP method.
        :raises CloudError: when the response status is unexpected.
        """
        operation_config = {}
        # Map the verb to the corresponding ServiceClient request factory.
        request_factories = {
            'GET': self._client.get,
            'PUT': self._client.put,
            'POST': self._client.post,
            'HEAD': self._client.head,
            'PATCH': self._client.patch,
            'DELETE': self._client.delete,
            'MERGE': self._client.merge,
        }
        try:
            make_request = request_factories[method]
        except KeyError:
            # Fix: an unknown verb previously fell through with request=None
            # and failed obscurely inside ServiceClient.send; fail early.
            raise ValueError("Unsupported HTTP method: {0}".format(method))
        request = make_request(url, query_parameters)
        response = self._client.send(request, header_parameters, body, **operation_config)

        if response.status_code not in expected_status_codes:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response
|
gpl-3.0
|
ENCODE-DCC/chip-seq-pipeline
|
dnanexus/make_trackhub_from_analysis.py
|
1
|
18930
|
#!/usr/bin/env python
import os, sys, subprocess, logging, dxpy, json, re, socket, getpass, urlparse
from posixpath import basename, dirname
import common
EPILOG = '''Notes:
Examples:
%(prog)s
'''
DEFAULT_APPLET_PROJECT = 'E3 ChIP-seq'
# Keypair file consumed by processkey(); maps key names to id/secret/server.
KEYFILE = os.path.expanduser("~/keypairs.json")
# NOTE(review): processkey() references DEFAULT_SERVER for a falsy key, but it
# is never defined in this file -- confirm and define the intended default.
def get_args():
    """Parse command-line arguments and configure logging.

    --dxf implies --nodownload because DNAnexus pre-authenticated URLs are
    used in place of locally hosted files.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description=__doc__, epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('infile', help="analysis ID's", nargs='?', type=argparse.FileType('r'), default=sys.stdin)
    parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
    parser.add_argument('--debug',   help="Print debug messages", default=False, action='store_true')
    parser.add_argument('--project', help="Project name or ID", default=dxpy.WORKSPACE_ID)
    parser.add_argument('--nodownload', help="Don't transfer data files, only make the hub", default=False, action='store_true')
    parser.add_argument('--dxf', help="Generate DNAnexus URL's.  Implies --nodownload", default=False, action='store_true')
    parser.add_argument('--truncate', help="Replace existing trackDb file", default=False, action='store_true')
    parser.add_argument('--key', help="The keypair identifier from the keyfile.", default='www')
    parser.add_argument('--ddir', help="The local directory to store data files", default=os.path.expanduser('~/tracks'))
    parser.add_argument('--tdbpath', help="The local path to the trackhub trackDb", default=os.path.expanduser('~/tracks/E3_ChIP_hub/mm10/trackDb.txt'))
    parser.add_argument('--turl', help="The base URL to the tracks", default='http://'+socket.getfqdn()+'/'+getpass.getuser()+'/tracks/')
    parser.add_argument('--tag', help="A short string to add to the composite track longLabel")
    parser.add_argument('--lowpass', help="Add replicated peak tracks with peaks less than this(these) width(s)", nargs='*', type=int)
    parser.add_argument('--pipeline', help="tf or histone.  If omitted, try to determine automatically", default=None)
    args = parser.parse_args()
    if args.debug:
        logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
    else: #use the default logging level
        logging.basicConfig(format='%(levelname)s:%(message)s')
    if args.dxf:
        args.nodownload = True
    return args
def processkey(key):
    """Look up (AUTHID, AUTHPW, SERVER) for *key* in ~/keypairs.json.

    With a falsy *key* the credentials are None and the server falls back to
    DEFAULT_SERVER.  The returned server always ends with '/'.

    NOTE(review): DEFAULT_SERVER is not defined anywhere in this file, so a
    falsy key currently raises NameError -- confirm the intended default.
    """
    import json
    if key:
        # Fix: use a context manager so the key file is closed even if the
        # JSON is malformed or the key is missing.
        with open(KEYFILE, 'r') as keysf:
            keys = json.load(keysf)
        key_dict = keys[key]
    else:
        key_dict = {}
    AUTHID = key_dict.get('key')
    AUTHPW = key_dict.get('secret')
    if key:
        SERVER = key_dict.get('server')
    else:
        SERVER = DEFAULT_SERVER
    if not SERVER.endswith("/"):
        SERVER += "/"
    return (AUTHID, AUTHPW, SERVER)
def encoded_get(url, keypair=None):
    """GET a JSON resource from the ENCODE portal, optionally authenticated
    with an (authid, authpw) keypair, and return the parsed body."""
    import urlparse, requests
    HEADERS = {'content-type': 'application/json'}
    # Force JSON output with embedded sub-objects regardless of caller's URL.
    url = urlparse.urljoin(url,'?format=json&frame=embedded')
    if keypair:
        response = requests.get(url, auth=keypair, headers=HEADERS)
    else:
        response = requests.get(url, headers=HEADERS)
    return response.json()
def pprint_json(JSON_obj):
    """Pretty-print a parsed JSON object to stdout (Python 2 print statement)."""
    print json.dumps(JSON_obj, sort_keys=True, indent=4, separators=(',', ': '))
def composite_stanza(accession, longLabel):
    """Return the top-level composite-track stanza for trackDb, ending with a
    blank line so view stanzas can follow directly."""
    stanza_lines = [
        "track %s" % (accession),
        "compositeTrack on",
        "shortLabel %s" % (accession),
        "longLabel %s" % (longLabel),
        "type bed 3",
        "visibility full",
        "subGroup1 view Views PK=Peaks SIG=Signals",
    ]
    return "\n".join(stanza_lines) + "\n\n"
def viewpeaks_stanza(accession):
    """Return the tab-indented Peaks (PK) view stanza for the composite track."""
    body = [
        "track %sviewpeaks" % (accession),
        "parent %s on" % (accession),
        "shortLabel Peaks",
        "longLabel Peaks",
        "view PK",
        "visibility dense",
        "type bigBed 6 +",
        "scoreFilter 0",
        "scoreFilterLimits 0:1000",
        "viewUi on",
    ]
    return "".join("\t%s\n" % (line) for line in body) + "\n"
def peaks_stanza(accession, url, name, n, tracktype='bigBed 6 +', lowpass=[], dx=None):
    """Return (stanza_text, n_stanzas) for one peaks track plus optional
    low-pass-filtered variants.

    For each width cutoff in *lowpass*, the bigBed referenced by *dx* is
    downloaded, converted to bed, filtered to peaks narrower than the cutoff,
    re-converted to bigBed, uploaded to DNAnexus, and emitted as an extra
    stanza.  Temporary files are removed afterwards.
    NOTE(review): mutable default lowpass=[] is shared across calls; it is
    only read here, but confirm before any mutation is added.
    """
    return_string = \
        "\t\ttrack %s%d\n" %(accession,n) + \
        "\t\tbigDataUrl %s\n" %(url) + \
        "\t\tshortLabel %s\n" %(name[:17]) + \
        "\t\tparent %sviewpeaks on\n" %(accession) + \
        "\t\ttype %s\n" %(tracktype) + \
        "\t\tvisibility dense\n" + \
        "\t\tview PK\n" + \
        "\t\tpriority %d\n\n" %(n)
    n_stanzas = 1
    if not lowpass:
        lowpass = []
    if isinstance(lowpass,int):
        lowpass = [lowpass]
    extra_stanza_count = 0
    for (i, cutoff) in enumerate(lowpass,start=1):
        # Fetch the original bigBed once per cutoff (cached on disk by id).
        fn = dx.get_id()
        if not os.path.isfile(fn):
            dxpy.download_dxfile(dx.get_id(),fn)
        cutoffstr = '-lt%d' %(cutoff)
        outfn = fn + cutoffstr
        print fn, os.path.getsize(fn), subprocess.check_output('wc -l %s' %(fn), shell=True).split()[0]
        bed_fn = fn + '.bed'
        common.block_on('bigBedToBed %s %s' %(fn, bed_fn))
        # Keep only peaks narrower than the cutoff width.
        common.run_pipe([
            'cat %s' %(bed_fn),
            r"""awk 'BEGIN{FS="\t";OFS="\t"}{if (($3-$2) < %d) {print $0}}'""" %(cutoff)], outfn)
        print outfn, os.path.getsize(outfn), subprocess.check_output('wc -l %s' %(outfn), shell=True).split()[0]
        if tracktype =='bigBed 6 +':
            as_file = 'narrowPeak.as'
        elif tracktype == 'bigBed 12 +':
            as_file = 'gappedPeak.as'
        else:
            print "Cannot match tracktype %s to any .as file" %(tracktype)
        bb_fn = common.bed2bb(outfn,'mm10.chrom.sizes',as_file)
        newdx = dxpy.upload_local_file(filename=bb_fn, folder="/tracks", wait_on_close=True)
        new_url, headers = newdx.get_download_url(duration=sys.maxint, preauthenticated=True)
        new_lines = [
            "\t\ttrack %s%d" %(accession,n+i),
            "\t\tbigDataUrl %s" %(new_url),
            "\t\tshortLabel %s" %(name[:17-len(cutoffstr)] + cutoffstr),
            "\t\tparent %sviewpeaks on" %(accession),
            "\t\ttype %s" %(tracktype),
            "\t\tvisibility dense",
            "\t\tview PK",
            "\t\tpriority %d\n\n" %(n+i)]
        new_stanza = '\n'.join(new_lines)
        return_string += new_stanza
        n_stanzas += 1
        # Clean up all intermediates for this cutoff.
        os.remove(bed_fn)
        os.remove(bb_fn)
        os.remove(outfn)
        os.remove(fn)
    return(return_string, n_stanzas)
def viewsignal_stanza(accession):
    """Return the tab-indented Signals (SIG) view stanza for the composite track."""
    body = [
        "track %sviewsignals" % (accession),
        "parent %s on" % (accession),
        "shortLabel Signals",
        "longLabel Signals",
        "view SIG",
        "visibility dense",
        "type bigWig",
        "viewUi on",
    ]
    return "".join("\t%s\n" % (line) for line in body) + "\n"
def signal_stanza(accession, url, name, n, tracktype='bigWig'):
    """Return one double-tab-indented signal track stanza for the SIG view.

    The short label is truncated to 17 characters per trackDb convention.
    """
    entries = [
        "track %s%d" % (accession, n),
        "bigDataUrl %s" % (url),
        "shortLabel %s" % (name[:17]),
        "parent %sviewsignals on" % (accession),
        "type %s" % (tracktype),
        "view SIG",
        "visibility dense",
        "viewLimits 1:10",
        "maxHeightPixels 127:64:2",
        "priority %d" % (n),
    ]
    return "".join("\t\t%s\n" % (entry) for entry in entries) + "\n"
def tf(args, analysis, experiment_accession, first_analysis):
    """Emit trackhub stanzas for one TF ChIP-seq DNAnexus analysis.

    Collects peak/signal/IDR outputs from the analysis stages, optionally
    downloads them under args.ddir, and appends composite/view/track stanzas
    to the trackDb file at args.tdbpath.  *first_analysis* controls whether
    the trackDb file may be truncated (with --truncate).
    """
    authid, authpw, server = processkey(args.key)
    keypair = (authid, authpw)
    stages = analysis.get('stages')
    # Locate the pipeline stages of interest by their execution names.
    peaks_stage = next(stage for stage in stages if stage['execution']['name'] == "SPP Peaks")['execution']
    signals_stage = next(stage for stage in stages if stage['execution']['name'] == "ENCODE Peaks")['execution']
    final_idr_stage = next(stage for stage in stages if stage['execution']['name'] == "Final IDR peak calls")['execution']
    # this is just a cheap way of determining singlicate or replicate analysis
    # singlicate analyses have no rescue_ratio
    singlicate_analysis = final_idr_stage['output'].get('rescue_ratio') is None
    peaks_output_names = [
        'rep1_peaks_bb'
    ] if singlicate_analysis else [
        'rep1_peaks_bb',
        'rep2_peaks_bb',
        'pooled_peaks_bb'
    ]
    signals_output_names = [
        'rep1_pvalue_signal',
        'rep1_fc_signal'
    ] if singlicate_analysis else [
        'rep1_pvalue_signal',
        'rep2_pvalue_signal',
        'pooled_pvalue_signal',
        'rep1_fc_signal',
        'rep2_fc_signal',
        'pooled_fc_signal'
    ]
    idr_output_names = [
        'stable_set_bb'
    ] if singlicate_analysis else [
        'conservative_set_bb',
        'optimal_set_bb'
    ]
    output_names = peaks_output_names + signals_output_names + idr_output_names
    # Map each output name to its DNAnexus file handle.
    outputs = {}
    outputs.update(dict(zip(peaks_output_names,[{'dx': dxpy.DXFile(peaks_stage['output'][output_name])} for output_name in peaks_output_names])))
    outputs.update(dict(zip(signals_output_names,[{'dx': dxpy.DXFile(signals_stage['output'][output_name])} for output_name in signals_output_names])))
    outputs.update(dict(zip(idr_output_names,[{'dx': dxpy.DXFile(final_idr_stage['output'][output_name])} for output_name in idr_output_names])))
    track_directory = os.path.join(args.ddir, experiment_accession)
    url_base = urlparse.urljoin(args.turl, experiment_accession+'/')
    #print "url_base %s" %(url_base)
    if not args.nodownload and not os.path.exists(track_directory):
        os.makedirs(track_directory)
    # Open trackDb: truncate only for the first analysis when --truncate.
    if first_analysis:
        if os.path.exists(args.tdbpath):
            if args.truncate:
                trackDb = open(args.tdbpath,'w')
            else:
                trackDb = open(args.tdbpath,'a')
        else:
            if not os.path.exists(os.path.dirname(args.tdbpath)):
                os.makedirs(os.path.dirname(args.tdbpath))
            trackDb = open(args.tdbpath, 'w')
    else:
        trackDb = open(args.tdbpath,'a')
    # Download each output (unless --nodownload) and compute its public URL.
    for (output_name, output) in outputs.iteritems():
        local_path = os.path.join(track_directory, output['dx'].name)
        print output_name, output['dx'].get_id(), local_path
        if not args.nodownload:
            dxpy.download_dxfile(output['dx'].get_id(), local_path)
        outputs[output_name].update({'local_path' : local_path})
        #print "Joining %s and %s" %(url_base, os.path.basename(local_path))
        if args.dxf:
            url, headers = output['dx'].get_download_url(duration=sys.maxint, preauthenticated=True)
            outputs[output_name].update({'url': url})
        else:
            outputs[output_name].update({'url': urlparse.urljoin(url_base,os.path.basename(local_path))})
        #print outputs[output_name]['url']
    # Pull experiment metadata from the ENCODE portal for the track labels.
    experiment = encoded_get(urlparse.urljoin(server,'/experiments/%s' %(experiment_accession)), keypair)
    description = '%s %s' %(
        experiment['target']['label'],
        experiment['replicates'][0]['library']['biosample']['biosample_term_name'])
    longLabel = 'E3 TF ChIP - %s - %s' %(experiment_accession, description)
    if args.tag:
        longLabel += ' - %s' %(args.tag)
    trackDb.write(composite_stanza(experiment_accession, longLabel))
    first_peaks = True
    first_signal = True
    priority = 1
    # Emit one stanza per output, opening each view on first use.
    for (n, output_name) in enumerate(output_names,start=1):
        if output_name.endswith('peaks_bb') or output_name.endswith('set_bb'):
            if first_peaks:
                trackDb.write(viewpeaks_stanza(experiment_accession))
                first_peaks = False
            stanzas, n_stanzas = peaks_stanza(experiment_accession, outputs[output_name]['url'], output_name, priority, tracktype="bigBed 6 +", lowpass=args.lowpass, dx=outputs[output_name]['dx'])
            trackDb.write(stanzas)
            priority += n_stanzas
        elif output_name.endswith('_signal'):
            if first_signal:
                trackDb.write(viewsignal_stanza(experiment_accession))
                first_signal = False
            trackDb.write(signal_stanza(experiment_accession, outputs[output_name]['url'], output_name, priority, tracktype="bigWig"))
            priority += 1
    trackDb.close()
def histone(args, analysis, experiment_accession, first_analysis):
    """Emit trackhub stanzas for one histone ChIP-seq DNAnexus analysis.

    Mirrors tf() but takes narrow/gapped peak outputs from the "ENCODE Peaks"
    stage plus replicated peak sets from the "Final narrowpeaks"/"Final
    gappedpeaks" stages, then appends stanzas to the trackDb at args.tdbpath.
    """
    authid, authpw, server = processkey(args.key)
    keypair = (authid,authpw)
    stages = analysis.get('stages')
    peaks_stage = next(stage for stage in stages if stage['execution']['name'] == "ENCODE Peaks")['execution']
    replicated_stages = [stage['execution'] for stage in stages if 'Final' in stage['execution']['name']]
    # this is just a cheap way of determining singlicate or replicate analysis
    # singlicate analyses have no rescue_ratio
    singlicate_analysis = all(stage['output'].get('rep2_signal') is None for stage in replicated_stages)
    output_names = [
        'rep1_narrowpeaks_bb',
        'rep1_gappedpeaks_bb',
        'rep1_pvalue_signal',
        'rep1_fc_signal',
    ] if singlicate_analysis else [
        'rep1_narrowpeaks_bb',
        'rep2_narrowpeaks_bb',
        'pooled_narrowpeaks_bb',
        'rep1_gappedpeaks_bb',
        'rep2_gappedpeaks_bb',
        'pooled_gappedpeaks_bb',
        'rep1_pvalue_signal',
        'rep2_pvalue_signal',
        'pooled_pvalue_signal',
        'rep1_fc_signal',
        'rep2_fc_signal',
        'pooled_fc_signal'
    ]
    outputs = dict(zip(output_names,[{'dx': dxpy.DXFile(peaks_stage['output'][output_name])} for output_name in output_names]))
    # Insert the replicated (overlap) peak sets at fixed positions so they
    # render next to the per-rep narrow/gapped peak tracks.
    output_names.insert(3,'replicated_narrowpeaks_bb')
    outputs.update({'replicated_narrowpeaks_bb' : {'dx': dxpy.DXFile(next(stage['execution']['output']['overlapping_peaks_bb'] for stage in stages if stage['execution']['name'] == 'Final narrowpeaks'))}})
    output_names.insert(7,'replicated_gappedpeaks_bb')
    outputs.update({'replicated_gappedpeaks_bb' : {'dx': dxpy.DXFile(next(stage['execution']['output']['overlapping_peaks_bb'] for stage in stages if stage['execution']['name'] == 'Final gappedpeaks'))}})
    track_directory = os.path.join(args.ddir, experiment_accession)
    url_base = urlparse.urljoin(args.turl, experiment_accession+'/')
    #print "url_base %s" %(url_base)
    if not args.nodownload and not os.path.exists(track_directory):
        os.makedirs(track_directory)
    # Open trackDb: truncate only for the first analysis when --truncate.
    if first_analysis:
        if os.path.exists(args.tdbpath):
            if args.truncate:
                trackDb = open(args.tdbpath,'w')
            else:
                trackDb = open(args.tdbpath,'a')
        else:
            if not os.path.exists(os.path.dirname(args.tdbpath)):
                os.makedirs(os.path.dirname(args.tdbpath))
            trackDb = open(args.tdbpath, 'w')
    else:
        trackDb = open(args.tdbpath,'a')
    # Download each output (unless --nodownload) and compute its public URL.
    for (output_name, output) in outputs.iteritems():
        local_path = os.path.join(track_directory, output['dx'].name)
        print output_name, output['dx'].get_id(), local_path
        if not args.nodownload:
            dxpy.download_dxfile(output['dx'].get_id(), local_path)
        outputs[output_name].update({'local_path' : local_path})
        #print "Joining %s and %s" %(url_base, os.path.basename(local_path))
        if args.dxf:
            url, headers = output['dx'].get_download_url(duration=sys.maxint, preauthenticated=True)
            outputs[output_name].update({'url': url})
        else:
            outputs[output_name].update({'url': urlparse.urljoin(url_base,os.path.basename(local_path))})
        #print outputs[output_name]['url']
    # Pull experiment metadata from the ENCODE portal for the track labels.
    experiment = encoded_get(urlparse.urljoin(server,'/experiments/%s' %(experiment_accession)), keypair)
    description = '%s %s %s %s' % (
        experiment['target']['label'],
        experiment['replicates'][0]['library']['biosample']['biosample_term_name'],
        experiment['replicates'][0]['library']['biosample'].get('life_stage'),
        experiment['replicates'][0]['library']['biosample'].get('age_display'))
    longLabel = 'E3 Histone ChIP - %s - %s' %(experiment_accession, description)
    if args.tag:
        longLabel += ' - %s' %(args.tag)
    trackDb.write(composite_stanza(experiment_accession, longLabel))
    first_peaks = True
    first_signal = True
    priority = 1
    # Emit one stanza per output, opening each view on first use.
    for (n, output_name) in enumerate(output_names,start=1):
        if output_name.endswith('narrowpeaks_bb'):
            if first_peaks:
                trackDb.write(viewpeaks_stanza(experiment_accession))
                first_peaks = False
            stanzas, n_stanzas = peaks_stanza(experiment_accession, outputs[output_name]['url'], output_name, priority, tracktype="bigBed 6 +", lowpass=args.lowpass, dx=outputs[output_name]['dx'])
            trackDb.write(stanzas)
            priority += n_stanzas
        elif output_name.endswith('gappedpeaks_bb'):
            if first_peaks:
                trackDb.write(viewpeaks_stanza(experiment_accession))
                first_peaks = False
            stanzas, n_stanzas = peaks_stanza(experiment_accession, outputs[output_name]['url'], output_name, priority, tracktype="bigBed 12 +", lowpass=args.lowpass, dx=outputs[output_name]['dx'])
            trackDb.write(stanzas)
            priority += n_stanzas
        elif output_name.endswith('_signal'):
            if first_signal:
                trackDb.write(viewsignal_stanza(experiment_accession))
                first_signal = False
            trackDb.write(signal_stanza(experiment_accession, outputs[output_name]['url'], output_name, priority, tracktype="bigWig"))
            priority += 1
    trackDb.close()
def main():
args = get_args()
first_analysis = True
for (i, analysis_id) in enumerate(args.infile):
analysis_id = analysis_id.strip()
try:
analysis = dxpy.describe(analysis_id)
except:
print "Invalid analysis ID %s. Skipping." % (analysis_id)
continue
experiment_m = re.match('^(ENCSR[0-9]{3}[A-Z]{3})', analysis['name'])
if not experiment_m:
print "No accession in %s, skipping." % (analysis['name'])
continue
# print "Temporary hack"
# experiment_accession = "ENCSR048KZD"
else:
experiment_accession = experiment_m.group(1)
if args.pipeline:
pipeline = args.pipeline
elif analysis['executableName'] == 'histone_chip_seq':
pipeline = 'histone'
elif analysis['executableName'] == 'tf_chip_seq':
pipeline = 'tf'
if pipeline == 'histone':
histone(args, analysis, experiment_accession, first_analysis)
elif pipeline == 'tf':
tf(args, analysis, experiment_accession, first_analysis)
else:
print "Unrecognized pipeline: %s, skipping." % (pipeline)
continue
first_analysis = False
# Script entry point; allows the module to be imported without side effects.
if __name__ == '__main__':
    main()
|
mit
|
tensorflow/agents
|
tf_agents/bandits/agents/mixture_agent.py
|
1
|
7660
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An agent that mixes a list of agents with a constant mixture distribution."""
from __future__ import absolute_import
from __future__ import division
# Using Type Annotations.
from __future__ import print_function
import abc
from typing import List, Optional, Sequence, Text
import gin
import tensorflow as tf
from tf_agents.agents import data_converter
from tf_agents.agents import tf_agent
from tf_agents.bandits.policies import mixture_policy
from tf_agents.trajectories import trajectory
from tf_agents.typing import types
from tf_agents.utils import common
from tf_agents.utils import nest_utils
def _dynamic_partition_of_nested_tensors(
    nested_tensor: types.NestedTensor, partitions: types.Int,
    num_partitions: int) -> List[types.NestedTensor]:
  """Partitions every leaf tensor of a nest into `num_partitions` groups.

  Returns a list of length `num_partitions`; each element is a nest with the
  same structure as `nested_tensor` whose leaves hold the rows assigned to
  that partition by `partitions`.
  Note that this function uses tf.dynamic_partition, and thus
  'MixtureAgent' is not compatible with XLA.
  Args:
    nested_tensor: The input nested structure to partition.
    partitions: int32 tensor based on which the partitioning happens.
    num_partitions: The number of expected partitions.
  Returns:
    A list of nested tensors with the same structure as `nested_tensor`.
  """
  leaves = tf.nest.flatten(nested_tensor)
  # An empty nest has nothing to partition; replicate it per partition.
  if not leaves:
    return [nested_tensor] * num_partitions
  per_leaf_parts = [
      tf.dynamic_partition(
          data=leaf, partitions=partitions, num_partitions=num_partitions)
      for leaf in leaves
  ]
  # Transpose leaf-major -> partition-major, then repack each group.
  return [
      tf.nest.pack_sequence_as(nested_tensor, list(group))
      for group in zip(*per_leaf_parts)
  ]
@gin.configurable
class MixtureAgent(tf_agent.TFAgent):
  """An agent that mixes a set of agents with a given mixture.
  For every data sample, the agent updates the sub-agent that was used to make
  the action choice in that sample. For this update to happen, the mixture agent
  needs to have the information on which sub-agent is "responsible" for the
  action. This information is in a policy info field `mixture_agent_id`.
  Note that this agent makes use of `tf.dynamic_partition`, and thus it is not
  compatible with XLA.
  """
  def __init__(self,
               mixture_distribution: types.Distribution,
               agents: Sequence[tf_agent.TFAgent],
               name: Optional[Text] = None):
    """Initializes an instance of `MixtureAgent`.
    Args:
      mixture_distribution: An instance of `tfd.Categorical` distribution. This
        distribution is used to draw sub-policies by the mixture policy. The
        parameters of the distribution is trained by the mixture agent.
      agents: List of instances of TF-Agents bandit agents. These agents will be
        trained and used to select actions. The length of this list should match
        that of `mixture_weights`.
      name: The name of this instance of `MixtureAgent`.
    """
    tf.Module.__init__(self, name=name)
    time_step_spec = agents[0].time_step_spec
    action_spec = agents[0].action_spec
    self._original_info_spec = agents[0].policy.info_spec
    # All sub-agents must share action/time-step/info specs; only the last
    # mismatch found is reported.
    error_message = None
    for agent in agents[1:]:
      if action_spec != agent.action_spec:
        error_message = 'Inconsistent action specs.'
      if time_step_spec != agent.time_step_spec:
        error_message = 'Inconsistent time step specs.'
      if self._original_info_spec != agent.policy.info_spec:
        error_message = 'Inconsistent info specs.'
    if error_message is not None:
      raise ValueError(error_message)
    self._agents = agents
    self._num_agents = len(agents)
    self._mixture_distribution = mixture_distribution
    policies = [agent.collect_policy for agent in agents]
    policy = mixture_policy.MixturePolicy(mixture_distribution, policies)
    super(MixtureAgent, self).__init__(
        time_step_spec, action_spec, policy, policy, train_sequence_length=None)
    self._as_trajectory = data_converter.AsTrajectory(
        self.data_context, sequence_length=None)
  def _initialize(self):
    tf.compat.v1.variables_initializer(self.variables)
    # Each sub-agent manages its own variables and must be initialized too.
    for agent in self._agents:
      agent.initialize()
  # Subclasses must implement this method.
  @abc.abstractmethod
  def _update_mixture_distribution(self, experience):
    """This function updates the mixture weights given training experience."""
    raise NotImplementedError('`_update_mixture_distribution` should be '
                              'implemented by subclasses of `MixtureAgent`.')
  def _train(self, experience, weights=None):
    # Route each sample to the sub-agent that produced it (per
    # `mixture_agent_id` in the policy info), train every sub-agent on its own
    # slice, then update the mixture weights.
    del weights  # unused
    experience = self._as_trajectory(experience)
    reward, _ = nest_utils.flatten_multi_batched_nested_tensors(
        experience.reward, self._time_step_spec.reward)
    action, _ = nest_utils.flatten_multi_batched_nested_tensors(
        experience.action, self._action_spec)
    observation, _ = nest_utils.flatten_multi_batched_nested_tensors(
        experience.observation, self._time_step_spec.observation)
    policy_choice, _ = nest_utils.flatten_multi_batched_nested_tensors(
        experience.policy_info[mixture_policy.MIXTURE_AGENT_ID],
        self._time_step_spec.reward)
    original_infos, _ = nest_utils.flatten_multi_batched_nested_tensors(
        experience.policy_info[mixture_policy.SUBPOLICY_INFO],
        self._original_info_spec)
    partitioned_nested_infos = nest_utils.batch_nested_tensors(
        _dynamic_partition_of_nested_tensors(original_infos, policy_choice,
                                             self._num_agents))
    partitioned_nested_rewards = [
        nest_utils.batch_nested_tensors(t)
        for t in _dynamic_partition_of_nested_tensors(reward, policy_choice,
                                                      self._num_agents)
    ]
    partitioned_nested_actions = [
        nest_utils.batch_nested_tensors(t)
        for t in _dynamic_partition_of_nested_tensors(action, policy_choice,
                                                      self._num_agents)
    ]
    partitioned_nested_observations = [
        nest_utils.batch_nested_tensors(t)
        for t in _dynamic_partition_of_nested_tensors(
            observation, policy_choice, self._num_agents)
    ]
    loss = 0
    # Train each sub-agent on its own partition; total loss is the sum.
    for k in range(self._num_agents):
      per_policy_experience = trajectory.single_step(
          observation=partitioned_nested_observations[k],
          action=partitioned_nested_actions[k],
          policy_info=partitioned_nested_infos[k],
          reward=partitioned_nested_rewards[k],
          discount=tf.zeros_like(partitioned_nested_rewards[k]))
      loss_info = self._agents[k].train(per_policy_experience)
      loss += loss_info.loss
    common.function_in_tf1()(self._update_mixture_distribution)(experience)
    return tf_agent.LossInfo(loss=(loss), extra=())
|
apache-2.0
|
cvegaj/ElectriCERT
|
venv3/lib/python3.6/site-packages/chardet/universaldetector.py
|
244
|
12485
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
"""
Module containing the UniversalDetector detector class, which is the primary
class a user of ``chardet`` should use.
:author: Mark Pilgrim (initial port to Python)
:author: Shy Shalom (original C code)
:author: Dan Blanchard (major refactoring for 3.0)
:author: Ian Cordasco
"""
import codecs
import logging
import re
from .charsetgroupprober import CharSetGroupProber
from .enums import InputState, LanguageFilter, ProbingState
from .escprober import EscCharSetProber
from .latin1prober import Latin1Prober
from .mbcsgroupprober import MBCSGroupProber
from .sbcsgroupprober import SBCSGroupProber
class UniversalDetector(object):
    """
    The ``UniversalDetector`` class underlies the ``chardet.detect`` function
    and coordinates all of the different charset probers.
    To get a ``dict`` containing an encoding and its confidence, you can simply
    run:
    .. code::
            u = UniversalDetector()
            u.feed(some_bytes)
            u.close()
            detected = u.result
    """
    # Probers whose confidence is at or below this value are ignored by close().
    MINIMUM_THRESHOLD = 0.20
    # Any byte >= 0x80 means the document cannot be pure ASCII.
    HIGH_BYTE_DETECTOR = re.compile(b'[\x80-\xFF]')
    # ESC (ISO-2022-*) or '~{' (HZ-GB-2312) marks an escape-sequence encoding.
    ESC_DETECTOR = re.compile(b'(\033|~{)')
    # 0x80-0x9F are printable in Windows code pages but controls in ISO-8859-*.
    WIN_BYTE_DETECTOR = re.compile(b'[\x80-\x9F]')
    ISO_WIN_MAP = {'iso-8859-1': 'Windows-1252',
                   'iso-8859-2': 'Windows-1250',
                   'iso-8859-5': 'Windows-1251',
                   'iso-8859-6': 'Windows-1256',
                   'iso-8859-7': 'Windows-1253',
                   'iso-8859-8': 'Windows-1255',
                   'iso-8859-9': 'Windows-1254',
                   'iso-8859-13': 'Windows-1257'}

    def __init__(self, lang_filter=LanguageFilter.ALL):
        # All mutable state is initialized by reset(); the assignments here
        # only declare the attributes.
        self._esc_charset_prober = None
        self._charset_probers = []
        self.result = None
        self.done = None
        self._got_data = None
        self._input_state = None
        self._last_char = None
        self.lang_filter = lang_filter
        self.logger = logging.getLogger(__name__)
        self._has_win_bytes = None
        self.reset()

    def reset(self):
        """
        Reset the UniversalDetector and all of its probers back to their
        initial states.  This is called by ``__init__``, so you only need to
        call this directly in between analyses of different documents.
        """
        self.result = {'encoding': None, 'confidence': 0.0, 'language': None}
        self.done = False
        self._got_data = False
        self._has_win_bytes = False
        self._input_state = InputState.PURE_ASCII
        self._last_char = b''
        # Probers are created lazily in feed(); reset them only if they exist.
        if self._esc_charset_prober:
            self._esc_charset_prober.reset()
        for prober in self._charset_probers:
            prober.reset()

    def feed(self, byte_str):
        """
        Takes a chunk of a document and feeds it through all of the relevant
        charset probers.
        After calling ``feed``, you can check the value of the ``done``
        attribute to see if you need to continue feeding the
        ``UniversalDetector`` more data, or if it has made a prediction
        (in the ``result`` attribute).
        .. note::
           You should always call ``close`` when you're done feeding in your
           document if ``done`` is not already ``True``.
        """
        if self.done:
            return
        if not len(byte_str):
            return
        if not isinstance(byte_str, bytearray):
            byte_str = bytearray(byte_str)
        # First check for known BOMs, since these are guaranteed to be correct
        if not self._got_data:
            # If the data starts with BOM, we know it is UTF
            if byte_str.startswith(codecs.BOM_UTF8):
                # EF BB BF  UTF-8 with BOM
                self.result = {'encoding': "UTF-8-SIG",
                               'confidence': 1.0,
                               'language': ''}
            # UTF-32 must be tested before UTF-16: BOM_UTF32_LE starts with
            # the same two bytes as BOM_LE.
            elif byte_str.startswith((codecs.BOM_UTF32_LE,
                                      codecs.BOM_UTF32_BE)):
                # FF FE 00 00  UTF-32, little-endian BOM
                # 00 00 FE FF  UTF-32, big-endian BOM
                self.result = {'encoding': "UTF-32",
                               'confidence': 1.0,
                               'language': ''}
            elif byte_str.startswith(b'\xFE\xFF\x00\x00'):
                # FE FF 00 00  UCS-4, unusual octet order BOM (3412)
                self.result = {'encoding': "X-ISO-10646-UCS-4-3412",
                               'confidence': 1.0,
                               'language': ''}
            elif byte_str.startswith(b'\x00\x00\xFF\xFE'):
                # 00 00 FF FE  UCS-4, unusual octet order BOM (2143)
                self.result = {'encoding': "X-ISO-10646-UCS-4-2143",
                               'confidence': 1.0,
                               'language': ''}
            elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)):
                # FF FE  UTF-16, little endian BOM
                # FE FF  UTF-16, big endian BOM
                self.result = {'encoding': "UTF-16",
                               'confidence': 1.0,
                               'language': ''}
            self._got_data = True
            if self.result['encoding'] is not None:
                self.done = True
                return
        # If none of those matched and we've only seen ASCII so far, check
        # for high bytes and escape sequences
        if self._input_state == InputState.PURE_ASCII:
            if self.HIGH_BYTE_DETECTOR.search(byte_str):
                self._input_state = InputState.HIGH_BYTE
            elif self._input_state == InputState.PURE_ASCII and \
                    self.ESC_DETECTOR.search(self._last_char + byte_str):
                # Prepend the previous chunk's last byte so escape sequences
                # split across chunk boundaries are still detected.
                self._input_state = InputState.ESC_ASCII
            self._last_char = byte_str[-1:]
        # If we've seen escape sequences, use the EscCharSetProber, which
        # uses a simple state machine to check for known escape sequences in
        # HZ and ISO-2022 encodings, since those are the only encodings that
        # use such sequences.
        if self._input_state == InputState.ESC_ASCII:
            if not self._esc_charset_prober:
                self._esc_charset_prober = EscCharSetProber(self.lang_filter)
            if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT:
                self.result = {'encoding':
                               self._esc_charset_prober.charset_name,
                               'confidence':
                               self._esc_charset_prober.get_confidence(),
                               'language':
                               self._esc_charset_prober.language}
                self.done = True
        # If we've seen high bytes (i.e., those with values greater than 127),
        # we need to do more complicated checks using all our multi-byte and
        # single-byte probers that are left.  The single-byte probers
        # use character bigram distributions to determine the encoding, whereas
        # the multi-byte probers use a combination of character unigram and
        # bigram distributions.
        elif self._input_state == InputState.HIGH_BYTE:
            if not self._charset_probers:
                self._charset_probers = [MBCSGroupProber(self.lang_filter)]
                # If we're checking non-CJK encodings, use single-byte prober
                if self.lang_filter & LanguageFilter.NON_CJK:
                    self._charset_probers.append(SBCSGroupProber())
                self._charset_probers.append(Latin1Prober())
            for prober in self._charset_probers:
                if prober.feed(byte_str) == ProbingState.FOUND_IT:
                    self.result = {'encoding': prober.charset_name,
                                   'confidence': prober.get_confidence(),
                                   'language': prober.language}
                    self.done = True
                    break
            if self.WIN_BYTE_DETECTOR.search(byte_str):
                self._has_win_bytes = True

    def close(self):
        """
        Stop analyzing the current document and come up with a final
        prediction.
        :returns:  The ``result`` attribute, a ``dict`` with the keys
                   `encoding`, `confidence`, and `language`.
        """
        # Don't bother with checks if we're already done
        if self.done:
            return self.result
        self.done = True
        if not self._got_data:
            self.logger.debug('no data received!')
        # Default to ASCII if it is all we've seen so far
        elif self._input_state == InputState.PURE_ASCII:
            self.result = {'encoding': 'ascii',
                           'confidence': 1.0,
                           'language': ''}
        # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD
        elif self._input_state == InputState.HIGH_BYTE:
            prober_confidence = None
            max_prober_confidence = 0.0
            max_prober = None
            for prober in self._charset_probers:
                if not prober:
                    continue
                prober_confidence = prober.get_confidence()
                if prober_confidence > max_prober_confidence:
                    max_prober_confidence = prober_confidence
                    max_prober = prober
            if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD):
                charset_name = max_prober.charset_name
                lower_charset_name = max_prober.charset_name.lower()
                confidence = max_prober.get_confidence()
                # Use Windows encoding name instead of ISO-8859 if we saw any
                # extra Windows-specific bytes
                if lower_charset_name.startswith('iso-8859'):
                    if self._has_win_bytes:
                        charset_name = self.ISO_WIN_MAP.get(lower_charset_name,
                                                            charset_name)
                self.result = {'encoding': charset_name,
                               'confidence': confidence,
                               'language': max_prober.language}
        # Log all prober confidences if none met MINIMUM_THRESHOLD.
        # BUGFIX: use '<=' so this also fires at levels more verbose than
        # DEBUG, and log the non-group prober itself ('group_prober') in the
        # else branch -- 'prober' there was the stale (or unbound) variable
        # from the preceding inner loop.
        if self.logger.getEffectiveLevel() <= logging.DEBUG:
            if self.result['encoding'] is None:
                self.logger.debug('no probers hit minimum threshold')
                for group_prober in self._charset_probers:
                    if not group_prober:
                        continue
                    if isinstance(group_prober, CharSetGroupProber):
                        for prober in group_prober.probers:
                            self.logger.debug('%s %s confidence = %s',
                                              prober.charset_name,
                                              prober.language,
                                              prober.get_confidence())
                    else:
                        self.logger.debug('%s %s confidence = %s',
                                          group_prober.charset_name,
                                          group_prober.language,
                                          group_prober.get_confidence())
        return self.result
|
gpl-3.0
|
amboxer21/scrapy
|
scrapy/spiders/sitemap.py
|
56
|
2706
|
import re
import logging
from scrapy.spiders import Spider
from scrapy.http import Request, XmlResponse
from scrapy.utils.sitemap import Sitemap, sitemap_urls_from_robots
from scrapy.utils.gz import gunzip, is_gzipped
logger = logging.getLogger(__name__)
class SitemapSpider(Spider):
    """Spider that discovers URLs to crawl from sitemap (and robots.txt) files.

    Class attributes (override in subclasses):
      sitemap_urls: sitemap or robots.txt URLs to start from.
      sitemap_rules: (pattern, callback) pairs; the first pattern that matches
          a URL from a <urlset> decides which callback parses it.
      sitemap_follow: patterns a sub-sitemap URL (from a <sitemapindex>) must
          match to be followed.
      sitemap_alternate_links: when True, also crawl xhtml:link
          rel="alternate" URLs found in sitemap entries.
    """
    sitemap_urls = ()
    sitemap_rules = [('', 'parse')]
    sitemap_follow = ['']
    sitemap_alternate_links = False
    def __init__(self, *a, **kw):
        super(SitemapSpider, self).__init__(*a, **kw)
        # Pre-compile rule patterns and resolve string callback names to
        # bound methods, so _parse_sitemap only does regex matching.
        self._cbs = []
        for r, c in self.sitemap_rules:
            if isinstance(c, basestring):  # NOTE(review): Python-2-only builtin
                c = getattr(self, c)
            self._cbs.append((regex(r), c))
        self._follow = [regex(x) for x in self.sitemap_follow]
    def start_requests(self):
        # Every start URL (sitemap or robots.txt) goes through _parse_sitemap.
        return (Request(x, callback=self._parse_sitemap) for x in self.sitemap_urls)
    def _parse_sitemap(self, response):
        # robots.txt responses are scanned for "Sitemap:" lines; each listed
        # sitemap is fetched and fed back through this same callback.
        if response.url.endswith('/robots.txt'):
            for url in sitemap_urls_from_robots(response.body):
                yield Request(url, callback=self._parse_sitemap)
        else:
            body = self._get_sitemap_body(response)
            if body is None:
                logger.warning("Ignoring invalid sitemap: %(response)s",
                               {'response': response}, extra={'spider': self})
                return
            s = Sitemap(body)
            if s.type == 'sitemapindex':
                # A sitemap index: recurse into sub-sitemaps that match any
                # of the sitemap_follow patterns.
                for loc in iterloc(s, self.sitemap_alternate_links):
                    if any(x.search(loc) for x in self._follow):
                        yield Request(loc, callback=self._parse_sitemap)
            elif s.type == 'urlset':
                # A leaf sitemap: dispatch each URL to the first matching
                # rule's callback (rule order matters).
                for loc in iterloc(s):
                    for r, c in self._cbs:
                        if r.search(loc):
                            yield Request(loc, callback=c)
                            break
    def _get_sitemap_body(self, response):
        """Return the sitemap body contained in the given response, or None if the
        response is not a sitemap.
        """
        # Accept XML responses directly, gzipped payloads, and .xml/.xml.gz
        # URLs served with other content types; anything else yields None.
        if isinstance(response, XmlResponse):
            return response.body
        elif is_gzipped(response):
            return gunzip(response.body)
        elif response.url.endswith('.xml'):
            return response.body
        elif response.url.endswith('.xml.gz'):
            return gunzip(response.body)
# Pattern-string types: ``basestring`` covers str and unicode on Python 2;
# on Python 3 (where ``basestring`` is gone and would raise NameError) fall
# back to (str, bytes).
try:
    _pattern_str_types = basestring
except NameError:
    _pattern_str_types = (str, bytes)


def regex(x):
    """Return a compiled regex for *x*.

    String (or bytes) patterns are compiled with ``re.compile``; anything
    else -- typically an already-compiled pattern object -- is returned
    unchanged.
    """
    if isinstance(x, _pattern_str_types):
        return re.compile(x)
    return x
def iterloc(it, alt=False):
    """Yield the primary <loc> URL of each sitemap entry.

    When *alt* is True, each entry's xhtml:link rel="alternate" URLs are
    yielded right after its primary URL.
    """
    for entry in it:
        yield entry['loc']
        # Alternate-language URLs (xhtml:link rel="alternate"), on request.
        if alt:
            for link in entry.get('alternate', ()):
                yield link
|
bsd-3-clause
|
NonnEmilia/OpenGenfri
|
pos/webpos/dbmanager.py
|
2
|
2516
|
from django.contrib.auth.models import User
from webpos.models import Item, Bill, BillItem
def commit_bill(output, reqdata, user):
    """Validate stock for a requested bill and, if valid, persist it.

    output: dict accumulator with at least 'errors' (list), 'total' and
        'customer_id' keys; mutated in place and returned.
    reqdata: request payload with 'customer_name' and 'items'
        ({item_name: {'qty': int, 'notes': str}}).
    user: Django User recorded as the bill's server.

    Returns (output, Bill) on success, (output, None) when any item has
    insufficient stock (in which case output['errors'] maps item name to the
    available quantity and output['total'] is zeroed).
    """
    billhd = Bill(customer_name=reqdata['customer_name'],
                  server=User.objects.get(pk=user.id).username)
    billitms = []
    reqquants = reqdata['items']
    # Only items that exist in the DB are considered; unknown names in the
    # request are silently ignored.
    dbitms = Item.objects.filter(name__in=reqquants.keys())
    for dbitm in dbitms:
        reqitem = reqquants[dbitm.name]
        quant = reqitem['qty']
        notes = reqitem['notes']
        db_quant = dbitm.quantity
        if db_quant is not None:
            # Stock-tracked item: check availability before accepting.
            newquant = db_quant - quant
            if newquant < 0:
                # Not enough stock: record (name, available) as an error.
                output['errors'].append((dbitm.name, dbitm.quantity))
            else:
                if output['errors']:
                    # An earlier item already failed; skip the bookkeeping
                    # since the bill will be rejected anyway.
                    continue
                output['total'] += dbitm.price * quant
                billitms.append(BillItem(item=dbitm, quantity=quant,
                                         category=dbitm.category,
                                         item_price=dbitm.price,
                                         note=notes))
                # Quantity is updated in memory only; saved below after the
                # whole bill validates.
                dbitm.quantity = newquant
        else:
            # Unlimited-stock item (quantity is None): always accepted.
            # NOTE(review): unlike the tracked branch, this one still adds to
            # 'total' when errors already exist; harmless today because the
            # error path zeroes the total, but the branches are inconsistent.
            output['total'] += dbitm.price * quant
            billitms.append(BillItem(item=dbitm, quantity=quant,
                                     category=dbitm.category,
                                     item_price=dbitm.price,
                                     note=notes))
    if output['errors']:
        # Reject the whole bill: nothing was saved, so no rollback is needed.
        output['total'] = 0
        output['customer_id'] = None
        output['errors'] = dict(output['errors'])
        return output, None
    else:
        output['errors'] = dict(output['errors'])
        if output['total'] < 0:
            output['total'] = 0
        billhd.total = output['total']
        billhd.customer_id = output['customer_id']
        billhd.save()
        output['date'] = billhd.date
        output['bill_id'] = billhd.id
        # With no errors, billitms has exactly one entry per dbitm in the
        # same order, so zip pairs them correctly.
        for billitm, dbitm in zip(billitms, dbitms):
            billitm.bill = billhd
            billitm.save()
            dbitm.save()
        return output, billhd
def undo_bill(billid, user):
    """Delete a committed bill, restoring stock for every tracked item.

    Returns a human-readable status message; billid is expected to be a
    string (it is concatenated into the message).
    """
    bill = Bill.objects.get(pk=billid)
    if not bill.is_committed():
        return 'Bill has already been deleted!'
    # Give the sold quantities back to stock-tracked items; items with
    # quantity None are unlimited and need no restoring.
    for entry in bill.billitem_set.all():
        item = entry.item
        if item.quantity is not None:
            item.quantity += entry.quantity
            item.save()
    bill.deleted_by = user.username
    bill.save()
    return 'Bill #' + billid + ' deleted!'
|
mit
|
belmiromoreira/nova
|
nova/console/websocketproxy.py
|
12
|
6728
|
# Copyright (c) 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
Websocket proxy that is compatible with OpenStack Nova.
Leverages websockify.py by Joel Martin
'''
import Cookie
import socket
import sys
import urlparse
from oslo_log import log as logging
import websockify
from nova.consoleauth import rpcapi as consoleauth_rpcapi
from nova import context
from nova import exception
from nova.i18n import _
LOG = logging.getLogger(__name__)
class NovaProxyRequestHandlerBase(object):
    """Mixin adding Nova token authentication and origin validation to a
    websockify proxy request handler.

    Attributes such as ``self.client_address``, ``self.headers``,
    ``self.path``, ``self.socket`` and ``self.do_proxy`` are provided by the
    websockify/BaseHTTPRequestHandler class this is mixed into -- presumably
    websockify.ProxyRequestHandler; verify against the concrete subclass.
    """

    def address_string(self):
        # NOTE(rpodolyaka): override the superclass implementation here and
        # explicitly disable the reverse DNS lookup, which might fail on some
        # deployments due to DNS configuration and break VNC access completely
        return str(self.client_address[0])

    def verify_origin_proto(self, connection_info, origin_proto):
        # Returns True when the browser Origin's scheme is compatible with
        # the scheme of the console's access_url; raises ValidationError when
        # connection_info carries no access_url to compare against.
        access_url = connection_info.get('access_url')
        if not access_url:
            detail = _("No access_url in connection_info. "
                       "Cannot validate protocol")
            raise exception.ValidationError(detail=detail)
        expected_protos = [urlparse.urlparse(access_url).scheme]
        # NOTE: For serial consoles the expected protocol could be ws or
        # wss which correspond to http and https respectively in terms of
        # security.
        if 'ws' in expected_protos:
            expected_protos.append('http')
        if 'wss' in expected_protos:
            expected_protos.append('https')
        return origin_proto in expected_protos

    def new_websocket_client(self):
        """Called after a new WebSocket connection has been established."""
        # Reopen the eventlet hub to make sure we don't share an epoll
        # fd with parent and/or siblings, which would be bad
        from eventlet import hubs
        hubs.use_hub()

        # The nova expected behavior is to have token
        # passed to the method GET of the request
        parse = urlparse.urlparse(self.path)
        if parse.scheme not in ('http', 'https'):
            # From a bug in urlparse in Python < 2.7.4 we cannot support
            # special schemes (cf: http://bugs.python.org/issue9374)
            if sys.version_info < (2, 7, 4):
                raise exception.NovaException(
                    _("We do not support scheme '%s' under Python < 2.7.4, "
                      "please use http or https") % parse.scheme)

        query = parse.query
        token = urlparse.parse_qs(query).get("token", [""]).pop()
        if not token:
            # NoVNC uses it's own convention that forward token
            # from the request to a cookie header, we should check
            # also for this behavior
            hcookie = self.headers.getheader('cookie')
            if hcookie:
                cookie = Cookie.SimpleCookie()
                cookie.load(hcookie)
                if 'token' in cookie:
                    token = cookie['token'].value

        # Validate the token with consoleauth; an unknown/expired token
        # aborts the connection before any backend socket is opened.
        ctxt = context.get_admin_context()
        rpcapi = consoleauth_rpcapi.ConsoleAuthAPI()
        connect_info = rpcapi.check_token(ctxt, token=token)
        if not connect_info:
            raise exception.InvalidToken(token=token)

        # Verify Origin
        expected_origin_hostname = self.headers.getheader('Host')
        if ':' in expected_origin_hostname:
            e = expected_origin_hostname
            # Strip the port; brackets indicate an IPv6 literal host.
            if '[' in e and ']' in e:
                expected_origin_hostname = e.split(']')[0][1:]
            else:
                expected_origin_hostname = e.split(':')[0]
        origin_url = self.headers.getheader('Origin')
        # missing origin header indicates non-browser client which is OK
        if origin_url is not None:
            origin = urlparse.urlparse(origin_url)
            origin_hostname = origin.hostname
            origin_scheme = origin.scheme
            if origin_hostname == '' or origin_scheme == '':
                detail = _("Origin header not valid.")
                raise exception.ValidationError(detail=detail)
            if expected_origin_hostname != origin_hostname:
                detail = _("Origin header does not match this host.")
                raise exception.ValidationError(detail=detail)
            if not self.verify_origin_proto(connect_info, origin_scheme):
                detail = _("Origin header protocol does not match this host.")
                raise exception.ValidationError(detail=detail)

        self.msg(_('connect info: %s'), str(connect_info))
        host = connect_info['host']
        port = int(connect_info['port'])

        # Connect to the target
        self.msg(_("connecting to: %(host)s:%(port)s") % {'host': host,
                                                          'port': port})
        tsock = self.socket(host, port, connect=True)

        # Handshake as necessary
        if connect_info.get('internal_access_path'):
            tsock.send("CONNECT %s HTTP/1.1\r\n\r\n" %
                       connect_info['internal_access_path'])
            # Peek (MSG_PEEK) until the full HTTP response header arrives,
            # then consume it; anything but a 200 status aborts the proxy.
            while True:
                data = tsock.recv(4096, socket.MSG_PEEK)
                if data.find("\r\n\r\n") != -1:
                    if data.split("\r\n")[0].find("200") == -1:
                        raise exception.InvalidConnectionInfo()
                    tsock.recv(len(data))
                    break

        # Start proxying
        try:
            self.do_proxy(tsock)
        except Exception:
            # Shut the backend socket down cleanly before re-raising so the
            # target sees the disconnect.
            if tsock:
                tsock.shutdown(socket.SHUT_RDWR)
                tsock.close()
                self.vmsg(_("%(host)s:%(port)s: Target closed") %
                          {'host': host, 'port': port})
            raise
class NovaProxyRequestHandler(NovaProxyRequestHandlerBase,
                              websockify.ProxyRequestHandler):
    """Concrete handler wiring Nova's auth/origin checks into websockify."""

    def __init__(self, *args, **kwargs):
        # Explicitly initialize the websockify base (not via super()) so the
        # mixin base is bypassed for construction.
        websockify.ProxyRequestHandler.__init__(self, *args, **kwargs)

    def socket(self, *args, **kwargs):
        # Delegate raw socket creation to websockify's server helper.
        return websockify.WebSocketServer.socket(*args, **kwargs)
class NovaWebSocketProxy(websockify.WebSocketProxy):
    """WebSocket proxy that routes websockify's logging to Nova's logger."""

    @staticmethod
    def get_logger():
        # websockify calls this hook to obtain the logger it should use.
        return LOG
|
apache-2.0
|
vacuus/django-codesnip
|
codesnip/migrations/0001_initial.py
|
1
|
13039
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-25 00:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the Snippet model (title, slug, language,
    code, pygmentized)."""

    # First migration of the app: no prior migration to depend on.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Snippet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=255)),
                ('slug', models.SlugField(max_length=255, unique=True)),
                # 'language' choices enumerate every Pygments lexer available
                # when the migration was generated (lexer class name -> label).
                ('language', models.CharField(choices=[('AntlrCppLexer', 'ANTLR With CPP Target'), ('MoonScriptLexer', 'MoonScript'), ('ErbLexer', 'ERB'), ('HtmlSmartyLexer', 'HTML+Smarty'), ('IoLexer', 'Io'), ('MyghtyXmlLexer', 'XML+Myghty'), ('CsoundDocumentLexer', 'Csound Document'), ('EvoqueXmlLexer', 'XML+Evoque'), ('RagelCLexer', 'Ragel in C Host'), ('JsonLdLexer', 'JSON-LD'), ('TeaTemplateLexer', 'Tea'), ('MatlabLexer', 'Matlab'), ('AutohotkeyLexer', 'autohotkey'), ('EiffelLexer', 'Eiffel'), ('Cfengine3Lexer', 'CFEngine3'), ('XtendLexer', 'Xtend'), ('KalLexer', 'Kal'), ('LiterateCryptolLexer', 'Literate Cryptol'), ('RoboconfInstancesLexer', 'Roboconf Instances'), ('ActionScript3Lexer', 'ActionScript 3'), ('VCTreeStatusLexer', 'VCTreeStatus'), ('FishShellLexer', 'Fish'), ('XsltLexer', 'XSLT'), ('ArduinoLexer', 'Arduino'), ('HtmlGenshiLexer', 'HTML+Genshi'), ('RagelObjectiveCLexer', 'Ragel in Objective C Host'), ('MatlabSessionLexer', 'Matlab session'), ('IsabelleLexer', 'Isabelle'), ('CPSALexer', 'CPSA'), ('CsoundScoreLexer', 'Csound Score'), ('VelocityXmlLexer', 'XML+Velocity'), ('ECLLexer', 'ECL'), ('ObjectiveJLexer', 'Objective-J'), ('TurtleLexer', 'Turtle'), ('JsonLexer', 'JSON'), ('EarlGreyLexer', 'Earl Grey'), ('VbNetAspxLexer', 'aspx-vb'), ('TAPLexer', 'TAP'), ('GasLexer', 'GAS'), ('JavascriptGenshiLexer', 'JavaScript+Genshi Text'), ('LlvmLexer', 'LLVM'), ('CrocLexer', 'Croc'), ('NSISLexer', 'NSIS'), ('SqlLexer', 'SQL'), ('OcamlLexer', 'OCaml'), ('RagelLexer', 'Ragel'), ('PraatLexer', 'Praat'), ('SystemVerilogLexer', 'systemverilog'), ('CbmBasicV2Lexer', 'CBM BASIC V2'), ('MonkeyLexer', 'Monkey'), ('CrmshLexer', 'Crmsh'), ('PyPyLogLexer', 'PyPy Log'), ('CypherLexer', 'Cypher'), ('BatchLexer', 'Batchfile'), ('PerlLexer', 'Perl'), ('AspectJLexer', 'AspectJ'), ('KconfigLexer', 'Kconfig'), ('ECLexer', 'eC'), ('BashSessionLexer', 'Bash Session'), ('PikeLexer', 'Pike'), ('CadlLexer', 'cADL'), ('SourcesListLexer', 'Debian Sourcelist'), ('GAPLexer', 'GAP'), ('MxmlLexer', 
'MXML'), ('OpenEdgeLexer', 'OpenEdge ABL'), ('FelixLexer', 'Felix'), ('AntlrPerlLexer', 'ANTLR With Perl Target'), ('CssGenshiLexer', 'CSS+Genshi Text'), ('LiquidLexer', 'liquid'), ('CeylonLexer', 'Ceylon'), ('DjangoLexer', 'Django/Jinja'), ('DylanLexer', 'Dylan'), ('DObjdumpLexer', 'd-objdump'), ('JagsLexer', 'JAGS'), ('CAmkESLexer', 'CAmkES'), ('GroffLexer', 'Groff'), ('Modula2Lexer', 'Modula-2'), ('AntlrObjectiveCLexer', 'ANTLR With ObjectiveC Target'), ('ErlangShellLexer', 'Erlang erl session'), ('CoffeeScriptLexer', 'CoffeeScript'), ('DartLexer', 'Dart'), ('FortranFixedLexer', 'FortranFixed'), ('ComponentPascalLexer', 'Component Pascal'), ('FactorLexer', 'Factor'), ('UrbiscriptLexer', 'UrbiScript'), ('GoLexer', 'Go'), ('VhdlLexer', 'vhdl'), ('Inform6Lexer', 'Inform 6'), ('CSharpLexer', 'C#'), ('QBasicLexer', 'QBasic'), ('RagelJavaLexer', 'Ragel in Java Host'), ('DarcsPatchLexer', 'Darcs Patch'), ('MyghtyJavascriptLexer', 'JavaScript+Myghty'), ('MozPreprocXulLexer', 'XUL+mozpreproc'), ('PowerShellSessionLexer', 'PowerShell Session'), ('JavascriptErbLexer', 'JavaScript+Ruby'), ('MakoCssLexer', 'CSS+Mako'), ('CudaLexer', 'CUDA'), ('RubyLexer', 'Ruby'), ('OocLexer', 'Ooc'), ('AppleScriptLexer', 'AppleScript'), ('SnobolLexer', 'Snobol'), ('TclLexer', 'Tcl'), ('IrcLogsLexer', 'IRC logs'), ('ObjdumpLexer', 'objdump'), ('TextLexer', 'Text only'), ('VimLexer', 'VimL'), ('MoinWikiLexer', 'MoinMoin/Trac Wiki markup'), ('CheetahLexer', 'Cheetah'), ('RdLexer', 'Rd'), ('MakoXmlLexer', 'XML+Mako'), ('EzhilLexer', 'Ezhil'), ('CsoundOrchestraLexer', 'Csound Orchestra'), ('GoodDataCLLexer', 'GoodData-CL'), ('RubyConsoleLexer', 'Ruby irb session'), ('BBCodeLexer', 'BBCode'), ('MSDOSSessionLexer', 'MSDOS Session'), ('Ca65Lexer', 'ca65 assembler'), ('EvoqueLexer', 'Evoque'), ('CMakeLexer', 'CMake'), ('ColdfusionHtmlLexer', 'Coldfusion HTML'), ('RagelRubyLexer', 'Ragel in Ruby Host'), ('ObjectiveCppLexer', 'Objective-C++'), ('BugsLexer', 'BUGS'), ('SlimLexer', 'Slim'), 
('RawTokenLexer', 'Raw token data'), ('JspLexer', 'Java Server Page'), ('GherkinLexer', 'Gherkin'), ('DiffLexer', 'Diff'), ('ErlangLexer', 'Erlang'), ('CythonLexer', 'Cython'), ('AntlrActionScriptLexer', 'ANTLR With ActionScript Target'), ('EvoqueHtmlLexer', 'HTML+Evoque'), ('CObjdumpLexer', 'c-objdump'), ('SMLLexer', 'Standard ML'), ('GoloLexer', 'Golo'), ('LassoXmlLexer', 'XML+Lasso'), ('BoogieLexer', 'Boogie'), ('JavascriptSmartyLexer', 'JavaScript+Smarty'), ('HaskellLexer', 'Haskell'), ('Python3Lexer', 'Python 3'), ('CheetahJavascriptLexer', 'JavaScript+Cheetah'), ('GosuTemplateLexer', 'Gosu Template'), ('SspLexer', 'Scalate Server Page'), ('BashLexer', 'Bash'), ('GLShaderLexer', 'GLSL'), ('GenshiTextLexer', 'Genshi Text'), ('AutoItLexer', 'AutoIt'), ('TerraformLexer', 'Terraform'), ('XmlLexer', 'XML'), ('NemerleLexer', 'Nemerle'), ('AntlrRubyLexer', 'ANTLR With Ruby Target'), ('RustLexer', 'Rust'), ('GenshiLexer', 'Genshi'), ('SmaliLexer', 'Smali'), ('OdinLexer', 'ODIN'), ('HybrisLexer', 'Hybris'), ('TcshLexer', 'Tcsh'), ('APLLexer', 'APL'), ('HaxeLexer', 'Haxe'), ('ModelicaLexer', 'Modelica'), ('NitLexer', 'Nit'), ('HtmlLexer', 'HTML'), ('RstLexer', 'reStructuredText'), ('ClojureScriptLexer', 'ClojureScript'), ('IgorLexer', 'Igor'), ('BlitzMaxLexer', 'BlitzMax'), ('PrologLexer', 'Prolog'), ('NimrodLexer', 'Nimrod'), ('JasminLexer', 'Jasmin'), ('ElixirConsoleLexer', 'Elixir iex session'), ('MyghtyHtmlLexer', 'HTML+Myghty'), ('PigLexer', 'Pig'), ('FortranLexer', 'Fortran'), ('TexLexer', 'TeX'), ('CssLexer', 'CSS'), ('XQueryLexer', 'XQuery'), ('AntlrPythonLexer', 'ANTLR With Python Target'), ('DgLexer', 'dg'), ('DLexer', 'D'), ('HandlebarsLexer', 'Handlebars'), ('AdaLexer', 'Ada'), ('SLexer', 'S'), ('IdrisLexer', 'Idris'), ('LiveScriptLexer', 'LiveScript'), ('HexdumpLexer', 'Hexdump'), ('AdlLexer', 'ADL'), ('MscgenLexer', 'Mscgen'), ('PostScriptLexer', 'PostScript'), ('BooLexer', 'Boo'), ('CheetahXmlLexer', 'XML+Cheetah'), ('OctaveLexer', 'Octave'), 
('ChapelLexer', 'Chapel'), ('Perl6Lexer', 'Perl6'), ('JavascriptDjangoLexer', 'JavaScript+Django/Jinja'), ('DylanConsoleLexer', 'Dylan session'), ('JuliaConsoleLexer', 'Julia console'), ('MakoLexer', 'Mako'), ('RebolLexer', 'REBOL'), ('RoboconfGraphLexer', 'Roboconf Graph'), ('ParaSailLexer', 'ParaSail'), ('SchemeLexer', 'Scheme'), ('ZephirLexer', 'Zephir'), ('JavascriptLexer', 'JavaScript'), ('LeanLexer', 'Lean'), ('ObjectiveCLexer', 'Objective-C'), ('PlPgsqlLexer', 'PL/pgSQL'), ('LighttpdConfLexer', 'Lighttpd configuration file'), ('ColdfusionCFCLexer', 'Coldfusion CFC'), ('CirruLexer', 'Cirru'), ('GosuLexer', 'Gosu'), ('SuperColliderLexer', 'SuperCollider'), ('BnfLexer', 'BNF'), ('FSharpLexer', 'FSharp'), ('BCLexer', 'BC'), ('CobolLexer', 'COBOL'), ('CppObjdumpLexer', 'cpp-objdump'), ('YamlLexer', 'YAML'), ('MakoJavascriptLexer', 'JavaScript+Mako'), ('OpaLexer', 'Opa'), ('ColdfusionLexer', 'cfstatement'), ('TwigHtmlLexer', 'HTML+Twig'), ('CppLexer', 'C++'), ('RPMSpecLexer', 'RPMSpec'), ('PhpLexer', 'PHP'), ('AlloyLexer', 'Alloy'), ('NasmLexer', 'NASM'), ('CryptolLexer', 'Cryptol'), ('LimboLexer', 'Limbo'), ('HttpLexer', 'HTTP'), ('SwigLexer', 'SWIG'), ('FantomLexer', 'Fantom'), ('ClayLexer', 'Clay'), ('JclLexer', 'JCL'), ('MyghtyCssLexer', 'CSS+Myghty'), ('BefungeLexer', 'Befunge'), ('FoxProLexer', 'FoxPro'), ('RslLexer', 'RSL'), ('TodotxtLexer', 'Todotxt'), ('ValaLexer', 'Vala'), ('LassoLexer', 'Lasso'), ('LassoHtmlLexer', 'HTML+Lasso'), ('LiterateIdrisLexer', 'Literate Idris'), ('SmartyLexer', 'Smarty'), ('ScssLexer', 'SCSS'), ('Inform6TemplateLexer', 'Inform 6 template'), ('RhtmlLexer', 'RHTML'), ('RegeditLexer', 'reg'), ('SassLexer', 'Sass'), ('BroLexer', 'Bro'), ('VelocityHtmlLexer', 'HTML+Velocity'), ('EasytrieveLexer', 'Easytrieve'), ('MySqlLexer', 'MySQL'), ('MozPreprocHashLexer', 'mozhashpreproc'), ('KokaLexer', 'Koka'), ('QVToLexer', 'QVTO'), ('BrainfuckLexer', 'Brainfuck'), ('ActionScriptLexer', 'ActionScript'), ('GettextLexer', 'Gettext Catalog'), 
('X10Lexer', 'X10'), ('HtmlDjangoLexer', 'HTML+Django/Jinja'), ('CSharpAspxLexer', 'aspx-cs'), ('IniLexer', 'INI'), ('GroovyLexer', 'Groovy'), ('ABAPLexer', 'ABAP'), ('DtdLexer', 'DTD'), ('CssDjangoLexer', 'CSS+Django/Jinja'), ('DuelLexer', 'Duel'), ('RtsLexer', 'TrafficScript'), ('LuaLexer', 'Lua'), ('KotlinLexer', 'Kotlin'), ('RagelEmbeddedLexer', 'Embedded Ragel'), ('VerilogLexer', 'verilog'), ('LessCssLexer', 'LessCss'), ('VGLLexer', 'VGL'), ('CssErbLexer', 'CSS+Ruby'), ('PkgConfigLexer', 'PkgConfig'), ('HandlebarsHtmlLexer', 'HTML+Handlebars'), ('PuppetLexer', 'Puppet'), ('MakefileLexer', 'Makefile'), ('MakoHtmlLexer', 'HTML+Mako'), ('HamlLexer', 'Haml'), ('SmalltalkLexer', 'Smalltalk'), ('NginxConfLexer', 'Nginx configuration file'), ('RobotFrameworkLexer', 'RobotFramework'), ('HxmlLexer', 'Hxml'), ('PythonConsoleLexer', 'Python console session'), ('DebianControlLexer', 'Debian Control file'), ('IDLLexer', 'IDL'), ('NewspeakLexer', 'Newspeak'), ('ScilabLexer', 'Scilab'), ('MaskLexer', 'Mask'), ('NasmObjdumpLexer', 'objdump-nasm'), ('AgdaLexer', 'Agda'), ('TcshSessionLexer', 'Tcsh Session'), ('MiniDLexer', 'MiniD'), ('RagelDLexer', 'Ragel in D Host'), ('AntlrLexer', 'ANTLR'), ('CssSmartyLexer', 'CSS+Smarty'), ('DockerLexer', 'Docker'), ('NumPyLexer', 'NumPy'), ('BlitzBasicLexer', 'BlitzBasic'), ('AntlrCSharpLexer', 'ANTLR With C# Target'), ('Inform7Lexer', 'Inform 7'), ('AntlrJavaLexer', 'ANTLR With Java Target'), ('PythonLexer', 'Python'), ('PawnLexer', 'Pawn'), ('XmlErbLexer', 'XML+Ruby'), ('Tads3Lexer', 'TADS 3'), ('CobolFreeformatLexer', 'COBOLFree'), ('JavaLexer', 'Java'), ('DylanLidLexer', 'DylanLID'), ('IokeLexer', 'Ioke'), ('PostgresLexer', 'PostgreSQL SQL dialect'), ('EbnfLexer', 'EBNF'), ('PowerShellLexer', 'PowerShell'), ('VelocityLexer', 'Velocity'), ('MozPreprocPercentLexer', 'mozpercentpreproc'), ('RConsoleLexer', 'RConsole'), ('Python3TracebackLexer', 'Python 3.0 Traceback'), ('LSLLexer', 'LSL'), ('CheetahHtmlLexer', 'HTML+Cheetah'), 
('SwiftLexer', 'Swift'), ('NixLexer', 'Nix'), ('MOOCodeLexer', 'MOOCode'), ('FancyLexer', 'Fancy'), ('MqlLexer', 'MQL'), ('BaseMakefileLexer', 'Base Makefile'), ('JadeLexer', 'Jade'), ('QmlLexer', 'QML'), ('ShenLexer', 'Shen'), ('MathematicaLexer', 'Mathematica'), ('MyghtyLexer', 'Myghty'), ('XmlPhpLexer', 'XML+PHP'), ('MaqlLexer', 'MAQL'), ('CLexer', 'C'), ('PovrayLexer', 'POVRay'), ('HyLexer', 'Hy'), ('MuPADLexer', 'MuPAD'), ('CssPhpLexer', 'CSS+PHP'), ('CoqLexer', 'Coq'), ('ScamlLexer', 'Scaml'), ('LiterateAgdaLexer', 'Literate Agda'), ('ApacheConfLexer', 'ApacheConf'), ('MozPreprocJavascriptLexer', 'Javascript+mozpreproc'), ('RedcodeLexer', 'Redcode'), ('LassoCssLexer', 'CSS+Lasso'), ('SourcePawnLexer', 'SourcePawn'), ('GnuplotLexer', 'Gnuplot'), ('NesCLexer', 'nesC'), ('ClojureLexer', 'Clojure'), ('NewLispLexer', 'NewLisp'), ('SparqlLexer', 'SPARQL'), ('HtmlPhpLexer', 'HTML+PHP'), ('DelphiLexer', 'Delphi'), ('EmacsLispLexer', 'EmacsLisp'), ('RexxLexer', 'Rexx'), ('YamlJinjaLexer', 'YAML+Jinja'), ('TermcapLexer', 'Termcap'), ('TreetopLexer', 'Treetop'), ('MozPreprocCssLexer', 'CSS+mozpreproc'), ('PanLexer', 'Pan'), ('TypeScriptLexer', 'TypeScript'), ('ResourceLexer', 'ResourceBundle'), ('ElmLexer', 'Elm'), ('StanLexer', 'Stan'), ('AwkLexer', 'Awk'), ('JuliaLexer', 'Julia'), ('LiterateHaskellLexer', 'Literate Haskell'), ('SquidConfLexer', 'SquidConf'), ('PostgresConsoleLexer', 'PostgreSQL console (psql)'), ('LogosLexer', 'Logos'), ('PacmanConfLexer', 'PacmanConf'), ('PythonTracebackLexer', 'Python Traceback'), ('LassoJavascriptLexer', 'JavaScript+Lasso'), ('XmlDjangoLexer', 'XML+Django/Jinja'), ('ChaiscriptLexer', 'ChaiScript'), ('JavascriptPhpLexer', 'JavaScript+PHP'), ('PropertiesLexer', 'Properties'), ('ElixirLexer', 'Elixir'), ('ScalaLexer', 'Scala'), ('RagelCppLexer', 'Ragel in CPP Host'), ('CommonLispLexer', 'Common Lisp'), ('AsymptoteLexer', 'Asymptote'), ('RqlLexer', 'RQL'), ('ThriftLexer', 'Thrift'), ('RacketLexer', 'Racket'), ('TerminfoLexer', 
'Terminfo'), ('RedLexer', 'Red'), ('JLexer', 'J'), ('AbnfLexer', 'ABNF'), ('VbNetLexer', 'VB.net'), ('SqliteConsoleLexer', 'sqlite3con'), ('ProtoBufLexer', 'Protocol Buffer'), ('MasonLexer', 'Mason'), ('XmlSmartyLexer', 'XML+Smarty'), ('LogtalkLexer', 'Logtalk'), ('TwigLexer', 'Twig'), ('AmbientTalkLexer', 'AmbientTalk')], max_length=255)),
                ('code', models.TextField()),
                # Cached highlighted HTML; may be empty until rendered.
                ('pygmentized', models.TextField(blank=True)),
            ],
        ),
    ]
|
lgpl-3.0
|
pepeportela/edx-platform
|
common/lib/xmodule/xmodule/block_metadata_utils.py
|
32
|
3027
|
"""
Simple utility functions that operate on block metadata.
This is a place to put simple functions that operate on block metadata. It
allows us to share code between the XModuleMixin and CourseOverview and
BlockStructure.
"""
def url_name_for_block(block):
    """
    Return the URL name of the given block.

    Arguments:
        block (XModuleMixin|CourseOverview|BlockStructureBlockData):
            Block that is being accessed
    """
    location = block.location
    return location.name
def display_name_with_default(block):
    """
    Calculate the display name for a block.

    Uses ``display_name`` when it is not None; otherwise falls back to the
    block's URL name (``block.location.name``) with underscores replaced by
    spaces.

    Unlike the rest of this module's functions, this function takes an entire
    course descriptor/overview as a parameter. A few test cases create
    scenarios where course.display_name is not None but course.location is
    None, which makes course.url_name fail -- hence the whole block is passed
    rather than its individual fields.

    Note: This method no longer escapes as it once did, so the caller must
    ensure it is properly escaped where necessary.

    Arguments:
        block (XModuleMixin|CourseOverview|BlockStructureBlockData):
            Block that is being accessed
    """
    name = block.display_name
    if name is not None:
        return name
    # Fall back to the URL name (equivalent to url_name_for_block), prettified.
    return block.location.name.replace('_', ' ')
def display_name_with_default_escaped(block):
    """
    DEPRECATED: use display_name_with_default
    Calculates the display name for a block with some HTML escaping.
    This follows the same logic as display_name_with_default, with
    the addition of the escaping.
    Here is an example of how to move away from this method in Mako html:
    Before:
        <span class="course-name">${course.display_name_with_default_escaped}</span>
    After:
        <span class="course-name">${course.display_name_with_default | h}</span>
    If the context is Javascript in Mako, you'll need to follow other best practices.
    Note: Switch to display_name_with_default, and ensure the caller
    properly escapes where necessary.
    Note: This newly introduced method should not be used. It was only
    introduced to enable a quick search/replace and the ability to slowly
    migrate and test switching to display_name_with_default, which is no
    longer escaped.
    Arguments:
        block (XModuleMixin|CourseOverview|BlockStructureBlockData):
            Block that is being accessed
    """
    # This escaping is incomplete (quotes and ampersands are untouched).
    # However, rather than switching this to use markupsafe.escape() and
    # fixing issues, better to put that energy toward migrating away from
    # this method altogether.
    # Bug fix: the replacements previously mapped '<' -> '<' and '>' -> '>'
    # (no-ops -- the HTML entities had been lost, so nothing was escaped at
    # all); they now emit the entities this method's contract promises.
    return (
        display_name_with_default(block)
        .replace('<', '&lt;')
        .replace('>', '&gt;')
    )
|
agpl-3.0
|
Eric89GXL/vispy
|
examples/demo/gloo/quiver.py
|
2
|
2368
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier. All Rights Reserved.
# Distributed under the (new) BSD License.
# -----------------------------------------------------------------------------
# Quiver demo: draws a rows x cols grid of arrows, each rotated to point
# away from the mouse cursor.  All arrow rendering happens in the fragment
# shader over a single full-screen quad.
from vispy import app, gloo
# Trivial pass-through vertex shader: positions are already in clip space.
vertex = """
attribute vec2 position;
void main()
{
    gl_Position = vec4(position, 0.0, 1.0);
}
"""
# Fragment shader: for each fragment, find the centre of its grid cell,
# rotate the local frame toward the mouse position, and shade a signed
# distance "stealth" arrow.  NOTE(review): the #include lines appear to be
# resolved by vispy's GLSL preprocessing (not standard GLSL) -- confirm.
fragment = """
#include "math/constants.glsl"
#include "arrows/arrows.glsl"
#include "antialias/antialias.glsl"
uniform vec2 iResolution;
uniform vec2 iMouse;
void main()
{
    const float M_PI = 3.14159265358979323846;
    const float SQRT_2 = 1.4142135623730951;
    const float linewidth = 3.0;
    const float antialias = 1.0;
    const float rows = 32.0;
    const float cols = 32.0;
    float body = min(iResolution.x/cols, iResolution.y/rows) / SQRT_2;
    vec2 texcoord = gl_FragCoord.xy;
    vec2 size = iResolution.xy / vec2(cols,rows);
    vec2 center = (floor(texcoord/size) + vec2(0.5,0.5)) * size;
    texcoord -= center;
    float theta = M_PI-atan(center.y-iMouse.y, center.x-iMouse.x);
    float cos_theta = cos(theta);
    float sin_theta = sin(theta);
    texcoord = vec2(cos_theta*texcoord.x - sin_theta*texcoord.y,
                    sin_theta*texcoord.x + cos_theta*texcoord.y);
    float d = arrow_stealth(texcoord, body, 0.25*body, linewidth, antialias);
    gl_FragColor = filled(d, linewidth, antialias, vec4(0,0,0,1));
}
"""
canvas = app.Canvas(size=(2*512, 2*512), keys='interactive')
# Alpha blending so the arrows' antialiased edges composite correctly.
canvas.context.set_state(blend=True,
                         blend_func=('src_alpha', 'one_minus_src_alpha'),
                         blend_equation='func_add')
@canvas.connect
def on_draw(event):
    """Clear to white and redraw the full-screen quad."""
    gloo.clear('white')
    program.draw('triangle_strip')
@canvas.connect
def on_resize(event):
    """Keep the shader's resolution uniform and the GL viewport in sync."""
    program["iResolution"] = event.size
    gloo.set_viewport(0, 0, event.size[0], event.size[1])
@canvas.connect
def on_mouse_move(event):
    """Forward the mouse position, flipped to GL's bottom-left origin."""
    x, y = event.pos
    program["iMouse"] = x, canvas.size[1] - y
    canvas.update()
# The callbacks above close over `program`, which is only created here;
# they are not invoked until the event loop starts, so this ordering is safe.
program = gloo.Program(vertex, fragment, count=4)
dx, dy = 1, 1
program['position'] = (-dx, -dy), (-dx, +dy), (+dx, -dy), (+dx, +dy)
program["iResolution"] = (2 * 512, 2 * 512)
program["iMouse"] = (0., 0.)
if __name__ == '__main__':
    canvas.show()
    app.run()
|
bsd-3-clause
|
pytroll/pygac
|
pygac/gac_io.py
|
1
|
27380
|
#!/usr/bin/env python
# Copyright (c) 2012, 2014 Abhay Devasthale
# Author(s):
# Abhay Devasthale <abhay.devasthale@smhi.se>
# Adam Dybbroe <adam.dybbroe@smhi.se>
# Sara Hornquist <sara.hornquist@smhi.se>
# Martin Raspaud <martin.raspaud@smhi.se>
# Carlos Horn <carlos.horn@external.eumetsat.int>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import calendar
import datetime
import logging
import os
import time
import h5py
import numpy as np
from pygac.utils import slice_channel, strip_invalid_lat, check_user_scanlines
LOG = logging.getLogger(__name__)
# Fill value written into channel/angle datasets where data is missing/invalid.
MISSING_DATA = -32001
# Fill value for invalid latitude/longitude samples; lat/lon are stored
# scaled by 1000 (see save_gac), so this sits far outside the valid range.
MISSING_DATA_LATLON = -999999
def save_gac(satellite_name,
             xutcs,
             lats, lons,
             ref1, ref2, ref3,
             bt3, bt4, bt5,
             sun_zen, sat_zen, sun_azi, sat_azi, rel_azi,
             qual_flags, start_line, end_line,
             gac_file, meta_data,
             output_file_prefix, avhrr_dir, qual_dir, sunsatangles_dir):
    """Trim, scale and write one GAC orbit to the HDF5 output files.

    Leading/trailing scanlines with invalid lat/lon are stripped, the user
    supplied start/end lines are validated against that range, every
    channel/angle/coordinate array is sliced to the final line range, the
    physical values are converted to the scaled integer representation
    used on file (factor 100 for channels/angles after BT offset removal,
    factor 1000 for lat/lon) and NaNs are replaced by the fill values.
    The actual file writing is delegated to ``avhrrGAC_io``.

    Arguments largely mirror ``avhrrGAC_io``; ``meta_data`` must provide
    the keys 'midnight_scanline', 'missing_scanlines' and
    'sun_earth_distance_correction_factor'.
    """
    midnight_scanline = meta_data['midnight_scanline']
    miss_lines = meta_data['missing_scanlines']
    corr = meta_data['sun_earth_distance_correction_factor']
    last_scan_line_number = qual_flags[-1, 0]

    # Strip invalid coordinates
    first_valid_lat, last_valid_lat = strip_invalid_lat(lats)
    if first_valid_lat > start_line:
        LOG.info('New start_line chosen (due to invalid lat/lon '
                 'info) = ' + str(first_valid_lat))
    if end_line > last_valid_lat:
        LOG.info('New end_line chosen (due to invalid lat/lon '
                 'info) = ' + str(last_valid_lat))

    # Check user-defined scanlines
    start_line, end_line = check_user_scanlines(
        start_line=start_line,
        end_line=end_line,
        first_valid_lat=first_valid_lat,
        last_valid_lat=last_valid_lat)

    # One dummy-channel call updates miss_lines/midnight_scanline for the
    # new line range (they are only recomputed when qual_flags are passed).
    _, miss_lines, midnight_scanline = slice_channel(
        np.zeros(lats.shape),
        start_line=start_line,
        end_line=end_line,
        first_valid_lat=first_valid_lat,
        last_valid_lat=last_valid_lat,
        qual_flags=qual_flags,
        miss_lines=miss_lines,
        midnight_scanline=midnight_scanline)

    def _sliced(arr):
        # Slice a single array to the selected scanline range; replaces the
        # 15 near-identical slice_channel call sites of the original code.
        data, _, _ = slice_channel(arr,
                                   start_line=start_line,
                                   end_line=end_line,
                                   first_valid_lat=first_valid_lat,
                                   last_valid_lat=last_valid_lat)
        return data

    (ref1, ref2, ref3, bt3, bt4, bt5,
     sun_zen, sun_azi, sat_zen, sat_azi, rel_azi,
     lons, lats, qual_flags, xutcs) = [
        _sliced(arr) for arr in
        (ref1, ref2, ref3, bt3, bt4, bt5,
         sun_zen, sun_azi, sat_zen, sat_azi, rel_azi,
         lons, lats, qual_flags, xutcs)]
    total_number_of_scan_lines = lats.shape[0]

    # Reading time from the body of the gac file
    start = xutcs[0].astype(datetime.datetime)
    end = xutcs[-1].astype(datetime.datetime)
    startdate = start.strftime("%Y%m%d")
    starttime = start.strftime("%H%M%S%f")[:-5]
    enddate = end.strftime("%Y%m%d")
    endtime = end.strftime("%H%M%S%f")[:-5]

    # Apply scaling & offset: files store scaled integers (see avhrrGAC_io
    # gain/offset attributes), so BTs lose the 273.15 K offset and all
    # channels/angles are scaled by 100, lat/lon by 1000.
    bt3 -= 273.15
    bt4 -= 273.15
    bt5 -= 273.15
    for array in [bt3, bt4, bt5, ref1, ref2, ref3, sun_zen, sat_zen, sun_azi,
                  sat_azi, rel_azi]:
        array *= 100.0
    for array in [lats, lons]:
        array *= 1000.0

    # Replace NaN with fill values
    for array in [ref1, ref2, ref3, bt3, bt4, bt5, sun_zen, sat_zen, sun_azi,
                  sat_azi, rel_azi]:
        array[np.isnan(array)] = MISSING_DATA
    for array in [lats, lons]:
        array[np.isnan(array)] = MISSING_DATA_LATLON

    avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime,
                lats, lons, ref1, ref2, ref3, bt3, bt4, bt5,
                sun_zen, sat_zen, sun_azi, sat_azi, rel_azi, qual_flags,
                start_line, end_line, total_number_of_scan_lines,
                last_scan_line_number, corr, gac_file, midnight_scanline,
                miss_lines, output_file_prefix, avhrr_dir, qual_dir, sunsatangles_dir)
def avhrrGAC_io(satellite_name, xutcs, startdate, enddate, starttime, endtime,
                arrLat_full, arrLon_full, ref1, ref2, ref3, bt3, bt4, bt5,
                arrSZA, arrSTZ, arrSAA, arrSTA, arrRAA, qual_flags,
                start_line, end_line, total_number_of_scan_lines,
                last_scan_line_number, corr, gac_file, midnight_scanline,
                miss_lines, output_file_prefix, avhrr_dir, qual_dir, sunsatangles_dir):
    """Write one GAC orbit to three HDF5 files: AVHRR channel data,
    sun/satellite viewing angles, and scanline quality flags.

    Channel/angle arrays are expected to already hold scaled integers
    (gain 0.01; BTs offset by 273.15 K) and lat/lon to be scaled by 1000
    (gain 0.001), with MISSING_DATA / MISSING_DATA_LATLON fill values in
    place -- see ``save_gac``, which prepares the arrays and calls this.
    Datasets and attributes are laid out in /image*/what, /where and /how
    groups; the orbit number is hardcoded to 99999 in both the attributes
    and the file names.
    """
    # Calculate start and end time in sec1970
    t_obj = time.strptime(startdate + starttime[0:6], "%Y%m%d%H%M%S")
    starttime_sec1970 = calendar.timegm(t_obj)
    t_obj = time.strptime(enddate + endtime[0:6], "%Y%m%d%H%M%S")
    endtime_sec1970 = calendar.timegm(t_obj)
    LOG.info('Output file prefix = ' + str(output_file_prefix))

    # --- File 1: AVHRR channel data -------------------------------------
    LOG.info('AVHRR data will be written to ' + str(avhrr_dir))
    ofn = os.path.join(avhrr_dir, (output_file_prefix + '_avhrr_' +
                                   satellite_name + '_99999_' +
                                   startdate + 'T' + starttime + 'Z_' +
                                   enddate + 'T' + endtime + 'Z.h5'))
    LOG.info('Filename: ' + str(os.path.basename(ofn)))
    fout = h5py.File(ofn, "w")
    dset1 = fout.create_dataset("/image1/data", dtype='int16', data=ref1)
    dset2 = fout.create_dataset("/image2/data", dtype='int16', data=ref2)
    dset3 = fout.create_dataset("/image3/data", dtype='int16', data=bt3)
    dset4 = fout.create_dataset("/image4/data", dtype='int16', data=bt4)
    dset5 = fout.create_dataset("/image5/data", dtype='int16', data=bt5)
    dset6 = fout.create_dataset("/image6/data", dtype='int16', data=ref3)
    dset7 = fout.create_dataset("/where/lat/data", dtype='int32',
                                data=arrLat_full)
    dset8 = fout.create_dataset("/where/lon/data", dtype='int32',
                                data=arrLon_full)
    # Only dset8 is explicitly deleted; dset1..7 simply go out of scope.
    del dset8
    channellist = []
    channellist.append("channel1".encode('utf8'))
    channellist.append("channel2".encode('utf8'))
    channellist.append("channel3b".encode('utf8'))
    channellist.append("channel4".encode('utf8'))
    channellist.append("channel5".encode('utf8'))
    channellist.append("channel3a".encode('utf8'))
    dset10 = fout.create_dataset("/how/channel_list",
                                 data=channellist)
    del dset10
    # Attributes directly on highest level groups
    g1 = fout.require_group("/image1")
    g2 = fout.require_group("/image2")
    g3 = fout.require_group("/image3")
    g4 = fout.require_group("/image4")
    g5 = fout.require_group("/image5")
    g6 = fout.require_group("/image6")
    g7 = fout.require_group("/where")
    g1.attrs["channel"] = np.string_("1")
    g1.attrs["description"] = np.string_("AVHRR ch1")
    g2.attrs["channel"] = np.string_("2")
    g2.attrs["description"] = np.string_("AVHRR ch2")
    g3.attrs["channel"] = np.string_("3b")
    g3.attrs["description"] = np.string_("AVHRR ch3b")
    g4.attrs["channel"] = np.string_("4")
    g4.attrs["description"] = np.string_("AVHRR ch4")
    g5.attrs["channel"] = np.string_("5")
    g5.attrs["description"] = np.string_("AVHRR ch5")
    g6.attrs["channel"] = np.string_("3a")
    g6.attrs["description"] = np.string_("AVHRR ch3a")
    g7.attrs["num_of_pixels"] = np.int32(arrSZA.shape[1])
    g7.attrs["num_of_lines"] = np.int32(arrSZA.shape[0])
    g7.attrs["xscale"] = np.float32(0.0)
    g7.attrs["yscale"] = np.float32(0.0)
    g7.attrs["start_line"] = start_line
    g7.attrs["end_line"] = end_line
    # Attributes in the 'what' groups
    g1 = fout.create_group("/image1/what")
    g2 = fout.create_group("/image2/what")
    g3 = fout.create_group("/image3/what")
    g4 = fout.create_group("/image4/what")
    g5 = fout.create_group("/image5/what")
    g6 = fout.create_group("/image6/what")
    g7 = fout.create_group("/where/lat/what")
    g8 = fout.create_group("/where/lon/what")
    g9 = fout.create_group("/what")
    g1.attrs["product"] = np.string_("SATCH")
    g1.attrs["quantity"] = np.string_("REFL")
    g1.attrs["dataset_name"] = np.string_('Channel 1 reflectance')
    g1.attrs["units"] = np.string_('%')
    g1.attrs["gain"] = np.float32(0.01)
    g1.attrs["offset"] = np.float32(0.0)
    g1.attrs["missingdata"] = np.int32(MISSING_DATA)
    g1.attrs["nodata"] = np.int32(MISSING_DATA)
    g1.attrs["starttime"] = np.string_(starttime[0:6])
    g1.attrs["endtime"] = np.string_(endtime[0:6])
    g1.attrs["startdate"] = np.string_(startdate)
    g1.attrs["enddate"] = np.string_(enddate)
    g2.attrs["product"] = np.string_("SATCH")
    g2.attrs["quantity"] = np.string_("REFL")
    g2.attrs["dataset_name"] = np.string_('Channel 2 reflectance')
    g2.attrs["units"] = np.string_('%')
    g2.attrs["gain"] = np.float32(0.01)
    g2.attrs["offset"] = np.float32(0.0)
    g2.attrs["missingdata"] = np.int32(MISSING_DATA)
    g2.attrs["nodata"] = np.int32(MISSING_DATA)
    g2.attrs["starttime"] = np.string_(starttime[0:6])
    g2.attrs["endtime"] = np.string_(endtime[0:6])
    g2.attrs["startdate"] = np.string_(startdate)
    g2.attrs["enddate"] = np.string_(enddate)
    g6.attrs["product"] = np.string_("SATCH")
    g6.attrs["quantity"] = np.string_("REFL")
    g6.attrs["dataset_name"] = np.string_('Channel 3a reflectance')
    g6.attrs["units"] = np.string_('%')
    g6.attrs["gain"] = np.float32(0.01)
    g6.attrs["offset"] = np.float32(0.0)
    g6.attrs["missingdata"] = np.int32(MISSING_DATA)
    g6.attrs["nodata"] = np.int32(MISSING_DATA)
    g6.attrs["starttime"] = np.string_(starttime[0:6])
    g6.attrs["endtime"] = np.string_(endtime[0:6])
    g6.attrs["startdate"] = np.string_(startdate)
    g6.attrs["enddate"] = np.string_(enddate)
    g3.attrs["product"] = np.string_("SATCH")
    g3.attrs["quantity"] = np.string_("TB")
    g3.attrs["dataset_name"] = np.string_('Channel 3b brightness temperature')
    g3.attrs["units"] = np.string_('K')
    g3.attrs["gain"] = np.float32(0.01)
    g3.attrs["offset"] = np.float32(273.15)
    g3.attrs["missingdata"] = np.int32(MISSING_DATA)
    g3.attrs["nodata"] = np.int32(MISSING_DATA)
    g3.attrs["starttime"] = np.string_(starttime[0:6])
    g3.attrs["endtime"] = np.string_(endtime[0:6])
    g3.attrs["startdate"] = np.string_(startdate)
    g3.attrs["enddate"] = np.string_(enddate)
    g4.attrs["product"] = np.string_("SATCH")
    g4.attrs["quantity"] = np.string_("TB")
    g4.attrs["dataset_name"] = np.string_('Channel 4 brightness temperature')
    g4.attrs["units"] = np.string_('K')
    g4.attrs["gain"] = np.float32(0.01)
    g4.attrs["offset"] = np.float32(273.15)
    g4.attrs["missingdata"] = np.int32(MISSING_DATA)
    g4.attrs["nodata"] = np.int32(MISSING_DATA)
    g4.attrs["starttime"] = np.string_(starttime[0:6])
    g4.attrs["endtime"] = np.string_(endtime[0:6])
    g4.attrs["startdate"] = np.string_(startdate)
    g4.attrs["enddate"] = np.string_(enddate)
    g5.attrs["product"] = np.string_("SATCH")
    g5.attrs["quantity"] = np.string_("TB")
    g5.attrs["dataset_name"] = np.string_('Channel 5 brightness temperature')
    g5.attrs["units"] = np.string_('K')
    g5.attrs["gain"] = np.float32(0.01)
    g5.attrs["offset"] = np.float32(273.15)
    g5.attrs["missingdata"] = np.int32(MISSING_DATA)
    g5.attrs["nodata"] = np.int32(MISSING_DATA)
    g5.attrs["starttime"] = np.string_(starttime[0:6])
    g5.attrs["endtime"] = np.string_(endtime[0:6])
    g5.attrs["startdate"] = np.string_(startdate)
    g5.attrs["enddate"] = np.string_(enddate)
    g7.attrs["dataset_name"] = np.string_('Latitude')
    g7.attrs["units"] = np.string_('Deg')
    g7.attrs["gain"] = np.float32(0.0010)
    g7.attrs["offset"] = np.float32(0.0)
    g7.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON)
    g7.attrs["nodata"] = np.int32(MISSING_DATA_LATLON)
    g7.attrs["starttime"] = np.string_(starttime[0:6])
    g7.attrs["endtime"] = np.string_(endtime[0:6])
    g7.attrs["startdate"] = np.string_(startdate)
    g7.attrs["enddate"] = np.string_(enddate)
    g8.attrs["dataset_name"] = np.string_('Longitude')
    g8.attrs["units"] = np.string_('Deg')
    g8.attrs["gain"] = np.float32(0.0010)
    g8.attrs["offset"] = np.float32(0.0)
    g8.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON)
    g8.attrs["nodata"] = np.int32(MISSING_DATA_LATLON)
    g8.attrs["starttime"] = np.string_(starttime[0:6])
    g8.attrs["endtime"] = np.string_(endtime[0:6])
    g8.attrs["startdate"] = np.string_(startdate)
    g8.attrs["enddate"] = np.string_(enddate)
    g9.attrs["object"] = np.string_("SATP")
    g9.attrs["sets"] = np.int32(len(channellist))
    g9.attrs["version"] = np.string_("H5rad ?.?")
    g9.attrs["date"] = np.string_(startdate)
    g9.attrs["time"] = np.string_(starttime[0:6])
    # Attributes in the 'how' groups
    g1 = fout.create_group("/image1/how")
    g2 = fout.create_group("/image2/how")
    g3 = fout.create_group("/image3/how")
    g4 = fout.create_group("/image4/how")
    g5 = fout.create_group("/image5/how")
    g6 = fout.create_group("/image6/how")
    g10 = fout.require_group("/how")
    # SHq: Is the sun_earth_distance correction applied?
    g1.attrs["sun_earth_distance_correction_applied"] = np.string_("TRUE")
    g1.attrs["sun_earth_distance_correction_factor"] = corr
    g2.attrs["sun_earth_distance_correction_applied"] = np.string_("TRUE")
    g2.attrs["sun_earth_distance_correction_factor"] = corr
    # No attributes on 'how' for image3,4,5
    g6.attrs["sun_earth_distance_correction_applied"] = np.string_("TRUE")
    g6.attrs["sun_earth_distance_correction_factor"] = corr
    # We do not know much about how; mostly use no-data
    g10.attrs["yaw_error"] = 0.0
    g10.attrs["roll_error"] = 0.0
    g10.attrs["pitch_error"] = 0.0
    g10.attrs["startepochs"] = starttime_sec1970
    g10.attrs["endepochs"] = endtime_sec1970
    g10.attrs["platform"] = np.string_(satellite_name)
    g10.attrs["instrument"] = np.string_("avhrr")
    g10.attrs["orbit_number"] = np.int32(99999)
    g10.attrs["gac_file"] = np.string_(gac_file)
    g10.attrs["software"] = np.string_("pyGAC")
    g10.attrs["version"] = np.string_("1.0")
    fout.close()

    # --- File 2: sun/satellite viewing angles ---------------------------
    LOG.info('Sun and Satellite viewing angles will be ' +
             'written to ' + str(sunsatangles_dir))
    ofn = os.path.join(sunsatangles_dir,
                       (output_file_prefix + '_sunsatangles_' +
                        satellite_name + '_99999_' + startdate +
                        'T' + starttime + 'Z_' +
                        enddate + 'T' + endtime + 'Z.h5'))
    LOG.info('Filename: ' + str(os.path.basename(ofn)))
    fout = h5py.File(ofn, "w")
    dset1 = fout.create_dataset("/image1/data", dtype='int16', data=arrSZA)
    dset2 = fout.create_dataset("/image2/data", dtype='int16', data=arrSTZ)
    dset3 = fout.create_dataset("/image3/data", dtype='int16', data=arrRAA)
    dset4 = fout.create_dataset("/image4/data", dtype='int16', data=arrSAA)
    dset5 = fout.create_dataset("/image5/data", dtype='int16', data=arrSTA)
    dset6 = fout.create_dataset("/where/lat/data", dtype='int32',
                                data=arrLat_full)
    dset7 = fout.create_dataset("/where/lon/data", dtype='int32',
                                data=arrLon_full)
    del dset4, dset5, dset6, dset7
    # Attributes directly on highest level groups
    g1 = fout.require_group("/image1")
    g2 = fout.require_group("/image2")
    g3 = fout.require_group("/image3")
    g4 = fout.require_group("/image4")
    g5 = fout.require_group("/image5")
    g6 = fout.require_group("/where")
    g1.attrs["description"] = np.string_('Solar zenith angle')
    g2.attrs["description"] = np.string_('Satellite zenith angle')
    g3.attrs["description"] = np.string_(
        'Relative satellite-sun azimuth angle')
    g4.attrs["description"] = np.string_('Solar azimuth angle')
    g5.attrs["description"] = np.string_('Satellite azimuth angle')
    g6.attrs["num_of_pixels"] = np.int32(arrSZA.shape[1])
    g6.attrs["num_of_lines"] = np.int32(arrSZA.shape[0])
    g6.attrs["xscale"] = np.float32(0.0)
    g6.attrs["yscale"] = np.float32(0.0)
    g6.attrs["start_line"] = start_line
    g6.attrs["end_line"] = end_line
    # Attributes in the 'what' groups + 'how'
    g1 = fout.create_group("/image1/what")
    g2 = fout.create_group("/image2/what")
    g3 = fout.create_group("/image3/what")
    g4 = fout.create_group("/image4/what")
    g5 = fout.create_group("/image5/what")
    g6 = fout.create_group("/where/lat/what")
    g7 = fout.create_group("/where/lon/what")
    g8 = fout.create_group("/what")
    g9 = fout.create_group("/how")
    g1.attrs["product"] = np.string_("SUNZ")
    g1.attrs["quantity"] = np.string_("DEG")
    g1.attrs["dataset_name"] = np.string_('Solar zenith angle')
    g1.attrs["units"] = np.string_('Deg')
    g1.attrs["gain"] = np.float32(0.01)
    g1.attrs["offset"] = np.float32(0.0)
    g1.attrs["missingdata"] = np.int32(MISSING_DATA)
    g1.attrs["nodata"] = np.int32(MISSING_DATA)
    g1.attrs["starttime"] = np.string_(starttime[0:6])
    g1.attrs["endtime"] = np.string_(endtime[0:6])
    g1.attrs["startdate"] = np.string_(startdate)
    g1.attrs["enddate"] = np.string_(enddate)
    g2.attrs["product"] = np.string_("SATZ")
    g2.attrs["quantity"] = np.string_("DEG")
    g2.attrs["dataset_name"] = np.string_('Satellite zenith angle')
    g2.attrs["units"] = np.string_('Deg')
    g2.attrs["gain"] = np.float32(0.01)
    g2.attrs["offset"] = np.float32(0.0)
    g2.attrs["missingdata"] = np.int32(MISSING_DATA)
    g2.attrs["nodata"] = np.int32(MISSING_DATA)
    g2.attrs["starttime"] = np.string_(starttime[0:6])
    g2.attrs["endtime"] = np.string_(endtime[0:6])
    g2.attrs["startdate"] = np.string_(startdate)
    g2.attrs["enddate"] = np.string_(enddate)
    g3.attrs["product"] = np.string_("SSAZD")
    g3.attrs["quantity"] = np.string_("DEG")
    g3.attrs["dataset_name"] = np.string_(
        'Relative satellite-sun azimuth angle')
    g3.attrs["units"] = np.string_('Deg')
    g3.attrs["gain"] = np.float32(0.01)
    g3.attrs["offset"] = np.float32(0.0)
    g3.attrs["missingdata"] = np.int32(MISSING_DATA)
    g3.attrs["nodata"] = np.int32(MISSING_DATA)
    g3.attrs["starttime"] = np.string_(starttime[0:6])
    g3.attrs["endtime"] = np.string_(endtime[0:6])
    g3.attrs["startdate"] = np.string_(startdate)
    g3.attrs["enddate"] = np.string_(enddate)
    g4.attrs["product"] = np.string_("SUNA")
    g4.attrs["quantity"] = np.string_("DEG")
    g4.attrs["dataset_name"] = np.string_('Solar azimuth angle')
    g4.attrs["units"] = np.string_('Deg')
    g4.attrs["gain"] = np.float32(0.01)
    g4.attrs["offset"] = np.float32(0.0)
    g4.attrs["missingdata"] = np.int32(MISSING_DATA)
    g4.attrs["nodata"] = np.int32(MISSING_DATA)
    g4.attrs["starttime"] = np.string_(starttime[0:6])
    g4.attrs["endtime"] = np.string_(endtime[0:6])
    g4.attrs["startdate"] = np.string_(startdate)
    g4.attrs["enddate"] = np.string_(enddate)
    g5.attrs["product"] = np.string_("SATA")
    g5.attrs["quantity"] = np.string_("DEG")
    g5.attrs["dataset_name"] = np.string_('Satellite azimuth angle')
    g5.attrs["units"] = np.string_('Deg')
    g5.attrs["gain"] = np.float32(0.01)
    g5.attrs["offset"] = np.float32(0.0)
    g5.attrs["missingdata"] = np.int32(MISSING_DATA)
    g5.attrs["nodata"] = np.int32(MISSING_DATA)
    g5.attrs["starttime"] = np.string_(starttime[0:6])
    g5.attrs["endtime"] = np.string_(endtime[0:6])
    g5.attrs["startdate"] = np.string_(startdate)
    g5.attrs["enddate"] = np.string_(enddate)
    g6.attrs["dataset_name"] = np.string_('Latitude')
    g6.attrs["units"] = np.string_('Deg')
    g6.attrs["gain"] = np.float32(0.0010)
    g6.attrs["offset"] = np.float32(0.0)
    g6.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON)
    g6.attrs["nodata"] = np.int32(MISSING_DATA_LATLON)
    g6.attrs["starttime"] = np.string_(starttime[0:6])
    g6.attrs["endtime"] = np.string_(endtime[0:6])
    g6.attrs["startdate"] = np.string_(startdate)
    g6.attrs["enddate"] = np.string_(enddate)
    g7.attrs["dataset_name"] = np.string_('Longitude')
    g7.attrs["units"] = np.string_('Deg')
    g7.attrs["gain"] = np.float32(0.0010)
    g7.attrs["offset"] = np.float32(0.0)
    g7.attrs["missingdata"] = np.int32(MISSING_DATA_LATLON)
    g7.attrs["nodata"] = np.int32(MISSING_DATA_LATLON)
    g7.attrs["starttime"] = np.string_(starttime[0:6])
    g7.attrs["endtime"] = np.string_(endtime[0:6])
    g7.attrs["startdate"] = np.string_(startdate)
    g7.attrs["enddate"] = np.string_(enddate)
    g8.attrs["object"] = np.string_("SATP")
    g8.attrs["sets"] = np.int32(5)
    g8.attrs["version"] = np.string_("H5rad ?.?")
    g8.attrs["date"] = np.string_(startdate)
    g8.attrs["time"] = np.string_(starttime[0:6])
    # We do not know much about how; mostly use no-data
    g9.attrs["yaw_error"] = 0.0
    g9.attrs["roll_error"] = 0.0
    g9.attrs["pitch_error"] = 0.0
    g9.attrs["startepochs"] = starttime_sec1970
    g9.attrs["endepochs"] = endtime_sec1970
    g9.attrs["platform"] = np.string_(satellite_name)
    g9.attrs["instrument"] = np.string_("avhrr")
    g9.attrs["orbit_number"] = np.int32(99999)
    g9.attrs["gac_file"] = np.string_(gac_file)
    g9.attrs["software"] = np.string_("pyGAC")
    g9.attrs["version"] = np.string_("1.0")
    fout.close()

    # --- File 3: scanline quality flags ---------------------------------
    LOG.info('Quality flags will be ' +
             'written to ' + str(qual_dir))
    ofn = os.path.join(qual_dir,
                       (output_file_prefix + '_qualflags_' +
                        satellite_name + '_99999_' + startdate +
                        'T' + starttime + 'Z_' +
                        enddate + 'T' + endtime + 'Z.h5'))
    LOG.info('Filename: ' + str(os.path.basename(ofn)))
    fout = h5py.File(ofn, "w")
    g1 = fout.require_group("/qual_flags")
    dset1 = g1.create_dataset("data", dtype='int16', data=qual_flags)
    del dset1
    g1.attrs["product"] = np.string_("QFLAG")
    g1.attrs["quantity"] = np.string_("INT")
    g1.attrs["dataset_name"] = np.string_('Scanline quality flags')
    g1.attrs["units"] = np.string_('None')
    g1.attrs["gain"] = np.int32(1)
    g1.attrs["offset"] = np.int32(0)
    g1.attrs["missingdata"] = np.int32(MISSING_DATA)
    g1.attrs["nodata"] = np.int32(MISSING_DATA)
    g1.attrs["starttime"] = np.string_(starttime[0:6])
    g1.attrs["endtime"] = np.string_(endtime[0:6])
    g1.attrs["startdate"] = np.string_(startdate)
    g1.attrs["enddate"] = np.string_(enddate)
    g1.attrs["gac_file"] = np.string_(gac_file)
    g1.attrs["total_number_of_data_records"] = total_number_of_scan_lines
    g1.attrs["last_scan_line_number"] = last_scan_line_number
    g2 = fout.require_group("/ancillary")
    dset2 = g2.create_dataset("missing_scanlines", dtype='int16',
                              data=miss_lines)
    del dset2
    dset3 = g2.create_dataset("scanline_timestamps", dtype='int64',
                              data=xutcs.astype('int64'))
    dset3.attrs['units'] = 'Milliseconds since 1970-01-01 00:00:00 UTC'
    dset3.attrs['calendar'] = 'standard'
    # NOTE(review): midnight_scanline is stringified here (becomes b'None'
    # when there is no midnight crossing) -- confirm downstream readers
    # expect a string attribute.
    g2.attrs["midnight_scanline"] = np.string_(midnight_scanline)
    fout.close()
|
gpl-3.0
|
hectord/lettuce
|
tests/integration/lib/Django-1.3/django/contrib/localflavor/is_/is_postalcodes.py
|
438
|
4913
|
# -*- coding: utf-8 -*-
# Icelandic postal codes for localflavor choice fields.
# Every display label is exactly "<code> <locality>", so the labels are
# derived from the locality names below instead of being spelled out twice.
_IS_POSTAL_PLACES = (
    ('101', u'Reykjavík'),
    ('103', u'Reykjavík'),
    ('104', u'Reykjavík'),
    ('105', u'Reykjavík'),
    ('107', u'Reykjavík'),
    ('108', u'Reykjavík'),
    ('109', u'Reykjavík'),
    ('110', u'Reykjavík'),
    ('111', u'Reykjavík'),
    ('112', u'Reykjavík'),
    ('113', u'Reykjavík'),
    ('116', u'Kjalarnes'),
    ('121', u'Reykjavík'),
    ('123', u'Reykjavík'),
    ('124', u'Reykjavík'),
    ('125', u'Reykjavík'),
    ('127', u'Reykjavík'),
    ('128', u'Reykjavík'),
    ('129', u'Reykjavík'),
    ('130', u'Reykjavík'),
    ('132', u'Reykjavík'),
    ('150', u'Reykjavík'),
    ('155', u'Reykjavík'),
    ('170', u'Seltjarnarnes'),
    ('172', u'Seltjarnarnes'),
    ('190', u'Vogar'),
    ('200', u'Kópavogur'),
    ('201', u'Kópavogur'),
    ('202', u'Kópavogur'),
    ('203', u'Kópavogur'),
    ('210', u'Garðabær'),
    ('212', u'Garðabær'),
    ('220', u'Hafnarfjörður'),
    ('221', u'Hafnarfjörður'),
    ('222', u'Hafnarfjörður'),
    ('225', u'Álftanes'),
    ('230', u'Reykjanesbær'),
    ('232', u'Reykjanesbær'),
    ('233', u'Reykjanesbær'),
    ('235', u'Keflavíkurflugvöllur'),
    ('240', u'Grindavík'),
    ('245', u'Sandgerði'),
    ('250', u'Garður'),
    ('260', u'Reykjanesbær'),
    ('270', u'Mosfellsbær'),
    ('300', u'Akranes'),
    ('301', u'Akranes'),
    ('302', u'Akranes'),
    ('310', u'Borgarnes'),
    ('311', u'Borgarnes'),
    ('320', u'Reykholt í Borgarfirði'),
    ('340', u'Stykkishólmur'),
    ('345', u'Flatey á Breiðafirði'),
    ('350', u'Grundarfjörður'),
    ('355', u'Ólafsvík'),
    ('356', u'Snæfellsbær'),
    ('360', u'Hellissandur'),
    ('370', u'Búðardalur'),
    ('371', u'Búðardalur'),
    ('380', u'Reykhólahreppur'),
    ('400', u'Ísafjörður'),
    ('401', u'Ísafjörður'),
    ('410', u'Hnífsdalur'),
    ('415', u'Bolungarvík'),
    ('420', u'Súðavík'),
    ('425', u'Flateyri'),
    ('430', u'Suðureyri'),
    ('450', u'Patreksfjörður'),
    ('451', u'Patreksfjörður'),
    ('460', u'Tálknafjörður'),
    ('465', u'Bíldudalur'),
    ('470', u'Þingeyri'),
    ('471', u'Þingeyri'),
    ('500', u'Staður'),
    ('510', u'Hólmavík'),
    ('512', u'Hólmavík'),
    ('520', u'Drangsnes'),
    ('522', u'Kjörvogur'),
    ('523', u'Bær'),
    ('524', u'Norðurfjörður'),
    ('530', u'Hvammstangi'),
    ('531', u'Hvammstangi'),
    ('540', u'Blönduós'),
    ('541', u'Blönduós'),
    ('545', u'Skagaströnd'),
    ('550', u'Sauðárkrókur'),
    ('551', u'Sauðárkrókur'),
    ('560', u'Varmahlíð'),
    ('565', u'Hofsós'),
    ('566', u'Hofsós'),
    ('570', u'Fljót'),
    ('580', u'Siglufjörður'),
    ('600', u'Akureyri'),
    ('601', u'Akureyri'),
    ('602', u'Akureyri'),
    ('603', u'Akureyri'),
    ('610', u'Grenivík'),
    ('611', u'Grímsey'),
    ('620', u'Dalvík'),
    ('621', u'Dalvík'),
    ('625', u'Ólafsfjörður'),
    ('630', u'Hrísey'),
    ('640', u'Húsavík'),
    ('641', u'Húsavík'),
    ('645', u'Fosshóll'),
    ('650', u'Laugar'),
    ('660', u'Mývatn'),
    ('670', u'Kópasker'),
    ('671', u'Kópasker'),
    ('675', u'Raufarhöfn'),
    ('680', u'Þórshöfn'),
    ('681', u'Þórshöfn'),
    ('685', u'Bakkafjörður'),
    ('690', u'Vopnafjörður'),
    ('700', u'Egilsstaðir'),
    ('701', u'Egilsstaðir'),
    ('710', u'Seyðisfjörður'),
    ('715', u'Mjóifjörður'),
    ('720', u'Borgarfjörður eystri'),
    ('730', u'Reyðarfjörður'),
    ('735', u'Eskifjörður'),
    ('740', u'Neskaupstaður'),
    ('750', u'Fáskrúðsfjörður'),
    ('755', u'Stöðvarfjörður'),
    ('760', u'Breiðdalsvík'),
    ('765', u'Djúpivogur'),
    ('780', u'Höfn í Hornafirði'),
    ('781', u'Höfn í Hornafirði'),
    ('785', u'Öræfi'),
    ('800', u'Selfoss'),
    ('801', u'Selfoss'),
    ('802', u'Selfoss'),
    ('810', u'Hveragerði'),
    ('815', u'Þorlákshöfn'),
    ('820', u'Eyrarbakki'),
    ('825', u'Stokkseyri'),
    ('840', u'Laugarvatn'),
    ('845', u'Flúðir'),
    ('850', u'Hella'),
    ('851', u'Hella'),
    ('860', u'Hvolsvöllur'),
    ('861', u'Hvolsvöllur'),
    ('870', u'Vík'),
    ('871', u'Vík'),
    ('880', u'Kirkjubæjarklaustur'),
    ('900', u'Vestmannaeyjar'),
    ('902', u'Vestmannaeyjar'),
)
IS_POSTALCODES = tuple(
    (code, u'%s %s' % (code, place)) for code, place in _IS_POSTAL_PLACES
)
|
gpl-3.0
|
roderickmackenzie/gpvdm
|
gpvdm_gui/gui/command_args.py
|
1
|
7098
|
#
# General-purpose Photovoltaic Device Model - a drift diffusion base/Shockley-Read-Hall
# model for 1st, 2nd and 3rd generation solar cells.
# Copyright (C) 2012-2017 Roderick C. I. MacKenzie r.c.i.mackenzie at googlemail.com
#
# https://www.gpvdm.com
# Room B86 Coates, University Park, Nottingham, NG7 2RD, UK
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2.0, as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#
## @package command_args
# Handle command line arguments.
#
import sys
import os
from clone import gpvdm_clone
from export_as import export_as
from import_archive import import_archive
from util import gpvdm_copy_src
from scan_io import scan_io
from ver import ver
from ver import version
from import_archive import import_scan_dirs
from make_man import make_man
from scan_tree import tree_gen
from server import base_server
from cal_path import get_exe_command
from dat_file import dat_file
from plot_io import plot_load_info
from scan_plot import scan_gen_plot_data
from server_io import server_find_simulations_to_run
from clean_sim import clean_sim_dir
from ver import ver_sync_ver
from code_ctrl import enable_cluster
from win_lin import running_on_linux
from inp import inp_update_token_value
from device_lib_io import device_lib_replace
from cal_path import test_arg_for_sim_file
from cal_path import set_sim_path
from import_archive import patch_file
from util_zip import archive_decompress
from util_zip import archive_compress
from scan_tree import tree_load_flat_list
from gui_enable import set_gui
from gui_enable import gui_get
from materials_io import archive_materials
import i18n
_ = i18n.language.gettext

import argparse

# Top-level CLI definition: every gpvdm switch is declared here and
# dispatched by command_args() below.  argparse derives the attribute
# name from the long option (e.g. --scanplot -> args.scanplot).
parser = argparse.ArgumentParser(epilog=_("Additional information about gpvdm is available at")+" https://www.gpvdm.com"+"\n"+_("Report bugs to:")+" roderick.mackenzie@nottingham.ac.uk")
parser.add_argument("--version", help=_("displays the current version"), action='store_true')
parser.add_argument("--ver", help=_("displays the current version"), action='store_true')
parser.add_argument("--replace", help=_("replaces file in device lib --replace file.inp path_to_device_lib"), nargs=2)
parser.add_argument("--clean", help=_("cleans the current simulation directory deleting .dat files but not scan dirs"), action='store_true')
parser.add_argument("--export", help=_("export a simulation to a gz file"), nargs=1)
parser.add_argument("--syncver", help=_("Synchronizes the saved file version to that of the source code."), action='store_true')
parser.add_argument("--makeman", help=_("Generate the manual pages referring to the output files.."), action='store_true')
parser.add_argument("--importscandirs", help=_("Only imports the scan directories."), nargs=1)
parser.add_argument("--cleanscandirs", help=_("Deletes the content of all scan directories."), nargs=1)
parser.add_argument("--patch", help=_("Patch a .gpvdm file with an older .gpvdm file."), nargs=2)
parser.add_argument("--patchfile", help=_("Patch an .inp file with an older .inp file. usage --patchfile dest_file base_file input_file"), nargs=3)
parser.add_argument("--importfile", help=_("usage --import abc.gpvdm ./path/to/output/ "), nargs=2)
parser.add_argument("--dumptab", help=_("Dumps simulation parameters as jpg, usage: --dump-tab output_path"), nargs=1)
parser.add_argument("--clone", help=_("Generate a clean simulation in the current directory"), action='store_true')
parser.add_argument("--clonesrc", help=_("Clone the source code."), action='store_true')
parser.add_argument("--editvalue", help=_("edits a value in a .gpvdm archive. Usage --edit-value /path/to/sim.gpvdm #token_to_change new_value "), nargs=3)
parser.add_argument("--scanplot", help=_("Runs an oplot file, usage --scanplot /path/to/oplot/file.oplot "), nargs=1)
parser.add_argument("--load", help=_("Loads a simulation --load /path/containing/simulation/sim.gpvdm"), nargs=1)
parser.add_argument("--encrypt", help=_("Encrypt a gpvdm file --file sim.gpvdm"), nargs=1)
parser.add_argument("--unpack", help=_("Extract the sim.gpvdm archive --unpack"), action='store_true')
parser.add_argument("--pack", help=_("Extract the sim.gpvdm archive --pack"), action='store_true')
parser.add_argument("--matcompress", help=_("Compresses the materials dir"), action='store_true')

# Skip option parsing entirely when a simulation file was passed directly
# on the command line (test_arg_for_sim_file() handles that case).
if test_arg_for_sim_file()==False:
    args = parser.parse_args()
def command_args(argc,argv):
    """Dispatch the command-line switches parsed at module level.

    @param argc number of command-line arguments (len(sys.argv))
    @param argv the argument vector itself (unused directly; the parsed
           module-level ``args`` namespace drives the dispatch)

    Most branches perform exactly one action and terminate the process
    via sys.exit()/exit(); --load only switches the simulation path and
    falls through so the caller can continue.
    """
    if test_arg_for_sim_file()!=False:
        # A simulation file was given directly; module-level parsing was
        # skipped, so there is no ``args`` namespace to dispatch on.
        return

    if argc>=2:
        if args.version:
            print(version())
            sys.exit(0)
        elif args.ver:
            print(ver())
            sys.exit(0)
        elif args.syncver:
            ver_sync_ver()
            sys.exit(0)
        elif args.importscandirs:
            import_scan_dirs(os.getcwd(),args.importscandirs[0])
            exit(0)
        elif args.replace:
            device_lib_replace(args.replace[0],dir_name=args.replace[1])
            exit(0)
        elif args.clean:
            clean_sim_dir()
            sys.exit(0)
        elif args.export:
            export_as(args.export[0])
            sys.exit(0)
        elif args.makeman:
            make_man()
            sys.exit(0)
        elif args.cleanscandirs:
            # BUGFIX: was ``scans_io(...)`` -- that name is never imported
            # in this module (NameError); the imported class is scan_io.
            scan=scan_io(os.getcwd())
            scan.clean_all()
            sys.exit(0)
        elif args.importfile:
            import_archive(args.importfile[0],os.path.join(os.getcwd(),"sim.gpvdm"),False)
            sys.exit(0)
        elif args.dumptab:
            export_as(args.dumptab[0])
            sys.exit(0)
        elif args.patch:
            import_archive(args.patch[0],args.patch[1],True)
            sys.exit(0)
        elif args.patchfile:
            patch_file(args.patchfile[0],args.patchfile[1],args.patchfile[2])
            sys.exit(0)
        elif args.clone:
            gpvdm_clone(os.getcwd(),copy_dirs=True)
            sys.exit(0)
        elif args.matcompress:
            archive_materials(os.path.join(os.getcwd(),"materials"))
            sys.exit(0)
        elif args.clonesrc:
            # BUGFIX: was ``gpvdm_copy_src(clone-src[0])`` -- ``clone`` and
            # ``src`` are undefined names, so this raised NameError.
            # --clonesrc is a store_true flag with no value; clone the
            # source tree into the current directory.
            gpvdm_copy_src(os.getcwd())
            sys.exit(0)
        elif args.editvalue:
            inp_update_token_value(args.editvalue[0], args.editvalue[1], args.editvalue[2])
            sys.exit(0)
        elif args.load:
            # No exit: loading a simulation lets the caller continue.
            set_sim_path(os.path.dirname(args.load[0]))
        elif args.encrypt:
            # BUGFIX: inp_encrypt was never imported at module level;
            # import it lazily here.  NOTE(review): assumed to live in the
            # ``inp`` module alongside inp_update_token_value -- verify.
            from inp import inp_encrypt
            inp_encrypt(args.encrypt[0])
            sys.exit(0)
        elif args.unpack:
            archive_decompress(os.path.join(os.getcwd(),"sim.gpvdm"),remove_gpvdm_file=False)
            sys.exit(0)
        elif args.pack:
            archive_compress(os.path.join(os.getcwd(),"sim.gpvdm"))
            sys.exit(0)
        elif args.scanplot:
            plot_token=dat_file()
            # BUGFIX: was ``args.scan-plot[0]`` -- parsed as a subtraction
            # of two undefined names.  argparse stores --scanplot under
            # args.scanplot.
            oplot_file=args.scanplot[0]
            if plot_load_info(plot_token,oplot_file)==True:
                print("file0=",plot_token.file0,"<")
                plot_files, plot_labels, save_file = scan_gen_plot_data(plot_token,os.path.dirname(oplot_file))
                print("written data to",save_file)
            else:
                print("Problem loading oplot file")
            sys.exit(0)
|
gpl-2.0
|
wweiradio/django
|
tests/contenttypes_tests/models.py
|
70
|
1803
|
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.http import urlquote
@python_2_unicode_compatible
class Author(models.Model):
    # Display name; also the string representation.
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Hard-coded URL pattern matched by the contenttypes tests.
        return '/authors/%s/' % self.id
@python_2_unicode_compatible
class Article(models.Model):
    title = models.CharField(max_length=100)
    slug = models.SlugField()
    # CASCADE: deleting an Author deletes their articles.
    author = models.ForeignKey(Author, models.CASCADE)
    date_created = models.DateTimeField()

    def __str__(self):
        return self.title
@python_2_unicode_compatible
class SchemeIncludedURL(models.Model):
    # Stored value is a full URL, scheme included; get_absolute_url
    # returns it verbatim.
    url = models.URLField(max_length=100)

    def __str__(self):
        return self.url

    def get_absolute_url(self):
        return self.url
class ConcreteModel(models.Model):
    # Concrete base for the ProxyModel defined below.
    name = models.CharField(max_length=10)
class ProxyModel(ConcreteModel):
    # Proxy of ConcreteModel (Meta.proxy=True): no new fields or table.
    class Meta:
        proxy = True
@python_2_unicode_compatible
class FooWithoutUrl(models.Model):
    """
    Fake model not defining ``get_absolute_url`` for
    ContentTypesTests.test_shortcut_view_without_get_absolute_url()
    """
    name = models.CharField(max_length=30, unique=True)

    def __str__(self):
        return self.name
class FooWithUrl(FooWithoutUrl):
    """
    Fake model defining ``get_absolute_url`` for
    ContentTypesTests.test_shortcut_view().
    """

    def get_absolute_url(self):
        # urlquote percent-encodes non-ASCII names for the URL.
        return "/users/%s/" % urlquote(self.name)
class FooWithBrokenAbsoluteUrl(FooWithoutUrl):
    """
    Fake model defining a ``get_absolute_url`` method containing an error
    """

    def get_absolute_url(self):
        # Intentionally broken: ``unknown_field`` is not defined anywhere,
        # so calling this raises AttributeError.
        return "/users/%s/" % self.unknown_field
|
bsd-3-clause
|
SatoshiNXSimudrone/sl4a-damon-clone
|
python/gdata/src/gdata/tlslite/integration/ClientHelper.py
|
285
|
7021
|
"""
A helper class for using TLS Lite with stdlib clients
(httplib, xmlrpclib, imaplib, poplib).
"""
from gdata.tlslite.Checker import Checker
class ClientHelper:
    """This is a helper class used to integrate TLS Lite with various
    TLS clients (e.g. poplib, smtplib, httplib, etc.)"""

    def __init__(self,
                 username=None, password=None, sharedKey=None,
                 certChain=None, privateKey=None,
                 cryptoID=None, protocol=None,
                 x509Fingerprint=None,
                 x509TrustList=None, x509CommonName=None,
                 settings = None):
        """
        For client authentication, use one of these argument
        combinations:
         - username, password (SRP)
         - username, sharedKey (shared-key)
         - certChain, privateKey (certificate)

        For server authentication, you can either rely on the
        implicit mutual authentication performed by SRP or
        shared-keys, or you can do certificate-based server
        authentication with one of these argument combinations:
         - cryptoID[, protocol] (requires cryptoIDlib)
         - x509Fingerprint
         - x509TrustList[, x509CommonName] (requires cryptlib_py)

        Certificate-based server authentication is compatible with
        SRP or certificate-based client authentication.  It is
        not compatible with shared-keys.

        The constructor does not perform the TLS handshake itself, but
        simply stores these arguments for later.  The handshake is
        performed only when this class needs to connect with the
        server.  Then you should be prepared to handle TLS-specific
        exceptions.  See the client handshake functions in
        L{tlslite.TLSConnection.TLSConnection} for details on which
        exceptions might be raised.

        @type username: str
        @param username: SRP or shared-key username.  Requires the
        'password' or 'sharedKey' argument.

        @type password: str
        @param password: SRP password for mutual authentication.
        Requires the 'username' argument.

        @type sharedKey: str
        @param sharedKey: Shared key for mutual authentication.
        Requires the 'username' argument.

        @type certChain: L{tlslite.X509CertChain.X509CertChain} or
        L{cryptoIDlib.CertChain.CertChain}
        @param certChain: Certificate chain for client authentication.
        Requires the 'privateKey' argument.  Excludes the SRP or
        shared-key related arguments.

        @type privateKey: L{tlslite.utils.RSAKey.RSAKey}
        @param privateKey: Private key for client authentication.
        Requires the 'certChain' argument.  Excludes the SRP or
        shared-key related arguments.

        @type cryptoID: str
        @param cryptoID: cryptoID for server authentication.  Mutually
        exclusive with the 'x509...' arguments.

        @type protocol: str
        @param protocol: cryptoID protocol URI for server
        authentication.  Requires the 'cryptoID' argument.

        @type x509Fingerprint: str
        @param x509Fingerprint: Hex-encoded X.509 fingerprint for
        server authentication.  Mutually exclusive with the 'cryptoID'
        and 'x509TrustList' arguments.

        @type x509TrustList: list of L{tlslite.X509.X509}
        @param x509TrustList: A list of trusted root certificates.  The
        other party must present a certificate chain which extends to
        one of these root certificates.  The cryptlib_py module must be
        installed to use this parameter.  Mutually exclusive with the
        'cryptoID' and 'x509Fingerprint' arguments.

        @type x509CommonName: str
        @param x509CommonName: The end-entity certificate's 'CN' field
        must match this value.  For a web server, this is typically a
        server name such as 'www.amazon.com'.  Mutually exclusive with
        the 'cryptoID' and 'x509Fingerprint' arguments.  Requires the
        'x509TrustList' argument.

        @type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
        @param settings: Various settings which can be used to control
        the ciphersuites, certificate types, and SSL/TLS versions
        offered by the client.
        """
        self.username = None
        self.password = None
        self.sharedKey = None
        self.certChain = None
        self.privateKey = None
        self.checker = None

        # Exactly one credential combination may be supplied; anything
        # else is rejected below.

        #SRP Authentication
        if username and password and not \
                (sharedKey or certChain or privateKey):
            self.username = username
            self.password = password

        #Shared Key Authentication
        elif username and sharedKey and not \
                (password or certChain or privateKey):
            self.username = username
            self.sharedKey = sharedKey

        #Certificate Chain Authentication
        elif certChain and privateKey and not \
                (username or password or sharedKey):
            self.certChain = certChain
            self.privateKey = privateKey

        #No Authentication
        elif not password and not username and not \
                sharedKey and not certChain and not privateKey:
            pass

        else:
            raise ValueError("Bad parameters")

        #Authenticate the server based on its cryptoID or fingerprint
        if sharedKey and (cryptoID or protocol or x509Fingerprint):
            # BUGFIX: the two string literals used to concatenate without a
            # space, producing "...other forms ofauthentication".
            raise ValueError("Can't use shared keys with other forms of "\
                             "authentication")

        self.checker = Checker(cryptoID, protocol, x509Fingerprint,
                               x509TrustList, x509CommonName)
        self.settings = settings

        # Cached session, reused for session resumption on reconnect.
        self.tlsSession = None

    def _handshake(self, tlsConnection):
        """Run the client handshake matching the stored credentials and
        remember the resulting session for resumption."""
        if self.username and self.password:
            tlsConnection.handshakeClientSRP(username=self.username,
                                             password=self.password,
                                             checker=self.checker,
                                             settings=self.settings,
                                             session=self.tlsSession)
        elif self.username and self.sharedKey:
            # NOTE: shared-key handshakes take no checker/session here.
            tlsConnection.handshakeClientSharedKey(username=self.username,
                                                   sharedKey=self.sharedKey,
                                                   settings=self.settings)
        else:
            tlsConnection.handshakeClientCert(certChain=self.certChain,
                                              privateKey=self.privateKey,
                                              checker=self.checker,
                                              settings=self.settings,
                                              session=self.tlsSession)
        self.tlsSession = tlsConnection.session
|
apache-2.0
|
Mozta/pagina-diagnostijuego
|
venv/lib/python2.7/site-packages/setuptools/ssl_support.py
|
86
|
8131
|
import os
import socket
import atexit
import re
from setuptools.extern.six.moves import urllib, http_client, map
import pkg_resources
from pkg_resources import ResolutionError, ExtractionError
try:
import ssl
except ImportError:
ssl = None
__all__ = [
    'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths',
    'opener_for'
]

# Well-known CA bundle locations, scanned in order by find_ca_bundle().
cert_paths = """
/etc/pki/tls/certs/ca-bundle.crt
/etc/ssl/certs/ca-certificates.crt
/usr/share/ssl/certs/ca-bundle.crt
/usr/local/share/certs/ca-root.crt
/etc/ssl/cert.pem
/System/Library/OpenSSL/certs/cert.pem
/usr/local/share/certs/ca-root-nss.crt
/etc/ssl/ca-bundle.pem
""".strip().split()

# When Python was built without HTTPS support these names are missing;
# fall back to ``object`` so class definitions below still work, and let
# is_available reflect the degradation.
try:
    HTTPSHandler = urllib.request.HTTPSHandler
    HTTPSConnection = http_client.HTTPSConnection
except AttributeError:
    HTTPSHandler = HTTPSConnection = object

is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection)

# Prefer the stdlib hostname-matching helpers, then the backports
# package; the fallbacks below are only defined when neither imports.
try:
    from ssl import CertificateError, match_hostname
except ImportError:
    try:
        from backports.ssl_match_hostname import CertificateError
        from backports.ssl_match_hostname import match_hostname
    except ImportError:
        CertificateError = None
        match_hostname = None

if not CertificateError:
    class CertificateError(ValueError):
        pass
if not match_hostname:
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate")
    dnsnames = []
    # Prefer subjectAltName DNS entries; the subject CN is a fallback only.
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    # Nothing matched: build an error listing every candidate that failed.
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
            "doesn't match either of %s"
            % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
            "doesn't match %r"
            % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
            "subjectAltName fields were found")
class VerifyingHTTPSHandler(HTTPSHandler):
    """Simple verifying handler: no auth, subclasses, timeouts, etc."""

    def __init__(self, ca_bundle):
        # Path of the CA bundle used to verify every connection opened
        # through this handler.
        self.ca_bundle = ca_bundle
        HTTPSHandler.__init__(self)

    def https_open(self, req):
        # Open via a connection class bound to our CA bundle.
        return self.do_open(
            lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req
        )
class VerifyingHTTPSConn(HTTPSConnection):
    """Simple verifying connection: no auth, subclasses, timeouts, etc."""

    def __init__(self, host, ca_bundle, **kw):
        HTTPSConnection.__init__(self, host, **kw)
        self.ca_bundle = ca_bundle

    def connect(self):
        sock = socket.create_connection(
            (self.host, self.port), getattr(self, 'source_address', None)
        )

        # Handle the socket if a (proxy) tunnel is present
        if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None):
            self.sock = sock
            self._tunnel()
            # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7
            # change self.host to mean the proxy server host when tunneling is
            # being used. Adapt, since we are interested in the destination
            # host for the match_hostname() comparison.
            actual_host = self._tunnel_host
        else:
            actual_host = self.host

        # NOTE(review): ssl.wrap_socket is the legacy API (deprecated and
        # later removed in modern Python); this vendored copy predates
        # SSLContext.  CERT_REQUIRED forces chain validation against
        # ca_bundle.
        self.sock = ssl.wrap_socket(
            sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle
        )

        try:
            match_hostname(self.sock.getpeercert(), actual_host)
        except CertificateError:
            # Verification failed: tear the connection down before
            # propagating, so no data is ever exchanged.
            self.sock.shutdown(socket.SHUT_RDWR)
            self.sock.close()
            raise
def opener_for(ca_bundle=None):
    """Get a urlopen() replacement that uses ca_bundle for verification"""
    # Fall back to the platform's CA bundle when none is supplied.
    bundle = ca_bundle or find_ca_bundle()
    handler = VerifyingHTTPSHandler(bundle)
    return urllib.request.build_opener(handler).open
_wincerts = None
def get_win_certfile():
    """Return the name of a file holding the Windows system cert stores.

    Returns None when the optional ``wincertstore`` package is not
    installed.  The CertFile is cached in the module-global ``_wincerts``
    so the stores are only dumped once per process.
    """
    global _wincerts
    if _wincerts is not None:
        return _wincerts.name
    try:
        from wincertstore import CertFile
    except ImportError:
        return None

    class MyCertFile(CertFile):
        def __init__(self, stores=(), certs=()):
            CertFile.__init__(self)
            for store in stores:
                self.addstore(store)
            self.addcerts(certs)
            # Make sure the temp file is removed at interpreter exit.
            atexit.register(self.close)

        def close(self):
            # Ignore errors if the file is already gone.
            try:
                super(MyCertFile, self).close()
            except OSError:
                pass

    _wincerts = MyCertFile(stores=['CA', 'ROOT'])
    return _wincerts.name
def find_ca_bundle():
    """Return an existing CA bundle path, or None"""
    # Windows: synthesize a bundle from the system certificate stores.
    if os.name == 'nt':
        return get_win_certfile()
    # POSIX: the first well-known bundle that exists wins.
    for cert_path in cert_paths:
        if os.path.isfile(cert_path):
            return cert_path
    # Last resort: the certifi package, when it is installed.
    try:
        import certifi
        return certifi.where()
    except (ImportError, ResolutionError, ExtractionError):
        return None
|
gpl-3.0
|
0jpq0/kbengine
|
kbe/res/scripts/common/Lib/distutils/tests/test_dir_util.py
|
106
|
4654
|
"""Tests for distutils.dir_util."""
import unittest
import os
import stat
import sys
from unittest.mock import patch
from distutils import dir_util, errors
from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree,
ensure_relative)
from distutils import log
from distutils.tests import support
from test.support import run_unittest
class DirUtilTestCase(support.TempdirManager, unittest.TestCase):
    """Tests for distutils.dir_util (mkpath, remove_tree, copy_tree, ...)."""

    def _log(self, msg, *args):
        # Replacement for distutils.log.info: capture formatted messages
        # into self._logs instead of printing them.
        if len(args) > 0:
            self._logs.append(msg % args)
        else:
            self._logs.append(msg)

    def setUp(self):
        super(DirUtilTestCase, self).setUp()
        self._logs = []
        tmp_dir = self.mkdtemp()
        self.root_target = os.path.join(tmp_dir, 'deep')
        self.target = os.path.join(self.root_target, 'here')
        self.target2 = os.path.join(tmp_dir, 'deep2')
        # Divert distutils logging into the capture list above.
        self.old_log = log.info
        log.info = self._log

    def tearDown(self):
        # Restore the real logger before the temp dirs are cleaned up.
        log.info = self.old_log
        super(DirUtilTestCase, self).tearDown()

    def test_mkpath_remove_tree_verbosity(self):
        # verbose=0 must produce no log output at all ...
        mkpath(self.target, verbose=0)
        wanted = []
        self.assertEqual(self._logs, wanted)
        remove_tree(self.root_target, verbose=0)

        # ... while verbose=1 logs one line per directory created.
        mkpath(self.target, verbose=1)
        wanted = ['creating %s' % self.root_target,
                  'creating %s' % self.target]
        self.assertEqual(self._logs, wanted)
        self._logs = []

        remove_tree(self.root_target, verbose=1)
        wanted = ["removing '%s' (and everything under it)" % self.root_target]
        self.assertEqual(self._logs, wanted)

    @unittest.skipIf(sys.platform.startswith('win'),
                     "This test is only appropriate for POSIX-like systems.")
    def test_mkpath_with_custom_mode(self):
        # Get and set the current umask value for testing mode bits.
        umask = os.umask(0o002)
        os.umask(umask)
        # Created directories carry the requested mode, minus the umask.
        mkpath(self.target, 0o700)
        self.assertEqual(
            stat.S_IMODE(os.stat(self.target).st_mode), 0o700 & ~umask)
        mkpath(self.target2, 0o555)
        self.assertEqual(
            stat.S_IMODE(os.stat(self.target2).st_mode), 0o555 & ~umask)

    def test_create_tree_verbosity(self):
        # create_tree only creates the base dir (files are just names).
        create_tree(self.root_target, ['one', 'two', 'three'], verbose=0)
        self.assertEqual(self._logs, [])
        remove_tree(self.root_target, verbose=0)

        wanted = ['creating %s' % self.root_target]
        create_tree(self.root_target, ['one', 'two', 'three'], verbose=1)
        self.assertEqual(self._logs, wanted)

        remove_tree(self.root_target, verbose=0)

    def test_copy_tree_verbosity(self):
        mkpath(self.target, verbose=0)

        # An empty tree copies silently even at verbose=0.
        copy_tree(self.target, self.target2, verbose=0)
        self.assertEqual(self._logs, [])

        remove_tree(self.root_target, verbose=0)

        mkpath(self.target, verbose=0)
        a_file = os.path.join(self.target, 'ok.txt')
        with open(a_file, 'w') as f:
            f.write('some content')

        # verbose=1 logs one 'copying' line per file.
        wanted = ['copying %s -> %s' % (a_file, self.target2)]
        copy_tree(self.target, self.target2, verbose=1)
        self.assertEqual(self._logs, wanted)

        remove_tree(self.root_target, verbose=0)
        remove_tree(self.target2, verbose=0)

    def test_copy_tree_skips_nfs_temp_files(self):
        mkpath(self.target, verbose=0)

        a_file = os.path.join(self.target, 'ok.txt')
        nfs_file = os.path.join(self.target, '.nfs123abc')
        for f in a_file, nfs_file:
            with open(f, 'w') as fh:
                fh.write('some content')

        copy_tree(self.target, self.target2)
        # Stale NFS handles (.nfs*) must not be copied along.
        self.assertEqual(os.listdir(self.target2), ['ok.txt'])

        remove_tree(self.root_target, verbose=0)
        remove_tree(self.target2, verbose=0)

    def test_ensure_relative(self):
        if os.sep == '/':
            self.assertEqual(ensure_relative('/home/foo'), 'home/foo')
            self.assertEqual(ensure_relative('some/path'), 'some/path')
        else:   # \\
            self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo')
            self.assertEqual(ensure_relative('home\\foo'), 'home\\foo')

    def test_copy_tree_exception_in_listdir(self):
        """
        An exception in listdir should raise a DistutilsFileError
        """
        with patch("os.listdir", side_effect=OSError()), \
             self.assertRaises(errors.DistutilsFileError):
            src = self.tempdirs[-1]
            dir_util.copy_tree(src, None)
def test_suite():
    """Collect this module's tests into a TestSuite.

    ``unittest.makeSuite`` is deprecated (and removed in Python 3.13);
    the TestLoader API builds the identical suite.
    """
    return unittest.TestLoader().loadTestsFromTestCase(DirUtilTestCase)

if __name__ == "__main__":
    run_unittest(test_suite())
|
lgpl-3.0
|
keven/ibpy
|
build/lib/ib/lib/__init__.py
|
3
|
8650
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# Just enough auxiliary bits to make the translated code work.
#
# This package provides the support necessary to use the translated
# code. The configuration modules used in translation take care of
# many semantic differences between Java and Python, while this
# package provides the rest.
##
import copy
import functools
import socket
import struct
import sys
class classmethod_(classmethod):
    """ Classmethod that provides attribute delegation.
    """
    def __init__(self, func):
        classmethod.__init__(self, func)
        # Keep our own reference so __getattr__ can forward lookups to
        # the wrapped function.
        self.func = func

    def __getattr__(self, name):
        # Only invoked for names the classmethod object itself lacks.
        return getattr(self.func, name)
def synchronized(lock):
    """ Synchronization decorator.

    from http://wiki.python.org/moin/PythonDecoratorLibrary

    @param lock Lock or RLock instance
    @return decorator that provides automatic locking
    """
    def wrapper(func):
        @functools.wraps(func)
        def inner(*args, **kwds):
            # ``with`` acquires before the call and releases on every
            # exit path -- same as acquire/try/finally/release.
            with lock:
                return func(*args, **kwds)
        return inner
    return wrapper
class Boolean(object):
    """ Partial implementation of Java Boolean type.
    """
    def __init__(self, value):
        """ Constructor.

        @param value bool instance, True or False
        """
        self.value = value

    def booleanValue(self):
        """ The value of this instance (a bool).

        @return True or False
        """
        return self.value

    @classmethod
    def valueOf(cls, text):
        """ Creates an instance of this class with a bool value.

        @param cls this class
        @param text string
        @return instance of cls
        """
        # Only the literal 'true' (case-insensitive) maps to True.
        return cls(str(text).lower() == 'true')
class Cloneable(object):
    """ Stub for the Cloneable Java interface.

    Some of the translated code implements the Java Cloneable
    interface, but its methods are never used.  We provide this class
    for sub typing, and will implement methods as needed later.
    """
    def clone(self):
        # Shallow copy of this instance.
        return copy.copy(self)
class DataInputStream(object):
    """ Partial implementation of the Java DataInputStream type.
    """
    def __init__(self, stream):
        """ Constructor.

        @param stream any object with recv method
        """
        self.stream = stream
        # Bind recv once for fast access in readByte.
        self.recv = stream.recv

    def readByte(self, unpack=struct.unpack):
        """ Reads a byte from the contained stream.

        @return signed byte value as int
        """
        (value,) = unpack('!b', self.recv(1))
        return value
class DataOutputStream(object):
    """ Partial implementation of the Java DataOutputStream type
    """
    def __init__(self, stream):
        """ Constructor.

        @param stream any object with send method
        """
        self.send = stream.send

    def write(self, data, pack=struct.pack, eol=struct.pack('!b', 0)):
        """ Writes data to the contained stream.

        @param data string to send, or 0
        @return None
        """
        if data == 0:
            # 0 is the protocol's end-of-field marker: a single NUL byte.
            self.send(eol)
        else:
            send = self.send
            for char in data:
                send(pack('!c', char))
class Double(float):
    """ Partial implementation of Java Double type.
    """
    ##
    # sentinel value used by the socket writer
    # NOTE(review): sys.maxint exists only on Python 2; this module
    # predates Python 3.
    MAX_VALUE = sys.maxint

    @staticmethod
    def parseDouble(text):
        """ Float double (float) from string.

        @param text value to parse
        @return float instance
        """
        # Falsy input (None, '') parses as 0.0.
        return float(text or 0)
class Integer(int):
    """ Partial implementation of Java Integer type.
    """
    ##
    # sentinel value used by the socket writer
    # NOTE(review): sys.maxint exists only on Python 2.
    MAX_VALUE = sys.maxint

    @staticmethod
    def parseInt(text):
        """ Int from string.

        @param text value to parse
        @return int instance
        """
        # Falsy input (None, '') parses as 0.
        return int(text or 0)

    @staticmethod
    def parseLong(text):
        """ Long from string.

        @param text value to parse
        @return long instance
        """
        # NOTE(review): ``long`` is the Python 2 arbitrary-precision int.
        return long(text or 0)

##
# The generated code uses Longs just like Integers, so we use an alias
# instead of a subclass (for now).
Long = Integer
class Socket(socket.socket):
    """ Partial implementation of the Java Socket type.
    """
    def __init__(self, host, port):
        """ Constructor; attempts connection immediately.

        @param host hostname as string
        @param port port number as integer
        """
        socket.socket.__init__(self, socket.AF_INET, socket.SOCK_STREAM)
        self.connect((host, port))

    def getInputStream(self):
        """ Returns this instance, which has a recv method.
        """
        return self

    def getOutputStream(self):
        """ Returns this instance, which has a send method.
        """
        return self

    def isConnected(self):
        # getpeername() raises socket.error when not connected.
        try:
            throwaway = self.getpeername()
            return True
        # NOTE(review): Python 2 except syntax; this module predates
        # Python 3 and will not parse under it.
        except (socket.error, ), ex:
            return False
class StringBuffer(list):
    """ Partial implementation of the Java StringBuffer type

    Translated code uses instances of this type to build up strings.
    The list base type provides the append method; items are character
    ordinals, rendered by str().
    """
    def __str__(self, join=str.join, chr=chr):
        """ the string value of this instance

        @return string from characters contained in this instance
        """
        return join('', map(chr, self))
# Pick a ThreadType base depending on which GUI toolkit (if any) is
# already loaded: Qt3, then PyQt4, else plain Python threads.
if 'qt' in sys.modules:
    from qt import QThread

    class ThreadType(QThread):
        """ Partial implementation of Java Thread type, based on Qt3 QThread.
        """
        def __init__(self, name):
            """ Constructor.

            @param name ignored
            """
            QThread.__init__(self)

        def interrupt(self):
            """ Stop this thread (by call to terminate).
            """
            return self.terminate()

        def isInterrupted(self):
            """ Check state of thread.

            @return True if thread is finished
            """
            return self.finished()

        def setDaemon(self, value):
            """ No-op.

            @param value ignored
            @return None
            """

        def setName(self, value):
            """ No-op.

            @param value ignored
            @return None
            """

elif 'PyQt4' in sys.modules:
    from PyQt4.QtCore import QThread

    class ThreadType(QThread):
        """ Partial implementation of Java Thread type, based on Qt4 QThread.
        """
        def __init__(self, name):
            """ Constructor.

            @param name ignored
            """
            QThread.__init__(self)

        def interrupt(self):
            """ stop this thread (by call to exit)
            """
            return self.exit()

        def isInterrupted(self):
            """ check state of thread

            @return True if thread is finished
            """
            return self.isFinished()

        def setDaemon(self, value):
            """ No-op.

            @param value ignored
            @return None
            """

        def setName(self, value):
            """ sets the name of this QObject

            @param value name of object as string
            @return None
            """
            self.setObjectName(value)

else:
    import threading

    class ThreadType(threading.Thread):
        """ Partial implementation of Java Thread type, based on Python Thread.
        """
        def __init__(self, name):
            """ Constructor.

            @param name name of this thread
            """
            threading.Thread.__init__(self, name=name)
            # Daemonize so these worker threads never block interpreter exit.
            self.setDaemon(True)

        def interrupt(self):
            """ No-op; Python threads are not directly interruptible.
            """
            return False

        def isInterrupted(self):
            """ Check state of thread (always False).

            @return False
            """
            return False
class Thread(ThreadType):
    """ Thread parent type, based on available framework
    """
    def __init__(self, name, parent, dis):
        """ Constructor.

        @param name name of this thread
        @param parent ignored
        @param dis ignored
        """
        ThreadType.__init__(self, name=name)

    def term(self):
        # Replace isInterrupted so the reader loop sees an interrupted
        # thread and exits.
        # NOTE(review): Python 2 print statement; this module predates py3.
        def isInterrupted():
            print 'down town'
            return True
        self.isInterrupted = isInterrupted
        # NOTE(review): self.m_dis is never assigned in this class or the
        # ThreadType bases shown here -- presumably set by a subclass or
        # externally; verify before relying on term().
        self.m_dis.stream.shutdown(socket.SHUT_RDWR)
        self.m_dis.stream.close()
|
bsd-3-clause
|
bob-the-hamster/commandergenius
|
project/jni/python/src/Lib/test/test_urllib2.py
|
48
|
45551
|
import unittest
from test import test_support
import os
import socket
import StringIO
import urllib2
from urllib2 import Request, OpenerDirector
# XXX
# Request
# CacheFTPHandler (hard to write)
# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler
class TrivialTests(unittest.TestCase):
    def test_trivial(self):
        """Smoke test: bad URLs raise ValueError, file: URLs open."""
        # A couple trivial tests
        self.assertRaises(ValueError, urllib2.urlopen, 'bogus url')

        # XXX Name hacking to get this to work on Windows.
        fname = os.path.abspath(urllib2.__file__).replace('\\', '/')
        if fname[1:2] == ":":
            fname = fname[2:]
        # And more hacking to get it to work on MacOS. This assumes
        # urllib.pathname2url works, unfortunately...
        if os.name == 'mac':
            fname = '/' + fname.replace(':', '/')
        elif os.name == 'riscos':
            import string
            fname = os.expand(fname)
            fname = fname.translate(string.maketrans("/.", "./"))

        file_url = "file://%s" % fname
        f = urllib2.urlopen(file_url)

        buf = f.read()
        f.close()

    def test_parse_http_list(self):
        """parse_http_list splits comma lists, honouring quotes/escapes."""
        tests = [('a,b,c', ['a', 'b', 'c']),
                 ('path"o,l"og"i"cal, example', ['path"o,l"og"i"cal', 'example']),
                 ('a, b, "c", "d", "e,f", g, h', ['a', 'b', '"c"', '"d"', '"e,f"', 'g', 'h']),
                 ('a="b\\"c", d="e\\,f", g="h\\\\i"', ['a="b"c"', 'd="e,f"', 'g="h\\i"'])]
        for string, list in tests:
            self.assertEquals(urllib2.parse_http_list(string), list)
# NOTE: the docstring below is an executable doctest; its text is the test.
def test_request_headers_dict():
    """
    The Request.headers dictionary is not a documented interface.  It should
    stay that way, because the complete set of headers are only accessible
    through the .get_header(), .has_header(), .header_items() interface.
    However, .headers pre-dates those methods, and so real code will be using
    the dictionary.

    The introduction in 2.4 of those methods was a mistake for the same reason:
    code that previously saw all (urllib2 user)-provided headers in .headers
    now sees only a subset (and the function interface is ugly and incomplete).
    A better change would have been to replace .headers dict with a dict
    subclass (or UserDict.DictMixin instance?)  that preserved the .headers
    interface and also provided access to the "unredirected" headers.  It's
    probably too late to fix that, though.


    Check .capitalize() case normalization:

    >>> url = "http://example.com"
    >>> Request(url, headers={"Spam-eggs": "blah"}).headers["Spam-eggs"]
    'blah'
    >>> Request(url, headers={"spam-EggS": "blah"}).headers["Spam-eggs"]
    'blah'

    Currently, Request(url, "Spam-eggs").headers["Spam-Eggs"] raises KeyError,
    but that could be changed in future.

    """
def test_request_headers_methods():
    # Doctest container (executed via run_doctest() in test_main()); the
    # docstring examples are the actual test.
    """
    Note the case normalization of header names here, to .capitalize()-case.
    This should be preserved for backwards-compatibility.  (In the HTTP case,
    normalization to .title()-case is done by urllib2 before sending headers to
    httplib).
    >>> url = "http://example.com"
    >>> r = Request(url, headers={"Spam-eggs": "blah"})
    >>> r.has_header("Spam-eggs")
    True
    >>> r.header_items()
    [('Spam-eggs', 'blah')]
    >>> r.add_header("Foo-Bar", "baz")
    >>> items = r.header_items()
    >>> items.sort()
    >>> items
    [('Foo-bar', 'baz'), ('Spam-eggs', 'blah')]
    Note that e.g. r.has_header("spam-EggS") is currently False, and
    r.get_header("spam-EggS") returns None, but that could be changed in
    future.
    >>> r.has_header("Not-there")
    False
    >>> print r.get_header("Not-there")
    None
    >>> r.get_header("Not-there", "default")
    'default'
    """
def test_password_manager(self):
    # Doctest container for HTTPPasswordMgr; despite the name and the unused
    # `self` parameter this is a module-level function, not a TestCase
    # method — only the docstring examples below are executed.
    """
    >>> mgr = urllib2.HTTPPasswordMgr()
    >>> add = mgr.add_password
    >>> add("Some Realm", "http://example.com/", "joe", "password")
    >>> add("Some Realm", "http://example.com/ni", "ni", "ni")
    >>> add("c", "http://example.com/foo", "foo", "ni")
    >>> add("c", "http://example.com/bar", "bar", "nini")
    >>> add("b", "http://example.com/", "first", "blah")
    >>> add("b", "http://example.com/", "second", "spam")
    >>> add("a", "http://example.com", "1", "a")
    >>> add("Some Realm", "http://c.example.com:3128", "3", "c")
    >>> add("Some Realm", "d.example.com", "4", "d")
    >>> add("Some Realm", "e.example.com:3128", "5", "e")
    >>> mgr.find_user_password("Some Realm", "example.com")
    ('joe', 'password')
    >>> mgr.find_user_password("Some Realm", "http://example.com")
    ('joe', 'password')
    >>> mgr.find_user_password("Some Realm", "http://example.com/")
    ('joe', 'password')
    >>> mgr.find_user_password("Some Realm", "http://example.com/spam")
    ('joe', 'password')
    >>> mgr.find_user_password("Some Realm", "http://example.com/spam/spam")
    ('joe', 'password')
    >>> mgr.find_user_password("c", "http://example.com/foo")
    ('foo', 'ni')
    >>> mgr.find_user_password("c", "http://example.com/bar")
    ('bar', 'nini')
    Actually, this is really undefined ATM
    ## Currently, we use the highest-level path where more than one match:
    ## >>> mgr.find_user_password("Some Realm", "http://example.com/ni")
    ## ('joe', 'password')
    Use latest add_password() in case of conflict:
    >>> mgr.find_user_password("b", "http://example.com/")
    ('second', 'spam')
    No special relationship between a.example.com and example.com:
    >>> mgr.find_user_password("a", "http://example.com/")
    ('1', 'a')
    >>> mgr.find_user_password("a", "http://a.example.com/")
    (None, None)
    Ports:
    >>> mgr.find_user_password("Some Realm", "c.example.com")
    (None, None)
    >>> mgr.find_user_password("Some Realm", "c.example.com:3128")
    ('3', 'c')
    >>> mgr.find_user_password("Some Realm", "http://c.example.com:3128")
    ('3', 'c')
    >>> mgr.find_user_password("Some Realm", "d.example.com")
    ('4', 'd')
    >>> mgr.find_user_password("Some Realm", "e.example.com:3128")
    ('5', 'e')
    """
    pass
def test_password_manager_default_port(self):
    # Doctest container (see test_password_manager above); `self` is unused.
    # Exercises port matching when URLs do or don't carry a scheme.
    """
    >>> mgr = urllib2.HTTPPasswordMgr()
    >>> add = mgr.add_password
    The point to note here is that we can't guess the default port if there's
    no scheme.  This applies to both add_password and find_user_password.
    >>> add("f", "http://g.example.com:80", "10", "j")
    >>> add("g", "http://h.example.com", "11", "k")
    >>> add("h", "i.example.com:80", "12", "l")
    >>> add("i", "j.example.com", "13", "m")
    >>> mgr.find_user_password("f", "g.example.com:100")
    (None, None)
    >>> mgr.find_user_password("f", "g.example.com:80")
    ('10', 'j')
    >>> mgr.find_user_password("f", "g.example.com")
    (None, None)
    >>> mgr.find_user_password("f", "http://g.example.com:100")
    (None, None)
    >>> mgr.find_user_password("f", "http://g.example.com:80")
    ('10', 'j')
    >>> mgr.find_user_password("f", "http://g.example.com")
    ('10', 'j')
    >>> mgr.find_user_password("g", "h.example.com")
    ('11', 'k')
    >>> mgr.find_user_password("g", "h.example.com:80")
    ('11', 'k')
    >>> mgr.find_user_password("g", "http://h.example.com:80")
    ('11', 'k')
    >>> mgr.find_user_password("h", "i.example.com")
    (None, None)
    >>> mgr.find_user_password("h", "i.example.com:80")
    ('12', 'l')
    >>> mgr.find_user_password("h", "http://i.example.com:80")
    ('12', 'l')
    >>> mgr.find_user_password("i", "j.example.com")
    ('13', 'm')
    >>> mgr.find_user_password("i", "j.example.com:80")
    (None, None)
    >>> mgr.find_user_password("i", "http://j.example.com")
    ('13', 'm')
    >>> mgr.find_user_password("i", "http://j.example.com:80")
    (None, None)
    """
class MockOpener:
    # Minimal OpenerDirector stand-in: instead of doing any network work it
    # just records the arguments of the last open()/error() call so tests
    # can inspect them afterwards.
    addheaders = []

    def open(self, req, data=None):
        # remember what was opened (and with which body, if any)
        self.req = req
        self.data = data

    def error(self, proto, *args):
        # remember the protocol and the positional error arguments
        self.proto = proto
        self.args = args
class MockFile:
    # File-like object whose operations are all no-ops; used where a
    # response body object is required but never actually consumed.
    def read(self, count=None):
        return None

    def readline(self, count=None):
        return None

    def close(self):
        return None
class MockHeaders(dict):
    # dict with the mimetools.Message-style getheaders() accessor expected
    # by the redirect machinery; the requested header name is ignored and
    # every stored value is returned.
    def getheaders(self, name):
        # all values match regardless of *name*
        return self.values()
class MockResponse(StringIO.StringIO):
    # Canned HTTP response: the body is served through the StringIO base
    # class, while the status line pieces and headers are kept as plain
    # attributes exposed via the urllib2 response interface below.
    def __init__(self, code, msg, headers, data, url=None):
        StringIO.StringIO.__init__(self, data)
        self.code = code
        self.msg = msg
        self.headers = headers
        self.url = url

    def info(self):
        # urllib2 responses expose their headers through info()
        return self.headers

    def geturl(self):
        return self.url
class MockCookieJar:
    # Records the objects passed to the two CookieJar hooks that
    # HTTPCookieProcessor invokes, without touching any real cookies.
    def add_cookie_header(self, request):
        # stash the request cookies would have been written onto
        self.ach_req = request

    def extract_cookies(self, response, request):
        # stash both sides of the exchange for later inspection
        self.ec_req = request
        self.ec_r = response
class FakeMethod:
    # Callable installed on MockHandler subclasses: every invocation is
    # forwarded to the handler's handle() method together with the method
    # name and action string captured at construction time.
    def __init__(self, meth_name, action, handle):
        self.meth_name = meth_name
        self.action = action
        self.handle = handle

    def __call__(self, *args):
        # delegate, preserving all positional arguments
        return self.handle(self.meth_name, self.action, *args)
class MockHandler:
    # useful for testing handler machinery
    # see add_ordered_mock_handlers() docstring
    handler_order = 500
    def __init__(self, methods):
        self._define_methods(methods)
    def _define_methods(self, methods):
        # Each spec is either a bare method name (no action) or a
        # (name, action) pair; the action strings are interpreted by
        # handle() below.
        for spec in methods:
            if len(spec) == 2: name, action = spec
            else: name, action = spec, None
            meth = FakeMethod(name, action, self.handle)
            # NOTE: methods are installed on the *class*, which is why
            # add_ordered_mock_handlers() creates a fresh subclass per
            # handler instance.
            setattr(self.__class__, name, meth)
    def handle(self, fn_name, action, *args, **kwds):
        # Record the call on the parent opener's shared log, then perform
        # the requested canned action.
        self.parent.calls.append((self, fn_name, args, kwds))
        if action is None:
            return None
        elif action == "return self":
            return self
        elif action == "return response":
            res = MockResponse(200, "OK", {}, "")
            return res
        elif action == "return request":
            return Request("http://blah/")
        elif action.startswith("error"):
            # "error NNN": route through the opener's HTTP error machinery
            # with status NNN (left as a string if it isn't an integer)
            code = action[action.rfind(" ")+1:]
            try:
                code = int(code)
            except ValueError:
                pass
            res = MockResponse(200, "OK", {}, "")
            return self.parent.error("http", args[0], res, code, "", {})
        elif action == "raise":
            raise urllib2.URLError("blah")
        assert False  # unknown action string in a test spec
    def close(self): pass
    def add_parent(self, parent):
        # Hook this handler up to an opener and reset the shared call log.
        self.parent = parent
        self.parent.calls = []
    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # No handler_order, leave in original order. Yuck.
            return True
        return self.handler_order < other.handler_order
def add_ordered_mock_handlers(opener, meth_spec):
    """Create MockHandlers and add them to an OpenerDirector.
    meth_spec: list of lists of tuples and strings defining methods to define
    on handlers. eg:
    [["http_error", "ftp_open"], ["http_open"]]
    defines methods .http_error() and .ftp_open() on one handler, and
    .http_open() on another. These methods just record their arguments and
    return None. Using a tuple instead of a string causes the method to
    perform some action (see MockHandler.handle()), eg:
    [["http_error"], [("http_open", "return request")]]
    defines .http_error() on one handler (which simply returns None), and
    .http_open() on another handler, which returns a Request object.
    """
    handlers = []
    for offset, meths in enumerate(meth_spec):
        # A fresh subclass per handler: MockHandler installs FakeMethods on
        # the class, so sharing one class would leak methods across handlers.
        class MockHandlerSubclass(MockHandler):
            pass
        handler = MockHandlerSubclass(meths)
        # bump handler_order so handlers sort in spec order
        handler.handler_order += offset
        handler.add_parent(opener)
        handlers.append(handler)
        opener.add_handler(handler)
    return handlers
def build_test_opener(*handler_instances):
    # Convenience helper: wire the given handler instances into a fresh
    # OpenerDirector and return it.
    opener = OpenerDirector()
    for handler in handler_instances:
        opener.add_handler(handler)
    return opener
class MockHTTPHandler(urllib2.BaseHandler):
    # useful for testing redirections and auth
    # sends supplied headers and code as first response
    # sends 200 OK as second response
    def __init__(self, code, headers):
        self.code = code        # status code for the first response
        self.headers = headers  # raw header text for the first response
        self.reset()
    def reset(self):
        # forget any previous exchange so the handler can be reused
        self._count = 0
        self.requests = []
    def http_open(self, req):
        import mimetools, httplib, copy
        from StringIO import StringIO
        # keep a snapshot of every request seen (deepcopy so later header
        # mutations by the caller don't alter the record)
        self.requests.append(copy.deepcopy(req))
        if self._count == 0:
            # first call: report the configured error code/headers through
            # the opener's error machinery (triggers redirect/auth handlers)
            self._count = self._count + 1
            name = httplib.responses[self.code]
            msg = mimetools.Message(StringIO(self.headers))
            return self.parent.error(
                "http", req, MockFile(), self.code, name, msg)
        else:
            # subsequent calls: plain 200 OK with empty headers and body
            self.req = req
            msg = mimetools.Message(StringIO("\r\n\r\n"))
            return MockResponse(200, "OK", msg, "", req.get_full_url())
class MockPasswordManager:
    # Password-manager double: remembers the single credential registered
    # via add_password() and returns it for any realm/URI query, while
    # recording what was asked for.
    def add_password(self, realm, uri, user, password):
        self.realm = realm
        self.url = uri
        self.user = user
        self.password = password

    def find_user_password(self, realm, authuri):
        # record the query, then hand back the stored credentials
        self.target_realm = realm
        self.target_url = authuri
        return self.user, self.password
class OpenerDirectorTests(unittest.TestCase):
    # Exercises OpenerDirector's handler registration and dispatch machinery
    # using MockHandler subclasses (see add_ordered_mock_handlers()).
    def test_add_non_handler(self):
        # objects that define none of the handler-method names are rejected
        class NonHandler(object):
            pass
        self.assertRaises(TypeError,
                          OpenerDirector().add_handler, NonHandler())
    def test_badly_named_methods(self):
        # test work-around for three methods that accidentally follow the
        # naming conventions for handler methods
        # (*_open() / *_request() / *_response())
        # These used to call the accidentally-named methods, causing a
        # TypeError in real code; here, returning self from these mock
        # methods would either cause no exception, or AttributeError.
        from urllib2 import URLError
        o = OpenerDirector()
        meth_spec = [
            [("do_open", "return self"), ("proxy_open", "return self")],
            [("redirect_request", "return self")],
            ]
        handlers = add_ordered_mock_handlers(o, meth_spec)
        o.add_handler(urllib2.UnknownHandler())
        for scheme in "do", "proxy", "redirect":
            self.assertRaises(URLError, o.open, scheme+"://example.com/")
    def test_handled(self):
        # handler returning non-None means no more handlers will be called
        o = OpenerDirector()
        meth_spec = [
            ["http_open", "ftp_open", "http_error_302"],
            ["ftp_open"],
            [("http_open", "return self")],
            [("http_open", "return self")],
            ]
        handlers = add_ordered_mock_handlers(o, meth_spec)
        req = Request("http://example.com/")
        r = o.open(req)
        # Second .http_open() gets called, third doesn't, since second returned
        # non-None. Handlers without .http_open() never get any methods called
        # on them.
        # In fact, second mock handler defining .http_open() returns self
        # (instead of response), which becomes the OpenerDirector's return
        # value.
        self.assertEqual(r, handlers[2])
        calls = [(handlers[0], "http_open"), (handlers[2], "http_open")]
        for expected, got in zip(calls, o.calls):
            handler, name, args, kwds = got
            self.assertEqual((handler, name), expected)
            self.assertEqual(args, (req,))
    def test_handler_order(self):
        # handler_order, not registration order, decides dispatch order
        o = OpenerDirector()
        handlers = []
        for meths, handler_order in [
            ([("http_open", "return self")], 500),
            (["http_open"], 0),
            ]:
            class MockHandlerSubclass(MockHandler): pass
            h = MockHandlerSubclass(meths)
            h.handler_order = handler_order
            handlers.append(h)
            o.add_handler(h)
        r = o.open("http://example.com/")
        # handlers called in reverse order, thanks to their sort order
        self.assertEqual(o.calls[0][0], handlers[1])
        self.assertEqual(o.calls[1][0], handlers[0])
    def test_raise(self):
        # raising URLError stops processing of request
        o = OpenerDirector()
        meth_spec = [
            [("http_open", "raise")],
            [("http_open", "return self")],
            ]
        handlers = add_ordered_mock_handlers(o, meth_spec)
        req = Request("http://example.com/")
        self.assertRaises(urllib2.URLError, o.open, req)
        self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})])
    ## def test_error(self):
    ## # XXX this doesn't actually seem to be used in standard library,
    ## # but should really be tested anyway...
    def test_http_error(self):
        # XXX http_error_default
        # http errors are a special case
        o = OpenerDirector()
        meth_spec = [
            [("http_open", "error 302")],
            [("http_error_400", "raise"), "http_open"],
            [("http_error_302", "return response"), "http_error_303",
             "http_error"],
            # NOTE: ("http_error_302") is just a parenthesized string, not a
            # tuple — MockHandler treats it as a method name with no action.
            [("http_error_302")],
            ]
        handlers = add_ordered_mock_handlers(o, meth_spec)
        class Unknown:
            # compares equal to anything; used for the response placeholder
            def __eq__(self, other): return True
        req = Request("http://example.com/")
        r = o.open(req)
        assert len(o.calls) == 2
        calls = [(handlers[0], "http_open", (req,)),
                 (handlers[2], "http_error_302",
                  (req, Unknown(), 302, "", {}))]
        for expected, got in zip(calls, o.calls):
            handler, method_name, args = expected
            self.assertEqual((handler, method_name), got[:2])
            self.assertEqual(args, got[2])
    def test_processors(self):
        # *_request / *_response methods get called appropriately
        o = OpenerDirector()
        meth_spec = [
            [("http_request", "return request"),
             ("http_response", "return response")],
            [("http_request", "return request"),
             ("http_response", "return response")],
            ]
        handlers = add_ordered_mock_handlers(o, meth_spec)
        req = Request("http://example.com/")
        r = o.open(req)
        # processor methods are called on *all* handlers that define them,
        # not just the first handler that handles the request
        calls = [
            (handlers[0], "http_request"), (handlers[1], "http_request"),
            (handlers[0], "http_response"), (handlers[1], "http_response")]
        for i, (handler, name, args, kwds) in enumerate(o.calls):
            if i < 2:
                # *_request
                self.assertEqual((handler, name), calls[i])
                self.assertEqual(len(args), 1)
                self.assert_(isinstance(args[0], Request))
            else:
                # *_response
                self.assertEqual((handler, name), calls[i])
                self.assertEqual(len(args), 2)
                self.assert_(isinstance(args[0], Request))
                # response from opener.open is None, because there's no
                # handler that defines http_open to handle it
                self.assert_(args[1] is None or
                             isinstance(args[1], MockResponse))
def sanepathname2url(path):
    """Return *path* as a URL path suitable for building file:// URLs."""
    import urllib
    result = urllib.pathname2url(path)
    # On Windows pathname2url emits an extra "//" before the drive letter;
    # strip it so the file:// URLs built by the tests come out right.
    if result.startswith("///") and os.name == "nt":
        result = result[2:]
    # XXX don't ask me about the mac...
    return result
class HandlerTests(unittest.TestCase):
    # Tests for the concrete urllib2 handler classes (FTP, file, HTTP,
    # errors, cookies, redirects, proxies, auth), each driven through mock
    # collaborators so no real network traffic occurs.
    def test_ftp(self):
        class MockFTPWrapper:
            # stands in for the ftpwrapper returned by connect_ftp()
            def __init__(self, data): self.data = data
            def retrfile(self, filename, filetype):
                self.filename, self.filetype = filename, filetype
                return StringIO.StringIO(self.data), len(self.data)
        class NullFTPHandler(urllib2.FTPHandler):
            # FTPHandler that records connection parameters instead of
            # opening a real FTP connection
            def __init__(self, data): self.data = data
            def connect_ftp(self, user, passwd, host, port, dirs,
                            timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
                self.user, self.passwd = user, passwd
                self.host, self.port = host, port
                self.dirs = dirs
                self.ftpwrapper = MockFTPWrapper(self.data)
                return self.ftpwrapper
        import ftplib
        data = "rheum rhaponicum"
        h = NullFTPHandler(data)
        o = h.parent = MockOpener()
        for url, host, port, type_, dirs, filename, mimetype in [
            ("ftp://localhost/foo/bar/baz.html",
             "localhost", ftplib.FTP_PORT, "I",
             ["foo", "bar"], "baz.html", "text/html"),
            ("ftp://localhost:80/foo/bar/",
             "localhost", 80, "D",
             ["foo", "bar"], "", None),
            ("ftp://localhost/baz.gif;type=a",
             "localhost", ftplib.FTP_PORT, "A",
             [], "baz.gif", None),  # XXX really this should guess image/gif
            ]:
            req = Request(url)
            req.timeout = None
            r = h.ftp_open(req)
            # ftp authentication not yet implemented by FTPHandler
            self.assert_(h.user == h.passwd == "")
            self.assertEqual(h.host, socket.gethostbyname(host))
            self.assertEqual(h.port, port)
            self.assertEqual(h.dirs, dirs)
            self.assertEqual(h.ftpwrapper.filename, filename)
            self.assertEqual(h.ftpwrapper.filetype, type_)
            headers = r.info()
            self.assertEqual(headers.get("Content-type"), mimetype)
            self.assertEqual(int(headers["Content-length"]), len(data))
    def test_file(self):
        import rfc822, socket
        h = urllib2.FileHandler()
        o = h.parent = MockOpener()
        TESTFN = test_support.TESTFN
        urlpath = sanepathname2url(os.path.abspath(TESTFN))
        towrite = "hello, world\n"
        # several spellings of "this local file" must all open it
        urls = [
            "file://localhost%s" % urlpath,
            "file://%s" % urlpath,
            "file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
            ]
        try:
            localaddr = socket.gethostbyname(socket.gethostname())
        except socket.gaierror:
            localaddr = ''
        if localaddr:
            urls.append("file://%s%s" % (localaddr, urlpath))
        for url in urls:
            f = open(TESTFN, "wb")
            try:
                try:
                    f.write(towrite)
                finally:
                    f.close()
                r = h.file_open(Request(url))
                try:
                    data = r.read()
                    headers = r.info()
                    newurl = r.geturl()
                finally:
                    r.close()
                stats = os.stat(TESTFN)
                modified = rfc822.formatdate(stats.st_mtime)
            finally:
                os.remove(TESTFN)
            self.assertEqual(data, towrite)
            self.assertEqual(headers["Content-type"], "text/plain")
            self.assertEqual(headers["Content-length"], "13")
            self.assertEqual(headers["Last-modified"], modified)
        # these spellings must be rejected (bad port, missing file, or a
        # non-local host)
        for url in [
            "file://localhost:80%s" % urlpath,
            "file:///file_does_not_exist.txt",
            "file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
                                   os.getcwd(), TESTFN),
            "file://somerandomhost.ontheinternet.com%s/%s" %
            (os.getcwd(), TESTFN),
            ]:
            try:
                f = open(TESTFN, "wb")
                try:
                    f.write(towrite)
                finally:
                    f.close()
                self.assertRaises(urllib2.URLError,
                                  h.file_open, Request(url))
            finally:
                os.remove(TESTFN)
        h = urllib2.FileHandler()
        o = h.parent = MockOpener()
        # XXXX why does // mean ftp (and /// mean not ftp!), and where
        # is file: scheme specified? I think this is really a bug, and
        # what was intended was to distinguish between URLs like:
        # file:/blah.txt (a file)
        # file://localhost/blah.txt (a file)
        # file:///blah.txt (a file)
        # file://ftp.example.com/blah.txt (an ftp URL)
        for url, ftp in [
            ("file://ftp.example.com//foo.txt", True),
            ("file://ftp.example.com///foo.txt", False),
            # XXXX bug: fails with OSError, should be URLError
            ("file://ftp.example.com/foo.txt", False),
            ]:
            req = Request(url)
            try:
                h.file_open(req)
            # XXXX remove OSError when bug fixed
            except (urllib2.URLError, OSError):
                self.assert_(not ftp)
            else:
                self.assert_(o.req is req)
                self.assertEqual(req.type, "ftp")
    def test_http(self):
        class MockHTTPResponse:
            # bare-bones httplib response double
            def __init__(self, fp, msg, status, reason):
                self.fp = fp
                self.msg = msg
                self.status = status
                self.reason = reason
            def read(self):
                return ''
        class MockHTTPClass:
            # callable standing in for the httplib connection class;
            # records the request it is asked to send
            def __init__(self):
                self.req_headers = []
                self.data = None
                self.raise_on_endheaders = False
            def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
                self.host = host
                self.timeout = timeout
                return self
            def set_debuglevel(self, level):
                self.level = level
            def request(self, method, url, body=None, headers={}):
                self.method = method
                self.selector = url
                self.req_headers += headers.items()
                self.req_headers.sort()
                if body:
                    self.data = body
                if self.raise_on_endheaders:
                    import socket
                    raise socket.error()
            def getresponse(self):
                return MockHTTPResponse(MockFile(), {}, 200, "OK")
        h = urllib2.AbstractHTTPHandler()
        o = h.parent = MockOpener()
        url = "http://example.com/"
        for method, data in [("GET", None), ("POST", "blah")]:
            req = Request(url, data, {"Foo": "bar"})
            req.timeout = None
            req.add_unredirected_header("Spam", "eggs")
            http = MockHTTPClass()
            r = h.do_open(http, req)
            # result attributes
            r.read; r.readline  # wrapped MockFile methods
            r.info; r.geturl  # addinfourl methods
            r.code, r.msg == 200, "OK"  # added from MockHTTPClass.getreply()
            hdrs = r.info()
            hdrs.get; hdrs.has_key  # r.info() gives dict from .getreply()
            self.assertEqual(r.geturl(), url)
            self.assertEqual(http.host, "example.com")
            self.assertEqual(http.level, 0)
            self.assertEqual(http.method, method)
            self.assertEqual(http.selector, "/")
            self.assertEqual(http.req_headers,
                             [("Connection", "close"),
                              ("Foo", "bar"), ("Spam", "eggs")])
            self.assertEqual(http.data, data)
        # check socket.error converted to URLError
        http.raise_on_endheaders = True
        self.assertRaises(urllib2.URLError, h.do_open, http, req)
        # check adding of standard headers
        o.addheaders = [("Spam", "eggs")]
        for data in "", None:  # POST, GET
            req = Request("http://example.com/", data)
            r = MockResponse(200, "OK", {}, "")
            newreq = h.do_request_(req)
            if data is None:  # GET
                self.assert_("Content-length" not in req.unredirected_hdrs)
                self.assert_("Content-type" not in req.unredirected_hdrs)
            else:  # POST
                self.assertEqual(req.unredirected_hdrs["Content-length"], "0")
                self.assertEqual(req.unredirected_hdrs["Content-type"],
                                 "application/x-www-form-urlencoded")
            # XXX the details of Host could be better tested
            self.assertEqual(req.unredirected_hdrs["Host"], "example.com")
            self.assertEqual(req.unredirected_hdrs["Spam"], "eggs")
            # don't clobber existing headers
            req.add_unredirected_header("Content-length", "foo")
            req.add_unredirected_header("Content-type", "bar")
            req.add_unredirected_header("Host", "baz")
            req.add_unredirected_header("Spam", "foo")
            newreq = h.do_request_(req)
            self.assertEqual(req.unredirected_hdrs["Content-length"], "foo")
            self.assertEqual(req.unredirected_hdrs["Content-type"], "bar")
            self.assertEqual(req.unredirected_hdrs["Host"], "baz")
            self.assertEqual(req.unredirected_hdrs["Spam"], "foo")
    def test_http_doubleslash(self):
        # Checks that the presence of an unnecessary double slash in a url doesn't break anything
        # Previously, a double slash directly after the host could cause incorrect parsing of the url
        h = urllib2.AbstractHTTPHandler()
        o = h.parent = MockOpener()
        data = ""
        ds_urls = [
            "http://example.com/foo/bar/baz.html",
            "http://example.com//foo/bar/baz.html",
            "http://example.com/foo//bar/baz.html",
            "http://example.com/foo/bar//baz.html",
            ]
        for ds_url in ds_urls:
            ds_req = Request(ds_url, data)
            # Check whether host is determined correctly if there is no proxy
            np_ds_req = h.do_request_(ds_req)
            self.assertEqual(np_ds_req.unredirected_hdrs["Host"],"example.com")
            # Check whether host is determined correctly if there is a proxy
            ds_req.set_proxy("someproxy:3128",None)
            p_ds_req = h.do_request_(ds_req)
            self.assertEqual(p_ds_req.unredirected_hdrs["Host"],"example.com")
    def test_errors(self):
        # HTTPErrorProcessor passes 2xx through and routes everything else
        # to the opener's error machinery
        h = urllib2.HTTPErrorProcessor()
        o = h.parent = MockOpener()
        url = "http://example.com/"
        req = Request(url)
        # all 2xx are passed through
        r = MockResponse(200, "OK", {}, "", url)
        newr = h.http_response(req, r)
        self.assert_(r is newr)
        self.assert_(not hasattr(o, "proto"))  # o.error not called
        r = MockResponse(202, "Accepted", {}, "", url)
        newr = h.http_response(req, r)
        self.assert_(r is newr)
        self.assert_(not hasattr(o, "proto"))  # o.error not called
        r = MockResponse(206, "Partial content", {}, "", url)
        newr = h.http_response(req, r)
        self.assert_(r is newr)
        self.assert_(not hasattr(o, "proto"))  # o.error not called
        # anything else calls o.error (and MockOpener returns None, here)
        r = MockResponse(502, "Bad gateway", {}, "", url)
        self.assert_(h.http_response(req, r) is None)
        self.assertEqual(o.proto, "http")  # o.error called
        self.assertEqual(o.args, (req, r, 502, "Bad gateway", {}))
    def test_cookies(self):
        # HTTPCookieProcessor must call both CookieJar hooks with the
        # request/response pair
        cj = MockCookieJar()
        h = urllib2.HTTPCookieProcessor(cj)
        o = h.parent = MockOpener()
        req = Request("http://example.com/")
        r = MockResponse(200, "OK", {}, "")
        newreq = h.http_request(req)
        self.assert_(cj.ach_req is req is newreq)
        self.assertEquals(req.get_origin_req_host(), "example.com")
        self.assert_(not req.is_unverifiable())
        newr = h.http_response(req, r)
        self.assert_(cj.ec_req is req)
        self.assert_(cj.ec_r is r is newr)
    def test_redirect(self):
        from_url = "http://example.com/a.html"
        to_url = "http://example.com/b.html"
        h = urllib2.HTTPRedirectHandler()
        o = h.parent = MockOpener()
        # ordinary redirect behaviour
        for code in 301, 302, 303, 307:
            for data in None, "blah\nblah\n":
                method = getattr(h, "http_error_%s" % code)
                req = Request(from_url, data)
                req.add_header("Nonsense", "viking=withhold")
                if data is not None:
                    req.add_header("Content-Length", str(len(data)))
                req.add_unredirected_header("Spam", "spam")
                try:
                    method(req, MockFile(), code, "Blah",
                           MockHeaders({"location": to_url}))
                except urllib2.HTTPError:
                    # 307 in response to POST requires user OK
                    self.assert_(code == 307 and data is not None)
                self.assertEqual(o.req.get_full_url(), to_url)
                try:
                    self.assertEqual(o.req.get_method(), "GET")
                except AttributeError:
                    self.assert_(not o.req.has_data())
                # now it's a GET, there should not be headers regarding content
                # (possibly dragged from before being a POST)
                headers = [x.lower() for x in o.req.headers]
                self.assertTrue("content-length" not in headers)
                self.assertTrue("content-type" not in headers)
                self.assertEqual(o.req.headers["Nonsense"],
                                 "viking=withhold")
                self.assert_("Spam" not in o.req.headers)
                self.assert_("Spam" not in o.req.unredirected_hdrs)
        # loop detection
        req = Request(from_url)
        def redirect(h, req, url=to_url):
            h.http_error_302(req, MockFile(), 302, "Blah",
                             MockHeaders({"location": url}))
        # Note that the *original* request shares the same record of
        # redirections with the sub-requests caused by the redirections.
        # detect infinite loop redirect of a URL to itself
        req = Request(from_url, origin_req_host="example.com")
        count = 0
        try:
            while 1:
                redirect(h, req, "http://example.com/")
                count = count + 1
        except urllib2.HTTPError:
            # don't stop until max_repeats, because cookies may introduce state
            self.assertEqual(count, urllib2.HTTPRedirectHandler.max_repeats)
        # detect endless non-repeating chain of redirects
        req = Request(from_url, origin_req_host="example.com")
        count = 0
        try:
            while 1:
                redirect(h, req, "http://example.com/%d" % count)
                count = count + 1
        except urllib2.HTTPError:
            self.assertEqual(count,
                             urllib2.HTTPRedirectHandler.max_redirections)
    def test_cookie_redirect(self):
        # cookies shouldn't leak into redirected requests
        from cookielib import CookieJar
        from test.test_cookielib import interact_netscape
        cj = CookieJar()
        interact_netscape(cj, "http://www.example.com/", "spam=eggs")
        hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
        hdeh = urllib2.HTTPDefaultErrorHandler()
        hrh = urllib2.HTTPRedirectHandler()
        cp = urllib2.HTTPCookieProcessor(cj)
        o = build_test_opener(hh, hdeh, hrh, cp)
        o.open("http://www.example.com/")
        self.assert_(not hh.req.has_header("Cookie"))
    def test_proxy(self):
        # ProxyHandler must rewrite the request host to point at the proxy
        o = OpenerDirector()
        ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
        o.add_handler(ph)
        meth_spec = [
            [("http_open", "return response")]
            ]
        handlers = add_ordered_mock_handlers(o, meth_spec)
        req = Request("http://acme.example.com/")
        self.assertEqual(req.get_host(), "acme.example.com")
        r = o.open(req)
        self.assertEqual(req.get_host(), "proxy.example.com:3128")
        self.assertEqual([(handlers[0], "http_open")],
                         [tup[0:2] for tup in o.calls])
    def test_basic_auth(self, quote_char='"'):
        # quote_char lets the single-quoted-realm variant below reuse this
        opener = OpenerDirector()
        password_manager = MockPasswordManager()
        auth_handler = urllib2.HTTPBasicAuthHandler(password_manager)
        realm = "ACME Widget Store"
        http_handler = MockHTTPHandler(
            401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
            (quote_char, realm, quote_char) )
        opener.add_handler(auth_handler)
        opener.add_handler(http_handler)
        self._test_basic_auth(opener, auth_handler, "Authorization",
                              realm, http_handler, password_manager,
                              "http://acme.example.com/protected",
                              "http://acme.example.com/protected",
                              )
    def test_basic_auth_with_single_quoted_realm(self):
        self.test_basic_auth(quote_char="'")
    def test_proxy_basic_auth(self):
        opener = OpenerDirector()
        ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
        opener.add_handler(ph)
        password_manager = MockPasswordManager()
        auth_handler = urllib2.ProxyBasicAuthHandler(password_manager)
        realm = "ACME Networks"
        http_handler = MockHTTPHandler(
            407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
        opener.add_handler(auth_handler)
        opener.add_handler(http_handler)
        self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
                              realm, http_handler, password_manager,
                              "http://acme.example.com:3128/protected",
                              "proxy.example.com:3128",
                              )
    def test_basic_and_digest_auth_handlers(self):
        # HTTPDigestAuthHandler threw an exception if it couldn't handle a 40*
        # response (http://python.org/sf/1479302), where it should instead
        # return None to allow another handler (especially
        # HTTPBasicAuthHandler) to handle the response.
        # Also (http://python.org/sf/14797027, RFC 2617 section 1.2), we must
        # try digest first (since it's the strongest auth scheme), so we record
        # order of calls here to check digest comes first:
        class RecordingOpenerDirector(OpenerDirector):
            # keeps an ordered log of which auth scheme handled the 401
            def __init__(self):
                OpenerDirector.__init__(self)
                self.recorded = []
            def record(self, info):
                self.recorded.append(info)
        class TestDigestAuthHandler(urllib2.HTTPDigestAuthHandler):
            def http_error_401(self, *args, **kwds):
                self.parent.record("digest")
                urllib2.HTTPDigestAuthHandler.http_error_401(self,
                                                             *args, **kwds)
        class TestBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
            def http_error_401(self, *args, **kwds):
                self.parent.record("basic")
                urllib2.HTTPBasicAuthHandler.http_error_401(self,
                                                            *args, **kwds)
        opener = RecordingOpenerDirector()
        password_manager = MockPasswordManager()
        digest_handler = TestDigestAuthHandler(password_manager)
        basic_handler = TestBasicAuthHandler(password_manager)
        realm = "ACME Networks"
        http_handler = MockHTTPHandler(
            401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
        opener.add_handler(basic_handler)
        opener.add_handler(digest_handler)
        opener.add_handler(http_handler)
        # check basic auth isn't blocked by digest handler failing
        self._test_basic_auth(opener, basic_handler, "Authorization",
                              realm, http_handler, password_manager,
                              "http://acme.example.com/protected",
                              "http://acme.example.com/protected",
                              )
        # check digest was tried before basic (twice, because
        # _test_basic_auth called .open() twice)
        self.assertEqual(opener.recorded, ["digest", "basic"]*2)
    def _test_basic_auth(self, opener, auth_handler, auth_header,
                         realm, http_handler, password_manager,
                         request_url, protected_url):
        # Shared driver for the basic-auth variants above: registers a
        # credential, opens the URL twice (challenge + retry), and checks
        # the Authorization-style header on the second request only.
        import base64
        user, password = "wile", "coyote"
        # .add_password() fed through to password manager
        auth_handler.add_password(realm, request_url, user, password)
        self.assertEqual(realm, password_manager.realm)
        self.assertEqual(request_url, password_manager.url)
        self.assertEqual(user, password_manager.user)
        self.assertEqual(password, password_manager.password)
        r = opener.open(request_url)
        # should have asked the password manager for the username/password
        self.assertEqual(password_manager.target_realm, realm)
        self.assertEqual(password_manager.target_url, protected_url)
        # expect one request without authorization, then one with
        self.assertEqual(len(http_handler.requests), 2)
        self.assertFalse(http_handler.requests[0].has_header(auth_header))
        userpass = '%s:%s' % (user, password)
        auth_hdr_value = 'Basic '+base64.encodestring(userpass).strip()
        self.assertEqual(http_handler.requests[1].get_header(auth_header),
                         auth_hdr_value)
        # if the password manager can't find a password, the handler won't
        # handle the HTTP auth error
        password_manager.user = password_manager.password = None
        http_handler.reset()
        r = opener.open(request_url)
        self.assertEqual(len(http_handler.requests), 1)
        self.assertFalse(http_handler.requests[0].has_header(auth_header))
class MiscTests(unittest.TestCase):
    # Tests for build_opener()'s handler class/instance mixing and
    # default-handler override rules.
    def test_build_opener(self):
        class MyHTTPHandler(urllib2.HTTPHandler): pass
        class FooHandler(urllib2.BaseHandler):
            def foo_open(self): pass
        class BarHandler(urllib2.BaseHandler):
            def bar_open(self): pass
        build_opener = urllib2.build_opener
        o = build_opener(FooHandler, BarHandler)
        self.opener_has_handler(o, FooHandler)
        self.opener_has_handler(o, BarHandler)
        # can take a mix of classes and instances
        o = build_opener(FooHandler, BarHandler())
        self.opener_has_handler(o, FooHandler)
        self.opener_has_handler(o, BarHandler)
        # subclasses of default handlers override default handlers
        o = build_opener(MyHTTPHandler)
        self.opener_has_handler(o, MyHTTPHandler)
        # a particular case of overriding: default handlers can be passed
        # in explicitly
        o = build_opener()
        self.opener_has_handler(o, urllib2.HTTPHandler)
        o = build_opener(urllib2.HTTPHandler)
        self.opener_has_handler(o, urllib2.HTTPHandler)
        o = build_opener(urllib2.HTTPHandler())
        self.opener_has_handler(o, urllib2.HTTPHandler)
        # Issue2670: multiple handlers sharing the same base class
        class MyOtherHTTPHandler(urllib2.HTTPHandler): pass
        o = build_opener(MyHTTPHandler, MyOtherHTTPHandler)
        self.opener_has_handler(o, MyHTTPHandler)
        self.opener_has_handler(o, MyOtherHTTPHandler)
    def opener_has_handler(self, opener, handler_class):
        # assertion helper: fail unless the opener holds an instance of
        # exactly handler_class (subclasses don't count)
        for h in opener.handlers:
            if h.__class__ == handler_class:
                break
        else:
            self.assert_(False)
class RequestTests(unittest.TestCase):
    """Behavioral checks for urllib2.Request objects."""

    def setUp(self):
        self.get = urllib2.Request("http://www.python.org/~jeremy/")
        self.post = urllib2.Request("http://www.python.org/~jeremy/",
                                    "data",
                                    headers={"X-Test": "test"})

    def test_method(self):
        # A request constructed with data defaults to POST; without, GET.
        self.assertEqual("POST", self.post.get_method())
        self.assertEqual("GET", self.get.get_method())

    def test_add_data(self):
        self.assert_(not self.get.has_data())
        self.assertEqual("GET", self.get.get_method())
        # Attaching data flips the method over to POST.
        self.get.add_data("spam")
        self.assert_(self.get.has_data())
        self.assertEqual("POST", self.get.get_method())

    def test_get_full_url(self):
        self.assertEqual("http://www.python.org/~jeremy/",
                         self.get.get_full_url())

    def test_selector(self):
        self.assertEqual("/~jeremy/", self.get.get_selector())
        bare = urllib2.Request("http://www.python.org/")
        self.assertEqual("/", bare.get_selector())

    def test_get_type(self):
        self.assertEqual("http", self.get.get_type())

    def test_get_host(self):
        self.assertEqual("www.python.org", self.get.get_host())

    def test_get_host_unquote(self):
        # Percent-encoded characters in the host portion are unquoted.
        quoted = urllib2.Request("http://www.%70ython.org/")
        self.assertEqual("www.python.org", quoted.get_host())

    def test_proxy(self):
        self.assert_(not self.get.has_proxy())
        self.get.set_proxy("www.perl.org", "http")
        self.assert_(self.get.has_proxy())
        # The original host is remembered while get_host() reports the proxy.
        self.assertEqual("www.python.org", self.get.get_origin_req_host())
        self.assertEqual("www.perl.org", self.get.get_host())
def test_main(verbose=None):
    """Run this module's doctests, urllib2's doctests, then the TestCases."""
    from test import test_urllib2
    for module in (test_urllib2, urllib2):
        test_support.run_doctest(module, verbose)
    test_support.run_unittest(TrivialTests,
                              OpenerDirectorTests,
                              HandlerTests,
                              MiscTests,
                              RequestTests)

if __name__ == "__main__":
    test_main(verbose=True)
|
lgpl-2.1
|
markYoungH/chromium.src
|
tools/usb_gadget/gadget.py
|
54
|
14586
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generic USB gadget functionality.
"""
import struct
import usb_constants
class Gadget(object):
"""Basic functionality for a USB device.
Implements standard control requests assuming that a subclass will handle
class- or vendor-specific requests.
"""
def __init__(self, device_desc, fs_config_desc, hs_config_desc):
"""Create a USB gadget device.
Args:
device_desc: USB device descriptor.
fs_config_desc: Low/full-speed device descriptor.
hs_config_desc: High-speed device descriptor.
"""
self._speed = usb_constants.Speed.UNKNOWN
self._chip = None
self._device_desc = device_desc
self._fs_config_desc = fs_config_desc
self._hs_config_desc = hs_config_desc
# dict mapping language codes to a dict mapping indexes to strings
self._strings = {}
# dict mapping interface numbers to a set of endpoint addresses
self._active_endpoints = {}
def GetDeviceDescriptor(self):
return self._device_desc
def GetFullSpeedConfigurationDescriptor(self):
return self._fs_config_desc
def GetHighSpeedConfigurationDescriptor(self):
return self._hs_config_desc
def GetConfigurationDescriptor(self):
if self._speed == usb_constants.Speed.FULL:
return self._fs_config_desc
elif self._speed == usb_constants.Speed.HIGH:
return self._hs_config_desc
else:
raise RuntimeError('Device is not connected.')
def GetSpeed(self):
return self._speed
def AddStringDescriptor(self, index, value, lang=0x0409):
"""Add a string descriptor to this device.
Args:
index: String descriptor index (matches 'i' fields in descriptors).
value: The string.
lang: Language code (default: English).
Raises:
ValueError: The index or language code is invalid.
"""
if index < 1 or index > 255:
raise ValueError('String descriptor index out of range.')
if lang < 0 or lang > 0xffff:
raise ValueError('String descriptor language code out of range.')
lang_strings = self._strings.setdefault(lang, {})
lang_strings[index] = value
def Connected(self, chip, speed):
"""The device has been connected to a USB host.
Args:
chip: USB controller.
speed: Connection speed.
"""
self._speed = speed
self._chip = chip
def Disconnected(self):
"""The device has been disconnected from the USB host."""
self._speed = usb_constants.Speed.UNKNOWN
self._chip = None
self._active_endpoints.clear()
def IsConnected(self):
return self._chip is not None
def ControlRead(self, request_type, request, value, index, length):
"""Handle a read on the control pipe (endpoint zero).
Args:
request_type: bmRequestType field of the setup packet.
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
length: Maximum amount of data the host expects the device to return.
Returns:
A buffer to return to the USB host with len <= length on success or
None to stall the pipe.
"""
assert request_type & usb_constants.Dir.IN
typ = request_type & usb_constants.Type.MASK
recipient = request_type & usb_constants.Recipient.MASK
if typ == usb_constants.Type.STANDARD:
return self.StandardControlRead(
recipient, request, value, index, length)
elif typ == usb_constants.Type.CLASS:
return self.ClassControlRead(
recipient, request, value, index, length)
elif typ == usb_constants.Type.VENDOR:
return self.VendorControlRead(
recipient, request, value, index, length)
def ControlWrite(self, request_type, request, value, index, data):
"""Handle a write to the control pipe (endpoint zero).
Args:
request_type: bmRequestType field of the setup packet.
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
data: Data stage of the request.
Returns:
True on success, None to stall the pipe.
"""
assert not request_type & usb_constants.Dir.IN
typ = request_type & usb_constants.Type.MASK
recipient = request_type & usb_constants.Recipient.MASK
if typ == usb_constants.Type.STANDARD:
return self.StandardControlWrite(
recipient, request, value, index, data)
elif typ == usb_constants.Type.CLASS:
return self.ClassControlWrite(
recipient, request, value, index, data)
elif typ == usb_constants.Type.VENDOR:
return self.VendorControlWrite(
recipient, request, value, index, data)
def SendPacket(self, endpoint, data):
"""Send a data packet on the given endpoint.
Args:
endpoint: Endpoint address.
data: Data buffer.
Raises:
ValueError: If the endpoint address is not valid.
RuntimeError: If the device is not connected.
"""
if self._chip is None:
raise RuntimeError('Device is not connected.')
if not endpoint & usb_constants.Dir.IN:
raise ValueError('Cannot write to non-input endpoint.')
self._chip.SendPacket(endpoint, data)
def ReceivePacket(self, endpoint, data):
"""Handle an incoming data packet on one of the device's active endpoints.
This method should be overridden by a subclass implementing endpoint-based
data transfers.
Args:
endpoint: Endpoint address.
data: Data buffer.
"""
pass
def HaltEndpoint(self, endpoint):
"""Signals a STALL condition to the host on the given endpoint.
Args:
endpoint: Endpoint address.
"""
self._chip.HaltEndpoint(endpoint)
def StandardControlRead(self, recipient, request, value, index, length):
"""Handle standard control transfers.
Args:
recipient: Request recipient (device, interface, endpoint, etc.)
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
length: Maximum amount of data the host expects the device to return.
Returns:
A buffer to return to the USB host with len <= length on success or
None to stall the pipe.
"""
if request == usb_constants.Request.GET_DESCRIPTOR:
desc_type = value >> 8
desc_index = value & 0xff
desc_lang = index
print 'GetDescriptor(recipient={}, type={}, index={}, lang={})'.format(
recipient, desc_type, desc_index, desc_lang)
return self.GetDescriptor(recipient, desc_type, desc_index, desc_lang,
length)
def GetDescriptor(self, recipient, typ, index, lang, length):
"""Handle a standard GET_DESCRIPTOR request.
See Universal Serial Bus Specification Revision 2.0 section 9.4.3.
Args:
recipient: Request recipient (device, interface, endpoint, etc.)
typ: Descriptor type.
index: Descriptor index.
lang: Descriptor language code.
length: Maximum amount of data the host expects the device to return.
Returns:
The value of the descriptor or None to stall the pipe.
"""
if recipient == usb_constants.Recipient.DEVICE:
if typ == usb_constants.DescriptorType.STRING:
return self.GetStringDescriptor(index, lang, length)
def ClassControlRead(self, recipient, request, value, index, length):
"""Handle class-specific control transfers.
This function should be overridden by a subclass implementing a particular
device class.
Args:
recipient: Request recipient (device, interface, endpoint, etc.)
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
length: Maximum amount of data the host expects the device to return.
Returns:
A buffer to return to the USB host with len <= length on success or
None to stall the pipe.
"""
_ = recipient, request, value, index, length
return None
def VendorControlRead(self, recipient, request, value, index, length):
"""Handle vendor-specific control transfers.
This function should be overridden by a subclass if implementing a device
that responds to vendor-specific requests.
Args:
recipient: Request recipient (device, interface, endpoint, etc.)
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
length: Maximum amount of data the host expects the device to return.
Returns:
A buffer to return to the USB host with len <= length on success or
None to stall the pipe.
"""
_ = recipient, request, value, index, length
return None
def StandardControlWrite(self, recipient, request, value, index, data):
"""Handle standard control transfers.
Args:
recipient: Request recipient (device, interface, endpoint, etc.)
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
data: Data stage of the request.
Returns:
True on success, None to stall the pipe.
"""
_ = data
if request == usb_constants.Request.SET_CONFIGURATION:
if recipient == usb_constants.Recipient.DEVICE:
return self.SetConfiguration(value)
elif request == usb_constants.Request.SET_INTERFACE:
if recipient == usb_constants.Recipient.INTERFACE:
return self.SetInterface(index, value)
def ClassControlWrite(self, recipient, request, value, index, data):
"""Handle class-specific control transfers.
This function should be overridden by a subclass implementing a particular
device class.
Args:
recipient: Request recipient (device, interface, endpoint, etc.)
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
data: Data stage of the request.
Returns:
True on success, None to stall the pipe.
"""
_ = recipient, request, value, index, data
return None
def VendorControlWrite(self, recipient, request, value, index, data):
"""Handle vendor-specific control transfers.
This function should be overridden by a subclass if implementing a device
that responds to vendor-specific requests.
Args:
recipient: Request recipient (device, interface, endpoint, etc.)
request: bRequest field of the setup packet.
value: wValue field of the setup packet.
index: wIndex field of the setup packet.
data: Data stage of the request.
Returns:
True on success, None to stall the pipe.
"""
_ = recipient, request, value, index, data
return None
def GetStringDescriptor(self, index, lang, length):
"""Handle a GET_DESCRIPTOR(String) request from the host.
Descriptor index 0 returns the set of languages supported by the device.
All other indices return the string descriptors registered with those
indices.
See Universal Serial Bus Specification Revision 2.0 section 9.6.7.
Args:
index: Descriptor index.
lang: Descriptor language code.
length: Maximum amount of data the host expects the device to return.
Returns:
The string descriptor or None to stall the pipe if the descriptor is not
found.
"""
if index == 0:
length = 2 + len(self._strings) * 2
header = struct.pack('<BB', length, usb_constants.DescriptorType.STRING)
lang_codes = [struct.pack('<H', lang)
for lang in self._strings.iterkeys()]
buf = header + ''.join(lang_codes)
assert len(buf) == length
return buf[:length]
elif lang not in self._strings:
return None
elif index not in self._strings[lang]:
return None
else:
string = self._strings[lang][index].encode('UTF-16LE')
header = struct.pack(
'<BB', 2 + len(string), usb_constants.DescriptorType.STRING)
buf = header + string
return buf[:length]
def SetConfiguration(self, index):
"""Handle a SET_CONFIGURATION request from the host.
See Universal Serial Bus Specification Revision 2.0 section 9.4.7.
Args:
index: Configuration index selected.
Returns:
True on success, None on error to stall the pipe.
"""
print 'SetConfiguration({})'.format(index)
for endpoint_addrs in self._active_endpoints.values():
for endpoint_addr in endpoint_addrs:
self._chip.StopEndpoint(endpoint_addr)
endpoint_addrs.clear()
if index == 0:
# SET_CONFIGRATION(0) puts the device into the Address state which
# Windows does before suspending the port.
return True
elif index != 1:
return None
config_desc = self.GetConfigurationDescriptor()
for interface_desc in config_desc.GetInterfaces():
if interface_desc.bAlternateSetting != 0:
continue
endpoint_addrs = self._active_endpoints.setdefault(
interface_desc.bInterfaceNumber, set())
for endpoint_desc in interface_desc.GetEndpoints():
self._chip.StartEndpoint(endpoint_desc)
endpoint_addrs.add(endpoint_desc.bEndpointAddress)
return True
def SetInterface(self, interface, alt_setting):
"""Handle a SET_INTERFACE request from the host.
See Universal Serial Bus Specification Revision 2.0 section 9.4.10.
Args:
interface: Interface number to configure.
alt_setting: Alternate setting to select.
Returns:
True on success, None on error to stall the pipe.
"""
print 'SetInterface({}, {})'.format(interface, alt_setting)
config_desc = self.GetConfigurationDescriptor()
interface_desc = None
for interface_option in config_desc.GetInterfaces():
if (interface_option.bInterfaceNumber == interface and
interface_option.bAlternateSetting == alt_setting):
interface_desc = interface_option
if interface_desc is None:
return None
endpoint_addrs = self._active_endpoints.setdefault(interface, set())
for endpoint_addr in endpoint_addrs:
self._chip.StopEndpoint(endpoint_addr)
for endpoint_desc in interface_desc.GetEndpoints():
self._chip.StartEndpoint(endpoint_desc)
endpoint_addrs.add(endpoint_desc.bEndpointAddress)
return True
|
bsd-3-clause
|
antonve/s4-project-mooc
|
common/djangoapps/student/tests/test_password_policy.py
|
113
|
12723
|
# -*- coding: utf-8 -*-
"""
This test file will verify proper password policy enforcement, which is an option feature
"""
import json
from django.test import TestCase
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser
from django.utils.importlib import import_module
from django.test.utils import override_settings
from django.conf import settings
from mock import patch
from edxmako.tests import mako_middleware_process_request
from external_auth.models import ExternalAuthMap
from student.views import create_account
@patch.dict("django.conf.settings.FEATURES", {'ENFORCE_PASSWORD_POLICY': True})
class TestPasswordPolicy(TestCase):
    """
    Go through some password policy tests to make sure things are properly working
    """
    def setUp(self):
        super(TestPasswordPolicy, self).setUp()
        self.url = reverse('create_account')
        self.request_factory = RequestFactory()
        # Base registration form; each test fills in 'password'.
        self.url_params = {
            'username': 'username',
            'email': 'foo_bar@bar.com',
            'name': 'username',
            'terms_of_service': 'true',
            'honor_code': 'true',
        }

    @override_settings(PASSWORD_MIN_LENGTH=6)
    def test_password_length_too_short(self):
        self.url_params['password'] = 'aaa'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Invalid Length (must be 6 characters or more)",
        )

    @override_settings(PASSWORD_MIN_LENGTH=6)
    def test_password_length_long_enough(self):
        self.url_params['password'] = 'ThisIsALongerPassword'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @override_settings(PASSWORD_MAX_LENGTH=12)
    def test_password_length_too_long(self):
        self.url_params['password'] = 'ThisPasswordIsWayTooLong'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Invalid Length (must be 12 characters or less)",
        )

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'UPPER': 3})
    def test_password_not_enough_uppercase(self):
        self.url_params['password'] = 'thisshouldfail'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Must be more complex (must contain 3 or more uppercase characters)",
        )

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'UPPER': 3})
    def test_password_enough_uppercase(self):
        self.url_params['password'] = 'ThisShouldPass'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'LOWER': 3})
    def test_password_not_enough_lowercase(self):
        self.url_params['password'] = 'THISSHOULDFAIL'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Must be more complex (must contain 3 or more lowercase characters)",
        )

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'LOWER': 3})
    def test_password_enough_lowercase(self):
        self.url_params['password'] = 'ThisShouldPass'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'DIGITS': 3})
    def test_not_enough_digits(self):
        self.url_params['password'] = 'thishasnodigits'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Must be more complex (must contain 3 or more digits)",
        )

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'DIGITS': 3})
    def test_enough_digits(self):
        self.url_params['password'] = 'Th1sSh0uldPa88'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'PUNCTUATION': 3})
    def test_not_enough_punctuations(self):
        self.url_params['password'] = 'thisshouldfail'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Must be more complex (must contain 3 or more punctuation characters)",
        )

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'PUNCTUATION': 3})
    def test_enough_punctuations(self):
        self.url_params['password'] = 'Th!sSh.uldPa$*'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'WORDS': 3})
    def test_not_enough_words(self):
        self.url_params['password'] = 'thisshouldfail'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Must be more complex (must contain 3 or more unique words)",
        )

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'WORDS': 3})
    def test_enough_words(self):  # renamed from 'test_enough_wordss' (typo)
        self.url_params['password'] = u'this should pass'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {
        'PUNCTUATION': 3,
        'WORDS': 3,
        'DIGITS': 3,
        'LOWER': 3,
        'UPPER': 3,
    })
    def test_multiple_errors_fail(self):
        self.url_params['password'] = 'thisshouldfail'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        errstring = (
            "Password: Must be more complex ("
            "must contain 3 or more uppercase characters, "
            "must contain 3 or more digits, "
            "must contain 3 or more punctuation characters, "
            "must contain 3 or more unique words"
            ")"
        )
        self.assertEqual(obj['value'], errstring)

    @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {
        'PUNCTUATION': 3,
        'WORDS': 3,
        'DIGITS': 3,
        'LOWER': 3,
        'UPPER': 3,
    })
    def test_multiple_errors_pass(self):
        self.url_params['password'] = u'tH1s Sh0u!d P3#$'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @override_settings(PASSWORD_DICTIONARY=['foo', 'bar'])
    @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1)
    def test_dictionary_similarity_fail1(self):
        self.url_params['password'] = 'foo'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Too similar to a restricted dictionary word.",
        )

    @override_settings(PASSWORD_DICTIONARY=['foo', 'bar'])
    @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1)
    def test_dictionary_similarity_fail2(self):
        self.url_params['password'] = 'bar'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Too similar to a restricted dictionary word.",
        )

    @override_settings(PASSWORD_DICTIONARY=['foo', 'bar'])
    @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1)
    def test_dictionary_similarity_fail3(self):
        # 'fo0' is within edit distance 1 of dictionary word 'foo'.
        self.url_params['password'] = 'fo0'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Password: Too similar to a restricted dictionary word.",
        )

    @override_settings(PASSWORD_DICTIONARY=['foo', 'bar'])
    @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1)
    def test_dictionary_similarity_pass(self):
        self.url_params['password'] = 'this_is_ok'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    def test_with_unicode(self):
        self.url_params['password'] = u'四節比分和七年前'
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])

    @override_settings(PASSWORD_MIN_LENGTH=6, SESSION_ENGINE='django.contrib.sessions.backends.cache')
    def test_ext_auth_password_length_too_short(self):
        """
        Tests that even if password policy is enforced, ext_auth registrations aren't subject to it
        """
        self.url_params['password'] = 'aaa'  # shouldn't pass validation
        request = self.request_factory.post(self.url, self.url_params)
        # now indicate we are doing ext_auth by setting 'ExternalAuthMap' in the session.
        request.session = import_module(settings.SESSION_ENGINE).SessionStore()  # empty session
        extauth = ExternalAuthMap(external_id='withmap@stanford.edu',
                                  external_email='withmap@stanford.edu',
                                  internal_password=self.url_params['password'],
                                  external_domain='shib:https://idp.stanford.edu/')
        request.session['ExternalAuthMap'] = extauth
        request.user = AnonymousUser()

        mako_middleware_process_request(request)
        response = create_account(request)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])
class TestUsernamePasswordNonmatch(TestCase):
    """
    Test that registration username and password fields differ
    """
    def setUp(self):
        super(TestUsernamePasswordNonmatch, self).setUp()
        self.url = reverse('create_account')
        # Base registration form; tests fill in 'username' and 'password'.
        self.url_params = {
            'username': 'username',
            'email': 'foo_bar@bar.com',
            'name': 'username',
            'terms_of_service': 'true',
            'honor_code': 'true',
        }

    def test_with_username_password_match(self):
        """Registration is rejected when the password equals the username."""
        self.url_params['username'] = "foobar"
        self.url_params['password'] = "foobar"
        response = self.client.post(self.url, self.url_params)
        # Use assertEqual; assertEquals is a deprecated alias.
        self.assertEqual(response.status_code, 400)
        obj = json.loads(response.content)
        self.assertEqual(
            obj['value'],
            "Username and password fields cannot match",
        )

    def test_with_username_password_nonmatch(self):
        """Registration succeeds when username and password differ."""
        self.url_params['username'] = "foobar"
        self.url_params['password'] = "nonmatch"
        response = self.client.post(self.url, self.url_params)
        self.assertEqual(response.status_code, 200)
        obj = json.loads(response.content)
        self.assertTrue(obj['success'])
|
agpl-3.0
|
ATIX-AG/ansible
|
test/units/modules/network/dellos10/test_dellos10_command.py
|
44
|
4313
|
# (c) 2016 Red Hat Inc.
#
# (c) 2017 Dell EMC.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.dellos10 import dellos10_command
from units.modules.utils import set_module_args
from .dellos10_module import TestDellos10Module, load_fixture
class TestDellos10CommandModule(TestDellos10Module):
    """Unit tests for the dellos10_command module."""

    module = dellos10_command

    def setUp(self):
        super(TestDellos10CommandModule, self).setUp()
        self.mock_run_commands = patch(
            'ansible.modules.network.dellos10.dellos10_command.run_commands')
        self.run_commands = self.mock_run_commands.start()

    def tearDown(self):
        super(TestDellos10CommandModule, self).tearDown()
        self.mock_run_commands.stop()

    def load_fixtures(self, commands=None):
        """Make run_commands answer from fixture files named after each command."""
        def read_fixtures(*args, **kwargs):
            _module, requested = args
            responses = []
            for entry in requested:
                try:
                    # JSON-encoded commands carry the real command string inside.
                    command = json.loads(entry['command'])['command']
                except ValueError:
                    command = entry['command']
                fixture_name = str(command).replace(' ', '_')
                responses.append(load_fixture(fixture_name))
            return responses

        self.run_commands.side_effect = read_fixtures

    def test_dellos10_command_simple(self):
        set_module_args(dict(commands=['show version']))
        result = self.execute_module()
        stdout = result['stdout']
        self.assertEqual(len(stdout), 1)
        self.assertTrue(stdout[0].startswith('Dell EMC Networking'))

    def test_dellos10_command_multiple(self):
        set_module_args(dict(commands=['show version'] * 2))
        result = self.execute_module()
        stdout = result['stdout']
        self.assertEqual(len(stdout), 2)
        self.assertTrue(stdout[0].startswith('Dell EMC Networking'))

    def test_dellos10_command_wait_for(self):
        set_module_args(dict(commands=['show version'],
                             wait_for='result[0] contains "Dell EMC"'))
        self.execute_module()

    def test_dellos10_command_wait_for_fails(self):
        set_module_args(dict(commands=['show version'],
                             wait_for='result[0] contains "test string"'))
        self.execute_module(failed=True)
        # Default retry count is 10.
        self.assertEqual(self.run_commands.call_count, 10)

    def test_dellos10_command_retries(self):
        set_module_args(dict(commands=['show version'],
                             wait_for='result[0] contains "test string"',
                             retries=2))
        self.execute_module(failed=True)
        self.assertEqual(self.run_commands.call_count, 2)

    def test_dellos10_command_match_any(self):
        conditions = ['result[0] contains "Dell EMC"',
                      'result[0] contains "test string"']
        set_module_args(dict(commands=['show version'],
                             wait_for=conditions, match='any'))
        self.execute_module()

    def test_dellos10_command_match_all(self):
        conditions = ['result[0] contains "Dell EMC"',
                      'result[0] contains "OS10 Enterprise"']
        set_module_args(dict(commands=['show version'],
                             wait_for=conditions, match='all'))
        self.execute_module()

    def test_dellos10_command_match_all_failure(self):
        conditions = ['result[0] contains "Dell EMC"',
                      'result[0] contains "test string"']
        set_module_args(dict(commands=['show version', 'show version'],
                             wait_for=conditions, match='all'))
        self.execute_module(failed=True)
|
gpl-3.0
|
shahar-stratoscale/nova
|
nova/cells/state.py
|
9
|
18148
|
# Copyright (c) 2012 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
CellState Manager
"""
import copy
import datetime
import functools
from oslo.config import cfg
from nova.cells import rpc_driver
from nova import context
from nova.db import base
from nova import exception
from nova.openstack.common import fileutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
from nova.openstack.common import units
from nova import rpc
from nova import utils
# Tunables for how/when the cell state manager refreshes its view of the
# cells; registered under the [cells] config group below.
cell_state_manager_opts = [
    cfg.IntOpt('db_check_interval',
               default=60,
               help='Interval, in seconds, for getting fresh cell '
                    'information from the database.'),
    cfg.StrOpt('cells_config',
               help='Configuration file from which to read cells '
                    'configuration. If given, overrides reading cells '
                    'from the database.'),
]


LOG = logging.getLogger(__name__)

CONF = cfg.CONF
# Make options declared in nova.cells.opts readable from this module.
CONF.import_opt('name', 'nova.cells.opts', group='cells')
CONF.import_opt('reserve_percent', 'nova.cells.opts', group='cells')
CONF.import_opt('mute_child_interval', 'nova.cells.opts', group='cells')
#CONF.import_opt('capabilities', 'nova.cells.opts', group='cells')
CONF.register_opts(cell_state_manager_opts, group='cells')
class CellState(object):
    """Holds information for a particular cell."""

    def __init__(self, cell_name, is_me=False):
        self.name = cell_name
        self.is_me = is_me
        self.last_seen = datetime.datetime.min
        self.capabilities = {}
        self.capacities = {}
        self.db_info = {}
        # TODO(comstud): The DB will specify the driver to use to talk
        # to this cell, but there's no column for this yet.  The only
        # available driver is the rpc driver.
        self.driver = rpc_driver.CellsRPCDriver()

    def update_db_info(self, cell_db_info):
        """Update cell credentials from db."""
        self.db_info = dict((key, val)
                            for key, val in cell_db_info.iteritems()
                            if key != 'name')

    def update_capabilities(self, cell_metadata):
        """Update cell capabilities for a cell."""
        self.capabilities = cell_metadata
        self.last_seen = timeutils.utcnow()

    def update_capacities(self, capacities):
        """Update capacity information for a cell."""
        self.capacities = capacities
        self.last_seen = timeutils.utcnow()

    def get_cell_info(self):
        """Return subset of cell information for OS API use."""
        cell_info = dict(name=self.name, capabilities=self.capabilities)
        if self.db_info:
            for field in ('is_parent', 'weight_scale', 'weight_offset'):
                cell_info[field] = self.db_info[field]
            url = rpc.get_transport_url(self.db_info['transport_url'])
            if url.hosts:
                first_host = url.hosts[0]
                # Map transport URL attributes onto their API field names.
                for attr, api_key in (('username', 'username'),
                                      ('hostname', 'rpc_host'),
                                      ('port', 'rpc_port')):
                    cell_info[api_key] = getattr(first_host, attr)
        return cell_info

    def send_message(self, message):
        """Send a message to a cell.  Just forward this to the driver,
        passing ourselves and the message as arguments.
        """
        self.driver.send_message_to_cell(self, message)

    def __repr__(self):
        who = "me" if self.is_me else "not_me"
        return "Cell '{0}' ({1})".format(self.name, who)
def sync_before(f):
    """Decorator: refresh cell data (when due) before running the method.

    The wrapped method's ``self`` must provide ``_cell_data_sync()``,
    which re-reads cell information if the periodic interval elapsed.
    """
    @functools.wraps(f)
    def inner(self, *args, **kwargs):
        self._cell_data_sync()
        result = f(self, *args, **kwargs)
        return result
    return inner
def sync_after(f):
    """Decorator: force an immediate cell data sync after the method runs.

    Used on mutating methods so the in-memory cache reflects the change
    right away; the method's return value is passed through unchanged.
    """
    @functools.wraps(f)
    def inner(self, *args, **kwargs):
        ret = f(self, *args, **kwargs)
        self._cell_data_sync(force=True)
        return ret
    return inner
_unset = object()
class CellStateManager(base.Base):
    """Tracks the state of our own cell plus all parent and child cells.

    Acts as a factory: instantiating ``CellStateManager`` directly
    returns a ``CellStateManagerFile`` when CONF.cells.cells_config is
    set, otherwise a ``CellStateManagerDB``.
    """
    def __new__(cls, cell_state_cls=None, cells_config=_unset):
        # Subclasses fall through to normal construction.
        if cls is not CellStateManager:
            return super(CellStateManager, cls).__new__(cls)
        if cells_config is _unset:
            cells_config = CONF.cells.cells_config
        if cells_config:
            config_path = CONF.find_file(cells_config)
            if not config_path:
                raise cfg.ConfigFilesNotFoundError(config_files=[cells_config])
            return CellStateManagerFile(cell_state_cls, config_path)
        return CellStateManagerDB(cell_state_cls)
    def __init__(self, cell_state_cls=None):
        super(CellStateManager, self).__init__()
        if not cell_state_cls:
            cell_state_cls = CellState
        self.cell_state_cls = cell_state_cls
        self.my_cell_state = cell_state_cls(CONF.cells.name, is_me=True)
        self.parent_cells = {}
        self.child_cells = {}
        # Force an immediate sync so the maps are populated before use.
        self.last_cell_db_check = datetime.datetime.min
        self._cell_data_sync(force=True)
        # Parse configured capabilities of the form "name=value" or
        # "name=value1;value2;..." into sets of values.
        my_cell_capabs = {}
        for cap in CONF.cells.capabilities:
            name, value = cap.split('=', 1)
            if ';' in value:
                values = set(value.split(';'))
            else:
                values = set([value])
            my_cell_capabs[name] = values
        self.my_cell_state.update_capabilities(my_cell_capabs)
    def _refresh_cells_from_dict(self, db_cells_dict):
        """Make our cell info map match the db."""
        # Update current cells.  Delete ones that disappeared (or whose
        # parent/child relationship flipped).
        for cells_dict in (self.parent_cells, self.child_cells):
            for cell_name, cell_info in cells_dict.items():
                is_parent = cell_info.db_info['is_parent']
                db_dict = db_cells_dict.get(cell_name)
                if db_dict and is_parent == db_dict['is_parent']:
                    cell_info.update_db_info(db_dict)
                else:
                    del cells_dict[cell_name]
        # Add new cells
        for cell_name, db_info in db_cells_dict.items():
            if db_info['is_parent']:
                cells_dict = self.parent_cells
            else:
                cells_dict = self.child_cells
            if cell_name not in cells_dict:
                cells_dict[cell_name] = self.cell_state_cls(cell_name)
            cells_dict[cell_name].update_db_info(db_info)
    def _time_to_sync(self):
        """Is it time to sync the DB against our memory cache?"""
        # NOTE(review): uses timedelta.seconds (not total_seconds()), so
        # whole days in the gap are ignored — preserved original behavior.
        diff = timeutils.utcnow() - self.last_cell_db_check
        return diff.seconds >= CONF.cells.db_check_interval
    def _update_our_capacity(self, ctxt=None):
        """Update our capacity in the self.my_cell_state CellState.
        This will add/update 2 entries in our CellState.capacities,
        'ram_free' and 'disk_free'.
        The values of these are both dictionaries with the following
        format:
        {'total_mb': <total_memory_free_in_the_cell>,
         'units_by_mb: <units_dictionary>}
        <units_dictionary> contains the number of units that we can build for
        every distinct memory or disk requirement that we have based on
        instance types.  This number is computed by looking at room available
        on every compute_node.
        Take the following instance_types as an example:
        [{'memory_mb': 1024, 'root_gb': 10, 'ephemeral_gb': 100},
         {'memory_mb': 2048, 'root_gb': 20, 'ephemeral_gb': 200}]
        capacities['ram_free']['units_by_mb'] would contain the following:
        {'1024': <number_of_instances_that_will_fit>,
         '2048': <number_of_instances_that_will_fit>}
        capacities['disk_free']['units_by_mb'] would contain the following:
        {'122880': <number_of_instances_that_will_fit>,
         '225280': <number_of_instances_that_will_fit>}
        Units are in MB, so 122880 = (10 + 100) * 1024.
        NOTE(comstud): Perhaps we should only report a single number
        available per instance_type.
        """
        if not ctxt:
            ctxt = context.get_admin_context()
        # Fraction of each node's total resources held back from packing.
        reserve_level = CONF.cells.reserve_percent / 100.0
        compute_hosts = {}
        def _get_compute_hosts():
            # Collect free/total RAM and disk per enabled compute host.
            compute_nodes = self.db.compute_node_get_all(ctxt)
            for compute in compute_nodes:
                service = compute['service']
                if not service or service['disabled']:
                    continue
                host = service['host']
                compute_hosts[host] = {
                        'free_ram_mb': compute['free_ram_mb'],
                        'free_disk_mb': compute['free_disk_gb'] * 1024,
                        'total_ram_mb': compute['memory_mb'],
                        'total_disk_mb': compute['local_gb'] * 1024}
        _get_compute_hosts()
        if not compute_hosts:
            # No usable hosts: report empty capacities.
            self.my_cell_state.update_capacities({})
            return
        ram_mb_free_units = {}
        disk_mb_free_units = {}
        total_ram_mb_free = 0
        total_disk_mb_free = 0
        def _free_units(total, free, per_inst):
            # How many instances of size per_inst fit into 'free', after
            # keeping reserve_level of 'total' in reserve.
            if per_inst:
                min_free = total * reserve_level
                free = max(0, free - min_free)
                return int(free / per_inst)
            else:
                return 0
        instance_types = self.db.flavor_get_all(ctxt)
        # Distinct RAM and disk requirements across all flavors.
        memory_mb_slots = frozenset(
            [inst_type['memory_mb'] for inst_type in instance_types])
        disk_mb_slots = frozenset(
            [(inst_type['root_gb'] + inst_type['ephemeral_gb']) * units.Ki
                for inst_type in instance_types])
        for compute_values in compute_hosts.values():
            total_ram_mb_free += compute_values['free_ram_mb']
            total_disk_mb_free += compute_values['free_disk_mb']
            for memory_mb_slot in memory_mb_slots:
                ram_mb_free_units.setdefault(str(memory_mb_slot), 0)
                free_units = _free_units(compute_values['total_ram_mb'],
                        compute_values['free_ram_mb'], memory_mb_slot)
                ram_mb_free_units[str(memory_mb_slot)] += free_units
            for disk_mb_slot in disk_mb_slots:
                disk_mb_free_units.setdefault(str(disk_mb_slot), 0)
                free_units = _free_units(compute_values['total_disk_mb'],
                        compute_values['free_disk_mb'], disk_mb_slot)
                disk_mb_free_units[str(disk_mb_slot)] += free_units
        capacities = {'ram_free': {'total_mb': total_ram_mb_free,
                                   'units_by_mb': ram_mb_free_units},
                      'disk_free': {'total_mb': total_disk_mb_free,
                                    'units_by_mb': disk_mb_free_units}}
        self.my_cell_state.update_capacities(capacities)
    @sync_before
    def get_cell_info_for_neighbors(self):
        """Return cell information for all neighbor cells."""
        cell_list = [cell.get_cell_info()
                     for cell in self.child_cells.itervalues()]
        cell_list.extend([cell.get_cell_info()
                          for cell in self.parent_cells.itervalues()])
        return cell_list
    @sync_before
    def get_my_state(self):
        """Return information for my (this) cell."""
        return self.my_cell_state
    @sync_before
    def get_child_cells(self):
        """Return list of child cell_infos."""
        return self.child_cells.values()
    @sync_before
    def get_parent_cells(self):
        """Return list of parent cell_infos."""
        return self.parent_cells.values()
    @sync_before
    def get_parent_cell(self, cell_name):
        # Returns the parent CellState or None if unknown.
        return self.parent_cells.get(cell_name)
    @sync_before
    def get_child_cell(self, cell_name):
        # Returns the child CellState or None if unknown.
        return self.child_cells.get(cell_name)
    @sync_before
    def update_cell_capabilities(self, cell_name, capabilities):
        """Update capabilities for a cell."""
        cell = (self.child_cells.get(cell_name) or
                self.parent_cells.get(cell_name))
        if not cell:
            LOG.error(_("Unknown cell '%(cell_name)s' when trying to "
                        "update capabilities"),
                      {'cell_name': cell_name})
            return
        # Make sure capabilities are sets.
        for capab_name, values in capabilities.items():
            capabilities[capab_name] = set(values)
        cell.update_capabilities(capabilities)
    @sync_before
    def update_cell_capacities(self, cell_name, capacities):
        """Update capacities for a cell."""
        cell = (self.child_cells.get(cell_name) or
                self.parent_cells.get(cell_name))
        if not cell:
            LOG.error(_("Unknown cell '%(cell_name)s' when trying to "
                        "update capacities"),
                      {'cell_name': cell_name})
            return
        cell.update_capacities(capacities)
    @sync_before
    def get_our_capabilities(self, include_children=True):
        """Return our capabilities, optionally merged with the union of
        all (non-mute) child cells' capabilities.
        """
        capabs = copy.deepcopy(self.my_cell_state.capabilities)
        if include_children:
            for cell in self.child_cells.values():
                # Skip children we haven't heard from recently.
                if timeutils.is_older_than(cell.last_seen,
                                           CONF.cells.mute_child_interval):
                    continue
                for capab_name, values in cell.capabilities.items():
                    if capab_name not in capabs:
                        capabs[capab_name] = set([])
                    capabs[capab_name] |= values
        return capabs
    def _add_to_dict(self, target, src):
        # Recursively add numeric leaf values of 'src' into 'target',
        # creating nested dicts as needed.
        for key, value in src.items():
            if isinstance(value, dict):
                target.setdefault(key, {})
                self._add_to_dict(target[key], value)
                continue
            target.setdefault(key, 0)
            target[key] += value
    @sync_before
    def get_our_capacities(self, include_children=True):
        """Return our capacities, optionally summed with all children's."""
        capacities = copy.deepcopy(self.my_cell_state.capacities)
        if include_children:
            for cell in self.child_cells.values():
                self._add_to_dict(capacities, cell.capacities)
        return capacities
    @sync_before
    def get_capacities(self, cell_name=None):
        """Return capacities for our cell (default) or a named child.

        :raises: exception.CellNotFound for unknown cell names.
        """
        if not cell_name or cell_name == self.my_cell_state.name:
            return self.get_our_capacities()
        if cell_name in self.child_cells:
            return self.child_cells[cell_name].capacities
        raise exception.CellNotFound(cell_name=cell_name)
    @sync_before
    def cell_get(self, ctxt, cell_name):
        """Return the CellState for a parent or child cell by name.

        :raises: exception.CellNotFound if the name is unknown.
        """
        for cells_dict in (self.parent_cells, self.child_cells):
            if cell_name in cells_dict:
                return cells_dict[cell_name]
        raise exception.CellNotFound(cell_name=cell_name)
class CellStateManagerDB(CellStateManager):
    """CellStateManager backed by the cells table in the database."""
    @utils.synchronized('cell-db-sync')
    def _cell_data_sync(self, force=False):
        """Update cell status for all cells from the backing data store
        when necessary.
        :param force: If True, cell status will be updated regardless
                      of whether it's time to do so.
        """
        if force or self._time_to_sync():
            LOG.debug(_("Updating cell cache from db."))
            self.last_cell_db_check = timeutils.utcnow()
            ctxt = context.get_admin_context()
            db_cells = self.db.cell_get_all(ctxt)
            # Key rows by cell name for _refresh_cells_from_dict().
            db_cells_dict = dict((cell['name'], cell) for cell in db_cells)
            self._refresh_cells_from_dict(db_cells_dict)
            self._update_our_capacity(ctxt)
    # The mutators below delegate to the DB layer; @sync_after forces an
    # immediate cache refresh once the write completes.
    @sync_after
    def cell_create(self, ctxt, values):
        return self.db.cell_create(ctxt, values)
    @sync_after
    def cell_update(self, ctxt, cell_name, values):
        return self.db.cell_update(ctxt, cell_name, values)
    @sync_after
    def cell_delete(self, ctxt, cell_name):
        return self.db.cell_delete(ctxt, cell_name)
class CellStateManagerFile(CellStateManager):
    """CellStateManager backed by a JSON cells configuration file."""
    def __init__(self, cell_state_cls, cells_config_path):
        # Must be set before the base __init__ triggers the first forced
        # _cell_data_sync() call.
        self.cells_config_path = cells_config_path
        super(CellStateManagerFile, self).__init__(cell_state_cls)
    def _cell_data_sync(self, force=False):
        """Update cell status for all cells from the backing data store
        when necessary.
        :param force: If True, cell status will be updated regardless
                      of whether it's time to do so.
        """
        # read_cached_file only reloads when the file's mtime changed
        # (or force_reload is set).
        reloaded, data = fileutils.read_cached_file(self.cells_config_path,
                                                    force_reload=force)
        if reloaded:
            LOG.debug(_("Updating cell cache from config file."))
            self.cells_config_data = jsonutils.loads(data)
            self._refresh_cells_from_dict(self.cells_config_data)
        if force or self._time_to_sync():
            self.last_cell_db_check = timeutils.utcnow()
            self._update_our_capacity()
    # File-backed cell configuration is read-only; all mutations are
    # rejected.
    def cell_create(self, ctxt, values):
        raise exception.CellsUpdateProhibited()
    def cell_update(self, ctxt, cell_name, values):
        raise exception.CellsUpdateProhibited()
    def cell_delete(self, ctxt, cell_name):
        raise exception.CellsUpdateProhibited()
|
apache-2.0
|
kuiwei/edx-platform
|
common/djangoapps/microsite_configuration/templatetags/microsite.py
|
107
|
2058
|
"""
Template tags and helper functions for displaying breadcrumbs in page titles
based on the current micro site.
"""
from django import template
from django.conf import settings
from microsite_configuration import microsite
from django.templatetags.static import static
register = template.Library()
def page_title_breadcrumbs(*crumbs, **kwargs):
    """
    Build a page title of the form:
        Specific | Less Specific | General | <platform name>
    The correct platform name for the request is always appended last.
    Pass a `separator` kwarg to override the default of " | ".
    """
    sep = kwargs.get("separator", " | ")
    if not crumbs:
        return platform_name()
    return u'{}{}{}'.format(sep.join(crumbs), sep, platform_name())
@register.simple_tag(name="page_title_breadcrumbs", takes_context=True)
def page_title_breadcrumbs_tag(context, *crumbs):
    """
    Django template tag that creates breadcrumbs for page titles:
    {% page_title_breadcrumbs "Specific" "Less Specific" General %}
    Delegates to page_title_breadcrumbs() with the default separator;
    the template context is accepted but unused.
    """
    return page_title_breadcrumbs(*crumbs)
@register.simple_tag(name="platform_name")
def platform_name():
    """
    Django template tag that outputs the current platform name:
    {% platform_name %}
    Uses the microsite override when present, falling back to
    settings.PLATFORM_NAME.
    """
    return microsite.get_value('platform_name', settings.PLATFORM_NAME)
@register.simple_tag(name="favicon_path")
def favicon_path(default=None):
    """
    Django template tag that outputs the configured favicon:
    {% favicon_path %}
    The fallback is now resolved lazily at call time from
    settings.FAVICON_PATH (previously it was evaluated once at import
    time as the parameter default, freezing the value before settings
    overrides could apply).  Passing an explicit `default` still takes
    precedence over the settings-based fallback.
    """
    if default is None:
        default = getattr(settings, 'FAVICON_PATH', 'images/favicon.ico')
    return static(microsite.get_value('favicon_path', default))
@register.simple_tag(name="microsite_css_overrides_file")
def microsite_css_overrides_file():
    """
    Django template tag that outputs the css import for a:
    {% microsite_css_overrides_file %}
    Renders a <link> element for the microsite's CSS override file, or
    an empty string when no override is configured.
    """
    file_path = microsite.get_value('css_overrides_file', None)
    if file_path is None:
        return ""
    return "<link href='{}' rel='stylesheet' type='text/css'>".format(static(file_path))
|
agpl-3.0
|
brookisme/gitnb
|
gitnb/tonb.py
|
1
|
4712
|
import os
import sys
import json
import re
import errno
import gitnb.config as con
import gitnb.utils as utils
from gitnb.constants import *
import gitnb.default as default
class Py2NB(object):
    """CONVERT NBPY to Notebook
    Parses a .nbpy.py source file into an ipynb (nbformat 4) JSON
    document, splitting the text into code/markdown/raw cells using the
    start/end marker constants from gitnb.constants.
    Args:
        path: <str> path to file
        nb_path: <str> optional output .ipynb path (derived from `path`
            when omitted)
    """
    def __init__(self,path,nb_path=None):
        self._init_params()
        self.path=default.nbpy_path(path)
        self.nb_path=nb_path or default.ipynb_path(self.path)
    def json(self):
        """ get nbpy.py file as ipynb json
            - sets ipynb dict
            - sets ipynb json
            - returns ipynb json
        """
        # Lazily computed and cached in self._json.
        if not self._json:
            for line in utils.read_lines(self.path):
                self._process_line(line)
            self.ipynb_dict['cells']=self.cells
            self.ipynb_dict.update(self._meta())
            self._json=json.dumps(
                self.ipynb_dict,
                sort_keys=True,
                indent=con.fig('TAB_SIZE'))
        return self._json
    def convert(self):
        """ Convert .nbpy.py to .ipynb
            returns file path of ipynb file
        """
        if con.fig('CREATE_DIRS'): utils.mkdirs(self.nb_path)
        with open(self.nb_path, 'w') as outfile:
            outfile.write(self.json())
        return self.nb_path
    #
    # INTERNAL: IPYNB_DICT CONSTRUCTION
    #
    def _process_line(self,line):
        """ Process Line from file
            - if new cell: init cell
            - if end cell:
                - close cell
                - append cell to cells
            - if cell-line: add cell-line to cell
        """
        line=self._clean(line)
        cell_type=self._is_new_cell(line)
        if cell_type: self._init_cell(cell_type)
        else:
            if self._is_end_of_cell(line):
                self._close_cell()
            else:
                # Lines outside any cell (self.cell is None) are dropped.
                if self.cell:
                    self._insert_line(line)
    def _is_new_cell(self,line):
        """ Check if new cell
            if new cell return cell-type
            else return None
        """
        if line==CODE_START:
            return CODE_TYPE
        elif line==MARKDOWN_START:
            return MARKDOWN_TYPE
        elif line==RAW_START:
            return RAW_TYPE
        else:
            return None
    def _is_end_of_cell(self,line):
        """ Check if end of cell
            return bool
        """
        # Code cells have their own end marker; markdown/raw share one.
        if self.cell_type==CODE_TYPE:
            return line==CODE_END
        else:
            return line==UNCODE_END
    def _init_cell(self,cell_type):
        """ Initialize New Cell
        """
        self.cell_type=cell_type
        self.cell=self._new_cell(cell_type)
        # nbformat requires these extra keys on code cells only.
        if cell_type=='code':
            self.cell['execution_count']=None
            self.cell['outputs']=[]
    def _insert_line(self,line):
        """ Insert Source line into cell-source
        """
        self.cell['source'].append("{}\n".format(line))
    def _close_cell(self):
        """ Close cell
            - remove '\n' from last cell-source-line
            - set cell/cell_type to None
        """
        lastline=self.cell['source'].pop()
        self.cell['source'].append(lastline.rstrip('\n'))
        self.cells.append(self.cell)
        self.cell=None
        self.cell_type=None
    def _new_cell(self,cell_type):
        """ Return New Cell
        """
        return {
            "cell_type": cell_type,
            "metadata": {},
            "source": []}
    def _meta(self):
        """ Return Notebook Metadata
        """
        # Kernel/language metadata is derived from the interpreter that
        # runs this conversion.
        major,minor,micro,_,_=sys.version_info
        return {
            "metadata": {
                "kernelspec": {
                    "display_name": "Python {}".format(major),
                    "language": "python",
                    "name": "python{}".format(major)},
                "language_info": {
                    "codemirror_mode": {
                        "name": "ipython",
                        "version": major},
                    "file_extension": ".py",
                    "mimetype": "text/x-python",
                    "name": "python",
                    "nbconvert_exporter": "python",
                    "pygments_lexer": "ipython{}".format(major),
                    "version": "{}.{}.{}".format(major,minor,micro)}},
            "nbformat": 4,
            "nbformat_minor": 2}
    #
    # INTERNAL: UTILS
    #
    def _init_params(self):
        # Reset parser state: cached JSON, accumulated cells, and the
        # currently-open cell (None means "between cells").
        self._json=None
        self.cells=[]
        self.cell=None
        self.cell_type=None
        self.ipynb_dict={}
    def _clean(self,line):
        """ Remove spaces and line break
            from end of line
        """
        # Double rstrip(' ') handles both "text  \n" and trailing spaces
        # left after the newline is removed.
        return line.rstrip(' ').rstrip('\n').rstrip(' ')
|
mit
|
thunsaker/cloudpebble
|
fabfile.py
|
2
|
4971
|
# WARNING: This file is extremely specific to how Katharine happens to have her
# local machines set up.
# In particular, to run without modification, you will need:
# - An EC2 keypair in ~/Downloads/katharine-keypair.pem
# - A keypair for the ycmd servers in ~/.ssh/id_rsa
# - The tintin source tree in ~/projects/tintin
# - With an appropriate python virtualenv in .env/
# - A clone of qemu-tintin-images in ~/projects/qemu-tintin-images
# - Access to the cloudpebble heroku app
from fabric.api import *
from fabric.tasks import execute
import os
# Root directory containing local project checkouts; override with the
# 'pebble_projects_path' fabric env setting.
PROJECTS_PATH = env.get('pebble_projects_path', '~/projects')
def get_project_path(name):
    """Return the local filesystem path of the checkout for *name*."""
    return os.path.join(PROJECTS_PATH, name)
# Remote hosts, grouped by role, targeted by the @roles-decorated tasks.
env.roledefs = {
    'qemu': ['ec2-user@qemu-us1.cloudpebble.net', 'ec2-user@qemu-us2.cloudpebble.net'],
    'ycmd': ['root@ycm1.cloudpebble.net', 'root@ycm2.cloudpebble.net',
             'root@ycm3.cloudpebble.net', 'root@ycm4.cloudpebble.net',],
}
# This needs to stay commented out for anyone except Katharine to use the script
# env.key_filename = ['~/.ssh/id_rsa', '~/Downloads/katharine-keypair.pem']
@task
@roles('qemu')
@parallel
def update_qemu_service():
    """Pull, reinstall deps and restart cloudpebble-qemu-controller on all QEMU hosts."""
    with cd("cloudpebble-qemu-controller"):
        run("git pull")
        run("git submodule update --init --recursive")
        with prefix(". .env/bin/activate"):
            run("pip install -r requirements.txt")
        sudo("restart cloudpebble-qemu")
@task
@roles('qemu')
@parallel
def update_qemu_sdk():
    """Rebuild qemu, refresh firmware images and update pypkjs on all QEMU hosts."""
    with cd('qemu'):
        run("git pull")
        # This is currently the last qemu commit which works with CloudPebble
        run("git checkout 01b08e22cfc7c1e08d5087d669a5a2f4703d5a20")
        run("make -j8")
    with cd("qemu-tintin-images"):
        run("git pull")
    with cd("pypkjs"):
        run("git pull")
        run("git submodule update --init --recursive")
        with prefix(". .env/bin/activate"):
            run("pip install -r requirements.txt")
@task
@roles('qemu')
@parallel
def restart_qemu_service():
    """Restart the cloudpebble-qemu upstart service on all QEMU hosts."""
    sudo("restart cloudpebble-qemu")
@task
@roles('ycmd')
@parallel
def update_ycmd_sdk(sdk_version):
    """Download and unpack Pebble SDK *sdk_version* as the ycm user on all ycmd hosts."""
    with cd("/home/ycm"), settings(sudo_user="ycm", shell="/bin/bash -c"):
        sudo("wget -nv -O sdk.tar.gz https://s3.amazonaws.com/assets.getpebble.com/sdk3/release/sdk-core-%s.tar.bz2" % sdk_version)
        sudo("tar -xf sdk.tar.gz")
        sudo("rm -rf sdk3")
        sudo("mv sdk-core sdk3")
@task
@roles('ycmd')
@parallel
def update_ycmd_service():
    """Update the ycmd proxy checkout, reinstall deps and restart it on all ycmd hosts."""
    with cd("/home/ycm/proxy"), settings(sudo_user="ycm", shell="/bin/bash -c"):
        sudo("git pull")
    run("pip install --upgrade -r requirements.txt")
    run("restart ycmd-proxy")
@task
@roles('ycmd')
@parallel
def restart_ycmd_service():
    """Restart the ycmd-proxy service on all ycmd hosts."""
    run("restart ycmd-proxy")
@task
def deploy_heroku():
    """Deploy the local master branch to the Heroku app."""
    local("git push heroku master")
@task
def restart_heroku():
    """Restart all dynos of the 'cloudpebble' Heroku app."""
    local("heroku restart -a cloudpebble")
@task
def update_all_services():
    """Update the QEMU controller, ycmd proxy and the Heroku deployment."""
    execute(update_qemu_service)
    execute(update_ycmd_service)
    execute(deploy_heroku)
@task
def restart_everything():
    """Restart the QEMU controller, ycmd proxy and the Heroku app."""
    execute(restart_qemu_service)
    execute(restart_ycmd_service)
    execute(restart_heroku)
def build_qemu_image(board, platform):
    """Build QEMU firmware images for *board* locally and copy them into
    the qemu-tintin-images checkout under <platform>/3.0.
    """
    with lcd(get_project_path("tintin")):
        with prefix(". .env/bin/activate"):
            local("python ./waf configure --board={} --qemu --release --sdkshell build qemu_image_spi qemu_image_micro".format(board))
            local("cp build/qemu_* {}".format(os.path.join(get_project_path('qemu-tintin-images'), platform, "3.0")))
@task
@runs_once
def update_qemu_images(sdk_version):
    """Rebuild QEMU images for every platform at tintin tag v<sdk_version>
    and push them to the qemu-tintin-images repository.
    """
    # Merge conflicts are no fun.
    with lcd(get_project_path("qemu-tintin-images")):
        local("git pull")
    with lcd(get_project_path("tintin")):
        local("git checkout v%s" % sdk_version)
    # build_qemu_image("bb2", "aplite")
    build_qemu_image("snowy_bb2", "basalt")
    build_qemu_image("spalding_bb2", "chalk")
    build_qemu_image("silk_bb", "diorite")
    build_qemu_image("robert_bb", "emery")
    with lcd(get_project_path("qemu-tintin-images")):
        local("git commit -a -m 'Update to v%s'" % sdk_version)
        local("git push")
@task
@runs_once
def update_cloudpebble_sdk(sdk_version):
    """Point cloudpebble's bootstrap scripts at SDK *sdk_version*, commit,
    push and deploy to Heroku.
    """
    local("sed -i.bak 's/sdk-core-3.[a-z0-9-]*\.tar\.bz2/sdk-core-%s.tar.bz2/' bin/post_compile bootstrap.sh" % sdk_version)
    local("git add bin/post_compile bootstrap.sh")
    local("git commit -m 'Update to v%s'" % sdk_version)
    local("git push")
    execute(deploy_heroku)
@task
def update_sdk(sdk_version):
    """Roll SDK *sdk_version* out everywhere (images, QEMU hosts, ycmd, Heroku)."""
    execute(update_qemu_images, sdk_version)
    execute(update_qemu_sdk)
    execute(update_ycmd_sdk, sdk_version)
    execute(update_cloudpebble_sdk, sdk_version)
@task
def update_all(sdk_version):
    """Roll out SDK *sdk_version* AND update all service code (superset of update_sdk)."""
    execute(update_qemu_images, sdk_version)
    execute(update_qemu_sdk)
    execute(update_qemu_service)
    execute(update_ycmd_sdk, sdk_version)
    execute(update_ycmd_service)
    execute(update_cloudpebble_sdk, sdk_version)
|
mit
|
upptalk/uppsell
|
uppsell/migrations/0004_auto__add_field_listing_shipping.py
|
1
|
17786
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: add the 'shipping' decimal column to listings."""
        # Adding field 'Listing.shipping'
        db.add_column('listings', 'shipping',
                      self.gf('django.db.models.fields.DecimalField')(default=0.0, max_digits=8, decimal_places=2),
                      keep_default=False)
    def backwards(self, orm):
        """Revert the migration: drop the 'shipping' column from listings."""
        # Deleting field 'Listing.shipping'
        db.delete_column('listings', 'shipping')
models = {
u'uppsell.address': {
'Meta': {'object_name': 'Address', 'db_table': "'addresses'"},
'city': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'country_code': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_used': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'other': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'province': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'zip': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
u'uppsell.cart': {
'Meta': {'object_name': 'Cart', 'db_table': "'carts'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.cartitem': {
'Meta': {'object_name': 'CartItem', 'db_table': "'cart_items'"},
'cart': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Cart']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Listing']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'uppsell.coupon': {
'Meta': {'object_name': 'Coupon', 'db_table': "'coupons'"},
'code': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']", 'null': 'True', 'blank': 'True'}),
'discount_amount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'discount_pct': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_uses': ('django.db.models.fields.PositiveIntegerField', [], {}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Listing']", 'null': 'True', 'blank': 'True'}),
'product_group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.ProductGroup']", 'null': 'True', 'blank': 'True'}),
'relation': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'remaining': ('django.db.models.fields.PositiveIntegerField', [], {}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'valid_from': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'valid_until': ('django.db.models.fields.DateTimeField', [], {})
},
u'uppsell.couponspend': {
'Meta': {'unique_together': "(('customer', 'coupon'),)", 'object_name': 'CouponSpend', 'db_table': "'coupon_spends'"},
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Coupon']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'uppsell.customer': {
'Meta': {'object_name': 'Customer', 'db_table': "'customers'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_logged_in_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'uppsell.invoice': {
'Meta': {'object_name': 'Invoice', 'db_table': "'invoices'"},
'billing_address': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}),
'order_shipping_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'order_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'payment_made_ts': ('django.db.models.fields.DateTimeField', [], {}),
'product_id': ('django.db.models.fields.IntegerField', [], {}),
'psp_id': ('django.db.models.fields.IntegerField', [], {}),
'psp_response_code': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'psp_response_text': ('django.db.models.fields.CharField', [], {'max_length': '10000'}),
'psp_type': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shipping_address': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'store_id': ('django.db.models.fields.IntegerField', [], {}),
'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_email': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_fullname': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'user_jid': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'user_mobile_msisdn': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'uppsell.linkedaccount': {
'Meta': {'object_name': 'LinkedAccount', 'db_table': "'linked_accounts'"},
'account_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '2000'}),
'linked_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.LinkedAccountType']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.linkedaccounttype': {
'Meta': {'object_name': 'LinkedAccountType', 'db_table': "'linked_account_types'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
u'uppsell.listing': {
'Meta': {'object_name': 'Listing', 'db_table': "'listings'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '10000', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '8', 'decimal_places': '2'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Product']"}),
'sales_tax_rate': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'shipping': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '8', 'decimal_places': '2'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'uppsell.order': {
'Meta': {'object_name': 'Order', 'db_table': "'orders'"},
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'billing_address'", 'null': 'True', 'to': u"orm['uppsell.Address']"}),
'coupon': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Coupon']", 'null': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Customer']"}),
'fraud_state': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order_shipping_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'order_state': ('django.db.models.fields.CharField', [], {'default': "'init'", 'max_length': '30'}),
'order_total': ('django.db.models.fields.DecimalField', [], {'max_digits': '8', 'decimal_places': '2'}),
'payment_made_ts': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'payment_state': ('django.db.models.fields.CharField', [], {'default': "'init'", 'max_length': '30'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'shipping_address'", 'null': 'True', 'to': u"orm['uppsell.Address']"}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Store']"}),
'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.orderevent': {
'Meta': {'object_name': 'OrderEvent', 'db_table': "'order_events'"},
'action_type': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Order']"}),
'state_after': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'state_before': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'uppsell.orderitem': {
'Meta': {'object_name': 'OrderItem', 'db_table': "'order_items'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Order']"}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.Listing']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'uppsell.product': {
'Meta': {'object_name': 'Product', 'db_table': "'products'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '10000'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.ProductGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'provisioning_codes': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'sku': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'stock_units': ('django.db.models.fields.FloatField', [], {}),
'subtitle': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'uppsell.productcode': {
'Meta': {'object_name': 'ProductCode', 'db_table': "'product_codes'"},
'code': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['uppsell.ProductGroup']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'uppsell.productgroup': {
'Meta': {'object_name': 'ProductGroup', 'db_table': "'product_groups'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'uppsell.store': {
'Meta': {'object_name': 'Store', 'db_table': "'stores'"},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'default_currency': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'default_lang': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'sales_tax_rate': ('django.db.models.fields.FloatField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['uppsell']
|
mit
|
sherazkasi/SabreSoftware
|
Lib/site-packages/scipy/setupscons.py
|
59
|
1685
|
from os.path import join as pjoin
def configuration(parent_package='', top_path=None, setup_name='setupscons.py'):
    """Build the numpy.distutils Configuration for scipy's scons build.

    The parameters mirror the standard numpy.distutils configuration hook;
    ``setup_name`` is accepted for interface compatibility, while the scons
    setup file name passed to Configuration is fixed to 'setupscons.py'.
    """
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.misc_util import scons_generate_config_py

    config = Configuration('scipy', parent_package, top_path,
                           setup_name='setupscons.py')
    # Every scipy subpackage participates in the build, in this order.
    subpackage_names = (
        'cluster', 'constants', 'fftpack', 'integrate', 'interpolate',
        'io', 'lib', 'linalg', 'maxentropy', 'misc', 'odr', 'optimize',
        'signal', 'sparse', 'spatial', 'special', 'stats', 'ndimage',
        'weave',
    )
    for subpackage_name in subpackage_names:
        config.add_subpackage(subpackage_name)
    config.make_svn_version_py()  # installs __svn_version__.py

    def add_config(*args, **kw):
        # Generate __config__.py; write it next to the sources for in-place
        # builds, otherwise into the build directory.
        scons_cmd = kw['scons_cmd']
        if scons_cmd.inplace:
            target = pjoin(kw['pkg_name'], '__config__.py')
        else:
            target = pjoin(scons_cmd.build_lib, kw['pkg_name'],
                           '__config__.py')
        scons_generate_config_py(target)

    config.add_sconscript(None, post_hook=add_config)
    return config
if __name__ == '__main__':
    from numpy.distutils.core import setup
    # Build the configuration rooted at the current directory and hand the
    # resulting dict straight to distutils' setup().
    setup(**configuration(top_path='').todict())
|
gpl-3.0
|
nurmd2/nurmd
|
addons/product/__openerp__.py
|
18
|
1558
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Products & Pricelists',
'version': '1.2',
'category': 'Sales Management',
'depends': ['base', 'decimal_precision', 'mail', 'report'],
'demo': [
'product_demo.xml',
'product_image_demo.xml',
],
'description': """
This is the base module for managing products and pricelists in OpenERP.
========================================================================
Products support variants, different pricing methods, vendors information,
make to stock/order, different unit of measures, packaging and properties.
Pricelists support:
-------------------
* Multiple-level of discount (by product, category, quantities)
* Compute price based on different criteria:
* Other pricelist
* Cost price
* List price
* Vendor price
Pricelists preferences by product and/or partners.
Print product labels with barcode.
""",
'data': [
'security/product_security.xml',
'security/ir.model.access.csv',
'wizard/product_price_view.xml',
'res_config_view.xml',
'product_data.xml',
'product_report.xml',
'product_view.xml',
'pricelist_view.xml',
'partner_view.xml',
'views/report_pricelist.xml',
'views/report_productlabel.xml'
],
'test': [
'product_pricelist_demo.yml',
'test/product_pricelist.yml',
],
'installable': True,
'auto_install': False,
}
|
gpl-3.0
|
akrherz/iem
|
htdocs/plotting/auto/scripts100/p160.py
|
1
|
8613
|
"""
TODO: add table listing each forecast's peak and peak time...
"""
import datetime
import pytz
import numpy as np
import pandas as pd
from pandas.io.sql import read_sql
import matplotlib.dates as mdates
from pyiem.plot import figure_axes
from pyiem.util import get_autoplot_context, get_dbconn
from pyiem.exceptions import NoDataFound
# Selectable plot variable -> human readable label.
MDICT = {"primary": "Primary Field", "secondary": "Secondary Field"}


def get_description():
    """Return a dict describing how to call this plotter"""
    desc = {"data": True, "cache": 3600}
    desc[
        "description"
    ] = """This page presents a sphagetti plot of river stage
    and forecasts. The plot is roughly centered on the date of your choice
    with the plot showing any forecasts made three days prior to the date
    and for one day afterwards. Sorry that you have to know the station ID
    prior to using this page (will fix at some point). Presented timestamps
    are hopefully all in the local timezone of the reporting station. If
    you download the data, the timestamps are all in UTC.
    """
    now = datetime.datetime.utcnow()
    # Three form inputs: the NWSLI station id, the center time, and which
    # of the two reported fields to plot.
    desc["arguments"] = [
        {
            "type": "text",
            "name": "station",
            "default": "EKDI4",
            "label": "Enter 5 Char NWSLI Station Code (sorry):",
        },
        {
            "type": "datetime",
            "name": "dt",
            "default": now.strftime("%Y/%m/%d %H%M"),
            "label": "Time to center plot at (UTC Time Zone):",
            "min": "2013/01/01 0000",
        },
        {
            "type": "select",
            "name": "var",
            "options": MDICT,
            "label": "Which Variable to Plot:",
            "default": "primary",
        },
    ]
    return desc
def get_context(fdict):
    """Fetch forecasts and observations, assembling the shared plot context.

    Args:
        fdict (dict): autoplot CGI parameters (``station``, ``dt``, ``var``).

    Returns:
        dict: context holding ``fdf`` (forecast DataFrame), ``odf``
        (observations pivoted by label), ``df`` (forecasts merged with obs,
        when forecasts exist), axis labels and plot titles.

    Raises:
        NoDataFound: when no observations exist within the time window.
    """
    pgconn = get_dbconn("hml")
    cursor = pgconn.cursor()
    ctx = get_autoplot_context(fdict, get_description())
    ctx["station"] = ctx["station"].upper()
    station = ctx["station"]
    dt = ctx["dt"]
    # Attempt to get station information (name + local timezone).
    cursor.execute(
        "SELECT name, tzname from stations where id = %s and network ~* 'DCP'",
        (station,),
    )
    ctx["name"] = ""
    ctx["tzname"] = "UTC"
    if cursor.rowcount > 0:
        row = cursor.fetchone()
        ctx["name"] = row[0]
        ctx["tzname"] = row[1]
    # Forecasts generated within [dt - 3 days, dt + 1 day].  The data table is
    # partitioned by year, hence the f-string table name; dt.year is an int,
    # so no SQL-injection surface is added here.
    ctx["fdf"] = read_sql(
        f"""with fx as (
        select id, issued, primaryname, primaryunits, secondaryname,
        secondaryunits from hml_forecast where station = %s
        and generationtime between %s and %s)
    SELECT f.id,
    f.issued at time zone 'UTC' as issued,
    d.valid at time zone 'UTC' as valid,
    d.primary_value, f.primaryname,
    f.primaryunits, d.secondary_value, f.secondaryname,
    f.secondaryunits from
    hml_forecast_data_{dt.year} d JOIN fx f
    on (d.hml_forecast_id = f.id) ORDER by f.id ASC, d.valid ASC
    """,
        pgconn,
        params=(
            station,
            dt - datetime.timedelta(days=3),
            dt + datetime.timedelta(days=1),
        ),
        index_col=None,
    )
    if not ctx["fdf"].empty:
        # Timestamps come back naive-UTC; make them tz-aware.
        ctx["fdf"]["valid"] = ctx["fdf"]["valid"].dt.tz_localize(pytz.UTC)
        ctx["fdf"]["issued"] = ctx["fdf"]["issued"].dt.tz_localize(pytz.UTC)
        ctx["primary"] = "%s[%s]" % (
            ctx["fdf"].iloc[0]["primaryname"],
            ctx["fdf"].iloc[0]["primaryunits"],
        )
        ctx["secondary"] = "%s[%s]" % (
            ctx["fdf"].iloc[0]["secondaryname"],
            ctx["fdf"].iloc[0]["secondaryunits"],
        )
        # get obs over the span covered by the forecasts
        mints = ctx["fdf"]["valid"].min()
        maxts = ctx["fdf"]["valid"].max()
    else:
        # No forecasts: fall back to +/- 3 days around the requested time.
        mints = dt - datetime.timedelta(days=3)
        maxts = dt + datetime.timedelta(days=3)
    df = read_sql(
        "SELECT distinct valid at time zone 'UTC' as valid, "
        "h.label, value from hml_observed_data d "
        "JOIN hml_observed_keys h on (d.key = h.id) WHERE station = %s and "
        "valid between %s and %s ORDER by valid ASC",
        pgconn,
        params=(station, mints, maxts),
        index_col=None,
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    df["valid"] = df["valid"].dt.tz_localize(pytz.UTC)
    # Use keyword arguments: positional DataFrame.pivot() arguments were
    # deprecated in pandas 1.1 and removed in pandas 2.0.
    ctx["odf"] = df.pivot(index="valid", columns="label", values="value")
    if not ctx["fdf"].empty:
        ctx["fdf"].reset_index(inplace=True)
        # Left-join observations onto the forecast rows by valid time.
        ctx["df"] = pd.merge(
            ctx["fdf"],
            ctx["odf"],
            left_on="valid",
            right_on="valid",
            how="left",
            sort=False,
        )
    ctx["title"] = "[%s] %s" % (ctx["station"], ctx["name"])
    ctx["subtitle"] = "+/- 72 hours around %s" % (
        ctx["dt"]
        .replace(tzinfo=pytz.UTC)
        .astimezone(pytz.timezone(ctx["tzname"]))
        .strftime("%d %b %Y %-I:%M %p %Z"),
    )
    if "df" not in ctx or (ctx["df"].empty and not ctx["odf"].empty):
        # Forecast-less fallback: label the axes from the observed columns.
        # NOTE(review): assumes odf has at least two columns here -- confirm.
        ctx["primary"] = ctx["odf"].columns[0]
        ctx["secondary"] = ctx["odf"].columns[1]
    return ctx
def highcharts(fdict):
    """generate highcharts

    Returns a snippet of JavaScript that configures an interactive
    Highcharts plot: one line series per forecast issuance plus (when
    available) a black 'Obs' series for the observed values.
    """
    ctx = get_context(fdict)
    if "df" not in ctx:
        raise NoDataFound("No Data Found.")
    df = ctx["df"]
    # Highcharts expects epoch milliseconds on the datetime axis.
    df["ticks"] = df["valid"].astype(np.int64) // 10 ** 6
    lines = []
    fxs = df["id"].unique()
    # One series per individual forecast, labelled by its issuance time
    # rendered in the station's local timezone.
    for fx in fxs:
        df2 = df[df["id"] == fx]
        issued = (
            df2.iloc[0]["issued"]
            .tz_convert(pytz.timezone(ctx["tzname"]))
            .strftime("%-m/%-d %-I%p %Z")
        )
        v = df2[["ticks", ctx["var"] + "_value"]].to_json(orient="values")
        lines.append(
            """{
            name: '"""
            + issued
            + """',
            type: 'line',
            tooltip: {valueDecimal: 1},
            data: """
            + v
            + """
        }
        """
        )
    ctx["odf"]["ticks"] = ctx["odf"].index.values.astype(np.int64) // 10 ** 6
    # The observed series is only plottable when get_context resolved the
    # label for the selected variable (ctx["primary"]/ctx["secondary"]).
    if ctx["var"] in ctx:
        v = ctx["odf"][["ticks", ctx[ctx["var"]]]].to_json(orient="values")
        lines.append(
            """{
            name: 'Obs',
            type: 'line',
            color: 'black',
            lineWidth: 3,
            tooltip: {valueDecimal: 1},
            data: """
            + v
            + """
        }
        """
        )
    series = ",".join(lines)
    # Assemble the inline Highcharts configuration string.
    return (
        """
$("#ap_container").highcharts({
    time: {
        useUTC: false,
        timezone: '"""
        + ctx["tzname"]
        + """'
    },
    title: {text: '"""
        + ctx["title"]
        + """'},
    subtitle: {text: '"""
        + ctx["subtitle"]
        + """'},
    chart: {zoomType: 'x'},
    tooltip: {
        shared: true,
        crosshairs: true,
        xDateFormat: '%d %b %Y %I:%M %p'
    },
    xAxis: {
        title: {text: '"""
        + ctx["tzname"]
        + """ Timezone'},
        type: 'datetime'},
    yAxis: {title: {text: '"""
        + ctx.get(ctx["var"], "primary")
        + """'}},
    series: ["""
        + series
        + """]
    });
    """
    )
def plotter(fdict):
    """Go

    Matplotlib entry point: draws the forecast spaghetti plot and returns
    the figure plus the merged DataFrame (timestamps stringified for the
    CSV/Excel download).
    """
    ctx = get_context(fdict)
    if "df" not in ctx or (ctx["df"].empty and ctx["odf"].empty):
        raise NoDataFound("No Data Found!")
    df = ctx["df"]
    title = "\n".join([ctx["title"], ctx["subtitle"]])
    (fig, ax) = figure_axes(title=title)
    fxs = df["id"].unique()
    # One line per forecast issuance, labelled by local issuance time.
    for fx in fxs:
        df2 = df[df["id"] == fx]
        issued = (
            df2.iloc[0]["issued"]
            .tz_convert(pytz.timezone(ctx["tzname"]))
            .strftime("%-m/%-d %-I%p %Z")
        )
        ax.plot(
            df2["valid"], df2[ctx["var"] + "_value"], zorder=2, label=issued
        )
    # Observed values drawn thick and black, above the forecast lines.
    if not ctx["odf"].empty:
        ax.plot(
            ctx["odf"].index.values,
            ctx["odf"][ctx[ctx["var"]]],
            lw=2,
            color="k",
            label="Obs",
            zorder=4,
        )
    ax.set_ylabel(ctx[ctx["var"]])
    # Render tick labels in the station's local timezone.
    ax.xaxis.set_major_locator(
        mdates.AutoDateLocator(tz=pytz.timezone(ctx["tzname"]))
    )
    ax.xaxis.set_major_formatter(
        mdates.DateFormatter("%-d %b\n%Y", tz=pytz.timezone(ctx["tzname"]))
    )
    pos = ax.get_position()
    ax.grid(True)
    # Shrink the axes so the legend fits to the right of the plot.
    ax.set_position([pos.x0, pos.y0, 0.74, 0.8])
    ax.set_xlabel(f"Timestamps in {ctx['tzname']} Timezone")
    ax.legend(loc=(1.0, 0.0))
    # Stringify timestamps for the downloadable DataFrame.
    df["issued"] = df["issued"].apply(lambda x: x.strftime("%Y-%m-%d %H:%M"))
    df["valid"] = df["valid"].apply(lambda x: x.strftime("%Y-%m-%d %H:%M"))
    return fig, df
if __name__ == "__main__":
    # Ad-hoc smoke test invocation with a known station/time.
    plotter(dict(station="MLGO1", dt="2021-06-19 1653"))
|
mit
|
hnjamba/onaclone
|
onadata/apps/main/tests/test_past_bugs.py
|
5
|
2294
|
import os
from pyxform.errors import PyXFormError
from onadata.apps.logger.models import XForm, Instance
from test_base import TestBase
class TestInputs(TestBase):
    """Regression tests for XLSForm files that users failed to upload."""

    def test_uniqueness_of_group_names_enforced(self):
        # Publishing a form with duplicate section names must fail and
        # leave the XForm count untouched.
        count_before = XForm.objects.count()
        self._create_user_and_login()
        self.assertRaisesMessage(
            PyXFormError,
            "There are two sections with the name group_names_must_be_unique.",
            self._publish_xls_file,
            'fixtures/group_names_must_be_unique.xls')
        self.assertEqual(XForm.objects.count(), count_before)

    def test_mch(self):
        # An unsupported question type must surface a PyXFormError.
        expected = u"Unknown question type 'Select one from source'"
        with self.assertRaisesMessage(PyXFormError, expected):
            self._publish_xls_file('fixtures/bug_fixes/MCH_v1.xls')

    def test_erics_files(self):
        # Each of these forms either publishes cleanly or fails with the
        # known duplicate-header message.
        for file_name in ('battery_life.xls',
                          'enumerator_weekly.xls',
                          'Enumerator_Training_Practice_Survey.xls'):
            try:
                self._publish_xls_file(
                    os.path.join('fixtures', 'bug_fixes', file_name))
            except Exception as e:
                self.assertEqual(u"Duplicate column header: label",
                                 unicode(e))
class TestSubmissionBugs(TestBase):
    """Regression tests around submission handling."""

    def test_submission_with_mixed_case_username(self):
        # A submission addressed to 'BoB' must still create one Instance.
        self._publish_transportation_form()
        survey = self.surveys[0]
        count_before = Instance.objects.count()
        xml_path = os.path.join(
            self.this_directory, 'fixtures', 'transportation',
            'instances', survey, survey + '.xml')
        self._make_submission(xml_path, 'BoB')
        self.assertEqual(Instance.objects.count(), count_before + 1)
class TestCascading(TestBase):
    """Regression tests for cascading-select forms."""

    def test_correct_id_string_picked(self):
        # Publishing the fixture must yield exactly one XForm whose
        # id_string is taken from the form definition, not the file name.
        XForm.objects.all().delete()
        fixture = 'new_cascading_select.xls'
        expected = u'cascading_select_test'
        self._publish_xls_file(os.path.join(
            self.this_directory, 'fixtures', 'bug_fixes', fixture))
        self.assertEqual(XForm.objects.count(), 1)
        self.assertEqual(XForm.objects.all()[0].id_string, expected)
|
bsd-2-clause
|
Nettacker/Nettacker
|
lib/transactions/maltego/nettacker_transforms/src/nettacker_transforms/transforms/shodan_scan.py
|
1
|
1847
|
import random
from canari.maltego.transform import Transform
from canari.maltego.entities import URL
from canari.framework import EnableDebugWindow
from common.entities import NettackerScan
from lib.scan.shodan.engine import start
from database.db import __logs_by_scan_id as find_log
__author__ = 'Shaddy Garg'
__copyright__ = 'Copyright 2018, nettacker_transforms Project'
__credits__ = []
__license__ = 'GPLv3'
__version__ = '0.1'
__maintainer__ = 'Shaddy Garg'
__email__ = 'shaddygarg1@gmail.com'
__status__ = 'Development'
@EnableDebugWindow
class ShodanScan(Transform):
    """Maltego transform that runs Nettacker's shodan scan engine and
    emits one URL entity per host/port found in the scan log."""

    # The transform input entity type.
    input_type = NettackerScan

    def do_transform(self, request, response, config):
        entity = request.entity
        # Random 32-hex-digit identifier used to retrieve this scan's logs.
        scan_id = "".join(random.choice("0123456789abcdef") for _ in range(32))
        entity.ports = entity.ports.split(', ') if entity.ports is not None else None
        start(entity.host, [], [], entity.ports, entity.timeout_sec,
              entity.thread_no, 1, 1, 'abcd', 0, "en", entity.verbose,
              entity.socks_proxy, entity.retries, [], None, scan_id,
              "Through Maltego")
        # Pull the results back out of the Nettacker log database.
        for record in find_log(scan_id, "en"):
            response += URL(url=record["HOST"] + ":" + record["PORT"],
                            title=record["DESCRIPTION"],
                            short_title="Shodan scan completed!",
                            link_label='shodan_scan')
        return response

    def on_terminate(self):
        """Called when transform execution is prematurely terminated; only
        applicable for local transforms."""
        pass
|
gpl-3.0
|
koss822/misc
|
Linux/MySettings/myvim/vim/bundle/jedi-vim/pythonx/jedi/test/run.py
|
1
|
15920
|
#!/usr/bin/env python
"""
|jedi| is mostly being tested by what I would call "Blackbox Tests". These
tests are just testing the interface and do input/output testing. This makes a
lot of sense for |jedi|. Jedi supports so many different code structures, that
it is just stupid to write 200'000 unittests in the manner of
``regression.py``. Also, it is impossible to do doctests/unittests on most of
the internal data structures. That's why |jedi| uses mostly these kind of
tests.
There are different kind of tests:
- completions / goto_definitions ``#?``
- goto_assignments: ``#!``
- usages: ``#<``
How to run tests?
+++++++++++++++++
Jedi uses pytest_ to run unit and integration tests. To run tests,
simply run ``py.test``. You can also use tox_ to run tests for
multiple Python versions.
.. _pytest: http://pytest.org
.. _tox: http://testrun.org/tox
Integration test cases are located in ``test/completion`` directory
and each test case is indicated by either the comment ``#?`` (completions /
definitions), ``#!`` (assignments), or ``#<`` (usages).
There is also support for third party libraries. In a normal test run they are
not being executed, you have to provide a ``--thirdparty`` option.
In addition to standard `-k` and `-m` options in py.test, you can use
`-T` (`--test-files`) option to specify integration test cases to run.
It takes the format of ``FILE_NAME[:LINE[,LINE[,...]]]`` where
``FILE_NAME`` is a file in ``test/completion`` and ``LINE`` is a line
number of the test comment. Here is some recipes:
Run tests only in ``basic.py`` and ``imports.py``::
py.test test/test_integration.py -T basic.py -T imports.py
Run test at line 4, 6, and 8 in ``basic.py``::
py.test test/test_integration.py -T basic.py:4,6,8
See ``py.test --help`` for more information.
If you want to debug a test, just use the ``--pdb`` option.
Alternate Test Runner
+++++++++++++++++++++
If you don't like the output of ``py.test``, there's an alternate test runner
that you can start by running ``./run.py``. The above example could be run by::
./run.py basic 4 6 8 50-80
The advantage of this runner is simplicity and more customized error reports.
Using both runners will help you to have a quicker overview of what's
happening.
Auto-Completion
+++++++++++++++
Uses comments to specify a test in the next line. The comment says which
results are expected. The comment always begins with `#?`. The last row
symbolizes the cursor.
For example::
#? ['real']
a = 3; a.rea
Because it follows ``a.rea`` and a is an ``int``, which has a ``real``
property.
Goto Definitions
++++++++++++++++
Definition tests use the same symbols like completion tests. This is
possible because the completion tests are defined with a list::
#? int()
ab = 3; ab
Goto Assignments
++++++++++++++++
Tests look like this::
abc = 1
#! ['abc=1']
abc
Additionally it is possible to specify the column by adding a number, which
describes the position of the test (otherwise it's just the end of line)::
#! 2 ['abc=1']
abc
Usages
++++++
Tests look like this::
abc = 1
#< abc@1,0 abc@3,0
abc
"""
import os
import re
import sys
import operator
from ast import literal_eval
from io import StringIO
from functools import reduce
import parso
import jedi
from jedi import debug
from jedi._compatibility import unicode, is_py3
from jedi.api.classes import Definition
from jedi.api.completion import get_user_scope
from jedi import parser_utils
from jedi.api.environment import get_default_environment, get_system_environment
# Test categories encoded by the comment marker in the fixture files
# (``#?`` completion/definition, ``#!`` assignment, ``#<`` usages).
TEST_COMPLETIONS = 0
TEST_DEFINITIONS = 1
TEST_ASSIGNMENTS = 2
TEST_USAGES = 3
# Grammar used to parse the expected-value expressions in the test comments.
grammar36 = parso.load_grammar(version='3.6')
class IntegrationTestCase(object):
    """One integration test parsed from a ``#?``/``#!``/``#<`` comment.

    Holds the expected result (``correct``), the position of the test in
    its fixture file, and knows how to run itself against a jedi Script
    for each of the four test types.  ``self.source`` is attached later
    by collect_dir_tests.
    """
    def __init__(self, test_type, correct, line_nr, column, start, line,
                 path=None, skip_version_info=None):
        self.test_type = test_type
        self.correct = correct
        self.line_nr = line_nr
        self.column = column
        self.start = start
        self.line = line
        self.path = path
        self._skip_version_info = skip_version_info
        self._skip = None
    def set_skip(self, reason):
        # Explicit skip (e.g. missing third-party library) overrides the
        # version-based skip logic below.
        self._skip = reason
    def get_skip_reason(self, environment):
        """Return a human-readable skip reason, or None to run the test."""
        if self._skip is not None:
            return self._skip
        if self._skip_version_info is None:
            return
        # Map the comparison operator from the ``# python <op> X.Y`` comment
        # onto the corresponding function in the operator module.
        comp_map = {
            '==': 'eq',
            '<=': 'le',
            '>=': 'ge',
            '<': 'lt',
            '>': 'gt',
        }
        min_version, operator_ = self._skip_version_info
        operation = getattr(operator, comp_map[operator_])
        if not operation(environment.version_info[:2], min_version):
            return "Python version %s %s.%s" % (
                operator_, min_version[0], min_version[1]
            )
    @property
    def module_name(self):
        # Fixture file name without directory or extension.
        return os.path.splitext(os.path.basename(self.path))[0]
    @property
    def line_nr_test(self):
        """The test is always defined on the line before."""
        return self.line_nr - 1
    def __repr__(self):
        return '<%s: %s:%s %r>' % (self.__class__.__name__, self.path,
                                   self.line_nr_test, self.line.rstrip())
    def script(self, environment):
        # Build a jedi Script positioned at this test's line/column.
        return jedi.Script(
            self.source, self.line_nr, self.column, self.path,
            environment=environment
        )
    def run(self, compare_cb, environment=None):
        # Dispatch to the runner matching this test's type; compare_cb is
        # called with (case, actual, desired) and returns the fail count.
        testers = {
            TEST_COMPLETIONS: self.run_completion,
            TEST_DEFINITIONS: self.run_goto_definitions,
            TEST_ASSIGNMENTS: self.run_goto_assignments,
            TEST_USAGES: self.run_usages,
        }
        return testers[self.test_type](compare_cb, environment)
    def run_completion(self, compare_cb, environment):
        # ``correct`` is a Python list literal of expected completion names.
        completions = self.script(environment).completions()
        #import cProfile; cProfile.run('script.completions()')
        comp_str = {c.name for c in completions}
        return compare_cb(self, comp_str, set(literal_eval(self.correct)))
    def run_goto_definitions(self, compare_cb, environment):
        script = self.script(environment)
        evaluator = script._evaluator
        def comparison(definition):
            # Instances are rendered with a trailing '()' to distinguish
            # them from their classes.
            suffix = '()' if definition.type == 'instance' else ''
            return definition.desc_with_module + suffix
        def definition(correct, correct_start, path):
            # Evaluate each whitespace-separated expected expression in the
            # context of the test's line to obtain comparable Definitions.
            should_be = set()
            for match in re.finditer('(?:[^ ]+)', correct):
                string = match.group(0)
                parser = grammar36.parse(string, start_symbol='eval_input', error_recovery=False)
                parser_utils.move(parser.get_root_node(), self.line_nr)
                element = parser.get_root_node()
                module_context = script._get_module()
                # The context shouldn't matter for the test results.
                user_context = get_user_scope(module_context, (self.line_nr, 0))
                if user_context.api_type == 'function':
                    user_context = user_context.get_function_execution()
                element.parent = user_context.tree_node
                results = evaluator.eval_element(user_context, element)
                if not results:
                    raise Exception('Could not resolve %s on line %s'
                                    % (match.string, self.line_nr - 1))
                should_be |= set(Definition(evaluator, r.name) for r in results)
            debug.dbg('Finished getting types', color='YELLOW')
            # Because the objects have different ids, `repr`, then compare.
            should = set(comparison(r) for r in should_be)
            return should
        should = definition(self.correct, self.start, script.path)
        result = script.goto_definitions()
        is_str = set(comparison(r) for r in result)
        return compare_cb(self, is_str, should)
    def run_goto_assignments(self, compare_cb, environment):
        # Expected value is the string form of the sorted descriptions.
        result = self.script(environment).goto_assignments()
        comp_str = str(sorted(str(r.description) for r in result))
        return compare_cb(self, comp_str, self.correct)
    def run_usages(self, compare_cb, environment):
        # ``correct`` is a literal list of (module?, line, column) tuples;
        # plain (line, column) entries are relative to the test's line and
        # implicitly refer to the fixture module itself.
        result = self.script(environment).usages()
        self.correct = self.correct.strip()
        compare = sorted((r.module_name, r.line, r.column) for r in result)
        wanted = []
        if not self.correct:
            positions = []
        else:
            positions = literal_eval(self.correct)
        for pos_tup in positions:
            if type(pos_tup[0]) == str:
                # this means that there is a module specified
                wanted.append(pos_tup)
            else:
                line = pos_tup[0]
                if pos_tup[0] is not None:
                    line += self.line_nr
                wanted.append((self.module_name, line, pos_tup[1]))
        return compare_cb(self, compare, sorted(wanted))
def skip_python_version(line):
    """Parse a ``# python <op> <version>`` marker comment.

    Returns ``((major[, minor]), op)`` when *line* carries such a marker,
    otherwise ``None``.
    """
    found = re.match(r" *# *python *([<>]=?|==) *(\d+(?:\.\d+)?)$", line)
    if found is None:
        return None
    version = tuple(int(part) for part in found.group(2).split("."))
    return version, found.group(1)
def collect_file_tests(path, lines, lines_to_execute):
    """Yield IntegrationTestCase objects parsed from one fixture file.

    Walks *lines* with a one-line delay: when a test comment is found on a
    line, the case is emitted while processing the following line (the
    line the test actually targets).  ``lines_to_execute`` optionally
    restricts emission to given line numbers / (start, end) ranges.
    """
    def makecase(t):
        return IntegrationTestCase(t, correct, line_nr, column,
                                   start, line, path=path,
                                   skip_version_info=skip_version_info)
    start = None
    correct = None
    test_type = None
    skip_version_info = None
    for line_nr, line in enumerate(lines, 1):
        if correct is not None:
            # Previous line held a test comment; an optional leading number
            # in ``correct`` overrides the cursor column for this test.
            r = re.match('^(\d+)\s*(.*)$', correct)
            if r:
                column = int(r.group(1))
                correct = r.group(2)
                start += r.regs[2][0]  # second group, start index
            else:
                column = len(line) - 1  # -1 for the \n
            if test_type == '!':
                yield makecase(TEST_ASSIGNMENTS)
            elif test_type == '<':
                yield makecase(TEST_USAGES)
            elif correct.startswith('['):
                yield makecase(TEST_COMPLETIONS)
            else:
                yield makecase(TEST_DEFINITIONS)
            correct = None
        else:
            # A version marker applies to all subsequent tests in the file.
            skip_version_info = skip_python_version(line) or skip_version_info
            try:
                r = re.search(r'(?:^|(?<=\s))#([?!<])\s*([^\n]*)', line)
                # test_type is ? for completion and ! for goto_assignments
                test_type = r.group(1)
                correct = r.group(2)
                # Quick hack to make everything work (not quite a bloody unicorn hack though).
                if correct == '':
                    correct = ' '
                start = r.start()
            except AttributeError:
                # No test comment on this line.
                correct = None
            else:
                # Skip the test, if this is not specified test.
                for l in lines_to_execute:
                    if isinstance(l, tuple) and l[0] <= line_nr <= l[1] \
                            or line_nr == l:
                        break
                else:
                    if lines_to_execute:
                        correct = None
def collect_dir_tests(base_dir, test_files, check_thirdparty=False):
    """Yield test cases from every matching ``.py`` fixture in *base_dir*.

    ``test_files`` maps file-name prefixes to line selections; an empty
    mapping means every file runs.  With ``check_thirdparty`` the library
    a fixture exercises is imported first and the cases are marked skipped
    when it is unavailable.
    """
    for f_name in os.listdir(base_dir):
        files_to_execute = [a for a in test_files.items() if f_name.startswith(a[0])]
        # Flatten the selected line numbers/ranges for this file.
        lines_to_execute = reduce(lambda x, y: x + y[1], files_to_execute, [])
        if f_name.endswith(".py") and (not test_files or files_to_execute):
            skip = None
            if check_thirdparty:
                lib = f_name.replace('_.py', '')
                try:
                    # there is always an underline at the end.
                    # It looks like: completion/thirdparty/pylab_.py
                    __import__(lib)
                except ImportError:
                    skip = 'Thirdparty-Library %s not found.' % lib
            path = os.path.join(base_dir, f_name)
            # Read the fixture as unicode on both Python 2 and 3.
            if is_py3:
                source = open(path, encoding='utf-8').read()
            else:
                source = unicode(open(path).read(), 'UTF-8')
            for case in collect_file_tests(path, StringIO(source),
                                           lines_to_execute):
                # Each case needs the full source to build its jedi Script.
                case.source = source
                if skip:
                    case.set_skip(skip)
                yield case
docoptstr = """
Using run.py to make debugging easier with integration tests.
An alternative testing format, which is much more hacky, but very nice to
work with.
Usage:
run.py [--pdb] [--debug] [--thirdparty] [--env <dotted>] [<rest>...]
run.py --help
Options:
-h --help Show this screen.
--pdb Enable pdb debugging on fail.
-d, --debug Enable text output debugging (please install ``colorama``).
--thirdparty Also run thirdparty tests (in ``completion/thirdparty``).
--env <dotted> A Python version, like 2.7, 3.4, etc.
"""
if __name__ == '__main__':
    import docopt
    arguments = docopt.docopt(docoptstr)
    import time
    t_start = time.time()
    if arguments['--debug']:
        jedi.set_debug_function()
    # get test list, that should be executed
    # <rest> is parsed positionally: a file-name prefix starts a selection,
    # following numbers or N-M ranges restrict that file to those lines.
    test_files = {}
    last = None
    for arg in arguments['<rest>']:
        match = re.match('(\d+)-(\d+)', arg)
        if match:
            start, end = match.groups()
            test_files[last].append((int(start), int(end)))
        elif arg.isdigit():
            if last is None:
                continue
            test_files[last].append(int(arg))
        else:
            test_files[arg] = []
            last = arg
    # completion tests:
    dir_ = os.path.dirname(os.path.realpath(__file__))
    completion_test_dir = os.path.join(dir_, '../test/completion')
    completion_test_dir = os.path.abspath(completion_test_dir)
    summary = []
    tests_fail = 0
    # execute tests
    cases = list(collect_dir_tests(completion_test_dir, test_files))
    if test_files or arguments['--thirdparty']:
        # Third-party fixtures only run when explicitly selected/requested.
        completion_test_dir += '/thirdparty'
        cases += collect_dir_tests(completion_test_dir, test_files, True)
    def file_change(current, tests, fails):
        # Print the per-file summary line when moving to the next fixture.
        if current is not None:
            current = os.path.basename(current)
        print('%s \t\t %s tests and %s fails.' % (current, tests, fails))
    def report(case, actual, desired):
        # Comparison callback handed to IntegrationTestCase.run; returns
        # the number of failures (0 or 1) so the caller can accumulate.
        if actual == desired:
            return 0
        else:
            print("\ttest fail @%d, actual = %s, desired = %s"
                  % (case.line_nr - 1, actual, desired))
            return 1
    if arguments['--env']:
        environment = get_system_environment(arguments['--env'])
    else:
        # Will be 3.6.
        environment = get_default_environment()
    import traceback
    current = cases[0].path if cases else None
    count = fails = 0
    for c in cases:
        if c.get_skip_reason(environment):
            continue
        if current != c.path:
            file_change(current, count, fails)
            current = c.path
            count = fails = 0
        try:
            if c.run(report, environment):
                tests_fail += 1
                fails += 1
        except Exception:
            # A crash inside jedi counts as a failure but must not abort
            # the whole run; optionally drop into the debugger.
            traceback.print_exc()
            print("\ttest fail @%d" % (c.line_nr - 1))
            tests_fail += 1
            fails += 1
            if arguments['--pdb']:
                import pdb
                pdb.post_mortem()
        count += 1
    file_change(current, count, fails)
    print('\nSummary: (%s fails of %s tests) in %.3fs'
          % (tests_fail, len(cases), time.time() - t_start))
    for s in summary:
        print(s)
    # Non-zero exit code signals failure to CI callers.
    exit_code = 1 if tests_fail else 0
    sys.exit(exit_code)
|
gpl-3.0
|
prakritish/ansible
|
lib/ansible/modules/network/panos/panos_address.py
|
78
|
6066
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage PaloAltoNetworks Firewall
# (c) 2016, techbizdev <techbizdev@paloaltonetworks.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_address
short_description: Create address service object on PanOS devices
description:
- Create address service object of different types [IP Range, FQDN, or IP Netmask].
author: "Luigi Mori (@jtschichold), Ken Celenza (@itdependsnetworks), Ivan Bojer (@ivanbojer)"
version_added: "2.3"
requirements:
- pan-python can be obtained from PyPi U(https://pypi.python.org/pypi/pan-python)
options:
ip_address:
description:
- IP address (or hostname) of PAN-OS device being configured.
required: true
username:
description:
- Username credentials to use for authentication.
default: "admin"
password:
description:
- Password credentials to use for authentication.
required: true
address:
description:
- IP address with or without mask, range, or FQDN.
required: true
default: None
address_name:
description:
- Human readable name of the address.
required: true
default: None
type:
description:
- This is the type of the object created.
default: ip-nemask
choices: [ 'ip-netmask', 'fqdn', 'ip-range' ]
description:
description:
- Description of the address object.
default: None
tag:
description:
- Tag of the address object.
default: None
commit:
description:
- Commit configuration to the Firewall if it is changed.
default: true
'''
EXAMPLES = '''
- name: create IP-Netmask Object
panos_address:
ip_address: "192.168.1.1"
password: 'admin'
address_name: 'google_dns'
address: '8.8.8.8/32'
description: 'Google DNS'
tag: 'Outbound'
commit: False
- name: create IP-Range Object
panos_address:
ip_address: "192.168.1.1"
password: 'admin'
type: 'ip-range'
address_name: 'apple-range'
address: '17.0.0.0-17.255.255.255'
commit: False
- name: create FQDN Object
panos_address:
ip_address: "192.168.1.1"
password: 'admin'
type: 'fqdn'
address_name: 'google.com'
address: 'www.google.com'
'''
RETURN = '''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import get_exception
try:
import pan.xapi
from pan.xapi import PanXapiError
HAS_LIB = True
except ImportError:
HAS_LIB = False
_ADDRESS_XPATH = "/config/devices/entry[@name='localhost.localdomain']" + \
"/vsys/entry[@name='vsys1']" + \
"/address/entry[@name='%s']"
def address_exists(xapi, address_name):
    """Return True if an address object named *address_name* exists.

    Issues a GET for the object's xpath and checks whether the reply
    document contains a matching ``entry`` element.
    """
    xapi.get(_ADDRESS_XPATH % address_name)
    # find() returns None when no entry matched the xpath.
    return xapi.element_root.find('.//entry') is not None
def add_address(xapi, module, address, address_name, description, type, tag):
    """Create the address object on the firewall.

    Returns False without touching the device if an object with the
    same name already exists, True after the object has been created.
    """
    if address_exists(xapi, address_name):
        return False

    # Assemble the XML payload for the new entry.
    element = '<%s>%s</%s>' % (type, address, type)
    if description:
        element += '<description>%s</description>' % description
    if tag:
        element += '<tag><member>%s</member></tag>' % tag

    xapi.set(xpath=_ADDRESS_XPATH % address_name, element=element)
    return True
def main():
    """Module entry point: parse parameters, create the address object
    on the firewall, and optionally commit the configuration."""
    argument_spec = dict(
        ip_address=dict(required=True),
        password=dict(required=True, no_log=True),
        username=dict(default='admin'),
        address_name=dict(required=True),
        address=dict(),
        description=dict(),
        tag=dict(),
        type=dict(default='ip-netmask', choices=['ip-netmask', 'ip-range', 'fqdn']),
        commit=dict(type='bool', default=True)
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
    if not HAS_LIB:
        module.fail_json(msg='pan-python required for this module')

    params = module.params
    # Connect to the firewall's XML API.
    xapi = pan.xapi.PanXapi(
        hostname=params['ip_address'],
        api_username=params['username'],
        api_password=params['password']
    )

    changed = False
    try:
        changed = add_address(xapi, module,
                              params['address'],
                              params['address_name'],
                              params['description'],
                              params['type'],
                              params['tag'])
    except PanXapiError:
        exc = get_exception()
        module.fail_json(msg=exc.message)

    # Commit only when something actually changed and a commit was requested.
    if changed and params['commit']:
        xapi.commit(cmd="<commit></commit>", sync=True, interval=1)
    module.exit_json(changed=changed, msg="okey dokey")


if __name__ == '__main__':
    main()
|
gpl-3.0
|
tavern-consulting/easter-egg
|
easter_egg/tests.py
|
1
|
4000
|
import json
import base64
import os
from django.core.urlresolvers import reverse
from django.test import TestCase, LiveServerTestCase
import requests
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from easter_egg.models import split_image
class _HelperTestCase(TestCase):
    """Shared fixture: caches a reference image on disk and exposes its
    raw bytes as ``self.content`` for the concrete test cases."""

    path_to_image = '/tmp/google_logo.png'

    def setUp(self):
        # Cache miss: download the image once and persist it to disk.
        if not os.path.isfile(self.path_to_image):
            self.content = requests.get(
                'https://www.google.com/images/srpr/logo11w.png',
            ).content
            with open(self.path_to_image, 'w') as f:
                f.write(self.content)
            return
        # Cache hit: read the previously stored copy.
        with open(self.path_to_image) as f:
            self.content = f.read()
class EasterEggTestCase(_HelperTestCase):
    """Tests for the index view that serves the split-up image."""

    url = reverse('index')

    def _fetch_index(self):
        # Single place for issuing the GET against the index view.
        return self.client.get(self.url)

    def test_index_page_returns_200(self):
        self.assertEqual(self._fetch_index().status_code, 200)

    def test_index_page_returns_json(self):
        self.assertEqual(self._fetch_index()['Content-Type'], 'application/json')

    def test_index_page_returns_ten_pieces_in_a_list(self):
        pieces = json.loads(self._fetch_index().content)
        self.assertEqual(len(pieces), 10)

    def test_index_page_returns_split_up_image(self):
        # Joining all pieces and base64-decoding must reproduce the image.
        pieces = json.loads(self._fetch_index().content)
        self.assertEqual(base64.b64decode(''.join(pieces)), self.content)

    def test_test_page_returns_200(self):
        self.assertEqual(
            self.client.get(reverse('easter_egg_test')).status_code,
            200,
        )
class CoverageTestCase(TestCase):
    """Smoke test: importing the WSGI module must succeed."""

    def test(self):
        from easter_egg import wsgi
        self.assertTrue(wsgi)
class ImageSplitterTestCase(_HelperTestCase):
    """Verify that split_image partitions the cached image into pieces
    that reassemble, after base64-decoding, into the original bytes."""

    def _reassemble(self, pieces):
        # Fetch every piece in order, join, decode, and compare with the
        # raw image content prepared by _HelperTestCase.setUp().
        joined = ''.join(
            split_image(self.path_to_image, index, pieces)
            for index in range(1, pieces + 1)
        )
        self.assertEqual(base64.b64decode(joined), self.content)

    def test_N_is_1_returns_image(self):
        self._reassemble(1)

    def test_N_is_2_returns_half_image(self):
        self._reassemble(2)

    def test_N_is_3(self):
        self._reassemble(3)

    def test_N_is_1337(self):
        self._reassemble(1337)
class SleniumTestCase(_HelperTestCase, LiveServerTestCase):
    """Browser-level test: typing the Konami code on the test page must
    render the original image as a base64 data URI."""

    # The classic Konami sequence, sent as a single key stream.
    KONAMI = (
        Keys.UP, Keys.UP,
        Keys.DOWN, Keys.DOWN,
        Keys.LEFT, Keys.RIGHT,
        Keys.LEFT, Keys.RIGHT,
        'b', 'a',
    )

    def setUp(self):
        super(SleniumTestCase, self).setUp()
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)

    def tearDown(self):
        self.browser.quit()

    def test_konomi_code_generates_image(self):
        self.browser.get(self.live_server_url + reverse('easter_egg_test'))
        body = self.browser.find_element_by_tag_name('body')
        body.send_keys(''.join(self.KONAMI))
        image_tag = self.browser.find_element_by_tag_name('img')
        assert image_tag
        # Strip the data-URI prefix and compare the decoded payload with
        # the reference image bytes.
        prefix = 'data:image/png;base64,'
        encoded = image_tag.get_attribute('src')[len(prefix):]
        self.assertEqual(
            self.content,
            base64.b64decode(encoded),
        )
|
mit
|
tuxfux-hlp-notes/python-batches
|
batch-67/19-files/myenv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langbulgarianmodel.py
|
2965
|
12784
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# this table is modified base on win1251BulgarianCharToOrderMap, so
# only number <64 is sure valid
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences:3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
# Model descriptor (presumably consumed by chardet's single-byte
# charset prober — confirm against the prober code).
# 'charToOrderMap' maps raw byte values to frequency-order indices,
# 'precedenceMatrix' is the bigram language model defined above, and
# 'mTypicalPositiveRatio' matches the statistics comment above
# (first 512 sequences cover 96.9392%).
Latin5BulgarianModel = {
    'charToOrderMap': Latin5_BulgarianCharToOrderMap,
    'precedenceMatrix': BulgarianLangModel,
    'mTypicalPositiveRatio': 0.969392,
    'keepEnglishLetter': False,
    'charsetName': "ISO-8859-5"
}

# Same language model, keyed to the windows-1251 byte layout.
Win1251BulgarianModel = {
    'charToOrderMap': win1251BulgarianCharToOrderMap,
    'precedenceMatrix': BulgarianLangModel,
    'mTypicalPositiveRatio': 0.969392,
    'keepEnglishLetter': False,
    'charsetName': "windows-1251"
}
# flake8: noqa
|
gpl-3.0
|
cbrunet/ouf
|
src/ouf/view/fileview.py
|
1
|
4520
|
import os
import subprocess
import sys
from PyQt5 import QtCore, QtWidgets
from ouf.filemodel.proxymodel import FileProxyModel
from ouf.view.filenamedelegate import FileNameDelegate
from ouf import shortcuts
# TODO: modifiers to open in new window
# TODO: switch icons / tree
# TODO: modify icon size
class FileView(QtWidgets.QTreeView):
    """Tree view over a file-system model.

    Wraps the given source model in a FileProxyModel, wires up the
    delete / show-hidden actions, and implements drag & drop plus
    activation (open) behavior.
    """
    # Emitted with the new directory path whenever the view root changes;
    # an empty string means the model root.
    current_path_changed = QtCore.pyqtSignal(str)
    def __init__(self, model, parent=None):
        """Wrap *model* in the proxy and configure view behavior."""
        super().__init__(parent)
        self.proxy = FileProxyModel()
        self.proxy.setSourceModel(model)
        self._create_actions()
        self.setModel(self.proxy)
        self.setSortingEnabled(True)
        self.sortByColumn(0, QtCore.Qt.AscendingOrder)
        self.setIconSize(QtCore.QSize(32, 32))
        self.setSelectionMode(self.ExtendedSelection)
        self.setSelectionBehavior(self.SelectRows)
        self.setUniformRowHeights(True)
        self.setAllColumnsShowFocus(True)
        # self.setAnimated(True)
        self.setEditTriggers(self.SelectedClicked | self.EditKeyPressed)
        # Drag & drop: default action is move; copy/link are chosen per
        # keyboard modifiers in dragMoveEvent.
        self.setDefaultDropAction(QtCore.Qt.MoveAction)
        self.setDragDropMode(self.DragDrop)
        self.setDragDropOverwriteMode(False)
        self.setDragEnabled(True)
        self.setAutoExpandDelay(200)
        # Custom delegate for rendering/editing the file-name column.
        self._file_name_delegate = FileNameDelegate(self)
        self.setItemDelegateForColumn(0, self._file_name_delegate)
        self.activated.connect(self.open_action)
    def _create_actions(self):
        """Create the delete and show-hidden actions.

        ``_`` is assumed to be installed by the application's gettext
        setup — confirm at the application entry point.
        """
        self.action_delete = QtWidgets.QAction(_("Suppress Selected Files"), self)
        self.action_delete.setShortcuts(shortcuts.delete)
        self.action_delete.triggered.connect(self.delete_selected_files)
        # Disabled until something is selected (see selectionChanged).
        self.action_delete.setEnabled(False)
        self.action_hidden = QtWidgets.QAction(_("Show Hidden Files"), self)
        self.action_hidden.setShortcuts(shortcuts.hidden_files)
        self.action_hidden.setCheckable(True)
        self.action_hidden.setChecked(self.proxy.show_hidden)
        self.action_hidden.toggled.connect(self.show_hide_hidden_files)
    def selectionChanged(self, selected, deselected):
        """Keep the delete action enabled only while rows are selected."""
        super().selectionChanged(selected, deselected)
        self.action_delete.setEnabled(bool(self.selectedIndexes()))
    def dragEnterEvent(self, event):
        """Accept the drag only if it carries at least one local file URL."""
        if event.mimeData().hasUrls():
            if any(u.toLocalFile() for u in event.mimeData().urls()):
                event.accept()
                return
        event.ignore()
    def dragLeaveEvent(self, event):
        # Intentionally a no-op: suppress the default leave handling.
        pass
    def dragMoveEvent(self, event):
        """Pick the drop action from keyboard modifiers:
        Ctrl+Shift = link, Ctrl = copy, otherwise move."""
        super().dragMoveEvent(event)
        if event.keyboardModifiers() & QtCore.Qt.CTRL:
            if event.keyboardModifiers() & QtCore.Qt.SHIFT:
                event.setDropAction(QtCore.Qt.LinkAction)
            else:
                event.setDropAction(QtCore.Qt.CopyAction)
        else:
            event.setDropAction(QtCore.Qt.MoveAction)
        event.accept()
    def dropEvent(self, event):
        """Forward the dropped MIME data to the model at the drop index."""
        index = self.indexAt(event.pos())
        # index = self.proxy.mapToSource(pindex)
        self.model().dropMimeData(event.mimeData(), event.dropAction(), index.row(), index.column(), index.parent())
    def open_action(self, index):
        """Open the item at *index* (a proxy index).

        Directories become the new view root; files are opened with the
        platform handler (xdg-open on Linux, os.startfile elsewhere).
        An invalid index resets the view to the model root. Locked
        items are ignored.
        """
        if index.isValid():
            item = self.proxy.mapToSource(index).internalPointer()
            if item.is_lock:
                # TODO: prevent user
                return
            if item.is_dir:
                self.setRootIndex(self.proxy.index(index.row(), 0, index.parent()))
                #TODO: unselect
                self.current_path_changed.emit(item.path)
                QtCore.QCoreApplication.processEvents()  # Ensure the new path is set before resizing
                self.resizeColumnToContents(0)
            else:
                # TODO: open file / exec process / etc.
                if sys.platform.startswith('linux'):
                    subprocess.run(['xdg-open', item.path])
                else:
                    os.startfile(item.path)  # windows
        else:
            # go to root
            self.setRootIndex(index)
            self.current_path_changed.emit("")
    def delete_selected_files(self):
        """Delete every selected file through the source model, then
        refresh the proxy so removed rows disappear."""
        selection = self.proxy.mapSelectionToSource(self.selectionModel().selection())
        self.proxy.sourceModel().delete_files(selection.indexes())
        self.proxy.invalidate()
    def show_hide_hidden_files(self, show):
        """Toggle hidden-file visibility on the proxy model."""
        self.proxy.show_hidden = show
|
gpl-3.0
|
josepht/snapcraft
|
snapcraft/internal/repo/__init__.py
|
2
|
1151
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import shutil
from snapcraft.internal.errors import MissingCommandError
from . import errors # noqa
from ._base import BaseRepo # noqa
from ._base import fix_pkg_config # noqa
from ._platform import _get_repo_for_platform
# Imported for backwards compatibility with plugins
from ._deb import Ubuntu # noqa
Repo = _get_repo_for_platform()
def check_for_command(command):
    """Raise MissingCommandError if *command* cannot be found on PATH."""
    found = shutil.which(command)
    if found is None:
        raise MissingCommandError([command])
|
gpl-3.0
|
soldag/home-assistant
|
homeassistant/components/zha/core/typing.py
|
15
|
1647
|
"""Typing helpers for ZHA component."""
from typing import TYPE_CHECKING, Callable, TypeVar
import zigpy.device
import zigpy.endpoint
import zigpy.group
import zigpy.zcl
import zigpy.zdo
# pylint: disable=invalid-name
CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable)
# At runtime the ZHA aliases below are plain strings (cheap forward
# references), so importing this module never pulls in the heavy ZHA
# internals.  Under TYPE_CHECKING they are rebound to the real classes
# so static type checkers resolve them properly.
ChannelType = "ZigbeeChannel"
ChannelsType = "Channels"
ChannelPoolType = "ChannelPool"
ClientChannelType = "ClientChannel"
ZDOChannelType = "ZDOChannel"
ZhaDeviceType = "ZHADevice"
ZhaEntityType = "ZHAEntity"
ZhaGatewayType = "ZHAGateway"
ZhaGroupType = "ZHAGroupType"
# zigpy types are imported unconditionally, so these are real classes.
ZigpyClusterType = zigpy.zcl.Cluster
ZigpyDeviceType = zigpy.device.Device
ZigpyEndpointType = zigpy.endpoint.Endpoint
ZigpyGroupType = zigpy.group.Group
ZigpyZdoType = zigpy.zdo.ZDO
if TYPE_CHECKING:
    # Imported only for type checking to avoid circular imports at runtime.
    import homeassistant.components.zha.core.channels
    import homeassistant.components.zha.core.channels as channels
    import homeassistant.components.zha.core.channels.base as base_channels
    import homeassistant.components.zha.core.device
    import homeassistant.components.zha.core.gateway
    import homeassistant.components.zha.core.group
    import homeassistant.components.zha.entity
    ChannelType = base_channels.ZigbeeChannel
    ChannelsType = channels.Channels
    ChannelPoolType = channels.ChannelPool
    ClientChannelType = base_channels.ClientChannel
    ZDOChannelType = base_channels.ZDOChannel
    ZhaDeviceType = homeassistant.components.zha.core.device.ZHADevice
    ZhaEntityType = homeassistant.components.zha.entity.ZhaEntity
    ZhaGatewayType = homeassistant.components.zha.core.gateway.ZHAGateway
    ZhaGroupType = homeassistant.components.zha.core.group.ZHAGroup
|
apache-2.0
|
wtmmac/p2pool
|
wstools/Utility.py
|
292
|
50865
|
# Copyright (c) 2003, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory (subject to receipt of
# any required approvals from the U.S. Dept. of Energy). All rights
# reserved.
#
# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
ident = "$Id$"
import sys, types, httplib, urllib, socket, weakref
from os.path import isfile
from string import join, strip, split
from UserDict import UserDict
from cStringIO import StringIO
from TimeoutSocket import TimeoutSocket, TimeoutError
from urlparse import urlparse
from httplib import HTTPConnection, HTTPSConnection
from exceptions import Exception
try:
from ZSI import _get_idstr
except:
def _get_idstr(pyobj):
'''Python 2.3.x generates a FutureWarning for negative IDs, so
we use a different prefix character to ensure uniqueness, and
call abs() to avoid the warning.'''
x = id(pyobj)
if x < 0:
return 'x%x' % abs(x)
return 'o%x' % x
import xml.dom.minidom
from xml.dom import Node
import logging
from c14n import Canonicalize
from Namespaces import SCHEMA, SOAP, XMLNS, ZSI_SCHEMA_URI
try:
    from xml.dom.ext import SplitQName
# Only a failed import should select the fallback implementation; the
# original bare except also masked unrelated import-time errors.
except ImportError:
    def SplitQName(qname):
        '''SplitQName(qname) -> (string, string)

           Split Qualified Name into a tuple of len 2, consisting
           of the prefix and the local name.

           (prefix, localName)

           Special Cases:
               xmlns -- (localName, 'xmlns')
               None -- (None, localName)

           Returns None for malformed names with more than one colon.
        '''
        l = qname.split(':')
        if len(l) == 1:
            # No prefix at all.
            l.insert(0, None)
        elif len(l) == 2:
            if l[0] == 'xmlns':
                # Namespace declaration: report 'xmlns' as the "local" part.
                l.reverse()
        else:
            return
        return tuple(l)
#
# python2.3 urllib.basejoin does not remove current directory ./
# from path and this causes problems on subsequent basejoins.
#
# Default: use the stdlib join; override below for old interpreters
# where urllib.basejoin keeps a leading './' (see comment above).
basejoin = urllib.basejoin
if sys.version_info[0:2] < (2, 4, 0, 'final', 0)[0:2]:
    #basejoin = lambda base,url: urllib.basejoin(base,url.lstrip('./'))
    token = './'
    def basejoin(base, url):
        """Join *url* onto *base*, dropping a leading './' from *url*."""
        # NOTE(review): "startswith(...) is True" is redundant; a plain
        # truth test would behave identically here.
        if url.startswith(token) is True:
            return urllib.basejoin(base,url[2:])
        return urllib.basejoin(base,url)
# Module-level exception types.  Deliberately thin: they exist only to
# give callers distinct classes to catch.

class NamespaceError(Exception):
    """Used to indicate a Namespace Error."""


class RecursionError(Exception):
    """Used to indicate a HTTP redirect recursion."""
    # NOTE(review): shadows the builtin RecursionError on Python 3.5+;
    # callers must reference it through this module.


class ParseError(Exception):
    """Used to indicate a XML parsing error."""


class DOMException(Exception):
    """Used to indicate a problem processing DOM."""
class Base:
    """Base class for instance level Logging.

    The logger name embeds the module, the concrete class, and the
    instance id (via _get_idstr) so that log lines can be traced back
    to a specific object.
    """
    def __init__(self, module=__name__):
        # One logger per instance; _get_idstr keeps the name unique.
        self.logger = logging.getLogger('%s-%s(%s)' %(module, self.__class__, _get_idstr(self)))
class HTTPResponse:
    """Snapshot of an HTTP response message.

    The constructor copies status, reason and headers, drains the body
    and closes the underlying response, so the instance can safely
    outlive the connection. An empty body is normalized to None.
    """
    def __init__(self, response):
        self.status, self.reason = response.status, response.reason
        self.headers = response.msg
        data = response.read()
        self.body = data if data else None
        response.close()
class TimeoutHTTP(HTTPConnection):
    """A custom http connection object that supports socket timeout."""
    def __init__(self, host, port=None, timeout=20):
        # timeout (seconds) is applied when connect() builds the
        # TimeoutSocket wrapper.
        HTTPConnection.__init__(self, host, port)
        self.timeout = timeout
    def connect(self):
        # Replace the default socket with the timeout-aware wrapper.
        self.sock = TimeoutSocket(self.timeout)
        self.sock.connect((self.host, self.port))
class TimeoutHTTPS(HTTPSConnection):
    """A custom https object that supports socket timeout. Note that this
    is not really complete. The builtin SSL support in the Python socket
    module requires a real socket (type) to be passed in to be hooked to
    SSL. That means our fake socket won't work and our timeout hacks are
    bypassed for send and recv calls. Since our hack _is_ in place at
    connect() time, it should at least provide some timeout protection."""
    def __init__(self, host, port=None, timeout=20, **kwargs):
        # Extra kwargs (e.g. key_file/cert_file) are forwarded to
        # HTTPSConnection unchanged.
        HTTPSConnection.__init__(self, str(host), port, **kwargs)
        self.timeout = timeout
    def connect(self):
        # Connect through the timeout-aware socket, then wrap the real
        # underlying socket in SSL.
        sock = TimeoutSocket(self.timeout)
        sock.connect((self.host, self.port))
        realsock = getattr(sock.sock, '_sock', sock.sock)
        # NOTE(review): socket.ssl and httplib.FakeSocket are Python 2
        # only; this code path cannot run on Python 3.
        ssl = socket.ssl(realsock, self.key_file, self.cert_file)
        self.sock = httplib.FakeSocket(sock, ssl)
def urlopen(url, timeout=20, redirects=None):
    """A minimal urlopen replacement hack that supports timeouts for http.
    Note that this supports GET only.

    url -- http/https/other URL; non-http schemes fall back to urllib.
    timeout -- socket timeout in seconds for http(s).
    redirects -- internal dict of already-visited redirect locations,
        used to detect circular redirects (raises RecursionError).

    Returns a StringIO with the response body on 2xx; raises
    HTTPResponse (a classic class, raisable under Python 2) for
    non-success statuses."""
    scheme, host, path, params, query, frag = urlparse(url)
    if not scheme in ('http', 'https'):
        return urllib.urlopen(url)
    if params: path = '%s;%s' % (path, params)
    if query: path = '%s?%s' % (path, query)
    if frag: path = '%s#%s' % (path, frag)
    if scheme == 'https':
        # If ssl is not compiled into Python, you will not get an exception
        # until a conn.endheaders() call. We need to know sooner, so use
        # getattr.
        try:
            import M2Crypto
        except ImportError:
            if not hasattr(socket, 'ssl'):
                raise RuntimeError, 'no built-in SSL Support'
            conn = TimeoutHTTPS(host, None, timeout)
        else:
            # Prefer M2Crypto when available: its context supports a real
            # session timeout rather than our socket-wrapping hack.
            ctx = M2Crypto.SSL.Context()
            ctx.set_session_timeout(timeout)
            conn = M2Crypto.httpslib.HTTPSConnection(host, ssl_context=ctx)
            conn.set_debuglevel(1)
    else:
        conn = TimeoutHTTP(host, None, timeout)
    conn.putrequest('GET', path)
    conn.putheader('Connection', 'close')
    conn.endheaders()
    response = None
    while 1:
        # Skip any "100 Continue" interim responses; resetting httplib's
        # private state lets getresponse() be called again on the same
        # connection.
        response = conn.getresponse()
        if response.status != 100:
            break
        conn._HTTPConnection__state = httplib._CS_REQ_SENT
        conn._HTTPConnection__response = None
    status = response.status
    # If we get an HTTP redirect, we will follow it automatically.
    if status >= 300 and status < 400:
        location = response.msg.getheader('location')
        if location is not None:
            response.close()
            if redirects is not None and redirects.has_key(location):
                raise RecursionError(
                    'Circular HTTP redirection detected.'
                    )
            if redirects is None:
                redirects = {}
            redirects[location] = 1
            return urlopen(location, timeout, redirects)
        # Redirect without a Location header is surfaced as an error.
        raise HTTPResponse(response)
    if not (status >= 200 and status < 300):
        raise HTTPResponse(response)
    body = StringIO(response.read())
    response.close()
    return body
class DOM:
"""The DOM singleton defines a number of XML related constants and
provides a number of utility methods for DOM related tasks. It
also provides some basic abstractions so that the rest of the
package need not care about actual DOM implementation in use."""
# Namespace stuff related to the SOAP specification.
NS_SOAP_ENV_1_1 = 'http://schemas.xmlsoap.org/soap/envelope/'
NS_SOAP_ENC_1_1 = 'http://schemas.xmlsoap.org/soap/encoding/'
NS_SOAP_ENV_1_2 = 'http://www.w3.org/2001/06/soap-envelope'
NS_SOAP_ENC_1_2 = 'http://www.w3.org/2001/06/soap-encoding'
NS_SOAP_ENV_ALL = (NS_SOAP_ENV_1_1, NS_SOAP_ENV_1_2)
NS_SOAP_ENC_ALL = (NS_SOAP_ENC_1_1, NS_SOAP_ENC_1_2)
NS_SOAP_ENV = NS_SOAP_ENV_1_1
NS_SOAP_ENC = NS_SOAP_ENC_1_1
_soap_uri_mapping = {
NS_SOAP_ENV_1_1 : '1.1',
NS_SOAP_ENV_1_2 : '1.2',
}
SOAP_ACTOR_NEXT_1_1 = 'http://schemas.xmlsoap.org/soap/actor/next'
SOAP_ACTOR_NEXT_1_2 = 'http://www.w3.org/2001/06/soap-envelope/actor/next'
SOAP_ACTOR_NEXT_ALL = (SOAP_ACTOR_NEXT_1_1, SOAP_ACTOR_NEXT_1_2)
def SOAPUriToVersion(self, uri):
"""Return the SOAP version related to an envelope uri."""
value = self._soap_uri_mapping.get(uri)
if value is not None:
return value
raise ValueError(
'Unsupported SOAP envelope uri: %s' % uri
)
def GetSOAPEnvUri(self, version):
"""Return the appropriate SOAP envelope uri for a given
human-friendly SOAP version string (e.g. '1.1')."""
attrname = 'NS_SOAP_ENV_%s' % join(split(version, '.'), '_')
value = getattr(self, attrname, None)
if value is not None:
return value
raise ValueError(
'Unsupported SOAP version: %s' % version
)
def GetSOAPEncUri(self, version):
"""Return the appropriate SOAP encoding uri for a given
human-friendly SOAP version string (e.g. '1.1')."""
attrname = 'NS_SOAP_ENC_%s' % join(split(version, '.'), '_')
value = getattr(self, attrname, None)
if value is not None:
return value
raise ValueError(
'Unsupported SOAP version: %s' % version
)
def GetSOAPActorNextUri(self, version):
"""Return the right special next-actor uri for a given
human-friendly SOAP version string (e.g. '1.1')."""
attrname = 'SOAP_ACTOR_NEXT_%s' % join(split(version, '.'), '_')
value = getattr(self, attrname, None)
if value is not None:
return value
raise ValueError(
'Unsupported SOAP version: %s' % version
)
# Namespace stuff related to XML Schema.
NS_XSD_99 = 'http://www.w3.org/1999/XMLSchema'
NS_XSI_99 = 'http://www.w3.org/1999/XMLSchema-instance'
NS_XSD_00 = 'http://www.w3.org/2000/10/XMLSchema'
NS_XSI_00 = 'http://www.w3.org/2000/10/XMLSchema-instance'
NS_XSD_01 = 'http://www.w3.org/2001/XMLSchema'
NS_XSI_01 = 'http://www.w3.org/2001/XMLSchema-instance'
NS_XSD_ALL = (NS_XSD_99, NS_XSD_00, NS_XSD_01)
NS_XSI_ALL = (NS_XSI_99, NS_XSI_00, NS_XSI_01)
NS_XSD = NS_XSD_01
NS_XSI = NS_XSI_01
_xsd_uri_mapping = {
NS_XSD_99 : NS_XSI_99,
NS_XSD_00 : NS_XSI_00,
NS_XSD_01 : NS_XSI_01,
}
for key, value in _xsd_uri_mapping.items():
_xsd_uri_mapping[value] = key
def InstanceUriForSchemaUri(self, uri):
"""Return the appropriate matching XML Schema instance uri for
the given XML Schema namespace uri."""
return self._xsd_uri_mapping.get(uri)
def SchemaUriForInstanceUri(self, uri):
"""Return the appropriate matching XML Schema namespace uri for
the given XML Schema instance namespace uri."""
return self._xsd_uri_mapping.get(uri)
# Namespace stuff related to WSDL.
NS_WSDL_1_1 = 'http://schemas.xmlsoap.org/wsdl/'
NS_WSDL_ALL = (NS_WSDL_1_1,)
NS_WSDL = NS_WSDL_1_1
NS_SOAP_BINDING_1_1 = 'http://schemas.xmlsoap.org/wsdl/soap/'
NS_HTTP_BINDING_1_1 = 'http://schemas.xmlsoap.org/wsdl/http/'
NS_MIME_BINDING_1_1 = 'http://schemas.xmlsoap.org/wsdl/mime/'
NS_SOAP_BINDING_ALL = (NS_SOAP_BINDING_1_1,)
NS_HTTP_BINDING_ALL = (NS_HTTP_BINDING_1_1,)
NS_MIME_BINDING_ALL = (NS_MIME_BINDING_1_1,)
NS_SOAP_BINDING = NS_SOAP_BINDING_1_1
NS_HTTP_BINDING = NS_HTTP_BINDING_1_1
NS_MIME_BINDING = NS_MIME_BINDING_1_1
NS_SOAP_HTTP_1_1 = 'http://schemas.xmlsoap.org/soap/http'
NS_SOAP_HTTP_ALL = (NS_SOAP_HTTP_1_1,)
NS_SOAP_HTTP = NS_SOAP_HTTP_1_1
_wsdl_uri_mapping = {
NS_WSDL_1_1 : '1.1',
}
def WSDLUriToVersion(self, uri):
"""Return the WSDL version related to a WSDL namespace uri."""
value = self._wsdl_uri_mapping.get(uri)
if value is not None:
return value
raise ValueError(
'Unsupported SOAP envelope uri: %s' % uri
)
def GetWSDLUri(self, version):
attr = 'NS_WSDL_%s' % join(split(version, '.'), '_')
value = getattr(self, attr, None)
if value is not None:
return value
raise ValueError(
'Unsupported WSDL version: %s' % version
)
def GetWSDLSoapBindingUri(self, version):
attr = 'NS_SOAP_BINDING_%s' % join(split(version, '.'), '_')
value = getattr(self, attr, None)
if value is not None:
return value
raise ValueError(
'Unsupported WSDL version: %s' % version
)
def GetWSDLHttpBindingUri(self, version):
attr = 'NS_HTTP_BINDING_%s' % join(split(version, '.'), '_')
value = getattr(self, attr, None)
if value is not None:
return value
raise ValueError(
'Unsupported WSDL version: %s' % version
)
def GetWSDLMimeBindingUri(self, version):
attr = 'NS_MIME_BINDING_%s' % join(split(version, '.'), '_')
value = getattr(self, attr, None)
if value is not None:
return value
raise ValueError(
'Unsupported WSDL version: %s' % version
)
def GetWSDLHttpTransportUri(self, version):
attr = 'NS_SOAP_HTTP_%s' % join(split(version, '.'), '_')
value = getattr(self, attr, None)
if value is not None:
return value
raise ValueError(
'Unsupported WSDL version: %s' % version
)
# Other xml namespace constants.
NS_XMLNS = 'http://www.w3.org/2000/xmlns/'
def isElement(self, node, name, nsuri=None):
"""Return true if the given node is an element with the given
name and optional namespace uri."""
if node.nodeType != node.ELEMENT_NODE:
return 0
return node.localName == name and \
(nsuri is None or self.nsUriMatch(node.namespaceURI, nsuri))
def getElement(self, node, name, nsuri=None, default=join):
"""Return the first child of node with a matching name and
namespace uri, or the default if one is provided."""
nsmatch = self.nsUriMatch
ELEMENT_NODE = node.ELEMENT_NODE
for child in node.childNodes:
if child.nodeType == ELEMENT_NODE:
if ((child.localName == name or name is None) and
(nsuri is None or nsmatch(child.namespaceURI, nsuri))
):
return child
if default is not join:
return default
raise KeyError, name
def getElementById(self, node, id, default=join):
"""Return the first child of node matching an id reference."""
attrget = self.getAttr
ELEMENT_NODE = node.ELEMENT_NODE
for child in node.childNodes:
if child.nodeType == ELEMENT_NODE:
if attrget(child, 'id') == id:
return child
if default is not join:
return default
raise KeyError, name
def getMappingById(self, document, depth=None, element=None,
mapping=None, level=1):
"""Create an id -> element mapping of those elements within a
document that define an id attribute. The depth of the search
may be controlled by using the (1-based) depth argument."""
if document is not None:
element = document.documentElement
mapping = {}
attr = element._attrs.get('id', None)
if attr is not None:
mapping[attr.value] = element
if depth is None or depth > level:
level = level + 1
ELEMENT_NODE = element.ELEMENT_NODE
for child in element.childNodes:
if child.nodeType == ELEMENT_NODE:
self.getMappingById(None, depth, child, mapping, level)
return mapping
def getElements(self, node, name, nsuri=None):
"""Return a sequence of the child elements of the given node that
match the given name and optional namespace uri."""
nsmatch = self.nsUriMatch
result = []
ELEMENT_NODE = node.ELEMENT_NODE
for child in node.childNodes:
if child.nodeType == ELEMENT_NODE:
if ((child.localName == name or name is None) and (
(nsuri is None) or nsmatch(child.namespaceURI, nsuri))):
result.append(child)
return result
def hasAttr(self, node, name, nsuri=None):
"""Return true if element has attribute with the given name and
optional nsuri. If nsuri is not specified, returns true if an
attribute exists with the given name with any namespace."""
if nsuri is None:
if node.hasAttribute(name):
return True
return False
return node.hasAttributeNS(nsuri, name)
def getAttr(self, node, name, nsuri=None, default=join):
"""Return the value of the attribute named 'name' with the
optional nsuri, or the default if one is specified. If
nsuri is not specified, an attribute that matches the
given name will be returned regardless of namespace."""
if nsuri is None:
result = node._attrs.get(name, None)
if result is None:
for item in node._attrsNS.keys():
if item[1] == name:
result = node._attrsNS[item]
break
else:
result = node._attrsNS.get((nsuri, name), None)
if result is not None:
return result.value
if default is not join:
return default
return ''
def getAttrs(self, node):
"""Return a Collection of all attributes
"""
attrs = {}
for k,v in node._attrs.items():
attrs[k] = v.value
return attrs
def getElementText(self, node, preserve_ws=None):
"""Return the text value of an xml element node. Leading and trailing
whitespace is stripped from the value unless the preserve_ws flag
is passed with a true value."""
result = []
for child in node.childNodes:
nodetype = child.nodeType
if nodetype == child.TEXT_NODE or \
nodetype == child.CDATA_SECTION_NODE:
result.append(child.nodeValue)
value = join(result, '')
if preserve_ws is None:
value = strip(value)
return value
def findNamespaceURI(self, prefix, node):
"""Find a namespace uri given a prefix and a context node."""
attrkey = (self.NS_XMLNS, prefix)
DOCUMENT_NODE = node.DOCUMENT_NODE
ELEMENT_NODE = node.ELEMENT_NODE
while 1:
if node is None:
raise DOMException('Value for prefix %s not found.' % prefix)
if node.nodeType != ELEMENT_NODE:
node = node.parentNode
continue
result = node._attrsNS.get(attrkey, None)
if result is not None:
return result.value
if hasattr(node, '__imported__'):
raise DOMException('Value for prefix %s not found.' % prefix)
node = node.parentNode
if node.nodeType == DOCUMENT_NODE:
raise DOMException('Value for prefix %s not found.' % prefix)
def findDefaultNS(self, node):
"""Return the current default namespace uri for the given node."""
attrkey = (self.NS_XMLNS, 'xmlns')
DOCUMENT_NODE = node.DOCUMENT_NODE
ELEMENT_NODE = node.ELEMENT_NODE
while 1:
if node.nodeType != ELEMENT_NODE:
node = node.parentNode
continue
result = node._attrsNS.get(attrkey, None)
if result is not None:
return result.value
if hasattr(node, '__imported__'):
raise DOMException('Cannot determine default namespace.')
node = node.parentNode
if node.nodeType == DOCUMENT_NODE:
raise DOMException('Cannot determine default namespace.')
def findTargetNS(self, node):
"""Return the defined target namespace uri for the given node."""
attrget = self.getAttr
attrkey = (self.NS_XMLNS, 'xmlns')
DOCUMENT_NODE = node.DOCUMENT_NODE
ELEMENT_NODE = node.ELEMENT_NODE
while 1:
if node.nodeType != ELEMENT_NODE:
node = node.parentNode
continue
result = attrget(node, 'targetNamespace', default=None)
if result is not None:
return result
node = node.parentNode
if node.nodeType == DOCUMENT_NODE:
raise DOMException('Cannot determine target namespace.')
def getTypeRef(self, element):
"""Return (namespaceURI, name) for a type attribue of the given
element, or None if the element does not have a type attribute."""
typeattr = self.getAttr(element, 'type', default=None)
if typeattr is None:
return None
parts = typeattr.split(':', 1)
if len(parts) == 2:
nsuri = self.findNamespaceURI(parts[0], element)
else:
nsuri = self.findDefaultNS(element)
return (nsuri, parts[1])
def importNode(self, document, node, deep=0):
"""Implements (well enough for our purposes) DOM node import."""
nodetype = node.nodeType
if nodetype in (node.DOCUMENT_NODE, node.DOCUMENT_TYPE_NODE):
raise DOMException('Illegal node type for importNode')
if nodetype == node.ENTITY_REFERENCE_NODE:
deep = 0
clone = node.cloneNode(deep)
self._setOwnerDoc(document, clone)
clone.__imported__ = 1
return clone
def _setOwnerDoc(self, document, node):
node.ownerDocument = document
for child in node.childNodes:
self._setOwnerDoc(document, child)
def nsUriMatch(self, value, wanted, strict=0, tt=type(())):
"""Return a true value if two namespace uri values match."""
if value == wanted or (type(wanted) is tt) and value in wanted:
return 1
if not strict and value is not None:
wanted = type(wanted) is tt and wanted or (wanted,)
value = value[-1:] != '/' and value or value[:-1]
for item in wanted:
if item == value or item[:-1] == value:
return 1
return 0
def createDocument(self, nsuri, qname, doctype=None):
"""Create a new writable DOM document object."""
impl = xml.dom.minidom.getDOMImplementation()
return impl.createDocument(nsuri, qname, doctype)
def loadDocument(self, data):
"""Load an xml file from a file-like object and return a DOM
document instance."""
return xml.dom.minidom.parse(data)
def loadFromURL(self, url):
"""Load an xml file from a URL and return a DOM document."""
if isfile(url) is True:
file = open(url, 'r')
else:
file = urlopen(url)
try:
result = self.loadDocument(file)
except Exception, ex:
file.close()
raise ParseError(('Failed to load document %s' %url,) + ex.args)
else:
file.close()
return result
DOM = DOM()  # Replace the class with a module-level singleton instance.
class MessageInterface:
    '''Higher Level Interface, delegates to DOM singleton, must
    be subclassed and implement all methods that throw NotImplementedError.
    '''
    def __init__(self, sw):
        '''Constructor, May be extended, do not override.
        sw -- soapWriter instance (a weakref or None is stored as-is)
        '''
        self.sw = None
        # Hold the SoapWriter weakly so this proxy does not keep it alive;
        # an already-weak reference (or None) is kept unchanged.
        if type(sw) != weakref.ReferenceType and sw is not None:
            self.sw = weakref.ref(sw)
        else:
            self.sw = sw
    def AddCallback(self, func, *arglist):
        # self.sw is a weakref -- call it to get the SoapWriter first.
        self.sw().AddCallback(func, *arglist)
    def Known(self, obj):
        return self.sw().Known(obj)
    def Forget(self, obj):
        return self.sw().Forget(obj)
    def canonicalize(self):
        '''canonicalize the underlying DOM, and return as string.
        '''
        raise NotImplementedError, ''
    # NOTE: the default for namespaceURI is evaluated once, at class
    # definition time, from the module-level SOAP constants.
    def createDocument(self, namespaceURI=SOAP.ENV, localName='Envelope'):
        '''create Document
        '''
        raise NotImplementedError, ''
    def createAppendElement(self, namespaceURI, localName):
        '''create and append element(namespaceURI,localName), and return
        the node.
        '''
        raise NotImplementedError, ''
    def findNamespaceURI(self, qualifiedName):
        raise NotImplementedError, ''
    def resolvePrefix(self, prefix):
        raise NotImplementedError, ''
    def setAttributeNS(self, namespaceURI, localName, value):
        '''set attribute (namespaceURI, localName)=value
        '''
        raise NotImplementedError, ''
    def setAttributeType(self, namespaceURI, localName):
        '''set attribute xsi:type=(namespaceURI, localName)
        '''
        raise NotImplementedError, ''
    def setNamespaceAttribute(self, namespaceURI, prefix):
        '''set namespace attribute xmlns:prefix=namespaceURI
        '''
        raise NotImplementedError, ''
class ElementProxy(Base, MessageInterface):
'''
'''
_soap_env_prefix = 'SOAP-ENV'
_soap_enc_prefix = 'SOAP-ENC'
_zsi_prefix = 'ZSI'
_xsd_prefix = 'xsd'
_xsi_prefix = 'xsi'
_xml_prefix = 'xml'
_xmlns_prefix = 'xmlns'
_soap_env_nsuri = SOAP.ENV
_soap_enc_nsuri = SOAP.ENC
_zsi_nsuri = ZSI_SCHEMA_URI
_xsd_nsuri = SCHEMA.XSD3
_xsi_nsuri = SCHEMA.XSI3
_xml_nsuri = XMLNS.XML
_xmlns_nsuri = XMLNS.BASE
standard_ns = {\
_xml_prefix:_xml_nsuri,
_xmlns_prefix:_xmlns_nsuri
}
reserved_ns = {\
_soap_env_prefix:_soap_env_nsuri,
_soap_enc_prefix:_soap_enc_nsuri,
_zsi_prefix:_zsi_nsuri,
_xsd_prefix:_xsd_nsuri,
_xsi_prefix:_xsi_nsuri,
}
name = None
namespaceURI = None
def __init__(self, sw, message=None):
'''Initialize.
sw -- SoapWriter
'''
self._indx = 0
MessageInterface.__init__(self, sw)
Base.__init__(self)
self._dom = DOM
self.node = None
if type(message) in (types.StringType,types.UnicodeType):
self.loadFromString(message)
elif isinstance(message, ElementProxy):
self.node = message._getNode()
else:
self.node = message
self.processorNss = self.standard_ns.copy()
self.processorNss.update(self.reserved_ns)
def __str__(self):
return self.toString()
def evaluate(self, expression, processorNss=None):
'''expression -- XPath compiled expression
'''
from Ft.Xml import XPath
if not processorNss:
context = XPath.Context.Context(self.node, processorNss=self.processorNss)
else:
context = XPath.Context.Context(self.node, processorNss=processorNss)
nodes = expression.evaluate(context)
return map(lambda node: ElementProxy(self.sw,node), nodes)
#############################################
# Methods for checking/setting the
# classes (namespaceURI,name) node.
#############################################
def checkNode(self, namespaceURI=None, localName=None):
'''
namespaceURI -- namespace of element
localName -- local name of element
'''
namespaceURI = namespaceURI or self.namespaceURI
localName = localName or self.name
check = False
if localName and self.node:
check = self._dom.isElement(self.node, localName, namespaceURI)
if not check:
raise NamespaceError, 'unexpected node type %s, expecting %s' %(self.node, localName)
def setNode(self, node=None):
if node:
if isinstance(node, ElementProxy):
self.node = node._getNode()
else:
self.node = node
elif self.node:
node = self._dom.getElement(self.node, self.name, self.namespaceURI, default=None)
if not node:
raise NamespaceError, 'cant find element (%s,%s)' %(self.namespaceURI,self.name)
self.node = node
else:
#self.node = self._dom.create(self.node, self.name, self.namespaceURI, default=None)
self.createDocument(self.namespaceURI, localName=self.name, doctype=None)
self.checkNode()
#############################################
# Wrapper Methods for direct DOM Element Node access
#############################################
def _getNode(self):
return self.node
def _getElements(self):
return self._dom.getElements(self.node, name=None)
def _getOwnerDocument(self):
return self.node.ownerDocument or self.node
def _getUniquePrefix(self):
'''I guess we need to resolve all potential prefixes
because when the current node is attached it copies the
namespaces into the parent node.
'''
while 1:
self._indx += 1
prefix = 'ns%d' %self._indx
try:
self._dom.findNamespaceURI(prefix, self._getNode())
except DOMException, ex:
break
return prefix
def _getPrefix(self, node, nsuri):
'''
Keyword arguments:
node -- DOM Element Node
nsuri -- namespace of attribute value
'''
try:
if node and (node.nodeType == node.ELEMENT_NODE) and \
(nsuri == self._dom.findDefaultNS(node)):
return None
except DOMException, ex:
pass
if nsuri == XMLNS.XML:
return self._xml_prefix
if node.nodeType == Node.ELEMENT_NODE:
for attr in node.attributes.values():
if attr.namespaceURI == XMLNS.BASE \
and nsuri == attr.value:
return attr.localName
else:
if node.parentNode:
return self._getPrefix(node.parentNode, nsuri)
raise NamespaceError, 'namespaceURI "%s" is not defined' %nsuri
def _appendChild(self, node):
'''
Keyword arguments:
node -- DOM Element Node
'''
if node is None:
raise TypeError, 'node is None'
self.node.appendChild(node)
def _insertBefore(self, newChild, refChild):
'''
Keyword arguments:
child -- DOM Element Node to insert
refChild -- DOM Element Node
'''
self.node.insertBefore(newChild, refChild)
def _setAttributeNS(self, namespaceURI, qualifiedName, value):
'''
Keyword arguments:
namespaceURI -- namespace of attribute
qualifiedName -- qualified name of new attribute value
value -- value of attribute
'''
self.node.setAttributeNS(namespaceURI, qualifiedName, value)
#############################################
#General Methods
#############################################
def isFault(self):
'''check to see if this is a soap:fault message.
'''
return False
def getPrefix(self, namespaceURI):
try:
prefix = self._getPrefix(node=self.node, nsuri=namespaceURI)
except NamespaceError, ex:
prefix = self._getUniquePrefix()
self.setNamespaceAttribute(prefix, namespaceURI)
return prefix
def getDocument(self):
return self._getOwnerDocument()
def setDocument(self, document):
self.node = document
def importFromString(self, xmlString):
doc = self._dom.loadDocument(StringIO(xmlString))
node = self._dom.getElement(doc, name=None)
clone = self.importNode(node)
self._appendChild(clone)
def importNode(self, node):
if isinstance(node, ElementProxy):
node = node._getNode()
return self._dom.importNode(self._getOwnerDocument(), node, deep=1)
def loadFromString(self, data):
self.node = self._dom.loadDocument(StringIO(data))
def canonicalize(self):
return Canonicalize(self.node)
def toString(self):
return self.canonicalize()
def createDocument(self, namespaceURI, localName, doctype=None):
'''If specified must be a SOAP envelope, else may contruct an empty document.
'''
prefix = self._soap_env_prefix
if namespaceURI == self.reserved_ns[prefix]:
qualifiedName = '%s:%s' %(prefix,localName)
elif namespaceURI is localName is None:
self.node = self._dom.createDocument(None,None,None)
return
else:
raise KeyError, 'only support creation of document in %s' %self.reserved_ns[prefix]
document = self._dom.createDocument(nsuri=namespaceURI, qname=qualifiedName, doctype=doctype)
self.node = document.childNodes[0]
#set up reserved namespace attributes
for prefix,nsuri in self.reserved_ns.items():
self._setAttributeNS(namespaceURI=self._xmlns_nsuri,
qualifiedName='%s:%s' %(self._xmlns_prefix,prefix),
value=nsuri)
#############################################
#Methods for attributes
#############################################
def hasAttribute(self, namespaceURI, localName):
return self._dom.hasAttr(self._getNode(), name=localName, nsuri=namespaceURI)
def setAttributeType(self, namespaceURI, localName):
'''set xsi:type
Keyword arguments:
namespaceURI -- namespace of attribute value
localName -- name of new attribute value
'''
self.logger.debug('setAttributeType: (%s,%s)', namespaceURI, localName)
value = localName
if namespaceURI:
value = '%s:%s' %(self.getPrefix(namespaceURI),localName)
xsi_prefix = self.getPrefix(self._xsi_nsuri)
self._setAttributeNS(self._xsi_nsuri, '%s:type' %xsi_prefix, value)
def createAttributeNS(self, namespace, name, value):
document = self._getOwnerDocument()
##this function doesn't exist!! it has only two arguments
attrNode = document.createAttributeNS(namespace, name, value)
def setAttributeNS(self, namespaceURI, localName, value):
'''
Keyword arguments:
namespaceURI -- namespace of attribute to create, None is for
attributes in no namespace.
localName -- local name of new attribute
value -- value of new attribute
'''
prefix = None
if namespaceURI:
try:
prefix = self.getPrefix(namespaceURI)
except KeyError, ex:
prefix = 'ns2'
self.setNamespaceAttribute(prefix, namespaceURI)
qualifiedName = localName
if prefix:
qualifiedName = '%s:%s' %(prefix, localName)
self._setAttributeNS(namespaceURI, qualifiedName, value)
def setNamespaceAttribute(self, prefix, namespaceURI):
'''
Keyword arguments:
prefix -- xmlns prefix
namespaceURI -- value of prefix
'''
self._setAttributeNS(XMLNS.BASE, 'xmlns:%s' %prefix, namespaceURI)
#############################################
#Methods for elements
#############################################
def createElementNS(self, namespace, qname):
'''
Keyword arguments:
namespace -- namespace of element to create
qname -- qualified name of new element
'''
document = self._getOwnerDocument()
node = document.createElementNS(namespace, qname)
return ElementProxy(self.sw, node)
def createAppendSetElement(self, namespaceURI, localName, prefix=None):
'''Create a new element (namespaceURI,name), append it
to current node, then set it to be the current node.
Keyword arguments:
namespaceURI -- namespace of element to create
localName -- local name of new element
prefix -- if namespaceURI is not defined, declare prefix. defaults
to 'ns1' if left unspecified.
'''
node = self.createAppendElement(namespaceURI, localName, prefix=None)
node=node._getNode()
self._setNode(node._getNode())
def createAppendElement(self, namespaceURI, localName, prefix=None):
'''Create a new element (namespaceURI,name), append it
to current node, and return the newly created node.
Keyword arguments:
namespaceURI -- namespace of element to create
localName -- local name of new element
prefix -- if namespaceURI is not defined, declare prefix. defaults
to 'ns1' if left unspecified.
'''
declare = False
qualifiedName = localName
if namespaceURI:
try:
prefix = self.getPrefix(namespaceURI)
except:
declare = True
prefix = prefix or self._getUniquePrefix()
if prefix:
qualifiedName = '%s:%s' %(prefix, localName)
node = self.createElementNS(namespaceURI, qualifiedName)
if declare:
node._setAttributeNS(XMLNS.BASE, 'xmlns:%s' %prefix, namespaceURI)
self._appendChild(node=node._getNode())
return node
def createInsertBefore(self, namespaceURI, localName, refChild):
qualifiedName = localName
prefix = self.getPrefix(namespaceURI)
if prefix:
qualifiedName = '%s:%s' %(prefix, localName)
node = self.createElementNS(namespaceURI, qualifiedName)
self._insertBefore(newChild=node._getNode(), refChild=refChild._getNode())
return node
def getElement(self, namespaceURI, localName):
'''
Keyword arguments:
namespaceURI -- namespace of element
localName -- local name of element
'''
node = self._dom.getElement(self.node, localName, namespaceURI, default=None)
if node:
return ElementProxy(self.sw, node)
return None
def getAttributeValue(self, namespaceURI, localName):
'''
Keyword arguments:
namespaceURI -- namespace of attribute
localName -- local name of attribute
'''
if self.hasAttribute(namespaceURI, localName):
attr = self.node.getAttributeNodeNS(namespaceURI,localName)
return attr.value
return None
def getValue(self):
return self._dom.getElementText(self.node, preserve_ws=True)
#############################################
#Methods for text nodes
#############################################
def createAppendTextNode(self, pyobj):
node = self.createTextNode(pyobj)
self._appendChild(node=node._getNode())
return node
def createTextNode(self, pyobj):
document = self._getOwnerDocument()
node = document.createTextNode(pyobj)
return ElementProxy(self.sw, node)
#############################################
#Methods for retrieving namespaceURI's
#############################################
def findNamespaceURI(self, qualifiedName):
parts = SplitQName(qualifiedName)
element = self._getNode()
if len(parts) == 1:
return (self._dom.findTargetNS(element), value)
return self._dom.findNamespaceURI(parts[0], element)
def resolvePrefix(self, prefix):
element = self._getNode()
return self._dom.findNamespaceURI(prefix, element)
def getSOAPEnvURI(self):
return self._soap_env_nsuri
def isEmpty(self):
return not self.node
class Collection(UserDict):
    """Helper class for maintaining ordered named collections."""
    default = lambda self,k: k.name

    def __init__(self, parent, key=None):
        """parent -- owning object, held through a weak reference.
        key -- callable mapping an item to its mapping key; defaults
        to reading the item's ``name`` attribute."""
        UserDict.__init__(self)
        self.parent = weakref.ref(parent)
        self.list = []
        self._func = key or self.default

    def __getitem__(self, key):
        # Integer keys index items by insertion order; anything else is
        # a plain dictionary lookup.
        if type(key) is type(1):
            return self.list[key]
        return self.data[key]

    def __setitem__(self, key, item):
        item.parent = weakref.ref(self)
        self.list.append(item)
        self.data[key] = item

    def keys(self):
        return [self._func(entry) for entry in self.list]

    def items(self):
        return [(self._func(entry), entry) for entry in self.list]

    def values(self):
        return self.list
class CollectionNS(UserDict):
    """Helper class for maintaining ordered named collections."""
    default = lambda self,k: k.name
    def __init__(self, parent, key=None):
        # parent -- owning object, held through a weak reference.
        # key -- callable mapping an item to its name; defaults to .name
        UserDict.__init__(self)
        self.parent = weakref.ref(parent)
        self.targetNamespace = None
        self.list = []
        self._func = key or self.default
    def __getitem__(self, key):
        # Refresh our target namespace from the parent before resolving.
        self.targetNamespace = self.parent().targetNamespace
        if type(key) is types.IntType:
            # Integer keys index items by insertion order.
            return self.list[key]
        elif self.__isSequence(key):
            # A (namespaceURI, name) pair addresses a namespace bucket.
            nsuri,name = key
            return self.data[nsuri][name]
        return self.data[self.parent().targetNamespace][key]
    def __setitem__(self, key, item):
        item.parent = weakref.ref(self)
        self.list.append(item)
        # Bucket by the item's own targetNamespace when it has one,
        # else by the parent's current target namespace.
        targetNamespace = getattr(item, 'targetNamespace', self.parent().targetNamespace)
        if not self.data.has_key(targetNamespace):
            self.data[targetNamespace] = {}
        self.data[targetNamespace][key] = item
    def __isSequence(self, key):
        # True for a two-element tuple/list, i.e. a (namespace, name) pair.
        return (type(key) in (types.TupleType,types.ListType) and len(key) == 2)
    def keys(self):
        # NOTE(review): appends one list per namespace, so this returns a
        # list of lists of (tns, name) pairs -- confirm callers expect it.
        keys = []
        for tns in self.data.keys():
            keys.append(map(lambda i: (tns,self._func(i)), self.data[tns].values()))
        return keys
    def items(self):
        return map(lambda i: (self._func(i), i), self.list)
    def values(self):
        return self.list
# This is a runtime guerrilla patch for pulldom (used by minidom) so
# that xml namespace declaration attributes are not lost in parsing.
# We need them to do correct QName linking for XML Schema and WSDL.
# The patch has been submitted to SF for the next Python version.
from xml.dom.pulldom import PullDOM, START_ELEMENT
if 1:
    def startPrefixMapping(self, prefix, uri):
        # Stash xmlns declarations so startElementNS below can re-attach
        # them as real attributes (stock pulldom silently drops them).
        if not hasattr(self, '_xmlns_attrs'):
            self._xmlns_attrs = []
        self._xmlns_attrs.append((prefix or 'xmlns', uri))
        self._ns_contexts.append(self._current_context.copy())
        self._current_context[uri] = prefix or ''
    PullDOM.startPrefixMapping = startPrefixMapping
    def startElementNS(self, name, tagName , attrs):
        # Retrieve xml namespace declaration attributes.
        xmlns_uri = 'http://www.w3.org/2000/xmlns/'
        xmlns_attrs = getattr(self, '_xmlns_attrs', None)
        if xmlns_attrs is not None:
            # Fold the stashed xmlns declarations into this element's
            # attribute set before the node is built.
            for aname, value in xmlns_attrs:
                attrs._attrs[(xmlns_uri, aname)] = value
            self._xmlns_attrs = []
        uri, localname = name
        if uri:
            # When using namespaces, the reader may or may not
            # provide us with the original name. If not, create
            # *a* valid tagName from the current context.
            if tagName is None:
                prefix = self._current_context[uri]
                if prefix:
                    tagName = prefix + ":" + localname
                else:
                    tagName = localname
            if self.document:
                node = self.document.createElementNS(uri, tagName)
            else:
                node = self.buildDocument(uri, tagName)
        else:
            # When the tagname is not prefixed, it just appears as
            # localname
            if self.document:
                node = self.document.createElement(localname)
            else:
                node = self.buildDocument(None, localname)
        for aname,value in attrs.items():
            a_uri, a_localname = aname
            if a_uri == xmlns_uri:
                # Namespace declaration: rebuild its qualified name.
                if a_localname == 'xmlns':
                    qname = a_localname
                else:
                    qname = 'xmlns:' + a_localname
                attr = self.document.createAttributeNS(a_uri, qname)
                node.setAttributeNodeNS(attr)
            elif a_uri:
                prefix = self._current_context[a_uri]
                if prefix:
                    qname = prefix + ":" + a_localname
                else:
                    qname = a_localname
                attr = self.document.createAttributeNS(a_uri, qname)
                node.setAttributeNodeNS(attr)
            else:
                attr = self.document.createAttribute(a_localname)
                node.setAttributeNode(attr)
            attr.value = value
        self.lastEvent[1] = [(START_ELEMENT, node), None]
        self.lastEvent = self.lastEvent[1]
        self.push(node)
    PullDOM.startElementNS = startElementNS
#
# This is a runtime guerrilla patch for minidom so
# that xmlns prefixed attributes don't raise AttributeErrors
# during cloning.
#
# Namespace declarations can appear in any start-tag, must look for xmlns
# prefixed attribute names during cloning.
#
# key (attr.namespaceURI, tag)
# ('http://www.w3.org/2000/xmlns/', u'xsd') <xml.dom.minidom.Attr instance at 0x82227c4>
# ('http://www.w3.org/2000/xmlns/', 'xmlns') <xml.dom.minidom.Attr instance at 0x8414b3c>
#
# xml.dom.minidom.Attr.nodeName = xmlns:xsd
# xml.dom.minidom.Attr.value = http://www.w3.org/2001/XMLSchema
if 1:
    def _clone_node(node, deep, newOwnerDocument):
        """
        Clone a node and give it the new owner document.
        Called by Node.cloneNode and Document.importNode
        """
        # User-data handlers must be told whether this is a clone within one
        # document or an import into another one.
        if node.ownerDocument.isSameNode(newOwnerDocument):
            operation = xml.dom.UserDataHandler.NODE_CLONED
        else:
            operation = xml.dom.UserDataHandler.NODE_IMPORTED
        if node.nodeType == xml.dom.minidom.Node.ELEMENT_NODE:
            clone = newOwnerDocument.createElementNS(node.namespaceURI,
                                                     node.nodeName)
            for attr in node.attributes.values():
                clone.setAttributeNS(attr.namespaceURI, attr.nodeName, attr.value)
                # Look the freshly set copy back up so we can transfer the
                # 'specified' flag. Prefixed names (including xmlns:foo
                # declarations, the reason for this guerilla patch) are keyed
                # by their local name.
                prefix, tag = xml.dom.minidom._nssplit(attr.nodeName)
                if prefix:
                    a = clone.getAttributeNodeNS(attr.namespaceURI, tag)
                else:
                    a = clone.getAttributeNodeNS(attr.namespaceURI, attr.nodeName)
                a.specified = attr.specified
            if deep:
                for child in node.childNodes:
                    c = xml.dom.minidom._clone_node(child, deep, newOwnerDocument)
                    clone.appendChild(c)
        elif node.nodeType == xml.dom.minidom.Node.DOCUMENT_FRAGMENT_NODE:
            clone = newOwnerDocument.createDocumentFragment()
            if deep:
                for child in node.childNodes:
                    c = xml.dom.minidom._clone_node(child, deep, newOwnerDocument)
                    clone.appendChild(c)
        elif node.nodeType == xml.dom.minidom.Node.TEXT_NODE:
            clone = newOwnerDocument.createTextNode(node.data)
        elif node.nodeType == xml.dom.minidom.Node.CDATA_SECTION_NODE:
            clone = newOwnerDocument.createCDATASection(node.data)
        elif node.nodeType == xml.dom.minidom.Node.PROCESSING_INSTRUCTION_NODE:
            clone = newOwnerDocument.createProcessingInstruction(node.target,
                                                                 node.data)
        elif node.nodeType == xml.dom.minidom.Node.COMMENT_NODE:
            clone = newOwnerDocument.createComment(node.data)
        elif node.nodeType == xml.dom.minidom.Node.ATTRIBUTE_NODE:
            clone = newOwnerDocument.createAttributeNS(node.namespaceURI,
                                                       node.nodeName)
            clone.specified = True
            clone.value = node.value
        elif node.nodeType == xml.dom.minidom.Node.DOCUMENT_TYPE_NODE:
            assert node.ownerDocument is not newOwnerDocument
            operation = xml.dom.UserDataHandler.NODE_IMPORTED
            clone = newOwnerDocument.implementation.createDocumentType(
                node.name, node.publicId, node.systemId)
            clone.ownerDocument = newOwnerDocument
            if deep:
                clone.entities._seq = []
                clone.notations._seq = []
                for n in node.notations._seq:
                    notation = xml.dom.minidom.Notation(n.nodeName, n.publicId, n.systemId)
                    notation.ownerDocument = newOwnerDocument
                    clone.notations._seq.append(notation)
                    if hasattr(n, '_call_user_data_handler'):
                        n._call_user_data_handler(operation, n, notation)
                for e in node.entities._seq:
                    entity = xml.dom.minidom.Entity(e.nodeName, e.publicId, e.systemId,
                                                    e.notationName)
                    entity.actualEncoding = e.actualEncoding
                    entity.encoding = e.encoding
                    entity.version = e.version
                    entity.ownerDocument = newOwnerDocument
                    clone.entities._seq.append(entity)
                    if hasattr(e, '_call_user_data_handler'):
                        # BUGFIX: was "(operation, n, entity)". 'n' is the loop
                        # variable of the notations loop above -- the wrong
                        # source node, and a NameError when there are no
                        # notations. The source of this copy is the entity 'e'.
                        e._call_user_data_handler(operation, e, entity)
        else:
            # Note the cloning of Document and DocumentType nodes is
            # implementation specific. minidom handles those cases
            # directly in the cloneNode() methods.
            raise xml.dom.NotSupportedErr("Cannot clone node %s" % repr(node))
        # Check for _call_user_data_handler() since this could conceivably
        # used with other DOM implementations (one of the FourThought
        # DOMs, perhaps?).
        if hasattr(node, '_call_user_data_handler'):
            node._call_user_data_handler(operation, node, clone)
        return clone
    xml.dom.minidom._clone_node = _clone_node
|
gpl-3.0
|
facebook/buck
|
third-party/py/pex/pex/tracer.py
|
55
|
3628
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import sys
import threading
import time
from contextlib import contextmanager
from .variables import ENV
__all__ = ('TraceLogger',)
class Trace(object):
  """One timed span in a trace tree; timing starts at construction."""

  __slots__ = ('msg', 'verbosity', 'parent', 'children', '_clock', '_start', '_stop')

  def __init__(self, msg, parent=None, verbosity=1, clock=time):
    """Start the span and, when a parent is given, link into its children."""
    self.msg = msg
    self.verbosity = verbosity
    self.children = []
    self.parent = parent
    if parent is not None:
      parent.children.append(self)
    self._clock = clock
    self._stop = None
    self._start = clock.time()

  def stop(self):
    """Record the end timestamp of the span."""
    self._stop = self._clock.time()

  def duration(self):
    """Elapsed seconds; stop() must have been called first."""
    assert self._stop is not None
    return self._stop - self._start
class TraceLogger(object):
  """
  A multi-threaded tracer.
  """

  def __init__(self, predicate=None, output=sys.stderr, clock=time, prefix=''):
    """
    If predicate specified, it should take a "verbosity" integer and determine whether
    or not to log, e.g.

      def predicate(verbosity):
        try:
          return verbosity < int(os.environ.get('APP_VERBOSITY', 0))
        except ValueError:
          return False

    output defaults to sys.stderr, but can take any file-like object.
    """
    self._predicate = predicate or (lambda verbosity: True)
    self._length = None
    self._output = output
    self._isatty = getattr(output, 'isatty', False) and output.isatty()
    self._lock = threading.RLock()
    self._local = threading.local()
    self._clock = clock
    self._prefix = prefix

  def should_log(self, V):
    """Return True if a message at verbosity V should be emitted."""
    return self._predicate(V)

  def log(self, msg, V=1, end='\n'):
    """Write msg (with the configured prefix) to the output stream.

    A carriage-return 'end' rewrites the current line on a tty; on a
    non-tty it is coerced to a newline so logs stay readable.
    """
    if not self.should_log(V):
      return
    if not self._isatty and end == '\r':
      # force newlines if we're not a tty
      end = '\n'
    trailing_whitespace = ''
    with self._lock:
      # Pad with spaces so a shorter message fully overwrites the longer
      # line previously written with a carriage return.
      if self._length and self._length > (len(self._prefix) + len(msg)):
        trailing_whitespace = ' ' * (self._length - len(msg) - len(self._prefix))
      self._output.write(''.join([self._prefix, msg, trailing_whitespace, end]))
      self._output.flush()
      self._length = (len(self._prefix) + len(msg)) if end == '\r' else 0

  def print_trace_snippet(self):
    """Render the current trace stack as one updating status line."""
    parent = self._local.parent
    parent_verbosity = parent.verbosity
    if not self.should_log(parent_verbosity):
      return
    traces = []
    while parent:
      if self.should_log(parent.verbosity):
        traces.append(parent.msg)
      parent = parent.parent
    self.log(' :: '.join(reversed(traces)), V=parent_verbosity, end='\r')

  def print_trace(self, indent=0, node=None):
    """Recursively print the timing tree rooted at node (default: current)."""
    node = node or self._local.parent
    with self._lock:
      self.log(' ' * indent + ('%s: %.1fms' % (node.msg, 1000.0 * node.duration())),
               V=node.verbosity)
      for child in node.children:
        self.print_trace(indent=indent + 2, node=child)

  @contextmanager
  def timed(self, msg, V=1):
    """Time the enclosed block as a child of the current trace (per thread).

    BUGFIX: the trace stack is now unwound in a finally block, so an
    exception raised inside the timed block no longer leaves a stale
    self._local.parent behind for subsequent traces on this thread.
    """
    parent = getattr(self._local, 'parent', None)
    self._local.parent = Trace(msg, parent=parent, verbosity=V, clock=self._clock)
    self.print_trace_snippet()
    try:
      yield
    finally:
      self._local.parent.stop()
      if self._local.parent.parent is not None:
        self._local.parent = self._local.parent.parent
      else:
        self.print_trace()
        self._local.parent = None
# Module-level singleton tracer; verbosity is driven by the PEX_VERBOSE
# setting exposed through ENV.
TRACER = TraceLogger(
    predicate=lambda verbosity: verbosity <= ENV.PEX_VERBOSE,
    prefix='pex: '
)
|
apache-2.0
|
julien78910/CouchPotatoServer
|
libs/bencode/__init__.py
|
171
|
3305
|
# The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
# Written by Petru Paler
from BTL import BTFailure
def decode_int(x, f):
    """Decode a bencoded integer ("i<digits>e") starting at offset f."""
    end = x.index('e', f + 1)
    value = int(x[f + 1:end])
    first = x[f + 1]
    if first == '-':
        # Reject "-0" and leading zeros after the minus sign.
        if x[f + 2] == '0':
            raise ValueError
    elif first == '0' and end != f + 2:
        # Reject leading zeros such as "i03e".
        raise ValueError
    return (value, end + 1)
def decode_string(x, f):
    """Decode a bencoded string ("<len>:<bytes>") starting at offset f."""
    sep = x.index(':', f)
    length = int(x[f:sep])
    # A zero-padded length (e.g. "04:") is invalid.
    if x[f] == '0' and sep != f + 1:
        raise ValueError
    start = sep + 1
    return (x[start:start + length], start + length)
def decode_list(x, f):
    """Decode a bencoded list ("l...e") starting at offset f."""
    items = []
    f += 1
    while x[f] != 'e':
        value, f = decode_func[x[f]](x, f)
        items.append(value)
    return (items, f + 1)
def decode_dict(x, f):
    """Decode a bencoded dict ("d<key><value>...e") starting at offset f."""
    result = {}
    f += 1
    while x[f] != 'e':
        # Keys are always bencoded strings.
        key, f = decode_string(x, f)
        result[key], f = decode_func[x[f]](x, f)
    return (result, f + 1)
# Dispatch table: first character of a bencoded value -> decoder.
decode_func = {
    'l': decode_list,
    'd': decode_dict,
    'i': decode_int,
}
# Any digit starts a length-prefixed string.
decode_func.update(dict.fromkeys('0123456789', decode_string))
def bdecode(x):
    """Decode one complete bencoded value; raise BTFailure on bad input."""
    try:
        value, consumed = decode_func[x[0]](x, 0)
    except (IndexError, KeyError, ValueError):
        raise BTFailure("not a valid bencoded string")
    # The whole input must be consumed; trailing bytes are an error.
    if consumed != len(x):
        raise BTFailure("invalid bencoded value (data after valid prefix)")
    return value
from types import StringType, IntType, LongType, DictType, ListType, TupleType
class Bencached(object):
    """Wrapper marking a string as already bencoded (emitted verbatim)."""

    __slots__ = ['bencoded']

    def __init__(self, s):
        # s: pre-encoded bencode data.
        self.bencoded = s
def encode_bencached(x, r):
    """Append the pre-encoded payload of a Bencached wrapper to r."""
    r.append(x.bencoded)
def encode_int(x, r):
    """Append the bencoding of integer x ("i<x>e") to r."""
    r += ['i', str(x), 'e']
def encode_bool(x, r):
    """Append the bencoding of a bool (as the integer 1 or 0) to r."""
    encode_int(1 if x else 0, r)
def encode_string(x, r):
    """Append the bencoding of string x ("<len>:<x>") to r."""
    r += [str(len(x)), ':', x]
def encode_list(x, r):
    """Append the bencoding of sequence x, wrapped in "l...e", to r."""
    r.append('l')
    for item in x:
        encode_func[type(item)](item, r)
    r.append('e')
def encode_dict(x, r):
    """Append the bencoding of dict x ("d...e") to r.

    Bencode requires dictionary keys (strings) in sorted order.
    """
    r.append('d')
    # BUGFIX: the original did "ilist = x.items(); ilist.sort()", which fails
    # on Python 3 where items() returns a view without .sort(). sorted()
    # produces the identical key order on both Python 2 and 3.
    for k, v in sorted(x.items()):
        r.extend((str(len(k)), ':', k))
        encode_func[type(v)](v, r)
    r.append('e')
# Dispatch table: Python type -> encoder function. The type names come from
# the "from types import ..." line above (Python 2 only).
encode_func = {}
encode_func[Bencached] = encode_bencached
encode_func[IntType] = encode_int
encode_func[LongType] = encode_int
encode_func[StringType] = encode_string
encode_func[ListType] = encode_list
encode_func[TupleType] = encode_list
encode_func[DictType] = encode_dict
# types.BooleanType is absent on very old Pythons; tolerate that.
try:
    from types import BooleanType
    encode_func[BooleanType] = encode_bool
except ImportError:
    pass
def bencode(x):
    """Return the bencoded representation of x as a single string."""
    pieces = []
    encode_func[type(x)](x, pieces)
    return ''.join(pieces)
|
gpl-3.0
|
antar2801/namebench
|
nb_third_party/dns/rcode.py
|
248
|
3106
|
# Copyright (C) 2001-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS Result Codes."""
import dns.exception
NOERROR = 0
FORMERR = 1
SERVFAIL = 2
NXDOMAIN = 3
NOTIMP = 4
REFUSED = 5
YXDOMAIN = 6
YXRRSET = 7
NXRRSET = 8
NOTAUTH = 9
NOTZONE = 10
BADVERS = 16
_by_text = {
'NOERROR' : NOERROR,
'FORMERR' : FORMERR,
'SERVFAIL' : SERVFAIL,
'NXDOMAIN' : NXDOMAIN,
'NOTIMP' : NOTIMP,
'REFUSED' : REFUSED,
'YXDOMAIN' : YXDOMAIN,
'YXRRSET' : YXRRSET,
'NXRRSET' : NXRRSET,
'NOTAUTH' : NOTAUTH,
'NOTZONE' : NOTZONE,
'BADVERS' : BADVERS
}
# We construct the inverse mapping programmatically to ensure that we
# cannot make any mistakes (e.g. omissions, cut-and-paste errors) that
# would cause the mapping not to be a true inverse.
_by_value = dict([(y, x) for x, y in _by_text.iteritems()])
class UnknownRcode(dns.exception.DNSException):
    """Raised when an rcode's text or value is not recognized."""
def from_text(text):
    """Convert text into an rcode.
    @param text: the texual rcode
    @type text: string
    @raises UnknownRcode: the rcode is unknown
    @rtype: int
    """
    # Numeric strings are accepted directly when in the valid 12-bit range.
    if text.isdigit():
        numeric = int(text)
        if 0 <= numeric <= 4095:
            return numeric
    value = _by_text.get(text.upper())
    if value is None:
        raise UnknownRcode
    return value
def from_flags(flags, ednsflags):
    """Return the rcode value encoded by flags and ednsflags.
    @param flags: the DNS flags
    @type flags: int
    @param ednsflags: the EDNS flags
    @type ednsflags: int
    @raises ValueError: rcode is < 0 or > 4095
    @rtype: int
    """
    # The low 4 bits live in the DNS header flags; the extended high bits
    # live in the upper byte of the EDNS flags word.
    rcode = (flags & 0x000f) | ((ednsflags >> 20) & 0xff0)
    if not 0 <= rcode <= 4095:
        raise ValueError('rcode must be >= 0 and <= 4095')
    return rcode
def to_flags(value):
    """Return a (flags, ednsflags) tuple which encodes the rcode.
    @param value: the rcode
    @type value: int
    @raises ValueError: rcode is < 0 or > 4095
    @rtype: (int, int) tuple
    """
    if value < 0 or value > 4095:
        raise ValueError('rcode must be >= 0 and <= 4095')
    v = value & 0xf
    # BUGFIX: dropped the Python-2-only long() call -- it is a NameError on
    # Python 3 and redundant on Python 2, where ints auto-promote.
    ev = (value & 0xff0) << 20
    return (v, ev)
def to_text(value):
    """Convert rcode into text.
    @param value: the rcode
    @type value: int
    @rtype: string
    """
    # Unknown rcodes are rendered as their decimal value.
    return _by_value.get(value, str(value))
|
apache-2.0
|
muzena/deluge
|
deluge/plugins/Stats/deluge/plugins/stats/core.py
|
7
|
8233
|
#
# core.py
#
# Copyright (C) 2009 Ian Martin <ianmartin@cantab.net>
# Copyright (C) 2008 Damien Churchill <damoxc@gmail.com>
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
# Copyright (C) Marcos Pinto 2007 <markybob@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
import time
import logging
from twisted.internet.task import LoopingCall
import deluge
from deluge.plugins.pluginbase import CorePluginBase
from deluge import component
from deluge import configmanager
from deluge.core.rpcserver import export
# Default plugin configuration (persisted in stats.conf).
DEFAULT_PREFS = {
    "test": "NiNiNi",
    "update_interval": 1, #2 seconds.
    "length": 150, # 2 seconds * 150 --> 5 minutes.
}
# Default lifetime totals (persisted in stats.totals).
DEFAULT_TOTALS = {
    "total_upload": 0,
    "total_download": 0,
    "total_payload_upload": 0,
    "total_payload_download": 0,
    "stats": {}
}
log = logging.getLogger(__name__)
def get_key(config, key):
    """Return config[key], or None when the key is missing."""
    try:
        value = config[key]
    except KeyError:
        return None
    return value
def mean(items):
    """Return the arithmetic mean of *items*, or 0 when it cannot be computed.

    BUGFIX: the blanket "except Exception" is narrowed to the two expected
    failure modes -- empty input (ZeroDivisionError) and non-numeric or
    unsized input (TypeError) -- so genuine programming errors surface.
    """
    try:
        return sum(items) / len(items)
    except (TypeError, ZeroDivisionError):
        return 0
class Core(CorePluginBase):
    """Deluge Stats plugin core: samples session statistics on a timer,
    keeps rolling histories at several resolutions, and persists totals."""
    totals = {} #class var to catch only updating this once per session in enable.
    # Called by deluge when the plugin is enabled: set up the per-interval
    # history buffers, load persisted totals, and start the update/save timers.
    def enable(self):
        log.debug("Stats plugin enabled")
        self.core = component.get("Core")
        self.stats ={}
        self.count = {}
        # Sampling resolutions, in units of update_interval ticks.
        self.intervals = [1, 5, 30, 300]
        self.last_update = {}
        t = time.time()
        for i in self.intervals:
            self.stats[i] = {}
            self.last_update[i] = t
            self.count[i] = 0
        self.config = configmanager.ConfigManager("stats.conf", DEFAULT_PREFS)
        self.saved_stats = configmanager.ConfigManager("stats.totals", DEFAULT_TOTALS)
        if self.totals == {}:
            self.totals.update(self.saved_stats.config)
        self.length = self.config["length"]
        #self.stats = get_key(self.saved_stats, "stats") or {}
        self.stats_keys = []
        self.add_stats(
            'upload_rate',
            'download_rate',
            'num_connections',
            'dht_nodes',
            'dht_cache_nodes',
            'dht_torrents',
            'num_peers',
        )
        self.update_stats()
        self.update_timer = LoopingCall(self.update_stats)
        self.update_timer.start(self.config["update_interval"])
        self.save_timer = LoopingCall(self.save_stats)
        self.save_timer.start(60)
    # Called by deluge when the plugin is disabled: persist and stop timers.
    def disable(self):
        self.save_stats()
        try:
            self.update_timer.stop()
            self.save_timer.stop()
        except:
            pass
    # Register stat names, creating an empty history list per interval.
    def add_stats(self, *stats):
        for stat in stats:
            if stat not in self.stats_keys:
                self.stats_keys.append(stat)
            for i in self.intervals:
                if stat not in self.stats[i]:
                    self.stats[i][stat] = []
    # Timer callback: sample the session and push values into the 1-tick
    # history, then cascade averages into the coarser intervals.
    def update_stats(self):
        try:
            #Get all possible stats!
            stats = {}
            for key in self.stats_keys:
                #try all keys we have, very inefficient but saves having to
                #work out where a key comes from...
                try:
                    stats.update(self.core.get_session_status([key]))
                except AttributeError:
                    pass
            stats["num_connections"] = self.core.get_num_connections()
            stats.update(self.core.get_config_values(["max_download",
                                                      "max_upload",
                                                      "max_num_connections"]))
            # status = self.core.session.status()
            # for stat in dir(status):
            #     if not stat.startswith('_') and stat not in stats:
            #         stats[stat] = getattr(status, stat, None)
            update_time = time.time()
            self.last_update[1] = update_time
            #extract the ones we are interested in
            #adding them to the 1s array
            for stat, stat_list in self.stats[1].iteritems():
                if stat in stats:
                    stat_list.insert(0, int(stats[stat]))
                else:
                    stat_list.insert(0, 0)
                if len(stat_list) > self.length:
                    stat_list.pop()
            # Every `interval` ticks, fold `multiplier` samples of the `base`
            # interval into one averaged sample of this interval.
            def update_interval(interval, base, multiplier):
                self.count[interval] = self.count[interval] + 1
                if self.count[interval] >= interval:
                    self.last_update[interval] = update_time
                    self.count[interval] = 0
                    current_stats = self.stats[interval]
                    for stat, stat_list in self.stats[base].iteritems():
                        try:
                            avg = mean(stat_list[0:multiplier])
                        except ValueError:
                            avg = 0
                        current_stats[stat].insert(0, avg)
                        if len(current_stats[stat]) > self.length:
                            current_stats[stat].pop()
            update_interval(5, 1, 5)
            update_interval(30, 5, 6)
            update_interval(300, 30, 10)
        except Exception, e:
            log.error("Stats update error %s" % e)
        return True
    # Timer callback (every 60s): persist histories and lifetime totals.
    def save_stats(self):
        try:
            self.saved_stats["stats"] = self.stats
            self.saved_stats.config.update(self.get_totals())
            self.saved_stats.save()
        except Exception, e:
            log.error("Stats save error", e)
        return True
    # export:
    # RPC: return the histories for `keys` at one of self.intervals,
    # plus bookkeeping entries (_last_update, _length, _update_interval).
    @export
    def get_stats(self, keys, interval):
        if interval not in self.intervals:
            return None
        stats_dict = {}
        for key in keys:
            if key in self.stats[interval]:
                stats_dict[key] = self.stats[interval][key]
        stats_dict["_last_update"] = self.last_update[interval]
        stats_dict["_length"] = self.config["length"]
        stats_dict["_update_interval"] = interval
        return stats_dict
    # RPC: lifetime totals = persisted totals + current session totals.
    @export
    def get_totals(self):
        result = {}
        session_totals = self.get_session_totals()
        for key in session_totals:
            result[key] = self.totals[key] + session_totals[key]
        return result
    # RPC: raw byte counters for the current libtorrent session.
    @export
    def get_session_totals(self):
        status = self.core.session.status()
        return {
            "total_upload": status.total_upload,
            "total_download": status.total_download,
            "total_payload_upload": status.total_payload_upload,
            "total_payload_download": status.total_payload_download
        }
    @export
    def set_config(self, config):
        "sets the config dictionary"
        for key in config.keys():
            self.config[key] = config[key]
        self.config.save()
    @export
    def get_config(self):
        "returns the config dictionary"
        return self.config.config
    @export
    def get_intervals(self):
        "Returns the available resolutions"
        return self.intervals
|
gpl-3.0
|
beckastar/django
|
django/contrib/gis/gdal/prototypes/geom.py
|
79
|
4718
|
from ctypes import c_char_p, c_double, c_int, c_void_p, POINTER
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.errcheck import check_envelope
from django.contrib.gis.gdal.prototypes.generation import (const_string_output,
double_output, geom_output, int_output, srs_output, string_output, void_output)
### Generation routines specific to this module ###
def env_func(f, argtypes):
    "For getting OGREnvelopes."
    # check_envelope extracts the OGREnvelope out-parameter for the caller.
    f.restype = None
    f.argtypes = argtypes
    f.errcheck = check_envelope
    return f
def pnt_func(f):
    "For accessing point information."
    # Point accessors take (geometry handle, point index) and return a double.
    return double_output(f, [c_void_p, c_int])
def topology_func(f):
    """Configure a binary OGR topology routine: (geom, geom) -> bool.

    BUGFIX: the attribute was misspelled "errchck", so ctypes silently
    ignored it and callers received a raw int. Note that ctypes invokes
    errcheck as errcheck(result, func, arguments), so a bare ``bool``
    would also be wrong; a lambda with the proper signature is used.
    """
    f.argtypes = [c_void_p, c_void_p]
    f.restype = c_int
    f.errcheck = lambda result, func, cargs: bool(result)
    return f
### OGR_G ctypes function prototypes ###
# Every c_void_p argument below is an opaque OGRGeometryH handle into
# the GDAL/OGR C library (lgdal).
# GeoJSON routines.
from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p])
to_json = string_output(lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding='ascii')
to_kml = string_output(lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding='ascii')
# GetX, GetY, GetZ all return doubles.
getx = pnt_func(lgdal.OGR_G_GetX)
gety = pnt_func(lgdal.OGR_G_GetY)
getz = pnt_func(lgdal.OGR_G_GetZ)
# Geometry creation routines.
from_wkb = geom_output(lgdal.OGR_G_CreateFromWkb, [c_char_p, c_void_p, POINTER(c_void_p), c_int], offset=-2)
from_wkt = geom_output(lgdal.OGR_G_CreateFromWkt, [POINTER(c_char_p), c_void_p, POINTER(c_void_p)], offset=-1)
create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int])
clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p])
get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int])
get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p])
geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p])
geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p])
geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p])
geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p])
geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p])
# Geometry modification routines.
add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p])
import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)])
# Destroys a geometry
destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False)
# Geometry export routines.
to_wkb = void_output(lgdal.OGR_G_ExportToWkb, None, errcheck=True) # special handling for WKB.
to_wkt = string_output(lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_gml = string_output(lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding='ascii')
get_wkbsize = int_output(lgdal.OGR_G_WkbSize, [c_void_p])
# Geometry spatial-reference related routines.
assign_srs = void_output(lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False)
get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p])
# Geometry properties
get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p])
get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p])
get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p])
get_coord_dim = int_output(lgdal.OGR_G_GetCoordinateDimension, [c_void_p])
set_coord_dim = void_output(lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False)
get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(lgdal.OGR_G_GetGeometryName, [c_void_p], decoding='ascii')
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
get_point = void_output(lgdal.OGR_G_GetPoint, [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False)
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)
# Topology routines. Each takes two geometry handles and returns a truth value.
ogr_contains = topology_func(lgdal.OGR_G_Contains)
ogr_crosses = topology_func(lgdal.OGR_G_Crosses)
ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint)
ogr_equals = topology_func(lgdal.OGR_G_Equals)
ogr_intersects = topology_func(lgdal.OGR_G_Intersects)
ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps)
ogr_touches = topology_func(lgdal.OGR_G_Touches)
ogr_within = topology_func(lgdal.OGR_G_Within)
# Transformation routines.
geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p])
geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p])
# For retrieving the envelope of the geometry.
get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
|
bsd-3-clause
|
dtroyer/python-openstacksdk
|
openstack/network/v2/metering_label.py
|
1
|
1534
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.network import network_service
from openstack import resource
class MeteringLabel(resource.Resource):
    """Representation of a Neutron metering label REST resource
    (served under /metering/metering-labels)."""
    resource_key = 'metering_label'
    resources_key = 'metering_labels'
    base_path = '/metering/metering-labels'
    service = network_service.NetworkService()
    # capabilities
    allow_create = True
    allow_get = True
    allow_update = True
    allow_delete = True
    allow_list = True
    # Query parameters accepted when listing; the server-side names
    # 'shared' and 'tenant_id' are mapped to the SDK attribute names.
    _query_mapping = resource.QueryParameters(
        'description', 'name',
        is_shared='shared',
        project_id='tenant_id'
    )
    # Properties
    #: Description of the metering label.
    description = resource.Body('description')
    #: Name of the metering label.
    name = resource.Body('name')
    #: The ID of the project this metering label is associated with.
    project_id = resource.Body('tenant_id')
    #: Indicates whether this label is shared across all tenants.
    #: *Type: bool*
    is_shared = resource.Body('shared', type=bool)
|
apache-2.0
|
jamesblunt/sympy
|
sympy/assumptions/handlers/matrices.py
|
71
|
13732
|
"""
This module contains query handlers responsible for calculus queries:
infinitesimal, bounded, etc.
"""
from __future__ import print_function, division
from sympy.logic.boolalg import conjuncts
from sympy.assumptions import Q, ask
from sympy.assumptions.handlers import CommonHandler, test_closed_group
from sympy.matrices.expressions import MatMul, MatrixExpr
from sympy.core.logic import fuzzy_and
from sympy.utilities.iterables import sift
from sympy.core import Basic
from functools import partial
def _Factorization(predicate, expr, assumptions):
if predicate in expr.predicates:
return True
class AskSquareHandler(CommonHandler):
    """
    Handler for key 'square'
    """

    @staticmethod
    def MatrixExpr(expr, assumptions):
        # A matrix expression is square iff its two dimensions agree.
        rows, cols = expr.shape
        return rows == cols
class AskSymmetricHandler(CommonHandler):
    """
    Handler for key 'symmetric'
    """
    # Handlers use three-valued logic: True/False when decidable, and an
    # implicit None (falling off the end) when the question is open.
    @staticmethod
    def MatMul(expr, assumptions):
        factor, mmul = expr.as_coeff_mmul()
        if all(ask(Q.symmetric(arg), assumptions) for arg in mmul.args):
            return True
        # M * A * M.T is symmetric when A is (congruence transformation).
        if len(mmul.args) >= 2 and mmul.args[0] == mmul.args[-1].T:
            return ask(Q.symmetric(MatMul(*mmul.args[1:-1])), assumptions)
    @staticmethod
    def MatAdd(expr, assumptions):
        return all(ask(Q.symmetric(arg), assumptions) for arg in expr.args)
    @staticmethod
    def MatrixSymbol(expr, assumptions):
        if not expr.is_square:
            return False
        if Q.symmetric(expr) in conjuncts(assumptions):
            return True
    @staticmethod
    def ZeroMatrix(expr, assumptions):
        # A zero matrix is symmetric exactly when it is square.
        return ask(Q.square(expr), assumptions)
    @staticmethod
    def Transpose(expr, assumptions):
        return ask(Q.symmetric(expr.arg), assumptions)
    Inverse = Transpose
    @staticmethod
    def MatrixSlice(expr, assumptions):
        # Only diagonal slices of a symmetric matrix are again symmetric.
        if not expr.on_diag:
            return None
        else:
            return ask(Q.symmetric(expr.parent), assumptions)
    Identity = staticmethod(CommonHandler.AlwaysTrue)
class AskInvertibleHandler(CommonHandler):
    """
    Handler for key 'invertible'
    """
    @staticmethod
    def MatMul(expr, assumptions):
        factor, mmul = expr.as_coeff_mmul()
        if all(ask(Q.invertible(arg), assumptions) for arg in mmul.args):
            return True
        # One singular factor makes the whole product singular.
        if any(ask(Q.invertible(arg), assumptions) is False
               for arg in mmul.args):
            return False
    @staticmethod
    def MatAdd(expr, assumptions):
        # Sums of invertible matrices need not be invertible: unknown.
        return None
    @staticmethod
    def MatrixSymbol(expr, assumptions):
        if not expr.is_square:
            return False
        if Q.invertible(expr) in conjuncts(assumptions):
            return True
    Identity, Inverse = [staticmethod(CommonHandler.AlwaysTrue)]*2
    ZeroMatrix = staticmethod(CommonHandler.AlwaysFalse)
    @staticmethod
    def Transpose(expr, assumptions):
        return ask(Q.invertible(expr.arg), assumptions)
    @staticmethod
    def MatrixSlice(expr, assumptions):
        if not expr.on_diag:
            return None
        else:
            return ask(Q.invertible(expr.parent), assumptions)
class AskOrthogonalHandler(CommonHandler):
    """
    Handler for key 'orthogonal'
    """
    predicate = Q.orthogonal
    @staticmethod
    def MatMul(expr, assumptions):
        factor, mmul = expr.as_coeff_mmul()
        # Products of orthogonal matrices are orthogonal, but only with a
        # unit scalar coefficient.
        if (all(ask(Q.orthogonal(arg), assumptions) for arg in mmul.args) and
                factor == 1):
            return True
        if any(ask(Q.invertible(arg), assumptions) is False
               for arg in mmul.args):
            return False
    @staticmethod
    def MatAdd(expr, assumptions):
        # Only the trivial one-term sum preserves orthogonality in general.
        if (len(expr.args) == 1 and
                ask(Q.orthogonal(expr.args[0]), assumptions)):
            return True
    @staticmethod
    def MatrixSymbol(expr, assumptions):
        if not expr.is_square:
            return False
        if Q.orthogonal(expr) in conjuncts(assumptions):
            return True
    Identity = staticmethod(CommonHandler.AlwaysTrue)
    ZeroMatrix = staticmethod(CommonHandler.AlwaysFalse)
    @staticmethod
    def Transpose(expr, assumptions):
        return ask(Q.orthogonal(expr.arg), assumptions)
    Inverse = Transpose
    @staticmethod
    def MatrixSlice(expr, assumptions):
        if not expr.on_diag:
            return None
        else:
            return ask(Q.orthogonal(expr.parent), assumptions)
    Factorization = staticmethod(partial(_Factorization, Q.orthogonal))
class AskUnitaryHandler(CommonHandler):
    """
    Handler for key 'unitary'
    """
    predicate = Q.unitary
    @staticmethod
    def MatMul(expr, assumptions):
        factor, mmul = expr.as_coeff_mmul()
        # Unlike the orthogonal case, any scalar of modulus 1 is allowed.
        if (all(ask(Q.unitary(arg), assumptions) for arg in mmul.args) and
                abs(factor) == 1):
            return True
        if any(ask(Q.invertible(arg), assumptions) is False
               for arg in mmul.args):
            return False
    @staticmethod
    def MatrixSymbol(expr, assumptions):
        if not expr.is_square:
            return False
        if Q.unitary(expr) in conjuncts(assumptions):
            return True
    @staticmethod
    def Transpose(expr, assumptions):
        return ask(Q.unitary(expr.arg), assumptions)
    Inverse = Transpose
    @staticmethod
    def MatrixSlice(expr, assumptions):
        if not expr.on_diag:
            return None
        else:
            return ask(Q.unitary(expr.parent), assumptions)
    @staticmethod
    def DFT(expr, assumptions):
        # The discrete Fourier transform matrix is unitary by construction.
        return True
    Factorization = staticmethod(partial(_Factorization, Q.unitary))
    Identity = staticmethod(CommonHandler.AlwaysTrue)
    ZeroMatrix = staticmethod(CommonHandler.AlwaysFalse)
class AskFullRankHandler(CommonHandler):
    """
    Handler for key 'fullrank'
    """
    @staticmethod
    def MatMul(expr, assumptions):
        if all(ask(Q.fullrank(arg), assumptions) for arg in expr.args):
            return True
    Identity = staticmethod(CommonHandler.AlwaysTrue)
    ZeroMatrix = staticmethod(CommonHandler.AlwaysFalse)
    @staticmethod
    def Transpose(expr, assumptions):
        return ask(Q.fullrank(expr.arg), assumptions)
    Inverse = Transpose
    @staticmethod
    def MatrixSlice(expr, assumptions):
        # Any slice of an orthogonal matrix has full rank.
        if ask(Q.orthogonal(expr.parent), assumptions):
            return True
class AskPositiveDefiniteHandler(CommonHandler):
    """
    Handler for key 'positive_definite'
    """
    @staticmethod
    def MatMul(expr, assumptions):
        factor, mmul = expr.as_coeff_mmul()
        if (all(ask(Q.positive_definite(arg), assumptions)
                for arg in mmul.args) and factor > 0):
            return True
        # M * A * M.T is positive definite when A is and M has full rank.
        if (len(mmul.args) >= 2
                and mmul.args[0] == mmul.args[-1].T
                and ask(Q.fullrank(mmul.args[0]), assumptions)):
            return ask(Q.positive_definite(
                MatMul(*mmul.args[1:-1])), assumptions)
    @staticmethod
    def MatAdd(expr, assumptions):
        if all(ask(Q.positive_definite(arg), assumptions)
                for arg in expr.args):
            return True
    @staticmethod
    def MatrixSymbol(expr, assumptions):
        if not expr.is_square:
            return False
        if Q.positive_definite(expr) in conjuncts(assumptions):
            return True
    Identity = staticmethod(CommonHandler.AlwaysTrue)
    ZeroMatrix = staticmethod(CommonHandler.AlwaysFalse)
    @staticmethod
    def Transpose(expr, assumptions):
        return ask(Q.positive_definite(expr.arg), assumptions)
    Inverse = Transpose
    @staticmethod
    def MatrixSlice(expr, assumptions):
        if not expr.on_diag:
            return None
        else:
            return ask(Q.positive_definite(expr.parent), assumptions)
class AskUpperTriangularHandler(CommonHandler):
    """
    Handler for key 'upper_triangular'
    """
    @staticmethod
    def MatMul(expr, assumptions):
        factor, matrices = expr.as_coeff_matrices()
        if all(ask(Q.upper_triangular(m), assumptions) for m in matrices):
            return True
    @staticmethod
    def MatAdd(expr, assumptions):
        if all(ask(Q.upper_triangular(arg), assumptions) for arg in expr.args):
            return True
    @staticmethod
    def MatrixSymbol(expr, assumptions):
        if Q.upper_triangular(expr) in conjuncts(assumptions):
            return True
    Identity, ZeroMatrix = [staticmethod(CommonHandler.AlwaysTrue)]*2
    @staticmethod
    def Transpose(expr, assumptions):
        # Transposition swaps upper and lower triangularity.
        return ask(Q.lower_triangular(expr.arg), assumptions)
    @staticmethod
    def Inverse(expr, assumptions):
        # Inversion preserves triangular shape.
        return ask(Q.upper_triangular(expr.arg), assumptions)
    @staticmethod
    def MatrixSlice(expr, assumptions):
        if not expr.on_diag:
            return None
        else:
            return ask(Q.upper_triangular(expr.parent), assumptions)
    Factorization = staticmethod(partial(_Factorization, Q.upper_triangular))
class AskLowerTriangularHandler(CommonHandler):
    """Handler for the 'lower_triangular' predicate."""

    @staticmethod
    def MatMul(expr, assumptions):
        # Scalar factors do not affect triangularity; inspect the matrices.
        _, matrices = expr.as_coeff_matrices()
        for m in matrices:
            if not ask(Q.lower_triangular(m), assumptions):
                return None
        return True

    @staticmethod
    def MatAdd(expr, assumptions):
        # Sums of lower-triangular matrices stay lower triangular.
        for term in expr.args:
            if not ask(Q.lower_triangular(term), assumptions):
                return None
        return True

    @staticmethod
    def MatrixSymbol(expr, assumptions):
        return True if Q.lower_triangular(expr) in conjuncts(assumptions) else None

    Identity = staticmethod(CommonHandler.AlwaysTrue)
    ZeroMatrix = staticmethod(CommonHandler.AlwaysTrue)

    @staticmethod
    def Transpose(expr, assumptions):
        # Transposing flips lower to upper triangular.
        return ask(Q.upper_triangular(expr.arg), assumptions)

    @staticmethod
    def Inverse(expr, assumptions):
        return ask(Q.lower_triangular(expr.arg), assumptions)

    @staticmethod
    def MatrixSlice(expr, assumptions):
        # Only on-diagonal slices inherit triangularity from the parent.
        if not expr.on_diag:
            return None
        return ask(Q.lower_triangular(expr.parent), assumptions)

    Factorization = staticmethod(partial(_Factorization, Q.lower_triangular))
class AskDiagonalHandler(CommonHandler):
    """Handler for the 'diagonal' predicate."""

    @staticmethod
    def MatMul(expr, assumptions):
        # Products of diagonal matrices (times any scalar) are diagonal.
        _, matrices = expr.as_coeff_matrices()
        for m in matrices:
            if not ask(Q.diagonal(m), assumptions):
                return None
        return True

    @staticmethod
    def MatAdd(expr, assumptions):
        # Sums of diagonal matrices are diagonal.
        for term in expr.args:
            if not ask(Q.diagonal(term), assumptions):
                return None
        return True

    @staticmethod
    def MatrixSymbol(expr, assumptions):
        return True if Q.diagonal(expr) in conjuncts(assumptions) else None

    Identity = staticmethod(CommonHandler.AlwaysTrue)
    ZeroMatrix = staticmethod(CommonHandler.AlwaysTrue)

    @staticmethod
    def Transpose(expr, assumptions):
        # Diagonality is preserved by transposition.
        return ask(Q.diagonal(expr.arg), assumptions)

    @staticmethod
    def Inverse(expr, assumptions):
        return ask(Q.diagonal(expr.arg), assumptions)

    @staticmethod
    def MatrixSlice(expr, assumptions):
        # Only on-diagonal slices inherit diagonality from the parent.
        if not expr.on_diag:
            return None
        return ask(Q.diagonal(expr.parent), assumptions)

    @staticmethod
    def DiagonalMatrix(expr, assumptions):
        # The node type itself guarantees diagonality.
        return True

    Factorization = staticmethod(partial(_Factorization, Q.diagonal))
def BM_elements(predicate, expr, assumptions):
    """Apply an element predicate to every block of a BlockMatrix.

    Returns True only when the predicate holds for each block.
    """
    for block in expr.blocks:
        if not ask(predicate(block), assumptions):
            return False
    return True
def MS_elements(predicate, expr, assumptions):
    """A matrix slice shares the element predicate of its parent matrix."""
    parent_query = predicate(expr.parent)
    return ask(parent_query, assumptions)
def MatMul_elements(matrix_predicate, scalar_predicate, expr, assumptions):
    """Check elementwise closure of a MatMul.

    Splits the product's arguments into scalar factors and matrix factors,
    then requires the scalars to be closed under ``scalar_predicate`` and
    the matrices under ``matrix_predicate``.
    """
    partitioned = sift(expr.args, lambda arg: isinstance(arg, MatrixExpr))
    scalar_part = Basic(*partitioned[False])
    matrix_part = Basic(*partitioned[True])
    checks = [
        test_closed_group(scalar_part, assumptions, scalar_predicate),
        test_closed_group(matrix_part, assumptions, matrix_predicate),
    ]
    return fuzzy_and(checks)
class AskIntegerElementsHandler(CommonHandler):
    """Handler for Q.integer_elements."""

    @staticmethod
    def MatAdd(expr, assumptions):
        # Integer entries are closed under addition.
        return test_closed_group(expr, assumptions, Q.integer_elements)

    # These node types all reduce to the same closed-group test.
    HadamardProduct = MatAdd
    Determinant = MatAdd
    Trace = MatAdd
    Transpose = MatAdd

    ZeroMatrix = staticmethod(CommonHandler.AlwaysTrue)
    Identity = staticmethod(CommonHandler.AlwaysTrue)

    MatMul = staticmethod(partial(MatMul_elements, Q.integer_elements,
                                  Q.integer))
    MatrixSlice = staticmethod(partial(MS_elements, Q.integer_elements))
    BlockMatrix = staticmethod(partial(BM_elements, Q.integer_elements))
class AskRealElementsHandler(CommonHandler):
    """Handler for Q.real_elements."""

    @staticmethod
    def MatAdd(expr, assumptions):
        # Real entries are closed under addition.
        return test_closed_group(expr, assumptions, Q.real_elements)

    # These node types all reduce to the same closed-group test.
    HadamardProduct = MatAdd
    Determinant = MatAdd
    Trace = MatAdd
    Transpose = MatAdd
    Inverse = MatAdd
    Factorization = MatAdd

    MatMul = staticmethod(partial(MatMul_elements, Q.real_elements, Q.real))
    MatrixSlice = staticmethod(partial(MS_elements, Q.real_elements))
    BlockMatrix = staticmethod(partial(BM_elements, Q.real_elements))
class AskComplexElementsHandler(CommonHandler):
    """Handler for Q.complex_elements."""

    @staticmethod
    def MatAdd(expr, assumptions):
        # Complex entries are closed under addition.
        return test_closed_group(expr, assumptions, Q.complex_elements)

    # These node types all reduce to the same closed-group test.
    HadamardProduct = MatAdd
    Determinant = MatAdd
    Trace = MatAdd
    Transpose = MatAdd
    Inverse = MatAdd
    Factorization = MatAdd

    MatMul = staticmethod(partial(MatMul_elements, Q.complex_elements,
                                  Q.complex))
    MatrixSlice = staticmethod(partial(MS_elements, Q.complex_elements))
    BlockMatrix = staticmethod(partial(BM_elements, Q.complex_elements))

    DFT = staticmethod(CommonHandler.AlwaysTrue)
|
bsd-3-clause
|
xcgoner/dist-mxnet
|
tools/caffe_converter/test_converter.py
|
8
|
4408
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Test converted models
"""
import os
import argparse
import sys
import logging
import mxnet as mx
from convert_caffe_modelzoo import convert_caffe_model, get_model_meta_info, download_caffe_model
from compare_layers import convert_and_compare_caffe_to_mxnet
curr_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.join(curr_path, "../../example/image-classification"))
from test_score import download_data # pylint: disable=wrong-import-position
from score import score # pylint: disable=wrong-import-position
logging.basicConfig(level=logging.DEBUG)
def test_imagenet_model_performance(model_name, val_data, gpus, batch_size):
    """Score a converted Caffe model on ImageNet validation data.

    Converts ``model_name`` from the Caffe model zoo, scores it on
    ``val_data`` and asserts that top-1 / top-5 accuracy is within 3% of
    the reference numbers recorded in the model's meta info.

    :param model_name: key into the Caffe model zoo (see convert_caffe_modelzoo.py)
    :param val_data: path to the validation data record file
    :param gpus: list of gpu ids, or [-1] for cpu-only
    :param batch_size: total batch size across all devices
    """
    logging.info('test performance of model: %s', model_name)
    meta_info = get_model_meta_info(model_name)
    [model_name, mean] = convert_caffe_model(model_name, meta_info)
    sym, arg_params, aux_params = mx.model.load_checkpoint(model_name, 0)
    acc = [mx.metric.create('acc'), mx.metric.create('top_k_accuracy', top_k=5)]
    # 'mean' is either a path to a mean image file or an RGB triple.
    if isinstance(mean, str):
        mean_args = {'mean_img': mean}
    else:
        mean_args = {'rgb_mean': ','.join([str(i) for i in mean])}
    # Bug fix: replaced a stray debug print(val_data) with logging so the
    # output respects the module's configured logging level.
    logging.info('validation data: %s', val_data)
    gpus_string = '' if gpus[0] == -1 else ','.join([str(i) for i in gpus])
    (speed,) = score(model=(sym, arg_params, aux_params),
                     data_val=val_data,
                     label_name='prob_label',
                     metrics=acc,
                     gpus=gpus_string,
                     batch_size=batch_size,
                     max_num_examples=500,
                     **mean_args)
    logging.info('speed : %f image/sec', speed)
    for a in acc:
        logging.info(a.get())
    # Allow up to 3% accuracy drop versus the reference numbers.
    max_performance_diff_allowed = 0.03
    assert acc[0].get()[1] > meta_info['top-1-acc'] - max_performance_diff_allowed
    assert acc[1].get()[1] > meta_info['top-5-acc'] - max_performance_diff_allowed
def test_model_weights_and_outputs(model_name, image_url, gpu):
    """
    Run the layer comparison on one of the known caffe models.
    :param model_name: available models are listed in convert_caffe_modelzoo.py
    :param image_url: image file or url to run inference on
    :param gpu: gpu to use, -1 for cpu
    """
    logging.info('test weights and outputs of model: %s', model_name)
    meta_info = get_model_meta_info(model_name)
    prototxt, caffemodel, mean = download_caffe_model(model_name, meta_info,
                                                      dst_dir='./model')
    convert_and_compare_caffe_to_mxnet(image_url, gpu, prototxt, caffemodel,
                                       mean, mean_diff_allowed=1e-03,
                                       max_diff_allowed=1e-01)
def main():
    """Entrypoint for test_converter: parse args, pick devices, run tests."""
    parser = argparse.ArgumentParser(description='Test Caffe converter')
    parser.add_argument('--cpu', action='store_true', help='use cpu?')
    parser.add_argument(
        '--image_url', type=str,
        default='http://writm.com/wp-content/uploads/2016/08/Cat-hd-wallpapers.jpg',
        help='input image to test inference, can be either file path or url')
    args = parser.parse_args()
    # Pick devices and scale the batch size with the device count.
    if args.cpu:
        gpus = [-1]
        batch_size = 32
    else:
        gpus = mx.test_utils.list_gpus()
        assert gpus, 'At least one GPU is needed to run test_converter in GPU mode'
        batch_size = 32 * len(gpus)
    val = download_data()
    for model in ['bvlc_googlenet', 'vgg-16', 'resnet-50']:
        test_model_weights_and_outputs(model, args.image_url, gpus[0])
        test_imagenet_model_performance(model, val, gpus, batch_size)


if __name__ == '__main__':
    main()
|
apache-2.0
|
blue-yonder/pyscaffold
|
tests/test_termui.py
|
1
|
2915
|
# -*- coding: utf-8 -*-
from importlib import reload
from io import StringIO
import pytest
from pyscaffold import termui
@pytest.fixture(scope="module")
def after():
    """Reload termui once the module's tests finish.

    Ensures the module-level constants are recomputed with the original
    (unmocked) logic for whatever runs afterwards.
    """
    yield
    reload(termui)
@pytest.fixture
def fake_tty(monkeypatch):
    """An in-memory stream whose ``isatty`` reports True.

    Workaround for the limits of pytest stdout/stderr capture: a realistic
    test would use a real TTY via ``capsys``'s ``disabled`` context manager,
    but that currently breaks under pytest-xdist
    (https://github.com/pytest-dev/pytest/issues/1991), so a patched
    StringIO stands in for a terminal instead.
    """
    stream = StringIO()
    monkeypatch.setattr(stream, "isatty", lambda *_: True)
    return stream
def test_isatty_file(tmpfolder, orig_isatty):
    """A regular on-disk file handle is not a TTY."""
    handle = tmpfolder.join("file.txt").ensure().open()
    assert not termui.isatty(handle)


def test_isatty_buffer(orig_isatty):
    """An in-memory buffer is not a TTY."""
    assert not termui.isatty(StringIO())


def test_isatty_random_obj(orig_isatty):
    """Objects without a usable isatty() are not TTYs."""
    assert not termui.isatty([1, 2, 3])


def test_isatty_tty(fake_tty, orig_isatty):
    """A stream whose isatty() returns True is detected as a TTY."""
    assert termui.isatty(fake_tty)
def test_support_with_curses_no_colorama(
    fake_tty, curses_mock, no_colorama_mock, orig_isatty
):
    """curses alone is enough for color support."""
    reload(termui)  # recompute module constants under the mocks
    assert termui.SYSTEM_SUPPORTS_COLOR
    assert termui.supports_color(fake_tty)


def test_support_no_curses_with_colorama(
    fake_tty, no_curses_mock, colorama_mock, orig_isatty
):
    """colorama alone is enough for color support."""
    reload(termui)  # recompute module constants under the mocks
    assert termui.SYSTEM_SUPPORTS_COLOR
    assert termui.supports_color(fake_tty)


def test_support_with_curses_with_colorama(
    fake_tty, curses_mock, colorama_mock, orig_isatty
):
    """Both backends together also give color support."""
    reload(termui)  # recompute module constants under the mocks
    assert termui.SYSTEM_SUPPORTS_COLOR
    assert termui.supports_color(fake_tty)


def test_support_no_colorama_no_curses(
    fake_tty, no_curses_mock, no_colorama_mock, orig_isatty
):
    """With neither backend available, color is unsupported."""
    reload(termui)  # recompute module constants under the mocks
    assert not termui.SYSTEM_SUPPORTS_COLOR
    assert not termui.supports_color(fake_tty)
def test_decorate():
    """decorate() wraps text in ANSI codes only when styles are given."""
    ansi_codes = ("\033[1m", "\033[31m", "\033[0m")  # bold, red, reset

    # With styles, all the corresponding codes appear in the output.
    styled = termui.decorate("text", "red", "bold")
    for code in ansi_codes:
        assert code in styled

    # Without styles, the text comes back untouched.
    plain = termui.decorate("text")
    for code in ansi_codes:
        assert code not in plain
    assert plain == "text"
|
mit
|
ianjuma/usiu-app-dir
|
benchcare/patients/models.py
|
1
|
5931
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.contrib import admin
class Patient(models.Model):
    """Core patient record: demographics, contact details and history."""
    # Coded choice sets for the fields below.
    GENDER = (
        ('F', 'Female'),
        ('M', 'Male'),
    )
    MARRIAGE_STATUS = (
        ('S', 'Single'),
        ('M', 'Married'),
        ('W', 'Widowed'),
        ('D', 'Divorced'),
    )
    EDUCATION = (
        ('N', 'None'),
        ('P', 'Primary'),
        ('S', 'Seconday/High School'),
        ('C', 'College/University?Graduate'),
    )
    # Indexed identifiers used for patient lookup.
    id_number = models.IntegerField(db_index=True)
    patient_number = models.IntegerField(db_index=True)
    surname = models.CharField(max_length=400)
    first_name = models.CharField(max_length=400)
    last_name = models.CharField(max_length=400)
    sex = models.CharField(max_length=2, choices=GENDER)
    # NOTE(review): auto_now overwrites this with the save date on every
    # save, which looks wrong for a date of birth -- confirm intent.
    date_of_birth = models.DateField(auto_now=True)
    marriage_status = models.CharField(max_length=4, choices=MARRIAGE_STATUS)
    occupation = models.CharField(max_length=500)
    education = models.CharField(max_length=4, choices=EDUCATION)
    telephone_number = models.IntegerField()
    post_address = models.CharField(max_length=400)
    email_address = models.EmailField(max_length=75)
    family_medical_history = models.TextField(max_length=1000)
    def __unicode__(self):
        """Display name: 'surname first_name last_name '."""
        return '%s %s %s ' % (self.surname, self.first_name, self.last_name)
    class Meta:
        verbose_name = _('Patient')
        verbose_name_plural = _('Patients')
class Next_of_Kin(models.Model):
    """Emergency-contact (next of kin) details linked to a Patient."""
    patient_details = models.ForeignKey(Patient)
    # Free-text relationship of the contact to the patient.
    kin_relation = models.CharField(max_length=200)
    kin_surname = models.CharField(max_length=400)
    kin_first_name = models.CharField(max_length=400)
    kin_last_name = models.CharField(max_length=400)
    kin_phone_number = models.IntegerField()
    kin_email_address = models.EmailField(max_length=75)
    def __unicode__(self):
        """Display: 'relation surname first_name last_name '."""
        return '%s %s %s %s ' % (self.kin_relation, self.kin_surname, self.kin_first_name, self.kin_last_name)
    class Meta:
        verbose_name = _('Next of Kin')
        verbose_name_plural = _('Next of Kins')
class Vitals(models.Model):
    """Vital-sign measurements recorded for a patient."""
    patient_vitals = models.ForeignKey(Patient)
    # NOTE(review): the measurement units were never filled in
    # ("Measured in .") -- complete these help texts.
    height = models.IntegerField(
        blank=True, null=True, help_text="Measured in .")
    weight = models.IntegerField(
        blank=True, null=True, help_text="Measured in .")
    temperature = models.IntegerField(
        blank=True, null=True, help_text="Measured in .")
    pulse = models.IntegerField(
        blank=True, null=True, help_text="Measured in .")
    respiratory_ration = models.IntegerField(
        blank=True, null=True, help_text="Measured in .")
    blood_pressure = models.IntegerField(
        blank=True, null=True, help_text="Measured in .")
    blood_oxygen = models.IntegerField(
        blank=True, null=True, help_text="Measured in .")
    def __unicode__(self):
        # NOTE(review): blood_oxygen is omitted from this summary -- confirm
        # whether that is intentional.
        return '%s %s %s %s %s %s %s ' % (self.patient_vitals, self.height, self.weight, self.temperature, self.pulse, self.respiratory_ration, self.blood_pressure)
    class Meta:
        verbose_name = _('PatientVital')
        verbose_name_plural = _('PatientVitals')
class Visits (models.Model):
    """A single patient visit, recorded by its chief complaint."""
    chief_complaint = models.TextField(max_length=1000, blank=True, null=True)
    patient_visits = models.ForeignKey(Patient, blank=True, null=True)
    def __unicode__(self):
        return self.chief_complaint
    class Meta:
        verbose_name = _('Complaint')
        verbose_name_plural = _('Complaints')
class Diagnosis(models.Model):
    """Diagnosis text plus any ordered test and its results."""
    # Tests that can be ordered alongside a diagnosis.
    TEST_CHOICES = (
        ('None', 'None'),
        ('Blood Test', 'Blood Test'),
        ('Malaria Test', 'Malaria Test'),
    )
    patient_diagnosis = models.TextField(
        max_length=5000, blank=True, null=True)
    patient_test = models.CharField(
        max_length=250, choices=TEST_CHOICES, blank=True, null=True)
    patient_test_results = models.TextField(
        max_length=5000, blank=True, null=True)
    patientdetails = models.ForeignKey(Patient, blank=True, null=True)
    def __unicode__(self):
        return self.patient_diagnosis
    class Meta:
        verbose_name = _('PatientDiagnosis')
        verbose_name_plural = _('PatientDiagnosis')
class Medication(models.Model):
    """Prescriptions and progress notes tied to a visit and diagnosis."""
    # NOTE(review): the leading underscores on these field names are
    # unconventional for Django models; renaming would require a migration.
    _patientmedication = models.TextField(
        max_length=5000, blank=True, null=True)
    _patient_non_drug_prescription = models.TextField(
        max_length=5000, blank=True, null=True)
    _patientprogress = models.TextField(max_length=5000, blank=True, null=True)
    _patientdetails = models.ForeignKey(Patient, blank=True, null=True)
    _patientvisit = models.ForeignKey(Visits)
    _patientdiagosis = models.ForeignKey(Diagnosis)
    def __unicode__(self):
        return self._patientmedication
    class Meta:
        verbose_name = _('Patient Medication')
        verbose_name_plural = _('Patient Medications')
class History(models.Model):
    """A completed-visit summary linking diagnosis, visit and discharge info."""
    diagnosis = models.ForeignKey(Diagnosis)
    # Bug fix: 1000 was previously passed positionally, which Django treats
    # as the field's verbose_name rather than a length limit.
    procedures_performed = models.TextField(max_length=1000)
    visit_dates = models.ForeignKey(Visits)
    patient_details = models.ForeignKey(Patient)
    # NOTE(review): auto_now stamps this with the save date on every save;
    # confirm that is intended for a discharge date.
    dischage_dates = models.DateField(auto_now=True)
    chief_complaint = models.TextField(max_length=1000)
    dischage_summary = models.TextField(
        max_length=1000, help_text="Please type the dischage summary here .")
    def __unicode__(self):
        # Bug fix: previously returned the Diagnosis instance itself;
        # __unicode__ must return text.
        return unicode(self.diagnosis)
    class Meta:
        verbose_name = _('Patient History')
        verbose_name_plural = _('Patient Histories')
class Documents(models.Model):
    """File attachments uploaded for a patient visit."""
    patientsdetails = models.ForeignKey(Patient)
    patientsvisit = models.ForeignKey(Visits)
    patients_documents = models.FileField(upload_to='media')
    def __unicode__(self):
        # NOTE(review): returns the FileField value rather than an explicit
        # unicode string -- confirm whether unicode(...) was intended.
        return self.patients_documents
    class Meta:
        verbose_name = _('Patient Document')
        verbose_name_plural = _('Patient Documents')
|
gpl-2.0
|
verma-varsha/zulip
|
zerver/webhooks/gosquared/view.py
|
1
|
1441
|
from __future__ import absolute_import
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import UserProfile
from django.http import HttpRequest, HttpResponse
from typing import Dict, Any, Optional, Text
BODY_TEMPLATE = '[{website_name}]({website_url}) has {user_num} visitors online.'
@api_key_only_webhook_view('GoSquared')
@has_request_variables
def api_gosquared_webhook(request, user_profile,
                          payload=REQ(argument_type='body'),
                          stream=REQ(default='gosquared'),
                          topic=REQ(default=None)):
    # type: (HttpRequest, UserProfile, Dict[str, Dict[str, Any]], Text, Text) -> HttpResponse
    """Post a GoSquared concurrent-visitors notification to a Zulip stream."""
    site = payload['siteDetails']
    account_url = 'https://www.gosquared.com/now/' + site['acct']
    body = BODY_TEMPLATE.format(website_name=site['domain'],
                                website_url=account_url,
                                user_num=payload['concurrents'])
    # allows for customisable topics: fall back to a per-site default.
    if topic is None:
        topic = 'GoSquared - {website_name}'.format(website_name=site['domain'])
    check_send_message(user_profile, request.client, 'stream', [stream],
                       topic, body)
    return json_success()
|
apache-2.0
|
hasadna/django
|
django/contrib/localflavor/pt/forms.py
|
100
|
1601
|
"""
PT-specific Form helpers
"""
from __future__ import unicode_literals
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
phone_digits_re = re.compile(r'^(\d{9}|(00|\+)\d*)$')
class PTZipCodeField(RegexField):
    """Field validating Portuguese zip codes.

    Accepts either 'XXXX-XXX' or a plain 7-digit string; the latter is
    normalized into the dashed form.
    """
    default_error_messages = {
        'invalid': _('Enter a zip code in the format XXXX-XXX.'),
    }

    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(PTZipCodeField, self).__init__(r'^(\d{4}-\d{3}|\d{7})$',
            max_length, min_length, *args, **kwargs)

    def clean(self, value):
        cleaned = super(PTZipCodeField, self).clean(value)
        if len(cleaned) != 7:
            return cleaned
        # Insert the dash: '1234567' -> '1234-567'.
        return '%s-%s' % (cleaned[:4], cleaned[4:])
class PTPhoneNumberField(Field):
    """
    Validate local Portuguese phone number (including international ones)
    It should have 9 digits (may include spaces) or start by 00 or + (international)
    """
    default_error_messages = {
        'invalid': _('Phone numbers must have 9 digits, or start by + or 00.'),
    }

    def clean(self, value):
        """Strip dots/whitespace and validate against phone_digits_re.

        Returns '' for empty values; raises ValidationError on bad input.
        """
        super(PTPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return ''
        # Bug fix: use a raw string so '\.' and '\s' are regex escapes, not
        # (deprecated) string escape sequences.
        value = re.sub(r'(\.|\s)', '', smart_text(value))
        m = phone_digits_re.search(value)
        if m:
            return '%s' % value
        raise ValidationError(self.error_messages['invalid'])
|
bsd-3-clause
|
alexlib/Qt-Python-Binding-Examples
|
common_widgets/checkbox/checkbox.py
|
1
|
1200
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
QCheckBox demo
Tested environment:
Mac OS X 10.6.8
http://doc.qt.nokia.com/latest/qcheckbox.html
http://doc.qt.nokia.com/latest/qabstractbutton.html
http://doc.qt.nokia.com/latest/qt.html#CheckState-enum
"""
import sys
try:
from PySide import QtCore
from PySide import QtGui
except ImportError:
from PyQt4 import QtCore
from PyQt4 import QtGui
class Demo(QtGui.QWidget):
    """Minimal window demonstrating QCheckBox state-change signals."""
    def __init__(self):
        super(Demo, self).__init__()
        # Window geometry: position (500, 200), size 300x400.
        x, y, w, h = 500, 200, 300, 400
        self.setGeometry(x, y, w, h)
        self._checkbox = QtGui.QCheckBox("CheckBox", self)
        self._checkbox.move(10, 10)
        self._checkbox.stateChanged.connect(self._checkbox_cb)
    def _checkbox_cb(self, state):
        # Sanity-check the Qt.CheckState enum values this demo relies on.
        assert QtCore.Qt.Unchecked == 0
        assert QtCore.Qt.Checked == 2
        assert state in (QtCore.Qt.Checked, QtCore.Qt.Unchecked, QtCore.Qt.PartiallyChecked)
        print "state:", state
    def show_and_raise(self):
        # raise_() is Qt's raise(), renamed to avoid the Python keyword.
        self.show()
        self.raise_()
if __name__ == "__main__":
    # Create the Qt application, show the demo window, run the event loop.
    app = QtGui.QApplication(sys.argv)
    demo = Demo()
    demo.show_and_raise()
    sys.exit(app.exec_())
|
bsd-3-clause
|
vileopratama/vitech
|
src/addons/point_of_lounge/report/lounge_receipt.py
|
1
|
1856
|
from openerp.osv import osv
import time
from openerp.report import report_sxw
def titlize(journal_name):
    """Strip the word 'journal' (and anything after it) from a journal name.

    Pops tokens off the end until 'journal' itself has been removed, then
    rejoins the remainder.  Assumes 'journal' occurs in the name.
    """
    tokens = journal_name.split()
    while True:
        if tokens.pop() == 'journal':
            break
    return ' '.join(tokens)
class order(report_sxw.rml_parse):
    """RML report parser for lounge receipts.

    Exposes helper callables (discount, net amount, journal amounts,
    company address, titlize) to the receipt template via ``localcontext``.
    """

    def __init__(self, cr, uid, name, context):
        super(order, self).__init__(cr, uid, name, context=context)
        user = self.pool['res.users'].browse(cr, uid, uid, context=context)
        partner = user.company_id.partner_id
        self.localcontext.update({
            'time': time,
            'disc': self.discount,
            'net': self.netamount,
            'get_journal_amt': self._get_journal_amt,
            'address': partner or False,
            'titlize': titlize
        })

    def discount(self, order_id):
        """Return the total discount amount over all lines of an order."""
        sql = 'select discount, price_unit, qty from lounge_order_line where order_id = %s '
        self.cr.execute(sql, (order_id,))
        res = self.cr.fetchall()
        dsum = 0
        for discount, price_unit, qty in res:
            if discount != 0:
                dsum = dsum + (qty * (discount * price_unit / 100))
        return dsum

    def netamount(self, order_line_id):
        """Return qty * unit price for a single order line."""
        sql = 'select (qty*price_unit) as net_price from lounge_order_line where id = %s'
        self.cr.execute(sql, (order_line_id,))
        res = self.cr.fetchone()
        return res[0]

    def _get_journal_amt(self, order_id):
        """Return [{'name': journal name, 'amt': amount}, ...] for the
        bank-statement lines attached to the order.
        """
        # Bug/security fix: the query previously interpolated order_id into
        # the SQL string with %d; use a parameterized query like the other
        # methods in this class.
        sql = """ select aj.name,absl.amount as amt from account_bank_statement as abs
                LEFT JOIN account_bank_statement_line as absl ON abs.id = absl.statement_id
                LEFT JOIN account_journal as aj ON aj.id = abs.journal_id
                WHERE absl.lounge_statement_id = %s"""
        self.cr.execute(sql, (order_id,))
        return self.cr.dictfetchall()
class report_order_receipt(osv.AbstractModel):
    """Report model binding the receipt template to the ``order`` parser."""
    _name = 'report.point_of_lounge.report_receipt'
    _inherit = 'report.abstract_report'
    _template = 'point_of_lounge.report_receipt'
    _wrapped_report_class = order
|
mit
|
paulrouget/servo
|
tests/jquery/run_jquery.py
|
13
|
9583
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
import os
import re
import subprocess
import sys
import BaseHTTPServer
import SimpleHTTPServer
import SocketServer
import threading
import urlparse
# List of jQuery modules that will be tested.
# TODO(gw): Disabled most of them as something has been
# introduced very recently that causes the resource task
# to panic - and hard fail doesn't exit the servo
# process when this happens.
# See https://github.com/servo/servo/issues/6210 and
# https://github.com/servo/servo/issues/6211
JQUERY_MODULES = [
# "ajax", # panics
# "attributes",
# "callbacks",
# "core", # mozjs crash
# "css",
# "data",
# "deferred",
# "dimensions",
# "effects",
# "event", # panics
# "manipulation", # mozjs crash
# "offset",
# "queue",
"selector",
# "serialize",
# "support",
# "traversing",
# "wrap"
]
# Port to run the HTTP server on for jQuery.
TEST_SERVER_PORT = 8192
# A regex for matching console.log output lines from the test runner.
REGEX_PATTERN = "^\[jQuery test\] \[([0-9]+)/([0-9]+)/([0-9]+)] (.*)"
# The result of a single test group.
class TestResult:
    """Outcome of a single jQuery test group.

    Equality and hashing consider all four fields, so results can live in
    sets/dicts and be compared against a previous run's results.
    """

    def __init__(self, success, fail, total, text):
        self.success = int(success)
        self.fail = int(fail)
        self.total = int(total)
        self.text = text

    def _as_tuple(self):
        # Canonical value used for equality and hashing.
        return (self.success, self.fail, self.total, self.text)

    def __eq__(self, other):
        return self._as_tuple() == other._as_tuple()

    def __ne__(self, other):
        return self._as_tuple() != other._as_tuple()

    def __hash__(self):
        return hash(self._as_tuple())

    def __repr__(self):
        return "ok={0} fail={1} total={2}".format(self.success, self.fail, self.total)
# Parse a line, producing a TestResult.
# Throws if unable to parse.
def parse_line_to_result(line):
    """Parse one runner output line into (test name, TestResult).

    Raises AttributeError when the line does not match REGEX_PATTERN.
    """
    success, fail, total, name = re.match(REGEX_PATTERN, line).groups()
    return name, TestResult(success, fail, total, line)
# Parse an entire buffer of lines to a dictionary
# of test results, keyed by the test name.
def parse_string_to_results(buffer):
    """Parse a whole buffer of runner output into {name: TestResult}.

    Later lines with the same test name overwrite earlier ones, matching
    dict insertion semantics.
    """
    return dict(parse_line_to_result(line) for line in buffer.splitlines())
# Run servo and print / parse the results for a specific jQuery test module.
def run_servo(servo_exe, module):
    """Run servo headless against one jQuery module.

    Yields (name, TestResult) for every stdout line that parses as a test
    result; non-matching lines are silently skipped.
    """
    url = "http://localhost:{0}/jquery/test/?module={1}".format(TEST_SERVER_PORT, module)
    proc = subprocess.Popen([servo_exe, url, "-z", "-f"], stdout=subprocess.PIPE)
    while True:
        line = proc.stdout.readline()
        if not line:
            break
        try:
            yield parse_line_to_result(line.rstrip())
        except AttributeError:
            # Not a test-result line; ignore it.
            pass
# Build the filename for an expected results file.
def module_filename(module):
    """Return the expected-results filename for a jQuery module."""
    return "expected_%s.txt" % module
# Read an existing set of expected results to compare against.
def read_existing_results(module):
    """Load the stored expected results for *module* from disk."""
    with open(module_filename(module), 'r') as handle:
        return parse_string_to_results(handle.read())
# Write a set of results to file
def write_results(module, results):
    """Write *results* ({name: TestResult}) to the module's expected file.

    One stored result line per entry.

    Bug fix: the previous version iterated the global ``test_results``
    instead of the ``results`` parameter, so any other dict passed in was
    silently ignored.
    """
    with open(module_filename(module), 'w') as handle:
        for result in results.values():
            handle.write(result.text + '\n')
# Print usage if command line args are incorrect
def print_usage():
    """Print command-line usage for this script."""
    usage = "USAGE: {0} test|update servo_binary jquery_base_dir"
    print(usage.format(sys.argv[0]))
# Run a simple HTTP server to serve up the jQuery test suite
def run_http_server():
    """Serve the jQuery test suite over HTTP on TEST_SERVER_PORT, forever.

    Intended to run on a daemon thread; this function never returns.
    """
    class ThreadingSimpleServer(SocketServer.ThreadingMixIn,
                                BaseHTTPServer.HTTPServer):
        # Allow quick restarts without waiting out TIME_WAIT sockets.
        allow_reuse_address = True
    class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
        # TODO(gw): HACK copy the fixed version from python
        # main repo - due to https://bugs.python.org/issue23112
        def send_head(self):
            # Mirrors SimpleHTTPRequestHandler.send_head with the upstream
            # directory-redirect fix applied (preserves query/fragment).
            path = self.translate_path(self.path)
            f = None
            if os.path.isdir(path):
                parts = urlparse.urlsplit(self.path)
                if not parts.path.endswith('/'):
                    # redirect browser - doing basically what apache does
                    self.send_response(301)
                    new_parts = (parts[0], parts[1], parts[2] + '/',
                                 parts[3], parts[4])
                    new_url = urlparse.urlunsplit(new_parts)
                    self.send_header("Location", new_url)
                    self.end_headers()
                    return None
                for index in "index.html", "index.htm":
                    index = os.path.join(path, index)
                    if os.path.exists(index):
                        path = index
                        break
                else:
                    return self.list_directory(path)
            ctype = self.guess_type(path)
            try:
                # Always read in binary mode. Opening files in text mode may cause
                # newline translations, making the actual size of the content
                # transmitted *less* than the content-length!
                f = open(path, 'rb')
            except IOError:
                self.send_error(404, "File not found")
                return None
            try:
                self.send_response(200)
                self.send_header("Content-type", ctype)
                fs = os.fstat(f.fileno())
                self.send_header("Content-Length", str(fs[6]))
                self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
                self.end_headers()
                return f
            except:
                # Ensure the file handle is released before re-raising.
                f.close()
                raise
        def log_message(self, format, *args):
            # Silence per-request logging so test output stays readable.
            return
    server = ThreadingSimpleServer(('', TEST_SERVER_PORT), RequestHandler)
    while True:
        sys.stdout.flush()
        server.handle_request()
if __name__ == '__main__':
    # Usage: run_jquery.py test|update servo_binary jquery_base_dir
    if len(sys.argv) == 4:
        cmd = sys.argv[1]
        servo_exe = sys.argv[2]
        base_dir = sys.argv[3]
        os.chdir(base_dir)
        # Ensure servo binary can be found
        if not os.path.isfile(servo_exe):
            print("Unable to find {0}. This script expects an existing build of Servo.".format(servo_exe))
            sys.exit(1)
        # Start the test server
        httpd_thread = threading.Thread(target=run_http_server)
        httpd_thread.setDaemon(True)
        httpd_thread.start()
        if cmd == "test":
            # Compare the current run against the stored expected results.
            print("Testing jQuery on Servo!")
            test_count = 0
            unexpected_count = 0
            individual_success = 0
            individual_total = 0
            # Test each module separately
            for module in JQUERY_MODULES:
                print("\t{0}".format(module))
                prev_test_results = read_existing_results(module)
                for name, current_result in run_servo(servo_exe, module):
                    test_count += 1
                    individual_success += current_result.success
                    individual_total += current_result.total
                    # If this test was in the previous results, compare them.
                    if name in prev_test_results:
                        prev_result = prev_test_results[name]
                        if prev_result == current_result:
                            print("\t\tOK: {0}".format(name))
                        else:
                            unexpected_count += 1
                            print("\t\tFAIL: {0}: WAS {1} NOW {2}".format(name, prev_result, current_result))
                        del prev_test_results[name]
                    else:
                        # There was a new test that wasn't expected
                        unexpected_count += 1
                        print("\t\tNEW: {0}".format(current_result.text))
                # Check what's left over, these are tests that were expected but didn't run this time.
                for name in prev_test_results:
                    test_count += 1
                    unexpected_count += 1
                    print("\t\tMISSING: {0}".format(prev_test_results[name].text))
            print("\tRan {0} test groups. {1} unexpected results.".format(test_count, unexpected_count))
            print("\t{0} tests succeeded of {1} ({2:.2f}%)".format(individual_success,
                                                                   individual_total,
                                                                   100.0 * individual_success / individual_total))
            # Non-zero exit when anything deviated from the stored results.
            if unexpected_count > 0:
                sys.exit(1)
        elif cmd == "update":
            # Regenerate the expected-results files from a fresh run.
            print("Updating jQuery expected results")
            for module in JQUERY_MODULES:
                print("\t{0}".format(module))
                test_results = {}
                for name, test_result in run_servo(servo_exe, module):
                    print("\t\t{0} {1}".format(name, test_result))
                    test_results[name] = test_result
                write_results(module, test_results)
        else:
            print_usage()
    else:
        print_usage()
|
mpl-2.0
|
xxxIsaacPeralxxx/anim-studio-tools
|
probe/sources/probe/plugins/nose_tank_scratch.py
|
5
|
3677
|
# Dr. D Studios - Software Disclaimer
#
# Copyright 2010 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios), its
# affiliates and/or its licensors.
#
import itch.tank_scratch
import nose.plugins
class TankScratchNosePlugin(nose.plugins.Plugin):
    """
    Adds Tank scratch setup and teardown capability to tests.
    .. versionadded:: 0.6.0
    """
    name = 'tank-scratch'
    score = 1  # run early

    def __init__(self):
        """
        .. versionadded:: 0.6.0
        """
        super(TankScratchNosePlugin, self).__init__()
        # Populated from command-line options in configure().
        self.preserve = False
        self.scratch_name = None

    def options(self, parser, env):
        """
        Register commandline options.
        .. versionadded:: 0.6.0
        """
        super(TankScratchNosePlugin, self).options(parser, env)
        parser.add_option('--tank-scratch-preserve', action='store_true', dest='preserve',
                          default=False,
                          metavar="PRESERVE",
                          help="Only create new scratch if missing and don't teardown at end")
        parser.add_option('--tank-scratch-name', action='store', dest='scratch_name',
                          default=None,
                          metavar="NAME",
                          help="A name for the scratch area.")

    def available(self):
        """
        Check to see if this plugin is available.
        .. versionadded:: 0.12.0
        """
        ts = itch.tank_scratch.TankScratch()
        ts.available()

    def configure(self, options, conf):
        """
        Configure plugin.
        .. versionadded:: 0.6.0
        .. versionchanged:: 0.9.0
            Changed the module name.
        """
        super(TankScratchNosePlugin, self).configure(options, conf)
        self.conf = conf
        self.preserve = options.preserve
        self.scratch_name = options.scratch_name
        self.ts = itch.tank_scratch.TankScratch(name=self.scratch_name)

    def begin(self):
        """
        Setup scratch area if required.
        .. versionadded:: 0.6.0
        .. versionchanged:: 0.9.1
            Updating project name to match what is used elsewhere.
        """
        # Bug fix: 'os' was referenced here but never imported anywhere in
        # this module, raising NameError the first time begin() ran.
        import os

        if self.preserve:
            # Reuse an existing scratch area across runs; only create it
            # when missing and never tear it down.
            if not self.ts.exists():
                self.ts.setup()
        else:
            self.ts.setup(auto_teardown=True)
        os.environ["TANK_CONFIG_TANK_SCRATCH"] = os.path.join(self.ts.location, "scratch_config.cfg")
        os.environ["TANK_PROJECT"] = "TANK_SCRATCH"

    def finalize(self, result):
        """
        Tear down scratch area if required.
        .. versionadded:: 0.6.0
        """
        if not self.preserve:
            self.ts.teardown()
# Copyright 2008-2012 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios)
#
# This file is part of anim-studio-tools.
#
# anim-studio-tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# anim-studio-tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with anim-studio-tools. If not, see <http://www.gnu.org/licenses/>.
|
gpl-3.0
|
cortedeltimo/SickRage
|
lib/sqlalchemy/ext/mutable.py
|
76
|
22912
|
# ext/mutable.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provide support for tracking of in-place changes to scalar values,
which are propagated into ORM change events on owning parent objects.
.. versionadded:: 0.7 :mod:`sqlalchemy.ext.mutable` replaces SQLAlchemy's
legacy approach to in-place mutations of scalar values; see
:ref:`07_migration_mutation_extension`.
.. _mutable_scalars:
Establishing Mutability on Scalar Column Values
===============================================
A typical example of a "mutable" structure is a Python dictionary.
Following the example introduced in :ref:`types_toplevel`, we
begin with a custom type that marshals Python dictionaries into
JSON strings before being persisted::
from sqlalchemy.types import TypeDecorator, VARCHAR
import json
class JSONEncodedDict(TypeDecorator):
"Represents an immutable structure as a json-encoded string."
impl = VARCHAR
def process_bind_param(self, value, dialect):
if value is not None:
value = json.dumps(value)
return value
def process_result_value(self, value, dialect):
if value is not None:
value = json.loads(value)
return value
The usage of ``json`` is only for the purposes of example. The
:mod:`sqlalchemy.ext.mutable` extension can be used
with any type whose target Python type may be mutable, including
:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.
When using the :mod:`sqlalchemy.ext.mutable` extension, the value itself
tracks all parents which reference it. Below, we illustrate a simple
version of the :class:`.MutableDict` dictionary object, which applies
the :class:`.Mutable` mixin to a plain Python dictionary::
from sqlalchemy.ext.mutable import Mutable
class MutableDict(Mutable, dict):
@classmethod
def coerce(cls, key, value):
"Convert plain dictionaries to MutableDict."
if not isinstance(value, MutableDict):
if isinstance(value, dict):
return MutableDict(value)
# this call will raise ValueError
return Mutable.coerce(key, value)
else:
return value
def __setitem__(self, key, value):
"Detect dictionary set events and emit change events."
dict.__setitem__(self, key, value)
self.changed()
def __delitem__(self, key):
"Detect dictionary del events and emit change events."
dict.__delitem__(self, key)
self.changed()
The above dictionary class takes the approach of subclassing the Python
built-in ``dict`` to produce a dict
subclass which routes all mutation events through ``__setitem__``. There are
variants on this approach, such as subclassing ``UserDict.UserDict`` or
``collections.MutableMapping``; the part that's important to this example is
that the :meth:`.Mutable.changed` method is called whenever an in-place
change to the datastructure takes place.
We also redefine the :meth:`.Mutable.coerce` method which will be used to
convert any values that are not instances of ``MutableDict``, such
as the plain dictionaries returned by the ``json`` module, into the
appropriate type. Defining this method is optional; we could just as well
have created our ``JSONEncodedDict`` such that it always returns an instance
of ``MutableDict``, and additionally ensured that all calling code
uses ``MutableDict`` explicitly. When :meth:`.Mutable.coerce` is not
overridden, any values applied to a parent object which are not instances
of the mutable type will raise a ``ValueError``.
Our new ``MutableDict`` type offers a class method
:meth:`~.Mutable.as_mutable` which we can use within column metadata
to associate with types. This method grabs the given type object or
class and associates a listener that will detect all future mappings
of this type, applying event listening instrumentation to the mapped
attribute. Such as, with classical table metadata::
from sqlalchemy import Table, Column, Integer
my_data = Table('my_data', metadata,
Column('id', Integer, primary_key=True),
Column('data', MutableDict.as_mutable(JSONEncodedDict))
)
Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict``
(if the type object was not an instance already), which will intercept any
attributes which are mapped against this type. Below we establish a simple
mapping against the ``my_data`` table::
from sqlalchemy import mapper
class MyDataClass(object):
pass
# associates mutation listeners with MyDataClass.data
mapper(MyDataClass, my_data)
The ``MyDataClass.data`` member will now be notified of in place changes
to its value.
There's no difference in usage when using declarative::
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(MutableDict.as_mutable(JSONEncodedDict))
Any in-place changes to the ``MyDataClass.data`` member
will flag the attribute as "dirty" on the parent object::
>>> from sqlalchemy.orm import Session
>>> sess = Session()
>>> m1 = MyDataClass(data={'value1':'foo'})
>>> sess.add(m1)
>>> sess.commit()
>>> m1.data['value1'] = 'bar'
>>> assert m1 in sess.dirty
True
The ``MutableDict`` can be associated with all future instances
of ``JSONEncodedDict`` in one step, using
:meth:`~.Mutable.associate_with`. This is similar to
:meth:`~.Mutable.as_mutable` except it will intercept all occurrences
of ``MutableDict`` in all mappings unconditionally, without
the need to declare it individually::
MutableDict.associate_with(JSONEncodedDict)
class MyDataClass(Base):
__tablename__ = 'my_data'
id = Column(Integer, primary_key=True)
data = Column(JSONEncodedDict)
Supporting Pickling
--------------------
The key to the :mod:`sqlalchemy.ext.mutable` extension relies upon the
placement of a ``weakref.WeakKeyDictionary`` upon the value object, which
stores a mapping of parent mapped objects keyed to the attribute name under
which they are associated with this value. ``WeakKeyDictionary`` objects are
not picklable, due to the fact that they contain weakrefs and function
callbacks. In our case, this is a good thing, since if this dictionary were
picklable, it could lead to an excessively large pickle size for our value
objects that are pickled by themselves outside of the context of the parent.
The developer responsibility here is only to provide a ``__getstate__`` method
that excludes the :meth:`~MutableBase._parents` collection from the pickle
stream::
class MyMutableType(Mutable):
def __getstate__(self):
d = self.__dict__.copy()
d.pop('_parents', None)
return d
With our dictionary example, we need to return the contents of the dict itself
(and also restore them on __setstate__)::
class MutableDict(Mutable, dict):
# ....
def __getstate__(self):
return dict(self)
def __setstate__(self, state):
self.update(state)
In the case that our mutable value object is pickled as it is attached to one
or more parent objects that are also part of the pickle, the :class:`.Mutable`
mixin will re-establish the :attr:`.Mutable._parents` collection on each value
object as the owning parents themselves are unpickled.
.. _mutable_composites:
Establishing Mutability on Composites
=====================================
Composites are a special ORM feature which allow a single scalar attribute to
be assigned an object value which represents information "composed" from one
or more columns from the underlying mapped table. The usual example is that of
a geometric "point", and is introduced in :ref:`mapper_composite`.
.. versionchanged:: 0.7
The internals of :func:`.orm.composite` have been
greatly simplified and in-place mutation detection is no longer enabled by
default; instead, the user-defined value must detect changes on its own and
propagate them to all owning parents. The :mod:`sqlalchemy.ext.mutable`
extension provides the helper class :class:`.MutableComposite`, which is a
slight variant on the :class:`.Mutable` class.
As is the case with :class:`.Mutable`, the user-defined composite class
subclasses :class:`.MutableComposite` as a mixin, and detects and delivers
change events to its parents via the :meth:`.MutableComposite.changed` method.
In the case of a composite class, the detection is usually via the usage of
Python descriptors (i.e. ``@property``), or alternatively via the special
Python method ``__setattr__()``. Below we expand upon the ``Point`` class
introduced in :ref:`mapper_composite` to subclass :class:`.MutableComposite`
and to also route attribute set events via ``__setattr__`` to the
:meth:`.MutableComposite.changed` method::
from sqlalchemy.ext.mutable import MutableComposite
class Point(MutableComposite):
def __init__(self, x, y):
self.x = x
self.y = y
def __setattr__(self, key, value):
"Intercept set events"
# set the attribute
object.__setattr__(self, key, value)
# alert all parents to the change
self.changed()
def __composite_values__(self):
return self.x, self.y
def __eq__(self, other):
return isinstance(other, Point) and \\
other.x == self.x and \\
other.y == self.y
def __ne__(self, other):
return not self.__eq__(other)
The :class:`.MutableComposite` class uses a Python metaclass to automatically
establish listeners for any usage of :func:`.orm.composite` that specifies our
``Point`` type. Below, when ``Point`` is mapped to the ``Vertex`` class,
listeners are established which will route change events from ``Point``
objects to each of the ``Vertex.start`` and ``Vertex.end`` attributes::
from sqlalchemy.orm import composite, mapper
from sqlalchemy import Table, Column
vertices = Table('vertices', metadata,
Column('id', Integer, primary_key=True),
Column('x1', Integer),
Column('y1', Integer),
Column('x2', Integer),
Column('y2', Integer),
)
class Vertex(object):
pass
mapper(Vertex, vertices, properties={
'start': composite(Point, vertices.c.x1, vertices.c.y1),
'end': composite(Point, vertices.c.x2, vertices.c.y2)
})
Any in-place changes to the ``Vertex.start`` or ``Vertex.end`` members
will flag the attribute as "dirty" on the parent object::
>>> from sqlalchemy.orm import Session
>>> sess = Session()
>>> v1 = Vertex(start=Point(3, 4), end=Point(12, 15))
>>> sess.add(v1)
>>> sess.commit()
>>> v1.end.x = 8
>>> assert v1 in sess.dirty
True
Coercing Mutable Composites
---------------------------
The :meth:`.MutableBase.coerce` method is also supported on composite types.
In the case of :class:`.MutableComposite`, the :meth:`.MutableBase.coerce`
method is only called for attribute set operations, not load operations.
Overriding the :meth:`.MutableBase.coerce` method is essentially equivalent
to using a :func:`.validates` validation routine for all attributes which
make use of the custom composite type::
class Point(MutableComposite):
# other Point methods
# ...
def coerce(cls, key, value):
if isinstance(value, tuple):
value = Point(*value)
elif not isinstance(value, Point):
raise ValueError("tuple or Point expected")
return value
.. versionadded:: 0.7.10,0.8.0b2
Support for the :meth:`.MutableBase.coerce` method in conjunction with
objects of type :class:`.MutableComposite`.
Supporting Pickling
--------------------
As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
class uses a ``weakref.WeakKeyDictionary`` available via the
:meth:`MutableBase._parents` attribute which isn't picklable. If we need to
pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
the minimal form of our ``Point`` class::
class Point(MutableComposite):
# ...
def __getstate__(self):
return self.x, self.y
def __setstate__(self, state):
self.x, self.y = state
As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
pickling process of the parent's object-relational state so that the
:meth:`MutableBase._parents` collection is restored to all ``Point`` objects.
"""
from ..orm.attributes import flag_modified
from .. import event, types
from ..orm import mapper, object_mapper, Mapper
from ..util import memoized_property
import weakref
class MutableBase(object):
    """Common base class to :class:`.Mutable`
    and :class:`.MutableComposite`.

    Provides the shared machinery: the weak parent-tracking collection
    (:attr:`._parents`), default value coercion (:meth:`.coerce`), and the
    ORM event wiring (:meth:`._listen_on_attribute`).
    """

    @memoized_property
    def _parents(self):
        """Dictionary of parent object->attribute name on the parent.

        This attribute is a so-called "memoized" property.  It initializes
        itself with a new ``weakref.WeakKeyDictionary`` the first time
        it is accessed, returning the same object upon subsequent access.
        """
        # Weak keys: tracking a parent must never prevent that parent
        # from being garbage collected.
        return weakref.WeakKeyDictionary()

    @classmethod
    def coerce(cls, key, value):
        """Given a value, coerce it into the target type.

        Can be overridden by custom subclasses to coerce incoming
        data into a particular type.

        By default, raises ``ValueError``.

        This method is called in different scenarios depending on if
        the parent class is of type :class:`.Mutable` or of type
        :class:`.MutableComposite`.  In the case of the former, it is called
        for both attribute-set operations as well as during ORM loading
        operations.  For the latter, it is only called during attribute-set
        operations; the mechanics of the :func:`.composite` construct
        handle coercion during load operations.

        :param key: string name of the ORM-mapped attribute being set.
        :param value: the incoming value.
        :return: the method should return the coerced value, or raise
         ``ValueError`` if the coercion cannot be completed.
        """
        # None always passes through untouched; only non-None values of
        # the wrong type are rejected.
        if value is None:
            return None
        msg = "Attribute '%s' does not accept objects of type %s"
        raise ValueError(msg % (key, type(value)))

    @classmethod
    def _listen_on_attribute(cls, attribute, coerce, parent_cls):
        """Establish this type as a mutation listener for the given
        mapped descriptor.

        :param attribute: the instrumented class attribute to instrument.
        :param coerce: when True, run :meth:`.coerce` on values as they
         are loaded from the database.
        :param parent_cls: the mapped class the listener is intended for;
         listeners are only installed when this matches the attribute's
         owning class (inheritance is handled via ``propagate=True``).
        """
        key = attribute.key
        if parent_cls is not attribute.class_:
            return

        # rely on "propagate" here
        parent_cls = attribute.class_

        def load(state, *args):
            """Listen for objects loaded or refreshed.

            Wrap the target data member's value with
            ``Mutable``.
            """
            val = state.dict.get(key, None)
            if val is not None:
                if coerce:
                    val = cls.coerce(key, val)
                    state.dict[key] = val
                # Register this parent so in-place changes on ``val``
                # can flag the parent attribute as dirty later.
                val._parents[state.obj()] = key

        def set(target, value, oldvalue, initiator):
            """Listen for set/replace events on the target
            data member.

            Establish a weak reference to the parent object
            on the incoming value, remove it for the one
            outgoing.
            """
            # No-op when the same object is re-assigned.
            if value is oldvalue:
                return value

            if not isinstance(value, cls):
                value = cls.coerce(key, value)
            if value is not None:
                value._parents[target.obj()] = key
            if isinstance(oldvalue, cls):
                # The replaced value no longer belongs to this parent.
                oldvalue._parents.pop(target.obj(), None)
            return value

        def pickle(state, state_dict):
            # Stash mutable values in the parent's pickled state so that
            # unpickle() can re-establish the (unpicklable) weak-parent
            # links afterwards.
            val = state.dict.get(key, None)
            if val is not None:
                if 'ext.mutable.values' not in state_dict:
                    state_dict['ext.mutable.values'] = []
                state_dict['ext.mutable.values'].append(val)

        def unpickle(state, state_dict):
            # Rebuild the weakref parent mapping dropped during pickling.
            if 'ext.mutable.values' in state_dict:
                for val in state_dict['ext.mutable.values']:
                    val._parents[state.obj()] = key

        event.listen(parent_cls, 'load', load,
                     raw=True, propagate=True)
        event.listen(parent_cls, 'refresh', load,
                     raw=True, propagate=True)
        # retval=True: the 'set' handler returns the (possibly coerced)
        # value to be stored.
        event.listen(attribute, 'set', set,
                     raw=True, retval=True, propagate=True)
        event.listen(parent_cls, 'pickle', pickle,
                     raw=True, propagate=True)
        event.listen(parent_cls, 'unpickle', unpickle,
                     raw=True, propagate=True)
class Mutable(MutableBase):
    """Mixin that defines transparent propagation of change
    events to a parent object.

    See the example in :ref:`mutable_scalars` for usage information.
    """

    def changed(self):
        """Subclasses should call this method whenever change events occur."""
        # Flag each owning parent's attribute as modified so the ORM
        # includes it in the next flush.
        for parent, key in self._parents.items():
            flag_modified(parent, key)

    @classmethod
    def associate_with_attribute(cls, attribute):
        """Establish this type as a mutation listener for the given
        mapped descriptor.
        """
        cls._listen_on_attribute(attribute, True, attribute.class_)

    @classmethod
    def associate_with(cls, sqltype):
        """Associate this wrapper with all future mapped columns
        of the given type.

        This is a convenience method that calls
        ``associate_with_attribute`` automatically.

        .. warning::

           The listeners established by this method are *global*
           to all mappers, and are *not* garbage collected.   Only use
           :meth:`.associate_with` for types that are permanent to an
           application, not with ad-hoc types else this will cause unbounded
           growth in memory usage.
        """

        def listen_for_type(mapper, class_):
            # isinstance() match: any column whose type is an instance of
            # ``sqltype`` (or a subclass) gets instrumented — contrast
            # the identity match used by as_mutable().
            for prop in mapper.column_attrs:
                if isinstance(prop.columns[0].type, sqltype):
                    cls.associate_with_attribute(getattr(class_, prop.key))

        event.listen(mapper, 'mapper_configured', listen_for_type)

    @classmethod
    def as_mutable(cls, sqltype):
        """Associate a SQL type with this mutable Python type.

        This establishes listeners that will detect ORM mappings against
        the given type, adding mutation event trackers to those mappings.

        The type is returned, unconditionally as an instance, so that
        :meth:`.as_mutable` can be used inline::

            Table('mytable', metadata,
                Column('id', Integer, primary_key=True),
                Column('data', MyMutableType.as_mutable(PickleType))
            )

        Note that the returned type is always an instance, even if a class
        is given, and that only columns which are declared specifically with
        that type instance receive additional instrumentation.

        To associate a particular mutable type with all occurrences of a
        particular type, use the :meth:`.Mutable.associate_with` classmethod
        of the particular :class:`.Mutable` subclass to establish a global
        association.

        .. warning::

           The listeners established by this method are *global*
           to all mappers, and are *not* garbage collected.   Only use
           :meth:`.as_mutable` for types that are permanent to an application,
           not with ad-hoc types else this will cause unbounded growth
           in memory usage.
        """
        sqltype = types.to_instance(sqltype)

        def listen_for_type(mapper, class_):
            # Identity match: only columns declared with this exact type
            # *instance* are instrumented.
            for prop in mapper.column_attrs:
                if prop.columns[0].type is sqltype:
                    cls.associate_with_attribute(getattr(class_, prop.key))

        event.listen(mapper, 'mapper_configured', listen_for_type)

        return sqltype
class MutableComposite(MutableBase):
    """Mixin that defines transparent propagation of change
    events on a SQLAlchemy "composite" object to its
    owning parent or parents.

    See the example in :ref:`mutable_composites` for usage information.
    """

    def changed(self):
        """Subclasses should call this method whenever change events occur."""
        for parent, key in self._parents.items():
            prop = object_mapper(parent).get_property(key)
            for value, attr_name in zip(
                    self.__composite_values__(),
                    prop._attribute_keys):
                # Push each composite element back onto the parent's
                # underlying column attribute; the attribute set events
                # are what mark the parent dirty in the ORM.
                setattr(parent, attr_name, value)
def _setup_composite_listener():
    """Install a single global mapper listener that instruments every
    composite attribute whose composite class is a MutableComposite."""

    def _listen_for_type(mapper, class_):
        for prop in mapper.iterate_properties:
            # Only composite properties whose declared composite class
            # opted in by subclassing MutableComposite are instrumented.
            if (hasattr(prop, 'composite_class') and
                    isinstance(prop.composite_class, type) and
                    issubclass(prop.composite_class, MutableComposite)):
                prop.composite_class._listen_on_attribute(
                    getattr(class_, prop.key), False, class_)
    # Guard against double registration if this module is imported twice.
    if not event.contains(Mapper, "mapper_configured", _listen_for_type):
        event.listen(Mapper, 'mapper_configured', _listen_for_type)
# Module import side effect: composite listening is always active.
_setup_composite_listener()
class MutableDict(Mutable, dict):
    """A dictionary type that implements :class:`.Mutable`.

    .. versionadded:: 0.8

    All in-place mutation methods emit change events; previously only
    ``__setitem__``, ``__delitem__`` and ``clear`` did, so mutations made
    via ``update()``, ``pop()``, ``popitem()`` or ``setdefault()`` were
    silently lost by the ORM.
    """

    def __setitem__(self, key, value):
        """Detect dictionary set events and emit change events."""
        dict.__setitem__(self, key, value)
        self.changed()

    def setdefault(self, key, value):
        """Detect ``setdefault()`` events and emit change events."""
        result = dict.setdefault(self, key, value)
        self.changed()
        return result

    def __delitem__(self, key):
        """Detect dictionary del events and emit change events."""
        dict.__delitem__(self, key)
        self.changed()

    def update(self, *a, **kw):
        """Detect ``update()`` events and emit change events."""
        dict.update(self, *a, **kw)
        self.changed()

    def pop(self, *arg):
        """Detect ``pop()`` events and emit change events."""
        result = dict.pop(self, *arg)
        self.changed()
        return result

    def popitem(self):
        """Detect ``popitem()`` events and emit change events."""
        result = dict.popitem(self)
        self.changed()
        return result

    def clear(self):
        """Detect ``clear()`` events and emit change events."""
        dict.clear(self)
        self.changed()

    @classmethod
    def coerce(cls, key, value):
        """Convert plain dictionary to MutableDict."""
        if not isinstance(value, MutableDict):
            if isinstance(value, dict):
                return MutableDict(value)
            # this call will raise ValueError for unsupported types
            return Mutable.coerce(key, value)
        else:
            return value

    def __getstate__(self):
        # Pickle only the dict contents; the weakref _parents collection
        # is unpicklable and is rebuilt by the 'unpickle' ORM event.
        return dict(self)

    def __setstate__(self, state):
        self.update(state)
|
gpl-3.0
|
epiqc/ScaffCC
|
llvm/utils/benchmark/tools/gbench/util.py
|
70
|
5178
|
"""util.py - General utilities for running, loading, and processing benchmarks
"""
import json
import os
import tempfile
import subprocess
import sys
# Input file type enumeration
IT_Invalid = 0     # unrecognised / unusable input
IT_JSON = 1        # JSON benchmark-output file
IT_Executable = 2  # benchmark binary

# Number of leading bytes to sniff when classifying an executable:
# PE images are identified by 2 magic bytes ("MZ"), ELF/Mach-O by 4.
_num_magic_bytes = 2 if sys.platform.startswith('win') else 4
def is_executable_file(filename):
    """
    Return True if 'filename' names an existing regular file that looks
    like an executable: one beginning with the magic bytes of a PE,
    Mach-O, or ELF image for the current platform.
    """
    if not os.path.isfile(filename):
        return False
    # PE images are identified by two magic bytes, ELF/Mach-O by four.
    sniff_len = 2 if sys.platform.startswith('win') else 4
    with open(filename, mode='rb') as f:
        head = f.read(sniff_len)
    if sys.platform.startswith('win'):
        return head == b'MZ'
    if sys.platform == 'darwin':
        mach_o_magics = (
            b'\xfe\xed\xfa\xce',  # MH_MAGIC
            b'\xce\xfa\xed\xfe',  # MH_CIGAM
            b'\xfe\xed\xfa\xcf',  # MH_MAGIC_64
            b'\xcf\xfa\xed\xfe',  # MH_CIGAM_64
            b'\xca\xfe\xba\xbe',  # FAT_MAGIC
            b'\xbe\xba\xfe\xca',  # FAT_CIGAM
        )
        return head in mach_o_magics
    return head == b'\x7FELF'
def is_json_file(filename):
    """
    Returns 'True' if 'filename' names a valid JSON output file.
    'False' otherwise.
    """
    # Only catch the failures we expect (file missing/unreadable, not
    # valid JSON/text); the previous bare ``except:`` also swallowed
    # KeyboardInterrupt and SystemExit.
    try:
        with open(filename, 'r') as f:
            json.load(f)
        return True
    except (IOError, OSError, ValueError):
        # ValueError covers json.JSONDecodeError and UnicodeDecodeError.
        pass
    return False
def classify_input_file(filename):
    """
    Return a tuple (type, msg) where 'type' specifies the classified type
    of 'filename'. If 'type' is 'IT_Invalid' then 'msg' is a human readable
    string representing the error.
    """
    # Guard-clause form: return as soon as a classification is known.
    if not os.path.exists(filename):
        return IT_Invalid, "'%s' does not exist" % filename
    if not os.path.isfile(filename):
        return IT_Invalid, "'%s' does not name a file" % filename
    if is_executable_file(filename):
        return IT_Executable, None
    if is_json_file(filename):
        return IT_JSON, None
    return (IT_Invalid,
            "'%s' does not name a valid benchmark executable or JSON file"
            % filename)
def check_input_file(filename):
    """
    Classify the file named by 'filename' and return the classification.
    If the file is classified as 'IT_Invalid' print an error message and exit
    the program.
    """
    ftype, msg = classify_input_file(filename)
    if ftype != IT_Invalid:
        return ftype
    # Invalid input: report and abort the whole run.
    print("Invalid input file: %s" % msg)
    sys.exit(1)
def find_benchmark_flag(prefix, benchmark_flags):
    """
    Search the specified list of flags for a flag matching `<prefix><arg>` and
    if it is found return the arg it specifies. If specified more than once the
    last value is returned. If the flag is not found None is returned.
    """
    assert prefix.startswith('--') and prefix.endswith('=')
    # Collect every occurrence; the last one wins, mirroring how the
    # benchmark binary itself treats repeated flags.
    values = [flag[len(prefix):] for flag in benchmark_flags
              if flag.startswith(prefix)]
    return values[-1] if values else None
def remove_benchmark_flags(prefix, benchmark_flags):
    """
    Return a new list containing the specified benchmark_flags except those
    with the specified prefix.
    """
    assert prefix.startswith('--') and prefix.endswith('=')
    kept = []
    for flag in benchmark_flags:
        if not flag.startswith(prefix):
            kept.append(flag)
    return kept
def load_benchmark_results(fname):
    """
    Read benchmark output from a file and return the JSON object.
    REQUIRES: 'fname' names a file containing JSON benchmark output.
    """
    with open(fname, 'r') as benchmark_file:
        results = json.load(benchmark_file)
    return results
def run_benchmark(exe_name, benchmark_flags):
    """
    Run a benchmark specified by 'exe_name' with the specified
    'benchmark_flags'. The benchmark is run directly as a subprocess to preserve
    real time console output.
    RETURNS: A JSON object representing the benchmark output
    """
    # Honour a user-supplied --benchmark_out path; otherwise route the
    # JSON output through a temporary file that is removed afterwards.
    output_name = find_benchmark_flag('--benchmark_out=',
                                      benchmark_flags)
    is_temp_output = False
    if output_name is None:
        is_temp_output = True
        thandle, output_name = tempfile.mkstemp()
        # Close our handle immediately; the benchmark re-opens the file
        # by name when it writes its output.
        os.close(thandle)
        benchmark_flags = list(benchmark_flags) + \
            ['--benchmark_out=%s' % output_name]

    cmd = [exe_name] + benchmark_flags
    print("RUNNING: %s" % ' '.join(cmd))
    # subprocess.call (not check_output) so the benchmark's console
    # output streams in real time.
    exitCode = subprocess.call(cmd)
    if exitCode != 0:
        print('TEST FAILED...')
        sys.exit(exitCode)
    json_res = load_benchmark_results(output_name)
    if is_temp_output:
        os.unlink(output_name)
    return json_res
def run_or_load_benchmark(filename, benchmark_flags):
    """
    Get the results for a specified benchmark. If 'filename' specifies
    an executable benchmark then the results are generated by running the
    benchmark. Otherwise 'filename' must name a valid JSON output file,
    which is loaded and the result returned.
    """
    ftype = check_input_file(filename)
    if ftype == IT_JSON:
        return load_benchmark_results(filename)
    if ftype == IT_Executable:
        return run_benchmark(filename, benchmark_flags)
    # check_input_file() exits for IT_Invalid, so this is unreachable in
    # practice; raise (rather than `assert False`, which is stripped under
    # `python -O`) so the guard always survives.
    raise RuntimeError("unexpected input file type: %r" % ftype)
|
bsd-2-clause
|
duongbaoduy/gtools
|
third_party/boto/pyami/bootstrap.py
|
89
|
5739
|
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import os
import boto
from boto.utils import get_instance_metadata, get_instance_userdata
from boto.pyami.config import Config, BotoConfigPath
from boto.pyami.scriptbase import ScriptBase
import time
class Bootstrap(ScriptBase):
    """
    The Bootstrap class is instantiated and run as part of the PyAMI
    instance initialization process. The methods in this class will
    be run from the rc.local script of the instance and will be run
    as the root user.

    The main purpose of this class is to make sure the boto distribution
    on the instance is the one required.
    """

    def __init__(self):
        # Metadata is written *before* ScriptBase.__init__ runs, because
        # write_metadata() also (re)creates boto.config, which the base
        # class presumably relies on -- TODO confirm against ScriptBase.
        self.working_dir = '/mnt/pyami'
        self.write_metadata()
        ScriptBase.__init__(self)

    def write_metadata(self):
        # Persist EC2 instance metadata and user data into the boto
        # config file so later bootstrap stages can read them as
        # ordinary config values.
        fp = open(os.path.expanduser(BotoConfigPath), 'w')
        fp.write('[Instance]\n')
        inst_data = get_instance_metadata()
        for key in inst_data:
            fp.write('%s = %s\n' % (key, inst_data[key]))
        user_data = get_instance_userdata()
        fp.write('\n%s\n' % user_data)
        fp.write('[Pyami]\n')
        fp.write('working_dir = %s\n' % self.working_dir)
        fp.close()
        # This file has the AWS credentials, should we lock it down?
        # os.chmod(BotoConfigPath, stat.S_IREAD | stat.S_IWRITE)
        # now that we have written the file, read it into a pyami Config object
        boto.config = Config()
        boto.init_logging()

    def create_working_dir(self):
        # Ensure the scratch directory used by later bootstrap steps exists.
        boto.log.info('Working directory: %s' % self.working_dir)
        if not os.path.exists(self.working_dir):
            os.mkdir(self.working_dir)

    def load_boto(self):
        # Bring the on-instance boto checkout up to the requested version.
        # The 'boto_update' config value selects the mechanism:
        #   "svn[:rev]"  -> svn update
        #   "git[:ref]"  -> git pull + checkout (retried, see below)
        #   anything else -> treated as an easy_install requirement string
        update = boto.config.get('Boto', 'boto_update', 'svn:HEAD')
        if update.startswith('svn'):
            if update.find(':') >= 0:
                method, version = update.split(':')
                version = '-r%s' % version
            else:
                version = '-rHEAD'
            location = boto.config.get('Boto', 'boto_location', '/usr/local/boto')
            self.run('svn update %s %s' % (version, location))
        elif update.startswith('git'):
            location = boto.config.get('Boto', 'boto_location', '/usr/share/python-support/python-boto/boto')
            # Retry 'git pull' up to 10 times with a short sleep between
            # attempts: the instance's network may not be fully up yet.
            num_remaining_attempts = 10
            while num_remaining_attempts > 0:
                num_remaining_attempts -= 1
                try:
                    self.run('git pull', cwd=location)
                    # Success: zero the counter to leave the loop.
                    num_remaining_attempts = 0
                except Exception, e:
                    boto.log.info('git pull attempt failed with the following exception. Trying again in a bit. %s', e)
                    time.sleep(2)
            if update.find(':') >= 0:
                method, version = update.split(':')
            else:
                version = 'master'
            self.run('git checkout %s' % version, cwd=location)
        else:
            # first remove the symlink needed when running from subversion
            self.run('rm /usr/local/lib/python2.5/site-packages/boto')
            self.run('easy_install %s' % update)

    def fetch_s3_file(self, s3_file):
        # Download an s3:// URL into the working dir.  Returns the local
        # path, or None if the fetch failed (failure is logged, not raised:
        # bootstrap is deliberately best-effort here).
        try:
            from boto.utils import fetch_file
            f = fetch_file(s3_file)
            path = os.path.join(self.working_dir, s3_file.split("/")[-1])
            open(path, "w").write(f.read())
        except:
            boto.log.exception('Problem Retrieving file: %s' % s3_file)
            path = None
        return path

    def load_packages(self):
        # Install any packages listed in the [Pyami] 'packages' config
        # value (comma separated).  Entries beginning with "s3:" are
        # downloaded from S3 first.
        package_str = boto.config.get('Pyami', 'packages')
        if package_str:
            packages = package_str.split(',')
            for package in packages:
                package = package.strip()
                if package.startswith('s3:'):
                    package = self.fetch_s3_file(package)
                if package:
                    # if the "package" is really a .py file, it doesn't have to
                    # be installed, just being in the working dir is enough
                    if not package.endswith('.py'):
                        self.run('easy_install -Z %s' % package, exit_on_error=False)

    def main(self):
        # Bootstrap sequence: working dir, boto itself, then extra packages.
        self.create_working_dir()
        self.load_boto()
        self.load_packages()
        self.notify('Bootstrap Completed for %s' % boto.config.get_instance('instance-id'))
if __name__ == "__main__":
    # Entry point when executed directly by rc.local at instance startup.
    # because bootstrap starts before any logging configuration can be loaded from
    # the boto config files, we will manually enable logging to /var/log/boto.log
    boto.set_file_logger('bootstrap', '/var/log/boto.log')
    bs = Bootstrap()
    bs.main()
|
bsd-3-clause
|
sumyfly/nw.js
|
tests/automation/buff_from_string/internal/test.py
|
169
|
1430
|
#!/usr/bin/env python
from selenium import webdriver
import os
import traceback
import time
from selenium import webdriver
from platform import platform
import socket
from sys import argv
# TCP port used to report results back to the test harness; may be
# overridden by the first command-line argument.
port = 13013
if len(argv) >= 2:
    port = int(argv[1])

# Launch the NW.js app under test via chromedriver, which is expected to
# live next to this script.
path = os.path
dirname = path.abspath(path.dirname(__file__))
chromedriver_path = path.join(dirname,"chromedriver2_server")
nw = webdriver.Chrome(chromedriver_path,service_args=[dirname])

# Drive the page: type a message and click the send button.
input_element = nw.find_element_by_id("message")
input_element.send_keys("hello world")
send_button = nw.find_element_by_id("send-message-btn")
send_button.click()
time.sleep(1)  # give the page a moment to populate `results`

# Collect the in-page results object and ship it to the harness socket.
results = nw.execute_script('return JSON.stringify(results);')
connection = socket.create_connection(("localhost",port))
connection.sendall(results)
connection.close()
def kill_process_tree(pid):
    """Terminate the process `pid` and all of its descendants.

    Dispatches on the host OS: psutil on Linux/Darwin, ``taskkill`` on
    Windows.  Silently does nothing on unrecognised platforms.
    """
    machine_type = platform()
    if "Linux" in machine_type or "Darwin" in machine_type:
        import psutil
        # Bug fix: previously used the module-level global `spid`,
        # ignoring the `pid` parameter entirely.
        parent = psutil.Process(pid)
        for child in parent.get_children(recursive=True):
            child.kill()
        parent.kill()
        return
    elif 'Windows' in machine_type:
        import subprocess
        dev_null = open(os.devnull, "wb")
        # /T kills the whole tree, /F forces termination.
        subprocess.Popen(['taskkill', '/F', '/T', '/PID', str(pid)],
                         stdout=dev_null, stderr=dev_null)
        return
    else:
        # print "Unknow OS type"
        return
# Let the app settle before tearing down the chromedriver process tree.
time.sleep(2)
spid = nw.service.process.pid
kill_process_tree(spid)
|
mit
|
visi0nary/mt6735-kernel-3.10.61
|
tools/perf/scripts/python/futex-contention.py
|
11261
|
1486
|
# futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Per-thread state while a FUTEX_WAIT is in flight (cleared on sys_exit).
thread_thislock = {}   # tid -> futex uaddr currently being waited on
thread_blocktime = {}  # tid -> timestamp (ns) when the wait began
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
# (The redundant duplicate definition of process_names was removed.)
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
	nr, uaddr, op, val, utime, uaddr2, val3):
	# Record the start of a FUTEX_WAIT so that the matching sys_exit
	# handler can compute how long the thread was blocked.
	# FUTEX_CMD_MASK / FUTEX_WAIT / nsecs come from Util (star import).
	cmd = op & FUTEX_CMD_MASK
	if cmd != FUTEX_WAIT:
		return # we don't care about originators of WAKE events

	process_names[tid] = comm
	thread_thislock[tid] = uaddr
	thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
	nr, ret):
	# A futex syscall returned; if we recorded a matching FUTEX_WAIT
	# entry for this thread, accumulate the elapsed block time into
	# lock_waits and drop the per-thread state.
	# (dict.has_key() replaced with `in`: identical semantics in
	# Python 2 and required for Python 3.)
	if tid in thread_blocktime:
		elapsed = nsecs(s, ns) - thread_blocktime[tid]
		add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
		del thread_blocktime[tid]
		del thread_thislock[tid]
def trace_begin():
	# Called by perf once before any events are delivered.
	print "Press control+C to stop and show the summary"
def trace_end():
	# Called by perf after the last event: dump one summary line per
	# (thread, futex) pair that ever blocked.
	for (tid, lock) in lock_waits:
		min, max, avg, count = lock_waits[tid, lock]
		print "%s[%d] lock %x contended %d times, %d avg ns" % \
			(process_names[tid], tid, lock, count, avg)
|
gpl-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.