repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
|---|---|---|---|---|---|
DelazJ/QGIS
|
tests/src/python/test_qgsserver_wms_getlegendgraphic.py
|
21
|
41277
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer WMS GetLegendGraphic.
From build dir, run: ctest -R PyQgsServerWMSGetLegendGraphic -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
# Module authorship metadata, kept per QGIS test-suite convention.
__author__ = 'Alessandro Pasotti'
__date__ = '25/05/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
import os

# Needed on Qt 5 so that the serialization of XML is consistent among all
# executions. Deliberately set before the qgis/Qt imports below so the fixed
# hash seed is in effect when Qt initializes.
os.environ['QT_HASH_SEED'] = '1'
import re
import json
import urllib.request
import urllib.parse
import urllib.error
from qgis.testing import unittest
from qgis.PyQt.QtCore import QSize
import osgeo.gdal # NOQA
from test_qgsserver_wms import TestQgsServerWMSTestBase
from qgis.core import (
QgsProject,
QgsMarkerSymbol,
QgsRuleBasedRenderer,
QgsVectorLayer,
)
from qgis.server import (
QgsBufferServerRequest,
QgsBufferServerResponse,
QgsServer,
QgsServerRequest,
)
# Strip path and content length because path may vary
RE_STRIP_UNCHECKABLE = br'MAP=[^"]+|Content-Length: \d+'
# Loose matcher for attribute=value tokens (bytes). Not referenced in this
# chunk — presumably used by XML-comparison helpers in the base class;
# TODO confirm against test_qgsserver_wms.
RE_ATTRIBUTES = br'[^>\s]+=[^>\s]+'
class TestQgsServerWMSGetLegendGraphic(TestQgsServerWMSTestBase):
    """QGIS Server WMS Tests for GetLegendGraphic request"""

    # Set to True to re-generate reference files for this class
    # regenerate_reference = True

    @staticmethod
    def _make_qs(params):
        """Build a GET query string from *params*.

        Values are joined verbatim: callers pass values that are already
        URL-encoded where necessary (e.g. 'testlayer%20èé' or the result of
        urllib.parse.quote). Dict insertion order is preserved in the output.
        """
        return '?' + '&'.join('%s=%s' % (k, v) for k, v in params.items())

    def _assert_png_response(self, h, r):
        """Assert headers announce a PNG image and not an XML exception."""
        self.assertEqual(-1, h.find(b'Content-Type: text/xml; charset=utf-8'),
                         "Header: %s\nResponse:\n%s" % (h, r))
        self.assertNotEqual(-1, h.find(b'Content-Type: image/png'),
                            "Header: %s\nResponse:\n%s" % (h, r))

    def test_getLegendGraphics(self):
        """Test that does not return an exception but an image"""
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            # 'WIDTH': '20',  # optional
            # 'HEIGHT': '20',  # optional
            'LAYER': 'testlayer%20èé',
        })
        h, r = self._execute_request(qs)
        self._assert_png_response(h, r)

    def test_wms_GetLegendGraphic_LayerSpace(self):
        """Legend rendering with an explicit LAYERSPACE."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "FORMAT": "image/png",
            # "HEIGHT": "500",
            # "WIDTH": "500",
            "LAYERSPACE": "50.0",
            "LAYERFONTBOLD": "TRUE",
            "LAYERFONTSIZE": "30",
            "ITEMFONTBOLD": "TRUE",
            "ITEMFONTSIZE": "20",
            "LAYERFONTFAMILY": self.fontFamily,
            "ITEMFONTFAMILY": self.fontFamily,
            "LAYERTITLE": "TRUE",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerSpace", max_size_diff=QSize(1, 1))

    def test_wms_getLegendGraphics_invalid_parameters(self):
        """Test that does return an exception"""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello,db_point",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "RULE": "1",
            "BBOX": "-151.7,-38.9,51.0,78.0",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        # Contextual legend (BBOX) and a single RULE are mutually exclusive.
        self.assertIn(b"BBOX parameter cannot be combined with RULE", r)

    def test_wms_GetLegendGraphic_LayerTitleSpace(self):
        """Legend rendering with an explicit LAYERTITLESPACE."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "FORMAT": "image/png",
            # "HEIGHT": "500",
            # "WIDTH": "500",
            "LAYERTITLESPACE": "20.0",
            "LAYERFONTBOLD": "TRUE",
            "LAYERFONTSIZE": "30",
            "ITEMFONTBOLD": "TRUE",
            "ITEMFONTSIZE": "20",
            "LAYERFONTFAMILY": self.fontFamily,
            "ITEMFONTFAMILY": self.fontFamily,
            "LAYERTITLE": "TRUE",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerTitleSpace")

    def test_wms_GetLegendGraphic_ShowFeatureCount(self):
        """Legend rendering with SHOWFEATURECOUNT enabled."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "FORMAT": "image/png",
            # "HEIGHT": "500",
            # "WIDTH": "500",
            "LAYERTITLE": "TRUE",
            "LAYERFONTBOLD": "TRUE",
            "LAYERFONTSIZE": "30",
            "LAYERFONTFAMILY": self.fontFamily,
            "ITEMFONTFAMILY": self.fontFamily,
            "ITEMFONTBOLD": "TRUE",
            "ITEMFONTSIZE": "20",
            "SHOWFEATURECOUNT": "TRUE",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ShowFeatureCount", max_size_diff=QSize(1, 1))

    def test_wms_getLegendGraphics_layertitle(self):
        """Test that does not return an exception but an image"""
        print("TEST FONT FAMILY: ", self.fontFamily)
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            # 'WIDTH': '20',  # optional
            # 'HEIGHT': '20',  # optional
            'LAYER': u'testlayer%20èé',
            'LAYERFONTBOLD': 'TRUE',
            'LAYERFONTSIZE': '30',
            'ITEMFONTBOLD': 'TRUE',
            'LAYERFONTFAMILY': self.fontFamily,
            'ITEMFONTFAMILY': self.fontFamily,
            'ITEMFONTSIZE': '20',
            'LAYERTITLE': 'TRUE',
            'RULELABEL': 'TRUE'
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15))
        # no set of LAYERTITLE and RULELABEL means they are true
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            # 'WIDTH': '20',  # optional
            # 'HEIGHT': '20',  # optional
            'LAYER': u'testlayer%20èé',
            'LAYERFONTBOLD': 'TRUE',
            'LAYERFONTSIZE': '30',
            'ITEMFONTBOLD': 'TRUE',
            'LAYERFONTFAMILY': self.fontFamily,
            'ITEMFONTFAMILY': self.fontFamily,
            'ITEMFONTSIZE': '20'
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_test", 250, QSize(15, 15))
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            # 'WIDTH': '20',  # optional
            # 'HEIGHT': '20',  # optional
            'LAYER': u'testlayer%20èé',
            'LAYERTITLE': 'FALSE',
            'RULELABEL': 'FALSE'
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_test_layertitle_false", 250, QSize(15, 15))

    def test_wms_getLegendGraphics_rulelabel(self):
        """Test that does not return an exception but an image"""
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            'LAYER': u'testlayer%20èé',
            'LAYERFONTBOLD': 'TRUE',
            'LAYERFONTSIZE': '30',
            'ITEMFONTBOLD': 'TRUE',
            'ITEMFONTSIZE': '20',
            'LAYERFONTFAMILY': self.fontFamily,
            'ITEMFONTFAMILY': self.fontFamily,
            'RULELABEL': 'FALSE'
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_rulelabel_false", 250, QSize(15, 15))
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            'LAYER': u'testlayer%20èé',
            'LAYERFONTBOLD': 'TRUE',
            'LAYERFONTSIZE': '30',
            'ITEMFONTBOLD': 'TRUE',
            'ITEMFONTSIZE': '20',
            'LAYERFONTFAMILY': self.fontFamily,
            'ITEMFONTFAMILY': self.fontFamily,
            'LAYERTITLE': 'FALSE',
            'RULELABEL': 'TRUE'
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_rulelabel_true", 250, QSize(15, 15))
        # no set of RULELABEL means it is true
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            'LAYER': u'testlayer%20èé',
            'LAYERFONTBOLD': 'TRUE',
            'LAYERFONTSIZE': '30',
            'ITEMFONTBOLD': 'TRUE',
            'ITEMFONTSIZE': '20',
            'LAYERFONTFAMILY': self.fontFamily,
            'ITEMFONTFAMILY': self.fontFamily,
            'LAYERTITLE': 'FALSE'
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_rulelabel_notset", 250, QSize(15, 15))
        # RULELABEL AUTO for single symbol means it is removed
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            'LAYER': u'testlayer%20èé',
            'LAYERTITLE': 'FALSE',
            'RULELABEL': 'AUTO'
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_rulelabel_auto", 250, QSize(15, 15))

    def test_wms_getLegendGraphics_rule(self):
        """Test that does not return an exception but an image"""
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project_legend_rule.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            'LAYER': u'testlayer%20èé',
            'WIDTH': '20',
            'HEIGHT': '20',
            'RULE': 'rule0',
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule0", 250, QSize(15, 15))
        qs = self._make_qs({
            'MAP': self.testdata_path + "test_project_legend_rule.qgs",
            'SERVICE': 'WMS',
            'VERSION': '1.3.0',
            'REQUEST': 'GetLegendGraphic',
            'FORMAT': 'image/png',
            'LAYER': u'testlayer%20èé',
            'WIDTH': '20',
            'HEIGHT': '20',
            'RULE': 'rule1',
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_rule1", 250, QSize(15, 15))

    def test_wms_GetLegendGraphic_Basic(self):
        """Plain legend with titles and rule labels disabled."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Basic")

    def test_wms_GetLegendGraphic_Transparent(self):
        """Legend with a transparent background."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857",
            "TRANSPARENT": "TRUE"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Transparent")

    def test_wms_GetLegendGraphic_Background(self):
        """BGCOLOR accepted both as a named color and as a 0x hex value."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857",
            "BGCOLOR": "green"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background")
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857",
            "BGCOLOR": "0x008000"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Background_Hex")

    def test_wms_GetLegendGraphic_BoxSpace(self):
        """Legend rendering with an explicit BOXSPACE."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "BOXSPACE": "100",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_BoxSpace")

    def test_wms_GetLegendGraphic_SymbolSpace(self):
        """Legend rendering with an explicit SYMBOLSPACE."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "SYMBOLSPACE": "100",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSpace")

    def test_wms_GetLegendGraphic_IconLabelSpace(self):
        """Legend rendering with an explicit ICONLABELSPACE."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "ICONLABELSPACE": "100",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_IconLabelSpace")

    def test_wms_GetLegendGraphic_SymbolSize(self):
        """Legend rendering with explicit SYMBOLWIDTH/SYMBOLHEIGHT."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "SYMBOLWIDTH": "50",
            "SYMBOLHEIGHT": "30",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_SymbolSize")

    def test_wms_GetLegendGraphic_LayerFont(self):
        """Legend rendering with layer-title font styling."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "TRUE",
            "LAYERFONTBOLD": "TRUE",
            "LAYERFONTITALIC": "TRUE",
            "LAYERFONTSIZE": "30",
            "ITEMFONTBOLD": "TRUE",
            "ITEMFONTSIZE": "20",
            "LAYERFONTFAMILY": self.fontFamily,
            "ITEMFONTFAMILY": self.fontFamily,
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_LayerFont", max_size_diff=QSize(1, 1))

    def test_wms_GetLegendGraphic_ItemFont(self):
        """Legend rendering with item-label font styling."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "LAYERTITLE": "TRUE",
            "LAYERFONTBOLD": "TRUE",
            "LAYERFONTSIZE": "30",
            "ITEMFONTBOLD": "TRUE",
            "ITEMFONTITALIC": "TRUE",
            "ITEMFONTSIZE": "20",
            "LAYERFONTFAMILY": self.fontFamily,
            "ITEMFONTFAMILY": self.fontFamily,
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ItemFont", max_size_diff=QSize(1, 1))

    def test_wms_GetLegendGraphic_BBox(self):
        """Contextual legend filtered by BBOX (SRCWIDTH/SRCHEIGHT, CRS)."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello,db_point",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "SRCHEIGHT": "500",
            "SRCWIDTH": "500",
            "BBOX": "-151.7,-38.9,51.0,78.0",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox")

    def test_wms_GetLegendGraphic_BBox2(self):
        """Contextual legend filtered by BBOX (SRS spelling of the CRS key)."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello,db_point",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "SRCHEIGHT": "500",
            "SRCWIDTH": "500",
            "BBOX": "-76.08,-6.4,-19.38,38.04",
            "SRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox2")

    def test_wms_GetLegendGraphic_BBox_Fallback(self):
        """WIDTH/HEIGHT act as fallback for SRCWIDTH/SRCHEIGHT with BBOX."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello,db_point",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "BBOX": "-151.7,-38.9,51.0,78.0",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox")

    def test_wms_GetLegendGraphic_BBox2_Fallback(self):
        """WIDTH/HEIGHT fallback, SRS spelling of the CRS key."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello,db_point",
            "LAYERTITLE": "FALSE",
            "RULELABEL": "FALSE",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "BBOX": "-76.08,-6.4,-19.38,38.04",
            "SRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_BBox2")

    def test_wms_GetLegendGraphic_EmptyLegend(self):
        """A contextual legend with no visible feature must still be a PNG."""
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_contextual_legend.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "QGIS%20Server%20Hello%20World",
            "FORMAT": "image/png",
            "SRCHEIGHT": "840",
            "SRCWIDTH": "1226",
            "BBOX": "10.38450,-49.6370,73.8183,42.9461",
            "SRS": "EPSG:4326",
            "SCALE": "15466642"
        })
        h, r = self._execute_request(qs)
        self._assert_png_response(h, r)

    def test_wms_GetLegendGraphic_wmsRootName(self):
        """Test an unreported issue when a wmsRootName short name is set in the service capabilities"""
        # First test with the project title itself:
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_wms_grouped_layers.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "QGIS%20Server%20-%20Grouped%20Layer",
            "FORMAT": "image/png",
            "SRCHEIGHT": "840",
            "SRCWIDTH": "1226",
            "BBOX": "609152,5808188,625492,5814318",
            "SRS": "EPSG:25832",
            "SCALE": "38976"
        })
        h, r = self._execute_request(qs)
        self._assert_png_response(h, r)
        # Then test with the wmsRootName short name:
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_wms_grouped_layers_wmsroot.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "All_grouped_layers",
            "FORMAT": "image/png",
            "SRCHEIGHT": "840",
            "SRCWIDTH": "1226",
            "BBOX": "609152,5808188,625492,5814318",
            "SRS": "EPSG:25832",
            "SCALE": "38976"
        })
        h, r = self._execute_request(qs)
        self._assert_png_response(h, r)

    def test_wms_GetLegendGraphic_ScaleSymbol_Min(self):
        """Scale-dependent symbols clamp to the minimum size below min scale."""
        # 1:500000000 min
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer",
            "FORMAT": "image/png",
            "SRCHEIGHT": "550",
            "SRCWIDTH": "850",
            "BBOX": "-608.4,-1002.6,698.2,1019.0",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Min", max_size_diff=QSize(1, 1))
        # 1:1000000000 min
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer",
            "FORMAT": "image/png",
            "SRCHEIGHT": "550",
            "SRCWIDTH": "850",
            "BBOX": "-1261.7,-2013.5,1351.5,2029.9",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Min", max_size_diff=QSize(15, 15))

    def test_wms_GetLegendGraphic_ScaleSymbol_Scaled_01(self):
        """Scale-dependent symbols at an intermediate scale."""
        # 1:10000000 scaled
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer",
            "FORMAT": "image/png",
            "SRCHEIGHT": "550",
            "SRCWIDTH": "850",
            "BBOX": "31.8,-12.0,58.0,28.4",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Scaled_01", max_size_diff=QSize(15, 15))

    def test_wms_GetLegendGraphic_ScaleSymbol_Scaled_02(self):
        """Scale-dependent symbols at a second intermediate scale."""
        # 1:15000000 scaled
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer",
            "FORMAT": "image/png",
            "SRCHEIGHT": "550",
            "SRCWIDTH": "850",
            "BBOX": "25.3,-22.1,64.5,38.5",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Scaled_02", max_size_diff=QSize(15, 15))

    def test_wms_GetLegendGraphic_ScaleSymbol_Max(self):
        """Scale-dependent symbols clamp to the maximum size above max scale."""
        # 1:100000 max
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer",
            "FORMAT": "image/png",
            "SRCHEIGHT": "550",
            "SRCWIDTH": "850",
            "BBOX": "44.8,8.0,45.0,8.4",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Max", max_size_diff=QSize(15, 15))
        # 1:1000000 max
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer",
            "FORMAT": "image/png",
            "SRCHEIGHT": "550",
            "SRCWIDTH": "850",
            "BBOX": "43.6,6.2,46.2,10.2",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Max", max_size_diff=QSize(15, 15))

    def test_wms_GetLegendGraphic_ScaleSymbol_DefaultMapUnitsPerMillimeter(self):
        """Without BBOX/size the server falls back to the default map-units-per-mm."""
        # map units per mm on 1:20000000 with SRCHEIGHT=598&SRCWIDTH=1640&BBOX=16.5,-69.7,73.3,86.1 would be around what is set as default: 0.359 map units per mm
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer",
            "FORMAT": "image/png",
            "CRS": "EPSG:4326"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_DefaultMapUnitsPerMillimeter",
                             max_size_diff=QSize(15, 15))

    def test_wms_GetLegendGraphic_ScaleSymbol_Scaled_2056(self):
        """Scale-dependent symbols in a projected CRS (EPSG:2056)."""
        # 1:1000 scale on an EPSG:2056 calculating DPI that is around 96
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols_2056.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer_2056",
            "FORMAT": "image/png",
            "SRCHEIGHT": "600",
            "SRCWIDTH": "1500",
            "BBOX": "2662610.7,1268841.8,2663010.5,1269000.05",
            "CRS": "EPSG:2056"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_Scaled_2056", max_size_diff=QSize(15, 15))

    def test_wms_GetLegendGraphic_ScaleSymbol_DefaultScale_2056(self):
        """Default scale used when no BBOX/size is given (EPSG:2056 project)."""
        # 1:1000 as default value - it's not exactly the same result than passing the bbox and size because of exact DPI 96 (default)
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_scaledsymbols_2056.qgs',
            "SERVICE": "WMS",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "testlayer_2056",
            "FORMAT": "image/png",
            "CRS": "EPSG:2056"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ScaleSymbol_DefaultScale_2056", max_size_diff=QSize(15, 15))

    def test_wms_GetLegendGraphic_LAYERFONTCOLOR(self):
        """LAYERFONTCOLOR accepted as a named color."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857",
            "LAYERFONTCOLOR": "red"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_LAYERFONTCOLOR", max_size_diff=QSize(10, 2))

    def test_wms_GetLegendGraphic_ITEMFONTCOLOR(self):
        """ITEMFONTCOLOR accepted as a named color."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857",
            "ITEMFONTCOLOR": "red",
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ITEMFONTCOLOR", max_size_diff=QSize(10, 2))

    def test_wms_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR(self):
        """Both font-color parameters applied together."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857",
            "ITEMFONTCOLOR": "red",
            "LAYERFONTCOLOR": "blue"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR", max_size_diff=QSize(10, 2))

    def test_wms_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR_hex(self):
        """Hex font colors (URL-encoded '#') render like the named ones."""
        qs = self._make_qs({
            "MAP": urllib.parse.quote(self.projectPath),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "Country,Hello",
            "FORMAT": "image/png",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857",
            "ITEMFONTCOLOR": r"%23FF0000",
            "LAYERFONTCOLOR": r"%230000FF"
        })
        r, h = self._result(self._execute_request(qs))
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_ITEMFONTCOLOR_and_LAYERFONTCOLOR", max_size_diff=QSize(10, 2))

    def test_BBoxNoWidthNoHeight(self):
        """Test with BBOX and no width/height (like QGIS client does)"""
        qs = self._make_qs({
            "MAP": self.testdata_path + 'test_project_wms_grouped_nested_layers.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.3",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "areas%20and%20symbols",
            "FORMAT": "image/png",
            "CRS": "EPSG:4326",
            "BBOX": "52.44462990911360123,10.6723591605239374,52.44631832182876963,10.6795952150175264",
            "SLD_VERSION": "1.1",
        })
        r, h = self._result(self._execute_request(qs))
        self.assertNotIn(b'Exception', r)
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_NoWidthNoHeight", max_size_diff=QSize(10, 2))

    def testGetLegendGraphicRegression32020(self):
        """When two classes have the same symbol they both are shown in the contextual
        legend even if just one is actually visible in the map extent
        This test also checks for corner cases (literally) and reprojection.
        """
        # Visible is "Type 1"
        qs = self._make_qs({
            "MAP": self.testdata_path + 'bug_gh32020.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.3",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "test_layer",
            "FORMAT": "image/png",
            "CRS": "EPSG:4326",
            "BBOX": "0.05148830809982496426,-2.237691019614711507,0.8090701330998248952,-0.2050896957968479928",
            "SLD_VERSION": "1.1",
        })
        r, h = self._result(self._execute_request(qs))
        self.assertNotIn(b'Exception', r)
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Regression32020_type1", max_size_diff=QSize(10, 2))
        # Visible is "Type 2"
        qs = self._make_qs({
            "MAP": self.testdata_path + 'bug_gh32020.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.3",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "test_layer",
            "FORMAT": "image/png",
            "CRS": "EPSG:4326",
            "BBOX": "0.02893333257443075901,-0.2568334631786342026,1.544096982574430621,3.808369184457092604",
            "SLD_VERSION": "1.1",
        })
        r, h = self._result(self._execute_request(qs))
        self.assertNotIn(b'Exception', r)
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Regression32020_type2", max_size_diff=QSize(10, 2))
        # Visible is "Type 2" and 3
        qs = self._make_qs({
            "MAP": self.testdata_path + 'bug_gh32020.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.3",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "test_layer",
            "FORMAT": "image/png",
            "CRS": "EPSG:4326",
            "BBOX": "-0.6636370923817864753,-0.2886757815674259042,0.8515265576182133866,3.776526866068300681",
            "SLD_VERSION": "1.1",
        })
        r, h = self._result(self._execute_request(qs))
        self.assertNotIn(b'Exception', r)
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Regression32020_type2_and_3", max_size_diff=QSize(10, 2))
        # Visible is "Type 1" and 3
        qs = self._make_qs({
            "MAP": self.testdata_path + 'bug_gh32020.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.3",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "test_layer",
            "FORMAT": "image/png",
            "CRS": "EPSG:4326",
            "BBOX": "-0.5787242433450088264,-4.316729057749563836,0.9364394066549910356,-0.2515264101138368069",
            "SLD_VERSION": "1.1",
        })
        r, h = self._result(self._execute_request(qs))
        self.assertNotIn(b'Exception', r)
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Regression32020_type1_and_3", max_size_diff=QSize(10, 2))
        # Change CRS: 3857
        # Visible is "Type 2"
        qs = self._make_qs({
            "MAP": self.testdata_path + 'bug_gh32020.qgs',
            "SERVICE": "WMS",
            "VERSION": "1.3",
            "REQUEST": "GetLegendGraphic",
            "LAYER": "test_layer",
            "FORMAT": "image/png",
            "CRS": "EPSG:3857",
            "BBOX": "-28147.15420315234223,3960.286488616475253,424402.4530122592696,172632.4964886165108",
            "SLD_VERSION": "1.1",
        })
        r, h = self._result(self._execute_request(qs))
        self.assertNotIn(b'Exception', r)
        self._img_diff_error(r, h, "WMS_GetLegendGraphic_Regression32020_type2_3857", max_size_diff=QSize(10, 2))

    def test_wms_GetLegendGraphic_JSON(self):
        """JSON legend output for a single layer (compared against reference)."""
        self.wms_request_compare("GetLegendGraphic",
                                 "&LAYERS=testlayer%20%C3%A8%C3%A9"
                                 "&FORMAT=application/json",
                                 "wms_getlegendgraphic_json")

    def test_wms_GetLegendGraphic_JSON_multiple_layers(self):
        """JSON legend output for multiple layers (compared against reference)."""
        self.wms_request_compare("GetLegendGraphic",
                                 "&LAYERS=testlayer%20%C3%A8%C3%A9,testlayer3"
                                 "&FORMAT=application/json",
                                 "wms_getlegendgraphic_json_multiple_layers")

    def testJsonSymbolMaxMinScale(self):
        """Test min/max scale in symbol json export"""
        project = QgsProject()
        layer = QgsVectorLayer("Point?field=fldtxt:string",
                               "layer1", "memory")
        symbol = QgsMarkerSymbol.createSimple(
            {'name': 'square', 'color': 'red'})
        scale_min = 10000
        scale_max = 1000
        rule = QgsRuleBasedRenderer.Rule(symbol, scale_min, scale_max, '')
        rootrule = QgsRuleBasedRenderer.Rule(None)
        rootrule.appendChild(rule)
        layer.setRenderer(QgsRuleBasedRenderer(rootrule))
        project.addMapLayers([layer])
        server = QgsServer()
        request = QgsBufferServerRequest("/?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetLegendGraphic" +
                                         "&LAYERS=layer1" +
                                         "&FORMAT=application/json")
        response = QgsBufferServerResponse()
        server.handleRequest(request, response, project)
        j = json.loads(bytes(response.body()))
        node = j['nodes'][0]
        # Note the denominator naming: max scale -> scaleMaxDenom, min -> scaleMinDenom.
        self.assertEqual(node['scaleMaxDenom'], 1000)
        self.assertEqual(node['scaleMinDenom'], 10000)
# Allow running this test module directly (outside the ctest harness).
if __name__ == '__main__':
    unittest.main()
|
gpl-2.0
|
Shrhawk/edx-platform
|
cms/djangoapps/contentstore/features/video.py
|
134
|
8674
|
# pylint: disable=missing-docstring
from lettuce import world, step
from selenium.webdriver.common.keys import Keys
from xmodule.modulestore.django import modulestore
# CSS selectors for the video-player buttons, keyed by the short names used
# in the scenario steps below.
VIDEO_BUTTONS = {
    'CC': '.hide-subtitles',
    'volume': '.volume',
    'play': '.video_control.play',
    'pause': '.video_control.pause',
    'handout': '.video-handout.video-download-button a',
}
# CSS selectors for non-button parts of the player UI.
SELECTORS = {
    'spinner': '.video-wrapper .spinner',
    'controls': 'section.video-controls',
}

# We should wait 300 ms for event handler invocation + 200ms for safety.
DELAY = 0.5
@step('youtube stub server (.*) YouTube API')
def configure_youtube_api(_step, action):
    """Switch the YouTube API stub between proxying and blocking mode."""
    action = action.strip()
    # Map the scenario wording onto the stub's boolean flag.
    blocked_flag = {'proxies': False, 'blocks': True}
    if action not in blocked_flag:
        raise ValueError('Parameter `action` should be one of "proxies" or "blocks".')
    world.youtube.config['youtube_api_blocked'] = blocked_flag[action]
@step('I have created a Video component$')
def i_created_a_video_component(step):
    """Create a fresh video component in a new Studio unit and wait for it
    to finish initializing."""
    step.given('I am in Studio editing a new unit')
    world.create_component_instance(
        step=step,
        category='video',
    )
    world.wait_for_xmodule()
    world.disable_jquery_animations()
    world.wait_for_present('.is-initialized')
    world.wait(DELAY)
    world.wait_for_invisible(SELECTORS['spinner'])
    # When the YouTube API is blocked the player never shows its controls,
    # so only wait for them when the API is being proxied.
    if not world.youtube.config.get('youtube_api_blocked'):
        world.wait_for_visible(SELECTORS['controls'])
@step('I have created a Video component with subtitles$')
def i_created_a_video_with_subs(_step):
    """Create a video component using the default canned subtitle id."""
    default_sub_id = '3_yD_cEKoCk'
    _step.given('I have created a Video component with subtitles "{}"'.format(default_sub_id))
@step('I have created a Video component with subtitles "([^"]*)"$')
def i_created_a_video_with_subs_with_name(_step, sub_id):
    """Create a video component and attach the uploaded subtitles *sub_id*."""
    _step.given('I have created a Video component')
    # Store the current URL so we can return here
    video_url = world.browser.url
    # Upload subtitles for the video using the upload interface
    _step.given('I have uploaded subtitles "{}"'.format(sub_id))
    # Return to the video
    world.visit(video_url)
    world.wait_for_xmodule()
    # update .sub field with proper subs name (which mimics real Studio/XML
    # behavior); this is needed only for videos created in acceptance tests.
    # Opening and re-saving the editor forces the field to be written.
    _step.given('I edit the component')
    world.wait_for_ajax_complete()
    _step.given('I save changes')
    world.disable_jquery_animations()
    world.wait_for_present('.is-initialized')
    world.wait_for_invisible(SELECTORS['spinner'])
@step('I have uploaded subtitles "([^"]*)"$')
def i_have_uploaded_subtitles(_step, sub_id):
    """Upload the canned .sjson subtitle asset for *sub_id* via the
    files-and-uploads page."""
    asset_name = "subs_{}.srt.sjson".format(sub_id.strip())
    _step.given('I go to the files and uploads page')
    _step.given('I upload the test file "{}"'.format(asset_name))
@step('when I view the (.*) it does not have autoplay enabled$')
def does_not_autoplay(_step, video_type):
    """Assert the rendered player has autoplay disabled and shows Play."""
    world.wait(DELAY)
    world.wait_for_ajax_complete()
    player = world.css_find('.%s' % video_type)[0]
    # The attribute may be serialized in several spellings depending on the
    # template, so accept any falsy form.
    assert player['data-autoplay'] in (u'False', u'false', False)
    assert world.css_has_class('.video_control', 'play')
@step('creating a video takes a single click$')
def video_takes_a_single_click(_step):
    """A single click on the video button must instantiate the component."""
    module_css = '.xmodule_VideoModule'
    assert world.is_css_not_present(module_css)
    world.css_click("a[data-category='video']")
    assert world.is_css_present(module_css)
@step('I edit the component$')
def i_edit_the_component(_step):
    """Open the Studio editor for the current component."""
    world.edit_component()
@step('I have (hidden|toggled) captions$')
def hide_or_show_captions(step, shown):
    """Click the CC button once ('hidden') or twice ('toggled')."""
    button_css = 'a.hide-subtitles'
    if shown == 'hidden':
        world.css_click(button_css)
    if shown == 'toggled':
        world.css_click(button_css)
        # When we click the first time, a tooltip shows up. We want to
        # click the button rather than the tooltip, so move the mouse
        # away to make it disappear.
        button = world.css_find(button_css)
        # mouse_out is not implemented on firefox with selenium
        if not world.is_firefox:
            button.mouse_out()
        world.css_click(button_css)
@step('I have created a video with only XML data$')
def xml_only_video(step):
    """Create a video component directly in the modulestore with bare XML
    (no metadata), to exercise the XML-parsing code path."""
    # Create a new video *without* metadata. This requires a certain
    # amount of rummaging to make sure all the correct data is present
    step.given('I have clicked the new unit button')
    # Wait for the new unit to be created and to load the page
    world.wait(1)
    course = world.scenario_dict['COURSE']
    store = modulestore()
    # Attach the video to the first vertical found in the scenario course.
    parent_location = store.get_items(course.id, qualifiers={'category': 'vertical'})[0].location
    youtube_id = 'ABCDEFG'
    world.scenario_dict['YOUTUBE_ID'] = youtube_id
    # Create a new Video component, but ensure that it doesn't have
    # metadata. This allows us to test that we are correctly parsing
    # out XML
    world.ItemFactory.create(
        parent_location=parent_location,
        category='video',
        data='<video youtube="1.00:%s"></video>' % youtube_id,
        modulestore=store,
        user_id=world.scenario_dict["USER"].id
    )
@step('The correct Youtube video is shown$')
def the_youtube_video_is_shown(_step):
    """The player's data-streams attribute must carry the scenario's id."""
    video = world.css_find('.video').first
    expected_id = world.scenario_dict['YOUTUBE_ID']
    assert video['data-streams'].split(':')[1] == expected_id
@step('Make sure captions are (.+)$')
def set_captions_visibility_state(_step, captions_state):
    """Drive the CC toggle until captions reach the requested state."""
    closed_selector = '.closed .subtitles'
    world.wait_for_visible('.hide-subtitles')
    # Decide whether a click is needed to reach the desired state.
    if captions_state == 'closed':
        needs_click = world.is_css_not_present(closed_selector)
    else:
        needs_click = world.is_css_present(closed_selector)
    if needs_click:
        world.css_find('.hide-subtitles').click()
@step('I hover over button "([^"]*)"$')
def hover_over_button(_step, button):
    """Move the mouse pointer over the named video-player button."""
    selector = VIDEO_BUTTONS[button.strip()]
    world.css_find(selector).mouse_over()
@step('Captions (?:are|become) "([^"]*)"$')
def check_captions_visibility_state(_step, visibility_state):
    """Assert the subtitles pane is (in)visible as requested."""
    subtitles_visible = world.css_visible('.subtitles')
    if visibility_state == 'visible':
        assert subtitles_visible
    else:
        assert not subtitles_visible
def find_caption_line_by_data_index(index):
    """Return the first caption <li> whose data-index equals *index*."""
    selector = ".subtitles > li[data-index='{index}']".format(index=index)
    return world.css_find(selector).first
@step('I focus on caption line with data-index "([^"]*)"$')
def focus_on_caption_line(_step, index):
    """Tab-focus the caption line at *index* once captions are rendered."""
    world.wait_for_present('.video.is-captions-rendered')
    # Wait until the subtitle text is actually populated.
    world.wait_for(lambda _: world.css_text('.subtitles'), timeout=30)
    # NOTE(review): reaches into splinter's private _element to send raw keys.
    find_caption_line_by_data_index(int(index.strip()))._element.send_keys(Keys.TAB)
@step('I press "enter" button on caption line with data-index "([^"]*)"$')
def click_on_the_caption(_step, index):
    """Send ENTER to the caption line at *index* once captions are rendered."""
    world.wait_for_present('.video.is-captions-rendered')
    # Wait until the subtitle text is actually populated.
    world.wait_for(lambda _: world.css_text('.subtitles'), timeout=30)
    # NOTE(review): reaches into splinter's private _element to send raw keys.
    find_caption_line_by_data_index(int(index.strip()))._element.send_keys(Keys.ENTER)
@step('I see caption line with data-index "([^"]*)" has class "([^"]*)"$')
def caption_line_has_class(_step, index, className):
    """Assert that the caption line at *index* carries *className*."""
    selector = ".subtitles > li[data-index='{index}']".format(index=int(index.strip()))
    assert world.css_has_class(selector, className.strip())
@step('I see a range on slider$')
def see_a_range_slider_with_proper_range(_step):
    """The slider range only renders once the video is playing (pause
    button visible)."""
    world.wait_for_visible(VIDEO_BUTTONS['pause'])
    assert world.css_visible(".slider-range")
@step('I (.*) see video button "([^"]*)"$')
def do_not_see_or_not_button_video(_step, action, button_type):
    """Check that a named video button is absent ('do not') or visible ('can')."""
    world.wait(DELAY)
    world.wait_for_ajax_complete()
    verb = action.strip()
    selector = VIDEO_BUTTONS[button_type.strip()]
    if verb == 'do not':
        assert not world.is_css_present(selector)
    elif verb == 'can':
        assert world.css_visible(selector)
    else:
        raise ValueError('Parameter `action` should be one of "do not" or "can".')
@step('I click video button "([^"]*)"$')
def click_button_video(_step, button_type):
    """Click one of the named video-player buttons."""
    world.wait(DELAY)
    world.wait_for_ajax_complete()
    selector = VIDEO_BUTTONS[button_type.strip()]
    world.css_click(selector)
@step('I seek video to "([^"]*)" seconds$')
def seek_video_to_n_seconds(_step, seconds):
    """Jump the player to an absolute position via the player's JS API."""
    time = float(seconds.strip())
    # Drives the player's own seek handler directly; the selector and the
    # 'video-player-state' data key must match what the video xmodule attaches
    # to '.video'.
    jsCode = "$('.video').data('video-player-state').videoPlayer.onSlideSeek({{time: {0:f}}})".format(time)
    world.browser.execute_script(jsCode)
@step('I see video starts playing from "([^"]*)" position$')
def start_playing_video_from_n_seconds(_step, position):
    """Wait until the on-screen timer shows *position*."""
    # Only the first 4 characters of '.vidtime' are compared — presumably the
    # "M:SS" part of "M:SS / M:SS"; TODO confirm the timer format.
    world.wait_for(
        func=lambda _: world.css_html('.vidtime')[:4] == position.strip(),
        timeout=5
    )
|
agpl-3.0
|
ATSTI/administra
|
alterando_produtos_odoo/oerplib/service/inspect/on_change.py
|
4
|
4037
|
# -*- coding: UTF-8 -*-
##############################################################################
#
# OERPLib
# Copyright (C) 2013 Sébastien Alix.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""Provides the :func:`scan_on_change` function."""
import xml.etree.ElementTree
import re
# Parses "method_name(arg1, arg2, ...)" from a view's on_change attribute:
# group(1) is the method name, group(2) the raw comma-separated argument list.
# Raw string avoids the invalid-escape deprecation for '\(' in normal strings.
ON_CHANGE_RE = re.compile(r'^(.*?)\((.*)\)$')
def scan_on_change(oerp, models):
    """Scan all `on_change` methods detected among views of `models`.

    Returns a nested mapping:
        {model: {on_change_func: {view_name: {field: args}}}}
    """
    result = {}
    view_obj = oerp.get('ir.ui.view')
    model_data_obj = oerp.get('ir.model.data')
    for model in models:
        # Get all model views
        view_ids = view_obj.search(
            [('model', '=', model), ('type', 'in', ['form', 'tree'])])
        # Resolve each view to its fully-qualified "module.name" XML ID.
        model_data_ids = model_data_obj.search(
            [('res_id', 'in', view_ids), ('model', '=', 'ir.ui.view')])
        model_data = model_data_obj.read(
            model_data_ids, ['name', 'module', 'res_id'])
        for data in model_data:
            # For each view, find all `on_change` methods
            view_name = "{0}.{1}".format(data['module'], data['name'])
            # NOTE(review): tree views are included in the search above, but
            # the view is always fetched as 'form' here — confirm intent.
            view_data = oerp.execute(
                model, 'fields_view_get', data['res_id'], 'form')
            _scan_view(model, view_name, view_data, result)
    return result
def _scan_view(model, view_name, view_data, result):
    """Update `result` with all `on_change` methods detected
    on the view described by `view_data`.

    Fills the nested mapping
    result[model][on_change_func][view_name][field] = args.
    """
    if model not in result:
        result[model] = {}
    # Scan the main view description
    xml_root = xml.etree.ElementTree.fromstring(view_data['arch'])
    # NOTE: Python 2.6 does not support full XPath, it is
    # why the ".//field" pattern is used instead of ".//field[@on_change]"
    for elt in xml_root.findall(".//field"):
        if 'on_change' not in elt.attrib:
            continue
        match = ON_CHANGE_RE.match(elt.attrib['on_change'])
        if match:
            func = match.group(1)
            args = [arg.strip() for arg in match.group(2).split(',')]
            field = elt.attrib['name']
            # Lazily create the nested dict levels.
            if func not in result[model]:
                result[model][func] = {}
            if view_name not in result[model][func]:
                result[model][func][view_name] = {}
            if field not in result[model][func][view_name]:
                result[model][func][view_name][field] = []
            # NOTE(review): the list initialized above is *replaced*, not
            # appended to, so only the last-scanned argument list per field
            # survives — confirm whether accumulation was intended. Also,
            # map() here relies on Python 2 returning a list (the file uses
            # iteritems() below, so it targets Python 2).
            if args and args not in result[model][func][view_name][field]:
                args = map(_clean_arg, args)
                result[model][func][view_name][field] = args
    # Scan recursively all other sub-descriptions defined in the view
    # (inline one2many/many2many form and tree sub-views).
    # NOTE(review): the tree sub-view is only scanned when a form sub-view
    # also exists, because of the outer condition — confirm intent.
    for field_name, field_data in view_data['fields'].iteritems():
        if field_data.get('views') and field_data['views'].get('form'):
            model = field_data['relation']
            if field_data['views'].get('form'):
                _scan_view(
                    model, view_name, field_data['views']['form'], result)
            if field_data['views'].get('tree'):
                _scan_view(
                    model, view_name, field_data['views']['tree'], result)
    return result
def _clean_arg(arg):
return {
'False': False,
'True': True,
'None': None,
}.get(arg, arg)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
gpl-2.0
|
elggem/tensorflow_node
|
src/tensorflow_node/input/opencv.py
|
2
|
1954
|
# -*- coding: utf-8 -*-
import rospy
import numpy as np
import cv2
import os.path
import rospy
from tensorflow_node.input import InputLayer
class OpenCVInputLayer(InputLayer):
    """
    Contains OpenCV to feed in video feeds to TF.

    Decodes a video file frame by frame, converts each frame to a normalized
    grayscale patch of `output_size`, and delivers batches to a callback.
    """

    def __init__(self, batch_size=1, output_size=[28, 28], input="", number_of_frames=-1, repeat=True):
        # NOTE(review): `output_size=[28, 28]` is a mutable default and
        # `input` shadows the builtin — kept as-is for API compatibility.
        super(OpenCVInputLayer, self).__init__(batch_size, output_size, input)
        self.number_of_frames = number_of_frames
        self.repeat = repeat

    def feed_to(self, feed_callback):
        """Decode the video and call `feed_callback(feed_dict)` once per full
        batch of frames."""
        # TODO: there should be clearer distinction here, get these params via daemon.
        # NOTE(review): the constructor's number_of_frames/repeat are ignored
        # here in favor of the ROS parameters — confirm which should win.
        frames = rospy.get_param("tensorflow_node/inputlayer/params/number_of_frames")
        repeat = rospy.get_param("tensorflow_node/inputlayer/params/repeat")
        # check if file exists
        # NOTE(review): `self.input == 0` (a webcam index) is rejected here —
        # confirm whether camera capture was meant to be supported.
        if not os.path.isfile(self.input) or self.input == 0:
            raise IOError("OpenCVLayer - video file not found!")
        cap = cv2.VideoCapture(self.input)
        # frames == -1 means "no limit": the condition below never hits 0.
        while(frames != 0):
            isvalid, frame = cap.read()
            if (not isvalid):
                break
            res = cv2.resize(frame, (self.output_size[0], self.output_size[1]), interpolation=cv2.INTER_CUBIC)
            gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
            # Normalize 8-bit pixel values to [0, 1].
            gray = gray * 1.0 / 255
            # use grayscale image
            self.batch.append(gray.reshape([self.output_size[0], self.output_size[1], 1]))
            # batch is full
            # Can we use TF Queue for this?
            if len(self.batch) >= self.batch_size:
                feed_dict = {self.name + '/input:0': np.array(self.batch)}
                feed_callback(feed_dict)
                self.batch = []
                print("Inputlayer: Evaluated batch of size %i" % self.batch_size)
            if (frames > 0):
                frames -= 1
        cap.release()
        # NOTE(review): repetition is implemented via recursion; long sessions
        # on short clips could eventually hit the recursion limit.
        if (repeat):
            self.feed_to(feed_callback)
|
unlicense
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/models/container_service_credentials.py
|
2
|
1932
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ContainerServiceCredentials(Model):
    """Information about the Azure Container Registry which contains the images
    deployed to the cluster.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar acs_kube_config: The ACS kube config file.
    :vartype acs_kube_config: str
    :ivar service_principal_configuration: Service principal configuration
     used by Kubernetes.
    :vartype service_principal_configuration:
     ~azure.mgmt.machinelearningcompute.models.ServicePrincipalProperties
    :ivar image_pull_secret_name: The ACR image pull secret name which was
     created in Kubernetes.
    :vartype image_pull_secret_name: str
    """

    # All fields are server-populated: the msrest serializer omits readonly
    # attributes from outgoing requests and only fills them on deserialization.
    _validation = {
        'acs_kube_config': {'readonly': True},
        'service_principal_configuration': {'readonly': True},
        'image_pull_secret_name': {'readonly': True},
    }

    # Maps Python attribute names to their JSON wire keys and msrest types.
    _attribute_map = {
        'acs_kube_config': {'key': 'acsKubeConfig', 'type': 'str'},
        'service_principal_configuration': {'key': 'servicePrincipalConfiguration', 'type': 'ServicePrincipalProperties'},
        'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
    }

    def __init__(self):
        super(ContainerServiceCredentials, self).__init__()
        # Readonly attributes start as None; the service fills them in.
        self.acs_kube_config = None
        self.service_principal_configuration = None
        self.image_pull_secret_name = None
|
mit
|
seldon/django-flexi-auth
|
flexi_auth/exceptions.py
|
1
|
2184
|
# Copyright (C) 2011 REES Marche <http://www.reesmarche.org>
#
# This file is part of ``django-flexi-auth``.
# ``django-flexi-auth`` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# ``django-flexi-auth`` is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with ``django-flexi-auth``. If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext as _
class RoleNotAllowed(Exception):
    """Raised when a role name is not valid in the current application domain."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        message = _(u"Role %s is not allowed in current application domain")
        return message % self.value
class RoleParameterNotAllowed(Exception):
    """Raised when a parameter is not relatable to the given role."""

    def __init__(self, role_name, allowed_params, wrong_param):
        self.role_name = role_name
        self.allowed_params = allowed_params
        self.wrong_param = wrong_param

    def __str__(self):
        details = {
            'wp': self.wrong_param,
            'r': self.role_name,
            'pl': ", ".join(self.allowed_params),
        }
        return _(u"Wrong param '%(wp)s' provided for role %(r)s. Only %(pl)s are relatable to this role") % details
class RoleParameterWrongSpecsProvided(Exception):
    """Raised when the parameter specs supplied for a role are malformed."""

    def __init__(self, role_name, param_specs):
        self.role_name = role_name
        self.param_specs = param_specs

    def __str__(self):
        details = {'s': self.param_specs, 'r': self.role_name}
        return _(u"Wrong specs %(s)s for role %(r)s") % details
class WrongPermissionCheck(Exception):
    """Raised when a permission cannot be checked on an object in a context."""

    def __init__(self, perm, obj, context):
        self.perm = perm
        self.obj = obj
        self.context = context

    def __str__(self):
        details = {'perm': self.perm, 'obj': self.obj, 'ctx': self.context}
        return _(u"Can't check permission %(perm)s on object %(obj)s with respect to context %(ctx)s") % details
|
agpl-3.0
|
shoreflyer/cerbero
|
cerbero/packages/packagesstore.py
|
5
|
7907
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import imp
from collections import defaultdict
from cerbero.build.cookbook import CookBook
from cerbero.config import Platform, Architecture, Distro, DistroVersion,\
License
from cerbero.packages import package, PackageType
from cerbero.errors import FatalError, PackageNotFoundError
from cerbero.utils import _, shell, remove_list_duplicates
from cerbero.utils import messages as m
class PackagesStore (object):
'''
Stores a list of L{cerbero.packages.package.Package}
'''
PKG_EXT = '.package'
def __init__(self, config, load=True):
self._config = config
self._packages = {} # package_name -> package
self.cookbook = CookBook(config, load)
# used in tests to skip loading a dir with packages definitions
if not load:
return
if not os.path.exists(config.packages_dir):
raise FatalError(_("Packages dir %s not found") %
config.packages_dir)
self._load_packages()
def get_packages_list(self):
'''
Gets the list of packages
@return: list of packages
@rtype: list
'''
packages = self._packages.values()
packages.sort(key=lambda x: x.name)
return packages
def get_package(self, name):
'''
Gets a recipe from its name
@param name: name of the package
@type name: str
@return: the package instance
@rtype: L{cerbero.packages.package.Package}
'''
if name not in self._packages:
raise PackageNotFoundError(name)
return self._packages[name]
def get_package_deps(self, pkg, recursive=False):
'''
Gets the dependencies of a package
@param package: name of the package or package instance
@type package: L{cerbero.packages.package.Package}
@return: a list with the package dependencies
@rtype: list
'''
if isinstance(pkg, str):
pkg = self.get_package(pkg)
if isinstance(pkg, package.MetaPackage):
ret = self._list_metapackage_deps(pkg)
else:
ret = [self.get_package(x) for x in pkg.deps]
# get deps recursively
if recursive:
for p in ret:
ret.extend(self.get_package_deps(p, recursive))
return remove_list_duplicates(ret)
def get_package_files_list(self, name):
'''
Gets the list of files provided by a package
@param name: name of the package
@type name: str
@return: the package instance
@rtype: L{cerbero.packages.package.PackageBase}
'''
p = self.get_package(name)
if isinstance(p, package.MetaPackage):
return sorted(self._list_metapackage_files(p))
else:
return sorted(p.files_list())
def add_package(self, package):
'''
Adds a new package to the store
@param package: the package to add
@type package: L{cerbero.packages.package.PackageBase}
'''
self._packages[package.name] = package
def get_package_recipes_deps(self, package_name):
'''
Gets the list of recipes needed to create this package
@param name: name of the package
@type name: str
@return: a list with the recipes required to build this package
@rtype: list
'''
deps = self.get_package_deps(package_name)
return [self.cookbok.get_recipe(x) for x in deps]
def _list_metapackage_deps(self, metapackage):
def get_package_deps(package_name, visited=[], depslist=[]):
if package_name in visited:
return
visited.append(package_name)
p = self.get_package(package_name)
depslist.append(p)
for p_name in p.deps:
get_package_deps(p_name, visited, depslist)
return depslist
deps = []
for p in metapackage.list_packages():
deps.extend(get_package_deps(p, [], []))
return remove_list_duplicates(deps)
def _list_metapackage_files(self, metapackage):
l = []
for p in self._list_metapackage_deps(metapackage):
l.extend(p.files_list())
# remove duplicates and sort
return sorted(list(set(l)))
def _load_packages(self):
self._packages = {}
packages = defaultdict(dict)
repos = self._config.get_packages_repos()
for reponame, (repodir, priority) in repos.iteritems():
packages[int(priority)].update(
self._load_packages_from_dir(repodir))
# Add recipes by asceding pripority
for key in sorted(packages.keys()):
self._packages.update(packages[key])
def _load_packages_from_dir(self, repo):
packages_dict = {}
packages = shell.find_files('*%s' % self.PKG_EXT, repo)
packages.extend(shell.find_files('*/*%s' % self.PKG_EXT, repo))
try:
custom = None
m_path = os.path.join(repo, 'custom.py')
if os.path.exists(m_path):
custom = imp.load_source('custom', m_path)
except Exception, ex:
# import traceback
# traceback.print_exc()
# m.warning("Error loading package %s" % ex)
custom = None
for f in packages:
p = self._load_package_from_file(f, custom)
if p is None:
m.warning(_("Could not found a valid package in %s") % f)
continue
p.__file__ = os.path.abspath(f)
packages_dict[p.name] = p
return packages_dict
def _load_package_from_file(self, filepath, custom=None):
mod_name, file_ext = os.path.splitext(os.path.split(filepath)[-1])
try:
d = {'Platform': Platform, 'Architecture': Architecture,
'Distro': Distro, 'DistroVersion': DistroVersion,
'License': License, 'package': package,
'PackageType': PackageType, 'custom': custom}
execfile(filepath, d)
if 'Package' in d:
p = d['Package'](self._config, self, self.cookbook)
elif 'SDKPackage' in d:
p = d['SDKPackage'](self._config, self)
elif 'InstallerPackage' in d:
p = d['InstallerPackage'](self._config, self)
elif 'App' in d:
p = d['App'](self._config, self, self.cookbook)
else:
raise Exception('Package, SDKPackage, InstallerPackage or App '
'class not found')
p.prepare()
# reload files from package now that we called prepare that
# may have changed it
p.load_files()
return p
except Exception, ex:
import traceback
traceback.print_exc()
m.warning("Error loading package %s" % ex)
return None
|
lgpl-2.1
|
georgyberdyshev/ascend
|
pygtk/loading.py
|
1
|
4002
|
import sys
import config
import os.path
global have_gtk
have_gtk = False

#if not sys.executable.endswith("pythonw.exe"):
#	print "PYTHON PATH =",sys.path

# Try to bring up PyGTK at import time. On failure, show a native Windows
# message box (GTK itself is unusable), or print to the console elsewhere,
# then abort: the GUI cannot run without it.
try:
    import pygtk
    pygtk.require('2.0')
    import gtk
    have_gtk = True
except Exception,e:
    if sys.platform=="win32":
        try:
            from ctypes import c_int, WINFUNCTYPE, windll
            from ctypes.wintypes import HWND, LPCSTR, UINT
            prototype = WINFUNCTYPE(c_int, HWND, LPCSTR, LPCSTR, UINT)
            paramflags = (1, "hwnd", 0), (1, "text", "Hi"), (1, "caption", None), (1, "flags", 0)
            MessageBox = prototype(("MessageBoxA", windll.user32), paramflags)
            MessageBox(text="""ASCEND could not load PyGTK. Probably this is because
either PyGTK, PyCairo, PyGObject or GTK+ are not installed on your
system. Please try re-installing ASCEND to rectify the problem.""")
        except:
            # Best effort only: if even the message box fails, fall through
            # to the fatal exit below.
            pass
    else:
        print "PyGTK COULD NOT BE LOADED (is it installed? do you have X-Windows running?) (%s)" % str(e)
    sys.exit("FATAL ERROR: PyGTK not available, unable to start ASCEND.")
# Ordered log of loading status messages, kept so the GUI can display them
# after startup. (The module-level `global` statement is a no-op, kept as-is.)
global _messages
_messages = []

def get_messages():
    """Return the list of status messages recorded so far."""
    return _messages
def load_matplotlib(throw=False,alert=False):
    """Try to import matplotlib with the GTK backend (plus numpy and pylab).

    On failure: pop up a GTK error dialog when `alert` or `throw` is set, and
    raise RuntimeError when `throw` is set; otherwise only record a status
    message.
    """
    print_status("Loading python matplotlib")
    try:
        import matplotlib
        # The backend must be selected before pylab is imported.
        matplotlib.use('GTKAgg')
        try:
            print_status("Trying python numpy")
            import numpy
            print_status("","Using python module numpy")
        except ImportError:
            # numpy is optional here; matplotlib may still work without it.
            print_status("","FAILED to load Python module 'numpy'")
        import pylab
    except ImportError,e:
        print_status("","FAILED TO LOAD MATPLOTLIB")
        if alert or throw:
            _d = gtk.MessageDialog(None,gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT
                ,gtk.MESSAGE_ERROR,gtk.BUTTONS_CLOSE,"Plotting functions are not available unless you have 'matplotlib' installed.\n\nSee http://matplotlib.sf.net/\n\nFailed to load matplotlib (%s)" % str(e)
            )
            _d.run()
            _d.destroy()
            # Flush pending GTK events so the dialog is fully torn down.
            while gtk.events_pending():
                gtk.main_iteration(False)
        if throw:
            raise RuntimeError("Failed to load plotting library 'matplotlib'. (%s)" % str(e))
class LoadingWindow:
    """Splash window shown while ASCEND starts up.

    Falls back to console status output when the window has not been (or
    cannot be) created.
    """

    def __init__(self):
        # No window yet: print_status() writes to stderr until
        # create_window() succeeds.
        self.is_loading = False
        self.set_assets_dir(config.PYGTK_ASSETS)

    def set_assets_dir(self, d):
        """Record the assets directory and derive the splash image path."""
        self.assetsdir = d
        self.splashfile = os.path.join(self.assetsdir,'ascend-loading.png')

    def create_window(self):
        """Create and show the undecorated splash window.

        Requires GTK and the splash image; silently does nothing when the
        image is missing, exits fatally when GTK is unavailable.
        """
        if have_gtk:
            if os.path.exists(self.splashfile):
                _w = gtk.Window(gtk.WINDOW_TOPLEVEL)
                _w.set_decorated(False)
                _w.set_position(gtk.WIN_POS_CENTER)
                _a = gtk.Alignment()
                _a.set_padding(4,4,4,4)
                _w.add(_a)
                _a.show()
                _v = gtk.VBox()
                _a.add(_v)
                _v.show()
                _i = gtk.Image()
                self.image = _i
                _i.set_pixel_size(3)
                _i.set_from_file(self.splashfile)
                _v.add(_i)
                _i.show()
                _l = gtk.Label("Loading ASCEND...")
                _l.set_justify(gtk.JUSTIFY_CENTER)
                _v.add(_l)
                _l.show()
                _w.show()
                self.window = _w
                self.label = _l
                self.is_loading = True
                # Pump the event loop so the splash paints immediately.
                while gtk.events_pending():
                    gtk.main_iteration(False)
            else:
                pass
                #do nothing, don't know where splash file is yet
        else:
            print "DON'T HAVE GTK!"
            sys.exit(1)

    def print_status(self,status,msg=None):
        """Show *status* in the splash label (or on stderr when the window is
        not up) and append *msg* to the module-level message log."""
        if self.is_loading:
            if not sys.executable.endswith("pythonw.exe"):
                print status
            self.label.set_text(status)
            if msg is not None:
                try:
                    sys.stderr.write(msg+"\n")
                except IOError:
                    # stderr may be unavailable (e.g. under pythonw.exe)
                    pass
                _messages.append(msg)
            # Keep the UI responsive while loading continues.
            while gtk.events_pending():
                gtk.main_iteration(False)
        else:
            try:
                # Blank the current console line before writing the new status.
                sys.stderr.write("\r                                                 \r")
                if msg!=None:
                    sys.stderr.write(msg+"\r")
                    _messages.append(msg)
                sys.stderr.write(status+"...\r")
                sys.stderr.flush()
            except IOError:
                pass

    def complete(self):
        """Destroy the splash window; safe to call more than once."""
        if self.is_loading:
            self.window.destroy()
            self.is_loading = False
# Module-level singleton window plus thin wrapper functions, so client code
# can simply call loading.print_status(...) etc.
global w

def print_status(status,msg=None):
    """Forward a status line (and optional message) to the singleton window."""
    w.print_status(status,msg)

def complete():
    """Close the splash window once loading has finished."""
    w.complete()

def create_window(assetsdir=config.PYGTK_ASSETS):
    """(Re)configure the assets directory and show the splash window."""
    w.set_assets_dir(assetsdir)
    w.create_window()

# The splash is created immediately at import time.
w = LoadingWindow()
create_window()
|
gpl-2.0
|
andrewsomething/python-digitalocean
|
digitalocean/LoadBalancer.py
|
2
|
12222
|
# -*- coding: utf-8 -*-
from .baseapi import BaseAPI, GET, POST, DELETE
class StickySesions(object):
    """
    An object holding information on a LoadBalancer's sticky sessions settings.

    NOTE(review): the class name is misspelled ("StickySesions") but is part
    of the public API, so it is kept for backward compatibility.

    Args:
        type (str): The type of sticky sessions used. Can be "cookies" or
            "none"
        cookie_name (str, optional): The name used for the client cookie when
            using cookies for sticky session
        cookie_ttl_seconds (int, optional): The number of seconds until the
            cookie expires
    """

    def __init__(self, type='none', cookie_name='DO_LB',
                 cookie_ttl_seconds=300):
        self.type = type
        # Compare by equality, not identity: `type is 'cookies'` only worked
        # for interned literals and silently skipped the cookie attributes
        # when an equal string was built at runtime.
        if type == 'cookies':
            self.cookie_name = cookie_name
            self.cookie_ttl_seconds = cookie_ttl_seconds
class ForwardingRule(object):
    """
    One LoadBalancer forwarding rule: how traffic entering the balancer on a
    protocol/port pair is forwarded to the backend Droplets.

    Args:
        entry_protocol (str): protocol for traffic to the LoadBalancer
            ("http", "https" or "tcp")
        entry_port (int): port the LoadBalancer instance listens on
        target_protocol (str): protocol for traffic from the LoadBalancer to
            the backend Droplets ("http", "https" or "tcp")
        target_port (int): backend Droplet port traffic is sent to
        certificate_id (str, optional): ID of the TLS certificate used for
            SSL termination, if enabled
        tls_passthrough (bool, optional): whether SSL encrypted traffic is
            passed through to the backend Droplets
    """

    def __init__(self, entry_protocol=None, entry_port=None,
                 target_protocol=None, target_port=None, certificate_id="",
                 tls_passthrough=False):
        # Plain value object: every constructor argument is stored verbatim.
        self.entry_protocol = entry_protocol
        self.entry_port = entry_port
        self.target_protocol = target_protocol
        self.target_port = target_port
        self.certificate_id = certificate_id
        self.tls_passthrough = tls_passthrough
class HealthCheck(object):
    """
    LoadBalancer health-check settings.

    Args:
        protocol (str): protocol used for health checks ("http" or "tcp")
        port (int): backend Droplet port probed by health checks
        path (str): path a health check request is sent to
        check_interval_seconds (int): seconds between two consecutive checks
        response_timeout_seconds (int): seconds the LoadBalancer waits for a
            response before marking a check as failed
        healthy_threshold (int): consecutive failures before a backend
            Droplet is removed from the pool
        unhealthy_threshold (int): consecutive passes before a backend
            Droplet is re-added to the pool
    """

    def __init__(self, protocol='http', port=80, path='/',
                 check_interval_seconds=10, response_timeout_seconds=5,
                 healthy_threshold=5, unhealthy_threshold=3):
        # Plain value object: every constructor argument is stored verbatim.
        self.protocol = protocol
        self.port = port
        self.path = path
        self.check_interval_seconds = check_interval_seconds
        self.response_timeout_seconds = response_timeout_seconds
        self.healthy_threshold = healthy_threshold
        self.unhealthy_threshold = unhealthy_threshold
class LoadBalancer(BaseAPI):
    """
    An object representing a DigitalOcean Load Balancer.

    Attributes accepted at creation time:

    Args:
        name (str): The Load Balancer's name
        region (str): The slug identifier for a DigitalOcean region
        algorithm (str, optional): The load balancing algorithm to be
            used. Currently, it must be either "round_robin" or
            "least_connections"
        forwarding_rules (obj:`list`): A list of `ForwardingRule` objects
        health_check (obj, optional): A `HealthCheck` object
        sticky_sessions (obj, optional): A `StickySessions` object
        redirect_http_to_https (bool, optional): A boolean indicating
            whether HTTP requests to the Load Balancer should be
            redirected to HTTPS
        droplet_ids (obj:`list` of `int`): A list of IDs representing
            Droplets to be added to the Load Balancer (mutually
            exclusive with 'tag')
        tag (str): A string representing a DigitalOcean Droplet tag
            (mutually exclusive with 'droplet_ids')

    Attributes returned by API:
        name (str): The Load Balancer's name
        id (str): An unique identifier for a LoadBalancer
        ip (str): Public IP address for a LoadBalancer
        region (str): The slug identifier for a DigitalOcean region
        algorithm (str, optional): The load balancing algorithm to be
            used. Currently, it must be either "round_robin" or
            "least_connections"
        forwarding_rules (obj:`list`): A list of `ForwardingRule` objects
        health_check (obj, optional): A `HealthCheck` object
        sticky_sessions (obj, optional): A `StickySessions` object
        redirect_http_to_https (bool, optional): A boolean indicating
            whether HTTP requests to the Load Balancer should be
            redirected to HTTPS
        droplet_ids (obj:`list` of `int`): A list of IDs representing
            Droplets to be added to the Load Balancer
        tag (str): A string representing a DigitalOcean Droplet tag
        status (string): An indication the current state of the LoadBalancer
        created_at (str): The date and time when the LoadBalancer was created
    """
    def __init__(self, *args, **kwargs):
        self.id = None
        self.name = None
        # Fix: 'ip' is documented as an API-returned attribute but was
        # previously set only by create()/load(); initialize it so the
        # attribute always exists instead of raising AttributeError.
        self.ip = None
        self.region = None
        self.algorithm = None
        self.forwarding_rules = []
        self.health_check = None
        self.sticky_sessions = None
        self.redirect_http_to_https = False
        self.droplet_ids = []
        self.tag = None
        self.status = None
        self.created_at = None
        super(LoadBalancer, self).__init__(*args, **kwargs)
    @classmethod
    def get_object(cls, api_token, id):
        """
        Class method that will return a LoadBalancer object by its ID.

        Args:
            api_token (str): DigitalOcean API token
            id (str): Load Balancer ID
        """
        load_balancer = cls(token=api_token, id=id)
        load_balancer.load()
        return load_balancer
    def load(self):
        """
        Loads updated attributes for a LoadBalancer object.

        Requires self.id to be set. Returns self for chaining.
        """
        data = self.get_data('load_balancers/%s' % self.id, type=GET)
        load_balancer = data['load_balancer']
        # Setting the attribute values; nested API structures are
        # re-hydrated into their helper classes.
        for attr in load_balancer.keys():
            if attr == 'health_check':
                health_check = HealthCheck(**load_balancer['health_check'])
                setattr(self, attr, health_check)
            elif attr == 'sticky_sessions':
                # NOTE(review): 'StickySesions' mirrors the (misspelled)
                # helper class name defined elsewhere in this module;
                # do not "correct" it here alone.
                sticky_ses = StickySesions(**load_balancer['sticky_sessions'])
                setattr(self, attr, sticky_ses)
            elif attr == 'forwarding_rules':
                rules = list()
                for rule in load_balancer['forwarding_rules']:
                    rules.append(ForwardingRule(**rule))
                setattr(self, attr, rules)
            else:
                setattr(self, attr, load_balancer[attr])
        return self
    def create(self, *args, **kwargs):
        """
        Creates a new LoadBalancer.

        Note: Every argument and parameter given to this method will be
        assigned to the object.

        Args:
            name (str): The Load Balancer's name
            region (str): The slug identifier for a DigitalOcean region
            algorithm (str, optional): The load balancing algorithm to be
                used. Currently, it must be either "round_robin" or
                "least_connections"
            forwarding_rules (obj:`list`): A list of `ForwardingRule` objects
            health_check (obj, optional): A `HealthCheck` object
            sticky_sessions (obj, optional): A `StickySessions` object
            redirect_http_to_https (bool, optional): A boolean indicating
                whether HTTP requests to the Load Balancer should be
                redirected to HTTPS
            droplet_ids (obj:`list` of `int`): A list of IDs representing
                Droplets to be added to the Load Balancer (mutually
                exclusive with 'tag')
            tag (str): A string representing a DigitalOcean Droplet tag
                (mutually exclusive with 'droplet_ids')

        Raises:
            ValueError: if both 'droplet_ids' and 'tag' are set.
        """
        rules_dict = [rule.__dict__ for rule in self.forwarding_rules]
        params = {'name': self.name, 'region': self.region,
                  'forwarding_rules': rules_dict,
                  'redirect_http_to_https': self.redirect_http_to_https}
        if self.droplet_ids and self.tag:
            raise ValueError('droplet_ids and tag are mutually exclusive args')
        elif self.tag:
            params['tag'] = self.tag
        else:
            params['droplet_ids'] = self.droplet_ids
        # Optional pieces are serialized via their attribute dicts.
        if self.algorithm:
            params['algorithm'] = self.algorithm
        if self.health_check:
            params['health_check'] = self.health_check.__dict__
        if self.sticky_sessions:
            params['sticky_sessions'] = self.sticky_sessions.__dict__
        data = self.get_data('load_balancers/', type=POST, params=params)
        if data:
            self.id = data['load_balancer']['id']
            self.ip = data['load_balancer']['ip']
            self.algorithm = data['load_balancer']['algorithm']
            self.health_check = HealthCheck(
                **data['load_balancer']['health_check'])
            self.sticky_sessions = StickySesions(
                **data['load_balancer']['sticky_sessions'])
            self.droplet_ids = data['load_balancer']['droplet_ids']
            self.status = data['load_balancer']['status']
            self.created_at = data['load_balancer']['created_at']
        return self
    def destroy(self):
        """
        Destroy the LoadBalancer
        """
        return self.get_data('load_balancers/%s/' % self.id, type=DELETE)
    def add_droplets(self, droplet_ids):
        """
        Assign a LoadBalancer to a Droplet.

        Args:
            droplet_ids (obj:`list` of `int`): A list of Droplet IDs
        """
        return self.get_data(
            "load_balancers/%s/droplets/" % self.id,
            type=POST,
            params={"droplet_ids": droplet_ids}
        )
    def remove_droplets(self, droplet_ids):
        """
        Unassign a LoadBalancer.

        Args:
            droplet_ids (obj:`list` of `int`): A list of Droplet IDs
        """
        return self.get_data(
            "load_balancers/%s/droplets/" % self.id,
            type=DELETE,
            params={"droplet_ids": droplet_ids}
        )
    def add_forwarding_rules(self, forwarding_rules):
        """
        Adds new forwarding rules to a LoadBalancer.

        Args:
            forwarding_rules (obj:`list`): A list of `ForwardingRule` objects
        """
        rules_dict = [rule.__dict__ for rule in forwarding_rules]
        return self.get_data(
            "load_balancers/%s/forwarding_rules/" % self.id,
            type=POST,
            params={"forwarding_rules": rules_dict}
        )
    def remove_forwarding_rules(self, forwarding_rules):
        """
        Removes existing forwarding rules from a LoadBalancer.

        Args:
            forwarding_rules (obj:`list`): A list of `ForwardingRule` objects
        """
        rules_dict = [rule.__dict__ for rule in forwarding_rules]
        return self.get_data(
            "load_balancers/%s/forwarding_rules/" % self.id,
            type=DELETE,
            params={"forwarding_rules": rules_dict}
        )
    def __str__(self):
        return "%s" % (self.id)
|
lgpl-3.0
|
lindsayad/sympy
|
sympy/__init__.py
|
42
|
2442
|
"""SymPy is a Python library for symbolic mathematics. It aims to become a
full-featured computer algebra system (CAS) while keeping the code as
simple as possible in order to be comprehensible and easily extensible.
SymPy is written entirely in Python and does not require any external
libraries, except optionally for plotting support.
See the webpage for more information and documentation:
http://sympy.org
"""
from __future__ import absolute_import, print_function
del absolute_import, print_function
# SymPy requires mpmath; fail fast with an actionable message rather than
# an obscure ImportError from a deeper submodule.
try:
    import mpmath
except ImportError:
    raise ImportError("SymPy now depends on mpmath as an external library. "
    "See http://docs.sympy.org/latest/install.html#mpmath for more information.")
from sympy.release import __version__
import sys
# Enforce the minimum supported interpreter: CPython 2.6+ (any Python 3
# falls through to the else branch).
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
    raise ImportError("Python Version 2.6 or above is required for SymPy.")
else:  # Python 2.6+, 2.7 or 3.x: supported
    pass
    # Here we can also check for specific Python 3 versions, if needed
del sys
def __sympy_debug():
    """Return the boolean value of the SYMPY_DEBUG environment variable.

    Helper function so we don't import os globally.

    Raises:
        RuntimeError: if SYMPY_DEBUG is set to anything other than
            'True' or 'False'.
    """
    import os
    debug_str = os.getenv('SYMPY_DEBUG', 'False')
    if debug_str in ('True', 'False'):
        # Fix: avoid eval() on an environment-supplied string; the two
        # accepted values map directly to booleans.
        return debug_str == 'True'
    else:
        raise RuntimeError("unrecognized value for SYMPY_DEBUG: %s" %
                           debug_str)
SYMPY_DEBUG = __sympy_debug()
from .core import *
from .logic import *
from .assumptions import *
from .polys import *
from .series import *
from .functions import *
from .ntheory import *
from .concrete import *
from .simplify import *
from .sets import *
from .solvers import *
from .matrices import *
from .geometry import *
from .utilities import *
from .integrals import *
from .tensor import *
from .parsing import *
from .calculus import *
# Adds about .04-.05 seconds of import time
# from combinatorics import *
# This module is slow to import:
#from physics import units
from .plotting import plot, textplot, plot_backends, plot_implicit
from .printing import pretty, pretty_print, pprint, pprint_use_unicode, \
pprint_try_use_unicode, print_gtk, print_tree, pager_print, TableForm
from .printing import ccode, fcode, jscode, mathematica_code, octave_code, \
latex, preview
from .printing import python, print_python, srepr, sstr, sstrrepr
from .interactive import init_session, init_printing
evalf._create_evalf_table()
# This is slow to import:
#import abc
from .deprecated import *
|
bsd-3-clause
|
maftieu/CouchPotatoServer
|
libs/sqlalchemy/dialects/postgresql/pypostgresql.py
|
18
|
2155
|
# postgresql/pypostgresql.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the PostgreSQL database via py-postgresql.
Connecting
----------
URLs are of the form ``postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...]``.
"""
from sqlalchemy import util
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, PGExecutionContext
from sqlalchemy import processors
class PGNumeric(sqltypes.Numeric):
    """Numeric type for the py-postgresql driver.

    Bind parameters are sent as strings; result values are coerced to
    float only when ``asdecimal`` is disabled.
    """
    def bind_processor(self, dialect):
        # py-postgresql expects numeric bind parameters as strings.
        return processors.to_str
    def result_processor(self, dialect, coltype):
        # Decimal results come back natively; only coerce when the
        # caller asked for floats.
        if not self.asdecimal:
            return processors.to_float
        return None
class PGExecutionContext_pypostgresql(PGExecutionContext):
    # No driver-specific execution behavior is needed; the subclass
    # exists so the dialect has its own execution-context type.
    pass
class PGDialect_pypostgresql(PGDialect):
    """SQLAlchemy dialect for PostgreSQL via the py-postgresql DBAPI."""
    # DBAPI driver name reported by this dialect.
    driver = 'pypostgresql'
    # py-postgresql is unicode-native.
    supports_unicode_statements = True
    supports_unicode_binds = True
    description_encoding = None
    default_paramstyle = 'pyformat'
    # requires trunk version to support sane rowcounts
    # TODO: use dbapi version information to set this flag appropriately
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    execution_ctx_cls = PGExecutionContext_pypostgresql
    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric : PGNumeric,
            sqltypes.Float: sqltypes.Float, # prevents PGNumeric from being used
        }
    )
    @classmethod
    def dbapi(cls):
        """Import and return the py-postgresql DBAPI 2.0 module."""
        from postgresql.driver import dbapi20
        return dbapi20
    def create_connect_args(self, url):
        """Translate a SQLAlchemy URL into (args, kwargs) for connect().

        The port is coerced to int and defaults to 5432; extra URL query
        parameters are passed through as keyword arguments.
        """
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        else:
            opts['port'] = 5432
        opts.update(url.query)
        return ([], opts)
    def is_disconnect(self, e, connection, cursor):
        # py-postgresql signals a dead connection via this message text.
        return "connection is closed" in str(e)
dialect = PGDialect_pypostgresql
|
gpl-3.0
|
LamCiuLoeng/budget
|
budget/widgets/access.py
|
1
|
2516
|
# -*- coding: utf-8 -*-
from tw.api import WidgetsList
from tw.forms import TableForm
from tw.forms.fields import HiddenField
from budget.model import DBSession
from budget.model import *
from budget.widgets.components import *
#class SearchForm(RPACForm):
#
# group_options = DBSession.query(Group.group_id,Group.group_name).order_by(Group.group_name)
#
# permission_options = DBSession.query(Permission.permission_id,Permission.permission_name).order_by(Permission.permission_name)
#
# fields = [
# RPACText("user_name",label_text="User Name"),
# RPACSelect("group_id",label_text="Group Name",options=group_options),
# RPACSelect("permission_id",label_text="Permission Name",options=permission_options)
# ]
#
#access_search_form = SearchForm("search")
# Option-list factories for select fields.  Each is a lambda so the DB
# query runs when the form is rendered, not at import time; the leading
# (None, '') entry provides the blank choice.
group_options=lambda :[(None, '')]+[(g.group_id, g.group_name) for g in DBSession.query(Group.group_id, Group.group_name).order_by(Group.group_name)]
team_options=lambda :[(None, '')]+[(t.id, t.name) for t in DBSession.query(Team.id, Team.name).order_by(Team.name)]
region_options=lambda:[(None, '')]+[(r.id, r.name) for r in DBSession.query(Region.id, Region.name).order_by(Region.name)]
dba_customer_options=lambda:[(None, '')]+[(r.id, r.name) for r in DBSession.query(DBACustomer.id, DBACustomer.name).order_by(DBACustomer.name)]
# Search panel: filter users by (partial) user name.
class UserSearchForm(RPACForm):
    fields=[RPACText("user_name", label_text="User Name"), ]
user_search_form=UserSearchForm()
# Search panel: filter groups by (partial) group name.
class GroupSearchForm(RPACForm):
    fields=[RPACText("group_name", label_text="Group Name"), ]
group_search_form=GroupSearchForm()
# Search panel: filter permissions by (partial) permission name.
class PermissionSearchForm(RPACForm):
    fields=[RPACText("permission_name", label_text="Permission Name"), ]
permission_search_form=PermissionSearchForm()
# Create/update form for a user record; the hidden 'id' distinguishes
# update (id set) from create (id empty).
class UserForm(RPACForm):
    fields=[
            HiddenField("id"),
            RPACText("user_name", label_text="User Name"),
            RPACText("password", label_text="Password"),
            RPACText("email_address", label_text="E-mail Address"),
            RPACText("display_name", label_text="Display Name"),
            ]
user_update_form=UserForm()
# Create/update form for a group record.
class GroupForm(RPACForm):
    fields=[
            HiddenField("id"),
            RPACText("group_name", label_text="Group Name"),
            ]
group_update_form=GroupForm()
# Create/update form for a permission record.
class PermissionForm(RPACForm):
    fields=[
            HiddenField("id"),
            RPACText("permission_name", label_text="Permission Name"),
            ]
permission_update_form=PermissionForm()
|
mit
|
ubc/edx-platform
|
lms/djangoapps/shoppingcart/migrations/0002_auto__add_field_paidcourseregistration_mode.py
|
182
|
8687
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # South schema migration: adds the 'mode' column to
    # PaidCourseRegistration.  The frozen 'models' dict below is
    # auto-generated and must not be edited by hand.
    def forwards(self, orm):
        """Apply: add 'mode' as a SlugField defaulting to 'honor'.

        keep_default=False means existing rows get 'honor' once but the
        database column itself keeps no default afterwards.
        """
        # Adding field 'PaidCourseRegistration.mode'
        db.add_column('shoppingcart_paidcourseregistration', 'mode',
                      self.gf('django.db.models.fields.SlugField')(default='honor', max_length=50),
                      keep_default=False)
    def backwards(self, orm):
        """Revert: drop the 'mode' column again."""
        # Deleting field 'PaidCourseRegistration.mode'
        db.delete_column('shoppingcart_paidcourseregistration', 'mode')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'shoppingcart.certificateitem': {
'Meta': {'object_name': 'CertificateItem', '_ormbases': ['shoppingcart.OrderItem']},
'course_enrollment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['student.CourseEnrollment']"}),
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'shoppingcart.order': {
'Meta': {'object_name': 'Order'},
'bill_to_cardtype': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bill_to_ccnum': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_city': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_country': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_first': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_last': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'bill_to_postalcode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'bill_to_state': ('django.db.models.fields.CharField', [], {'max_length': '8', 'blank': 'True'}),
'bill_to_street1': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'bill_to_street2': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processor_reply_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'purchase_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.orderitem': {
'Meta': {'object_name': 'OrderItem'},
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'line_desc': ('django.db.models.fields.CharField', [], {'default': "'Misc. Item'", 'max_length': '1024'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shoppingcart.Order']"}),
'qty': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'cart'", 'max_length': '32'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '30', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'shoppingcart.paidcourseregistration': {
'Meta': {'object_name': 'PaidCourseRegistration', '_ormbases': ['shoppingcart.OrderItem']},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'mode': ('django.db.models.fields.SlugField', [], {'default': "'honor'", 'max_length': '50'}),
'orderitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['shoppingcart.OrderItem']", 'unique': 'True', 'primary_key': 'True'})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['shoppingcart']
|
agpl-3.0
|
mglukhikh/intellij-community
|
python/helpers/py2only/docutils/transforms/__init__.py
|
186
|
6505
|
# $Id: __init__.py 6433 2010-09-28 08:21:25Z milde $
# Authors: David Goodger <goodger@python.org>; Ueli Schlaepfer
# Copyright: This module has been placed in the public domain.
"""
This package contains modules for standard tree transforms available
to Docutils components. Tree transforms serve a variety of purposes:
- To tie up certain syntax-specific "loose ends" that remain after the
initial parsing of the input plaintext. These transforms are used to
supplement a limited syntax.
- To automate the internal linking of the document tree (hyperlink
references, footnote references, etc.).
- To extract useful information from the document tree. These
transforms may be used to construct (for example) indexes and tables
of contents.
Each transform is an optional step that a Docutils component may
choose to perform on the parsed document.
"""
__docformat__ = 'reStructuredText'
from docutils import languages, ApplicationError, TransformSpec
# Application-level error raised for transform failures.
class TransformError(ApplicationError): pass
class Transform:
    """Abstract base class for Docutils tree transform components."""
    # Numerical priority of this transform, 0 through 999 (subclasses
    # must override this).
    default_priority = None
    def __init__(self, document, startnode=None):
        """Prepare an in-place transform of *document*.

        `startnode` is the node at which the transform begins; it stays
        ``None`` for transforms that apply to the document as a whole.
        """
        # Document tree to transform, plus the optional starting node.
        self.document = document
        self.startnode = startnode
        # Language module local to this document.
        self.language = languages.get_language(
            document.settings.language_code, document.reporter)
    def apply(self, **kwargs):
        """Override to apply the transform to the document tree."""
        raise NotImplementedError('subclass must override this method')
class Transformer(TransformSpec):
    """
    Stores transforms (`Transform` classes) and applies them to document
    trees. Also keeps track of components by component type name.
    """
    def __init__(self, document):
        self.transforms = []
        """List of transforms to apply.  Each item is a 4-tuple:
        ``(priority string, transform class, pending node or None,
        kwargs dict)``."""
        self.unknown_reference_resolvers = []
        """List of hook functions which assist in resolving references"""
        self.document = document
        """The `nodes.document` object this Transformer is attached to."""
        self.applied = []
        """Transforms already applied, in order."""
        self.sorted = 0
        """Boolean: is `self.transforms` sorted?"""
        self.components = {}
        """Mapping of component type name to component object.  Set by
        `self.populate_from_components()`."""
        self.serialno = 0
        """Internal serial number to keep track of the add order of
        transforms."""
    def add_transform(self, transform_class, priority=None, **kwargs):
        """
        Store a single transform.  Use `priority` to override the default.
        `kwargs` is a dictionary whose contents are passed as keyword
        arguments to the `apply` method of the transform.  This can be used to
        pass application-specific data to the transform instance.
        """
        if priority is None:
            priority = transform_class.default_priority
        priority_string = self.get_priority_string(priority)
        self.transforms.append(
            (priority_string, transform_class, None, kwargs))
        # Adding invalidates the current sort order.
        self.sorted = 0
    def add_transforms(self, transform_list):
        """Store multiple transforms, with default priorities."""
        for transform_class in transform_list:
            priority_string = self.get_priority_string(
                transform_class.default_priority)
            self.transforms.append(
                (priority_string, transform_class, None, {}))
        self.sorted = 0
    def add_pending(self, pending, priority=None):
        """Store a transform with an associated `pending` node."""
        transform_class = pending.transform
        if priority is None:
            priority = transform_class.default_priority
        priority_string = self.get_priority_string(priority)
        self.transforms.append(
            (priority_string, transform_class, pending, {}))
        self.sorted = 0
    def get_priority_string(self, priority):
        """
        Return a string, `priority` combined with `self.serialno`.

        This ensures FIFO order on transforms with identical priority.
        """
        self.serialno += 1
        return '%03d-%03d' % (priority, self.serialno)
    def populate_from_components(self, components):
        """
        Store each component's default transforms, with default priorities.
        Also, store components by type name in a mapping for later lookup.
        """
        for component in components:
            if component is None:
                continue
            self.add_transforms(component.get_transforms())
            self.components[component.component_type] = component
        self.sorted = 0
        # Set up all of the reference resolvers for this transformer. Each
        # component of this transformer is able to register its own helper
        # functions to help resolve references.
        unknown_reference_resolvers = []
        for i in components:
            unknown_reference_resolvers.extend(i.unknown_reference_resolvers)
        # Sort resolvers by their declared priority (decorate-sort).
        decorated_list = [(f.priority, f) for f in unknown_reference_resolvers]
        decorated_list.sort()
        self.unknown_reference_resolvers.extend([f[1] for f in decorated_list])
    def apply_transforms(self):
        """Apply all of the stored transforms, in priority order."""
        self.document.reporter.attach_observer(
            self.document.note_transform_message)
        while self.transforms:
            if not self.sorted:
                # Unsorted initially, and whenever a transform is added.
                # Reverse so pop() yields the lowest priority string first.
                self.transforms.sort()
                self.transforms.reverse()
                self.sorted = 1
            priority, transform_class, pending, kwargs = self.transforms.pop()
            transform = transform_class(self.document, startnode=pending)
            transform.apply(**kwargs)
            self.applied.append((priority, transform_class, pending, kwargs))
|
apache-2.0
|
iver333/phantomjs
|
src/breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/wire_format_test.py
|
571
|
10848
|
#! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for google.protobuf.internal.wire_format."""
__author__ = 'robinson@google.com (Will Robinson)'
import unittest
from google.protobuf import message
from google.protobuf.internal import wire_format
class WireFormatTest(unittest.TestCase):
def testPackTag(self):
field_number = 0xabc
tag_type = 2
self.assertEqual((field_number << 3) | tag_type,
wire_format.PackTag(field_number, tag_type))
PackTag = wire_format.PackTag
# Number too high.
self.assertRaises(message.EncodeError, PackTag, field_number, 6)
# Number too low.
self.assertRaises(message.EncodeError, PackTag, field_number, -1)
def testUnpackTag(self):
# Test field numbers that will require various varint sizes.
for expected_field_number in (1, 15, 16, 2047, 2048):
for expected_wire_type in range(6): # Highest-numbered wiretype is 5.
field_number, wire_type = wire_format.UnpackTag(
wire_format.PackTag(expected_field_number, expected_wire_type))
self.assertEqual(expected_field_number, field_number)
self.assertEqual(expected_wire_type, wire_type)
self.assertRaises(TypeError, wire_format.UnpackTag, None)
self.assertRaises(TypeError, wire_format.UnpackTag, 'abc')
self.assertRaises(TypeError, wire_format.UnpackTag, 0.0)
self.assertRaises(TypeError, wire_format.UnpackTag, object())
def testZigZagEncode(self):
Z = wire_format.ZigZagEncode
self.assertEqual(0, Z(0))
self.assertEqual(1, Z(-1))
self.assertEqual(2, Z(1))
self.assertEqual(3, Z(-2))
self.assertEqual(4, Z(2))
self.assertEqual(0xfffffffe, Z(0x7fffffff))
self.assertEqual(0xffffffff, Z(-0x80000000))
self.assertEqual(0xfffffffffffffffe, Z(0x7fffffffffffffff))
self.assertEqual(0xffffffffffffffff, Z(-0x8000000000000000))
self.assertRaises(TypeError, Z, None)
self.assertRaises(TypeError, Z, 'abcd')
self.assertRaises(TypeError, Z, 0.0)
self.assertRaises(TypeError, Z, object())
def testZigZagDecode(self):
Z = wire_format.ZigZagDecode
self.assertEqual(0, Z(0))
self.assertEqual(-1, Z(1))
self.assertEqual(1, Z(2))
self.assertEqual(-2, Z(3))
self.assertEqual(2, Z(4))
self.assertEqual(0x7fffffff, Z(0xfffffffe))
self.assertEqual(-0x80000000, Z(0xffffffff))
self.assertEqual(0x7fffffffffffffff, Z(0xfffffffffffffffe))
self.assertEqual(-0x8000000000000000, Z(0xffffffffffffffff))
self.assertRaises(TypeError, Z, None)
self.assertRaises(TypeError, Z, 'abcd')
self.assertRaises(TypeError, Z, 0.0)
self.assertRaises(TypeError, Z, object())
def NumericByteSizeTestHelper(self, byte_size_fn, value, expected_value_size):
# Use field numbers that cause various byte sizes for the tag information.
for field_number, tag_bytes in ((15, 1), (16, 2), (2047, 2), (2048, 3)):
expected_size = expected_value_size + tag_bytes
actual_size = byte_size_fn(field_number, value)
self.assertEqual(expected_size, actual_size,
'byte_size_fn: %s, field_number: %d, value: %r\n'
'Expected: %d, Actual: %d'% (
byte_size_fn, field_number, value, expected_size, actual_size))
def testByteSizeFunctions(self):
# Test all numeric *ByteSize() functions.
NUMERIC_ARGS = [
# Int32ByteSize().
[wire_format.Int32ByteSize, 0, 1],
[wire_format.Int32ByteSize, 127, 1],
[wire_format.Int32ByteSize, 128, 2],
[wire_format.Int32ByteSize, -1, 10],
# Int64ByteSize().
[wire_format.Int64ByteSize, 0, 1],
[wire_format.Int64ByteSize, 127, 1],
[wire_format.Int64ByteSize, 128, 2],
[wire_format.Int64ByteSize, -1, 10],
# UInt32ByteSize().
[wire_format.UInt32ByteSize, 0, 1],
[wire_format.UInt32ByteSize, 127, 1],
[wire_format.UInt32ByteSize, 128, 2],
[wire_format.UInt32ByteSize, wire_format.UINT32_MAX, 5],
# UInt64ByteSize().
[wire_format.UInt64ByteSize, 0, 1],
[wire_format.UInt64ByteSize, 127, 1],
[wire_format.UInt64ByteSize, 128, 2],
[wire_format.UInt64ByteSize, wire_format.UINT64_MAX, 10],
# SInt32ByteSize().
[wire_format.SInt32ByteSize, 0, 1],
[wire_format.SInt32ByteSize, -1, 1],
[wire_format.SInt32ByteSize, 1, 1],
[wire_format.SInt32ByteSize, -63, 1],
[wire_format.SInt32ByteSize, 63, 1],
[wire_format.SInt32ByteSize, -64, 1],
[wire_format.SInt32ByteSize, 64, 2],
# SInt64ByteSize().
[wire_format.SInt64ByteSize, 0, 1],
[wire_format.SInt64ByteSize, -1, 1],
[wire_format.SInt64ByteSize, 1, 1],
[wire_format.SInt64ByteSize, -63, 1],
[wire_format.SInt64ByteSize, 63, 1],
[wire_format.SInt64ByteSize, -64, 1],
[wire_format.SInt64ByteSize, 64, 2],
# Fixed32ByteSize().
[wire_format.Fixed32ByteSize, 0, 4],
[wire_format.Fixed32ByteSize, wire_format.UINT32_MAX, 4],
# Fixed64ByteSize().
[wire_format.Fixed64ByteSize, 0, 8],
[wire_format.Fixed64ByteSize, wire_format.UINT64_MAX, 8],
# SFixed32ByteSize().
[wire_format.SFixed32ByteSize, 0, 4],
[wire_format.SFixed32ByteSize, wire_format.INT32_MIN, 4],
[wire_format.SFixed32ByteSize, wire_format.INT32_MAX, 4],
# SFixed64ByteSize().
[wire_format.SFixed64ByteSize, 0, 8],
[wire_format.SFixed64ByteSize, wire_format.INT64_MIN, 8],
[wire_format.SFixed64ByteSize, wire_format.INT64_MAX, 8],
# FloatByteSize().
[wire_format.FloatByteSize, 0.0, 4],
[wire_format.FloatByteSize, 1000000000.0, 4],
[wire_format.FloatByteSize, -1000000000.0, 4],
# DoubleByteSize().
[wire_format.DoubleByteSize, 0.0, 8],
[wire_format.DoubleByteSize, 1000000000.0, 8],
[wire_format.DoubleByteSize, -1000000000.0, 8],
# BoolByteSize().
[wire_format.BoolByteSize, False, 1],
[wire_format.BoolByteSize, True, 1],
# EnumByteSize().
[wire_format.EnumByteSize, 0, 1],
[wire_format.EnumByteSize, 127, 1],
[wire_format.EnumByteSize, 128, 2],
[wire_format.EnumByteSize, wire_format.UINT32_MAX, 5],
]
for args in NUMERIC_ARGS:
self.NumericByteSizeTestHelper(*args)
# Test strings and bytes.
for byte_size_fn in (wire_format.StringByteSize, wire_format.BytesByteSize):
# 1 byte for tag, 1 byte for length, 3 bytes for contents.
self.assertEqual(5, byte_size_fn(10, 'abc'))
# 2 bytes for tag, 1 byte for length, 3 bytes for contents.
self.assertEqual(6, byte_size_fn(16, 'abc'))
# 2 bytes for tag, 2 bytes for length, 128 bytes for contents.
self.assertEqual(132, byte_size_fn(16, 'a' * 128))
# Test UTF-8 string byte size calculation.
# 1 byte for tag, 1 byte for length, 8 bytes for content.
self.assertEqual(10, wire_format.StringByteSize(
5, unicode('\xd0\xa2\xd0\xb5\xd1\x81\xd1\x82', 'utf-8')))
class MockMessage(object):
def __init__(self, byte_size):
self.byte_size = byte_size
def ByteSize(self):
return self.byte_size
message_byte_size = 10
mock_message = MockMessage(byte_size=message_byte_size)
# Test groups.
# (2 * 1) bytes for begin and end tags, plus message_byte_size.
self.assertEqual(2 + message_byte_size,
wire_format.GroupByteSize(1, mock_message))
# (2 * 2) bytes for begin and end tags, plus message_byte_size.
self.assertEqual(4 + message_byte_size,
wire_format.GroupByteSize(16, mock_message))
# Test messages.
# 1 byte for tag, plus 1 byte for length, plus contents.
self.assertEqual(2 + mock_message.byte_size,
wire_format.MessageByteSize(1, mock_message))
# 2 bytes for tag, plus 1 byte for length, plus contents.
self.assertEqual(3 + mock_message.byte_size,
wire_format.MessageByteSize(16, mock_message))
# 2 bytes for tag, plus 2 bytes for length, plus contents.
mock_message.byte_size = 128
self.assertEqual(4 + mock_message.byte_size,
wire_format.MessageByteSize(16, mock_message))
# Test message set item byte size.
# 4 bytes for tags, plus 1 byte for length, plus 1 byte for type_id,
# plus contents.
mock_message.byte_size = 10
self.assertEqual(mock_message.byte_size + 6,
wire_format.MessageSetItemByteSize(1, mock_message))
# 4 bytes for tags, plus 2 bytes for length, plus 1 byte for type_id,
# plus contents.
mock_message.byte_size = 128
self.assertEqual(mock_message.byte_size + 7,
wire_format.MessageSetItemByteSize(1, mock_message))
# 4 bytes for tags, plus 2 bytes for length, plus 2 byte for type_id,
# plus contents.
self.assertEqual(mock_message.byte_size + 8,
wire_format.MessageSetItemByteSize(128, mock_message))
# Too-long varint.
self.assertRaises(message.EncodeError,
wire_format.UInt64ByteSize, 1, 1 << 128)
# Run the wire_format byte-size test suite when this module is executed
# directly (e.g. `python wire_format_test.py`).
if __name__ == '__main__':
  unittest.main()
|
bsd-3-clause
|
ruiliLaMeilleure/11ad-backhaul
|
src/aodv/bindings/callbacks_list.py
|
95
|
1688
|
# Auto-generated table consumed by the ns-3 Python-bindings scanner: each row
# describes one ns3::Callback template instantiation as
# [return type, arg1 ... arg9], padded out to nine arguments with 'ns3::empty'.
# Do not hand-edit individual rows; regenerate with the bindings scanner.
callback_classes = [
    ['void', 'ns3::Ptr<ns3::Packet const>', 'ns3::Ipv4Header const&', 'ns3::Socket::SocketErrno', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Ipv4Route>', 'ns3::Ptr<ns3::Packet const>', 'ns3::Ipv4Header const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::WifiMacHeader const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ipv4Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::ArpCache const>', 'ns3::Ipv4Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['bool', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
    ['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
|
gpl-2.0
|
vitaly-krugl/nupic
|
src/nupic/support/__init__.py
|
10
|
15849
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Package containing modules that are used internally by Numenta Python
tools and plugins to extend standard library functionality.
These modules should NOT be used by client applications.
"""
from __future__ import with_statement
# Standard imports
import os
import sys
import inspect
import logging
import logging.config
import logging.handlers
from platform import python_version
import struct
from StringIO import StringIO
import time
import traceback
from pkg_resources import resource_string, resource_filename
from configuration import Configuration
from nupic.support.fs_helpers import makeDirectoryFromAbsolutePath
# Local imports
def getCallerInfo(depth=2):
  """Return info about a function on the current call stack.

  The result is the tuple (function/method name, filename, class name).
  The class name is None when the caller is a plain function rather than an
  object method.

  :param depth: (int) how far back in the callstack to go to extract the
      caller info
  """
  frame = sys._getframe(depth)
  code = frame.f_code
  # getargvalues -> (arg names, *args name, **kwargs name, frame locals)
  argNames, _, _, frameLocals = inspect.getargvalues(frame)
  callerClass = None
  if argNames:
    # If the frame has a first positional argument (typically 'self' for a
    # method), report the class of the object bound to it.
    firstArg = frameLocals[argNames[0]]
    callerClass = firstArg.__class__.__name__
  return (code.co_name, code.co_filename, callerClass)
def title(s=None, additional='', stream=sys.stdout):
  """Utility function to display nice titles

  It automatically extracts the name of the function/method it is called from
  and you can add additional text. title() will then print the name
  of the function/method and the additional text surrounded by two lines
  of dashes. If you don't want the name of the function, you can provide
  alternative text (regardless of the additional text)

  :param s: (string) text to display, uses the function name and arguments by
         default
  :param additional: (string) extra text to display (not needed if s is not
         None)
  :param stream: (stream) the stream to print to. By default goes to standard
         output

  Examples:

  .. code-block:: python

      def foo():
        title(additional='(), this is cool!!!')

  will display:

  .. code-block:: text

      ----------------------
      foo(), this is cool!!!
      ----------------------
  """
  if s is None:
    # Default to the calling function's name, qualified by its class when the
    # caller is a method (getCallerInfo(2): skip title()'s own frame).
    callable_name, file_name, class_name = getCallerInfo(2)
    s = callable_name
    if class_name is not None:
      s = class_name + '.' + callable_name
  lines = (s + additional).split('\n')
  length = max(len(line) for line in lines)
  # FIX: use stream.write() instead of the Python-2-only "print >> stream"
  # statement; the output is byte-identical and it runs on both Python 2
  # and Python 3.
  stream.write('-' * length + '\n')
  stream.write(s + additional + '\n')
  stream.write('-' * length + '\n')
def getArgumentDescriptions(f):
  """
  Get the arguments, default values, and argument descriptions for a function.

  Parses the argument descriptions out of the function docstring, using a
  format something like this:
  ::

    [junk]
    argument_name:     description...
      description...
      description...
    [junk]
    [more arguments]

  It will find an argument as long as the exact argument name starts the line.
  It will then strip a trailing colon, if present, then strip the rest of the
  line and use it to start the description. It will then strip and append any
  subsequent lines with a greater indent level than the original argument name.

  :param f: (function) to inspect
  :returns: (list of tuples) (``argName``, ``argDescription``, ``defaultValue``)
   If an argument has no default value, the tuple is only two elements long (as
   ``None`` cannot be used, since it could be a default value itself).

  .. note:: relies on ``inspect.getargspec``, which is a legacy (Python 2 era)
     API — it does not understand keyword-only arguments.
  """
  # Get the argument names and default values
  argspec = inspect.getargspec(f)
  # Scan through the docstring to extract documentation for each argument as
  # follows:
  #   Check the first word of the line, stripping a colon if one is present.
  #     If it matches an argument name:
  #      Take the rest of the line, stripping leading whitespeace
  #      Take each subsequent line if its indentation level is greater than the
  #        initial indentation level
  #      Once the indentation level is back to the original level, look for
  #        another argument
  docstring = f.__doc__
  descriptions = {}
  if docstring:
    lines = docstring.split('\n')
    i = 0
    # Manual index loop: the inner while advances `i` past an argument's
    # continuation lines, so a plain `for` over `lines` would not work here.
    while i < len(lines):
      stripped = lines[i].lstrip()
      if not stripped:
        i += 1
        continue
      # Indentation level is index of the first character
      indentLevel = lines[i].index(stripped[0])
      # Get the first word and remove the colon, if present
      firstWord = stripped.split()[0]
      if firstWord.endswith(':'):
        firstWord = firstWord[:-1]
      if firstWord in argspec.args:
        # Found an argument
        argName = firstWord
        restOfLine = stripped[len(firstWord)+1:].strip()
        argLines = [restOfLine]
        # Take the next lines as long as they are indented more
        i += 1
        while i < len(lines):
          stripped = lines[i].lstrip()
          if not stripped:
            # Empty line - stop
            break
          if lines[i].index(stripped[0]) <= indentLevel:
            # No longer indented far enough - stop
            break
          # This line counts too
          argLines.append(lines[i].strip())
          i += 1
        # Store this description (continuation lines joined with spaces)
        descriptions[argName] = ' '.join(argLines)
      else:
        # Not an argument
        i += 1
  # Build the list of (argName, description, defaultValue).  Defaults align
  # with the *tail* of argspec.args, so the first len(args) - len(defaults)
  # arguments produce two-element tuples.
  args = []
  if argspec.defaults:
    defaultCount = len(argspec.defaults)
  else:
    defaultCount = 0
  nonDefaultArgCount = len(argspec.args) - defaultCount
  for i, argName in enumerate(argspec.args):
    if i >= nonDefaultArgCount:
      defaultValue = argspec.defaults[i - nonDefaultArgCount]
      args.append((argName, descriptions.get(argName, ""), defaultValue))
    else:
      args.append((argName, descriptions.get(argName, "")))
  return args
# Module-level guard so initLogging() configures logging at most once per
# process; set to True at the end of a successful initLogging() call.
gLoggingInitialized = False
def initLogging(verbose=False, console='stdout', consoleLevel='DEBUG'):
  """
  Initialize NuPic logging by reading in from the logging configuration file.

  The logging configuration file is named ``nupic-logging.conf`` and is
  expected to be in the format defined by the python logging module.

  If the environment variable ``NTA_CONF_PATH`` is defined, then the logging
  configuration file is expected to be in the ``NTA_CONF_PATH`` directory. If
  ``NTA_CONF_PATH`` is not defined, then it is found in the 'conf/default'
  subdirectory of the NuPic installation directory (typically
  ~/nupic/current/conf/default)

  The logging configuration file can use the environment variable
  ``NTA_LOG_DIR`` to set the locations of log files. If this variable is not
  defined, logging to files will be disabled.

  :param verbose: (bool) when True, progress/diagnostic messages are printed
      to stderr while configuring
  :param console: Defines console output for the default "root" logging
      configuration; this may be one of 'stdout', 'stderr', or None;
      Use None to suppress console logging output
  :param consoleLevel:
      Logging-level filter string for console output corresponding to
      logging levels in the logging module; may be one of:
      'DEBUG', 'INFO', 'WARNING', 'ERROR', or 'CRITICAL'.
      E.g., a value of 'WARNING' suppresses DEBUG and INFO level output
      to console, but allows WARNING, ERROR, and CRITICAL
  """
  # NOTE: If you call this twice from the same process there seems to be a
  # bug - logged messages don't show up for loggers that you do another
  # logging.getLogger() on.
  global gLoggingInitialized
  if gLoggingInitialized:
    if verbose:
      print >> sys.stderr, "Logging already initialized, doing nothing."
    return
  # Maps the `console` argument to the handler name used in the conf file.
  consoleStreamMappings = {
    'stdout' : 'stdoutConsoleHandler',
    'stderr' : 'stderrConsoleHandler',
  }
  consoleLogLevels = ['DEBUG', 'INFO', 'WARNING', 'WARN', 'ERROR', 'CRITICAL',
                      'FATAL']
  assert console is None or console in consoleStreamMappings.keys(), (
    'Unexpected console arg value: %r') % (console,)
  assert consoleLevel in consoleLogLevels, (
    'Unexpected consoleLevel arg value: %r') % (consoleLevel)
  # -----------------------------------------------------------------------
  # Setup logging. Look for the nupic-logging.conf file, first in the
  # NTA_CONFIG_DIR path (if defined), then in a subdirectory of the nupic
  # module
  configFilename = 'nupic-logging.conf'
  configFilePath = resource_filename("nupic.support", configFilename)
  configLogDir = os.environ.get('NTA_LOG_DIR', None)
  # Load in the logging configuration file
  if verbose:
    print >> sys.stderr, (
      "Using logging configuration file: %s") % (configFilePath)
  # This dict will hold our replacement strings for logging configuration:
  # keys look like "$$NAME$$" and are substituted into the conf file text.
  replacements = dict()
  def makeKey(name):
    """ Makes replacement key """
    return "$$%s$$" % (name)
  platform = sys.platform.lower()
  if platform.startswith('java'):
    # Jython: sys.platform reports 'java...'; ask the JVM for the real OS.
    import java.lang
    platform = java.lang.System.getProperty("os.name").lower()
    if platform.startswith('mac os x'):
      platform = 'darwin'
  # Pick the platform-specific syslog socket/address for the conf file.
  if platform.startswith('darwin'):
    replacements[makeKey('SYSLOG_HANDLER_ADDRESS')] = '"/var/run/syslog"'
  elif platform.startswith('linux'):
    replacements[makeKey('SYSLOG_HANDLER_ADDRESS')] = '"/dev/log"'
  elif platform.startswith('win'):
    replacements[makeKey('SYSLOG_HANDLER_ADDRESS')] = '"log"'
  else:
    raise RuntimeError("This platform is neither darwin, win32, nor linux: %s" % (
      sys.platform,))
  # Nupic logs go to file
  replacements[makeKey('PERSISTENT_LOG_HANDLER')] = 'fileHandler'
  # Default the file handler to the null device; overridden below when
  # NTA_LOG_DIR is set.
  if platform.startswith('win'):
    replacements[makeKey('FILE_HANDLER_LOG_FILENAME')] = '"NUL"'
  else:
    replacements[makeKey('FILE_HANDLER_LOG_FILENAME')] = '"/dev/null"'
  # Set up log file path for the default file handler and configure handlers
  handlers = list()
  if configLogDir is not None:
    logFilePath = _genLoggingFilePath()
    makeDirectoryFromAbsolutePath(os.path.dirname(logFilePath))
    replacements[makeKey('FILE_HANDLER_LOG_FILENAME')] = repr(logFilePath)
    handlers.append(replacements[makeKey('PERSISTENT_LOG_HANDLER')])
  if console is not None:
    handlers.append(consoleStreamMappings[console])
  replacements[makeKey('ROOT_LOGGER_HANDLERS')] = ", ".join(handlers)
  # Set up log level for console handlers
  replacements[makeKey('CONSOLE_LOG_LEVEL')] = consoleLevel
  customConfig = StringIO()
  # Using pkg_resources to get the logging file, which should be packaged and
  # associated with this source file name.
  loggingFileContents = resource_string(__name__, configFilename)
  for lineNum, line in enumerate(loggingFileContents.splitlines()):
    if "$$" in line:
      for (key, value) in replacements.items():
        line = line.replace(key, value)
    # If there is still a replacement string in the line, we're missing it
    # from our replacements dict ("$$<key>$$" is documentation text in the
    # conf file, not a real placeholder, so it is exempted)
    if "$$" in line and "$$<key>$$" not in line:
      raise RuntimeError(("The text %r, found at line #%d of file %r, "
                          "contains a string not found in our replacement "
                          "dict.") % (line, lineNum, configFilePath))
    customConfig.write("%s\n" % line)
  customConfig.seek(0)
  # disable_existing_loggers was added to fileConfig() in Python 2.6.
  if python_version()[:3] >= '2.6':
    logging.config.fileConfig(customConfig, disable_existing_loggers=False)
  else:
    logging.config.fileConfig(customConfig)
  gLoggingInitialized = True
def _genLoggingFilePath():
""" Generate a filepath for the calling app """
appName = os.path.splitext(os.path.basename(sys.argv[0]))[0] or 'UnknownApp'
appLogDir = os.path.abspath(os.path.join(
os.environ['NTA_LOG_DIR'],
'numenta-logs-%s' % (os.environ['USER'],),
appName))
appLogFileName = '%s-%s-%s.log' % (
appName, long(time.mktime(time.gmtime())), os.getpid())
return os.path.join(appLogDir, appLogFileName)
def aggregationToMonthsSeconds(interval):
  """
  Convert an aggregation dict representing a date and time into months and
  seconds.

  Interval is a dict that contain one or more of the following keys: 'years',
  'months', 'weeks', 'days', 'hours', 'minutes', seconds', 'milliseconds',
  'microseconds'.  Sub-month units are folded into a float number of seconds
  (microsecond resolution); 'years' are folded into months.

  For example:

  ::

    aggregationToMonthsSeconds({'years': 1, 'hours': 4, 'microseconds': 42}) ==
        {'months': 12, 'seconds': 14400.000042}

  :param interval: (dict) The aggregation interval representing a date and time
  :returns: (dict) number of months and seconds in the interval:
            ``{months': XX, 'seconds': XX}``. The seconds is
            a floating point that can represent resolutions down to a
            microsecond.
  """
  # Seconds contributed by each sub-month unit, smallest first (matches the
  # accumulation order of the original hand-rolled sum).
  secondsPerUnit = (
    ('microseconds', 0.000001),
    ('milliseconds', 0.001),
    ('seconds', 1),
    ('minutes', 60),
    ('hours', 60 * 60),
    ('days', 24 * 60 * 60),
    ('weeks', 7 * 24 * 60 * 60),
  )
  totalSeconds = sum(interval.get(unit, 0) * factor
                     for unit, factor in secondsPerUnit)
  totalMonths = interval.get('months', 0) + 12 * interval.get('years', 0)
  return {'months': totalMonths, 'seconds': totalSeconds}
def aggregationDivide(dividend, divisor):
  """
  Return the result from dividing two dicts that represent date and time.

  Both dividend and divisor are dicts that contain one or more of the following
  keys: 'years', 'months', 'weeks', 'days', 'hours', 'minutes', seconds',
  'milliseconds', 'microseconds'.

  For example:

  ::

    aggregationDivide({'hours': 4}, {'minutes': 15}) == 16

  :param dividend: (dict) The numerator, as a dict representing a date and time
  :param divisor: (dict) the denominator, as a dict representing a date and time
  :returns: (float) number of times divisor goes into dividend
  :raises RuntimeError: if one operand is month/year-based and the other is
      seconds-based; the two unit families cannot be interconverted
  """
  # Normalize both operands to the common (months, seconds) representation
  dividendMonthSec = aggregationToMonthsSeconds(dividend)
  divisorMonthSec = aggregationToMonthsSeconds(divisor)
  # It is a usage error to mix both months and seconds in the same operation
  if (dividendMonthSec['months'] != 0 and divisorMonthSec['seconds'] != 0) \
    or (dividendMonthSec['seconds'] != 0 and divisorMonthSec['months'] != 0):
    raise RuntimeError("Aggregation dicts with months/years can only be "
                       "inter-operated with other aggregation dicts that contain "
                       "months/years")
  if dividendMonthSec['months'] > 0:
    # BUG FIX: divide by the *normalized* month count.  The previous code
    # used the raw divisor['months'], which raised KeyError whenever the
    # divisor was expressed as {'years': N} and ignored 'years' when both
    # keys were present.
    return float(dividendMonthSec['months']) / divisorMonthSec['months']
  else:
    return float(dividendMonthSec['seconds']) / divisorMonthSec['seconds']
|
agpl-3.0
|
XDEMOND/holamundo
|
plugins/ti.alloy/plugin.py
|
1729
|
5251
|
import os, sys, subprocess, hashlib
import subprocess
def check_output(*popenargs, **kwargs):
    r"""Run command with arguments and return its output as a byte string.

    Backported from Python 2.7 as it's implemented as pure python on stdlib.

    >>> check_output(['/usr/bin/python', '--version'])
    Python 2.6.2

    On a nonzero exit status a subprocess.CalledProcessError is raised with
    the captured output attached as ``error.output``.
    """
    proc = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    captured, _unused = proc.communicate()
    status = proc.poll()
    if not status:
        return captured
    # Nonzero exit: report the command that failed (explicit 'args' kwarg
    # wins over the first positional argument).
    failed_cmd = kwargs.get("args")
    if failed_cmd is None:
        failed_cmd = popenargs[0]
    err = subprocess.CalledProcessError(status, failed_cmd)
    err.output = captured
    raise err
def compile(config):
	"""Titanium build-plugin entry point: run the Alloy compiler for the project.

	Locates the `alloy` (and, off Windows, `node`) binaries via the
	ALLOY_PATH / ALLOY_NODE_PATH environment variables, the PATH, or a set of
	well-known install locations, then shells out to `alloy compile` for the
	project's app/ directory.  Exits the process on failure.

	:param config: (dict) Titanium build configuration; reads 'project_dir',
	    'platform' and per-platform version/deploy-type keys
	"""
	paths = {}
	binaries = ["alloy","node"]
	# Marker written by the new Alloy CLI: when present the CLI already
	# compiled, so this legacy plugin must skip (and consume the marker).
	dotAlloy = os.path.abspath(os.path.join(config['project_dir'], 'build', '.alloynewcli'))
	if os.path.exists(dotAlloy):
		print "[DEBUG] build/.alloynewcli file found, skipping plugin..."
		os.remove(dotAlloy)
	else:
		for binary in binaries:
			try:
				# see if the environment variable is defined
				paths[binary] = os.environ["ALLOY_" + ("NODE_" if binary == "node" else "") + "PATH"]
			except KeyError as ex:
				# next try PATH, and then our guess paths
				if sys.platform == "darwin" or sys.platform.startswith('linux'):
					userPath = os.environ["HOME"]
					guessPaths = [
						"/usr/local/bin/"+binary,
						"/opt/local/bin/"+binary,
						userPath+"/local/bin/"+binary,
						"/opt/bin/"+binary,
						"/usr/bin/"+binary,
						"/usr/local/share/npm/bin/"+binary
					]
					try:
						binaryPath = check_output(["which",binary], stderr=subprocess.STDOUT).strip()
						print "[DEBUG] %s installed at '%s'" % (binary,binaryPath)
					except:
						print "[WARN] Couldn't find %s on your PATH:" % binary
						print "[WARN] %s" % os.environ["PATH"]
						print "[WARN]"
						print "[WARN] Checking for %s in a few default locations:" % binary
						for p in guessPaths:
							sys.stdout.write("[WARN] %s -> " % p)
							if os.path.exists(p):
								binaryPath = p
								print "FOUND"
								break
							else:
								print "not found"
								binaryPath = None
					if binaryPath is None:
						print "[ERROR] Couldn't find %s" % binary
						sys.exit(1)
					else:
						paths[binary] = binaryPath
				# no guesses on windows, just use the PATH
				elif sys.platform == "win32":
					paths["alloy"] = "alloy.cmd"
		f = os.path.abspath(os.path.join(config['project_dir'], 'app'))
		if os.path.exists(f):
			print "[INFO] alloy app found at %s" % f
			rd = os.path.abspath(os.path.join(config['project_dir'], 'Resources'))
			# Defaults; refined per-platform below.
			devicefamily = 'none'
			simtype = 'none'
			version = '0'
			deploytype = 'development'
			if config['platform']==u'ios':
				version = config['iphone_version']
				devicefamily = config['devicefamily']
				deploytype = config['deploytype']
			if config['platform']==u'android':
				builder = config['android_builder']
				version = builder.tool_api_level
				deploytype = config['deploy_type']
			if config['platform']==u'mobileweb':
				builder = config['mobileweb_builder']
				deploytype = config['deploytype']
			# Alloy's --config takes a single comma-separated key=value string.
			cfg = "platform=%s,version=%s,simtype=%s,devicefamily=%s,deploytype=%s," % (config['platform'],version,simtype,devicefamily,deploytype)
			if sys.platform == "win32":
				cmd = [paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
			else:
				# On unix the alloy script is run through the node binary.
				cmd = [paths["node"], paths["alloy"], "compile", f, "--no-colors", "--config", cfg]
			print "[INFO] Executing Alloy compile:"
			print "[INFO] %s" % " ".join(cmd)
			try:
				print check_output(cmd, stderr=subprocess.STDOUT)
			except subprocess.CalledProcessError as ex:
				if hasattr(ex, 'output'):
					print ex.output
				print "[ERROR] Alloy compile failed"
				retcode = 1
				if hasattr(ex, 'returncode'):
					retcode = ex.returncode
				sys.exit(retcode)
			except EnvironmentError as ex:
				print "[ERROR] Unexpected error with Alloy compiler plugin: %s" % ex.strerror
				sys.exit(2)
|
apache-2.0
|
ipsosante/django-audit-log
|
audit_log/middleware.py
|
1
|
3349
|
from django.db.models import signals
from django.utils.functional import curry
from django.conf import settings
from audit_log import registration
from audit_log.models import fields
from audit_log.models.managers import AuditLogManager
def _disable_audit_log_managers(instance):
    """Turn off change tracking on every AuditLogManager attached to
    ``instance`` (used around saves that must not themselves be audited)."""
    for name in dir(instance):
        try:
            candidate = getattr(instance, name)
            if isinstance(candidate, AuditLogManager):
                candidate.disable_tracking()
        except AttributeError:
            # Some attributes raise on access (e.g. descriptors on the
            # class) - just skip them.
            pass
def _enable_audit_log_managers(instance):
    """Re-enable change tracking on every AuditLogManager attached to
    ``instance`` (counterpart of ``_disable_audit_log_managers``)."""
    for name in dir(instance):
        try:
            candidate = getattr(instance, name)
            if isinstance(candidate, AuditLogManager):
                candidate.enable_tracking()
        except AttributeError:
            # Some attributes raise on access - just skip them.
            pass
class UserLoggingMiddleware(object):
    """Django middleware that stamps audit-log fields with the requesting
    user and session.

    On each request it connects pre_save/post_save handlers (curried with the
    request and keyed by a per-request dispatch_uid) that populate any
    registered audit fields; the handlers are disconnected when the response
    goes out.
    """

    def process_request(self, request):
        if getattr(settings, 'DISABLE_AUDIT_LOG', False):
            return
        update_pre_save_info = curry(self._update_pre_save_info, request)
        update_post_save_info = curry(self._update_post_save_info, request)
        # dispatch_uid makes each connection unique per (class, request) so
        # process_response can disconnect exactly these handlers; weak=False
        # keeps the curried callables alive for the request's duration.
        signals.pre_save.connect(update_pre_save_info, dispatch_uid=(self.__class__, request,), weak = False)
        signals.post_save.connect(update_post_save_info, dispatch_uid=(self.__class__, request,), weak = False)

    def process_response(self, request, response):
        if getattr(settings, 'DISABLE_AUDIT_LOG', False):
            # BUG FIX: middleware must always return the response object.
            # Returning None here (as the previous code did) makes Django
            # drop the response whenever DISABLE_AUDIT_LOG is set.
            return response
        signals.pre_save.disconnect(dispatch_uid=(self.__class__, request,))
        signals.post_save.disconnect(dispatch_uid=(self.__class__, request,))
        return response

    def _update_pre_save_info(self, request, sender, instance, **kwargs):
        """Stamp 'last modified by' user/session fields before any save."""
        # NOTE(review): is_authenticated is *called* here, which assumes an
        # older Django where it is a method rather than a property - confirm
        # against the project's Django version.
        if hasattr(request, 'user') and request.user.is_authenticated():
            user = request.user
        else:
            user = None
        session = request.session.session_key
        registry = registration.FieldRegistry(fields.LastUserField)
        if sender in registry:
            for field in registry.get_fields(sender):
                setattr(instance, field.name, user)
        registry = registration.FieldRegistry(fields.LastSessionKeyField)
        if sender in registry:
            for field in registry.get_fields(sender):
                setattr(instance, field.name, session)

    def _update_post_save_info(self, request, sender, instance, created, **kwargs):
        """On first save, stamp 'created by' user/session fields.

        The extra save() needed to persist the creator fields is wrapped in
        disable/enable of the instance's audit managers so that the fix-up
        save is not itself recorded as a change.
        """
        if hasattr(request, 'user') and request.user.is_authenticated():
            user = request.user
        else:
            user = None
        session = request.session.session_key
        if created:
            registry = registration.FieldRegistry(fields.CreatingUserField)
            if sender in registry:
                for field in registry.get_fields(sender):
                    setattr(instance, field.name, user)
                    _disable_audit_log_managers(instance)
                    instance.save()
                    _enable_audit_log_managers(instance)
            registry = registration.FieldRegistry(fields.CreatingSessionKeyField)
            if sender in registry:
                for field in registry.get_fields(sender):
                    setattr(instance, field.name, session)
                    _disable_audit_log_managers(instance)
                    instance.save()
                    _enable_audit_log_managers(instance)
|
bsd-3-clause
|
edx/edx-platform
|
cms/djangoapps/contentstore/management/commands/tests/test_backfill_course_outlines.py
|
4
|
4869
|
"""
Tests for `backfill_course_outlines` Studio (cms) management command.
"""
from django.core.management import call_command
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.content.learning_sequences.api import get_course_keys_with_outlines
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from ....outlines import update_outline_from_modulestore
class BackfillCourseOutlinesTest(SharedModuleStoreTestCase):
    """
    Test `backfill_orgs_and_org_courses`.
    """
    def setUp(self):
        """
        Create the CourseOverviews we need for this test case.

        There's no publish signal, so we manually create the CourseOverviews.
        Without that, backfill_orgs_and_org_courses has no way to figure out
        which courses exist, which it needs in order to figure out which ones
        need backfilling.

        We can't turn on the course_published signal because if we did so, then
        the outlines would get generated automatically, and there'd be nothing
        to backfill.
        """
        super().setUp()
        # self.course_keys is populated in setUpClass below.
        CourseOverview.update_select_courses(self.course_keys, force_update=True)

    @classmethod
    def setUpClass(cls):
        """
        We set up some content here, without publish signals enabled.

        Creates three course runs - one Old Mongo (deprecated key format) and
        two Split Mongo - each with a minimal chapter/sequential/vertical/html
        tree, so the backfill command has real structure to build outlines
        from.
        """
        super().setUpClass()
        course_run_ids = [
            # Old Mongo key format (deprecated) - expected to be skipped by
            # the backfill command.
            "OpenEdX/OutlineCourse/OldMongoRun1",
            "course-v1:OpenEdX+OutlineCourse+Run2",
            "course-v1:OpenEdX+OutlineCourse+Run3",
        ]
        cls.course_keys = [
            CourseKey.from_string(course_run_id) for course_run_id in course_run_ids
        ]
        for course_key in cls.course_keys:
            # Deprecated keys live in the old Mongo store; modern keys in Split.
            if course_key.deprecated:
                store_type = ModuleStoreEnum.Type.mongo
            else:
                store_type = ModuleStoreEnum.Type.split
            with cls.store.default_store(store_type):
                course = CourseFactory.create(
                    org=course_key.org,
                    number=course_key.course,
                    run=course_key.run,
                    display_name=f"Outline Backfill Test Course {course_key.run}"
                )
                # bulk_operations batches the child creations into a single
                # structure write for the course.
                with cls.store.bulk_operations(course_key):
                    section = ItemFactory.create(
                        parent_location=course.location,
                        category="chapter",
                        display_name="A Section"
                    )
                    sequence = ItemFactory.create(
                        parent_location=section.location,
                        category="sequential",
                        display_name="A Sequence"
                    )
                    unit = ItemFactory.create(
                        parent_location=sequence.location,
                        category="vertical",
                        display_name="A Unit"
                    )
                    ItemFactory.create(
                        parent_location=unit.location,
                        category="html",
                        display_name="An HTML Module"
                    )

    def test_end_to_end(self):
        """Normal invocation, it should skip only the Old Mongo course."""
        # In the beginning, we have no outlines...
        assert not get_course_keys_with_outlines().exists()

        # Run command and outlines appear for Split Mongo courses...
        call_command("backfill_course_outlines")
        course_keys_with_outlines = set(get_course_keys_with_outlines())
        assert course_keys_with_outlines == {
            CourseKey.from_string("course-v1:OpenEdX+OutlineCourse+Run2"),
            CourseKey.from_string("course-v1:OpenEdX+OutlineCourse+Run3"),
        }

    def test_partial(self):
        """Also works when we've manually created one in advance."""
        course_keys_with_outlines = set(get_course_keys_with_outlines())
        assert not get_course_keys_with_outlines().exists()

        # Manually create one
        update_outline_from_modulestore(
            CourseKey.from_string("course-v1:OpenEdX+OutlineCourse+Run2")
        )
        assert set(get_course_keys_with_outlines()) == {
            CourseKey.from_string("course-v1:OpenEdX+OutlineCourse+Run2")
        }

        # backfill command should fill in the other
        call_command("backfill_course_outlines")
        course_keys_with_outlines = set(get_course_keys_with_outlines())
        assert course_keys_with_outlines == {
            CourseKey.from_string("course-v1:OpenEdX+OutlineCourse+Run2"),
            CourseKey.from_string("course-v1:OpenEdX+OutlineCourse+Run3"),
        }
|
agpl-3.0
|
MobinRanjbar/hue
|
desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Hash/test_SHA256.py
|
116
|
3663
|
# -*- coding: utf-8 -*-
#
# SelfTest/Hash/test_SHA256.py: Self-test for the SHA-256 hash function
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Hash.SHA256"""
__revision__ = "$Id$"
import unittest
from Crypto.Util.py3compat import *
class LargeSHA256Test(unittest.TestCase):
    """Multi-hundred-MiB digest test; only run when 'slow_tests' is enabled."""

    def runTest(self):
        """SHA256: 512/520 MiB test"""
        from Crypto.Hash import SHA256
        # Feed the hash 1 MiB of zero bytes at a time to keep memory bounded.
        zeros = bchr(0x00) * (1024*1024)

        h = SHA256.new(zeros)
        for i in xrange(511):
            h.update(zeros)

        # This test vector is from PyCrypto's old testdata.py file.
        self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB

        # Another 8 MiB brings the total to 520 MiB.
        for i in xrange(8):
            h.update(zeros)

        # This test vector is from PyCrypto's old testdata.py file.
        self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB
def get_tests(config={}):
    """Build the SHA256 self-test suite.

    :param config: (dict) test configuration; when ``config['slow_tests']``
        is truthy, the multi-hundred-MiB LargeSHA256Test is included too.
    :returns: (list) unittest test cases
    """
    from Crypto.Hash import SHA256
    from common import make_hash_tests

    # Known-answer vectors from FIPS PUB 180-2 and friends, as
    # (expected_result, input[, description]) tuples.
    vectors = [
        # FIPS PUB 180-2, B.1 - "One-Block Message"
        ('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad',
         'abc'),

        # FIPS PUB 180-2, B.2 - "Multi-Block Message"
        ('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1',
         'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'),

        # FIPS PUB 180-2, B.3 - "Long Message"
        ('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0',
         'a' * 10**6,
         '"a" * 10**6'),

        # Test for an old PyCrypto bug.
        ('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103',
         'This message is precisely 55 bytes long, to test a bug.',
         'Length = 55 (mod 64)'),

        # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm
        ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', ''),

        ('d32b568cd1b96d459e7291ebf4b25d007f275c9f13149beeb782fac0716613f8',
         'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'),
    ]

    suite = make_hash_tests(SHA256, "SHA256", vectors,
                            digest_size=32,
                            oid="\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01")

    if config.get('slow_tests'):
        suite = suite + [LargeSHA256Test()]

    return suite
# Allow running this self-test module directly; 'suite' is resolved by name
# through unittest's defaultTest mechanism.
if __name__ == '__main__':
    import unittest
    suite = lambda: unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')

# vim:set ts=4 sw=4 sts=4 expandtab:
|
apache-2.0
|
Romaingin/Antlia
|
antlia/blueprint/text.py
|
1
|
1690
|
from .primitive import *
from ..rect import Rect
from .unicode import NAME_TO_UNICODE
import re
import time as ti
class Text(Primitive):
    """A drawable text (or icon-glyph) primitive rendered with SDL2_ttf.

    Lifecycle: __init__ resolves the font, build() rasterizes the string into
    an SDL texture positioned inside a target rect, draw() blits it, and
    destroy() releases the texture.
    """

    def __init__(self, text, font_path, size, align="center"):
        """
        :param text: string to display; "#name#" selects an icon glyph
        :param font_path: path/key of the font to load via the font manager
        :param size: point size (coerced to int)
        :param align: horizontal alignment: "center" (default), "right",
            or anything else for left
        """
        super(Text, self).__init__()
        self.text = text
        self.size = int(size)
        self.align = align

        # Test if icon: "#name#" is replaced by the corresponding unicode
        # glyph from the icon font.
        icon = re.compile(r'#(.+)#')
        s = icon.search(self.text)
        if s is not None:
            # Icon found !
            icon_name = s.group(1)
            self.text = NAME_TO_UNICODE[icon_name]
            # Add font to the font manager
            self.font_id = font_manager.addFont("icons", self.size)
        else:
            self.font_id = font_manager.addFont(font_path, self.size)
        self.font = font_manager.getFont(self.font_id)

    def build(self, renderer, rect, color):
        """Rasterize the text into a texture, positioned inside `rect`.

        Word-wraps at rect.w, vertically centers, and horizontally aligns
        per self.align. Exits the process on a TTF rendering error.
        """
        if self.text != "":
            textSurface = sdl2ttf.TTF_RenderUTF8_Blended_Wrapped(self.font, self.text.encode(),
                                    sdl2.SDL_Color(*color),
                                    rect.w)
            errors = sdl2ttf.TTF_GetError()
            if errors:
                print("Text", errors)
                exit(1)
            w = textSurface.contents.w
            h = textSurface.contents.h

            # Compute the square in absolute coordinates
            X = rect.x
            Y = int(rect.y + rect.h /2 - h/2)
            if self.align == "center":
                X = int(X + rect.w /2 - w/2)
            if self.align == "right":
                X = int(X + rect.w - w)

            self.abs_rect = sdl2.SDL_Rect(X, Y, w, h)

            # The surface is only needed to create the texture; free it
            # immediately to avoid leaking SDL surfaces on rebuilds.
            self.textTexture = sdl2.SDL_CreateTextureFromSurface(renderer, textSurface)
            sdl2.SDL_FreeSurface(textSurface)

    def draw(self, renderer):
        # build() must have been called first; empty text draws nothing.
        if self.text != "":
            sdl2.SDL_RenderCopy(renderer, self.textTexture, None, self.abs_rect)

    def getFontId(self):
        # Identifier assigned by the font manager in __init__.
        return self.font_id

    def destroy(self):
        # Release the GPU texture created in build(); empty text never
        # allocated one.
        if self.text != "":
            sdl2.SDL_DestroyTexture(self.textTexture)
|
mit
|
CydarLtd/ansible
|
lib/ansible/modules/network/netconf/netconf_config.py
|
38
|
10019
|
#!/usr/bin/python
# (c) 2016, Leandro Lisboa Penz <lpenz at lpenz.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module metadata consumed by ansible-doc and the CI gates.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Module documentation (YAML). Fixed typos: "the the" -> "the",
# "It always use" -> "It always uses".
DOCUMENTATION = '''
---
module: netconf_config
author: "Leandro Lisboa Penz (@lpenz)"
short_description: netconf device configuration
description:
    - Netconf is a network management protocol developed and standardized by
      the IETF. It is documented in RFC 6241.
    - This module allows the user to send a configuration XML file to a netconf
      device, and detects if there was a configuration change.
notes:
    - This module supports devices with and without the candidate and
      confirmed-commit capabilities. It always uses the safer feature.
version_added: "2.2"
options:
  host:
    description:
      - the hostname or ip address of the netconf device
    required: true
  port:
    description:
      - the netconf port
    default: 830
    required: false
  hostkey_verify:
    description:
      - if true, the ssh host key of the device must match a ssh key present on the host
      - if false, the ssh host key of the device is not checked
    default: true
    required: false
  look_for_keys:
    description:
      - if true, enables looking in the usual locations for ssh keys (e.g. ~/.ssh/id_*)
      - if false, disables looking for ssh keys
    default: true
    required: false
    version_added: "2.4"
  allow_agent:
    description:
      - if true, enables querying SSH agent (if found) for keys
      - if false, disables querying the SSH agent for ssh keys
    default: true
    required: false
    version_added: "2.4"
  datastore:
    description:
      - auto, uses candidate and fallback to running
      - candidate, edit <candidate/> datastore and then commit
      - running, edit <running/> datastore directly
    default: auto
    required: false
    version_added: "2.4"
  save:
    description:
      - The C(save) argument instructs the module to save the running-
        config to the startup-config if changed.
    required: false
    default: false
    version_added: "2.4"
  username:
    description:
      - the username to authenticate with
    required: true
  password:
    description:
      - password of the user to authenticate with
    required: true
  xml:
    description:
      - the XML content to send to the device
    required: false
  src:
    description:
      - Specifies the source path to the xml file that contains the configuration
        or configuration template to load. The path to the source file can
        either be the full path on the Ansible control host or a relative
        path from the playbook or role root directory. This argument is mutually
        exclusive with I(xml).
    required: false
    version_added: "2.4"
requirements:
    - "python >= 2.6"
    - "ncclient"
'''
EXAMPLES = '''
- name: set ntp server in the device
netconf_config:
host: 10.0.0.1
username: admin
password: admin
xml: |
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<system xmlns="urn:ietf:params:xml:ns:yang:ietf-system">
<ntp>
<enabled>true</enabled>
<server>
<name>ntp1</name>
<udp><address>127.0.0.1</address></udp>
</server>
</ntp>
</system>
</config>
- name: wipe ntp configuration
netconf_config:
host: 10.0.0.1
username: admin
password: admin
xml: |
<config xmlns:xc="urn:ietf:params:xml:ns:netconf:base:1.0">
<system xmlns="urn:ietf:params:xml:ns:yang:ietf-system">
<ntp>
<enabled>false</enabled>
<server operation="remove">
<name>ntp1</name>
</server>
</ntp>
</system>
</config>
'''
RETURN = '''
server_capabilities:
description: list of capabilities of the server
returned: success
type: list
sample: ['urn:ietf:params:netconf:base:1.1','urn:ietf:params:netconf:capability:confirmed-commit:1.0','urn:ietf:params:netconf:capability:candidate:1.0']
'''
import xml.dom.minidom
try:
import ncclient.manager
HAS_NCCLIENT = True
except ImportError:
HAS_NCCLIENT = False
import logging
def netconf_edit_config(m, xml, commit, retkwargs, datastore):
    """Apply *xml* to *datastore* via the connected ncclient manager *m*.

    Returns True when the datastore content actually changed (detected by
    comparing get_config before/after). The datastore is locked for the
    whole operation. ``retkwargs`` is accepted but not used here.
    """
    m.lock(target=datastore)
    try:
        if datastore == "candidate":
            # Drop any stale uncommitted changes from earlier sessions.
            m.discard_changes()
        config_before = m.get_config(source=datastore)
        m.edit_config(target=datastore, config=xml)
        config_after = m.get_config(source=datastore)
        changed = config_before.data_xml != config_after.data_xml
        if changed and commit and datastore == "candidate":
            if ":confirmed-commit" in m.server_capabilities:
                # Confirmed commit auto-rolls back unless followed by the
                # plain confirming commit below.
                m.commit(confirmed=True)
                m.commit()
            else:
                m.commit()
        return changed
    finally:
        m.unlock(target=datastore)
# ------------------------------------------------------------------- #
# Main
def main():
    """Module entry point: validate inputs, connect, edit, report change."""
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(type='str', required=True),
            port=dict(type='int', default=830),
            hostkey_verify=dict(type='bool', default=True),
            allow_agent=dict(type='bool', default=True),
            look_for_keys=dict(type='bool', default=True),
            datastore=dict(choices=['auto', 'candidate', 'running'], default='auto'),
            save=dict(type='bool', default=False),
            username=dict(type='str', required=True, no_log=True),
            password=dict(type='str', required=True, no_log=True),
            xml=dict(type='str', required=False),
            src=dict(type='path', required=False),
        ),
        mutually_exclusive=[('xml', 'src')]
    )
    if not HAS_NCCLIENT:
        module.fail_json(msg='could not import the python library '
                         'ncclient required by this module')
    # NOTE(review): the 'src' value is used directly as the XML payload, not
    # read from disk here -- presumably an action plugin substitutes the file
    # content before this runs; confirm against the caller.
    if (module.params['src']):
        config_xml = str(module.params['src'])
    elif module.params['xml']:
        config_xml = str(module.params['xml'])
    else:
        module.fail_json(msg='Option src or xml must be provided')
    # Fail early on malformed XML before touching the device.
    try:
        xml.dom.minidom.parseString(config_xml)
    except:
        e = get_exception()
        module.fail_json(
            msg='error parsing XML: ' + str(e)
        )
    nckwargs = dict(
        host=module.params['host'],
        port=module.params['port'],
        hostkey_verify=module.params['hostkey_verify'],
        allow_agent=module.params['allow_agent'],
        look_for_keys=module.params['look_for_keys'],
        username=module.params['username'],
        password=module.params['password'],
    )
    try:
        m = ncclient.manager.connect(**nckwargs)
    except ncclient.transport.errors.AuthenticationError:
        module.fail_json(
            msg='authentication failed while connecting to device'
        )
    except:
        e = get_exception()
        module.fail_json(
            msg='error connecting to the device: ' + str(e)
        )
    retkwargs = dict()
    retkwargs['server_capabilities'] = list(m.server_capabilities)
    # Resolve which datastore to edit, honoring the server's capabilities.
    if module.params['datastore'] == 'candidate':
        if ':candidate' in m.server_capabilities:
            datastore = 'candidate'
        else:
            m.close_session()
            module.fail_json(
                msg=':candidate is not supported by this netconf server'
            )
    elif module.params['datastore'] == 'running':
        if ':writable-running' in m.server_capabilities:
            datastore = 'running'
        else:
            m.close_session()
            module.fail_json(
                msg=':writable-running is not supported by this netconf server'
            )
    elif module.params['datastore'] == 'auto':
        # Prefer candidate (safer, commit-based) and fall back to running.
        if ':candidate' in m.server_capabilities:
            datastore = 'candidate'
        elif ':writable-running' in m.server_capabilities:
            datastore = 'running'
        else:
            m.close_session()
            module.fail_json(
                msg='neither :candidate nor :writable-running are supported by this netconf server'
            )
    else:
        m.close_session()
        module.fail_json(
            msg=module.params['datastore'] + ' datastore is not supported by this ansible module'
        )
    # 'save' requires the :startup capability (copy running -> startup).
    if module.params['save']:
        if ':startup' not in m.server_capabilities:
            module.fail_json(
                msg='cannot copy <running/> to <startup/>, while :startup is not supported'
            )
    try:
        changed = netconf_edit_config(
            m=m,
            xml=config_xml,
            commit=True,
            retkwargs=retkwargs,
            datastore=datastore,
        )
        if changed and module.params['save']:
            m.copy_config(source="running", target="startup")
    except:
        e = get_exception()
        module.fail_json(
            msg='error editing configuration: ' + str(e)
        )
    finally:
        m.close_session()
    module.exit_json(changed=changed, **retkwargs)
# import module snippets
# Bottom-of-file star import is the historical Ansible module convention:
# it pulls in AnsibleModule and get_exception used above.
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
|
gpl-3.0
|
kevinkindom/chrome_depto_tools
|
third_party/boto/auth.py
|
51
|
26228
|
# Copyright 2010 Google Inc.
# Copyright (c) 2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Handles authentication required to AWS and GS
"""
import base64
import boto
import boto.auth_handler
import boto.exception
import boto.plugin
import boto.utils
import hmac
import sys
import urllib
import time
import datetime
import copy
from email.utils import formatdate
from boto.auth_handler import AuthHandler
from boto.exception import BotoClientError
#
# the following is necessary because of the incompatibilities
# between Python 2.4, 2.5, and 2.6 as well as the fact that some
# people running 2.4 have installed hashlib as a separate module
# this fix was provided by boto user mccormix.
# see: http://code.google.com/p/boto/issues/detail?id=172
# for more details.
#
try:
    from hashlib import sha1 as sha
    from hashlib import sha256 as sha256
    if sys.version[:3] == "2.4":
        # we are using an hmac that expects a .new() method.
        class Faker:
            """Adapter giving a hashlib constructor the old sha-module API."""
            def __init__(self, which):
                self.which = which
                self.digest_size = self.which().digest_size
            def new(self, *args, **kwargs):
                return self.which(*args, **kwargs)
        sha = Faker(sha)
        sha256 = Faker(sha256)
except ImportError:
    # No hashlib at all: fall back to the legacy sha module, no SHA-256.
    import sha
    sha256 = None
class HmacKeys(object):
    """Key based Auth handler helper."""
    def __init__(self, host, config, provider):
        # Without both keys we cannot sign; raising lets the plugin
        # machinery skip this handler.
        if provider.access_key is None or provider.secret_key is None:
            raise boto.auth_handler.NotReadyToAuthenticate()
        self.host = host
        self.update_provider(provider)
    def update_provider(self, provider):
        """Rebuild the HMAC templates from *provider*'s secret key."""
        self._provider = provider
        self._hmac = hmac.new(self._provider.secret_key, digestmod=sha)
        if sha256:
            self._hmac_256 = hmac.new(self._provider.secret_key,
                                      digestmod=sha256)
        else:
            self._hmac_256 = None
    def algorithm(self):
        """Name of the strongest HMAC algorithm available."""
        if self._hmac_256:
            return 'HmacSHA256'
        else:
            return 'HmacSHA1'
    def _get_hmac(self):
        # Fresh HMAC object per signature; prefers SHA-256 when available.
        if self._hmac_256:
            digestmod = sha256
        else:
            digestmod = sha
        return hmac.new(self._provider.secret_key,
                        digestmod=digestmod)
    def sign_string(self, string_to_sign):
        """Return the base64-encoded HMAC of *string_to_sign*, stripped."""
        new_hmac = self._get_hmac()
        new_hmac.update(string_to_sign)
        return base64.encodestring(new_hmac.digest()).strip()
    def __getstate__(self):
        # HMAC objects are not picklable; drop them and rebuild on load.
        pickled_dict = copy.copy(self.__dict__)
        del pickled_dict['_hmac']
        del pickled_dict['_hmac_256']
        return pickled_dict
    def __setstate__(self, dct):
        self.__dict__ = dct
        self.update_provider(self._provider)
class AnonAuthHandler(AuthHandler, HmacKeys):
    """
    Implements Anonymous requests.
    """
    capability = ['anon']
    def __init__(self, host, config, provider):
        AuthHandler.__init__(self, host, config, provider)
    def add_auth(self, http_request, **kwargs):
        """Anonymous access: deliberately adds no auth headers."""
        pass
class HmacAuthV1Handler(AuthHandler, HmacKeys):
    """ Implements the HMAC request signing used by S3 and GS."""
    capability = ['hmac-v1', 's3']
    def __init__(self, host, config, provider):
        AuthHandler.__init__(self, host, config, provider)
        HmacKeys.__init__(self, host, config, provider)
        # V1 signatures always use SHA-1, never the SHA-256 template.
        self._hmac_256 = None
    def update_provider(self, provider):
        super(HmacAuthV1Handler, self).update_provider(provider)
        self._hmac_256 = None
    def add_auth(self, http_request, **kwargs):
        """Sign *http_request* in place with an S3-style Authorization header."""
        headers = http_request.headers
        method = http_request.method
        auth_path = http_request.auth_path
        if 'Date' not in headers:
            headers['Date'] = formatdate(usegmt=True)
        if self._provider.security_token:
            key = self._provider.security_token_header
            headers[key] = self._provider.security_token
        string_to_sign = boto.utils.canonical_string(method, auth_path,
                                                     headers, None,
                                                     self._provider)
        boto.log.debug('StringToSign:\n%s' % string_to_sign)
        b64_hmac = self.sign_string(string_to_sign)
        auth_hdr = self._provider.auth_header
        headers['Authorization'] = ("%s %s:%s" %
                                    (auth_hdr,
                                     self._provider.access_key, b64_hmac))
class HmacAuthV2Handler(AuthHandler, HmacKeys):
    """
    Implements the simplified HMAC authorization used by CloudFront.
    """
    capability = ['hmac-v2', 'cloudfront']
    def __init__(self, host, config, provider):
        AuthHandler.__init__(self, host, config, provider)
        HmacKeys.__init__(self, host, config, provider)
        # V2 signatures always use SHA-1.
        self._hmac_256 = None
    def update_provider(self, provider):
        super(HmacAuthV2Handler, self).update_provider(provider)
        self._hmac_256 = None
    def add_auth(self, http_request, **kwargs):
        """Sign only the Date header value (CloudFront scheme)."""
        headers = http_request.headers
        if 'Date' not in headers:
            headers['Date'] = formatdate(usegmt=True)
        if self._provider.security_token:
            key = self._provider.security_token_header
            headers[key] = self._provider.security_token
        b64_hmac = self.sign_string(headers['Date'])
        auth_hdr = self._provider.auth_header
        headers['Authorization'] = ("%s %s:%s" %
                                    (auth_hdr,
                                     self._provider.access_key, b64_hmac))
class HmacAuthV3Handler(AuthHandler, HmacKeys):
    """Implements the new Version 3 HMAC authorization used by Route53."""
    capability = ['hmac-v3', 'route53', 'ses']
    def __init__(self, host, config, provider):
        AuthHandler.__init__(self, host, config, provider)
        HmacKeys.__init__(self, host, config, provider)
    def add_auth(self, http_request, **kwargs):
        """Sign the Date header and emit X-Amzn-Authorization (AWS3-HTTPS)."""
        headers = http_request.headers
        if 'Date' not in headers:
            headers['Date'] = formatdate(usegmt=True)
        if self._provider.security_token:
            key = self._provider.security_token_header
            headers[key] = self._provider.security_token
        b64_hmac = self.sign_string(headers['Date'])
        s = "AWS3-HTTPS AWSAccessKeyId=%s," % self._provider.access_key
        s += "Algorithm=%s,Signature=%s" % (self.algorithm(), b64_hmac)
        headers['X-Amzn-Authorization'] = s
class HmacAuthV3HTTPHandler(AuthHandler, HmacKeys):
    """
    Implements the new Version 3 HMAC authorization used by DynamoDB.
    """
    capability = ['hmac-v3-http']
    def __init__(self, host, config, provider):
        AuthHandler.__init__(self, host, config, provider)
        HmacKeys.__init__(self, host, config, provider)
    def headers_to_sign(self, http_request):
        """
        Select the headers from the request that need to be included
        in the StringToSign.
        """
        # Host is always signed; otherwise only x-amz-* headers are.
        headers_to_sign = {}
        headers_to_sign = {'Host': self.host}
        for name, value in http_request.headers.items():
            lname = name.lower()
            if lname.startswith('x-amz'):
                headers_to_sign[name] = value
        return headers_to_sign
    def canonical_headers(self, headers_to_sign):
        """
        Return the headers that need to be included in the StringToSign
        in their canonical form by converting all header keys to lower
        case, sorting them in alphabetical order and then joining
        them into a string, separated by newlines.
        """
        l = sorted(['%s:%s' % (n.lower().strip(),
                    headers_to_sign[n].strip()) for n in headers_to_sign])
        return '\n'.join(l)
    def string_to_sign(self, http_request):
        """
        Return the canonical StringToSign as well as a dict
        containing the original version of all headers that
        were included in the StringToSign.
        """
        headers_to_sign = self.headers_to_sign(http_request)
        canonical_headers = self.canonical_headers(headers_to_sign)
        string_to_sign = '\n'.join([http_request.method,
                                    http_request.auth_path,
                                    '',
                                    canonical_headers,
                                    '',
                                    http_request.body])
        return string_to_sign, headers_to_sign
    def add_auth(self, req, **kwargs):
        """
        Add AWS3 authentication to a request.
        :type req: :class`boto.connection.HTTPRequest`
        :param req: The HTTPRequest object.
        """
        # This could be a retry.  Make sure the previous
        # authorization header is removed first.
        if 'X-Amzn-Authorization' in req.headers:
            del req.headers['X-Amzn-Authorization']
        req.headers['X-Amz-Date'] = formatdate(usegmt=True)
        if self._provider.security_token:
            req.headers['X-Amz-Security-Token'] = self._provider.security_token
        string_to_sign, headers_to_sign = self.string_to_sign(req)
        boto.log.debug('StringToSign:\n%s' % string_to_sign)
        # AWS3 signs the SHA-256 digest of the StringToSign, not the
        # string itself.
        hash_value = sha256(string_to_sign).digest()
        b64_hmac = self.sign_string(hash_value)
        s = "AWS3 AWSAccessKeyId=%s," % self._provider.access_key
        s += "Algorithm=%s," % self.algorithm()
        s += "SignedHeaders=%s," % ';'.join(headers_to_sign)
        s += "Signature=%s" % b64_hmac
        req.headers['X-Amzn-Authorization'] = s
class HmacAuthV4Handler(AuthHandler, HmacKeys):
    """
    Implements the new Version 4 HMAC authorization.
    """
    capability = ['hmac-v4']
    def __init__(self, host, config, provider,
                 service_name=None, region_name=None):
        AuthHandler.__init__(self, host, config, provider)
        HmacKeys.__init__(self, host, config, provider)
        # You can set the service_name and region_name to override the
        # values which would otherwise come from the endpoint, e.g.
        # <service>.<region>.amazonaws.com.
        self.service_name = service_name
        self.region_name = region_name
    def _sign(self, key, msg, hex=False):
        """HMAC-SHA256 *msg* with *key*; hex or raw digest per *hex*."""
        if hex:
            sig = hmac.new(key, msg.encode('utf-8'), sha256).hexdigest()
        else:
            sig = hmac.new(key, msg.encode('utf-8'), sha256).digest()
        return sig
    def headers_to_sign(self, http_request):
        """
        Select the headers from the request that need to be included
        in the StringToSign.
        """
        # Host is always signed; otherwise only x-amz-* headers are.
        headers_to_sign = {}
        headers_to_sign = {'Host': self.host}
        for name, value in http_request.headers.items():
            lname = name.lower()
            if lname.startswith('x-amz'):
                headers_to_sign[name] = value
        return headers_to_sign
    def query_string(self, http_request):
        """URL-encode request params, sorted by name."""
        parameter_names = sorted(http_request.params.keys())
        pairs = []
        for pname in parameter_names:
            pval = str(http_request.params[pname]).encode('utf-8')
            pairs.append(urllib.quote(pname, safe='') + '=' +
                         urllib.quote(pval, safe='-_~'))
        return '&'.join(pairs)
    def canonical_query_string(self, http_request):
        # POST requests pass parameters in through the
        # http_request.body field.
        if http_request.method == 'POST':
            return ""
        l = []
        for param in sorted(http_request.params):
            value = str(http_request.params[param])
            l.append('%s=%s' % (urllib.quote(param, safe='-_.~'),
                                urllib.quote(value, safe='-_.~')))
        return '&'.join(l)
    def canonical_headers(self, headers_to_sign):
        """
        Return the headers that need to be included in the StringToSign
        in their canonical form by converting all header keys to lower
        case, sorting them in alphabetical order and then joining
        them into a string, separated by newlines.
        """
        # Unlike V3, internal runs of whitespace in values are collapsed.
        l = sorted(['%s:%s' % (n.lower().strip(),
                   ' '.join(headers_to_sign[n].strip().split()))
                    for n in headers_to_sign])
        return '\n'.join(l)
    def signed_headers(self, headers_to_sign):
        """Semicolon-joined, sorted, lower-cased signed header names."""
        l = ['%s' % n.lower().strip() for n in headers_to_sign]
        l = sorted(l)
        return ';'.join(l)
    def canonical_uri(self, http_request):
        return http_request.auth_path
    def payload(self, http_request):
        """Hex SHA-256 of the request body."""
        body = http_request.body
        # If the body is a file like object, we can use
        # boto.utils.compute_hash, which will avoid reading
        # the entire body into memory.
        if hasattr(body, 'seek') and hasattr(body, 'read'):
            return boto.utils.compute_hash(body, hash_algorithm=sha256)[0]
        return sha256(http_request.body).hexdigest()
    def canonical_request(self, http_request):
        """Assemble the newline-joined CanonicalRequest string."""
        cr = [http_request.method.upper()]
        cr.append(self.canonical_uri(http_request))
        cr.append(self.canonical_query_string(http_request))
        headers_to_sign = self.headers_to_sign(http_request)
        cr.append(self.canonical_headers(headers_to_sign) + '\n')
        cr.append(self.signed_headers(headers_to_sign))
        cr.append(self.payload(http_request))
        return '\n'.join(cr)
    def scope(self, http_request):
        """access_key/date/region/service/aws4_request (for Credential=)."""
        scope = [self._provider.access_key]
        scope.append(http_request.timestamp)
        scope.append(http_request.region_name)
        scope.append(http_request.service_name)
        scope.append('aws4_request')
        return '/'.join(scope)
    def credential_scope(self, http_request):
        """Build the credential scope, resolving region/service as a side
        effect onto *http_request* (timestamp, region_name, service_name)."""
        scope = []
        http_request.timestamp = http_request.headers['X-Amz-Date'][0:8]
        scope.append(http_request.timestamp)
        # The service_name and region_name either come from:
        # * The service_name/region_name attrs or (if these values are None)
        # * parsed from the endpoint <service>.<region>.amazonaws.com.
        parts = http_request.host.split('.')
        if self.region_name is not None:
            region_name = self.region_name
        else:
            if len(parts) == 3:
                region_name = 'us-east-1'
            else:
                region_name = parts[1]
        if self.service_name is not None:
            service_name = self.service_name
        else:
            service_name = parts[0]
        http_request.service_name = service_name
        http_request.region_name = region_name
        scope.append(http_request.region_name)
        scope.append(http_request.service_name)
        scope.append('aws4_request')
        return '/'.join(scope)
    def string_to_sign(self, http_request, canonical_request):
        """
        Return the canonical StringToSign as well as a dict
        containing the original version of all headers that
        were included in the StringToSign.
        """
        sts = ['AWS4-HMAC-SHA256']
        sts.append(http_request.headers['X-Amz-Date'])
        sts.append(self.credential_scope(http_request))
        sts.append(sha256(canonical_request).hexdigest())
        return '\n'.join(sts)
    def signature(self, http_request, string_to_sign):
        """Derive the V4 signing key chain and sign *string_to_sign*."""
        key = self._provider.secret_key
        k_date = self._sign(('AWS4' + key).encode('utf-8'),
                            http_request.timestamp)
        k_region = self._sign(k_date, http_request.region_name)
        k_service = self._sign(k_region, http_request.service_name)
        k_signing = self._sign(k_service, 'aws4_request')
        return self._sign(k_signing, string_to_sign, hex=True)
    def add_auth(self, req, **kwargs):
        """
        Add AWS4 authentication to a request.
        :type req: :class`boto.connection.HTTPRequest`
        :param req: The HTTPRequest object.
        """
        # This could be a retry.  Make sure the previous
        # authorization header is removed first.
        if 'X-Amzn-Authorization' in req.headers:
            del req.headers['X-Amzn-Authorization']
        now = datetime.datetime.utcnow()
        req.headers['X-Amz-Date'] = now.strftime('%Y%m%dT%H%M%SZ')
        if self._provider.security_token:
            req.headers['X-Amz-Security-Token'] = self._provider.security_token
        qs = self.query_string(req)
        if qs and req.method == 'POST':
            # Stash request parameters into post body
            # before we generate the signature.
            req.body = qs
            req.headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
            req.headers['Content-Length'] = str(len(req.body))
        else:
            # Safe to modify req.path here since
            # the signature will use req.auth_path.
            req.path = req.path.split('?')[0]
            req.path = req.path + '?' + qs
        canonical_request = self.canonical_request(req)
        boto.log.debug('CanonicalRequest:\n%s' % canonical_request)
        string_to_sign = self.string_to_sign(req, canonical_request)
        boto.log.debug('StringToSign:\n%s' % string_to_sign)
        signature = self.signature(req, string_to_sign)
        boto.log.debug('Signature:\n%s' % signature)
        headers_to_sign = self.headers_to_sign(req)
        l = ['AWS4-HMAC-SHA256 Credential=%s' % self.scope(req)]
        l.append('SignedHeaders=%s' % self.signed_headers(headers_to_sign))
        l.append('Signature=%s' % signature)
        req.headers['Authorization'] = ','.join(l)
class QuerySignatureHelper(HmacKeys):
    """
    Helper for Query signature based Auth handler.
    Concrete sub class need to implement _calc_signature method.
    """
    def add_auth(self, http_request, **kwargs):
        """Sign the query parameters; POST puts them in the body, GET in the path."""
        headers = http_request.headers
        params = http_request.params
        params['AWSAccessKeyId'] = self._provider.access_key
        params['SignatureVersion'] = self.SignatureVersion
        params['Timestamp'] = boto.utils.get_ts()
        qs, signature = self._calc_signature(
            http_request.params, http_request.method,
            http_request.auth_path, http_request.host)
        boto.log.debug('query_string: %s Signature: %s' % (qs, signature))
        if http_request.method == 'POST':
            headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
            http_request.body = qs + '&Signature=' + urllib.quote_plus(signature)
            http_request.headers['Content-Length'] = str(len(http_request.body))
        else:
            http_request.body = ''
            # if this is a retried request, the qs from the previous try will
            # already be there, we need to get rid of that and rebuild it
            http_request.path = http_request.path.split('?')[0]
            http_request.path = (http_request.path + '?' + qs +
                                 '&Signature=' + urllib.quote_plus(signature))
class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler):
    """Provides Signature V0 Signing"""
    SignatureVersion = 0
    capability = ['sign-v0']
    def _calc_signature(self, params, *args):
        """V0: sign only Action+Timestamp; params sorted case-insensitively."""
        boto.log.debug('using _calc_signature_0')
        hmac = self._get_hmac()
        s = params['Action'] + params['Timestamp']
        hmac.update(s)
        # Python 2 only: list.sort(cmp=...) for case-insensitive ordering.
        keys = params.keys()
        keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
        pairs = []
        for key in keys:
            val = boto.utils.get_utf8_value(params[key])
            pairs.append(key + '=' + urllib.quote(val))
        qs = '&'.join(pairs)
        return (qs, base64.b64encode(hmac.digest()))
class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler):
    """
    Provides Query Signature V1 Authentication.
    """
    SignatureVersion = 1
    capability = ['sign-v1', 'mturk']
    def __init__(self, *args, **kw):
        QuerySignatureHelper.__init__(self, *args, **kw)
        AuthHandler.__init__(self, *args, **kw)
        # V1 always signs with SHA-1.
        self._hmac_256 = None
    def _calc_signature(self, params, *args):
        """V1: HMAC over concatenated key+value pairs, case-insensitive order."""
        boto.log.debug('using _calc_signature_1')
        hmac = self._get_hmac()
        # Python 2 only: list.sort(cmp=...) for case-insensitive ordering.
        keys = params.keys()
        keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
        pairs = []
        for key in keys:
            hmac.update(key)
            val = boto.utils.get_utf8_value(params[key])
            hmac.update(val)
            pairs.append(key + '=' + urllib.quote(val))
        qs = '&'.join(pairs)
        return (qs, base64.b64encode(hmac.digest()))
class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler):
    """Provides Query Signature V2 Authentication."""
    SignatureVersion = 2
    # Fixed: 'ec2' was listed twice in this capability list.
    capability = ['sign-v2', 'ec2', 'emr', 'fps', 'ecs',
                  'sdb', 'iam', 'rds', 'sns', 'sqs', 'cloudformation']
    def _calc_signature(self, params, verb, path, server_name):
        """V2: HMAC over 'verb\\nhost\\npath\\n' + the sorted query string."""
        boto.log.debug('using _calc_signature_2')
        string_to_sign = '%s\n%s\n%s\n' % (verb, server_name.lower(), path)
        hmac = self._get_hmac()
        params['SignatureMethod'] = self.algorithm()
        if self._provider.security_token:
            params['SecurityToken'] = self._provider.security_token
        keys = sorted(params.keys())
        pairs = []
        for key in keys:
            val = boto.utils.get_utf8_value(params[key])
            pairs.append(urllib.quote(key, safe='') + '=' +
                         urllib.quote(val, safe='-_~'))
        qs = '&'.join(pairs)
        boto.log.debug('query string: %s' % qs)
        string_to_sign += qs
        boto.log.debug('string_to_sign: %s' % string_to_sign)
        hmac.update(string_to_sign)
        b64 = base64.b64encode(hmac.digest())
        boto.log.debug('len(b64)=%d' % len(b64))
        boto.log.debug('base64 encoded digest: %s' % b64)
        return (qs, b64)
class POSTPathQSV2AuthHandler(QuerySignatureV2AuthHandler, AuthHandler):
    """
    Query Signature V2 Authentication relocating signed query
    into the path and allowing POST requests with Content-Types.
    """
    capability = ['mws']
    def add_auth(self, req, **kwargs):
        """Sign like V2 but always place the signed query in the path."""
        req.params['AWSAccessKeyId'] = self._provider.access_key
        req.params['SignatureVersion'] = self.SignatureVersion
        req.params['Timestamp'] = boto.utils.get_ts()
        qs, signature = self._calc_signature(req.params, req.method,
                                             req.auth_path, req.host)
        boto.log.debug('query_string: %s Signature: %s' % (qs, signature))
        if req.method == 'POST':
            # Body is left alone; only headers are adjusted for POST.
            req.headers['Content-Length'] = str(len(req.body))
            req.headers['Content-Type'] = req.headers.get('Content-Type',
                                                          'text/plain')
        else:
            req.body = ''
        # if this is a retried req, the qs from the previous try will
        # already be there, we need to get rid of that and rebuild it
        req.path = req.path.split('?')[0]
        req.path = (req.path + '?' + qs +
                    '&Signature=' + urllib.quote_plus(signature))
def get_auth_handler(host, config, provider, requested_capability=None):
    """Finds an AuthHandler that is ready to authenticate.
    Lists through all the registered AuthHandlers to find one that is willing
    to handle for the requested capabilities, config and provider.
    :type host: string
    :param host: The name of the host
    :type config:
    :param config:
    :type provider:
    :param provider:
    Returns:
        An implementation of AuthHandler.
    Raises:
        boto.exception.NoAuthHandlerFound
    """
    # Removed unused locals (total_handlers, checked_handlers alias);
    # behavior is unchanged.
    ready_handlers = []
    auth_handlers = boto.plugin.get_plugin(AuthHandler, requested_capability)
    for handler in auth_handlers:
        try:
            ready_handlers.append(handler(host, config, provider))
        except boto.auth_handler.NotReadyToAuthenticate:
            # Handler lacks credentials/config for this provider; skip it.
            pass
    if not ready_handlers:
        names = [handler.__name__ for handler in auth_handlers]
        raise boto.exception.NoAuthHandlerFound(
            'No handler was ready to authenticate. %d handlers were checked.'
            ' %s '
            'Check your credentials' % (len(names), str(names)))
    # We select the last ready auth handler that was loaded, to allow users to
    # customize how auth works in environments where there are shared boto
    # config files (e.g., /etc/boto.cfg and ~/.boto): The more general,
    # system-wide shared configs should be loaded first, and the user's
    # customizations loaded last. That way, for example, the system-wide
    # config might include a plugin_directory that includes a service account
    # auth plugin shared by all users of a Google Compute Engine instance
    # (allowing sharing of non-user data between various services), and the
    # user could override this with a .boto config that includes user-specific
    # credentials (for access to user data).
    return ready_handlers[-1]
|
bsd-3-clause
|
lenovor/reweighted-ws
|
caltech/nade-nade.py
|
6
|
1678
|
import numpy as np
from learning.dataset import CalTechSilhouettes
from learning.preproc import PermuteColumns
from learning.termination import LogLikelihoodIncrease, EarlyStopping
from learning.monitor import MonitorLL, DLogModelParams, SampleFromP
from learning.training import Trainer
from learning.models.rws import LayerStack
from learning.models.sbn import SBN, SBNTop
from learning.models.darn import DARN, DARNTop
from learning.models.nade import NADE, NADETop
n_vis = 28*28  # CalTech silhouettes are 28x28 binary images

# Deterministic column permutation applied identically to all three splits.
preproc = PermuteColumns()
dataset = CalTechSilhouettes(which_set='train', preproc=[preproc])
valiset = CalTechSilhouettes(which_set='valid', preproc=[preproc])
testset = CalTechSilhouettes(which_set='test', preproc=[preproc])
# Generative stack p: NADE over the visibles, topped by a NADETop prior
# over the 150-unit latent layer.
p_layers=[
    NADE(
        n_X=n_vis,
        n_Y=150,
    ),
    NADETop(
        n_X=150,
    ),
]
# Recognition model q: maps visibles back to the 150-unit latent layer.
q_layers=[
    NADE(
        n_Y=n_vis,
        n_X=150,
    ),
]
model = LayerStack(
    p_layers=p_layers,
    q_layers=q_layers,
)
# Reweighted wake-sleep trainer; validation LL monitored each epoch,
# final LL estimated with up to 1000 samples.
trainer = Trainer(
    n_samples=5,
    learning_rate_p=1e-3,
    learning_rate_q=1e-3,
    learning_rate_s=1e-3,
    layer_discount=1.0,
    batch_size=25,
    dataset=dataset,
    model=model,
    termination=EarlyStopping(),
    #step_monitors=[MonitorLL(data=smallset, n_samples=[1, 5, 25, 100])],
    epoch_monitors=[
        DLogModelParams(),
        MonitorLL(name="valiset", data=valiset, n_samples=[1, 5, 25, 100]),
        SampleFromP(n_samples=100)
    ],
    final_monitors=[
        MonitorLL(name="final-valiset", data=valiset, n_samples=[1, 5, 25, 100, 500, 1000]),
        MonitorLL(name="final-testset", data=testset, n_samples=[1, 5, 25, 100, 500, 1000]),
    ],
    monitor_nth_step=100,
)
|
agpl-3.0
|
chubbymaggie/miasm
|
test/utils/testset.py
|
2
|
9436
|
import os
import subprocess
import sys
import time
from multiprocessing import cpu_count, Queue, Process
from test import Test
class Message(object):
    "Message exchanged in the TestSet message queue"
    # Base marker class; the concrete subclasses below carry the payload.
    pass
class MessageTaskNew(Message):
    "Stand for a new task"
    def __init__(self, task):
        # Test instance that has just been handed to a worker.
        self.task = task
class MessageTaskDone(Message):
    "Stand for a task done"
    def __init__(self, task, error):
        # Completed Test instance and its error output (None on success,
        # worker stderr bytes on failure — see TestSet.worker).
        self.task = task
        self.error = error
class MessageClose(Message):
    "Close the channel"
    # Poison pill telling the message-handler loop to terminate.
    pass
class TestSet(object):
    """Manage a set of tests and run them on a pool of worker processes.

    Tests are scheduled only once their declared dependencies are done;
    progress is reported through the callbacks set with set_callback().
    """

    def __init__(self, base_dir):
        """Initialise a test set
        @base_dir: base directory for tests
        """
        # Parse arguments
        self.base_dir = base_dir
        # Init internals
        self.task_done_cb = lambda tst, err: None  # On task done callback
        self.task_new_cb = lambda tst: None        # On new task callback
        self.todo_queue = Queue()       # Tasks to do
        self.message_queue = Queue()    # Messages with workers
        self.tests = []                 # Tests to run
        self.tests_done = []            # Tasks done
        self.cpu_c = cpu_count()        # CPUs available
        self.errorcode = 0              # Non-zero if a test failed
        self.additional_args = []       # Arguments to always add

    def __add__(self, test):
        "Same as TestSet.add"
        self.add(test)
        return self

    def add(self, test):
        "Add a test instance to the current test set"
        if not isinstance(test, Test):
            raise ValueError("%s is not a valid test instance" % (repr(test)))
        self.tests.append(test)

    def set_cpu_numbers(self, cpu_c):
        """Set the number of cpu to use
        @cpu_c: Number of CPU to use (default is maximum)
        """
        self.cpu_c = cpu_c

    def set_callback(self, task_done=None, task_new=None):
        """Set callbacks for task information retrieval
        @task_done: function(Test, Error message)
        @task_new: function(Test)
        """
        if task_done:
            self.task_done_cb = task_done
        if task_new:
            self.task_new_cb = task_new

    def _add_tasks(self):
        "Add tests to do, regarding to dependencies"
        # Iterate over a snapshot: removing from self.tests while iterating
        # it directly would silently skip the element following each removal,
        # delaying (or losing) schedulable tests.
        for test in list(self.tests):
            # Schedule the test once every dependency has completed
            if all(dep in self.tests_done for dep in test.depends):
                self.tests.remove(test)
                self.todo_queue.put(test)

        if len(self.tests) == 0:
            # Poison pills: one per worker so they all exit
            for _ in range(self.cpu_c):
                self.todo_queue.put(None)

        # All tasks done
        if len(self.tests_done) == self.init_tests_number:
            self.message_queue.put(MessageClose())

    def _messages_handler(self):
        "Manage message between Master and Workers"
        # Main loop
        while True:
            message = self.message_queue.get()
            if isinstance(message, MessageClose):
                # Poison pill
                break
            elif isinstance(message, MessageTaskNew):
                # A task begins
                self.task_new_cb(message.task)
            elif isinstance(message, MessageTaskDone):
                # A task has been done: record it and schedule dependents
                self.tests_done.append(message.task)
                self._add_tasks()
                self.task_done_cb(message.task, message.error)
                if message.error is not None:
                    self.errorcode = -1
            else:
                raise ValueError("Unknown message type %s" % type(message))

    @staticmethod
    def worker(todo_queue, message_queue, init_args):
        """Worker launched in parallel
        @todo_queue: task to do
        @message_queue: communication with Host
        @init_args: additional arguments for command line
        """
        # Main loop
        while True:
            # Acquire a task (None is the poison pill)
            test = todo_queue.get()
            if test is None:
                break
            test.start_time = time.time()
            message_queue.put(MessageTaskNew(test))

            # Go to the expected directory
            current_directory = os.getcwd()
            os.chdir(test.base_dir)

            # Launch test
            executable = test.executable if test.executable else sys.executable
            testpy = subprocess.Popen(([executable] +
                                       init_args + test.command_line),
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
            outputs = testpy.communicate()

            # Check result: stderr is reported on non-zero exit status
            error = None
            if testpy.returncode != 0:
                error = outputs[1]

            # Restore directory
            os.chdir(current_directory)

            # Report task finish
            message_queue.put(MessageTaskDone(test, error))

    @staticmethod
    def fast_unify(seq, idfun=None):
        """Order preserving unifying list function
        @seq: list to unify
        @idfun: marker function (default is identity)
        """
        if idfun is None:
            idfun = lambda x: x
        seen = {}
        result = []
        for item in seq:
            marker = idfun(item)
            if marker in seen:
                continue
            seen[marker] = 1
            result.append(item)
        return result

    def _clean(self):
        "Remove produced files"
        # Build the list of products
        products = []
        current_directory = os.getcwd()
        for test in self.tests_done:
            for product in test.products:
                # Get the full product path
                products.append(os.path.join(current_directory, test.base_dir,
                                             product))

        # Unify the list and remove products
        for product in TestSet.fast_unify(products):
            try:
                os.remove(product)
            except OSError:
                # print() works on both Python 2 and 3
                print("Cleaning error: Unable to remove %s" % product)

    def add_additionnal_args(self, args):
        """Add arguments to used on the test command line
        @args: list of str
        """
        self.additional_args += args

    def run(self):
        "Launch tests"
        # Go in the right directory
        self.current_directory = os.getcwd()
        os.chdir(self.base_dir)

        # Launch workers
        processes = []
        for _ in range(self.cpu_c):
            p = Process(target=TestSet.worker, args=(self.todo_queue,
                                                     self.message_queue,
                                                     self.additional_args))
            processes.append(p)
            p.start()

        # Add initial tasks
        self.init_tests_number = len(self.tests)
        # Initial tasks
        self._add_tasks()

        # Handle messages
        self._messages_handler()

        # Close queue and join processes
        self.todo_queue.close()
        self.todo_queue.join_thread()
        self.message_queue.close()
        self.message_queue.join_thread()
        for p in processes:
            p.join()

    def end(self, clean=True):
        """End a testset run
        @clean: (optional) if set, remove tests products
        PRE: run()
        """
        # Clean
        if clean:
            self._clean()

        # Restore directory
        os.chdir(self.current_directory)

    def tests_passed(self):
        "Return a non zero value if at least one test failed"
        return self.errorcode

    def filter_tags(self, include_tags=None, exclude_tags=None):
        """Filter tests by tags
        @include_tags: list of tags' name (whitelist)
        @exclude_tags: list of tags' name (blacklist)
        If @include_tags and @exclude_tags are used together, @exclude_tags will
        act as a blacklist on @include_tags generated tests
        """
        new_testset = []

        # Guard the default None arguments: set(None) raises TypeError, so
        # treat a missing list as empty.
        include_tags = set(include_tags or [])
        exclude_tags = set(exclude_tags or [])

        if include_tags.intersection(exclude_tags):
            raise ValueError("Tags are mutually included and excluded: %s" % include_tags.intersection(exclude_tags))

        for test in self.tests:
            tags = set(test.tags)
            if exclude_tags.intersection(tags):
                # Ignore the current test because it is excluded
                continue
            if not include_tags:
                new_testset.append(test)
            else:
                if include_tags.intersection(tags):
                    new_testset.append(test)
                    # Add tests dependencies (transitively)
                    dependency = list(test.depends)
                    while dependency:
                        subtest = dependency.pop()
                        if subtest not in new_testset:
                            new_testset.append(subtest)
                        for subdepends in subtest.depends:
                            if subdepends not in new_testset:
                                dependency.append(subdepends)
        self.tests = new_testset
|
gpl-2.0
|
CloudWareChile/OpenChile
|
openerp/addons/hr_expense/__openerp__.py
|
9
|
2716
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Expenses Management',
'version': '1.0',
'category': 'Human Resources',
"sequence": 30,
'complexity': "easy",
'description': """
This module aims to manage employee's expenses.
===============================================
The whole workflow is implemented:
* Draft expense
* Confirmation of the sheet by the employee
* Validation by his manager
* Validation by the accountant and invoice creation
* Payment of the invoice to the employee
This module also uses the analytic accounting and is compatible with
the invoice on timesheet module so that you will be able to automatically
re-invoice your customer's expenses if your work by project.
""",
'author': 'OpenERP SA',
'website': 'http://www.openerp.com',
'images': ['images/hr_expenses_analysis.jpeg', 'images/hr_expenses.jpeg'],
'depends': ['hr', 'account'],
'init_xml': [],
'update_xml': [
'security/ir.model.access.csv',
'hr_expense_data.xml',
'hr_expense_sequence.xml',
'hr_expense_workflow.xml',
'hr_expense_view.xml',
'hr_expense_report.xml',
'process/hr_expense_process.xml',
'security/ir_rule.xml',
'report/hr_expense_report_view.xml',
'board_hr_expense_view.xml',
'board_hr_expense_manager_view.xml',
'hr_expense_installer_view.xml',
],
'demo_xml': [
'hr_expense_demo.xml',
],
'test': [
'test/expense_demo.yml',
'test/expense_process.yml',
],
'installable': True,
'auto_install': False,
'certificate': '0062479841789',
'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
jcoady9/python-for-android
|
python-modules/twisted/twisted/python/util.py
|
56
|
29731
|
# -*- test-case-name: twisted.python.test.test_util -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
import os, sys, hmac, errno, new, inspect, warnings
try:
import pwd, grp
except ImportError:
pwd = grp = None
try:
from os import setgroups, getgroups
except ImportError:
setgroups = getgroups = None
from UserDict import UserDict
class InsensitiveDict:
    """Dictionary, that has case-insensitive keys.
    Normally keys are retained in their original form when queried with
    .keys() or .items(). If initialized with preserveCase=0, keys are both
    looked up in lowercase and returned in lowercase by .keys() and .items().
    """
    """
    Modified recipe at
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66315 originally
    contributed by Sami Hangaslammi.
    """
    # Storage layout: self.data maps lowercased-key -> (original_key, value).
    # NOTE(review): Python 2 only — relies on `unicode`, dict.has_key() and
    # dict.itervalues(), none of which exist on Python 3.
    def __init__(self, dict=None, preserve=1):
        """Create an empty dictionary, or update from 'dict'."""
        self.data = {}
        self.preserve=preserve
        if dict:
            self.update(dict)
    def __delitem__(self, key):
        k=self._lowerOrReturn(key)
        del self.data[k]
    def _lowerOrReturn(self, key):
        # Non-string keys (ints, tuples, ...) are used verbatim.
        if isinstance(key, str) or isinstance(key, unicode):
            return key.lower()
        else:
            return key
    def __getitem__(self, key):
        """Retrieve the value associated with 'key' (in any case)."""
        k = self._lowerOrReturn(key)
        return self.data[k][1]
    def __setitem__(self, key, value):
        """Associate 'value' with 'key'. If 'key' already exists, but
        in different case, it will be replaced."""
        k = self._lowerOrReturn(key)
        self.data[k] = (key, value)
    def has_key(self, key):
        """Case insensitive test whether 'key' exists."""
        k = self._lowerOrReturn(key)
        return self.data.has_key(k)
    __contains__=has_key
    def _doPreserve(self, key):
        # Lowercase outgoing keys only when preserveCase was disabled.
        if not self.preserve and (isinstance(key, str)
                                  or isinstance(key, unicode)):
            return key.lower()
        else:
            return key
    def keys(self):
        """List of keys in their original case."""
        return list(self.iterkeys())
    def values(self):
        """List of values."""
        return list(self.itervalues())
    def items(self):
        """List of (key,value) pairs."""
        return list(self.iteritems())
    def get(self, key, default=None):
        """Retrieve value associated with 'key' or return default value
        if 'key' doesn't exist."""
        try:
            return self[key]
        except KeyError:
            return default
    def setdefault(self, key, default):
        """If 'key' doesn't exists, associate it with the 'default' value.
        Return value associated with 'key'."""
        if not self.has_key(key):
            self[key] = default
        return self[key]
    def update(self, dict):
        """Copy (key,value) pairs from 'dict'."""
        for k,v in dict.items():
            self[k] = v
    def __repr__(self):
        """String representation of the dictionary."""
        items = ", ".join([("%r: %r" % (k,v)) for k,v in self.items()])
        return "InsensitiveDict({%s})" % items
    def iterkeys(self):
        # v is the stored (original_key, value) pair; yield the key part.
        for v in self.data.itervalues():
            yield self._doPreserve(v[0])
    def itervalues(self):
        for v in self.data.itervalues():
            yield v[1]
    def iteritems(self):
        for (k, v) in self.data.itervalues():
            yield self._doPreserve(k), v
    def popitem(self):
        i=self.items()[0]
        del self[i[0]]
        return i
    def clear(self):
        for k in self.keys():
            del self[k]
    def copy(self):
        return InsensitiveDict(self, self.preserve)
    def __len__(self):
        return len(self.data)
    def __eq__(self, other):
        # Value-wise comparison; `other` may be any mapping supporting
        # `in`, indexing and len().
        for k,v in self.items():
            if not (k in other) or not (other[k]==v):
                return 0
        return len(self)==len(other)
class OrderedDict(UserDict):
    """A UserDict that preserves insert order whenever possible."""
    # self._order is the list of keys in first-insertion order; re-assigning
    # an existing key keeps its original position (see __setitem__).
    def __init__(self, dict=None, **kwargs):
        self._order = []
        self.data = {}
        if dict is not None:
            if hasattr(dict,'keys'):
                self.update(dict)
            else:
                for k,v in dict: # sequence
                    self[k] = v
        if len(kwargs):
            self.update(kwargs)
    def __repr__(self):
        return '{'+', '.join([('%r: %r' % item) for item in self.items()])+'}'
    def __setitem__(self, key, value):
        # Record ordering only for keys not yet present.
        if not self.has_key(key):
            self._order.append(key)
        UserDict.__setitem__(self, key, value)
    def copy(self):
        return self.__class__(self)
    def __delitem__(self, key):
        UserDict.__delitem__(self, key)
        self._order.remove(key)
    def iteritems(self):
        for item in self._order:
            yield (item, self[item])
    def items(self):
        return list(self.iteritems())
    def itervalues(self):
        for item in self._order:
            yield self[item]
    def values(self):
        return list(self.itervalues())
    def iterkeys(self):
        return iter(self._order)
    def keys(self):
        return list(self._order)
    def popitem(self):
        # Pops the most recently inserted key.
        key = self._order[-1]
        value = self[key]
        del self[key]
        return (key, value)
    def setdefault(self, item, default):
        if self.has_key(item):
            return self[item]
        self[item] = default
        return default
    def update(self, d):
        for k, v in d.items():
            self[k] = v
def uniquify(lst):
    """Make the elements of a list unique by inserting them into a dictionary.
    This must not change the order of the input lst.

    @param lst: the input sequence (left unmodified); elements must be hashable.
    @return: a new list keeping the first occurrence of each element.
    """
    dct = {}
    result = []
    for k in lst:
        # `in` works on Python 2 and 3; dict.has_key() was removed in 3.
        if k not in dct:
            result.append(k)
        dct[k] = 1
    return result
def padTo(n, seq, default=None):
    """Pads a sequence out to n elements,
    filling in with a default value if it is not long enough.
    If the input sequence is longer than n, raises ValueError.
    Details, details:
    This returns a new list; it does not extend the original sequence.
    The new list contains the values of the original sequence, not copies.

    @raise ValueError: if len(seq) > n.
    """
    if len(seq) > n:
        # Call-style raise: `raise E, msg` is Python-2-only syntax.
        raise ValueError("%d elements is more than %d." % (len(seq), n))
    blank = [default] * n
    blank[:len(seq)] = list(seq)
    return blank
def getPluginDirs():
    """Return existing plugin directories.

    Candidates are the 'plugins' directory next to the installed twisted
    package plus two per-user locations; only those that exist as
    directories are kept.
    """
    import twisted
    twistedRoot = os.path.dirname(os.path.dirname(
        os.path.abspath(twisted.__file__)))
    candidates = [
        os.path.join(twistedRoot, 'plugins'),
        os.path.expanduser("~/TwistedPlugins"),
        os.path.expanduser("~/.twisted"),
    ]
    return filter(os.path.isdir, candidates)
def addPluginDir():
    # Extend the import path with every existing plugin directory.
    sys.path.extend(getPluginDirs())
def sibpath(path, sibling):
    """Return the path to a sibling of a file in the filesystem.
    This is useful in conjunction with the special __file__ attribute
    that Python provides for modules, so modules can load associated
    resource files.
    """
    parent = os.path.dirname(os.path.abspath(path))
    return os.path.join(parent, sibling)
def _getpass(prompt):
    """Helper to turn IOErrors into KeyboardInterrupts"""
    import getpass
    try:
        return getpass.getpass(prompt)
    except IOError as e:
        # `except E as e` replaces the Python-2-only `except E, e` form.
        # An EINTR here means the user hit Ctrl-C during the prompt.
        if e.errno == errno.EINTR:
            raise KeyboardInterrupt
        raise
    except EOFError:
        raise KeyboardInterrupt
def getPassword(prompt = 'Password: ', confirm = 0, forceTTY = 0,
                confirmPrompt = 'Confirm password: ',
                mismatchMessage = "Passwords don't match."):
    """Obtain a password by prompting or from stdin.
    If stdin is a terminal, prompt for a new password, and confirm (if
    C{confirm} is true) by asking again to make sure the user typed the same
    thing, as keystrokes will not be echoed.
    If stdin is not a terminal, and C{forceTTY} is not true, read in a line
    and use it as the password, less the trailing newline, if any. If
    C{forceTTY} is true, attempt to open a tty and prompt for the password
    using it. Raise a RuntimeError if this is not possible.
    @returns: C{str}
    """
    isaTTY = hasattr(sys.stdin, 'isatty') and sys.stdin.isatty()
    old = None
    try:
        if not isaTTY:
            if forceTTY:
                try:
                    # Re-point stdio at the controlling tty; the originals
                    # are restored in the finally block below.
                    old = sys.stdin, sys.stdout
                    sys.stdin = sys.stdout = open('/dev/tty', 'r+')
                except:
                    # NOTE(review): bare except hides the real cause of the
                    # failure (e.g. no controlling tty vs. permissions).
                    raise RuntimeError("Cannot obtain a TTY")
            else:
                # Non-interactive: take one line from stdin, strip newline.
                password = sys.stdin.readline()
                if password[-1] == '\n':
                    password = password[:-1]
                return password
        # Interactive: keep asking until the confirmation matches.
        while 1:
            try1 = _getpass(prompt)
            if not confirm:
                return try1
            try2 = _getpass(confirmPrompt)
            if try1 == try2:
                return try1
            else:
                sys.stderr.write(mismatchMessage + "\n")
    finally:
        # Restore the original stdio pair if we re-pointed them at a tty.
        if old:
            sys.stdin.close()
            sys.stdin, sys.stdout = old
def dict(*a, **k):
    # Deprecated compatibility shim; deliberately shadows the builtin.
    # NOTE(review): `__builtin__` exists only on Python 2 (renamed
    # `builtins` in Python 3).
    import __builtin__
    warnings.warn('twisted.python.util.dict is deprecated. Use __builtin__.dict instead')
    return __builtin__.dict(*a, **k)
def println(*a):
    """Write the str() of each argument, space-separated, plus a newline."""
    line = ' '.join(str(item) for item in a)
    sys.stdout.write(line + '\n')
# XXX
# This does not belong here
# But where does it belong?
def str_xor(s, b):
    """XOR each character of C{s} with the integer C{b}; return the result."""
    out = []
    for ch in s:
        out.append(chr(ord(ch) ^ b))
    return ''.join(out)
def keyed_md5(secret, challenge):
    """
    Create the keyed MD5 string for the given secret and challenge.
    """
    # Deprecated thin wrapper: delegates to hmac.HMAC with its historical
    # default digest and returns the hex digest string.
    warnings.warn(
        "keyed_md5() is deprecated. Use the stdlib module hmac instead.",
        DeprecationWarning, stacklevel=2
        )
    return hmac.HMAC(secret, challenge).hexdigest()
def makeStatBar(width, maxPosition, doneChar = '=', undoneChar = '-', currentChar = '>'):
    """Creates a function that will return a string representing a progress bar.
    """
    scale = width / float(maxPosition)
    def statBar(position, force = 0, last = ['']):
        # `last` is a deliberate mutable default: it persists across calls
        # and remembers the previously rendered bar.
        assert len(last) == 1, "Don't mess with the last parameter."
        filled = int(scale * position)
        remaining = width - filled - 2
        rendered = "[%s%s%s]" % (doneChar * filled, currentChar,
                                 undoneChar * remaining)
        if force or rendered != last[0]:
            last[0] = rendered
            return rendered
        # Unchanged since last call and not forced: emit nothing.
        return ''
    statBar.__doc__ = """statBar(position, force = 0) -> '[%s%s%s]'-style progress bar
    returned string is %d characters long, and the range goes from 0..%d.
    The 'position' argument is where the '%s' will be drawn. If force is false,
    '' will be returned instead if the resulting progress bar is identical to the
    previously returned progress bar.
    """ % (doneChar * 3, currentChar, undoneChar * 3, width, maxPosition, currentChar)
    return statBar
def spewer(frame, s, ignored):
    """A trace function for sys.settrace that prints every function or method call."""
    from twisted.python import reflect
    # `'self' in f_locals` replaces the Python-2-only dict.has_key();
    # print() as a function works on both Python 2 (single argument) and 3.
    if 'self' in frame.f_locals:
        se = frame.f_locals['self']
        if hasattr(se, '__class__'):
            k = reflect.qual(se.__class__)
        else:
            k = reflect.qual(type(se))
        print('method %s of %s at %s' % (
            frame.f_code.co_name, k, id(se)
        ))
    else:
        print('function %s in %s, line %s' % (
            frame.f_code.co_name,
            frame.f_code.co_filename,
            frame.f_lineno))
def searchupwards(start, files=[], dirs=[]):
    """Walk upwards from start, looking for a directory containing
    all files and directories given as arguments::
    >>> searchupwards('.', ['foo.txt'], ['bar', 'bam'])
    If not found, return None
    """
    # Work on the path split into components; drop the last component at
    # each step to move one level up.
    segments = os.path.abspath(start).split(os.sep)
    while segments:
        candidate = os.sep.join(segments) + os.sep
        haveAll = all(os.path.exists(candidate + f) for f in files)
        if haveAll:
            haveAll = all(os.path.isdir(candidate + d) for d in dirs)
        if haveAll:
            return candidate
        segments.pop(-1)
    return None
class LineLog:
    """
    A limited-size line-based log, useful for logging line-based
    protocols such as SMTP.
    When the log fills up, old entries drop off the end.
    """
    def __init__(self, size=10):
        """
        Create a new log, with size lines of storage (default 10).
        A log size of 0 (or less) means an infinite log.
        """
        if size < 0:
            size = 0
        self.log = [None]*size
        self.size = size
    def append(self,line):
        # Bounded log: shift everything left and overwrite the last slot;
        # unbounded log: just grow.
        if self.size:
            self.log[:-1] = self.log[1:]
            self.log[-1] = line
        else:
            self.log.append(line)
    def str(self):
        """Return the stored lines joined by newlines (empty slots skipped)."""
        return '\n'.join(filter(None,self.log))
    def __getitem__(self, item):
        # list() is required: on Python 3 filter() returns an iterator,
        # which does not support indexing/slicing (no-op on Python 2).
        return list(filter(None, self.log))[item]
    def clear(self):
        """Empty the log"""
        self.log = [None]*self.size
def raises(exception, f, *args, **kwargs):
    """Determine whether the given call raises the given exception.

    @return: 1 if calling C{f(*args, **kwargs)} raised C{exception},
        0 otherwise; other exception types propagate.
    """
    try:
        f(*args, **kwargs)
    except exception:
        caught = 1
    else:
        caught = 0
    return caught
class IntervalDifferential:
    """
    Given a list of intervals, generate the amount of time to sleep between
    \"instants\".
    For example, given 7, 11 and 13, the three (infinite) sequences::
        7 14 21 28 35 ...
        11 22 33 44 ...
        13 26 39 52 ...
    will be generated, merged, and used to produce::
        (7, 0) (4, 1) (2, 2) (1, 0) (7, 0) (1, 1) (4, 2) (2, 0) (5, 1) (2, 0)
    New intervals may be added or removed as iteration proceeds using the
    proper methods.
    """
    def __init__(self, intervals, default=60):
        """
        @type intervals: C{list} of C{int}, C{long}, or C{float} param
        @param intervals: The intervals between instants.
        @type default: C{int}, C{long}, or C{float}
        @param default: The duration to generate if the intervals list
        becomes empty.
        """
        # Defensive copy so later mutation of the caller's list has no effect.
        self.intervals = intervals[:]
        self.default = default
    def __iter__(self):
        # All iteration state lives in the iterator; each __iter__ call
        # starts a fresh, independent schedule.
        return _IntervalDifferentialIterator(self.intervals, self.default)
class _IntervalDifferentialIterator:
def __init__(self, i, d):
self.intervals = [[e, e, n] for (e, n) in zip(i, range(len(i)))]
self.default = d
self.last = 0
def next(self):
if not self.intervals:
return (self.default, None)
last, index = self.intervals[0][0], self.intervals[0][2]
self.intervals[0][0] += self.intervals[0][1]
self.intervals.sort()
result = last - self.last
self.last = last
return result, index
def addInterval(self, i):
if self.intervals:
delay = self.intervals[0][0] - self.intervals[0][1]
self.intervals.append([delay + i, i, len(self.intervals)])
self.intervals.sort()
else:
self.intervals.append([i, i, 0])
def removeInterval(self, interval):
for i in range(len(self.intervals)):
if self.intervals[i][1] == interval:
index = self.intervals[i][2]
del self.intervals[i]
for i in self.intervals:
if i[2] > index:
i[2] -= 1
return
raise ValueError, "Specified interval not in IntervalDifferential"
class FancyStrMixin:
    """
    Mixin providing __str__/__repr__ built from selected attributes.

    Set showAttributes to a sequence of strings naming attributes, OR
    sequences of (attributeName, displayName, formatCharacter)
    """
    showAttributes = ()
    def __str__(self):
        header = (hasattr(self, 'fancybasename') and self.fancybasename
                  or self.__class__.__name__)
        parts = ['<', header]
        for attr in self.showAttributes:
            if isinstance(attr, str):
                parts.append(' %s=%r' % (attr, getattr(self, attr)))
            else:
                name, display, fmt = attr
                parts.append((' %s=' + fmt) % (display, getattr(self, name)))
        parts.append('>')
        return ''.join(parts)
    __repr__ = __str__
class FancyEqMixin:
    """
    Mixin providing __eq__/__ne__ that compare the attributes named in
    C{compareAttributes}; falls back to identity when the tuple is empty.
    """
    compareAttributes = ()
    def __eq__(self, other):
        if not self.compareAttributes:
            return self is other
        if not isinstance(self, other.__class__):
            return NotImplemented
        mine = [getattr(self, name) for name in self.compareAttributes]
        theirs = [getattr(other, name) for name in self.compareAttributes]
        return mine == theirs
    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
def dsu(list, key):
    """
    decorate-sort-undecorate (aka "Schwartzian transform")
    DEPRECATED. Use the built-in C{sorted()} instead.
    """
    warnings.warn(("dsu is deprecated since Twisted 10.1. "
                   "Use the built-in sorted() instead."), DeprecationWarning, stacklevel=2)
    # Decorate with (key, position, element); position makes the sort stable
    # even when elements themselves are not comparable.
    decorated = [(key(element), position, element)
                 for (position, element) in enumerate(list)]
    decorated.sort()
    return [element for (_k, _pos, element) in decorated]
try:
from twisted.python._initgroups import initgroups as _c_initgroups
except ImportError:
_c_initgroups = None
# Platform-dependent definition of initgroups(): a no-op stub when the
# required modules/syscalls are unavailable, otherwise a pure-Python
# implementation (with an optional C fast path via _c_initgroups).
if pwd is None or grp is None or setgroups is None or getgroups is None:
    def initgroups(uid, primaryGid):
        """
        Do nothing.
        Underlying platform support require to manipulate groups is missing.
        """
else:
    # Fallback to the inefficient Python version
    def _setgroups_until_success(l):
        # NOTE(review): uses Python-2-only `except E, e` syntax below.
        while(1):
            # NASTY NASTY HACK (but glibc does it so it must be okay):
            # In case sysconfig didn't give the right answer, find the limit
            # on max groups by just looping, trying to set fewer and fewer
            # groups each time until it succeeds.
            try:
                setgroups(l)
            except ValueError:
                # This exception comes from python itself restricting
                # number of groups allowed.
                if len(l) > 1:
                    del l[-1]
                else:
                    raise
            except OSError, e:
                if e.errno == errno.EINVAL and len(l) > 1:
                    # This comes from the OS saying too many groups
                    del l[-1]
                else:
                    raise
            else:
                # Success, yay!
                return
    def initgroups(uid, primaryGid):
        """
        Initializes the group access list.
        If the C extension is present, we're calling it, which in turn calls
        initgroups(3).
        If not, this is done by reading the group database /etc/group and using
        all groups of which C{uid} is a member. The additional group
        C{primaryGid} is also added to the list.
        If the given user is a member of more than C{NGROUPS}, arbitrary
        groups will be silently discarded to bring the number below that
        limit.
        @type uid: C{int}
        @param uid: The UID for which to look up group information.
        @type primaryGid: C{int} or C{NoneType}
        @param primaryGid: If provided, an additional GID to include when
            setting the groups.
        """
        if _c_initgroups is not None:
            return _c_initgroups(pwd.getpwuid(uid)[0], primaryGid)
        try:
            # Try to get the maximum number of groups
            max_groups = os.sysconf("SC_NGROUPS_MAX")
        except:
            # No predefined limit
            max_groups = 0
        username = pwd.getpwuid(uid)[0]
        l = []
        if primaryGid is not None:
            l.append(primaryGid)
        # Collect the GIDs of every group listing this user as a member.
        for groupname, password, gid, userlist in grp.getgrall():
            if username in userlist:
                l.append(gid)
                if len(l) == max_groups:
                    break # No more groups, ignore any more
        try:
            _setgroups_until_success(l)
        except OSError, e:
            # We might be able to remove this code now that we
            # don't try to setgid/setuid even when not asked to.
            if e.errno == errno.EPERM:
                # Only tolerate EPERM when the current groups are already a
                # subset of the requested ones.
                for g in getgroups():
                    if g not in l:
                        raise
            else:
                raise
def switchUID(uid, gid, euid=False):
    """Switch to the given uid/gid, effectively (euid=True) or for real.

    The gid is changed before the uid, and supplementary groups are
    initialized (via initgroups) before the uid switch.
    """
    if euid:
        uidSetter, gidSetter = os.seteuid, os.setegid
    else:
        uidSetter, gidSetter = os.setuid, os.setgid
    if gid is not None:
        gidSetter(gid)
    if uid is not None:
        initgroups(uid, gid)
        uidSetter(uid)
class SubclassableCStringIO(object):
    """A wrapper around cStringIO to allow for subclassing"""
    # Pure delegation: every file-like method forwards to the wrapped
    # cStringIO instance.  NOTE(review): Python 2 only (cStringIO was
    # merged into io on Python 3).
    __csio = None
    def __init__(self, *a, **kw):
        from cStringIO import StringIO
        self.__csio = StringIO(*a, **kw)
    def __iter__(self):
        return self.__csio.__iter__()
    def next(self):
        return self.__csio.next()
    def close(self):
        return self.__csio.close()
    def isatty(self):
        return self.__csio.isatty()
    def seek(self, pos, mode=0):
        return self.__csio.seek(pos, mode)
    def tell(self):
        return self.__csio.tell()
    def read(self, n=-1):
        return self.__csio.read(n)
    def readline(self, length=None):
        return self.__csio.readline(length)
    def readlines(self, sizehint=0):
        return self.__csio.readlines(sizehint)
    def truncate(self, size=None):
        return self.__csio.truncate(size)
    def write(self, s):
        return self.__csio.write(s)
    def writelines(self, list):
        return self.__csio.writelines(list)
    def flush(self):
        return self.__csio.flush()
    def getvalue(self):
        return self.__csio.getvalue()
def moduleMovedForSplit(origModuleName, newModuleName, moduleDesc,
                        projectName, projectURL, globDict):
    """
    No-op function; only present for backwards compatibility. There is no
    reason to call this function.
    """
    # Emits a deprecation warning and returns None; all parameters ignored.
    warnings.warn(
        "moduleMovedForSplit is deprecated since Twisted 9.0.",
        DeprecationWarning, stacklevel=2)
def untilConcludes(f, *a, **kw):
    """Call C{f} repeatedly, retrying whenever it fails with EINTR.

    @return: whatever C{f(*a, **kw)} returns once it completes without
        being interrupted by a signal; other errors propagate.
    """
    while True:
        try:
            return f(*a, **kw)
        except (IOError, OSError) as e:
            # `as` form replaces the Python-2-only `except E, e` syntax.
            if e.args[0] == errno.EINTR:
                continue
            raise
# Function used by unsignedID() to obtain an object's id; replaceable for
# testing via setIDFunction().
_idFunction = id
def setIDFunction(idFunction):
    """
    Change the function used by L{unsignedID} to determine the integer id value
    of an object. This is largely useful for testing to give L{unsignedID}
    deterministic, easily-controlled behavior.
    @param idFunction: A function with the signature of L{id}.
    @return: The previous function being used by L{unsignedID}.
    """
    global _idFunction
    oldIDFunction = _idFunction
    _idFunction = idFunction
    return oldIDFunction
# A value about twice as large as any Python int, to which negative values
# from id() will be added, moving them into a range which should begin just
# above where positive values from id() leave off.
# sys.maxint only exists on Python 2 (sys.maxsize is the nearest analogue),
# and the old `1L` literal is unnecessary: Python 2 ints auto-promote to
# long on overflow, so plain arithmetic yields the same value.
_HUGEINT = (getattr(sys, 'maxint', sys.maxsize) + 1) * 2
def unsignedID(obj):
    """
    Return the id of an object as an unsigned number so that its hex
    representation makes sense.
    This is mostly necessary in Python 2.4 which implements L{id} to sometimes
    return a negative value. Python 2.3 shares this behavior, but also
    implements hex and the %x format specifier to represent negative values as
    though they were positive ones, obscuring the behavior of L{id}. Python
    2.5's implementation of L{id} always returns positive values.
    """
    rval = _idFunction(obj)
    if rval < 0:
        rval += _HUGEINT
    return rval
def mergeFunctionMetadata(f, g):
    """
    Overwrite C{g}'s name and docstring with values from C{f}. Update
    C{g}'s instance dictionary with C{f}'s.
    To use this function safely you must use the return value. In Python 2.3,
    L{mergeFunctionMetadata} will create a new function. In later versions of
    Python, C{g} will be mutated and returned.
    @return: A function that has C{g}'s behavior and metadata merged from
        C{f}.
    """
    # NOTE(review): Python 2 only — relies on the removed `new` module and
    # func_code/func_globals/func_closure attributes.
    try:
        g.__name__ = f.__name__
    except TypeError:
        # __name__ is read-only (Python 2.3): rebuild g as a new function
        # object carrying f's name.
        try:
            merged = new.function(
                g.func_code, g.func_globals,
                f.__name__, inspect.getargspec(g)[-1],
                g.func_closure)
        except TypeError:
            # NOTE(review): if this also fails, `merged` is never bound and
            # the later attribute assignments raise NameError — presumably
            # unreachable in practice; confirm before relying on it.
            pass
    else:
        merged = g
    try:
        merged.__doc__ = f.__doc__
    except (TypeError, AttributeError):
        pass
    try:
        merged.__dict__.update(g.__dict__)
        merged.__dict__.update(f.__dict__)
    except (TypeError, AttributeError):
        pass
    merged.__module__ = f.__module__
    return merged
def nameToLabel(mname):
    """
    Convert a string like a variable name into a slightly more human-friendly
    string with spaces and capitalized letters.
    @type mname: C{str}
    @param mname: The name to convert to a label. This must be a string
    which could be used as a Python identifier. Strings which do not take
    this form will result in unpredictable behavior.
    @rtype: C{str}
    """
    # Small state machine over the characters: words break on lower<->upper
    # transitions, with special handling so runs of capitals ("XML" in
    # "XMLParser") are kept together as acronyms.
    labelList = []
    word = ''
    lastWasUpper = False
    for letter in mname:
        if letter.isupper() == lastWasUpper:
            # Continuing a word.
            word += letter
        else:
            # breaking a word OR beginning a word
            if lastWasUpper:
                # could be either
                if len(word) == 1:
                    # keep going
                    word += letter
                else:
                    # acronym
                    # we're processing the lowercase letter after the acronym-then-capital
                    lastWord = word[:-1]
                    firstLetter = word[-1]
                    labelList.append(lastWord)
                    word = firstLetter + letter
            else:
                # definitely breaking: lower to upper
                labelList.append(word)
                word = letter
        lastWasUpper = letter.isupper()
    if labelList:
        labelList[0] = labelList[0].capitalize()
    else:
        # Single-word input: no breaks occurred.
        return mname.capitalize()
    labelList.append(word)
    return ' '.join(labelList)
def uidFromString(uidString):
    """
    Convert a user identifier, as a string, into an integer UID.
    @type uid: C{str}
    @param uid: A string giving the base-ten representation of a UID or the
        name of a user which can be converted to a UID via L{pwd.getpwnam}.
    @rtype: C{int}
    @return: The integer UID corresponding to the given string.
    @raise ValueError: If the user name is supplied and L{pwd} is not
        available.
    """
    try:
        numeric = int(uidString)
    except ValueError:
        # Not a number: fall back to a user-name lookup when possible.
        if pwd is None:
            raise
        return pwd.getpwnam(uidString)[2]
    else:
        return numeric
def gidFromString(gidString):
    """
    Convert a group identifier, as a string, into an integer GID.
    @type uid: C{str}
    @param uid: A string giving the base-ten representation of a GID or the
        name of a group which can be converted to a GID via L{grp.getgrnam}.
    @rtype: C{int}
    @return: The integer GID corresponding to the given string.
    @raise ValueError: If the group name is supplied and L{grp} is not
        available.
    """
    try:
        numeric = int(gidString)
    except ValueError:
        # Not a number: fall back to a group-name lookup when possible.
        if grp is None:
            raise
        return grp.getgrnam(gidString)[2]
    else:
        return numeric
def runAsEffectiveUser(euid, egid, function, *args, **kwargs):
    """
    Run the given function wrapped with seteuid/setegid calls.
    This will try to minimize the number of seteuid/setegid calls, comparing
    current and wanted permissions
    @type euid: C{int}
    @param euid: effective UID used to call the function.
    @type egid: C{int}
    @param egid: effective GID used to call the function.
    @param function: the function run with the specific permission.
    @type function: any callable
    @param *args: arguments passed to C{function}
    @param **kwargs: keyword arguments passed to C{function}
    """
    uid, gid = os.geteuid(), os.getegid()
    if uid == euid and gid == egid:
        # Already running with the requested identity; call directly.
        return function(*args, **kwargs)
    else:
        if uid != 0 and (uid != euid or gid != egid):
            # Regain root first so the setegid/seteuid calls below are
            # permitted.
            os.seteuid(0)
        if gid != egid:
            os.setegid(egid)
        if euid != 0 and (euid != uid or gid != egid):
            os.seteuid(euid)
        try:
            return function(*args, **kwargs)
        finally:
            # Restore the original identity, switching back through root
            # when a non-root target identity was in effect.
            if euid != 0 and (uid != euid or gid != egid):
                os.seteuid(0)
            if gid != egid:
                os.setegid(gid)
            if uid != 0 and (uid != euid or gid != egid):
                os.seteuid(uid)
# Public API of this utility module.  NOTE: some entries (e.g. "dict",
# "OrderedDict") deliberately shadow builtin/stdlib names; kept for
# backwards compatibility.  Fixed: "moduleMovedForSplit" was listed twice.
__all__ = [
    "uniquify", "padTo", "getPluginDirs", "addPluginDir", "sibpath",
    "getPassword", "dict", "println", "keyed_md5", "makeStatBar",
    "OrderedDict", "InsensitiveDict", "spewer", "searchupwards", "LineLog",
    "raises", "IntervalDifferential", "FancyStrMixin", "FancyEqMixin",
    "dsu", "switchUID", "SubclassableCStringIO", "moduleMovedForSplit",
    "unsignedID", "mergeFunctionMetadata", "nameToLabel", "uidFromString",
    "gidFromString", "runAsEffectiveUser",
]
|
apache-2.0
|
elijah513/django
|
django/db/models/manager.py
|
301
|
10722
|
import copy
import inspect
from importlib import import_module
from django.db import router
from django.db.models.query import QuerySet
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
def ensure_default_manager(cls):
    """
    Ensures that a Model subclass contains a default manager and sets the
    _default_manager attribute on the class. Also sets up the _base_manager
    points to a plain Manager instance (which could be the same as
    _default_manager if it's not a subclass of Manager).
    """
    if cls._meta.swapped:
        # Swapped-out models get a descriptor that raises a helpful error on
        # access instead of real managers.
        setattr(cls, 'objects', SwappedManagerDescriptor(cls))
        return
    if not getattr(cls, '_default_manager', None):
        # A field named 'objects' would collide with the auto-added manager.
        if any(f.name == 'objects' for f in cls._meta.fields):
            raise ValueError(
                "Model %s must specify a custom Manager, because it has a "
                "field named 'objects'" % cls.__name__
            )
        # Create the default manager, if needed.
        cls.add_to_class('objects', Manager())
        cls._base_manager = cls.objects
    elif not getattr(cls, '_base_manager', None):
        default_mgr = cls._default_manager.__class__
        if (default_mgr is Manager or
                getattr(default_mgr, "use_for_related_fields", False)):
            cls._base_manager = cls._default_manager
        else:
            # Default manager isn't a plain Manager class, or a suitable
            # replacement, so we walk up the base class hierarchy until we hit
            # something appropriate.
            for base_class in default_mgr.mro()[1:]:
                if (base_class is Manager or
                        getattr(base_class, "use_for_related_fields", False)):
                    cls.add_to_class('_base_manager', base_class())
                    return
            raise AssertionError(
                "Should never get here. Please report a bug, including your "
                "model and model manager setup."
            )
@python_2_unicode_compatible
class BaseManager(object):
    # Tracks each time a Manager instance is created. Used to retain order.
    creation_counter = 0
    #: If set to True the manager will be serialized into migrations and will
    #: thus be available in e.g. RunPython operations
    use_in_migrations = False
    def __new__(cls, *args, **kwargs):
        # We capture the arguments to make returning them trivial
        # (deconstruct() replays them when serializing for migrations).
        obj = super(BaseManager, cls).__new__(cls)
        obj._constructor_args = (args, kwargs)
        return obj
    def __init__(self):
        super(BaseManager, self).__init__()
        self._set_creation_counter()
        self.model = None  # bound model class; set by contribute_to_class()
        self.name = None  # attribute name on the model; set by contribute_to_class()
        self._inherited = False  # True for copies made by _copy_to_model()
        self._db = None  # forced database alias; see db_manager()
        self._hints = {}  # routing hints forwarded to the database router
    def __str__(self):
        """ Return "app_label.model_label.manager_name". """
        return '%s.%s' % (self.model._meta.label, self.name)
    def deconstruct(self):
        """
        Returns a 5-tuple of the form (as_manager (True), manager_class,
        queryset_class, args, kwargs).
        Raises a ValueError if the manager is dynamically generated.
        """
        qs_class = self._queryset_class
        if getattr(self, '_built_with_as_manager', False):
            # using MyQuerySet.as_manager()
            return (
                True,  # as_manager
                None,  # manager_class
                '%s.%s' % (qs_class.__module__, qs_class.__name__),  # qs_class
                None,  # args
                None,  # kwargs
            )
        else:
            module_name = self.__module__
            name = self.__class__.__name__
            # Make sure it's actually there and not an inner class
            module = import_module(module_name)
            if not hasattr(module, name):
                raise ValueError(
                    "Could not find manager %s in %s.\n"
                    "Please note that you need to inherit from managers you "
                    "dynamically generated with 'from_queryset()'."
                    % (name, module_name)
                )
            return (
                False,  # as_manager
                '%s.%s' % (module_name, name),  # manager_class
                None,  # qs_class
                self._constructor_args[0],  # args
                self._constructor_args[1],  # kwargs
            )
    def check(self, **kwargs):
        # System-check framework hook; no messages by default.
        return []
    @classmethod
    def _get_queryset_methods(cls, queryset_class):
        """Collect proxy methods for the public methods of queryset_class."""
        def create_method(name, method):
            # Each proxy delegates to the same-named method on a fresh queryset.
            def manager_method(self, *args, **kwargs):
                return getattr(self.get_queryset(), name)(*args, **kwargs)
            manager_method.__name__ = method.__name__
            manager_method.__doc__ = method.__doc__
            return manager_method
        new_methods = {}
        # Refs http://bugs.python.org/issue1785.
        predicate = inspect.isfunction if six.PY3 else inspect.ismethod
        for name, method in inspect.getmembers(queryset_class, predicate=predicate):
            # Only copy missing methods.
            if hasattr(cls, name):
                continue
            # Only copy public methods or methods with the attribute `queryset_only=False`.
            queryset_only = getattr(method, 'queryset_only', None)
            if queryset_only or (queryset_only is None and name.startswith('_')):
                continue
            # Copy the method onto the manager.
            new_methods[name] = create_method(name, method)
        return new_methods
    @classmethod
    def from_queryset(cls, queryset_class, class_name=None):
        """Build a manager subclass with queryset_class's methods mixed in."""
        if class_name is None:
            class_name = '%sFrom%s' % (cls.__name__, queryset_class.__name__)
        class_dict = {
            '_queryset_class': queryset_class,
        }
        class_dict.update(cls._get_queryset_methods(queryset_class))
        return type(class_name, (cls,), class_dict)
    def contribute_to_class(self, model, name):
        """Attach this manager to `model` under attribute `name`."""
        # TODO: Use weakref because of possible memory leak / circular reference.
        self.model = model
        if not self.name:
            self.name = name
        # Only contribute the manager if the model is concrete
        if model._meta.abstract:
            setattr(model, name, AbstractManagerDescriptor(model))
        elif model._meta.swapped:
            setattr(model, name, SwappedManagerDescriptor(model))
        else:
            # if not model._meta.abstract and not model._meta.swapped:
            setattr(model, name, ManagerDescriptor(self))
        # The earliest-created manager (lowest creation_counter) wins.
        if (not getattr(model, '_default_manager', None) or
                self.creation_counter < model._default_manager.creation_counter):
            model._default_manager = self
        abstract = False
        if model._meta.abstract or (self._inherited and not self.model._meta.proxy):
            abstract = True
        model._meta.managers.append((self.creation_counter, self, abstract))
    def _set_creation_counter(self):
        """
        Sets the creation counter value for this instance and increments the
        class-level copy.
        """
        self.creation_counter = BaseManager.creation_counter
        BaseManager.creation_counter += 1
    def _copy_to_model(self, model):
        """
        Makes a copy of the manager and assigns it to 'model', which should be
        a child of the existing model (used when inheriting a manager from an
        abstract base class).
        """
        assert issubclass(model, self.model)
        mgr = copy.copy(self)
        mgr._set_creation_counter()
        mgr.model = model
        mgr._inherited = True
        return mgr
    def db_manager(self, using=None, hints=None):
        # Return a copy of this manager pinned to a database and/or hints.
        obj = copy.copy(self)
        obj._db = using or self._db
        obj._hints = hints or self._hints
        return obj
    @property
    def db(self):
        # Database alias to read from: the forced one, or ask the router.
        return self._db or router.db_for_read(self.model, **self._hints)
    #######################
    # PROXIES TO QUERYSET #
    #######################
    def get_queryset(self):
        """
        Returns a new QuerySet object. Subclasses can override this method to
        easily customize the behavior of the Manager.
        """
        return self._queryset_class(model=self.model, using=self._db, hints=self._hints)
    def all(self):
        # We can't proxy this method through the `QuerySet` like we do for the
        # rest of the `QuerySet` methods. This is because `QuerySet.all()`
        # works by creating a "copy" of the current queryset and in making said
        # copy, all the cached `prefetch_related` lookups are lost. See the
        # implementation of `RelatedManager.get_queryset()` for a better
        # understanding of how this comes into play.
        return self.get_queryset()
    def __eq__(self, other):
        # Managers are equal when they share a class and were constructed
        # with equal arguments (captured in __new__).
        return (
            isinstance(other, self.__class__) and
            self._constructor_args == other._constructor_args
        )
    def __ne__(self, other):
        return not (self == other)
    def __hash__(self):
        # Identity-based hash.
        return id(self)
class Manager(BaseManager.from_queryset(QuerySet)):
    # The default manager type: BaseManager plus every public QuerySet
    # method copied over by from_queryset().
    pass
class ManagerDescriptor(object):
    """
    Descriptor that exposes a manager on the model class only.

    ``Poll.objects`` works, but ``poll_obj.objects`` raises AttributeError.
    """
    def __init__(self, manager):
        self.manager = manager
    def __get__(self, instance, type=None):
        # Class access returns the manager; instance access is forbidden.
        if instance is None:
            return self.manager
        raise AttributeError(
            "Manager isn't accessible via %s instances" % type.__name__)
class AbstractManagerDescriptor(object):
    """
    Descriptor that raises a helpful error when a manager is accessed on
    an abstract model (any access, class or instance, is an error).
    """
    def __init__(self, model):
        self.model = model
    def __get__(self, instance, type=None):
        message = "Manager isn't available; %s is abstract" % (
            self.model._meta.object_name,
        )
        raise AttributeError(message)
class SwappedManagerDescriptor(object):
    """
    Descriptor that raises a helpful error when a manager is accessed on
    a model that has been swapped out for another one.
    """
    def __init__(self, model):
        self.model = model
    def __get__(self, instance, type=None):
        meta = self.model._meta
        message = (
            "Manager isn't available; '%s.%s' has been swapped for '%s'" % (
                meta.app_label,
                meta.object_name,
                meta.swapped,
            )
        )
        raise AttributeError(message)
class EmptyManager(Manager):
    """A manager pinned to a given model whose queryset is always empty."""
    def __init__(self, model):
        super(EmptyManager, self).__init__()
        self.model = model
    def get_queryset(self):
        # Take the normal queryset and empty it via .none().
        return super(EmptyManager, self).get_queryset().none()
|
bsd-3-clause
|
ai-guild/ncm-adv
|
lib/twitter.py
|
1
|
1203
|
import data_utils
import numpy as np
PATH = '../data/twitter/'
class Twitter(object):
    """Wrapper around the preprocessed twitter question/answer corpus.

    Loads the index-encoded question/answer arrays via ``data_utils``,
    performs an 80/20 train/test split, and serves shuffled mini-batches.

    Fixes over the original:
    - ``split_data`` was defined inside ``__init__`` but never called, so
      ``self.data`` was never assigned and ``batch()`` always failed.
    - ``x[indices][start, end]`` used tuple indexing instead of a slice.
    - the vocabulary dicts were computed and then discarded; they are now
      kept as ``self.i2w`` / ``self.w2i``.
    """
    def __init__(self, path=PATH):
        # Load questions/answers and vocabulary metadata.
        # NOTE(review): the original passed the hard-coded '../data/' here
        # instead of ``path``; preserved for compatibility -- confirm intent.
        metadata, idx_q, idx_a = data_utils.load_data('../data/')
        # Keep the index<->word dictionaries for callers.
        self.i2w = metadata['idx2w']
        self.w2i = metadata['w2idx']
        # 80/20 train/test split over one shared permutation so that
        # question/answer pairs stay aligned.
        n = len(idx_q)
        n_train = int(n * 0.8)
        indices = list(range(n))
        np.random.shuffle(indices)
        self.data = {
            'train': (idx_q[indices][:n_train], idx_a[indices][:n_train]),
            'test': (idx_q[indices][n_train:], idx_a[indices][n_train:]),
        }
    def batch(self, batch_size, idx, data_key='train'):
        """Return the ``idx``-th mini-batch of ``data_key``, shuffled within
        the batch (questions and answers permuted with the same order).
        """
        x, y = self.data[data_key]
        start, end = idx * batch_size, (idx + 1) * batch_size
        x_batch, y_batch = x[start:end], y[start:end]
        # One permutation for both arrays keeps q/a pairs aligned.
        perm = np.arange(len(x_batch))
        np.random.shuffle(perm)
        return x_batch[perm], y_batch[perm]
|
gpl-3.0
|
garnaat/boto
|
tests/integration/datapipeline/test_cert_verification.py
|
121
|
1524
|
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Check that all of the certs on all service endpoints validate.
"""
import unittest
from tests.integration import ServiceCertVerificationTest
import boto.datapipeline
class DatapipelineCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
    # Flag consumed by ServiceCertVerificationTest to identify the service.
    datapipeline = True
    # Run the verification against every advertised Data Pipeline region.
    regions = boto.datapipeline.regions()
    def sample_service_call(self, conn):
        # A cheap read-only API call; making any HTTPS request is enough to
        # exercise certificate validation on the endpoint.
        conn.list_pipelines()
|
mit
|
jbosboom/opentuner
|
examples/hpl/hpl.py
|
5
|
4117
|
import adddeps #fix sys.path
import argparse
import logging
import opentuner
from opentuner.search.manipulator import (ConfigurationManipulator,
IntegerParameter,
FloatParameter)
from opentuner.search.objective import MinimizeTime
from opentuner.measurement import MeasurementInterface
from opentuner.measurement.inputmanager import FixedInputManager
from opentuner.tuningrunmain import TuningRunMain
log = logging.getLogger(__name__)
# Command line: OpenTuner's standard flags plus the HPL-specific knobs.
parser = argparse.ArgumentParser(parents=opentuner.argparsers())
parser.add_argument('--size', type=int, default=800,
                    help='dimensions for the HPL matrix')
parser.add_argument('--nprocs', type=int, default=4,
                    help='number of processors for each HPL run (minimum=4)')
parser.add_argument('--xhpl', type=str, default="hpl-2.1/bin/OSX/xhpl",
                    help='location of xhpl binary')
class HPLinpack(MeasurementInterface):
    """OpenTuner measurement interface for tuning HPL (High Performance Linpack)."""
    def run(self, desired_result, input, limit):
        # Render the candidate configuration into HPL.dat, run xhpl under
        # mpirun, and report the elapsed wall time as the tuning objective.
        self.output_hpl_datfile(desired_result.configuration.data)
        import subprocess, os
        binary = self.args.xhpl
        subprocess.call(["mpirun", "-np", str(self.args.nprocs), binary])
        val = self.get_time_from_hpl_output()
        return opentuner.resultsdb.models.Result(time=val)
    def manipulator(self):
        # Search space: the HPL.dat tunables, each as a bounded integer.
        #FIXME: should some of these be expressed as booleans or switch parameters?
        #FIXME: how to express P and Q, given PxQ=nprocs, with nprocs being fixed?
        #FIXME: how to express logscaled parameter with a particular base?
        manipulator = ConfigurationManipulator()
        manipulator.add_parameter(IntegerParameter("blocksize", 1, 64))
        manipulator.add_parameter(IntegerParameter("row_or_colmajor_pmapping", 0, 1))
        manipulator.add_parameter(IntegerParameter("pfact", 0, 2))
        manipulator.add_parameter(IntegerParameter("nbmin", 1, 4))
        manipulator.add_parameter(IntegerParameter("ndiv", 2, 2))
        manipulator.add_parameter(IntegerParameter("rfact", 0, 4))
        manipulator.add_parameter(IntegerParameter("bcast", 0, 5))
        manipulator.add_parameter(IntegerParameter("depth", 0, 4))
        manipulator.add_parameter(IntegerParameter("swap", 0, 2))
        manipulator.add_parameter(IntegerParameter("swapping_threshold", 64, 128))
        manipulator.add_parameter(IntegerParameter("L1_transposed", 0, 1))
        manipulator.add_parameter(IntegerParameter("U_transposed", 0, 1))
        manipulator.add_parameter(IntegerParameter("mem_alignment", 4, 16))
        return manipulator
    def output_hpl_datfile(self, params):
        """HPL uses an input file to express the parameters, and this uses mako to render it."""
        params["size"] = self.args.size
        from mako.template import Template
        template = Template(filename="HPL.dat.mako")
        with open("HPL.dat", "w") as f:
            f.write(template.render(**params))
    def get_time_from_hpl_output(self, fname="HPL.out"):
        """Returns the elapsed time only, from the HPL output file"""
        #FIXME: clean up with REs
        elapsed = 0.0
        with open(fname) as f:
            # Skip ahead to the second "T/V" table header; the result row
            # for the run follows it.
            line = f.readline()
            while (line[0:3] != "T/V"):
                line = f.readline()
            line = f.readline()
            while (line[0:3] != "T/V"):
                line = f.readline()
            f.readline() # line of dashes
            # Column index 5 of the result row holds the elapsed time.
            splitted = f.readline().split()
            elapsed = float(splitted[5])
        return elapsed
    def program_name(self):
        return "HPL"
    def program_version(self):
        return "size=%d,nprocs=%d" % (self.args.size, self.args.nprocs)
    def save_final_config(self, configuration):
        '''
        called at the end of autotuning with the best resultsdb.models.Configuration
        '''
        print "Final configuration", configuration.data
if __name__ == '__main__':
    # Parse OpenTuner + HPL arguments and start the tuning run.
    args = parser.parse_args()
    HPLinpack.main(args)
|
mit
|
gangadharkadam/smrterp
|
erpnext/config/desktop.py
|
30
|
1190
|
from frappe import _
def get_data():
    """Return the desktop icon configuration for each ERPNext module.

    Keys are icon labels; each value describes the color, icon class and
    link type ("module", "page" or "list") used to render the desk icon.
    """
    return {
        "Accounts": {
            "color": "#3498db",
            "icon": "icon-money",
            "type": "module"
        },
        "Activity": {
            "color": "#e67e22",
            "icon": "icon-play",
            "label": _("Activity"),
            "link": "activity",
            "type": "page"
        },
        "Buying": {
            "color": "#c0392b",
            "icon": "icon-shopping-cart",
            "type": "module"
        },
        "HR": {
            "color": "#2ecc71",
            "icon": "icon-group",
            "label": _("Human Resources"),
            "type": "module"
        },
        "Manufacturing": {
            "color": "#7f8c8d",
            "icon": "icon-cogs",
            "type": "module"
        },
        "Notes": {
            "color": "#95a5a6",
            "doctype": "Note",
            "icon": "icon-file-alt",
            "label": _("Notes"),
            "link": "List/Note",
            "type": "list"
        },
        "POS": {
            "color": "#589494",
            "icon": "icon-th",
            "type": "page",
            "link": "pos"
        },
        "Projects": {
            "color": "#8e44ad",
            "icon": "icon-puzzle-piece",
            "type": "module"
        },
        "Selling": {
            "color": "#1abc9c",
            "icon": "icon-tag",
            "type": "module"
        },
        "Stock": {
            "color": "#f39c12",
            "icon": "icon-truck",
            "type": "module"
        },
        "Support": {
            "color": "#2c3e50",
            "icon": "icon-phone",
            "type": "module"
        }
    }
|
agpl-3.0
|
efortuna/AndroidSDKClone
|
ndk_experimental/prebuilt/linux-x86_64/lib/python2.7/MimeWriter.py
|
315
|
6482
|
"""Generic MIME writer.
This module defines the class MimeWriter. The MimeWriter class implements
a basic formatter for creating MIME multi-part files. It doesn't seek around
the output file nor does it use large amounts of buffer space. You must write
the parts out in the order that they should occur in the final file.
MimeWriter does buffer the headers you add, allowing you to rearrange their
order.
"""
import mimetools
__all__ = ["MimeWriter"]
import warnings
warnings.warn("the MimeWriter module is deprecated; use the email package instead",
DeprecationWarning, 2)
class MimeWriter:

    """Generic MIME writer.

    Methods:

    __init__()
    addheader()
    flushheaders()
    startbody()
    startmultipartbody()
    nextpart()
    lastpart()

    A MIME writer is much more primitive than a MIME parser.  It
    doesn't seek around on the output file, and it doesn't use large
    amounts of buffer space, so you have to write the parts in the
    order they should occur on the output file.  It does buffer the
    headers you add, allowing you to rearrange their order.

    General usage is:

    f = <open the output file>
    w = MimeWriter(f)
    ...call w.addheader(key, value) 0 or more times...

    followed by either:

    f = w.startbody(content_type)
    ...call f.write(data) for body data...

    or:

    w.startmultipartbody(subtype)
    for each part:
        subwriter = w.nextpart()
        ...use the subwriter's methods to create the subpart...
    w.lastpart()

    The subwriter is another MimeWriter instance, and should be
    treated in the same way as the toplevel MimeWriter.  This way,
    writing recursive body parts is easy.

    Warning: don't forget to call lastpart()!

    XXX There should be more state so calls made in the wrong order
    are detected.

    Some special cases:

    - startbody() just returns the file passed to the constructor;
      but don't use this knowledge, as it may be changed.

    - startmultipartbody() actually returns a file as well;
      this can be used to write the initial 'if you can read this your
      mailer is not MIME-aware' message.

    - If you call flushheaders(), the headers accumulated so far are
      written out (and forgotten); this is useful if you don't need a
      body part at all, e.g. for a subpart of type message/rfc822
      that's (mis)used to store some header-like information.

    - Passing a keyword argument 'prefix=<flag>' to addheader(),
      start*body() affects where the header is inserted; 0 means
      append at the end, 1 means insert at the start; default is
      append for addheader(), but insert for start*body(), which use
      it to determine where the Content-Type header goes.
    """

    def __init__(self, fp):
        # Headers are buffered until a body is started so their order can
        # still be rearranged via the prefix argument.
        self._fp = fp
        self._headers = []

    def addheader(self, key, value, prefix=0):
        """Add a header line to the MIME message.

        The key is the name of the header, where the value obviously provides
        the value of the header. The optional argument prefix determines
        where the header is inserted; 0 means append at the end, 1 means
        insert at the start. The default is to append.
        """
        # Strip leading/trailing blank lines and fold continuation lines
        # with a single leading space.
        lines = value.split("\n")
        while lines and not lines[-1]: del lines[-1]
        while lines and not lines[0]: del lines[0]
        for i in range(1, len(lines)):
            lines[i] = " " + lines[i].strip()
        value = "\n".join(lines) + "\n"
        line = key + ": " + value
        if prefix:
            self._headers.insert(0, line)
        else:
            self._headers.append(line)

    def flushheaders(self):
        """Writes out and forgets all headers accumulated so far.

        This is useful if you don't need a body part at all; for example,
        for a subpart of type message/rfc822 that's (mis)used to store some
        header-like information.
        """
        self._fp.writelines(self._headers)
        self._headers = []

    def startbody(self, ctype, plist=None, prefix=1):
        """Returns a file-like object for writing the body of the message.

        The content-type is set to the provided ctype, and the optional
        parameter, plist, provides additional parameters for the
        content-type declaration. The optional argument prefix determines
        where the header is inserted; 0 means append at the end, 1 means
        insert at the start. The default is to insert at the start.
        """
        # BUGFIX: the default used to be a shared mutable list (plist=[]);
        # use None and create a fresh list per call instead.
        if plist is None:
            plist = []
        for name, value in plist:
            ctype = ctype + ';\n %s=\"%s\"' % (name, value)
        self.addheader("Content-Type", ctype, prefix=prefix)
        self.flushheaders()
        self._fp.write("\n")
        return self._fp

    def startmultipartbody(self, subtype, boundary=None, plist=None, prefix=1):
        """Returns a file-like object for writing the body of the message.

        Additionally, this method initializes the multi-part code, where the
        subtype parameter provides the multipart subtype, the boundary
        parameter may provide a user-defined boundary specification, and the
        plist parameter provides optional parameters for the subtype. The
        optional argument, prefix, determines where the header is inserted;
        0 means append at the end, 1 means insert at the start. The default
        is to insert at the start. Subparts should be created using the
        nextpart() method.
        """
        # BUGFIX: same mutable-default fix as startbody().
        if plist is None:
            plist = []
        self._boundary = boundary or mimetools.choose_boundary()
        return self.startbody("multipart/" + subtype,
                              [("boundary", self._boundary)] + plist,
                              prefix=prefix)

    def nextpart(self):
        """Returns a new instance of MimeWriter which represents an
        individual part in a multipart message.

        This may be used to write the part as well as used for creating
        recursively complex multipart messages. The message must first be
        initialized with the startmultipartbody() method before using the
        nextpart() method.
        """
        self._fp.write("\n--" + self._boundary + "\n")
        return self.__class__(self._fp)

    def lastpart(self):
        """This is used to designate the last part of a multipart message.

        It should always be used when writing multipart messages.
        """
        self._fp.write("\n--" + self._boundary + "--\n")
if __name__ == '__main__':
    # Self-test lives in the standard library's test suite.
    import test.test_MimeWriter
|
apache-2.0
|
andyh616/mne-python
|
mne/stats/regression.py
|
3
|
14851
|
# Authors: Tal Linzen <linzen@nyu.edu>
# Teon Brooks <teon.brooks@gmail.com>
# Denis A. Engemann <denis.engemann@gmail.com>
# Jona Sassenhagen <jona.sassenhagen@gmail.com>
# Marijn van Vliet <w.m.vanvliet@gmail.com>
#
# License: BSD (3-clause)
from collections import namedtuple
from inspect import isgenerator
import warnings
from ..externals.six import string_types
import numpy as np
from scipy import linalg, sparse
from ..source_estimate import SourceEstimate
from ..epochs import _BaseEpochs
from ..evoked import Evoked, EvokedArray
from ..utils import logger, _reject_data_segments, _get_fast_dot
from ..io.pick import pick_types, pick_info
from ..fixes import in1d
def linear_regression(inst, design_matrix, names=None):
    """Fit Ordinary Least Squares regression (OLS)

    Parameters
    ----------
    inst : instance of Epochs | iterable of SourceEstimate
        The data to be regressed. Contains all the trials, sensors, and time
        points for the regression. For Source Estimates, accepts either a list
        or a generator object.
    design_matrix : ndarray, shape (n_observations, n_regressors)
        The regressors to be used. Must be a 2d array with as many rows as
        the first dimension of `data`. The first column of this matrix will
        typically consist of ones (intercept column).
    names : list-like | None
        Optional parameter to name the regressors. If provided, the length must
        correspond to the number of columns present in regressors
        (including the intercept, if present).
        Otherwise the default names are x0, x1, x2...xn for n regressors.

    Returns
    -------
    results : dict of namedtuple
        For each regressor (key) a namedtuple is provided with the
        following attributes:
            beta : regression coefficients
            stderr : standard error of regression coefficients
            t_val : t statistics (beta / stderr)
            p_val : two-sided p-value of t statistic under the t distribution
            mlog10_p_val : -log10 transformed p-value.
        The tuple members are numpy arrays. The shape of each numpy array is
        the shape of the data minus the first dimension; e.g., if the shape of
        the original data was (n_observations, n_channels, n_timepoints),
        then the shape of each of the arrays will be
        (n_channels, n_timepoints).
    """
    if names is None:
        names = ['x%i' % i for i in range(design_matrix.shape[1])]
    # Build the (n_observations, ...) data array and an output container
    # matching the input type (EvokedArray for epochs, SourceEstimate
    # otherwise).
    if isinstance(inst, _BaseEpochs):
        picks = pick_types(inst.info, meg=True, eeg=True, ref_meg=True,
                           stim=False, eog=False, ecg=False,
                           emg=False, exclude=['bads'])
        if [inst.ch_names[p] for p in picks] != inst.ch_names:
            warnings.warn('Fitting linear model to non-data or bad '
                          'channels. Check picking', UserWarning)
        msg = 'Fitting linear model to epochs'
        data = inst.get_data()
        out = EvokedArray(np.zeros(data.shape[1:]), inst.info, inst.tmin)
    elif isgenerator(inst):
        msg = 'Fitting linear model to source estimates (generator input)'
        out = next(inst)
        data = np.array([out.data] + [i.data for i in inst])
    elif isinstance(inst, list) and isinstance(inst[0], SourceEstimate):
        msg = 'Fitting linear model to source estimates (list input)'
        out = inst[0]
        data = np.array([i.data for i in inst])
    else:
        raise ValueError('Input must be epochs or iterable of source '
                         'estimates')
    logger.info(msg + ', (%s targets, %s regressors)' %
                (np.product(data.shape[1:]), len(names)))
    lm_params = _fit_lm(data, design_matrix, names)
    lm = namedtuple('lm', 'beta stderr t_val p_val mlog10_p_val')
    lm_fits = {}
    # Wrap every statistic of every regressor into a copy of the container
    # so results can be plotted/handled like any Evoked/SourceEstimate.
    for name in names:
        parameters = [p[name] for p in lm_params]
        for ii, value in enumerate(parameters):
            out_ = out.copy()
            if isinstance(out_, SourceEstimate):
                out_._data[:] = value
            elif isinstance(out_, Evoked):
                out_.data[:] = value
            else:
                raise RuntimeError('Invalid container.')
            parameters[ii] = out_
        lm_fits[name] = lm(*parameters)
    logger.info('Done')
    return lm_fits
def _fit_lm(data, design_matrix, names):
"""Aux function"""
from scipy import stats
n_samples = len(data)
n_features = np.product(data.shape[1:])
if design_matrix.ndim != 2:
raise ValueError('Design matrix must be a 2d array')
n_rows, n_predictors = design_matrix.shape
if n_samples != n_rows:
raise ValueError('Number of rows in design matrix must be equal '
'to number of observations')
if n_predictors != len(names):
raise ValueError('Number of regressor names must be equal to '
'number of column in design matrix')
y = np.reshape(data, (n_samples, n_features))
betas, resid_sum_squares, _, _ = linalg.lstsq(a=design_matrix, b=y)
df = n_rows - n_predictors
sqrt_noise_var = np.sqrt(resid_sum_squares / df).reshape(data.shape[1:])
design_invcov = linalg.inv(np.dot(design_matrix.T, design_matrix))
unscaled_stderrs = np.sqrt(np.diag(design_invcov))
beta, stderr, t_val, p_val, mlog10_p_val = (dict() for _ in range(5))
for x, unscaled_stderr, predictor in zip(betas, unscaled_stderrs, names):
beta[predictor] = x.reshape(data.shape[1:])
stderr[predictor] = sqrt_noise_var * unscaled_stderr
t_val[predictor] = beta[predictor] / stderr[predictor]
cdf = stats.t.cdf(np.abs(t_val[predictor]), df)
p_val[predictor] = (1. - cdf) * 2.
mlog10_p_val[predictor] = -np.log10(p_val[predictor])
return beta, stderr, t_val, p_val, mlog10_p_val
def linear_regression_raw(raw, events, event_id=None, tmin=-.1, tmax=1,
covariates=None, reject=None, flat=None, tstep=1.,
decim=1, picks=None, solver='pinv'):
"""Estimate regression-based evoked potentials/fields by linear modelling
This models the full M/EEG time course, including correction for
overlapping potentials and allowing for continuous/scalar predictors.
Internally, this constructs a predictor matrix X of size
n_samples * (n_conds * window length), solving the linear system
``Y = bX`` and returning ``b`` as evoked-like time series split by
condition. See [1]_.
Parameters
----------
raw : instance of Raw
A raw object. Note: be very careful about data that is not
downsampled, as the resulting matrices can be enormous and easily
overload your computer. Typically, 100 Hz sampling rate is
appropriate - or using the decim keyword (see below).
events : ndarray of int, shape (n_events, 3)
An array where the first column corresponds to samples in raw
and the last to integer codes in event_id.
event_id : dict
As in Epochs; a dictionary where the values may be integers or
iterables of integers, corresponding to the 3rd column of
events, and the keys are condition names.
tmin : float | dict
If float, gives the lower limit (in seconds) for the time window for
which all event types' effects are estimated. If a dict, can be used to
specify time windows for specific event types: keys correspond to keys
in event_id and/or covariates; for missing values, the default (-.1) is
used.
tmax : float | dict
If float, gives the upper limit (in seconds) for the time window for
which all event types' effects are estimated. If a dict, can be used to
specify time windows for specific event types: keys correspond to keys
in event_id and/or covariates; for missing values, the default (1.) is
used.
covariates : dict-like | None
If dict-like (e.g., a pandas DataFrame), values have to be array-like
and of the same length as the columns in ```events```. Keys correspond
to additional event types/conditions to be estimated and are matched
with the time points given by the first column of ```events```. If
None, only binary events (from event_id) are used.
reject : None | dict
For cleaning raw data before the regression is performed: set up
rejection parameters based on peak-to-peak amplitude in continuously
selected subepochs. If None, no rejection is done.
If dict, keys are types ('grad' | 'mag' | 'eeg' | 'eog' | 'ecg')
and values are the maximal peak-to-peak values to select rejected
epochs, e.g.::
reject = dict(grad=4000e-12, # T / m (gradiometers)
mag=4e-11, # T (magnetometers)
eeg=40e-5, # uV (EEG channels)
eog=250e-5 # uV (EOG channels))
flat : None | dict
or cleaning raw data before the regression is performed: set up
rejection parameters based on flatness of the signal. If None, no
rejection is done. If a dict, keys are ('grad' | 'mag' |
'eeg' | 'eog' | 'ecg') and values are minimal peak-to-peak values to
select rejected epochs.
tstep : float
Length of windows for peak-to-peak detection for raw data cleaning.
decim : int
Decimate by choosing only a subsample of data points. Highly
recommended for data recorded at high sampling frequencies, as
otherwise huge intermediate matrices have to be created and inverted.
picks : None | list
List of indices of channels to be included. If None, defaults to all
MEG and EEG channels.
solver : str | function
Either a function which takes as its inputs the sparse predictor
matrix X and the observation matrix Y, and returns the coefficient
matrix b; or a string (for now, only 'pinv'), in which case the
solver used is dot(scipy.linalg.pinv(dot(X.T, X)), dot(X.T, Y.T)).T.
Returns
-------
evokeds : dict
A dict where the keys correspond to conditions and the values are
Evoked objects with the ER[F/P]s. These can be used exactly like any
other Evoked object, including e.g. plotting or statistics.
References
----------
.. [1] Smith, N. J., & Kutas, M. (2015). Regression-based estimation of ERP
waveforms: II. Non-linear effects, overlap correction, and practical
considerations. Psychophysiology, 52(2), 169-189.
"""
if isinstance(solver, string_types):
if solver == 'pinv':
fast_dot = _get_fast_dot()
# inv is slightly (~10%) faster, but pinv seemingly more stable
def solver(X, Y):
return fast_dot(linalg.pinv(X.T.dot(X).todense()),
X.T.dot(Y.T)).T
else:
raise ValueError("No such solver: {0}".format(solver))
# prepare raw and events
if picks is None:
picks = pick_types(raw.info, meg=True, eeg=True, ref_meg=True)
info = pick_info(raw.info, picks, copy=True)
info["sfreq"] /= decim
data, times = raw[:]
data = data[picks, ::decim]
times = times[::decim]
events = events.copy()
events[:, 0] -= raw.first_samp
events[:, 0] /= decim
conds = list(event_id)
if covariates is not None:
conds += list(covariates)
# time windows (per event type) are converted to sample points from times
if isinstance(tmin, (float, int)):
tmin_s = dict((cond, int(tmin * info["sfreq"])) for cond in conds)
else:
tmin_s = dict((cond, int(tmin.get(cond, -.1) * info["sfreq"]))
for cond in conds)
if isinstance(tmax, (float, int)):
tmax_s = dict(
(cond, int((tmax * info["sfreq"]) + 1.)) for cond in conds)
else:
tmax_s = dict((cond, int((tmax.get(cond, 1.) * info["sfreq"]) + 1))
for cond in conds)
# Construct predictor matrix
# We do this by creating one array per event type, shape (lags, samples)
# (where lags depends on tmin/tmax and can be different for different
# event types). Columns correspond to predictors, predictors correspond to
# time lags. Thus, each array is mostly sparse, with one diagonal of 1s
# per event (for binary predictors).
cond_length = dict()
xs = []
for cond in conds:
tmin_, tmax_ = tmin_s[cond], tmax_s[cond]
n_lags = int(tmax_ - tmin_) # width of matrix
if cond in event_id: # for binary predictors
ids = ([event_id[cond]]
if isinstance(event_id[cond], int)
else event_id[cond])
onsets = -(events[in1d(events[:, 2], ids), 0] + tmin_)
values = np.ones((len(onsets), n_lags))
else: # for predictors from covariates, e.g. continuous ones
covs = covariates[cond]
if len(covs) != len(events):
error = ("Condition {0} from ```covariates``` is "
"not the same length as ```events```").format(cond)
raise ValueError(error)
onsets = -(events[np.where(covs != 0), 0] + tmin_)[0]
v = np.asarray(covs)[np.nonzero(covs)].astype(float)
values = np.ones((len(onsets), n_lags)) * v[:, np.newaxis]
cond_length[cond] = len(onsets)
xs.append(sparse.dia_matrix((values, onsets),
shape=(data.shape[1], n_lags)))
X = sparse.hstack(xs)
# find only those positions where at least one predictor isn't 0
has_val = np.unique(X.nonzero()[0])
# additionally, reject positions based on extreme steps in the data
if reject is not None:
_, inds = _reject_data_segments(data, reject, flat, decim=None,
info=info, tstep=tstep)
for t0, t1 in inds:
has_val = np.setdiff1d(has_val, range(t0, t1))
# solve linear system
X, data = X.tocsr()[has_val], data[:, has_val]
coefs = solver(X, data)
# construct Evoked objects to be returned from output
evokeds = dict()
cum = 0
for cond in conds:
tmin_, tmax_ = tmin_s[cond], tmax_s[cond]
evokeds[cond] = EvokedArray(coefs[:, cum:cum + tmax_ - tmin_],
info=info, comment=cond,
tmin=tmin_ / float(info["sfreq"]),
nave=cond_length[cond],
kind='mean') # note that nave and kind are
cum += tmax_ - tmin_ # technically not correct
return evokeds
|
bsd-3-clause
|
MuhammadShuaib/mwmetrics
|
setup.py
|
3
|
1293
|
import os
from setuptools import find_packages, setup
def read(fname):
    """Return the full text of *fname*, resolved relative to this file's directory."""
    # Use a context manager so the file handle is closed deterministically
    # (the original left the handle open until garbage collection).
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
def requirements(fname):
    """Return the stripped lines of *fname* as a list.

    The path is resolved relative to this file's directory.  Blank and
    comment lines are kept (as '' / '#...'), matching the original behavior.
    """
    # Context manager closes the handle; the original leaked it until GC.
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return [line.strip() for line in f]
# Package metadata for distribution via setuptools.
# NOTE(review): the `requirements` helper defined above is unused here --
# install_requires is hard-coded instead; confirm that is intentional.
setup(
    name = "mwmetrics",
    version = "0.0.1",
    author = "Aaron Halfaker",
    author_email = "ahalfaker@wikimedia.org",
    description = "A collection of scripts and utilities for extracting " +
                  "behavioral metrics from Wikipedia editors",
    license = "MIT",
    url = "https://github.com/halfak/mwmetrics",
    packages=find_packages(),
    # Installs an `mwmetrics` console command dispatching to the main() entry point.
    entry_points = {
        'console_scripts': [
            'mwmetrics=mwmetrics.mwmetrics:main',
        ],
    },
    long_description = read('README.md'),
    install_requires = ['docopt', 'mediawiki-utilities'],
    classifiers=[
        "Programming Language :: Python :: 3",
        "Development Status :: 3 - Alpha",
        "License :: OSI Approved :: MIT License",
        "Intended Audience :: Science/Research",
        "Intended Audience :: System Administrators",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Topic :: Utilities",
        "Topic :: Scientific/Engineering"
    ]
)
|
mit
|
kelseyoo14/Wander
|
venv_2_7/lib/python2.7/site-packages/Django-1.9-py2.7.egg/django/contrib/admin/filters.py
|
188
|
16608
|
"""
This encapsulates the logic for displaying filters in the Django admin.
Filters are specified in models with the "list_filter" option.
Each filter subclass knows how to display a filter for a field that passes a
certain test -- e.g. being a DateField or ForeignKey.
"""
import datetime
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.utils import (
get_model_from_relation, prepare_lookup_value, reverse_field_path,
)
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import models
from django.utils import timezone
from django.utils.encoding import force_text, smart_text
from django.utils.translation import ugettext_lazy as _
class ListFilter(object):
    """Abstract base for admin changelist filters.

    Subclasses must set ``title`` and implement ``has_output()``,
    ``choices()``, ``queryset()`` and ``expected_parameters()``.
    """
    title = None  # Human-readable title to appear in the right sidebar.
    template = 'admin/filter.html'

    def __init__(self, request, params, model, model_admin):
        # Collects the request's query-string parameters actually consumed
        # by this filter; populated by subclasses.
        self.used_parameters = {}
        if self.title is None:
            raise ImproperlyConfigured(
                "The list filter '%s' does not specify a 'title'."
                % self.__class__.__name__)

    def has_output(self):
        """Return True if this filter would render any choices."""
        raise NotImplementedError('subclasses of ListFilter must provide a has_output() method')

    def expected_parameters(self):
        """Return the query-string parameter names this filter consumes."""
        raise NotImplementedError('subclasses of ListFilter must provide an expected_parameters() method')

    def choices(self, cl):
        """Yield choice dicts ready for rendering in the template."""
        raise NotImplementedError('subclasses of ListFilter must provide a choices() method')

    def queryset(self, request, queryset):
        """Return *queryset* filtered according to the request."""
        raise NotImplementedError('subclasses of ListFilter must provide a queryset() method')
class SimpleListFilter(ListFilter):
    """Base class for custom, non-field-based filters.

    Subclasses define ``parameter_name`` and implement ``lookups()`` and
    ``queryset()``.
    """
    # The parameter that should be used in the query string for that filter.
    parameter_name = None

    def __init__(self, request, params, model, model_admin):
        super(SimpleListFilter, self).__init__(
            request, params, model, model_admin)
        if self.parameter_name is None:
            raise ImproperlyConfigured(
                "The list filter '%s' does not specify "
                "a 'parameter_name'." % self.__class__.__name__)
        # Claim our parameter out of the remaining query-string params so the
        # changelist does not try to interpret it as a model field lookup.
        if self.parameter_name in params:
            value = params.pop(self.parameter_name)
            self.used_parameters[self.parameter_name] = value
        # lookups() may legitimately return None; normalize to an empty list.
        lookup_choices = self.lookups(request, model_admin)
        if lookup_choices is None:
            lookup_choices = ()
        self.lookup_choices = list(lookup_choices)

    def has_output(self):
        return len(self.lookup_choices) > 0

    def value(self):
        """
        Returns the value (in string format) provided in the request's
        query string for this filter, if any. If the value wasn't provided then
        returns None.
        """
        return self.used_parameters.get(self.parameter_name)

    def lookups(self, request, model_admin):
        """
        Must be overridden to return a list of tuples (value, verbose value)
        """
        raise NotImplementedError(
            'The SimpleListFilter.lookups() method must be overridden to '
            'return a list of tuples (value, verbose value)')

    def expected_parameters(self):
        return [self.parameter_name]

    def choices(self, cl):
        # First choice resets the filter ("All"); selected when no value set.
        yield {
            'selected': self.value() is None,
            'query_string': cl.get_query_string({}, [self.parameter_name]),
            'display': _('All'),
        }
        for lookup, title in self.lookup_choices:
            # force_text so the comparison against the (string) query-string
            # value also works for non-string lookups such as ints.
            yield {
                'selected': self.value() == force_text(lookup),
                'query_string': cl.get_query_string({
                    self.parameter_name: lookup,
                }, []),
                'display': title,
            }
class FieldListFilter(ListFilter):
    """Base class for filters that operate on a model field.

    Concrete subclasses are registered via :meth:`register` together with a
    predicate; :meth:`create` instantiates the first registered class whose
    predicate accepts the field.
    """
    # Registry of (test, list_filter_class) pairs, scanned in order.
    _field_list_filters = []
    # Insertion index for take_priority registrations, so prioritized
    # filters keep their relative registration order.
    _take_priority_index = 0

    def __init__(self, field, request, params, model, model_admin, field_path):
        self.field = field
        self.field_path = field_path
        self.title = getattr(field, 'verbose_name', field_path)
        super(FieldListFilter, self).__init__(
            request, params, model, model_admin)
        # Consume every parameter this filter understands, normalizing the
        # raw query-string value via prepare_lookup_value.
        for p in self.expected_parameters():
            if p in params:
                value = params.pop(p)
                self.used_parameters[p] = prepare_lookup_value(p, value)

    def has_output(self):
        return True

    def queryset(self, request, queryset):
        try:
            return queryset.filter(**self.used_parameters)
        except ValidationError as e:
            # Invalid lookup values surface as a changelist error.
            raise IncorrectLookupParameters(e)

    @classmethod
    def register(cls, test, list_filter_class, take_priority=False):
        if take_priority:
            # This is to allow overriding the default filters for certain types
            # of fields with some custom filters. The first found in the list
            # is used in priority.
            cls._field_list_filters.insert(
                cls._take_priority_index, (test, list_filter_class))
            cls._take_priority_index += 1
        else:
            cls._field_list_filters.append((test, list_filter_class))

    @classmethod
    def create(cls, field, request, params, model, model_admin, field_path):
        # Instantiate the first registered filter whose test accepts this
        # field; falls through (returns None) if no test matches.
        for test, list_filter_class in cls._field_list_filters:
            if not test(field):
                continue
            return list_filter_class(field, request, params,
                                     model, model_admin, field_path=field_path)
class RelatedFieldListFilter(FieldListFilter):
    """Filter on a relation field (ForeignKey / ManyToMany) by related object."""

    def __init__(self, field, request, params, model, model_admin, field_path):
        other_model = get_model_from_relation(field)
        # Exact match on the target field's value, plus an isnull companion
        # parameter for the "(None)" choice.
        self.lookup_kwarg = '%s__%s__exact' % (field_path, field.target_field.name)
        self.lookup_kwarg_isnull = '%s__isnull' % field_path
        self.lookup_val = request.GET.get(self.lookup_kwarg)
        self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull)
        # Choices must be computed before super().__init__ runs, since the
        # base class consumes expected_parameters() immediately.
        self.lookup_choices = self.field_choices(field, request, model_admin)
        super(RelatedFieldListFilter, self).__init__(
            field, request, params, model, model_admin, field_path)
        # Prefer the field's own verbose_name; fall back to the related
        # model's verbose_name (e.g. for reverse relations).
        if hasattr(field, 'verbose_name'):
            self.lookup_title = field.verbose_name
        else:
            self.lookup_title = other_model._meta.verbose_name
        self.title = self.lookup_title
        self.empty_value_display = model_admin.get_empty_value_display()

    @property
    def include_empty_choice(self):
        """
        Return True if a "(None)" choice should be included, which filters
        out everything except empty relationships.
        """
        return self.field.null or (self.field.is_relation and self.field.many_to_many)

    def has_output(self):
        # The extra "(None)" entry counts towards whether there is anything
        # worth displaying (more than one choice).
        if self.include_empty_choice:
            extra = 1
        else:
            extra = 0
        return len(self.lookup_choices) + extra > 1

    def expected_parameters(self):
        return [self.lookup_kwarg, self.lookup_kwarg_isnull]

    def field_choices(self, field, request, model_admin):
        return field.get_choices(include_blank=False)

    def choices(self, cl):
        yield {
            'selected': self.lookup_val is None and not self.lookup_val_isnull,
            'query_string': cl.get_query_string({},
                [self.lookup_kwarg, self.lookup_kwarg_isnull]),
            'display': _('All'),
        }
        for pk_val, val in self.lookup_choices:
            # smart_text so pk comparison works against the string GET value.
            yield {
                'selected': self.lookup_val == smart_text(pk_val),
                'query_string': cl.get_query_string({
                    self.lookup_kwarg: pk_val,
                }, [self.lookup_kwarg_isnull]),
                'display': val,
            }
        if self.include_empty_choice:
            yield {
                'selected': bool(self.lookup_val_isnull),
                'query_string': cl.get_query_string({
                    self.lookup_kwarg_isnull: 'True',
                }, [self.lookup_kwarg]),
                'display': self.empty_value_display,
            }
# Default filter for relation fields: any field with a remote_field
# (ForeignKey / ManyToMany) gets the RelatedFieldListFilter.
FieldListFilter.register(lambda f: f.remote_field, RelatedFieldListFilter)
class BooleanFieldListFilter(FieldListFilter):
    """All / Yes / No filter for boolean fields (plus Unknown for nullable)."""

    def __init__(self, field, request, params, model, model_admin, field_path):
        self.lookup_kwarg = '%s__exact' % field_path
        # Companion isnull lookup, used for the "Unknown" choice below.
        self.lookup_kwarg2 = '%s__isnull' % field_path
        self.lookup_val = request.GET.get(self.lookup_kwarg)
        self.lookup_val2 = request.GET.get(self.lookup_kwarg2)
        super(BooleanFieldListFilter, self).__init__(field,
            request, params, model, model_admin, field_path)

    def expected_parameters(self):
        return [self.lookup_kwarg, self.lookup_kwarg2]

    def choices(self, cl):
        # '1'/'0' match the string values arriving via the query string.
        for lookup, title in (
                (None, _('All')),
                ('1', _('Yes')),
                ('0', _('No'))):
            yield {
                'selected': self.lookup_val == lookup and not self.lookup_val2,
                'query_string': cl.get_query_string({
                    self.lookup_kwarg: lookup,
                }, [self.lookup_kwarg2]),
                'display': title,
            }
        # Nullable booleans get an extra "Unknown" choice mapping to isnull.
        if isinstance(self.field, models.NullBooleanField):
            yield {
                'selected': self.lookup_val2 == 'True',
                'query_string': cl.get_query_string({
                    self.lookup_kwarg2: 'True',
                }, [self.lookup_kwarg]),
                'display': _('Unknown'),
            }
# BooleanField / NullBooleanField get the yes/no(/unknown) filter.
FieldListFilter.register(lambda f: isinstance(f,
    (models.BooleanField, models.NullBooleanField)), BooleanFieldListFilter)
class ChoicesFieldListFilter(FieldListFilter):
    """Filter for a field that declares explicit ``choices``."""

    def __init__(self, field, request, params, model, model_admin, field_path):
        self.lookup_kwarg = '%s__exact' % field_path
        self.lookup_val = request.GET.get(self.lookup_kwarg)
        super(ChoicesFieldListFilter, self).__init__(
            field, request, params, model, model_admin, field_path)

    def expected_parameters(self):
        return [self.lookup_kwarg]

    def choices(self, cl):
        # Leading "All" entry: selected when our parameter is absent.
        reset_choice = {
            'selected': self.lookup_val is None,
            'query_string': cl.get_query_string({}, [self.lookup_kwarg]),
            'display': _('All')
        }
        yield reset_choice
        # One entry per declared choice, flattened across optgroups.
        for raw_value, label in self.field.flatchoices:
            yield {
                'selected': smart_text(raw_value) == self.lookup_val,
                'query_string': cl.get_query_string({
                    self.lookup_kwarg: raw_value}),
                'display': label,
            }
# Fields declaring explicit ``choices`` get the choices-based filter.
FieldListFilter.register(lambda f: bool(f.choices), ChoicesFieldListFilter)
class DateFieldListFilter(FieldListFilter):
    """Canned date-range filter (Any / Today / Past 7 days / This month / year)."""

    def __init__(self, field, request, params, model, model_admin, field_path):
        self.field_generic = '%s__' % field_path
        # Keep only the params that target this field's date lookups; used
        # later to decide which canned link is "selected".
        self.date_params = {k: v for k, v in params.items()
                            if k.startswith(self.field_generic)}
        now = timezone.now()
        # When time zone support is enabled, convert "now" to the user's time
        # zone so Django's definition of "Today" matches what the user expects.
        if timezone.is_aware(now):
            now = timezone.localtime(now)
        if isinstance(field, models.DateTimeField):
            today = now.replace(hour=0, minute=0, second=0, microsecond=0)
        else:       # field is a models.DateField
            today = now.date()
        tomorrow = today + datetime.timedelta(days=1)
        # First day of the next month; December rolls over to January.
        if today.month == 12:
            next_month = today.replace(year=today.year + 1, month=1, day=1)
        else:
            next_month = today.replace(month=today.month + 1, day=1)
        next_year = today.replace(year=today.year + 1, month=1, day=1)
        # Half-open ranges: since <= value < until.
        self.lookup_kwarg_since = '%s__gte' % field_path
        self.lookup_kwarg_until = '%s__lt' % field_path
        self.links = (
            (_('Any date'), {}),
            (_('Today'), {
                self.lookup_kwarg_since: str(today),
                self.lookup_kwarg_until: str(tomorrow),
            }),
            (_('Past 7 days'), {
                self.lookup_kwarg_since: str(today - datetime.timedelta(days=7)),
                self.lookup_kwarg_until: str(tomorrow),
            }),
            (_('This month'), {
                self.lookup_kwarg_since: str(today.replace(day=1)),
                self.lookup_kwarg_until: str(next_month),
            }),
            (_('This year'), {
                self.lookup_kwarg_since: str(today.replace(month=1, day=1)),
                self.lookup_kwarg_until: str(next_year),
            }),
        )
        super(DateFieldListFilter, self).__init__(
            field, request, params, model, model_admin, field_path)

    def expected_parameters(self):
        return [self.lookup_kwarg_since, self.lookup_kwarg_until]

    def choices(self, cl):
        for title, param_dict in self.links:
            # A link is selected when its params exactly match the request's.
            yield {
                'selected': self.date_params == param_dict,
                'query_string': cl.get_query_string(
                    param_dict, [self.field_generic]),
                'display': title,
            }
# Date and DateTime fields get the canned date-range filter.
FieldListFilter.register(
    lambda f: isinstance(f, models.DateField), DateFieldListFilter)
# This should be registered last, because it's a last resort. For example,
# if a field is eligible to use the BooleanFieldListFilter, that'd be much
# more appropriate, and the AllValuesFieldListFilter won't get used for it.
class AllValuesFieldListFilter(FieldListFilter):
    """Fallback filter: one choice per distinct value found in the database."""

    def __init__(self, field, request, params, model, model_admin, field_path):
        self.lookup_kwarg = field_path
        self.lookup_kwarg_isnull = '%s__isnull' % field_path
        self.lookup_val = request.GET.get(self.lookup_kwarg)
        self.lookup_val_isnull = request.GET.get(self.lookup_kwarg_isnull)
        self.empty_value_display = model_admin.get_empty_value_display()
        # field_path may traverse relations; resolve the model the values
        # actually live on.
        parent_model, reverse_path = reverse_field_path(model, field_path)
        # Obey parent ModelAdmin queryset when deciding which options to show
        if model == parent_model:
            queryset = model_admin.get_queryset(request)
        else:
            queryset = parent_model._default_manager.all()
        # Lazy queryset of distinct values; evaluated when choices() iterates.
        self.lookup_choices = (queryset
                               .distinct()
                               .order_by(field.name)
                               .values_list(field.name, flat=True))
        super(AllValuesFieldListFilter, self).__init__(
            field, request, params, model, model_admin, field_path)

    def expected_parameters(self):
        return [self.lookup_kwarg, self.lookup_kwarg_isnull]

    def choices(self, cl):
        yield {
            'selected': (self.lookup_val is None
                and self.lookup_val_isnull is None),
            'query_string': cl.get_query_string({},
                [self.lookup_kwarg, self.lookup_kwarg_isnull]),
            'display': _('All'),
        }
        include_none = False
        for val in self.lookup_choices:
            # NULL values are collapsed into a single trailing choice.
            if val is None:
                include_none = True
                continue
            val = smart_text(val)
            yield {
                'selected': self.lookup_val == val,
                'query_string': cl.get_query_string({
                    self.lookup_kwarg: val,
                }, [self.lookup_kwarg_isnull]),
                'display': val,
            }
        if include_none:
            yield {
                'selected': bool(self.lookup_val_isnull),
                'query_string': cl.get_query_string({
                    self.lookup_kwarg_isnull: 'True',
                }, [self.lookup_kwarg]),
                'display': self.empty_value_display,
            }
# Catch-all fallback; registered last so more specific filters win.
FieldListFilter.register(lambda f: True, AllValuesFieldListFilter)
class RelatedOnlyFieldListFilter(RelatedFieldListFilter):
    """Like RelatedFieldListFilter, but offers only related objects that are
    actually referenced by rows in the ModelAdmin's queryset."""

    def field_choices(self, field, request, model_admin):
        # Restrict the choice list to primary keys present in the changelist.
        referenced_pks = model_admin.get_queryset(request).values_list(field.name, flat=True)
        return field.get_choices(include_blank=False,
                                 limit_choices_to={'pk__in': set(referenced_pks)})
|
artistic-2.0
|
tboyce1/home-assistant
|
tests/components/scene/test_init.py
|
20
|
4086
|
"""The tests for the Scene component."""
import io
import unittest
from homeassistant.setup import setup_component
from homeassistant import loader
from homeassistant.components import light, scene
from homeassistant.util import yaml
from tests.common import get_test_home_assistant
class TestScene(unittest.TestCase):
    """Test the scene component."""

    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        test_light = loader.get_component('light.test')
        test_light.init()
        self.assertTrue(setup_component(self.hass, light.DOMAIN, {
            light.DOMAIN: {'platform': 'test'}
        }))
        # Grab two mock light devices and start each test from a known
        # all-off state.
        self.light_1, self.light_2 = test_light.DEVICES[0:2]
        light.turn_off(
            self.hass, [self.light_1.entity_id, self.light_2.entity_id])
        self.hass.block_till_done()
        self.assertFalse(self.light_1.is_on)
        self.assertFalse(self.light_2.is_on)

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()

    def test_config_yaml_alias_anchor(self):
        """Test the usage of YAML aliases and anchors.
        The following test scene configuration is equivalent to:
        scene:
          - name: test
            entities:
                light_1: &light_1_state
                    state: 'on'
                    brightness: 100
                light_2: *light_1_state
        When encountering a YAML alias/anchor, the PyYAML parser will use a
        reference to the original dictionary, instead of creating a copy, so
        care needs to be taken to not modify the original.
        """
        # The same dict object is deliberately shared between both entities
        # to simulate the alias/anchor case described above.
        entity_state = {
            'state': 'on',
            'brightness': 100,
        }
        self.assertTrue(setup_component(self.hass, scene.DOMAIN, {
            'scene': [{
                'name': 'test',
                'entities': {
                    self.light_1.entity_id: entity_state,
                    self.light_2.entity_id: entity_state,
                }
            }]
        }))
        scene.activate(self.hass, 'scene.test')
        self.hass.block_till_done()
        self.assertTrue(self.light_1.is_on)
        self.assertTrue(self.light_2.is_on)
        self.assertEqual(
            100, self.light_1.last_call('turn_on')[1].get('brightness'))
        self.assertEqual(
            100, self.light_2.last_call('turn_on')[1].get('brightness'))

    def test_config_yaml_bool(self):
        """Test parsing of booleans in yaml config."""
        # Note the bare `on` values: YAML 1.1 parses these as booleans, which
        # is exactly the case this test exercises.
        config = (
            'scene:\n'
            '  - name: test\n'
            '    entities:\n'
            '      {0}: on\n'
            '      {1}:\n'
            '        state: on\n'
            '        brightness: 100\n').format(
                self.light_1.entity_id, self.light_2.entity_id)
        with io.StringIO(config) as file:
            doc = yaml.yaml.safe_load(file)
        self.assertTrue(setup_component(self.hass, scene.DOMAIN, doc))
        scene.activate(self.hass, 'scene.test')
        self.hass.block_till_done()
        self.assertTrue(self.light_1.is_on)
        self.assertTrue(self.light_2.is_on)
        self.assertEqual(
            100, self.light_2.last_call('turn_on')[1].get('brightness'))

    def test_activate_scene(self):
        """Test active scene."""
        # Mixed entity formats: a bare state string and a full state dict.
        self.assertTrue(setup_component(self.hass, scene.DOMAIN, {
            'scene': [{
                'name': 'test',
                'entities': {
                    self.light_1.entity_id: 'on',
                    self.light_2.entity_id: {
                        'state': 'on',
                        'brightness': 100,
                    }
                }
            }]
        }))
        scene.activate(self.hass, 'scene.test')
        self.hass.block_till_done()
        self.assertTrue(self.light_1.is_on)
        self.assertTrue(self.light_2.is_on)
        self.assertEqual(
            100, self.light_2.last_call('turn_on')[1].get('brightness'))
|
apache-2.0
|
Simran-B/arangodb
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_shelve.py
|
55
|
4168
|
import os
import unittest
import shelve
import glob
from test import test_support
class TestCase(unittest.TestCase):
    """Direct tests of shelve.Shelf / shelve.open behavior."""
    # Temporary database filename used by the file-backed tests.
    fn = "shelftemp" + os.extsep + "db"

    def test_close(self):
        # A closed shelf must refuse further access.
        d1 = {}
        s = shelve.Shelf(d1, protocol=2, writeback=False)
        s['key1'] = [1,2,3,4]
        self.assertEqual(s['key1'], [1,2,3,4])
        self.assertEqual(len(s), 1)
        s.close()
        self.assertRaises(ValueError, len, s)
        try:
            s['key1']
        except ValueError:
            pass
        else:
            self.fail('Closed shelf should not find a key')

    def test_ascii_file_shelf(self):
        # Protocol 0 (ASCII pickles) round-trip through a file-backed shelf.
        try:
            s = shelve.open(self.fn, protocol=0)
            s['key1'] = (1,2,3,4)
            self.assertEqual(s['key1'], (1,2,3,4))
            s.close()
        finally:
            # The underlying dbm may create several files (e.g. .dir/.dat);
            # remove them all.
            for f in glob.glob(self.fn+"*"):
                os.unlink(f)

    def test_binary_file_shelf(self):
        try:
            s = shelve.open(self.fn, protocol=1)
            s['key1'] = (1,2,3,4)
            self.assertEqual(s['key1'], (1,2,3,4))
            s.close()
        finally:
            for f in glob.glob(self.fn+"*"):
                os.unlink(f)

    def test_proto2_file_shelf(self):
        try:
            s = shelve.open(self.fn, protocol=2)
            s['key1'] = (1,2,3,4)
            self.assertEqual(s['key1'], (1,2,3,4))
            s.close()
        finally:
            for f in glob.glob(self.fn+"*"):
                os.unlink(f)

    def test_in_memory_shelf(self):
        # Shelves over plain dicts; the backing dicts end up with pickled
        # values, so equal inputs under different protocols compare unequal.
        d1 = {}
        s = shelve.Shelf(d1, protocol=0)
        s['key1'] = (1,2,3,4)
        self.assertEqual(s['key1'], (1,2,3,4))
        s.close()
        d2 = {}
        s = shelve.Shelf(d2, protocol=1)
        s['key1'] = (1,2,3,4)
        self.assertEqual(s['key1'], (1,2,3,4))
        s.close()
        self.assertEqual(len(d1), 1)
        self.assertNotEqual(d1, d2)

    def test_mutable_entry(self):
        # Without writeback, in-place mutation of a fetched value is lost...
        d1 = {}
        s = shelve.Shelf(d1, protocol=2, writeback=False)
        s['key1'] = [1,2,3,4]
        self.assertEqual(s['key1'], [1,2,3,4])
        s['key1'].append(5)
        self.assertEqual(s['key1'], [1,2,3,4])
        s.close()
        # ...with writeback=True the mutation is cached and persisted.
        d2 = {}
        s = shelve.Shelf(d2, protocol=2, writeback=True)
        s['key1'] = [1,2,3,4]
        self.assertEqual(s['key1'], [1,2,3,4])
        s['key1'].append(5)
        self.assertEqual(s['key1'], [1,2,3,4,5])
        s.close()
        self.assertEqual(len(d1), 1)
        self.assertEqual(len(d2), 1)
from test import mapping_tests
class TestShelveBase(mapping_tests.BasicTestMappingProtocol):
    """Run the generic mapping-protocol test suite against shelve.Shelf.

    Subclasses supply ``_args`` (kwargs for the shelf constructor) and
    ``_in_mem`` (dict-backed vs file-backed).
    """
    fn = "shelftemp.db"
    # Suffix counter so each file-backed mapping gets a distinct filename.
    counter = 0

    def __init__(self, *args, **kw):
        # Track every shelf we open so tearDown can close them all.
        self._db = []
        mapping_tests.BasicTestMappingProtocol.__init__(self, *args, **kw)
    type2test = shelve.Shelf

    def _reference(self):
        return {"key1":"value1", "key2":2, "key3":(1,2,3)}

    def _empty_mapping(self):
        # Hand the base suite a fresh, empty shelf (in-memory or on disk).
        if self._in_mem:
            x= shelve.Shelf({}, **self._args)
        else:
            self.counter+=1
            x= shelve.open(self.fn+str(self.counter), **self._args)
        self._db.append(x)
        return x

    def tearDown(self):
        for db in self._db:
            db.close()
        self._db = []
        if not self._in_mem:
            for f in glob.glob(self.fn+"*"):
                test_support.unlink(f)
# Concrete parameterizations of TestShelveBase: one class per pickle
# protocol (0/1/2) crossed with file-backed vs in-memory shelves.
class TestAsciiFileShelve(TestShelveBase):
    _args={'protocol':0}
    _in_mem = False
class TestBinaryFileShelve(TestShelveBase):
    _args={'protocol':1}
    _in_mem = False
class TestProto2FileShelve(TestShelveBase):
    _args={'protocol':2}
    _in_mem = False
class TestAsciiMemShelve(TestShelveBase):
    _args={'protocol':0}
    _in_mem = True
class TestBinaryMemShelve(TestShelveBase):
    _args={'protocol':1}
    _in_mem = True
class TestProto2MemShelve(TestShelveBase):
    _args={'protocol':2}
    _in_mem = True
def test_main():
    """Run every shelve test class through regrtest's unittest driver."""
    test_classes = (
        TestAsciiFileShelve,
        TestBinaryFileShelve,
        TestProto2FileShelve,
        TestAsciiMemShelve,
        TestBinaryMemShelve,
        TestProto2MemShelve,
        TestCase,
    )
    test_support.run_unittest(*test_classes)

if __name__ == "__main__":
    test_main()
|
apache-2.0
|
GheRivero/ansible
|
test/units/modules/network/f5/test_bigiq_regkey_license_assignment.py
|
12
|
4419
|
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigiq_regkey_license_assignment import ApiParameters
from library.modules.bigiq_regkey_license_assignment import ModuleParameters
from library.modules.bigiq_regkey_license_assignment import ModuleManager
from library.modules.bigiq_regkey_license_assignment import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigiq_regkey_license_assignment import ApiParameters
from ansible.modules.network.f5.bigiq_regkey_license_assignment import ModuleParameters
from ansible.modules.network.f5.bigiq_regkey_license_assignment import ModuleManager
from ansible.modules.network.f5.bigiq_regkey_license_assignment import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load (and memoize) a fixture file from the ``fixtures`` directory.

    The file's contents are parsed as JSON when possible; otherwise the raw
    string is returned.  Results are cached in the module-level
    ``fixture_data`` dict keyed by path, so each fixture is read only once.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as f:
        data = f.read()
    try:
        data = json.loads(data)
    except ValueError:
        # Not JSON -- keep the raw text.  ValueError (base class of
        # json.JSONDecodeError) is caught instead of a blanket Exception so
        # unrelated errors are no longer silently swallowed.
        pass
    fixture_data[path] = data
    return data
class TestParameters(unittest.TestCase):
    """Verify ModuleParameters exposes the expected attribute values."""

    def test_module_parameters_unmanaged(self):
        # Unmanaged devices require explicit credentials and port.
        args = dict(
            pool='foo-pool',
            key='XXXX-XXXX-XXXX-XXXX-XXXX',
            device='1.1.1.1',
            managed=False,
            device_username='admin',
            device_password='secret',
            device_port='8443'
        )
        p = ModuleParameters(params=args)
        assert p.pool == 'foo-pool'
        assert p.key == 'XXXX-XXXX-XXXX-XXXX-XXXX'
        assert p.device == '1.1.1.1'
        assert p.managed is False
        assert p.device_username == 'admin'
        assert p.device_password == 'secret'
        # The string port from the args is coerced to an int by the module.
        assert p.device_port == 8443

    def test_module_parameters_managed(self):
        # Managed devices omit the credential/port parameters entirely.
        args = dict(
            pool='foo-pool',
            key='XXXX-XXXX-XXXX-XXXX-XXXX',
            device='1.1.1.1',
            managed=True,
        )
        p = ModuleParameters(params=args)
        assert p.pool == 'foo-pool'
        assert p.key == 'XXXX-XXXX-XXXX-XXXX-XXXX'
        assert p.device == '1.1.1.1'
        assert p.managed is True
class TestManager(unittest.TestCase):
    """Exercise ModuleManager.exec_module with device interactions mocked out."""

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_create(self, *args):
        set_module_args(dict(
            pool='foo-pool',
            key='XXXX-XXXX-XXXX-XXXX-XXXX',
            device='1.1.1.1',
            device_username='admin',
            device_password='secret',
            managed='no',
            state='present',
            password='passsword',
            server='localhost',
            user='admin'
        ))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode,
            required_if=self.spec.required_if
        )
        mm = ModuleManager(module=module)
        # Override methods to force specific logic in the module to happen.
        # exists -> [False, True] simulates "absent before create, present
        # after create".
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)
        mm.wait_for_device_to_be_licensed = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
|
gpl-3.0
|
Wilee999/panda3d
|
contrib/src/sceneeditor/seMopathRecorder.py
|
8
|
85754
|
#################################################################
# seMopathRecorder.py
# Originally from MopathRecorder.py
# Altered by Yi-Hong Lin, yihhongl@andrew.cmu.edu, 2004
#
# We have to change something about data flow and UI
# so the curves data can be saved into our dataHolder.
# Other things we have changed here is that we have added a copy model
# of target nodePath under the render when the recording begins.
# And, of course, we have removed it right after the recording ends.
# You can find it in toggleRecord function.
#
#################################################################
from direct.showbase.DirectObject import DirectObject
from direct.tkwidgets.AppShell import AppShell
#from direct.directtools.DirectGlobals import *
#from direct.directtools.DirectUtil import *
from seGeometry import *
from seSelection import *
from direct.task.Task import Task
from direct.tkwidgets.Dial import AngleDial
from direct.tkwidgets.Floater import Floater
from direct.tkwidgets.Slider import Slider
from direct.tkwidgets.EntryScale import EntryScale
from direct.tkwidgets.VectorWidgets import Vector2Entry, Vector3Entry
from direct.tkwidgets.VectorWidgets import ColorEntry
from Tkinter import Button, Frame, Radiobutton, Checkbutton, Label
from Tkinter import StringVar, BooleanVar, Entry, Scale
import os, string, Tkinter, Pmw
import __builtin__
# Source strings for "utility" commands offered by the recorder panel.
# NOTE(review): these appear to be eval'ed later into callables (the
# ``s = self`` default binds the panel instance) -- confirm where they are
# evaluated before relying on this.
PRF_UTILITIES = [
    'lambda: camera.lookAt(render)',
    'lambda: camera.setZ(render, 0.0)',
    'lambda s = self: s.playbackMarker.lookAt(render)',
    'lambda s = self: s.playbackMarker.setZ(render, 0.0)',
    'lambda s = self: s.followTerrain(10.0)']
class MopathRecorder(AppShell, DirectObject):
# Override class variables here
appname = 'Mopath Recorder Panel'
frameWidth = 450
frameHeight = 550
usecommandarea = 0
usestatusarea = 0
count = 0
def __init__(self, parent = None, **kw):
INITOPT = Pmw.INITOPT
name = 'recorder-%d' % MopathRecorder.count
MopathRecorder.count += 1
optiondefs = (
('title', self.appname, None),
('nodePath', None, None),
('name', name, None)
)
self.defineoptions(kw, optiondefs)
# Call superclass initialization function
AppShell.__init__(self)
self.initialiseoptions(MopathRecorder)
self.selectNodePathNamed('camera')
self.parent.resizable(False,False) ## Disable the ability to resize for this Window.
    def appInit(self):
        """One-time state initialization called by AppShell before the GUI
        is built: creates the recorder's scene-graph nodes, the playback and
        tangent markers, the curve fitter/drawer, all bookkeeping variables,
        and installs the DIRECT event hooks."""
        self.mopathRecorderNode = render.attachNewNode("MopathRecorder")
        self.name = self['name']
        # Dictionary of widgets
        self.widgetDict = {}
        self.variableDict = {}
        # Initialize state
        # The active node path
        self.nodePath = self['nodePath']
        self.playbackNodePath = self.nodePath
        # The active node path's parent
        self.nodePathParent = render
        # Top level node path
        self.recorderNodePath = self.mopathRecorderNode.attachNewNode(self.name)
        # Temp CS for use in refinement/path extension
        self.tempCS = self.recorderNodePath.attachNewNode(
            'mopathRecorderTempCS')
        # Marker for use in playback
        self.playbackMarker = loader.loadModel('models/misc/sphere') ###
        self.playbackMarker.setName('Playback Marker')
        self.playbackMarker.reparentTo(self.recorderNodePath)
        # Cache descendant ids so picking a sub-node can be mapped back to
        # the marker (see selectedNodePathHook).
        self.playbackMarkerIds = self.getChildIds(
            self.playbackMarker.getChild(0))
        self.playbackMarker.hide()
        # Tangent marker
        self.tangentGroup = self.playbackMarker.attachNewNode('Tangent Group')
        self.tangentGroup.hide()
        self.tangentMarker = loader.loadModel('models/misc/sphere')
        self.tangentMarker.reparentTo(self.tangentGroup)
        self.tangentMarker.setScale(0.5)
        self.tangentMarker.setColor(1,0,1,1)
        self.tangentMarker.setName('Tangent Marker')
        self.tangentMarkerIds = self.getChildIds(
            self.tangentMarker.getChild(0))
        # Line from playback marker to tangent marker (updated each frame
        # by curveEditTask).
        self.tangentLines = LineNodePath(self.tangentGroup)
        self.tangentLines.setColor(VBase4(1,0,1,1))
        self.tangentLines.setThickness(1)
        self.tangentLines.moveTo(0,0,0)
        self.tangentLines.drawTo(0,0,0)
        self.tangentLines.create()
        # Active node path dictionary
        self.nodePathDict = {}
        self.nodePathDict['marker'] = self.playbackMarker
        self.nodePathDict['camera'] = camera
        self.nodePathDict['widget'] = SEditor.widget
        self.nodePathDict['mopathRecorderTempCS'] = self.tempCS
        self.nodePathNames = ['marker', 'camera', 'selected']
        # ID of selected object
        self.manipulandumId = None
        # Visual trace of the raw recorded motion.
        self.trace = LineNodePath(self.recorderNodePath)
        self.oldPlaybackNodePath = None
        # Count of point sets recorded
        self.pointSet = []
        self.prePoints = []
        self.postPoints = []
        self.pointSetDict = {}
        self.pointSetCount = 0
        self.pointSetName = self.name + '-ps-' + `self.pointSetCount`
        # User callback to call before recording point
        self.samplingMode = 'Continuous'
        self.preRecordFunc = None
        # Hook to start/stop recording
        self.startStopHook = 'f6'
        self.keyframeHook = 'f10'
        # Curve fitter object
        self.lastPos = Point3(0)
        self.curveFitter = CurveFitter()
        # Curve variables
        # Number of ticks per parametric unit
        self.numTicks = 1
        # Number of segments to represent each parametric unit
        # This just affects the visual appearance of the curve
        self.numSegs = 40
        # The nurbs curves
        self.curveCollection = None
        # Curve drawers
        self.nurbsCurveDrawer = NurbsCurveDrawer()
        self.nurbsCurveDrawer.setCurves(ParametricCurveCollection())
        self.nurbsCurveDrawer.setNumSegs(self.numSegs)
        self.nurbsCurveDrawer.setShowHull(0)
        self.nurbsCurveDrawer.setShowCvs(0)
        self.nurbsCurveDrawer.setNumTicks(0)
        self.nurbsCurveDrawer.setTickScale(5.0)
        self.curveNodePath = self.recorderNodePath.attachNewNode(
            self.nurbsCurveDrawer.getGeomNode())
        useDirectRenderStyle(self.curveNodePath)
        # Playback variables
        self.maxT = 0.0
        self.playbackTime = 0.0
        self.loopPlayback = 1
        self.playbackSF = 1.0
        # Sample variables
        self.desampleFrequency = 1
        self.numSamples = 100
        self.recordStart = 0.0
        self.deltaTime = 0.0
        self.controlStart = 0.0
        self.controlStop = 0.0
        self.recordStop = 0.0
        self.cropFrom = 0.0
        self.cropTo = 0.0
        self.fAdjustingValues = 0
        # For terrain following
        self.iRayCS = self.recorderNodePath.attachNewNode(
            'mopathRecorderIRayCS')
        self.iRay = SelectionRay(self.iRayCS)
        # Set up event hooks
        self.actionEvents = [
            ('DIRECT_undo', self.undoHook),
            ('DIRECT_pushUndo', self.pushUndoHook),
            ('DIRECT_undoListEmpty', self.undoListEmptyHook),
            ('DIRECT_redo', self.redoHook),
            ('DIRECT_pushRedo', self.pushRedoHook),
            ('DIRECT_redoListEmpty', self.redoListEmptyHook),
            ('DIRECT_selectedNodePath', self.selectedNodePathHook),
            ('DIRECT_deselectedNodePath', self.deselectedNodePathHook),
            ('DIRECT_manipulateObjectStart', self.manipulateObjectStartHook),
            ('DIRECT_manipulateObjectCleanup',
             self.manipulateObjectCleanupHook),
            ]
        for event, method in self.actionEvents:
            self.accept(event, method)
    def createInterface(self):
        """Build the entire recorder GUI: menu-bar additions, the recording
        and playback control strips, and the Resample / Refine / Extend /
        Crop / Draw / Options notebook pages.  Called once by AppShell."""
        interior = self.interior()
        # FILE MENU
        # Get a handle on the file menu so commands can be inserted
        # before quit item
        fileMenu = self.menuBar.component('File-menu')
        fileMenu.insert_command(
            fileMenu.index('Quit'),
            label = 'Load Curve',
            command = self.loadCurveFromFile)
        fileMenu.insert_command(
            fileMenu.index('Quit'),
            label = 'Save Curve',
            command = self.saveCurveToFile)
        # Add mopath recorder commands to menubar
        self.menuBar.addmenu('Recorder', 'Mopath Recorder Panel Operations')
        self.menuBar.addmenuitem(
            'Recorder', 'command',
            'Save current curve as a new point set',
            label = 'Save Point Set',
            command = self.extractPointSetFromCurveCollection)
        self.menuBar.addmenuitem(
            'Recorder', 'command',
            'Toggle widget visability',
            label = 'Toggle Widget Vis',
            command = self.toggleWidgetVis)
        self.menuBar.addmenuitem(
            'Recorder', 'command',
            'Toggle widget manipulation mode',
            label = 'Toggle Widget Mode',
            command = SEditor.manipulationControl.toggleObjectHandlesMode)
        # Combo box listing all recorded point sets.
        self.historyWidget = self.createComboBox(self.menuFrame, 'Mopath', 'Path:',
                                                 'Select input points to fit curve to', '',
                                                 self.selectPointSetNamed, expand = 1)
        self.undoButton = Button(self.menuFrame, text = 'Undo',
                                 command = SEditor.undo)
        if SEditor.undoList:
            self.undoButton['state'] = 'normal'
        else:
            self.undoButton['state'] = 'disabled'
        self.undoButton.pack(side = Tkinter.LEFT, expand = 0)
        self.bind(self.undoButton, 'Undo last operation')
        self.redoButton = Button(self.menuFrame, text = 'Redo',
                                 command = SEditor.redo)
        if SEditor.redoList:
            self.redoButton['state'] = 'normal'
        else:
            self.redoButton['state'] = 'disabled'
        self.redoButton.pack(side = Tkinter.LEFT, expand = 0)
        self.bind(self.redoButton, 'Redo last operation')
        # Record button
        mainFrame = Frame(interior, relief = Tkinter.SUNKEN, borderwidth = 2)
        frame = Frame(mainFrame)
        # Active node path
        # Button to select active node path
        widget = self.createButton(frame, 'Recording', 'Node Path:',
                                   'Select Active Mopath Node Path',
                                   lambda s = self: SEditor.select(s.nodePath),
                                   side = Tkinter.LEFT, expand = 0)
        widget['relief'] = Tkinter.FLAT
        self.nodePathMenu = Pmw.ComboBox(
            frame, entry_width = 20,
            selectioncommand = self.selectNodePathNamed,
            scrolledlist_items = self.nodePathNames)
        self.nodePathMenu.selectitem('camera')
        self.nodePathMenuEntry = (
            self.nodePathMenu.component('entryfield_entry'))
        self.nodePathMenuBG = (
            self.nodePathMenuEntry.configure('background')[3])
        self.nodePathMenu.pack(side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        self.bind(self.nodePathMenu,
                  'Select active node path used for recording and playback')
        # Recording type
        self.recordingType = StringVar()
        self.recordingType.set('New Curve')
        widget = self.createRadiobutton(
            frame, 'left',
            'Recording', 'New Curve',
            ('Next record session records a new path'),
            self.recordingType, 'New Curve',expand = 0)
        widget = self.createRadiobutton(
            frame, 'left',
            'Recording', 'Refine',
            ('Next record session refines existing path'),
            self.recordingType, 'Refine', expand = 0)
        widget = self.createRadiobutton(
            frame, 'left',
            'Recording', 'Extend',
            ('Next record session extends existing path'),
            self.recordingType, 'Extend', expand = 0)
        frame.pack(fill = Tkinter.X, expand = 1)
        frame = Frame(mainFrame)
        widget = self.createCheckbutton(
            frame, 'Recording', 'Record',
            'On: path is being recorded', self.toggleRecord, 0,
            side = Tkinter.LEFT, fill = Tkinter.BOTH, expand = 1)
        widget.configure(foreground = 'Red', relief = Tkinter.RAISED, borderwidth = 2,
                         anchor = Tkinter.CENTER, width = 16)
        widget = self.createButton(frame, 'Recording', 'Add Keyframe',
                                   'Add Keyframe To Current Path',
                                   self.addKeyframe,
                                   side = Tkinter.LEFT, expand = 1)
        widget = self.createButton(frame, 'Recording', 'Bind Path to Node',
                                   'Bind Motion Path to selected Object',
                                   self.bindMotionPathToNode,
                                   side = Tkinter.LEFT, expand = 1)
        frame.pack(fill = Tkinter.X, expand = 1)
        mainFrame.pack(expand = 1, fill = Tkinter.X, pady = 3)
        # Playback controls
        playbackFrame = Frame(interior, relief = Tkinter.SUNKEN,
                              borderwidth = 2)
        Label(playbackFrame, text = 'PLAYBACK CONTROLS',
              font=('MSSansSerif', 12, 'bold')).pack(fill = Tkinter.X)
        # Main playback control slider
        widget = self.createEntryScale(
            playbackFrame, 'Playback', 'Time', 'Set current playback time',
            resolution = 0.01, command = self.playbackGoTo, side = Tkinter.TOP)
        widget.component('hull')['relief'] = Tkinter.RIDGE
        # Kill playback task if drag slider
        widget['preCallback'] = self.stopPlayback
        # Jam duration entry into entry scale
        self.createLabeledEntry(widget.labelFrame, 'Resample', 'Path Duration',
                                'Set total curve duration',
                                command = self.setPathDuration,
                                side = Tkinter.LEFT, expand = 0)
        # Start stop buttons
        frame = Frame(playbackFrame)
        widget = self.createButton(frame, 'Playback', '<<',
                                   'Jump to start of playback',
                                   self.jumpToStartOfPlayback,
                                   side = Tkinter.LEFT, expand = 1)
        widget['font'] = (('MSSansSerif', 12, 'bold'))
        widget = self.createCheckbutton(frame, 'Playback', 'Play',
                                        'Start/Stop playback',
                                        self.startStopPlayback, 0,
                                        side = Tkinter.LEFT, fill = Tkinter.BOTH, expand = 1)
        widget.configure(anchor = 'center', justify = 'center',
                         relief = Tkinter.RAISED, font = ('MSSansSerif', 12, 'bold'))
        widget = self.createButton(frame, 'Playback', '>>',
                                   'Jump to end of playback',
                                   self.jumpToEndOfPlayback,
                                   side = Tkinter.LEFT, expand = 1)
        widget['font'] = (('MSSansSerif', 12, 'bold'))
        self.createCheckbutton(frame, 'Playback', 'Loop',
                               'On: loop playback',
                               self.setLoopPlayback, self.loopPlayback,
                               side = Tkinter.LEFT, fill = Tkinter.BOTH, expand = 0)
        frame.pack(fill = Tkinter.X, expand = 1)
        # Speed control
        frame = Frame(playbackFrame)
        widget = Button(frame, text = 'PB Speed Vernier', relief = Tkinter.FLAT,
                        command = lambda s = self: s.setSpeedScale(1.0))
        widget.pack(side = Tkinter.LEFT, expand = 0)
        self.speedScale = Scale(frame, from_ = -1, to = 1,
                                resolution = 0.01, showvalue = 0,
                                width = 10, orient = 'horizontal',
                                command = self.setPlaybackSF)
        self.speedScale.pack(side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        self.speedVar = StringVar()
        self.speedVar.set("0.00")
        self.speedEntry = Entry(frame, textvariable = self.speedVar,
                                width = 8)
        self.speedEntry.bind(
            '<Return>',
            lambda e = None, s = self: s.setSpeedScale(
            string.atof(s.speedVar.get())))
        self.speedEntry.pack(side = Tkinter.LEFT, expand = 0)
        frame.pack(fill = Tkinter.X, expand = 1)
        playbackFrame.pack(fill = Tkinter.X, pady = 2)
        # Create notebook pages
        self.mainNotebook = Pmw.NoteBook(interior)
        self.mainNotebook.pack(fill = Tkinter.BOTH, expand = 1)
        self.resamplePage = self.mainNotebook.add('Resample')
        self.refinePage = self.mainNotebook.add('Refine')
        self.extendPage = self.mainNotebook.add('Extend')
        self.cropPage = self.mainNotebook.add('Crop')
        self.drawPage = self.mainNotebook.add('Draw')
        self.optionsPage = self.mainNotebook.add('Options')
        ## RESAMPLE PAGE
        label = Label(self.resamplePage, text = 'RESAMPLE CURVE',
                      font=('MSSansSerif', 12, 'bold'))
        label.pack(fill = Tkinter.X)
        # Resample
        resampleFrame = Frame(
            self.resamplePage, relief = Tkinter.SUNKEN, borderwidth = 2)
        label = Label(resampleFrame, text = 'RESAMPLE CURVE',
                      font=('MSSansSerif', 12, 'bold')).pack()
        widget = self.createSlider(
            resampleFrame, 'Resample', 'Num. Samples',
            'Number of samples in resampled curve',
            resolution = 1, min = 2, max = 1000, command = self.setNumSamples)
        widget.component('hull')['relief'] = Tkinter.RIDGE
        widget['postCallback'] = self.sampleCurve
        frame = Frame(resampleFrame)
        self.createButton(
            frame, 'Resample', 'Make Even',
            'Apply timewarp so resulting path has constant velocity',
            self.makeEven, side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        self.createButton(
            frame, 'Resample', 'Face Forward',
            'Compute HPR so resulting hpr curve faces along xyz tangent',
            self.faceForward, side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        frame.pack(fill = Tkinter.X, expand = 0)
        resampleFrame.pack(fill = Tkinter.X, expand = 0, pady = 2)
        # Desample
        desampleFrame = Frame(
            self.resamplePage, relief = Tkinter.SUNKEN, borderwidth = 2)
        Label(desampleFrame, text = 'DESAMPLE CURVE',
              font=('MSSansSerif', 12, 'bold')).pack()
        widget = self.createSlider(
            desampleFrame, 'Resample', 'Points Between Samples',
            'Specify number of points to skip between samples',
            min = 1, max = 100, resolution = 1,
            command = self.setDesampleFrequency)
        widget.component('hull')['relief'] = Tkinter.RIDGE
        widget['postCallback'] = self.desampleCurve
        desampleFrame.pack(fill = Tkinter.X, expand = 0, pady = 2)
        ## REFINE PAGE ##
        refineFrame = Frame(self.refinePage, relief = Tkinter.SUNKEN,
                            borderwidth = 2)
        label = Label(refineFrame, text = 'REFINE CURVE',
                      font=('MSSansSerif', 12, 'bold'))
        label.pack(fill = Tkinter.X)
        widget = self.createSlider(refineFrame,
                                   'Refine Page', 'Refine From',
                                   'Begin time of refine pass',
                                   resolution = 0.01,
                                   command = self.setRecordStart)
        widget['preCallback'] = self.setRefineMode
        widget['postCallback'] = lambda s = self: s.getPrePoints('Refine')
        widget = self.createSlider(
            refineFrame, 'Refine Page',
            'Control Start',
            'Time when full control of node path is given during refine pass',
            resolution = 0.01,
            command = self.setControlStart)
        widget['preCallback'] = self.setRefineMode
        widget = self.createSlider(
            refineFrame, 'Refine Page',
            'Control Stop',
            'Time when node path begins transition back to original curve',
            resolution = 0.01,
            command = self.setControlStop)
        widget['preCallback'] = self.setRefineMode
        widget = self.createSlider(refineFrame, 'Refine Page', 'Refine To',
                                   'Stop time of refine pass',
                                   resolution = 0.01,
                                   command = self.setRefineStop)
        widget['preCallback'] = self.setRefineMode
        widget['postCallback'] = self.getPostPoints
        refineFrame.pack(fill = Tkinter.X)
        ## EXTEND PAGE ##
        extendFrame = Frame(self.extendPage, relief = Tkinter.SUNKEN,
                            borderwidth = 2)
        label = Label(extendFrame, text = 'EXTEND CURVE',
                      font=('MSSansSerif', 12, 'bold'))
        label.pack(fill = Tkinter.X)
        widget = self.createSlider(extendFrame,
                                   'Extend Page', 'Extend From',
                                   'Begin time of extend pass',
                                   resolution = 0.01,
                                   command = self.setRecordStart)
        widget['preCallback'] = self.setExtendMode
        widget['postCallback'] = lambda s = self: s.getPrePoints('Extend')
        widget = self.createSlider(
            extendFrame, 'Extend Page',
            'Control Start',
            'Time when full control of node path is given during extend pass',
            resolution = 0.01,
            command = self.setControlStart)
        widget['preCallback'] = self.setExtendMode
        extendFrame.pack(fill = Tkinter.X)
        ## CROP PAGE ##
        cropFrame = Frame(self.cropPage, relief = Tkinter.SUNKEN,
                          borderwidth = 2)
        label = Label(cropFrame, text = 'CROP CURVE',
                      font=('MSSansSerif', 12, 'bold'))
        label.pack(fill = Tkinter.X)
        widget = self.createSlider(
            cropFrame,
            'Crop Page', 'Crop From',
            'Delete all curve points before this time',
            resolution = 0.01,
            command = self.setCropFrom)
        widget = self.createSlider(
            cropFrame,
            'Crop Page', 'Crop To',
            'Delete all curve points after this time',
            resolution = 0.01,
            command = self.setCropTo)
        self.createButton(cropFrame, 'Crop Page', 'Crop Curve',
                          'Crop curve to specified from to times',
                          self.cropCurve, fill = Tkinter.NONE)
        cropFrame.pack(fill = Tkinter.X)
        ## DRAW PAGE ##
        drawFrame = Frame(self.drawPage, relief = Tkinter.SUNKEN,
                          borderwidth = 2)
        self.sf = Pmw.ScrolledFrame(self.drawPage, horizflex = 'elastic')
        self.sf.pack(fill = 'both', expand = 1)
        sfFrame = self.sf.interior()
        label = Label(sfFrame, text = 'CURVE RENDERING STYLE',
                      font=('MSSansSerif', 12, 'bold'))
        label.pack(fill = Tkinter.X)
        frame = Frame(sfFrame)
        Label(frame, text = 'SHOW:').pack(side = Tkinter.LEFT, expand = 0)
        widget = self.createCheckbutton(
            frame, 'Style', 'Path',
            'On: path is visible', self.setPathVis, 1,
            side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        widget = self.createCheckbutton(
            frame, 'Style', 'Knots',
            'On: path knots are visible', self.setKnotVis, 1,
            side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        widget = self.createCheckbutton(
            frame, 'Style', 'CVs',
            'On: path CVs are visible', self.setCvVis, 0,
            side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        widget = self.createCheckbutton(
            frame, 'Style', 'Hull',
            'On: path hull is visible', self.setHullVis, 0,
            side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        widget = self.createCheckbutton(
            frame, 'Style', 'Trace',
            'On: record is visible', self.setTraceVis, 0,
            side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        widget = self.createCheckbutton(
            frame, 'Style', 'Marker',
            'On: playback marker is visible', self.setMarkerVis, 0,
            side = Tkinter.LEFT, fill = Tkinter.X, expand = 1)
        frame.pack(fill = Tkinter.X, expand = 1)
        # Sliders
        widget = self.createSlider(
            sfFrame, 'Style', 'Num Segs',
            'Set number of segments used to approximate each parametric unit',
            min = 1.0, max = 400, resolution = 1.0,
            value = 40,
            command = self.setNumSegs, side = Tkinter.TOP)
        widget.component('hull')['relief'] = Tkinter.RIDGE
        widget = self.createSlider(
            sfFrame, 'Style', 'Num Ticks',
            'Set number of tick marks drawn for each unit of time',
            min = 0.0, max = 10.0, resolution = 1.0,
            value = 0.0,
            command = self.setNumTicks, side = Tkinter.TOP)
        widget.component('hull')['relief'] = Tkinter.RIDGE
        widget = self.createSlider(
            sfFrame, 'Style', 'Tick Scale',
            'Set visible size of time tick marks',
            min = 0.01, max = 100.0, resolution = 0.01,
            value = 5.0,
            command = self.setTickScale, side = Tkinter.TOP)
        widget.component('hull')['relief'] = Tkinter.RIDGE
        self.createColorEntry(
            sfFrame, 'Style', 'Path Color',
            'Color of curve',
            command = self.setPathColor,
            value = [255.0,255.0,255.0,255.0])
        self.createColorEntry(
            sfFrame, 'Style', 'Knot Color',
            'Color of knots',
            command = self.setKnotColor,
            value = [0,0,255.0,255.0])
        self.createColorEntry(
            sfFrame, 'Style', 'CV Color',
            'Color of CVs',
            command = self.setCvColor,
            value = [255.0,0,0,255.0])
        self.createColorEntry(
            sfFrame, 'Style', 'Tick Color',
            'Color of Ticks',
            command = self.setTickColor,
            value = [255.0,0,0,255.0])
        self.createColorEntry(
            sfFrame, 'Style', 'Hull Color',
            'Color of Hull',
            command = self.setHullColor,
            value = [255.0,128.0,128.0,255.0])
        #drawFrame.pack(fill = Tkinter.X)
        ## OPTIONS PAGE ##
        optionsFrame = Frame(self.optionsPage, relief = Tkinter.SUNKEN,
                             borderwidth = 2)
        label = Label(optionsFrame, text = 'RECORDING OPTIONS',
                      font=('MSSansSerif', 12, 'bold'))
        label.pack(fill = Tkinter.X)
        # Hooks
        frame = Frame(optionsFrame)
        widget = self.createLabeledEntry(
            frame, 'Recording', 'Record Hook',
            'Hook used to start/stop recording',
            value = self.startStopHook,
            command = self.setStartStopHook)[0]
        label = self.getWidget('Recording', 'Record Hook-Label')
        label.configure(width = 16, anchor = Tkinter.W)
        self.setStartStopHook()
        widget = self.createLabeledEntry(
            frame, 'Recording', 'Keyframe Hook',
            'Hook used to add a new keyframe',
            value = self.keyframeHook,
            command = self.setKeyframeHook)[0]
        label = self.getWidget('Recording', 'Keyframe Hook-Label')
        label.configure(width = 16, anchor = Tkinter.W)
        self.setKeyframeHook()
        frame.pack(expand = 1, fill = Tkinter.X)
        # PreRecordFunc
        frame = Frame(optionsFrame)
        widget = self.createComboBox(
            frame, 'Recording', 'Pre-Record Func',
            'Function called before sampling each point',
            PRF_UTILITIES, self.setPreRecordFunc,
            history = 1, expand = 1)
        widget.configure(label_width = 16, label_anchor = Tkinter.W)
        widget.configure(entryfield_entry_state = 'normal')
        # Initialize preRecordFunc
        self.preRecordFunc = eval(PRF_UTILITIES[0])
        self.createCheckbutton(frame, 'Recording', 'PRF Active',
                               'On: Pre Record Func enabled',
                               None, 0,
                               side = Tkinter.LEFT, fill = Tkinter.BOTH, expand = 0)
        frame.pack(expand = 1, fill = Tkinter.X)
        # Pack record frame
        optionsFrame.pack(fill = Tkinter.X, pady = 2)
        self.mainNotebook.setnaturalsize()
    def pushUndo(self, fResetRedo = 1):
        # Push the recorded node path's state onto the editor's undo stack.
        # NOTE(review): fResetRedo is currently unused here -- confirm intent.
        SEditor.pushUndo([self.nodePath])
    def undoHook(self):
        # Called after the editor performs an undo; placeholder -- the
        # recorder has no state to refresh yet.
        # Reflect new changes
        pass
def pushUndoHook(self):
# Make sure button is reactivated
self.undoButton.configure(state = 'normal')
def undoListEmptyHook(self):
# Make sure button is deactivated
self.undoButton.configure(state = 'disabled')
    def pushRedo(self):
        # Push the recorded node path's state onto the editor's redo stack.
        SEditor.pushRedo([self.nodePath])
    def redoHook(self):
        # Called after the editor performs a redo; placeholder -- the
        # recorder has no state to refresh yet.
        # Reflect new changes
        pass
def pushRedoHook(self):
# Make sure button is reactivated
self.redoButton.configure(state = 'normal')
def redoListEmptyHook(self):
# Make sure button is deactivated
self.redoButton.configure(state = 'disabled')
def selectedNodePathHook(self, nodePath):
"""
Hook called upon selection of a node path used to select playback
marker if subnode selected
"""
taskMgr.remove(self.name + '-curveEditTask')
print nodePath.id()
if nodePath.id() in self.playbackMarkerIds:
SEditor.select(self.playbackMarker)
elif nodePath.id() in self.tangentMarkerIds:
SEditor.select(self.tangentMarker)
elif nodePath.id() == self.playbackMarker.id():
self.tangentGroup.show()
taskMgr.add(self.curveEditTask,
self.name + '-curveEditTask')
elif nodePath.id() == self.tangentMarker.id():
self.tangentGroup.show()
taskMgr.add(self.curveEditTask,
self.name + '-curveEditTask')
else:
self.tangentGroup.hide()
def getChildIds(self, nodePath):
ids = [nodePath.id()]
kids = nodePath.getChildren()
for kid in kids:
ids += self.getChildIds(kid)
return ids
def deselectedNodePathHook(self, nodePath):
"""
Hook called upon deselection of a node path used to select playback
marker if subnode selected
"""
if ((nodePath.id() == self.playbackMarker.id()) or
(nodePath.id() == self.tangentMarker.id())):
self.tangentGroup.hide()
    def curveEditTask(self,state):
        """Per-frame task while a marker is selected: pushes marker edits
        back into the curve collection (position/hpr or tangent) and keeps
        the tangent line in sync.  Always returns Task.cont."""
        if self.curveCollection != None:
            # Update curve position
            if self.manipulandumId == self.playbackMarker.id():
                # Show playback marker
                self.playbackMarker.getChild(0).show()
                pos = Point3(0)
                hpr = Point3(0)
                pos = self.playbackMarker.getPos(self.nodePathParent)
                hpr = self.playbackMarker.getHpr(self.nodePathParent)
                # Write the dragged marker's pose into the curves at the
                # current playback time.
                self.curveCollection.adjustXyz(
                    self.playbackTime, VBase3(pos[0], pos[1], pos[2]))
                self.curveCollection.adjustHpr(
                    self.playbackTime, VBase3(hpr[0], hpr[1], hpr[2]))
                # Note: this calls recompute on the curves
                self.nurbsCurveDrawer.draw()
            # Update tangent
            if self.manipulandumId == self.tangentMarker.id():
                # If manipulating marker, update tangent
                # Hide playback marker
                self.playbackMarker.getChild(0).hide()
                # Where is tangent marker relative to playback marker
                tan = self.tangentMarker.getPos()
                # Transform this vector to curve space
                tan2Curve = Vec3(
                    self.playbackMarker.getMat(
                    self.nodePathParent).xformVec(tan))
                # Update nurbs curve
                self.curveCollection.getXyzCurve().adjustTangent(
                    self.playbackTime,
                    tan2Curve[0], tan2Curve[1], tan2Curve[2])
                # Note: this calls recompute on the curves
                self.nurbsCurveDrawer.draw()
            else:
                # Show playback marker
                self.playbackMarker.getChild(0).show()
                # Update tangent marker line
                tan = Point3(0)
                self.curveCollection.getXyzCurve().getTangent(
                    self.playbackTime, tan)
                # Transform this point to playback marker space
                tan.assign(
                    self.nodePathParent.getMat(
                    self.playbackMarker).xformVec(tan))
                self.tangentMarker.setPos(tan)
            # In either case update tangent line
            self.tangentLines.setVertex(1, tan[0], tan[1], tan[2])
        return Task.cont
def manipulateObjectStartHook(self):
self.manipulandumId = None
if SEditor.selected.last:
if SEditor.selected.last.id() == self.playbackMarker.id():
self.manipulandumId = self.playbackMarker.id()
elif SEditor.selected.last.id() == self.tangentMarker.id():
self.manipulandumId = self.tangentMarker.id()
    def manipulateObjectCleanupHook(self):
        # Manipulation finished: no marker is being dragged any more.
        # Clear flag
        self.manipulandumId = None
    def onDestroy(self, event):
        """Tear the panel down: unhook events and key bindings, detach the
        recorder's scene-graph nodes, deselect markers, kill tasks, and
        notify listeners.  Order matters: hooks first, then nodes, tasks."""
        # Remove hooks
        for event, method in self.actionEvents:
            self.ignore(event)
        # remove start stop hook
        self.ignore(self.startStopHook)
        self.ignore(self.keyframeHook)
        # Keep curve/trace geometry parented under the node we are about to
        # remove so everything goes away together.
        self.curveNodePath.reparentTo(self.recorderNodePath)
        self.trace.reparentTo(self.recorderNodePath)
        self.recorderNodePath.removeNode()
        # Make sure markers are deselected
        SEditor.deselect(self.playbackMarker)
        SEditor.deselect(self.tangentMarker)
        # Remove tasks
        taskMgr.remove(self.name + '-recordTask')
        taskMgr.remove(self.name + '-playbackTask')
        taskMgr.remove(self.name + '-curveEditTask')
        self.mopathRecorderNode.removeChildren()
        self.mopathRecorderNode.removeNode()
        # Tell the rest of the editor the panel is gone.
        messenger.send('mPath_close')
        messenger.send('SGE_Update Explorer',[render])
def createNewPointSet(self, curveName = None):
if curveName == None:
self.pointSetName = self.name + '-ps-' + `self.pointSetCount`
else:
self.pointSetName = curveName
# Update dictionary and record pointer to new point set
self.pointSet = self.pointSetDict[self.pointSetName] = []
# Update combo box
comboBox = self.getWidget('Mopath', 'Path:')
scrolledList = comboBox.component('scrolledlist')
listbox = scrolledList.component('listbox')
names = list(listbox.get(0,'end'))
names.append(self.pointSetName)
scrolledList.setlist(names)
comboBox.selectitem(self.pointSetName)
# Update count
self.pointSetCount += 1
def extractPointSetFromCurveFitter(self, curveName = None):
# Get new point set based on newly created curve
self.createNewPointSet(curveName)
for i in range(self.curveFitter.getNumSamples()):
time = self.curveFitter.getSampleT(i)
pos = Point3(self.curveFitter.getSampleXyz(i))
hpr = Point3(self.curveFitter.getSampleHpr(i))
self.pointSet.append([time, pos, hpr])
    def extractPointSetFromCurveCollection(self, curveName=None):
        """Resample the fitted curve collection into a new point set.

        NOTE(review): assumes self.curveCollection is non-None and has
        maxT > 0 (getMaxT() is used as a divisor) -- confirm callers
        guarantee this.
        """
        # Use curve to compute new point set
        # Record maxT
        self.maxT = self.curveCollection.getMaxT()
        # Determine num samples
        # Limit point set to 1000 points and samples per second to 30
        samplesPerSegment = min(30.0, 1000.0/self.curveCollection.getMaxT())
        self.setNumSamples(self.maxT * samplesPerSegment)
        # Sample the curve but don't create a new curve collection
        self.sampleCurve(fCompute = 0, curveName = curveName)
        # Update widgets based on new data
        self.updateWidgets()
def selectPointSetNamed(self, name):
self.pointSet = self.pointSetDict.get(name, None)
# Reload points into curve fitter
# Reset curve fitters
self.curveFitter.reset()
for time, pos, hpr in self.pointSet:
# Add it to the curve fitters
self.curveFitter.addXyzHpr(time, pos, hpr)
# Compute curve
self.computeCurves()
def setPathVis(self):
if self.getVariable('Style', 'Path').get():
self.curveNodePath.show()
else:
self.curveNodePath.hide()
def setKnotVis(self):
self.nurbsCurveDrawer.setShowKnots(
self.getVariable('Style', 'Knots').get())
def setCvVis(self):
self.nurbsCurveDrawer.setShowCvs(
self.getVariable('Style', 'CVs').get())
def setHullVis(self):
self.nurbsCurveDrawer.setShowHull(
self.getVariable('Style', 'Hull').get())
def setTraceVis(self):
if self.getVariable('Style', 'Trace').get():
self.trace.show()
else:
self.trace.hide()
def setMarkerVis(self):
if self.getVariable('Style', 'Marker').get():
self.playbackMarker.reparentTo(self.recorderNodePath)
else:
self.playbackMarker.reparentTo(hidden)
def setNumSegs(self, value):
self.numSegs = int(value)
self.nurbsCurveDrawer.setNumSegs(self.numSegs)
def setNumTicks(self, value):
self.nurbsCurveDrawer.setNumTicks(float(value))
def setTickScale(self, value):
self.nurbsCurveDrawer.setTickScale(float(value))
def setPathColor(self, color):
self.nurbsCurveDrawer.setColor(
color[0]/255.0,color[1]/255.0,color[2]/255.0)
self.nurbsCurveDrawer.draw()
def setKnotColor(self, color):
self.nurbsCurveDrawer.setKnotColor(
color[0]/255.0,color[1]/255.0,color[2]/255.0)
def setCvColor(self, color):
self.nurbsCurveDrawer.setCvColor(
color[0]/255.0,color[1]/255.0,color[2]/255.0)
def setTickColor(self, color):
self.nurbsCurveDrawer.setTickColor(
color[0]/255.0,color[1]/255.0,color[2]/255.0)
def setHullColor(self, color):
self.nurbsCurveDrawer.setHullColor(
color[0]/255.0,color[1]/255.0,color[2]/255.0)
def setStartStopHook(self, event = None):
# Clear out old hook
self.ignore(self.startStopHook)
# Record new one
hook = self.getVariable('Recording', 'Record Hook').get()
self.startStopHook = hook
# Add new one
self.accept(self.startStopHook, self.toggleRecordVar)
def setKeyframeHook(self, event = None):
# Clear out old hook
self.ignore(self.keyframeHook)
# Record new one
hook = self.getVariable('Recording', 'Keyframe Hook').get()
self.keyframeHook = hook
# Add new one
self.accept(self.keyframeHook, self.addKeyframe)
    def reset(self):
        """Discard the current point set and fitted curve and hide the
        drawn geometry."""
        self.pointSet = []
        self.hasPoints = 0
        self.curveCollection = None
        self.curveFitter.reset()
        self.nurbsCurveDrawer.hide()
    def setSamplingMode(self, mode):
        # 'Continuous' or 'Keyframe' -- controls how recordPoint is driven.
        self.samplingMode = mode
def disableKeyframeButton(self):
self.getWidget('Recording', 'Add Keyframe')['state'] = 'disabled'
def enableKeyframeButton(self):
self.getWidget('Recording', 'Add Keyframe')['state'] = 'normal'
    def setRecordingType(self, type):
        # One of 'New Curve', 'Refine', 'Extend' (mirrors the radio buttons).
        self.recordingType.set(type)
    def setNewCurveMode(self):
        # Next record session records a brand-new path.
        self.setRecordingType('New Curve')
    def setRefineMode(self):
        # Next record session refines the existing path.
        self.setRecordingType('Refine')
    def setExtendMode(self):
        # Next record session extends the existing path.
        self.setRecordingType('Extend')
def toggleRecordVar(self):
# Get recording variable
v = self.getVariable('Recording', 'Record')
# Toggle it
v.set(1 - v.get())
# Call the command
self.toggleRecord()
    def toggleRecord(self):
        """Start or stop a recording session depending on the 'Record'
        checkbutton state.  On start: attach a visual copy of the target
        node path under render, reset the fitter, and either wait for
        keyframes or launch the continuous record task (flying the node
        along the old curve for Refine/Extend).  On stop: undo all of the
        above, merge refine/extend points, and fit a new curve.

        NOTE(review): the stop branch removes self.markingNode, which only
        exists if a recording was previously started -- confirm the
        checkbutton cannot be toggled off first.
        """
        if self.getVariable('Recording', 'Record').get():
            # Reparent a Marker to target nodePath to show where the recording start
            self.markingNode = self.nodePath.getParent().attachNewNode('MopthMarkerNode')
            self.nodePath.copyTo(self.markingNode)
            self.markingNode.wrtReparentTo(render)
            # Kill old tasks
            taskMgr.remove(self.name + '-recordTask')
            taskMgr.remove(self.name + '-curveEditTask')
            # Remove old curve
            self.nurbsCurveDrawer.hide()
            # Reset curve fitters
            self.curveFitter.reset()
            # Update sampling mode button if necessary
            if self.samplingMode == 'Continuous':
                self.disableKeyframeButton()
            # Create a new point set to hold raw data
            self.createNewPointSet()
            # Clear out old trace, get ready to draw new
            self.initTrace()
            # Keyframe mode?
            if (self.samplingMode == 'Keyframe'):
                # Record first point
                self.lastPos.assign(Point3(
                    self.nodePath.getPos(self.nodePathParent)))
                # Init delta time
                self.deltaTime = 0.0
                # Record first point
                self.recordPoint(self.recordStart)
            # Everything else
            else:
                if ((self.recordingType.get() == 'Refine') or
                    (self.recordingType.get() == 'Extend')):
                    # Turn off looping playback
                    self.loopPlayback = 0
                    # Update widget to reflect new value
                    self.getVariable('Playback', 'Loop').set(0)
                    # Select tempCS as playback nodepath
                    self.oldPlaybackNodePath = self.playbackNodePath
                    self.setPlaybackNodePath(self.tempCS)
                    # Parent record node path to temp
                    self.nodePath.reparentTo(self.playbackNodePath)
                    # Align with temp
                    self.nodePath.setPosHpr(0,0,0,0,0,0)
                    # Set playback start to self.recordStart
                    self.playbackGoTo(self.recordStart)
                    # start flying nodePath along path
                    self.startPlayback()
                # Start new task
                t = taskMgr.add(
                    self.recordTask, self.name + '-recordTask')
                t.startTime = globalClock.getFrameTime()
        else:
            self.markingNode.removeNode() # Hide the marker in the end of recording
            if self.samplingMode == 'Continuous':
                # Kill old task
                taskMgr.remove(self.name + '-recordTask')
                if ((self.recordingType.get() == 'Refine') or
                    (self.recordingType.get() == 'Extend')):
                    # Reparent node path back to parent
                    self.nodePath.wrtReparentTo(self.nodePathParent)
                    # Restore playback Node Path
                    self.setPlaybackNodePath(self.oldPlaybackNodePath)
            else:
                # Add last point
                self.addKeyframe(0)
                # Reset sampling mode
                self.setSamplingMode('Continuous')
                self.enableKeyframeButton()
            # Clean up after refine or extend
            if ((self.recordingType.get() == 'Refine') or
                (self.recordingType.get() == 'Extend')):
                # Merge prePoints, pointSet, postPoints
                self.mergePoints()
                # Clear out pre and post list
                self.prePoints = []
                self.postPoints = []
                # Reset recording mode
                self.setNewCurveMode()
            # Compute curve
            self.computeCurves()
def recordTask(self, state):
# Record raw data point
time = self.recordStart + (
globalClock.getFrameTime() - state.startTime)
self.recordPoint(time)
return Task.cont
    def addKeyframe(self, fToggleRecord = 1):
        """Add a keyframe sample.  If not yet recording (and fToggleRecord
        is true), switch to Keyframe mode and start recording, which also
        captures the first point.  Otherwise record a point whose time
        advances by the distance moved (or by 1 second if stationary)."""
        # Make sure we're in a recording mode!
        if (fToggleRecord and
            (not self.getVariable('Recording', 'Record').get())):
            # Set sampling mode
            self.setSamplingMode('Keyframe')
            # This will automatically add the first point
            self.toggleRecordVar()
        else:
            # Use distance as a time
            pos = self.nodePath.getPos(self.nodePathParent)
            deltaPos = Vec3(pos - self.lastPos).length()
            if deltaPos != 0:
                # If we've moved at all, use delta Pos as time
                self.deltaTime = self.deltaTime + deltaPos
            else:
                # Otherwise add one second
                self.deltaTime = self.deltaTime + 1.0
            # Record point at new time
            self.recordPoint(self.recordStart + self.deltaTime)
            # Update last pos
            self.lastPos.assign(pos)
def easeInOut(self, t):
x = t * t
return (3 * x) - (2 * t * x)
    def setPreRecordFunc(self, func):
        """Install a callback (given by name) to run before each sample.

        NOTE(review): this eval()s the given string to resolve the
        function object — acceptable only for trusted, interactive tool
        input; never feed it untrusted data.
        """
        # Note: If func is one defined at command prompt, need to set
        # __builtins__.func = func at command line
        self.preRecordFunc = eval(func)
        # Update widget to reflect new value
        self.getVariable('Recording', 'PRF Active').set(1)
    def recordPoint(self, time):
        """Sample the node path's pos/hpr at the given time and store it.

        During Refine/Extend recording, samples near the control-region
        boundaries are blended with the playback node path: positions are
        lerped with an ease-in/out weight and rotations slerped, so the
        newly recorded data joins the existing curve smoothly. The sample
        is appended to self.pointSet and fed to the curve fitter; in
        Keyframe mode the on-screen trace is redrawn immediately.
        """
        # Call user define callback before recording point
        if (self.getVariable('Recording', 'PRF Active').get() and
            (self.preRecordFunc != None)):
            self.preRecordFunc()
        # Get point
        pos = self.nodePath.getPos(self.nodePathParent)
        hpr = self.nodePath.getHpr(self.nodePathParent)
        qNP = Quat()
        qNP.setHpr(hpr)
        # Blend between recordNodePath and self.nodePath
        if ((self.recordingType.get() == 'Refine') or
            (self.recordingType.get() == 'Extend')):
            # Lead-in: before controlStart, blend FROM the playback path
            # TO the live node (t goes 0 -> 1 across the lead-in region).
            if ((time < self.controlStart) and
                ((self.controlStart - self.recordStart) != 0.0)):
                rPos = self.playbackNodePath.getPos(self.nodePathParent)
                rHpr = self.playbackNodePath.getHpr(self.nodePathParent)
                qR = Quat()
                qR.setHpr(rHpr)
                t = self.easeInOut(((time - self.recordStart)/
                                    (self.controlStart - self.recordStart)))
                # Transition between the recorded node path and the driven one
                pos = (rPos * (1 - t)) + (pos * t)
                q = qSlerp(qR, qNP, t)
                hpr.assign(q.getHpr())
            # Lead-out (Refine only): after controlStop, blend back FROM
            # the live node TO the playback path.
            elif ((self.recordingType.get() == 'Refine') and
                  (time > self.controlStop) and
                  ((self.recordStop - self.controlStop) != 0.0)):
                rPos = self.playbackNodePath.getPos(self.nodePathParent)
                rHpr = self.playbackNodePath.getHpr(self.nodePathParent)
                qR = Quat()
                qR.setHpr(rHpr)
                t = self.easeInOut(((time - self.controlStop)/
                                    (self.recordStop - self.controlStop)))
                # Transition between the recorded node path and the driven one
                pos = (pos * (1 - t)) + (rPos * t)
                q = qSlerp(qNP, qR, t)
                hpr.assign(q.getHpr())
        # Add it to the point set
        self.pointSet.append([time, pos, hpr])
        # Add it to the curve fitters
        self.curveFitter.addXyzHpr(time, pos, hpr)
        # Update trace now if recording keyframes
        if (self.samplingMode == 'Keyframe'):
            self.trace.reset()
            for t, p, h in self.pointSet:
                self.trace.drawTo(p[0], p[1], p[2])
            self.trace.create()
    def computeCurves(self):
        """Fit NURBS curves to the collected samples and redraw them.

        No-op (with a console message) when no samples exist. Sorts and
        wrap-adjusts the samples, computes tangents, builds the curve
        collection, draws it, and refreshes the dependent widgets.
        """
        # Check to make sure curve fitters have points
        if (self.curveFitter.getNumSamples() == 0):
            print 'MopathRecorder.computeCurves: Must define curve first'
            return
        # Create curves
        # XYZ
        self.curveFitter.sortPoints()
        self.curveFitter.wrapHpr()
        self.curveFitter.computeTangents(1)
        # This is really a collection
        self.curveCollection = self.curveFitter.makeNurbs()
        self.nurbsCurveDrawer.setCurves(self.curveCollection)
        self.nurbsCurveDrawer.draw()
        # Update widget based on new curve
        self.updateWidgets()
    def initTrace(self):
        """Reset the trace line segments and show them under the parent."""
        self.trace.reset()
        # Put trace line segs under node path's parent
        self.trace.reparentTo(self.nodePathParent)
        # Show it
        self.trace.show()
    def updateWidgets(self):
        """Re-range all sliders/entries that depend on the current curve.

        Uses the curve collection's max T for the playback, refine,
        extend and crop controls, and the fitter's sample count for the
        resample controls. fAdjustingValues suppresses the widgets'
        change callbacks while we push values in.
        """
        if not self.curveCollection:
            return
        self.fAdjustingValues = 1
        # Widgets depending on max T
        maxT = self.curveCollection.getMaxT()
        maxT_text = '%0.2f' % maxT
        # Playback controls
        # NOTE(review): 'max' is configured with the formatted string
        # here but with the float elsewhere — confirm both are accepted.
        self.getWidget('Playback', 'Time').configure(max = maxT_text)
        self.getVariable('Resample', 'Path Duration').set(maxT_text)
        # Refine widgets
        widget = self.getWidget('Refine Page', 'Refine From')
        widget.configure(max = maxT)
        widget.set(0.0)
        widget = self.getWidget('Refine Page', 'Control Start')
        widget.configure(max = maxT)
        widget.set(0.0)
        widget = self.getWidget('Refine Page', 'Control Stop')
        widget.configure(max = maxT)
        widget.set(float(maxT))
        widget = self.getWidget('Refine Page', 'Refine To')
        widget.configure(max = maxT)
        widget.set(float(maxT))
        # Extend widgets
        widget = self.getWidget('Extend Page', 'Extend From')
        widget.configure(max = maxT)
        widget.set(float(0.0))
        widget = self.getWidget('Extend Page', 'Control Start')
        widget.configure(max = maxT)
        widget.set(float(0.0))
        # Crop widgets
        widget = self.getWidget('Crop Page', 'Crop From')
        widget.configure(max = maxT)
        widget.set(float(0.0))
        widget = self.getWidget('Crop Page', 'Crop To')
        widget.configure(max = maxT)
        widget.set(float(maxT))
        self.maxT = float(maxT)
        # Widgets depending on number of samples
        numSamples = self.curveFitter.getNumSamples()
        widget = self.getWidget('Resample', 'Points Between Samples')
        widget.configure(max=numSamples)
        widget = self.getWidget('Resample', 'Num. Samples')
        widget.configure(max = 4 * numSamples)
        widget.set(numSamples, 0)
        self.fAdjustingValues = 0
    def selectNodePathNamed(self, name):
        """Resolve a combo-box name to a NodePath and make it active.

        Accepts the special names 'init' (the original node path) and
        'selected' (SEditor's current selection); otherwise looks the
        name up in the dictionary and, failing that, tries to eval() it
        as a Python expression yielding a NodePath. 'widget' and
        'marker' trigger side effects (wrt bookkeeping / marker display).

        NOTE(review): eval() on a user-typed entry — trusted interactive
        tool input only.
        """
        nodePath = None
        if name == 'init':
            nodePath = self.nodePath
            # Add Combo box entry for the initial node path
            self.addNodePath(nodePath)
        elif name == 'selected':
            nodePath = SEditor.selected.last
            # Add Combo box entry for this selected object
            self.addNodePath(nodePath)
        else:
            nodePath = self.nodePathDict.get(name, None)
            if (nodePath == None):
                # See if this evaluates into a node path
                try:
                    nodePath = eval(name)
                    if isinstance(nodePath, NodePath):
                        self.addNodePath(nodePath)
                    else:
                        # Good eval but not a node path, give up
                        nodePath = None
                except:
                    # Bogus eval
                    nodePath = None
                    # Clear bogus entry from listbox
                    listbox = self.nodePathMenu.component('scrolledlist')
                    listbox.setlist(self.nodePathNames)
            else:
                if name == 'widget':
                    # Record relationship between selected nodes and widget
                    SEditor.selected.getWrtAll()
                if name == 'marker':
                    self.playbackMarker.show()
                    # Initialize tangent marker position
                    tan = Point3(0)
                    if self.curveCollection != None:
                        self.curveCollection.getXyzCurve().getTangent(
                            self.playbackTime, tan)
                    self.tangentMarker.setPos(tan)
                else:
                    self.playbackMarker.hide()
        # Update active node path
        self.setNodePath(nodePath)
        messenger.send('mPath_requestCurveList',[nodePath,self.name])
        self.accept('curveListFor'+self.name, self.addCurvesFromNodepath)
    def setNodePath(self, nodePath):
        """Set the active (and playback) node path; flash the entry pink
        when nodePath is None to signal an invalid selection."""
        self.playbackNodePath = self.nodePath = nodePath
        if self.nodePath:
            # Record nopath's parent
            self.nodePathParent = self.nodePath.getParent()
            # Put curve drawer under record node path's parent
            self.curveNodePath.reparentTo(self.nodePathParent)
            # Set entry color
            self.nodePathMenuEntry.configure(
                background = self.nodePathMenuBG)
        else:
            # Flash entry
            self.nodePathMenuEntry.configure(background = 'Pink')
    def setPlaybackNodePath(self, nodePath):
        """Set the node path driven during playback (may differ from the
        node path being recorded, e.g. while refining)."""
        self.playbackNodePath = nodePath
    def addNodePath(self, nodePath):
        """Register nodePath in the node-path combo box and dictionary."""
        self.addNodePathToDict(nodePath, self.nodePathNames,
                               self.nodePathMenu, self.nodePathDict)
    def addNodePathToDict(self, nodePath, names, menu, dict):
        """Add nodePath under a unique name to the given combo box and
        dictionary, then select it.

        Well-known names ('mopathRecorderTempCS', 'widget', 'camera',
        'marker') are used verbatim as keys.

        NOTE(review): Python 2 only (`dict.has_key`); also shadows the
        builtin `dict` with a parameter name.
        """
        if not nodePath:
            return
        # Get node path's name
        name = nodePath.getName()
        if name in ['mopathRecorderTempCS', 'widget', 'camera', 'marker']:
            dictName = name
        else:
            # Generate a unique name for the dict
            dictName = name # + '-' + `nodePath.id()`
        if not dict.has_key(dictName):
            # Update combo box to include new item
            names.append(dictName)
            listbox = menu.component('scrolledlist')
            listbox.setlist(names)
            # Add new item to dictionary
            dict[dictName] = nodePath
        menu.selectitem(dictName)
    def setLoopPlayback(self):
        """Sync self.loopPlayback with the 'Loop' checkbutton variable."""
        self.loopPlayback = self.getVariable('Playback', 'Loop').get()
def playbackGoTo(self, time):
if self.curveCollection == None:
return
self.playbackTime = CLAMP(time, 0.0, self.maxT)
if self.curveCollection != None:
pos = Point3(0)
hpr = Point3(0)
self.curveCollection.evaluate(self.playbackTime, pos, hpr)
self.playbackNodePath.setPosHpr(self.nodePathParent, pos, hpr)
    def startPlayback(self):
        """Start (or restart) the playback task from self.playbackTime.

        Stops any running playback first, checks the 'Play' button, and
        seeds the new task with the current time so playbackTask can
        compute frame deltas. No-op when no curve exists.
        """
        if self.curveCollection == None:
            return
        # Kill any existing tasks
        self.stopPlayback()
        # Make sure checkbutton is set
        self.getVariable('Playback', 'Play').set(1)
        # Start new playback task
        t = taskMgr.add(
            self.playbackTask, self.name + '-playbackTask')
        t.currentTime = self.playbackTime
        t.lastTime = globalClock.getFrameTime()
    def setSpeedScale(self, value):
        """Set the speed slider from a linear factor (slider is log10)."""
        self.speedScale.set(math.log10(value))
    def setPlaybackSF(self, value):
        """Set the playback speed factor from the slider's log10 value
        and mirror it into the speed readout variable."""
        self.playbackSF = pow(10.0, float(value))
        self.speedVar.set('%0.2f' % self.playbackSF)
    def playbackTask(self, state):
        """Per-frame playback task: advance the playback time slider.

        Advances by the scaled frame delta (wrapping when looping).
        Terminates early when: refining and past recordStop (also stops
        recording), not looping and past maxT, or extending and past
        controlStart — in each case snapping the time widget to the
        boundary first. Otherwise sets the widget to the new time (which
        drives playbackGoTo via its callback) and continues.
        """
        time = globalClock.getFrameTime()
        dTime = self.playbackSF * (time - state.lastTime)
        state.lastTime = time
        if self.loopPlayback:
            cTime = (state.currentTime + dTime) % self.maxT
        else:
            cTime = state.currentTime + dTime
        # Stop task if not looping and at end of curve
        # Or if refining curve and past recordStop
        if ((self.recordingType.get() == 'Refine') and
            (cTime > self.recordStop)):
            # Go to recordStop
            self.getWidget('Playback', 'Time').set(self.recordStop)
            # Then stop playback
            self.stopPlayback()
            # Also kill record task
            self.toggleRecordVar()
            return Task.done
        elif ((self.loopPlayback == 0) and (cTime > self.maxT)):
            # Go to maxT
            self.getWidget('Playback', 'Time').set(self.maxT)
            # Then stop playback
            self.stopPlayback()
            return Task.done
        elif ((self.recordingType.get() == 'Extend') and
              (cTime > self.controlStart)):
            # Go to final point
            self.getWidget('Playback', 'Time').set(self.controlStart)
            # Stop playback
            self.stopPlayback()
            return Task.done
        # Otherwise go to specified time and continue
        self.getWidget('Playback', 'Time').set(cTime)
        state.currentTime = cTime
        return Task.cont
    def stopPlayback(self):
        """Uncheck the 'Play' button and remove the playback task."""
        self.getVariable('Playback', 'Play').set(0)
        taskMgr.remove(self.name + '-playbackTask')
    def jumpToStartOfPlayback(self):
        """Stop playback and rewind the time slider to 0."""
        self.stopPlayback()
        self.getWidget('Playback', 'Time').set(0.0)
    def jumpToEndOfPlayback(self):
        """Stop playback and move the time slider to the curve's end
        (only when a curve collection exists)."""
        self.stopPlayback()
        if self.curveCollection != None:
            self.getWidget('Playback', 'Time').set(self.maxT)
    def startStopPlayback(self):
        """Toggle playback based on the 'Play' checkbutton state."""
        if self.getVariable('Playback', 'Play').get():
            self.startPlayback()
        else:
            self.stopPlayback()
    def setDesampleFrequency(self, frequency):
        """Record the keep-every-Nth frequency used by desampleCurve."""
        self.desampleFrequency = frequency
    def desampleCurve(self):
        """Thin the fitter's samples by self.desampleFrequency, then
        refit the curve and refresh the point set.

        Destructive: points are permanently deleted from the fitter.
        """
        if (self.curveFitter.getNumSamples() == 0):
            print 'MopathRecorder.desampleCurve: Must define curve first'
            return
        # NOTE: This is destructive, points will be deleted from curve fitter
        self.curveFitter.desample(self.desampleFrequency)
        # Compute new curve based on desampled data
        self.computeCurves()
        # Get point set from the curve fitter
        self.extractPointSetFromCurveFitter()
    def setNumSamples(self, numSamples):
        """Record the sample count used by sampleCurve (coerced to int)."""
        self.numSamples = int(numSamples)
    def sampleCurve(self, fCompute = 1, curveName = None):
        """Resample the existing curve into self.numSamples points.

        Resets the fitter, samples the curve collection, optionally
        recomputes the curves (fCompute), and rebuilds the point set
        (stored under curveName when given).
        """
        if self.curveCollection == None:
            print 'MopathRecorder.sampleCurve: Must define curve first'
            return
        # Reset curve fitters
        self.curveFitter.reset()
        # Sample curve using specified number of samples
        self.curveFitter.sample(self.curveCollection, self.numSamples)
        if fCompute:
            # Now recompute curves
            self.computeCurves()
        # Get point set from the curve fitter
        self.extractPointSetFromCurveFitter(curveName)
    def makeEven(self):
        """Reparameterize the curve for constant velocity and refresh
        the point set."""
        # Note: segments_per_unit = 2 seems to give a good fit
        self.curveCollection.makeEven(self.maxT, 2)
        # Get point set from curve
        self.extractPointSetFromCurveCollection()
    def faceForward(self):
        """Re-orient the curve's hpr to face along the direction of
        travel and refresh the point set."""
        # Note: segments_per_unit = 2 seems to give a good fit
        self.curveCollection.faceForward(2)
        # Get point set from curve
        self.extractPointSetFromCurveCollection()
    def setPathDuration(self, event):
        """Entry callback: rescale the path to the duration typed into
        the 'Path Duration' field."""
        newMaxT = float(self.getWidget('Resample', 'Path Duration').get())
        self.setPathDurationTo(newMaxT)
    def setPathDurationTo(self, newMaxT):
        """Uniformly rescale the curve and point-set times to newMaxT.

        Scales the curve collection's parameter range, then rebuilds the
        point set and curve fitter with every sample time multiplied by
        newMaxT / maxT. Positions and orientations are unchanged.
        """
        # Compute scale factor
        sf = newMaxT/self.maxT
        # Scale curve collection
        self.curveCollection.resetMaxT(newMaxT)
        # Scale point set
        # Save handle to old point set
        oldPointSet = self.pointSet
        # Create new point set
        self.createNewPointSet()
        # Reset curve fitters
        self.curveFitter.reset()
        # Now scale values
        for time, pos, hpr in oldPointSet:
            newTime = time * sf
            # Update point set
            self.pointSet.append([newTime, Point3(pos), Point3(hpr)])
            # Add it to the curve fitters
            self.curveFitter.addXyzHpr(newTime, pos, hpr)
        # Update widgets
        self.updateWidgets()
        # Compute curve
        #self.computeCurves()
    def setRecordStart(self,value):
        """Set the record-start time and keep dependent widgets in sync.

        Clamps the control-start/stop and refine-to widgets so they never
        precede the new record start, and moves the playback time there.
        fAdjustingValues guards against callback recursion.
        """
        self.recordStart = value
        # Someone else is adjusting values, let them take care of it
        if self.fAdjustingValues:
            return
        self.fAdjustingValues = 1
        # Adjust refine widgets
        # Make sure we're in sync
        self.getWidget('Refine Page', 'Refine From').set(
            self.recordStart)
        self.getWidget('Extend Page', 'Extend From').set(
            self.recordStart)
        # Check bounds
        if self.recordStart > self.controlStart:
            self.getWidget('Refine Page', 'Control Start').set(
                self.recordStart)
            self.getWidget('Extend Page', 'Control Start').set(
                self.recordStart)
        if self.recordStart > self.controlStop:
            self.getWidget('Refine Page', 'Control Stop').set(
                self.recordStart)
        if self.recordStart > self.recordStop:
            self.getWidget('Refine Page', 'Refine To').set(self.recordStart)
        # Move playback node path to specified time
        self.getWidget('Playback', 'Time').set(value)
        self.fAdjustingValues = 0
def getPrePoints(self, type = 'Refine'):
# Switch to appropriate recording type
self.setRecordingType(type)
# Reset prePoints
self.prePoints = []
# See if we need to save any points before recordStart
for i in range(len(self.pointSet)):
# Have we passed recordStart?
if self.recordStart < self.pointSet[i][0]:
# Get a copy of the points prior to recordStart
self.prePoints = self.pointSet[:i-1]
break
    def setControlStart(self, value):
        """Set the control-region start time and keep widgets in sync.

        Mirrors the value onto both the Refine and Extend pages, clamps
        the neighbouring range widgets so ordering stays
        recordStart <= controlStart <= controlStop <= recordStop, and
        moves the playback time there. fAdjustingValues guards against
        callback recursion.
        """
        self.controlStart = value
        # Someone else is adjusting values, let them take care of it
        if self.fAdjustingValues:
            return
        self.fAdjustingValues = 1
        # Adjust refine widgets
        # Make sure both pages are in sync
        self.getWidget('Refine Page', 'Control Start').set(
            self.controlStart)
        self.getWidget('Extend Page', 'Control Start').set(
            self.controlStart)
        # Check bounds on other widgets
        if self.controlStart < self.recordStart:
            self.getWidget('Refine Page', 'Refine From').set(
                self.controlStart)
            self.getWidget('Extend Page', 'Extend From').set(
                self.controlStart)
        if self.controlStart > self.controlStop:
            self.getWidget('Refine Page', 'Control Stop').set(
                self.controlStart)
        if self.controlStart > self.recordStop:
            self.getWidget('Refine Page', 'Refine To').set(
                self.controlStart)
        # Move playback node path to specified time
        self.getWidget('Playback', 'Time').set(value)
        self.fAdjustingValues = 0
    def setControlStop(self, value):
        """Set the control-region stop time, clamping the neighbouring
        range widgets to preserve ordering, and move the playback time.
        fAdjustingValues guards against callback recursion."""
        self.controlStop = value
        # Someone else is adjusting values, let them take care of it
        if self.fAdjustingValues:
            return
        self.fAdjustingValues = 1
        if self.controlStop < self.recordStart:
            self.getWidget('Refine Page', 'Refine From').set(
                self.controlStop)
        if self.controlStop < self.controlStart:
            self.getWidget('Refine Page', 'Control Start').set(
                self.controlStop)
        if self.controlStop > self.recordStop:
            self.getWidget('Refine Page', 'Refine To').set(
                self.controlStop)
        # Move playback node path to specified time
        self.getWidget('Playback', 'Time').set(value)
        self.fAdjustingValues = 0
    def setRefineStop(self, value):
        """Set the record-stop ('Refine To') time, clamping the other
        refine-range widgets down to it when needed, and move the
        playback time. fAdjustingValues guards against recursion."""
        self.recordStop = value
        # Someone else is adjusting values, let them take care of it
        if self.fAdjustingValues:
            return
        self.fAdjustingValues = 1
        if self.recordStop < self.recordStart:
            self.getWidget('Refine Page', 'Refine From').set(
                self.recordStop)
        if self.recordStop < self.controlStart:
            self.getWidget('Refine Page', 'Control Start').set(
                self.recordStop)
        if self.recordStop < self.controlStop:
            self.getWidget('Refine Page', 'Control Stop').set(
                self.recordStop)
        # Move playback node path to specified time
        self.getWidget('Playback', 'Time').set(value)
        self.fAdjustingValues = 0
    def getPostPoints(self):
        """Save a copy of the points recorded after self.recordStop
        (into self.postPoints) and switch to refine mode, so
        mergePoints() can splice them back in after re-recording."""
        # Set flag so we know to do a refine pass
        self.setRefineMode()
        # Reset postPoints
        self.postPoints = []
        # See if we need to save any points after recordStop
        for i in range(len(self.pointSet)):
            # Have we reached recordStop?
            if self.recordStop < self.pointSet[i][0]:
                # Get a copy of the points after recordStop
                self.postPoints = self.pointSet[i:]
                break
    def mergePoints(self):
        """Splice saved prePoints/postPoints around the new point set.

        Prepends prePoints unchanged; appends postPoints with their
        times shifted so they continue from the new set's end time.
        All merged points are also re-fed to the curve fitter.

        NOTE(review): assumes pointSet is non-empty when postPoints
        exist — confirm recording always produced at least one point.
        """
        # prepend pre points
        self.pointSet[0:0] = self.prePoints
        for time, pos, hpr in self.prePoints:
            # Add it to the curve fitters
            self.curveFitter.addXyzHpr(time, pos, hpr)
        # And post points
        # What is end time of pointSet?
        endTime = self.pointSet[-1][0]
        for time, pos, hpr in self.postPoints:
            adjustedTime = endTime + (time - self.recordStop)
            # Add it to point set
            self.pointSet.append([adjustedTime, pos, hpr])
            # Add it to the curve fitters
            self.curveFitter.addXyzHpr(adjustedTime, pos, hpr)
    def setCropFrom(self,value):
        """Set the crop start time, pushing 'Crop To' forward if needed,
        and move the playback time. fAdjustingValues guards recursion."""
        self.cropFrom = value
        # Someone else is adjusting values, let them take care of it
        if self.fAdjustingValues:
            return
        self.fAdjustingValues = 1
        if self.cropFrom > self.cropTo:
            self.getWidget('Crop Page', 'Crop To').set(
                self.cropFrom)
        # Move playback node path to specified time
        self.getWidget('Playback', 'Time').set(value)
        self.fAdjustingValues = 0
    def setCropTo(self,value):
        """Set the crop end time, pulling 'Crop From' back if needed,
        and move the playback time. fAdjustingValues guards recursion."""
        self.cropTo = value
        # Someone else is adjusting values, let them take care of it
        if self.fAdjustingValues:
            return
        self.fAdjustingValues = 1
        if self.cropTo < self.cropFrom:
            self.getWidget('Crop Page', 'Crop From').set(
                self.cropTo)
        # Move playback node path to specified time
        self.getWidget('Playback', 'Time').set(value)
        self.fAdjustingValues = 0
    def cropCurve(self):
        """Trim the point set / curve to the [cropFrom, cropTo] window.

        Builds a new point set whose times are shifted so cropFrom maps
        to 0: the exact curve poses at cropFrom and cropTo become the new
        endpoints, interior samples are copied with shifted times, and
        the curves are recomputed from the refilled fitter.
        """
        if self.pointSet == None:
            print 'Empty Point Set'
            return
        # Keep handle on old points
        oldPoints = self.pointSet
        # Create new point set
        self.createNewPointSet()
        # Copy over points between from/to
        # Reset curve fitters
        self.curveFitter.reset()
        # Add start point
        pos = Point3(0)
        hpr = Point3(0)
        self.curveCollection.evaluate(self.cropFrom, pos, hpr)
        self.curveFitter.addXyzHpr(0.0, pos, hpr)
        # Get points within bounds
        for time, pos, hpr in oldPoints:
            # Is it within the time?
            if ((time > self.cropFrom) and
                (time < self.cropTo)):
                # Add it to the curve fitters
                t = time - self.cropFrom
                self.curveFitter.addXyzHpr(t, pos, hpr)
                # And the point set
                self.pointSet.append([t, pos, hpr])
        # Add last point
        pos = Vec3(0)
        hpr = Vec3(0)
        self.curveCollection.evaluate(self.cropTo, pos, hpr)
        self.curveFitter.addXyzHpr(self.cropTo - self.cropFrom, pos, hpr)
        # Compute curve
        self.computeCurves()
    def loadCurveFromFile(self):
        """Prompt for an egg/bam file and load its curves.

        The file dialog starts in the first model-path directory (or the
        current directory). On success the recorder is reset, the curve
        collection rebuilt from the loaded model, drawn, and a point set
        extracted from it; an empty collection resets the recorder again.
        """
        # Use first directory in model path
        mPath = getModelPath()
        if mPath.getNumDirectories() > 0:
            if `mPath.getDirectory(0)` == '.':
                path = '.'
            else:
                path = mPath.getDirectory(0).toOsSpecific()
        else:
            path = '.'
        if not os.path.isdir(path):
            print 'MopathRecorder Info: Empty Model Path!'
            print 'Using current directory'
            path = '.'
        mopathFilename = askopenfilename(
            defaultextension = '.egg',
            filetypes = (('Egg Files', '*.egg'),
                         ('Bam Files', '*.bam'),
                         ('All files', '*')),
            initialdir = path,
            title = 'Load Nurbs Curve',
            parent = self.parent)
        if mopathFilename:
            self.reset()
            nodePath = loader.loadModel(
                Filename.fromOsSpecific(mopathFilename))
            self.curveCollection = ParametricCurveCollection()
            # MRM: Add error check
            self.curveCollection.addCurves(nodePath.node())
            nodePath.removeNode()
            if self.curveCollection:
                # Draw the curve
                self.nurbsCurveDrawer.setCurves(self.curveCollection)
                self.nurbsCurveDrawer.draw()
                # Save a pointset for this curve
                self.extractPointSetFromCurveCollection()
            else:
                self.reset()
    def saveCurveToFile(self):
        """Prompt for a filename and write the curve collection as egg.

        The save dialog starts in the first model-path directory (or the
        current directory); nothing is written if the dialog is
        cancelled.
        """
        # Use first directory in model path
        mPath = getModelPath()
        if mPath.getNumDirectories() > 0:
            if `mPath.getDirectory(0)` == '.':
                path = '.'
            else:
                path = mPath.getDirectory(0).toOsSpecific()
        else:
            path = '.'
        if not os.path.isdir(path):
            print 'MopathRecorder Info: Empty Model Path!'
            print 'Using current directory'
            path = '.'
        mopathFilename = asksaveasfilename(
            defaultextension = '.egg',
            filetypes = (('Egg Files', '*.egg'),
                         ('Bam Files', '*.bam'),
                         ('All files', '*')),
            initialdir = path,
            title = 'Save Nurbs Curve as',
            parent = self.parent)
        if mopathFilename:
            self.curveCollection.writeEgg(Filename(mopathFilename))
    def followTerrain(self, height = 1.0):
        """Snap the node path to `height` units above the geometry
        directly below it, using the instance's collision ray picker."""
        self.iRay.rayCollisionNodePath.reparentTo(self.nodePath)
        entry = self.iRay.pickGeom3D()
        if entry:
            fromNodePath = entry.getFromNodePath()
            hitPtDist = Vec3(entry.getSurfacePoint(fromNodePath))
            # NOTE(review): setZ is given a Vec3-derived value here —
            # presumably relies on its length/Z semantics; confirm.
            self.nodePath.setZ(self.nodePath, height - hitPtDist)
        self.iRay.rayCollisionNodePath.reparentTo(self.recorderNodePath)
## WIDGET UTILITY FUNCTIONS ##
    def addWidget(self, widget, category, text):
        """Register a widget under the 'category-text' key."""
        self.widgetDict[category + '-' + text] = widget
    def getWidget(self, category, text):
        """Look up a widget by its 'category-text' key (KeyError if absent)."""
        return self.widgetDict[category + '-' + text]
    def getVariable(self, category, text):
        """Look up a Tk variable by its 'category-text' key (KeyError if absent)."""
        return self.variableDict[category + '-' + text]
    def createLabeledEntry(self, parent, category, text, balloonHelp,
                           value = '', command = None,
                           relief = 'sunken', side = Tkinter.LEFT,
                           expand = 1, width = 12):
        """Create a label + entry pair in a packed frame.

        Registers the entry and its StringVar under 'category-text'
        (and the label under 'category-text-Label'); binds `command`
        to <Return> when given. Returns (frame, label, entry).
        """
        frame = Frame(parent)
        variable = StringVar()
        variable.set(value)
        label = Label(frame, text = text)
        label.pack(side = Tkinter.LEFT, fill = Tkinter.X)
        self.bind(label, balloonHelp)
        self.widgetDict[category + '-' + text + '-Label'] = label
        entry = Entry(frame, width = width, relief = relief,
                      textvariable = variable)
        entry.pack(side = Tkinter.LEFT, fill = Tkinter.X, expand = expand)
        self.bind(entry, balloonHelp)
        self.widgetDict[category + '-' + text] = entry
        self.variableDict[category + '-' + text] = variable
        if command:
            entry.bind('<Return>', command)
        frame.pack(side = side, fill = Tkinter.X, expand = expand)
        return (frame, label, entry)
    def createButton(self, parent, category, text, balloonHelp, command,
                     side = 'top', expand = 0, fill = Tkinter.X):
        """Create, pack, help-bind and register a Button; returns it."""
        widget = Button(parent, text = text)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(side = side, fill = fill, expand = expand)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createCheckbutton(self, parent, category, text,
                          balloonHelp, command, initialState,
                          side = 'top', fill = Tkinter.X, expand = 0):
        """Create, pack and register a Checkbutton plus its BooleanVar
        (initialised to initialState); returns the widget.

        NOTE(review): the local name `bool` shadows the builtin.
        """
        bool = BooleanVar()
        bool.set(initialState)
        widget = Checkbutton(parent, text = text, anchor = Tkinter.W,
                         variable = bool)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(side = side, fill = fill, expand = expand)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        self.variableDict[category + '-' + text] = bool
        return widget
    def createRadiobutton(self, parent, side, category, text,
                          balloonHelp, variable, value,
                          command = None, fill = Tkinter.X, expand = 0):
        """Create, pack and register a Radiobutton bound to the shared
        `variable` with the given `value`; returns the widget."""
        widget = Radiobutton(parent, text = text, anchor = Tkinter.W,
                             variable = variable, value = value)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(side = side, fill = fill, expand = expand)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createFloater(self, parent, category, text, balloonHelp,
                      command = None, min = 0.0, resolution = None,
                      maxVelocity = 10.0, **kw):
        """Create, pack and register a Floater widget; extra keyword
        options pass straight through. Returns the widget.
        (Python 2 style: uses apply(); shadows builtin `min`.)"""
        kw['text'] = text
        kw['min'] = min
        kw['maxVelocity'] = maxVelocity
        kw['resolution'] = resolution
        widget = apply(Floater, (parent,), kw)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(fill = Tkinter.X)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createAngleDial(self, parent, category, text, balloonHelp,
                        command = None, **kw):
        """Create, pack and register an AngleDial widget; returns it."""
        kw['text'] = text
        widget = apply(AngleDial,(parent,), kw)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(fill = Tkinter.X)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createSlider(self, parent, category, text, balloonHelp,
                     command = None, min = 0.0, max = 1.0,
                     resolution = None,
                     side = Tkinter.TOP, fill = Tkinter.X, expand = 1, **kw):
        """Create, pack and register a Slider widget with the given
        range; returns it. (Shadows builtins `min`/`max`.)"""
        kw['text'] = text
        kw['min'] = min
        kw['max'] = max
        kw['resolution'] = resolution
        #widget = apply(EntryScale, (parent,), kw)
        widget = apply(Slider, (parent,), kw)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(side = side, fill = fill, expand = expand)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createEntryScale(self, parent, category, text, balloonHelp,
                         command = None, min = 0.0, max = 1.0,
                         resolution = None,
                         side = Tkinter.TOP, fill = Tkinter.X, expand = 1, **kw):
        """Create, pack and register an EntryScale widget with the given
        range; returns it. (Shadows builtins `min`/`max`.)"""
        kw['text'] = text
        kw['min'] = min
        kw['max'] = max
        kw['resolution'] = resolution
        widget = apply(EntryScale, (parent,), kw)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(side = side, fill = fill, expand = expand)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createVector2Entry(self, parent, category, text, balloonHelp,
                           command = None, **kw):
        """Create, pack and register a Vector2Entry widget; returns it."""
        # Set label's text
        kw['text'] = text
        widget = apply(Vector2Entry, (parent,), kw)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(fill = Tkinter.X)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createVector3Entry(self, parent, category, text, balloonHelp,
                           command = None, **kw):
        """Create, pack and register a Vector3Entry widget; returns it."""
        # Set label's text
        kw['text'] = text
        widget = apply(Vector3Entry, (parent,), kw)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(fill = Tkinter.X)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createColorEntry(self, parent, category, text, balloonHelp,
                         command = None, **kw):
        """Create, pack and register a ColorEntry widget; returns it."""
        # Set label's text
        kw['text'] = text
        widget = apply(ColorEntry, (parent,) ,kw)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(fill = Tkinter.X)
        self.bind(widget, balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        return widget
    def createOptionMenu(self, parent, category, text, balloonHelp,
                         items, command):
        """Create, pack and register a Pmw.OptionMenu over `items`.

        Selects the first item when the list is non-empty. Registers
        both the widget and its StringVar; returns the StringVar (not
        the widget).
        """
        optionVar = StringVar()
        if len(items) > 0:
            optionVar.set(items[0])
        widget = Pmw.OptionMenu(parent, labelpos = Tkinter.W, label_text = text,
                                label_width = 12, menu_tearoff = 1,
                                menubutton_textvariable = optionVar,
                                items = items)
        # Do this after the widget so command isn't called on creation
        widget['command'] = command
        widget.pack(fill = Tkinter.X)
        self.bind(widget.component('menubutton'), balloonHelp)
        self.widgetDict[category + '-' + text] = widget
        self.variableDict[category + '-' + text] = optionVar
        return optionVar
    def createComboBox(self, parent, category, text, balloonHelp,
                       items, command, history = 0,
                       side = Tkinter.LEFT, expand = 0, fill = Tkinter.X):
        """Create, pack and register a read-only Pmw.ComboBox over
        `items`, selecting the first item when present; returns it."""
        widget = Pmw.ComboBox(parent,
                              labelpos = Tkinter.W,
                              label_text = text,
                              label_anchor = 'e',
                              label_width = 12,
                              entry_width = 16,
                              history = history,
                              scrolledlist_items = items)
        # Don't allow user to edit entryfield
        widget.configure(entryfield_entry_state = 'disabled')
        # Select first item if it exists
        if len(items) > 0:
            widget.selectitem(items[0])
        # Bind selection command
        widget['selectioncommand'] = command
        widget.pack(side = side, fill = fill, expand = expand)
        # Bind help
        self.bind(widget, balloonHelp)
        # Record widget
        self.widgetDict[category + '-' + text] = widget
        return widget
    def makeCameraWindow(self):
        """Create a small secondary camera view in the lower-right of
        the main window (new layer, display region, camera and lens)."""
        # First, we need to make a new layer on the window.
        chan = base.win.getChannel(0)
        self.cLayer = chan.makeLayer(1)
        self.layerIndex = 1
        # Display region occupies x in [0.6, 1.0], y in [0, 0.4]
        self.cDr = self.cLayer.makeDisplayRegion(0.6, 1.0, 0, 0.4)
        self.cDr.setClearDepthActive(1)
        self.cDr.setClearColorActive(1)
        self.cDr.setClearColor(Vec4(0))
        # It gets its own camera
        self.cCamera = render.attachNewNode('cCamera')
        self.cCamNode = Camera('cCam')
        self.cLens = PerspectiveLens()
        self.cLens.setFov(40,40)
        self.cLens.setNear(0.1)
        self.cLens.setFar(100.0)
        self.cCamNode.setLens(self.cLens)
        self.cCamNode.setScene(render)
        self.cCam = self.cCamera.attachNewNode(self.cCamNode)
        self.cDr.setCamera(self.cCam)
    def toggleWidgetVis(self):
        """Toggle the scene editor's manipulation-widget visibility,
        notifying listeners first so state stays consistent."""
        ## In order to make sure everything is going on right way...
        messenger.send('SEditor-ToggleWidgetVis')
        SEditor.toggleWidgetVis()
    def bindMotionPathToNode(self):
        """Open the name-the-path dialog; the actual binding happens in
        bindMotionPath() when the dialog fires 'MP_checkName'.
        Requires a curve collection to exist."""
        if self.curveCollection == None:
            print '----Error: you need to select or create a curve first!'
            return
        self.accept('MP_checkName', self.bindMotionPath)
        self.askName = namePathPanel(MopathRecorder.count)
        return
def bindMotionPath(self,name=None,test=None):
print test
self.ignore('MP_checkName')
del self.askName
self.curveCollection.getCurve(0).setName(name)
comboBox = self.getWidget('Mopath', 'Path:')
oldName = comboBox.get()
self.pointSetDict[name] = self.pointSetDict[oldName]
del self.pointSetDict[oldName]
scrolledList = comboBox.component('scrolledlist')
listbox = scrolledList.component('listbox')
names = list(listbox.get(0,'end'))
num = names.index(oldName)
names.pop(num)
names.append(name)
scrolledList.setlist(names)
comboBox.selectitem(name)
messenger.send('mPath_bindPathToNode',[self.playbackNodePath, self.curveCollection])
return
def addCurvesFromNodepath(self,curveList):
'''addCurvesFromNodepath(self,curveList)
This function will take a curveCollection list as a input.
If the list is not None, it will put the vurve back into the curve list.
else, do nothing.
'''
print curveList
self.ignore('curveListFor'+self.name)
if curveList != None:
for collection in curveList:
self.curveCollection = collection
self.extractPointSetFromCurveCollection(curveName=self.curveCollection.getCurve(0).getName())
else:
pass
return
class namePathPanel(AppShell):
    """Small modal-style AppShell dialog that asks the user to name the
    current motion path; sends 'MP_checkName' with the typed name on OK.
    """
    # Override class variables
    appname = 'Name the Path'
    frameWidth = 575
    frameHeight = 200
    usecommandarea = 0
    usestatusarea = 0
    index = 0
    def __init__(self, count, parent = None, **kw):
        """Build the dialog; `count` is accepted for the caller's
        bookkeeping (MopathRecorder.count) but is not used here."""
        INITOPT = Pmw.INITOPT
        self.id = 'Name the Path'
        self.appname = self.id
        optiondefs = (
            ('title', self.appname, None),
            )
        self.defineoptions(kw, optiondefs)
        # Initialize the superclass
        AppShell.__init__(self)
        self.parent.resizable(False,False)
        # Execute option callbacks
        self.initialiseoptions(namePathPanel)
    def createInterface(self):
        """Lay out the explanation label, the name entry and the OK button."""
        self.menuBar.destroy()
        interior = self.interior()
        mainFrame = Frame(interior)
        dataFrame = Frame(mainFrame)
        label = Label(dataFrame, text='This name will be used as a reference for this Path.',font=('MSSansSerif', 10))
        label.pack(side = Tkinter.TOP, expand = 0, fill = Tkinter.X)
        dataFrame.pack(side = Tkinter.TOP, expand = 0, fill = Tkinter.X, padx=5, pady=10)
        dataFrame = Frame(mainFrame)
        self.inputZone = Pmw.EntryField(dataFrame, labelpos='w', label_text = 'Name Selected Path: ',
                                        entry_font=('MSSansSerif', 10),
                                        label_font=('MSSansSerif', 10),
                                        validate = None,
                                        entry_width = 20)
        self.inputZone.pack(side = Tkinter.LEFT, fill=Tkinter.X,expand=0)
        self.button_ok = Button(dataFrame, text="OK", command=self.ok_press,width=10)
        self.button_ok.pack(fill=Tkinter.X,expand=0,side=Tkinter.LEFT, padx = 3)
        dataFrame.pack(side = Tkinter.TOP, expand = 0, fill = Tkinter.X, padx=10, pady=10)
        mainFrame.pack(expand = 1, fill = Tkinter.BOTH)
    def onDestroy(self, event):
        '''
        If you have open any thing, please rewrite here!
        '''
        pass
    def ok_press(self):
        """Send the typed name via 'MP_checkName' and close the dialog."""
        name = self.inputZone.getvalue()
        messenger.send('MP_checkName',[name])
        self.quit()
        return
|
bsd-3-clause
|
matthiasplappert/keras-rl
|
tests/rl/test_util.py
|
1
|
2969
|
from __future__ import division
import pytest
import numpy as np
from numpy.testing import assert_allclose
from keras.models import Model, Sequential
from keras.layers import Input, Dense, Concatenate
from keras.optimizers import SGD
import keras.backend as K
from rl.util import clone_optimizer, clone_model, huber_loss, WhiteningNormalizer
def test_clone_sequential_model():
    """Cloning a compiled Sequential model must yield identical predictions."""
    seq = Sequential()
    seq.add(Dense(8, input_shape=(3,)))
    seq.compile(optimizer='sgd', loss='mse')
    clone = clone_model(seq)
    clone.compile(optimizer='sgd', loss='mse')
    ins = np.random.random((4, 3))
    y_pred_seq = seq.predict_on_batch(ins)
    y_pred_clone = clone.predict_on_batch(ins)
    assert y_pred_seq.shape == y_pred_clone.shape
    assert_allclose(y_pred_seq, y_pred_clone)
def test_clone_graph_model():
    """Cloning a multi-input functional model must yield identical predictions."""
    in1 = Input(shape=(2,))
    in2 = Input(shape=(3,))
    x = Dense(8)(Concatenate()([in1, in2]))
    graph = Model([in1, in2], x)
    graph.compile(optimizer='sgd', loss='mse')
    clone = clone_model(graph)
    clone.compile(optimizer='sgd', loss='mse')
    ins = [np.random.random((4, 2)), np.random.random((4, 3))]
    y_pred_graph = graph.predict_on_batch(ins)
    y_pred_clone = clone.predict_on_batch(ins)
    assert y_pred_graph.shape == y_pred_clone.shape
    assert_allclose(y_pred_graph, y_pred_clone)
def test_clone_optimizer():
    """clone_optimizer must copy type and all hyperparameters of an SGD
    instance (lr, momentum, clipnorm, clipvalue)."""
    lr, momentum, clipnorm, clipvalue = np.random.random(size=4)
    optimizer = SGD(lr=lr, momentum=momentum, clipnorm=clipnorm, clipvalue=clipvalue)
    clone = clone_optimizer(optimizer)
    assert isinstance(clone, SGD)
    assert K.get_value(optimizer.lr) == K.get_value(clone.lr)
    assert K.get_value(optimizer.momentum) == K.get_value(clone.momentum)
    assert optimizer.clipnorm == clone.clipnorm
    assert optimizer.clipvalue == clone.clipvalue
def test_clone_optimizer_from_string():
    """clone_optimizer must also accept a Keras optimizer identifier string."""
    assert isinstance(clone_optimizer('sgd'), SGD)
def test_huber_loss():
    """Check huber_loss at several clip thresholds against hand-computed values."""
    y_true = np.array([1., 1.5, 2., 4.])
    y_pred = np.array([1.5, 1., 4., 2.])
    # (clip value, expected element-wise loss); with clip=inf the loss is
    # capped by the quadratic/linear switch at |error| = clip.
    cases = [
        (1., np.array([.125, .125, 1.5, 1.5])),
        (3., np.array([.125, .125, 2., 2.])),
        (np.inf, np.array([.125, .125, 2., 2.])),
    ]
    for clip, expected in cases:
        assert_allclose(K.eval(huber_loss(y_true, y_pred, clip)), expected)
def test_whitening_normalizer():
    """Incremental updates must yield exact whitening of the full sample."""
    data = np.random.normal(loc=.2, scale=2., size=(1000, 5))
    normalizer = WhiteningNormalizer(shape=(5,))
    # Feed the data in two halves to exercise the running-statistics update.
    normalizer.update(data[:500])
    normalizer.update(data[500:])
    assert_allclose(normalizer.mean, np.mean(data, axis=0))
    assert_allclose(normalizer.std, np.std(data, axis=0))

    whitened = normalizer.normalize(data)
    assert_allclose(np.mean(whitened, axis=0), np.zeros(5, dtype=normalizer.dtype), atol=1e-5)
    assert_allclose(np.std(whitened, axis=0), np.ones(5, dtype=normalizer.dtype), atol=1e-5)
    assert_allclose(normalizer.denormalize(whitened), data)
# Allow running this test module directly (outside a pytest invocation).
if __name__ == '__main__':
    pytest.main([__file__])
|
mit
|
broferek/ansible
|
test/units/modules/network/fortios/test_fortios_wireless_controller_hotspot20_anqp_ip_address_type.py
|
21
|
9225
|
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_wireless_controller_hotspot20_anqp_ip_address_type
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
    """Automatically patch the module's Connection class for every test."""
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_wireless_controller_hotspot20_anqp_ip_address_type.Connection')
    return connection_class_mock
# NOTE(review): this passes the fixture *function* (not a mocked connection
# instance) to FortiOSHandler; it appears harmless only because every test
# below patches FortiOSHandler's methods directly -- confirm intended.
fos_instance = FortiOSHandler(connection_mock)
def test_wireless_controller_hotspot20_anqp_ip_address_type_creation(mocker):
    """state=present + successful POST -> changed, no error, response passed through."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})
    args = {
        'username': 'admin',
        'state': 'present',
        'vdom': 'root',
        'wireless_controller_hotspot20_anqp_ip_address_type': {
            'ipv4_address_type': 'not-available',
            'ipv6_address_type': 'not-available',
            'name': 'default_name_5'}}
    is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_ip_address_type.fortios_wireless_controller_hotspot20(args, fos_instance)
    # Underscored playbook keys must be translated to the dashed API names.
    set_mock.assert_called_with(
        'wireless-controller.hotspot20', 'anqp-ip-address-type',
        data={'ipv4-address-type': 'not-available',
              'ipv6-address-type': 'not-available',
              'name': 'default_name_5'},
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_wireless_controller_hotspot20_anqp_ip_address_type_creation_fails(mocker):
    """state=present + failing POST (500) -> error reported, nothing changed."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})
    args = {
        'username': 'admin',
        'state': 'present',
        'vdom': 'root',
        'wireless_controller_hotspot20_anqp_ip_address_type': {
            'ipv4_address_type': 'not-available',
            'ipv6_address_type': 'not-available',
            'name': 'default_name_5'}}
    is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_ip_address_type.fortios_wireless_controller_hotspot20(args, fos_instance)
    set_mock.assert_called_with(
        'wireless-controller.hotspot20', 'anqp-ip-address-type',
        data={'ipv4-address-type': 'not-available',
              'ipv6-address-type': 'not-available',
              'name': 'default_name_5'},
        vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_wireless_controller_hotspot20_anqp_ip_address_type_removal(mocker):
    """state=absent + successful DELETE -> changed, no error."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})
    args = {
        'username': 'admin',
        'state': 'absent',
        'vdom': 'root',
        'wireless_controller_hotspot20_anqp_ip_address_type': {
            'ipv4_address_type': 'not-available',
            'ipv6_address_type': 'not-available',
            'name': 'default_name_5'}}
    is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_ip_address_type.fortios_wireless_controller_hotspot20(args, fos_instance)
    # The mkey value is derived internally; only check it was supplied.
    delete_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-ip-address-type', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
def test_wireless_controller_hotspot20_anqp_ip_address_type_deletion_fails(mocker):
    """state=absent + failing DELETE (500) -> error reported, nothing changed."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    delete_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete',
        return_value={'status': 'error', 'http_method': 'POST', 'http_status': 500})
    args = {
        'username': 'admin',
        'state': 'absent',
        'vdom': 'root',
        'wireless_controller_hotspot20_anqp_ip_address_type': {
            'ipv4_address_type': 'not-available',
            'ipv6_address_type': 'not-available',
            'name': 'default_name_5'}}
    is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_ip_address_type.fortios_wireless_controller_hotspot20(args, fos_instance)
    delete_mock.assert_called_with('wireless-controller.hotspot20', 'anqp-ip-address-type', mkey=ANY, vdom='root')
    schema_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500
def test_wireless_controller_hotspot20_anqp_ip_address_type_idempotent(mocker):
    """A 404 'already in desired state' response -> no error and no change."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'error', 'http_method': 'DELETE', 'http_status': 404})
    args = {
        'username': 'admin',
        'state': 'present',
        'vdom': 'root',
        'wireless_controller_hotspot20_anqp_ip_address_type': {
            'ipv4_address_type': 'not-available',
            'ipv6_address_type': 'not-available',
            'name': 'default_name_5'}}
    is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_ip_address_type.fortios_wireless_controller_hotspot20(args, fos_instance)
    set_mock.assert_called_with(
        'wireless-controller.hotspot20', 'anqp-ip-address-type',
        data={'ipv4-address-type': 'not-available',
              'ipv6-address-type': 'not-available',
              'name': 'default_name_5'},
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404
def test_wireless_controller_hotspot20_anqp_ip_address_type_filter_foreign_attributes(mocker):
    """Attributes not in the module schema must be dropped before the API call."""
    schema_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
    set_mock = mocker.patch(
        'ansible.module_utils.network.fortios.fortios.FortiOSHandler.set',
        return_value={'status': 'success', 'http_method': 'POST', 'http_status': 200})
    args = {
        'username': 'admin',
        'state': 'present',
        'vdom': 'root',
        'wireless_controller_hotspot20_anqp_ip_address_type': {
            'random_attribute_not_valid': 'tag',
            'ipv4_address_type': 'not-available',
            'ipv6_address_type': 'not-available',
            'name': 'default_name_5'}}
    is_error, changed, response = fortios_wireless_controller_hotspot20_anqp_ip_address_type.fortios_wireless_controller_hotspot20(args, fos_instance)
    # 'random_attribute_not_valid' must not appear in the payload.
    set_mock.assert_called_with(
        'wireless-controller.hotspot20', 'anqp-ip-address-type',
        data={'ipv4-address-type': 'not-available',
              'ipv6-address-type': 'not-available',
              'name': 'default_name_5'},
        vdom='root')
    schema_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
|
gpl-3.0
|
RefugeeMatchmaking/HackZurich
|
GAE_Playground/libs/networkx/readwrite/tests/test_p2g.py
|
91
|
1328
|
from nose.tools import assert_equal, assert_raises, assert_not_equal
import networkx as nx
import io
import tempfile
import os
from networkx.readwrite.p2g import *
from networkx.testing import *
class TestP2G:
    """Read/write round-trip tests for the p2g adjacency file format."""

    def setUp(self):
        # A 6-cycle a-b-c-d-e-f plus an isolated node 'g'; DG is the
        # directed version containing both directions of every edge.
        self.G = nx.Graph(name="test")
        e = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('d', 'e'), ('e', 'f'), ('a', 'f')]
        self.G.add_edges_from(e)
        self.G.add_node('g')
        self.DG = nx.DiGraph(self.G)

    def test_read_p2g(self):
        # Hand-written p2g document: header name, "<#nodes> <#edges>", then
        # alternating node-name / successor-index lines.
        s = b"""\
name
3 4
a
1 2
b
c
0 2
"""
        bytesIO = io.BytesIO(s)
        G = read_p2g(bytesIO)
        assert_equal(G.name, 'name')
        assert_equal(sorted(G), ['a', 'b', 'c'])
        # Self-loops ('c','c') and both directions must be preserved.
        # (Removed an unused local that recomputed the edge list.)
        assert_edges_equal(G.edges(), [('a', 'c'), ('a', 'b'), ('c', 'a'), ('c', 'c')])

    def test_write_p2g(self):
        # Writing a small path digraph must reproduce the reference document.
        s = b"""foo
3 2
1
1
2
2
3
"""
        fh = io.BytesIO()
        G = nx.DiGraph()
        G.name = 'foo'
        G.add_edges_from([(1, 2), (2, 3)])
        write_p2g(G, fh)
        fh.seek(0)
        r = fh.read()
        assert_equal(r, s)

    def test_write_read_p2g(self):
        # Round trip: writing then reading must preserve the edge set.
        fh = io.BytesIO()
        G = nx.DiGraph()
        G.name = 'foo'
        G.add_edges_from([('a', 'b'), ('b', 'c')])
        write_p2g(G, fh)
        fh.seek(0)
        H = read_p2g(fh)
        assert_edges_equal(G.edges(), H.edges())
|
mit
|
VAMDC/NodeSoftware
|
nodes/chianti/node/models.py
|
2
|
6610
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'
# into your database.
from django.db import models
from vamdctap.bibtextools import *
class Term:
    """Plain helper holding LS-coupling quantum numbers; not a Django model."""
    # NOTE(review): these are class-level (shared) defaults -- instances are
    # expected to assign their own l/s values before use.
    l = 0
    s = 0
class Species(models.Model):
    """An atomic species: element symbol, charges and InChI identifiers."""
    id = models.IntegerField(null=False, primary_key=True, blank=False)
    atomsymbol = models.CharField(max_length=6, db_column='AtomSymbol', blank=True)
    atomnuclearcharge = models.IntegerField(null=True, db_column='AtomNuclearCharge', blank=True)
    atomioncharge = models.IntegerField(null=True, db_column='AtomIonCharge', blank=True)
    inchi = models.CharField(null=False, db_column='inchi', max_length=32, blank=False)
    inchikey = models.CharField(null=False, db_column='inchikey', max_length=25, blank=False)
    class Meta:
        db_table = u'species'
class States(models.Model):
    """An atomic energy level (state) belonging to a Species."""
    def _get_statistical_weight(self):
        # g = 2J + 1, where J is the total angular momentum.
        return (2 * self.atomstatetotalangmom) + 1
    id = models.IntegerField(null=False, primary_key=True, blank=False)
    chiantiiontype = models.CharField(max_length=3, db_column='ChiantiIonType', blank=True)
    atomsymbol = models.CharField(max_length=6, db_column='AtomSymbol', blank=True)
    species = models.ForeignKey(Species, related_name='+', db_column='species')
    atomnuclearcharge = models.IntegerField(null=True, db_column='AtomNuclearCharge', blank=True)
    atomioncharge = models.IntegerField(null=True, db_column='AtomIonCharge', blank=True)
    atomstateconfigurationlabel = models.CharField(max_length=96, db_column='AtomStateConfigurationLabel', blank=True)
    atomstates = models.FloatField(null=True, db_column='AtomStateS', blank=True)
    atomstatel = models.IntegerField(null=True, db_column='AtomStateL', blank=True)
    atomstatetotalangmom = models.FloatField(null=True, db_column='AtomStateTotalAngMom', blank=True)
    parity = models.CharField(max_length=4, db_column='parity', null=False, blank=False)
    energy = models.FloatField(null=True, db_column='AtomStateEnergy', blank=True)
    energyMethod = models.CharField(max_length=4, db_column='AtomStateEnergyMethod', null=False, blank=False)
    # Read-only derived attribute: 2J + 1.
    statisticalweight = property(_get_statistical_weight)
    def allEnergies(self):
        # NOTE(review): self.energyexperimental / self.energytheoretical are
        # NOT declared as fields on this model -- presumably attached
        # elsewhere, or stale code; confirm before relying on these helpers.
        energies = []
        if self.energyexperimental:
            energies.append(self.energyexperimental)
        if self.energytheoretical:
            energies.append(self.energytheoretical)
        return energies
    def allEnergyMethods(self):
        # Parallel to allEnergies(): a method tag for each value present.
        methods = []
        if self.energyexperimental:
            methods.append("EXP")
        if self.energytheoretical:
            methods.append("THEO")
        return methods
    def sourceIds(self):
        # Chianti quotes references per species, not per state, so find the
        # species associated with this state.
        speciesId = self.species_id
        # NOTE(review): the intermediate 'sources' queryset below is unused;
        # the return statement re-runs the same filter.
        sources = Sources.objects.filter(species = speciesId)
        return Sources.objects.filter(species = speciesId).values_list('id', flat=True)
    class Meta:
        db_table = u'states'
class Components(models.Model):
    """One LS-coupling component of a state's wavefunction expansion."""
    id = models.IntegerField(db_column='id', primary_key=True)
    label = models.CharField(db_column='label', max_length=32)
    core = models.CharField(db_column='core', max_length=2, null=True)
    lsl = models.IntegerField(db_column='lsl')
    lss = models.FloatField(db_column='lss')
    class Meta:
        db_table=u'components'
class Subshells(models.Model):
    """Electron-subshell occupation: one row per (n, l) shell of a state."""
    id = models.AutoField(primary_key=True)
    # NOTE(review): a plain IntegerField rather than a ForeignKey to States --
    # presumably a raw state id; confirm against the data loader.
    state = models.IntegerField(db_column='state')
    n = models.IntegerField(db_column='n')
    l = models.IntegerField(db_column='l')  # removed stray trailing semicolon
    population = models.IntegerField(db_column='pop')  # removed stray trailing semicolon
    class Meta:
        db_table=u'subshells'
class Transitions(models.Model):
    """A radiative transition between two States of a species."""
    id = models.IntegerField(db_column='id', null=False, blank=False, primary_key=True)
    chiantiradtranstype = models.CharField(max_length=3, db_column='ChiantiRadTransType', blank=True)
    atomsymbol = models.CharField(max_length=24, db_column='AtomSymbol', blank=True)
    finalstateindex = models.ForeignKey(States, related_name='+', db_column='chiantiradtransfinalstateindex')
    initialstateindex = models.ForeignKey(States, related_name='+', db_column='chiantiradtransinitialstateindex')
    wavelengthexperimental = models.FloatField(null=True, db_column='wavelengthexperimental', blank=True)
    wavelengththeoretical = models.FloatField(null=True, db_column='wavelengththeoretical', blank=True)
    wavelength = models.FloatField(null=True, db_column='wavelength', blank=True)
    weightedoscillatorstrength = models.FloatField(null=True, db_column='RadTransProbabilityWeightedOscillatorStrength', blank=True)
    probabilitya = models.FloatField(null=True, db_column='RadTransProbabilityTransitionProbabilityA', blank=True)
    def upperStateRef(self):
        # The state with the higher energy is the upper level of the transition.
        if self.finalstateindex.energy > self.initialstateindex.energy:
            return self.finalstateindex.id
        else:
            return self.initialstateindex.id
    def lowerStateRef(self):
        # The state with the lower energy is the lower level of the transition.
        if self.finalstateindex.energy < self.initialstateindex.energy:
            return self.finalstateindex.id
        else:
            return self.initialstateindex.id
    def allWavelengths(self):
        # Collect whichever of the experimental/theoretical wavelengths are set.
        wavelengths = []
        if self.wavelengthexperimental:
            wavelengths.append(self.wavelengthexperimental)
        if self.wavelengththeoretical:
            wavelengths.append(self.wavelengththeoretical)
        return wavelengths
    def allWavelengthMethods(self):
        # Parallel to allWavelengths(): a method tag for each value present.
        methods = []
        if self.wavelengthexperimental:
            methods.append("EXP")
        if self.wavelengththeoretical:
            methods.append("THEO")
        return methods
    class Meta:
        db_table = u'transitions'
# This is copied from the VALD node.
class Sources(models.Model):
    """A bibliographic source (BibTeX) attached to a Species."""
    id = models.CharField(max_length=7, primary_key=True, db_index=True)
    species = models.ForeignKey(Species, related_name='+')
    bibtex = models.TextField(null=True)
    def XML(self):
        # Convert the stored BibTeX record to VAMDC XML.
        return BibTeX2XML(self.bibtex, self.id)
    class Meta:
        db_table = u'sources'
    def __unicode__(self):
        return u'%s'%self.id
|
gpl-3.0
|
formiano/enigma2
|
lib/python/Components/Renderer/Watches.py
|
7
|
3012
|
#######################################################################
#
# Renderer for Dreambox-Enigma2
# Coded by shamann (c)2010
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
#######################################################################
import math
from Renderer import Renderer
from skin import parseColor
from enigma import eCanvas, eSize, gRGB, eRect
from Components.VariableText import VariableText
from Components.config import config
class Watches(Renderer):
    """Analog watch-hand renderer: draws a line from the canvas centre at an
    angle derived from the source value (6 degrees per unit, clock-style)."""
    def __init__(self):
        Renderer.__init__(self)
        # Hand colour (foreground) and dial colour (background); both can be
        # overridden via skin attributes handled in applySkin().
        self.fColor = gRGB(255, 255, 255, 0)
        self.bColor = gRGB(0, 0, 0, 255)
        # Last rendered source value; -1 forces the first draw.
        self.numval = -1
    # Widget type instantiated for this renderer.
    GUI_WIDGET = eCanvas
    def applySkin(self, desktop, parent):
        # Consume our colour attributes; forward everything else to the base.
        attribs = []
        for (attrib, what) in self.skinAttributes:
            if (attrib == 'foregroundColor'):
                self.fColor = parseColor(what)
            elif (attrib == 'backgroundColor'):
                self.bColor = parseColor(what)
            else:
                attribs.append((attrib, what))
        self.skinAttributes = attribs
        return Renderer.applySkin(self, desktop, parent)
    def calculate(self, w, r, m):
        # Convert value w to an angle (w * 6 degrees) and return the hand-tip
        # coordinates for radius r around centre (m, m). y is inverted because
        # screen coordinates grow downwards.
        a = (w * 6)
        z = (math.pi / 180)
        x = int(round((r * math.sin((a * z)))))
        y = int(round((r * math.cos((a * z)))))
        return ((m + x), (m - y))
    def hand(self):
        # Draw the hand from the canvas centre to the computed tip position.
        width = self.instance.size().width()
        height = self.instance.size().height()
        r = (min(width, height) / 2)
        (endX, endY,) = self.calculate(self.numval, r, r)
        self.draw_line(r, r, endX, endY)
    def draw_line(self, x0, y0, x1, y1):
        # Integer (Bresenham-style) line drawing, plotting 1x1 rectangles.
        steep = abs(y1 - y0) > abs(x1 - x0)
        if steep:
            x0, y0 = y0, x0
            x1, y1 = y1, x1
        if x0 > x1:
            x0, x1 = x1, x0
            y0, y1 = y1, y0
        if y0 < y1:
            ystep = 1
        else:
            ystep = -1
        deltax = x1 - x0
        deltay = abs(y1 - y0)
        error = -deltax / 2
        y = y0
        for x in range(x0, x1 + 1):
            # When the line is steep the axes were swapped above, so swap back
            # when plotting.
            if steep:
                self.instance.fillRect(eRect(y, x, 1, 1), self.fColor)
            else:
                self.instance.fillRect(eRect(x, y, 1, 1), self.fColor)
            error = error + deltay
            if error > 0:
                y = y + ystep
                error = error - deltax
    def changed(self, what):
        # Source-change notification: redraw only when the value changed.
        sss = self.source.value
        if what[0] == self.CHANGED_CLEAR:
            pass
        else:
            if self.instance:
                if self.numval != sss:
                    self.numval = sss
                    self.instance.clear(self.bColor)
                    self.hand()
    def postWidgetCreate(self, instance):
        # Apply the skin 'size' attribute manually and paint the background.
        def parseSize(str):
            (x, y,) = str.split(',')
            return eSize(int(x), int(y))
        for (attrib, value,) in self.skinAttributes:
            if ((attrib == 'size') and self.instance.setSize(parseSize(value))):
                pass
        self.instance.clear(self.bColor)
|
gpl-2.0
|
40223219/2015_midterm
|
static/Brython3.1.1-20150328-091302/Lib/unittest/test/testmock/testhelpers.py
|
737
|
25793
|
import unittest
from unittest.mock import (
call, _Call, create_autospec, MagicMock,
Mock, ANY, _CallList, patch, PropertyMock
)
from datetime import datetime
class SomeClass(object):
    """Spec target for the autospec tests: three methods with distinct arities."""
    def one(self, a, b):
        pass
    def two(self):
        pass
    def three(self, a=None):
        pass
class AnyTest(unittest.TestCase):
    """Behaviour of the mock.ANY sentinel in calls and comparisons."""
    def test_any(self):
        # ANY compares equal to any object, both positionally and by keyword.
        self.assertEqual(ANY, object())
        mock = Mock()
        mock(ANY)
        mock.assert_called_with(ANY)
        mock = Mock()
        mock(foo=ANY)
        mock.assert_called_with(foo=ANY)
    def test_repr(self):
        self.assertEqual(repr(ANY), '<ANY>')
        self.assertEqual(str(ANY), '<ANY>')
    def test_any_and_datetime(self):
        mock = Mock()
        mock(datetime.now(), foo=datetime.now())
        mock.assert_called_with(ANY, foo=ANY)
    def test_any_mock_calls_comparison_order(self):
        # Equality must hold in BOTH directions, even against objects whose
        # own __eq__/__ne__ always answer False/True.
        mock = Mock()
        d = datetime.now()
        class Foo(object):
            def __eq__(self, other):
                return False
            def __ne__(self, other):
                return True
        for d in datetime.now(), Foo():
            mock.reset_mock()
            mock(d, foo=d, bar=d)
            mock.method(d, zinga=d, alpha=d)
            mock().method(a1=d, z99=d)
            expected = [
                call(ANY, foo=ANY, bar=ANY),
                call.method(ANY, zinga=ANY, alpha=ANY),
                call(), call().method(a1=ANY, z99=ANY)
            ]
            self.assertEqual(expected, mock.mock_calls)
            self.assertEqual(mock.mock_calls, expected)
class CallTest(unittest.TestCase):
    """Equality, representation and composition semantics of mock's _Call/call."""
    def test_call_with_call(self):
        # An empty _Call equals every "empty" spelling: missing name, empty
        # args tuple, empty kwargs dict, and any combination thereof.
        kall = _Call()
        self.assertEqual(kall, _Call())
        self.assertEqual(kall, _Call(('',)))
        self.assertEqual(kall, _Call(((),)))
        self.assertEqual(kall, _Call(({},)))
        self.assertEqual(kall, _Call(('', ())))
        self.assertEqual(kall, _Call(('', {})))
        self.assertEqual(kall, _Call(('', (), {})))
        self.assertEqual(kall, _Call(('foo',)))
        self.assertEqual(kall, _Call(('bar', ())))
        self.assertEqual(kall, _Call(('baz', {})))
        self.assertEqual(kall, _Call(('spam', (), {})))
        kall = _Call(((1, 2, 3),))
        self.assertEqual(kall, _Call(((1, 2, 3),)))
        self.assertEqual(kall, _Call(('', (1, 2, 3))))
        self.assertEqual(kall, _Call(((1, 2, 3), {})))
        self.assertEqual(kall, _Call(('', (1, 2, 3), {})))
        kall = _Call(((1, 2, 4),))
        self.assertNotEqual(kall, _Call(('', (1, 2, 3))))
        self.assertNotEqual(kall, _Call(('', (1, 2, 3), {})))
        kall = _Call(('foo', (1, 2, 4),))
        self.assertNotEqual(kall, _Call(('', (1, 2, 4))))
        self.assertNotEqual(kall, _Call(('', (1, 2, 4), {})))
        self.assertNotEqual(kall, _Call(('bar', (1, 2, 4))))
        self.assertNotEqual(kall, _Call(('bar', (1, 2, 4), {})))
        kall = _Call(({'a': 3},))
        self.assertEqual(kall, _Call(('', (), {'a': 3})))
        self.assertEqual(kall, _Call(('', {'a': 3})))
        self.assertEqual(kall, _Call(((), {'a': 3})))
        self.assertEqual(kall, _Call(({'a': 3},)))
    def test_empty__Call(self):
        # Empty _Call also equals plain-tuple spellings of an empty call.
        args = _Call()
        self.assertEqual(args, ())
        self.assertEqual(args, ('foo',))
        self.assertEqual(args, ((),))
        self.assertEqual(args, ('foo', ()))
        self.assertEqual(args, ('foo',(), {}))
        self.assertEqual(args, ('foo', {}))
        self.assertEqual(args, ({},))
    def test_named_empty_call(self):
        # A *named* empty call only matches tuples carrying the same name.
        args = _Call(('foo', (), {}))
        self.assertEqual(args, ('foo',))
        self.assertEqual(args, ('foo', ()))
        self.assertEqual(args, ('foo',(), {}))
        self.assertEqual(args, ('foo', {}))
        self.assertNotEqual(args, ((),))
        self.assertNotEqual(args, ())
        self.assertNotEqual(args, ({},))
        self.assertNotEqual(args, ('bar',))
        self.assertNotEqual(args, ('bar', ()))
        self.assertNotEqual(args, ('bar', {}))
    def test_call_with_args(self):
        # An unnamed call with positional args matches any name.
        args = _Call(((1, 2, 3), {}))
        self.assertEqual(args, ((1, 2, 3),))
        self.assertEqual(args, ('foo', (1, 2, 3)))
        self.assertEqual(args, ('foo', (1, 2, 3), {}))
        self.assertEqual(args, ((1, 2, 3), {}))
    def test_named_call_with_args(self):
        args = _Call(('foo', (1, 2, 3), {}))
        self.assertEqual(args, ('foo', (1, 2, 3)))
        self.assertEqual(args, ('foo', (1, 2, 3), {}))
        self.assertNotEqual(args, ((1, 2, 3),))
        self.assertNotEqual(args, ((1, 2, 3), {}))
    def test_call_with_kwargs(self):
        args = _Call(((), dict(a=3, b=4)))
        self.assertEqual(args, (dict(a=3, b=4),))
        self.assertEqual(args, ('foo', dict(a=3, b=4)))
        self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
        self.assertEqual(args, ((), dict(a=3, b=4)))
    def test_named_call_with_kwargs(self):
        args = _Call(('foo', (), dict(a=3, b=4)))
        self.assertEqual(args, ('foo', dict(a=3, b=4)))
        self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
        self.assertNotEqual(args, (dict(a=3, b=4),))
        self.assertNotEqual(args, ((), dict(a=3, b=4)))
    def test_call_with_args_call_empty_name(self):
        args = _Call(((1, 2, 3), {}))
        self.assertEqual(args, call(1, 2, 3))
        self.assertEqual(call(1, 2, 3), args)
        self.assertTrue(call(1, 2, 3) in [args])
    def test_call_ne(self):
        self.assertNotEqual(_Call(((1, 2, 3),)), call(1, 2))
        self.assertFalse(_Call(((1, 2, 3),)) != call(1, 2, 3))
        self.assertTrue(_Call(((1, 2), {})) != call(1, 2, 3))
    def test_call_non_tuples(self):
        # _Call never equals non-tuple values.
        kall = _Call(((1, 2, 3),))
        for value in 1, None, self, int:
            self.assertNotEqual(kall, value)
            self.assertFalse(kall == value)
    def test_repr(self):
        self.assertEqual(repr(_Call()), 'call()')
        self.assertEqual(repr(_Call(('foo',))), 'call.foo()')
        self.assertEqual(repr(_Call(((1, 2, 3), {'a': 'b'}))),
                         "call(1, 2, 3, a='b')")
        self.assertEqual(repr(_Call(('bar', (1, 2, 3), {'a': 'b'}))),
                         "call.bar(1, 2, 3, a='b')")
        self.assertEqual(repr(call), 'call')
        self.assertEqual(str(call), 'call')
        self.assertEqual(repr(call()), 'call()')
        self.assertEqual(repr(call(1)), 'call(1)')
        self.assertEqual(repr(call(zz='thing')), "call(zz='thing')")
        self.assertEqual(repr(call().foo), 'call().foo')
        self.assertEqual(repr(call(1).foo.bar(a=3).bing),
                         'call().foo.bar().bing')
        self.assertEqual(
            repr(call().foo(1, 2, a=3)),
            "call().foo(1, 2, a=3)"
        )
        self.assertEqual(repr(call()()), "call()()")
        self.assertEqual(repr(call(1)(2)), "call()(2)")
        self.assertEqual(
            repr(call()().bar().baz.beep(1)),
            "call()().bar().baz.beep(1)"
        )
    def test_call(self):
        self.assertEqual(call(), ('', (), {}))
        self.assertEqual(call('foo', 'bar', one=3, two=4),
                         ('', ('foo', 'bar'), {'one': 3, 'two': 4}))
        mock = Mock()
        mock(1, 2, 3)
        mock(a=3, b=6)
        self.assertEqual(mock.call_args_list,
                         [call(1, 2, 3), call(a=3, b=6)])
    def test_attribute_call(self):
        self.assertEqual(call.foo(1), ('foo', (1,), {}))
        self.assertEqual(call.bar.baz(fish='eggs'),
                         ('bar.baz', (), {'fish': 'eggs'}))
        mock = Mock()
        mock.foo(1, 2 ,3)
        mock.bar.baz(a=3, b=6)
        self.assertEqual(mock.method_calls,
                         [call.foo(1, 2, 3), call.bar.baz(a=3, b=6)])
    def test_extended_call(self):
        # Chained calls collapse into a dotted name plus the FINAL call's args.
        result = call(1).foo(2).bar(3, a=4)
        self.assertEqual(result, ('().foo().bar', (3,), dict(a=4)))
        mock = MagicMock()
        mock(1, 2, a=3, b=4)
        self.assertEqual(mock.call_args, call(1, 2, a=3, b=4))
        self.assertNotEqual(mock.call_args, call(1, 2, 3))
        self.assertEqual(mock.call_args_list, [call(1, 2, a=3, b=4)])
        self.assertEqual(mock.mock_calls, [call(1, 2, a=3, b=4)])
        mock = MagicMock()
        mock.foo(1).bar()().baz.beep(a=6)
        last_call = call.foo(1).bar()().baz.beep(a=6)
        self.assertEqual(mock.mock_calls[-1], last_call)
        self.assertEqual(mock.mock_calls, last_call.call_list())
    def test_call_list(self):
        # call_list() expands a chained call into every intermediate call.
        mock = MagicMock()
        mock(1)
        self.assertEqual(call(1).call_list(), mock.mock_calls)
        mock = MagicMock()
        mock(1).method(2)
        self.assertEqual(call(1).method(2).call_list(),
                         mock.mock_calls)
        mock = MagicMock()
        mock(1).method(2)(3)
        self.assertEqual(call(1).method(2)(3).call_list(),
                         mock.mock_calls)
        mock = MagicMock()
        int(mock(1).method(2)(3).foo.bar.baz(4)(5))
        kall = call(1).method(2)(3).foo.bar.baz(4)(5).__int__()
        self.assertEqual(kall.call_list(), mock.mock_calls)
    def test_call_any(self):
        self.assertEqual(call, ANY)
        m = MagicMock()
        int(m)
        self.assertEqual(m.mock_calls, [ANY])
        self.assertEqual([ANY], m.mock_calls)
    def test_two_args_call(self):
        # two=True builds a 2-tuple (args, kwargs) form of _Call.
        args = _Call(((1, 2), {'a': 3}), two=True)
        self.assertEqual(len(args), 2)
        self.assertEqual(args[0], (1, 2))
        self.assertEqual(args[1], {'a': 3})
        other_args = _Call(((1, 2), {'a': 3}))
        self.assertEqual(args, other_args)
class SpecSignatureTest(unittest.TestCase):
def _check_someclass_mock(self, mock):
self.assertRaises(AttributeError, getattr, mock, 'foo')
mock.one(1, 2)
mock.one.assert_called_with(1, 2)
self.assertRaises(AssertionError,
mock.one.assert_called_with, 3, 4)
self.assertRaises(TypeError, mock.one, 1)
mock.two()
mock.two.assert_called_with()
self.assertRaises(AssertionError,
mock.two.assert_called_with, 3)
self.assertRaises(TypeError, mock.two, 1)
mock.three()
mock.three.assert_called_with()
self.assertRaises(AssertionError,
mock.three.assert_called_with, 3)
self.assertRaises(TypeError, mock.three, 3, 2)
mock.three(1)
mock.three.assert_called_with(1)
mock.three(a=1)
mock.three.assert_called_with(a=1)
def test_basic(self):
for spec in (SomeClass, SomeClass()):
mock = create_autospec(spec)
self._check_someclass_mock(mock)
def test_create_autospec_return_value(self):
def f():
pass
mock = create_autospec(f, return_value='foo')
self.assertEqual(mock(), 'foo')
class Foo(object):
pass
mock = create_autospec(Foo, return_value='foo')
self.assertEqual(mock(), 'foo')
def test_autospec_reset_mock(self):
m = create_autospec(int)
int(m)
m.reset_mock()
self.assertEqual(m.__int__.call_count, 0)
def test_mocking_unbound_methods(self):
class Foo(object):
def foo(self, foo):
pass
p = patch.object(Foo, 'foo')
mock_foo = p.start()
Foo().foo(1)
mock_foo.assert_called_with(1)
def test_create_autospec_unbound_methods(self):
# see mock issue 128
# this is expected to fail until the issue is fixed
return
class Foo(object):
def foo(self):
pass
klass = create_autospec(Foo)
instance = klass()
self.assertRaises(TypeError, instance.foo, 1)
# Note: no type checking on the "self" parameter
klass.foo(1)
klass.foo.assert_called_with(1)
self.assertRaises(TypeError, klass.foo)
def test_create_autospec_keyword_arguments(self):
class Foo(object):
a = 3
m = create_autospec(Foo, a='3')
self.assertEqual(m.a, '3')
def test_create_autospec_keyword_only_arguments(self):
def foo(a, *, b=None):
pass
m = create_autospec(foo)
m(1)
m.assert_called_with(1)
self.assertRaises(TypeError, m, 1, 2)
m(2, b=3)
m.assert_called_with(2, b=3)
def test_function_as_instance_attribute(self):
obj = SomeClass()
def f(a):
pass
obj.f = f
mock = create_autospec(obj)
mock.f('bing')
mock.f.assert_called_with('bing')
def test_spec_as_list(self):
# because spec as a list of strings in the mock constructor means
# something very different we treat a list instance as the type.
mock = create_autospec([])
mock.append('foo')
mock.append.assert_called_with('foo')
self.assertRaises(AttributeError, getattr, mock, 'foo')
class Foo(object):
foo = []
mock = create_autospec(Foo)
mock.foo.append(3)
mock.foo.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.foo, 'foo')
def test_attributes(self):
class Sub(SomeClass):
attr = SomeClass()
sub_mock = create_autospec(Sub)
for mock in (sub_mock, sub_mock.attr):
self._check_someclass_mock(mock)
def test_builtin_functions_types(self):
# we could replace builtin functions / methods with a function
# with *args / **kwargs signature. Using the builtin method type
# as a spec seems to work fairly well though.
class BuiltinSubclass(list):
def bar(self, arg):
pass
sorted = sorted
attr = {}
mock = create_autospec(BuiltinSubclass)
mock.append(3)
mock.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.append, 'foo')
mock.bar('foo')
mock.bar.assert_called_with('foo')
self.assertRaises(TypeError, mock.bar, 'foo', 'bar')
self.assertRaises(AttributeError, getattr, mock.bar, 'foo')
mock.sorted([1, 2])
mock.sorted.assert_called_with([1, 2])
self.assertRaises(AttributeError, getattr, mock.sorted, 'foo')
mock.attr.pop(3)
mock.attr.pop.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.attr, 'foo')
def test_method_calls(self):
class Sub(SomeClass):
attr = SomeClass()
mock = create_autospec(Sub)
mock.one(1, 2)
mock.two()
mock.three(3)
expected = [call.one(1, 2), call.two(), call.three(3)]
self.assertEqual(mock.method_calls, expected)
mock.attr.one(1, 2)
mock.attr.two()
mock.attr.three(3)
expected.extend(
[call.attr.one(1, 2), call.attr.two(), call.attr.three(3)]
)
self.assertEqual(mock.method_calls, expected)
def test_magic_methods(self):
class BuiltinSubclass(list):
attr = {}
mock = create_autospec(BuiltinSubclass)
self.assertEqual(list(mock), [])
self.assertRaises(TypeError, int, mock)
self.assertRaises(TypeError, int, mock.attr)
self.assertEqual(list(mock), [])
self.assertIsInstance(mock['foo'], MagicMock)
self.assertIsInstance(mock.attr['foo'], MagicMock)
def test_spec_set(self):
class Sub(SomeClass):
attr = SomeClass()
for spec in (Sub, Sub()):
mock = create_autospec(spec, spec_set=True)
self._check_someclass_mock(mock)
self.assertRaises(AttributeError, setattr, mock, 'foo', 'bar')
self.assertRaises(AttributeError, setattr, mock.attr, 'foo', 'bar')
def test_descriptors(self):
class Foo(object):
@classmethod
def f(cls, a, b):
pass
@staticmethod
def g(a, b):
pass
class Bar(Foo):
pass
class Baz(SomeClass, Bar):
pass
for spec in (Foo, Foo(), Bar, Bar(), Baz, Baz()):
mock = create_autospec(spec)
mock.f(1, 2)
mock.f.assert_called_once_with(1, 2)
mock.g(3, 4)
mock.g.assert_called_once_with(3, 4)
def test_recursive(self):
class A(object):
def a(self):
pass
foo = 'foo bar baz'
bar = foo
A.B = A
mock = create_autospec(A)
mock()
self.assertFalse(mock.B.called)
mock.a()
mock.B.a()
self.assertEqual(mock.method_calls, [call.a(), call.B.a()])
self.assertIs(A.foo, A.bar)
self.assertIsNot(mock.foo, mock.bar)
mock.foo.lower()
self.assertRaises(AssertionError, mock.bar.lower.assert_called_with)
def test_spec_inheritance_for_classes(self):
class Foo(object):
def a(self):
pass
class Bar(object):
def f(self):
pass
class_mock = create_autospec(Foo)
self.assertIsNot(class_mock, class_mock())
for this_mock in class_mock, class_mock():
this_mock.a()
this_mock.a.assert_called_with()
self.assertRaises(TypeError, this_mock.a, 'foo')
self.assertRaises(AttributeError, getattr, this_mock, 'b')
instance_mock = create_autospec(Foo())
instance_mock.a()
instance_mock.a.assert_called_with()
self.assertRaises(TypeError, instance_mock.a, 'foo')
self.assertRaises(AttributeError, getattr, instance_mock, 'b')
# The return value isn't isn't callable
self.assertRaises(TypeError, instance_mock)
instance_mock.Bar.f()
instance_mock.Bar.f.assert_called_with()
self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g')
instance_mock.Bar().f()
instance_mock.Bar().f.assert_called_with()
self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g')
def test_inherit(self):
class Foo(object):
a = 3
Foo.Foo = Foo
# class
mock = create_autospec(Foo)
instance = mock()
self.assertRaises(AttributeError, getattr, instance, 'b')
attr_instance = mock.Foo()
self.assertRaises(AttributeError, getattr, attr_instance, 'b')
# instance
mock = create_autospec(Foo())
self.assertRaises(AttributeError, getattr, mock, 'b')
self.assertRaises(TypeError, mock)
# attribute instance
call_result = mock.Foo()
self.assertRaises(AttributeError, getattr, call_result, 'b')
def test_builtins(self):
# used to fail with infinite recursion
create_autospec(1)
create_autospec(int)
create_autospec('foo')
create_autospec(str)
create_autospec({})
create_autospec(dict)
create_autospec([])
create_autospec(list)
create_autospec(set())
create_autospec(set)
create_autospec(1.0)
create_autospec(float)
create_autospec(1j)
create_autospec(complex)
create_autospec(False)
create_autospec(True)
def test_function(self):
def f(a, b):
pass
mock = create_autospec(f)
self.assertRaises(TypeError, mock)
mock(1, 2)
mock.assert_called_with(1, 2)
f.f = f
mock = create_autospec(f)
self.assertRaises(TypeError, mock.f)
mock.f(3, 4)
mock.f.assert_called_with(3, 4)
def test_skip_attributeerrors(self):
class Raiser(object):
def __get__(self, obj, type=None):
if obj is None:
raise AttributeError('Can only be accessed via an instance')
class RaiserClass(object):
raiser = Raiser()
@staticmethod
def existing(a, b):
return a + b
s = create_autospec(RaiserClass)
self.assertRaises(TypeError, lambda x: s.existing(1, 2, 3))
s.existing(1, 2)
self.assertRaises(AttributeError, lambda: s.nonexisting)
# check we can fetch the raiser attribute and it has no spec
obj = s.raiser
obj.foo, obj.bar
def test_signature_class(self):
class Foo(object):
def __init__(self, a, b=3):
pass
mock = create_autospec(Foo)
self.assertRaises(TypeError, mock)
mock(1)
mock.assert_called_once_with(1)
mock(4, 5)
mock.assert_called_with(4, 5)
def test_class_with_no_init(self):
# this used to raise an exception
# due to trying to get a signature from object.__init__
class Foo(object):
pass
create_autospec(Foo)
def test_signature_callable(self):
class Callable(object):
def __init__(self):
pass
def __call__(self, a):
pass
mock = create_autospec(Callable)
mock()
mock.assert_called_once_with()
self.assertRaises(TypeError, mock, 'a')
instance = mock()
self.assertRaises(TypeError, instance)
instance(a='a')
instance.assert_called_once_with(a='a')
instance('a')
instance.assert_called_with('a')
mock = create_autospec(Callable())
mock(a='a')
mock.assert_called_once_with(a='a')
self.assertRaises(TypeError, mock)
mock('a')
mock.assert_called_with('a')
def test_signature_noncallable(self):
class NonCallable(object):
def __init__(self):
pass
mock = create_autospec(NonCallable)
instance = mock()
mock.assert_called_once_with()
self.assertRaises(TypeError, mock, 'a')
self.assertRaises(TypeError, instance)
self.assertRaises(TypeError, instance, 'a')
mock = create_autospec(NonCallable())
self.assertRaises(TypeError, mock)
self.assertRaises(TypeError, mock, 'a')
def test_create_autospec_none(self):
class Foo(object):
bar = None
mock = create_autospec(Foo)
none = mock.bar
self.assertNotIsInstance(none, type(None))
none.foo()
none.foo.assert_called_once_with()
def test_autospec_functions_with_self_in_odd_place(self):
class Foo(object):
def f(a, self):
pass
a = create_autospec(Foo)
a.f(self=10)
a.f.assert_called_with(self=10)
def test_autospec_property(self):
class Foo(object):
@property
def foo(self):
return 3
foo = create_autospec(Foo)
mock_property = foo.foo
# no spec on properties
self.assertTrue(isinstance(mock_property, MagicMock))
mock_property(1, 2, 3)
mock_property.abc(4, 5, 6)
mock_property.assert_called_once_with(1, 2, 3)
mock_property.abc.assert_called_once_with(4, 5, 6)
def test_autospec_slots(self):
class Foo(object):
__slots__ = ['a']
foo = create_autospec(Foo)
mock_slot = foo.a
# no spec on slots
mock_slot(1, 2, 3)
mock_slot.abc(4, 5, 6)
mock_slot.assert_called_once_with(1, 2, 3)
mock_slot.abc.assert_called_once_with(4, 5, 6)
class TestCallList(unittest.TestCase):
def test_args_list_contains_call_list(self):
mock = Mock()
self.assertIsInstance(mock.call_args_list, _CallList)
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
for kall in call(1, 2), call(a=3), call(3, 4), call(b=6):
self.assertTrue(kall in mock.call_args_list)
calls = [call(a=3), call(3, 4)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(1, 2), call(a=3)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(3, 4), call(b=6)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(3, 4)]
self.assertTrue(calls in mock.call_args_list)
self.assertFalse(call('fish') in mock.call_args_list)
self.assertFalse([call('fish')] in mock.call_args_list)
def test_call_list_str(self):
mock = Mock()
mock(1, 2)
mock.foo(a=3)
mock.foo.bar().baz('fish', cat='dog')
expected = (
"[call(1, 2),\n"
" call.foo(a=3),\n"
" call.foo.bar(),\n"
" call.foo.bar().baz('fish', cat='dog')]"
)
self.assertEqual(str(mock.mock_calls), expected)
def test_propertymock(self):
p = patch('%s.SomeClass.one' % __name__, new_callable=PropertyMock)
mock = p.start()
try:
SomeClass.one
mock.assert_called_once_with()
s = SomeClass()
s.one
mock.assert_called_with()
self.assertEqual(mock.mock_calls, [call(), call()])
s.one = 3
self.assertEqual(mock.mock_calls, [call(), call(), call(3)])
finally:
p.stop()
def test_propertymock_returnvalue(self):
m = MagicMock()
p = PropertyMock()
type(m).foo = p
returned = m.foo
p.assert_called_once_with()
self.assertIsInstance(returned, MagicMock)
self.assertNotIsInstance(returned, PropertyMock)
# Allow running this test module directly with the default unittest runner.
if __name__ == '__main__':
    unittest.main()
|
gpl-3.0
|
Yen-Chung-En/2015cdb_W12
|
static/Brython3.1.1-20150328-091302/Lib/unittest/loader.py
|
739
|
13883
|
"""Loading unittests."""
import os
import re
import sys
import traceback
import types
import functools
from fnmatch import fnmatch
from . import case, suite, util
__unittest = True
# what about .pyc or .pyo (etc)
# we would need to avoid loading the same tests multiple times
# from '.py', '.pyc' *and* '.pyo'
# Matches file names that are importable Python modules ending in '.py'
# (must start with a letter or underscore).
VALID_MODULE_NAME = re.compile(r'[_a-z]\w*\.py$', re.IGNORECASE)
def _make_failed_import_test(name, suiteClass):
    """Return a suite with one synthetic test re-raising an import failure.

    Must be called from inside an ``except`` block so format_exc() sees
    the active traceback.
    """
    message = 'Failed to import test module: %s\n%s' % (
        name, traceback.format_exc())
    return _make_failed_test('ModuleImportFailure', name,
                             ImportError(message), suiteClass)
def _make_failed_load_tests(name, exception, suiteClass):
    # Wrap a load_tests failure as a suite with one test that re-raises it.
    return _make_failed_test('LoadTestsFailure', name, exception, suiteClass)
def _make_failed_test(classname, methodname, exception, suiteClass):
    """Synthesize a TestCase subclass whose single test raises *exception*."""
    def testFailure(self):
        raise exception
    # Build the class dynamically so the failing test carries the original
    # module/method name in reports.
    TestClass = type(classname, (case.TestCase,), {methodname: testFailure})
    return suiteClass((TestClass(methodname),))
def _jython_aware_splitext(path):
if path.lower().endswith('$py.class'):
return path[:-9]
return os.path.splitext(path)[0]
class TestLoader(object):
    """
    This class is responsible for loading tests according to various criteria
    and returning them wrapped in a TestSuite
    """
    # Method-name prefix that marks a method as a test.
    testMethodPrefix = 'test'
    # Three-way comparison used to order test names; set to None to disable.
    sortTestMethodsUsing = staticmethod(util.three_way_cmp)
    # Callable wrapping a list of tests into a suite.
    suiteClass = suite.TestSuite
    # Remembered by discover() so nested load_tests calls can omit it.
    _top_level_dir = None
    def loadTestsFromTestCase(self, testCaseClass):
        """Return a suite of all tests cases contained in testCaseClass"""
        if issubclass(testCaseClass, suite.TestSuite):
            raise TypeError("Test cases should not be derived from TestSuite." \
                            " Maybe you meant to derive from TestCase?")
        testCaseNames = self.getTestCaseNames(testCaseClass)
        if not testCaseNames and hasattr(testCaseClass, 'runTest'):
            # Legacy protocol: a TestCase with no prefixed methods but a
            # runTest method is loaded as that single test.
            testCaseNames = ['runTest']
        loaded_suite = self.suiteClass(map(testCaseClass, testCaseNames))
        return loaded_suite
    def loadTestsFromModule(self, module, use_load_tests=True):
        """Return a suite of all tests cases contained in the given module"""
        tests = []
        for name in dir(module):
            obj = getattr(module, name)
            if isinstance(obj, type) and issubclass(obj, case.TestCase):
                tests.append(self.loadTestsFromTestCase(obj))
        load_tests = getattr(module, 'load_tests', None)
        tests = self.suiteClass(tests)
        if use_load_tests and load_tests is not None:
            try:
                # load_tests protocol: the module may rebuild the suite.
                return load_tests(self, tests, None)
            except Exception as e:
                return _make_failed_load_tests(module.__name__, e,
                                               self.suiteClass)
        return tests
    def loadTestsFromName(self, name, module=None):
        """Return a suite of all tests cases given a string specifier.
        The name may resolve either to a module, a test case class, a
        test method within a test case class, or a callable object which
        returns a TestCase or TestSuite instance.
        The method optionally resolves the names relative to a given module.
        """
        parts = name.split('.')
        if module is None:
            # Import the longest importable dotted prefix of the name; the
            # remaining parts are resolved with getattr below.
            parts_copy = parts[:]
            while parts_copy:
                try:
                    module = __import__('.'.join(parts_copy))
                    break
                except ImportError:
                    del parts_copy[-1]
                    if not parts_copy:
                        raise
            parts = parts[1:]
        obj = module
        for part in parts:
            parent, obj = obj, getattr(obj, part)
        if isinstance(obj, types.ModuleType):
            return self.loadTestsFromModule(obj)
        elif isinstance(obj, type) and issubclass(obj, case.TestCase):
            return self.loadTestsFromTestCase(obj)
        elif (isinstance(obj, types.FunctionType) and
              isinstance(parent, type) and
              issubclass(parent, case.TestCase)):
            # A test method: instantiate its TestCase for just that test.
            name = parts[-1]
            inst = parent(name)
            # static methods follow a different path
            if not isinstance(getattr(inst, name), types.FunctionType):
                return self.suiteClass([inst])
        elif isinstance(obj, suite.TestSuite):
            return obj
        if callable(obj):
            # Fallback: the name resolved to a factory returning a test.
            test = obj()
            if isinstance(test, suite.TestSuite):
                return test
            elif isinstance(test, case.TestCase):
                return self.suiteClass([test])
            else:
                raise TypeError("calling %s returned %s, not a test" %
                                (obj, test))
        else:
            raise TypeError("don't know how to make test from: %s" % obj)
    def loadTestsFromNames(self, names, module=None):
        """Return a suite of all tests cases found using the given sequence
        of string specifiers. See 'loadTestsFromName()'.
        """
        suites = [self.loadTestsFromName(name, module) for name in names]
        return self.suiteClass(suites)
    def getTestCaseNames(self, testCaseClass):
        """Return a sorted sequence of method names found within testCaseClass
        """
        def isTestMethod(attrname, testCaseClass=testCaseClass,
                         prefix=self.testMethodPrefix):
            # A test method starts with the configured prefix and is callable.
            return attrname.startswith(prefix) and \
                callable(getattr(testCaseClass, attrname))
        testFnNames = list(filter(isTestMethod, dir(testCaseClass)))
        if self.sortTestMethodsUsing:
            testFnNames.sort(key=functools.cmp_to_key(self.sortTestMethodsUsing))
        return testFnNames
    def discover(self, start_dir, pattern='test*.py', top_level_dir=None):
        """Find and return all test modules from the specified start
        directory, recursing into subdirectories to find them and return all
        tests found within them. Only test files that match the pattern will
        be loaded. (Using shell style pattern matching.)
        All test modules must be importable from the top level of the project.
        If the start directory is not the top level directory then the top
        level directory must be specified separately.
        If a test package name (directory with '__init__.py') matches the
        pattern then the package will be checked for a 'load_tests' function. If
        this exists then it will be called with loader, tests, pattern.
        If load_tests exists then discovery does *not* recurse into the package,
        load_tests is responsible for loading all tests in the package.
        The pattern is deliberately not stored as a loader attribute so that
        packages can continue discovery themselves. top_level_dir is stored so
        load_tests does not need to pass this argument in to loader.discover().
        """
        set_implicit_top = False
        if top_level_dir is None and self._top_level_dir is not None:
            # make top_level_dir optional if called from load_tests in a package
            top_level_dir = self._top_level_dir
        elif top_level_dir is None:
            set_implicit_top = True
            top_level_dir = start_dir
        top_level_dir = os.path.abspath(top_level_dir)
        if not top_level_dir in sys.path:
            # all test modules must be importable from the top level directory
            # should we *unconditionally* put the start directory in first
            # in sys.path to minimise likelihood of conflicts between installed
            # modules and development versions?
            sys.path.insert(0, top_level_dir)
        self._top_level_dir = top_level_dir
        is_not_importable = False
        if os.path.isdir(os.path.abspath(start_dir)):
            start_dir = os.path.abspath(start_dir)
            if start_dir != top_level_dir:
                # a start directory below the top level must be a package
                is_not_importable = not os.path.isfile(os.path.join(start_dir, '__init__.py'))
        else:
            # support for discovery from dotted module names
            try:
                __import__(start_dir)
            except ImportError:
                is_not_importable = True
            else:
                the_module = sys.modules[start_dir]
                top_part = start_dir.split('.')[0]
                start_dir = os.path.abspath(os.path.dirname((the_module.__file__)))
                if set_implicit_top:
                    # infer the project top level from the imported package and
                    # drop the provisional sys.path entry added above
                    self._top_level_dir = self._get_directory_containing_module(top_part)
                    sys.path.remove(top_level_dir)
        if is_not_importable:
            raise ImportError('Start directory is not importable: %r' % start_dir)
        tests = list(self._find_tests(start_dir, pattern))
        return self.suiteClass(tests)
    def _get_directory_containing_module(self, module_name):
        # Map an already-imported module to the directory containing it
        # (for a package's __init__ file, the package's parent directory).
        module = sys.modules[module_name]
        full_path = os.path.abspath(module.__file__)
        if os.path.basename(full_path).lower().startswith('__init__.py'):
            return os.path.dirname(os.path.dirname(full_path))
        else:
            # here we have been given a module rather than a package - so
            # all we can do is search the *same* directory the module is in
            # should an exception be raised instead
            return os.path.dirname(full_path)
    def _get_name_from_path(self, path):
        # Convert a filesystem path under the top-level dir to a dotted
        # module name (extension stripped, separators become dots).
        path = _jython_aware_splitext(os.path.normpath(path))
        _relpath = os.path.relpath(path, self._top_level_dir)
        assert not os.path.isabs(_relpath), "Path must be within the project"
        assert not _relpath.startswith('..'), "Path must be within the project"
        name = _relpath.replace(os.path.sep, '.')
        return name
    def _get_module_from_name(self, name):
        # Import by dotted name and return the leaf module object.
        __import__(name)
        return sys.modules[name]
    def _match_path(self, path, full_path, pattern):
        # override this method to use alternative matching strategy
        return fnmatch(path, pattern)
    def _find_tests(self, start_dir, pattern):
        """Used by discovery. Yields test suites it loads."""
        paths = os.listdir(start_dir)
        for path in paths:
            full_path = os.path.join(start_dir, path)
            if os.path.isfile(full_path):
                if not VALID_MODULE_NAME.match(path):
                    # valid Python identifiers only
                    continue
                if not self._match_path(path, full_path, pattern):
                    continue
                # if the test file matches, load it
                name = self._get_name_from_path(full_path)
                try:
                    module = self._get_module_from_name(name)
                except:
                    # deliberately bare: any import-time failure is surfaced
                    # as a synthetic failing test rather than aborting discovery
                    yield _make_failed_import_test(name, self.suiteClass)
                else:
                    mod_file = os.path.abspath(getattr(module, '__file__', full_path))
                    realpath = _jython_aware_splitext(os.path.realpath(mod_file))
                    fullpath_noext = _jython_aware_splitext(os.path.realpath(full_path))
                    if realpath.lower() != fullpath_noext.lower():
                        # the dotted name resolved to a module located
                        # somewhere else (e.g. a globally installed copy)
                        module_dir = os.path.dirname(realpath)
                        mod_name = _jython_aware_splitext(os.path.basename(full_path))
                        expected_dir = os.path.dirname(full_path)
                        msg = ("%r module incorrectly imported from %r. Expected %r. "
                               "Is this module globally installed?")
                        raise ImportError(msg % (mod_name, module_dir, expected_dir))
                    yield self.loadTestsFromModule(module)
            elif os.path.isdir(full_path):
                if not os.path.isfile(os.path.join(full_path, '__init__.py')):
                    # not a package: do not recurse into it
                    continue
                load_tests = None
                tests = None
                if fnmatch(path, pattern):
                    # only check load_tests if the package directory itself matches the filter
                    name = self._get_name_from_path(full_path)
                    package = self._get_module_from_name(name)
                    load_tests = getattr(package, 'load_tests', None)
                    tests = self.loadTestsFromModule(package, use_load_tests=False)
                if load_tests is None:
                    if tests is not None:
                        # tests loaded from package file
                        yield tests
                    # recurse into the package
                    for test in self._find_tests(full_path, pattern):
                        yield test
                else:
                    # load_tests takes over loading for the whole package
                    try:
                        yield load_tests(self, tests, pattern)
                    except Exception as e:
                        yield _make_failed_load_tests(package.__name__, e,
                                                      self.suiteClass)
# Shared module-level loader instance used by unittest.main and friends.
defaultTestLoader = TestLoader()
def _makeLoader(prefix, sortUsing, suiteClass=None):
    """Build a TestLoader configured with the given prefix, sorter and suite."""
    loader = TestLoader()
    loader.testMethodPrefix = prefix
    loader.sortTestMethodsUsing = sortUsing
    if suiteClass is not None:
        loader.suiteClass = suiteClass
    return loader
def getTestCaseNames(testCaseClass, prefix, sortUsing=util.three_way_cmp):
    """Module-level convenience wrapper around TestLoader.getTestCaseNames."""
    loader = _makeLoader(prefix, sortUsing)
    return loader.getTestCaseNames(testCaseClass)
def makeSuite(testCaseClass, prefix='test', sortUsing=util.three_way_cmp,
              suiteClass=suite.TestSuite):
    """Module-level convenience wrapper building a suite from one TestCase."""
    loader = _makeLoader(prefix, sortUsing, suiteClass)
    return loader.loadTestsFromTestCase(testCaseClass)
def findTestCases(module, prefix='test', sortUsing=util.three_way_cmp,
                  suiteClass=suite.TestSuite):
    """Module-level convenience wrapper loading every test in *module*."""
    loader = _makeLoader(prefix, sortUsing, suiteClass)
    return loader.loadTestsFromModule(module)
|
agpl-3.0
|
anbangleo/NlsdeWeb
|
Python-3.6.0/Lib/test/test_abstract_numbers.py
|
109
|
1528
|
"""Unit tests for numbers.py."""
import math
import operator
import unittest
from numbers import Complex, Real, Rational, Integral
class TestNumbers(unittest.TestCase):
    """Check that the builtin numeric types register with the numbers ABCs."""

    def test_int(self):
        """int is Integral (hence Complex) and implements the numeric API."""
        self.assertTrue(issubclass(int, Integral))
        self.assertTrue(issubclass(int, Complex))
        seven = int(7)
        self.assertEqual(7, seven.real)
        self.assertEqual(0, seven.imag)
        self.assertEqual(7, seven.conjugate())
        self.assertEqual(-7, int(-7).conjugate())
        self.assertEqual(7, seven.numerator)
        self.assertEqual(1, seven.denominator)

    def test_float(self):
        """float is Real but not Rational."""
        self.assertFalse(issubclass(float, Rational))
        self.assertTrue(issubclass(float, Real))
        value = float(7.3)
        self.assertEqual(7.3, value.real)
        self.assertEqual(0, value.imag)
        self.assertEqual(7.3, value.conjugate())
        self.assertEqual(-7.3, float(-7.3).conjugate())

    def test_complex(self):
        """complex is Complex but not Real; real-only operations reject it."""
        self.assertFalse(issubclass(complex, Real))
        self.assertTrue(issubclass(complex, Complex))
        c1, c2 = complex(3, 2), complex(4, 1)
        # XXX: This is not ideal, but see the comment in math_trunc().
        self.assertRaises(TypeError, math.trunc, c1)
        for binop in (operator.mod, divmod, operator.floordiv):
            self.assertRaises(TypeError, binop, c1, c2)
        self.assertRaises(TypeError, float, c1)
        self.assertRaises(TypeError, int, c1)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
mit
|
Lab603/PicEncyclopedias
|
jni-build/jni/include/tensorflow/python/ops/batch_norm_benchmark.py
|
19
|
9591
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""End-to-end benchmark for batch normalization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import tensorflow as tf
from tensorflow.python.ops import gen_nn_ops
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_boolean("use_gpu", True, """Run GPU benchmarks.""")
def batch_norm_op(tensor, mean, variance, beta, gamma, scale):
  """Fused kernel for batch normalization."""
  # The fused op was deprecated at GraphDef version 9; pin the graph to
  # producer version 8 so it remains usable for this benchmark.
  tf.get_default_graph().graph_def_versions.producer = 8
  # pylint: disable=protected-access
  return gen_nn_ops._batch_norm_with_global_normalization(
      tensor, mean, variance, beta, gamma, 0.001, scale)
  # pylint: enable=protected-access
# Note that the naive implementation is much slower:
# batch_norm = (tensor - mean) * tf.rsqrt(variance + 0.001)
# if scale:
# batch_norm *= gamma
# return batch_norm + beta
def batch_norm_py(tensor, mean, variance, beta, gamma, scale):
  """Python implementation of batch normalization (tf.nn wrapper)."""
  # gamma is only applied when scaling is requested.
  scale_param = gamma if scale else None
  return tf.nn.batch_normalization(
      tensor, mean, variance, beta, scale_param, 0.001)
def batch_norm_slow(tensor, mean, variance, beta, gamma, scale):
  """Naive batch normalization built from elementwise ops."""
  normalized = (tensor - mean) * tf.rsqrt(variance + 0.001)
  if scale:
    normalized *= gamma
  return normalized + beta
def build_graph(device, input_shape, axes, num_layers, mode, scale, train):
  """Build a graph containing a sequence of batch normalizations.

  Args:
    device: string, the device to run on.
    input_shape: shape of the input tensor.
    axes: axes that are to be normalized across.
    num_layers: number of batch normalization layers in the graph.
    mode: "op", "py" or "slow" depending on the implementation.
    scale: scale after normalization.
    train: if true, also run backprop.

  Returns:
    An array of tensors to run()
  """
  # "py"/"slow" keep the normalized axes as size-1 dims; "op" drops them.
  keep_dims = mode in ("py", "slow")
  if keep_dims:
    moment_shape = [1 if axis in axes else input_shape[axis]
                    for axis in range(len(input_shape))]
  else:
    moment_shape = [input_shape[axis]
                    for axis in range(len(input_shape))
                    if axis not in axes]
  with tf.device("/%s:0" % device):
    tensor = tf.Variable(tf.truncated_normal(input_shape))
    for _ in range(num_layers):
      mean, variance = tf.nn.moments(tensor, axes, keep_dims=keep_dims)
      beta = tf.Variable(tf.zeros(moment_shape))
      gamma = tf.Variable(tf.constant(1.0, shape=moment_shape))
      if mode == "py":
        tensor = batch_norm_py(tensor, mean, variance, beta, gamma, scale)
      elif mode == "op":
        tensor = batch_norm_op(tensor, mean, variance, beta, gamma, scale)
      elif mode == "slow":
        tensor = batch_norm_slow(tensor, mean, variance, beta, gamma, scale)
  if train:
    # Backprop through every layer with respect to all variables.
    return tf.gradients([tensor], tf.trainable_variables())
  return [tensor]
def print_difference(mode, t1, t2):
  """Print the relative timing difference between two runs, in percent."""
  pct_change = (t2 - t1) / t1 * 100.0
  print("=== %s: %.1f%% ===" % (mode, pct_change))
class BatchNormBenchmark(tf.test.Benchmark):
  """Benchmark batch normalization.

  Times the fused op, the tf.nn wrapper and a naive elementwise
  implementation across convolutional and fully-connected shapes,
  forward-only and forward+backward, on CPU and (optionally) GPU.
  """

  def _run_graph(self, device, input_shape, axes, num_layers, mode, scale,
                 train, num_iters):
    """Run the graph and print its execution time.

    Args:
      device: string, the device to run on.
      input_shape: shape of the input tensor.
      axes: axes that are to be normalized across.
      num_layers: number of batch normalization layers in the graph.
      mode: "op", "py" or "slow" depending on the implementation.
      scale: scale after normalization.
      train: if true, also run backprop.
      num_iters: number of steps to run.

    Returns:
      The duration of the run in seconds.
    """
    graph = tf.Graph()
    with graph.as_default():
      outputs = build_graph(device, input_shape, axes, num_layers, mode, scale,
                            train)
    with tf.Session(graph=graph) as session:
      tf.initialize_all_variables().run()
      _ = session.run([out.op for out in outputs])  # warm up.
      start_time = time.time()
      for _ in range(num_iters):
        _ = session.run([out.op for out in outputs])
      duration = time.time() - start_time
      print("%s shape:%d/%d #layers:%d mode:%s scale:%r train:%r - %f secs" %
            (device, len(input_shape), len(axes), num_layers, mode, scale,
             train, duration / num_iters))
    name_template = (
        "batch_norm_{device}_input_shape_{shape}_axes_{axes}_mode_{mode}_"
        "layers_{num_layers}_scale_{scale}_"
        "train_{train}")
    self.report_benchmark(
        name=name_template.format(
            device=device, mode=mode, num_layers=num_layers, scale=scale,
            train=train,
            shape=str(input_shape).replace(" ", ""),
            axes=str(axes)).replace(" ", ""),
        iters=num_iters, wall_time=duration / num_iters)
    return duration

  def benchmark_batch_norm(self):
    """Compare the three implementations across representative workloads."""
    print("Forward convolution (lower layers).")
    shape = [8, 128, 128, 32]
    axes = [0, 1, 2]
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, False, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, False, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, False, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, False, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, False, 50)
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, False, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    print("Forward/backward convolution (lower layers).")
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, True, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, True, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, True, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, True, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, True, 50)
      # BUG FIX: this timing was previously assigned to t2, so the
      # "py vs slow" line below compared against a stale t3 from the CPU
      # section and the GPU "slow" timing was silently discarded.
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, True, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    print("Forward convolution (higher layers).")
    shape = [256, 17, 17, 32]
    axes = [0, 1, 2]
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, False, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, False, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, False, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, False, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, False, 50)
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, False, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    print("Forward/backward convolution (higher layers).")
    t1 = self._run_graph("cpu", shape, axes, 10, "op", True, True, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "py", True, True, 5)
    t3 = self._run_graph("cpu", shape, axes, 10, "slow", True, True, 5)
    print_difference("op vs py", t1, t2)
    print_difference("py vs slow", t2, t3)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "op", True, True, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "py", True, True, 50)
      t3 = self._run_graph("gpu", shape, axes, 10, "slow", True, True, 50)
      print_difference("op vs py", t1, t2)
      print_difference("py vs slow", t2, t3)
    # The fused op does not support the fully-connected layout, so only
    # "py" and "slow" are compared below.
    print("Forward fully-connected.")
    shape = [1024, 32]
    axes = [0]
    t1 = self._run_graph("cpu", shape, axes, 10, "py", True, False, 5)
    t2 = self._run_graph("cpu", shape, axes, 10, "slow", True, False, 5)
    print_difference("py vs slow", t1, t2)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "py", True, False, 50)
      t2 = self._run_graph("gpu", shape, axes, 10, "slow", True, False, 50)
      print_difference("py vs slow", t1, t2)
    print("Forward/backward fully-connected.")
    t1 = self._run_graph("cpu", shape, axes, 10, "py", True, True, 50)
    t2 = self._run_graph("cpu", shape, axes, 10, "slow", True, True, 50)
    print_difference("py vs slow", t1, t2)
    if FLAGS.use_gpu:
      t1 = self._run_graph("gpu", shape, axes, 10, "py", True, True, 5)
      t2 = self._run_graph("gpu", shape, axes, 10, "slow", True, True, 5)
      print_difference("py vs slow", t1, t2)
# Run the benchmark via the TensorFlow test runner when invoked directly.
if __name__ == "__main__":
  tf.test.main()
|
mit
|
nhippenmeyer/django
|
tests/middleware_exceptions/tests.py
|
147
|
44086
|
import sys
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.core.signals import got_request_exception
from django.http import HttpResponse
from django.template import engines
from django.template.response import TemplateResponse
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import patch_logger
class TestException(Exception):
    """Exception raised deliberately by the misbehaving test middlewares."""
    pass
# A middleware base class that tracks which methods have been called
class TestMiddleware(object):
    """Base middleware that records which of its hooks have been invoked."""

    def __init__(self):
        # One boolean flag per middleware hook, flipped True when it runs.
        for hook in ('request', 'view', 'response',
                     'template_response', 'exception'):
            setattr(self, 'process_%s_called' % hook, False)

    def process_request(self, request):
        self.process_request_called = True

    def process_view(self, request, view_func, view_args, view_kwargs):
        self.process_view_called = True

    def process_template_response(self, request, response):
        self.process_template_response_called = True
        return response

    def process_response(self, request, response):
        self.process_response_called = True
        return response

    def process_exception(self, request, exception):
        self.process_exception_called = True
# Middleware examples that do the right thing
class RequestMiddleware(TestMiddleware):
    """Well-behaved middleware: short-circuits with a response on request."""
    def process_request(self, request):
        # Record the call on the base class, then short-circuit the request.
        TestMiddleware.process_request(self, request)
        return HttpResponse('Request Middleware')
class ViewMiddleware(TestMiddleware):
    """Well-behaved middleware: returns a response from process_view."""
    def process_view(self, request, view_func, view_args, view_kwargs):
        TestMiddleware.process_view(self, request, view_func, view_args, view_kwargs)
        return HttpResponse('View Middleware')
class ResponseMiddleware(TestMiddleware):
    """Well-behaved middleware: replaces the outgoing response."""
    def process_response(self, request, response):
        TestMiddleware.process_response(self, request, response)
        return HttpResponse('Response Middleware')
class TemplateResponseMiddleware(TestMiddleware):
    """Well-behaved middleware: substitutes a new TemplateResponse."""
    def process_template_response(self, request, response):
        TestMiddleware.process_template_response(self, request, response)
        template = engines['django'].from_string('Template Response Middleware')
        return TemplateResponse(request, template)
class ExceptionMiddleware(TestMiddleware):
    """Well-behaved middleware: converts an exception into a response."""
    def process_exception(self, request, exception):
        TestMiddleware.process_exception(self, request, exception)
        return HttpResponse('Exception Middleware')
# Sample middlewares that raise exceptions
class BadRequestMiddleware(TestMiddleware):
    """Misbehaving middleware: raises from process_request."""
    def process_request(self, request):
        TestMiddleware.process_request(self, request)
        raise TestException('Test Request Exception')
class BadViewMiddleware(TestMiddleware):
    """Misbehaving middleware: raises from process_view."""
    def process_view(self, request, view_func, view_args, view_kwargs):
        TestMiddleware.process_view(self, request, view_func, view_args, view_kwargs)
        raise TestException('Test View Exception')
class BadTemplateResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: raises from process_template_response."""
    def process_template_response(self, request, response):
        TestMiddleware.process_template_response(self, request, response)
        raise TestException('Test Template Response Exception')
class BadResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: raises from process_response."""
    def process_response(self, request, response):
        TestMiddleware.process_response(self, request, response)
        raise TestException('Test Response Exception')
class BadExceptionMiddleware(TestMiddleware):
    """Misbehaving middleware: raises from process_exception itself."""
    def process_exception(self, request, exception):
        TestMiddleware.process_exception(self, request, exception)
        raise TestException('Test Exception Exception')
# Sample middleware classes that fail to return an HttpResponse
class NoTemplateResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_template_response`` returns nothing."""

    def process_template_response(self, request, response):
        super(NoTemplateResponseMiddleware, self).process_template_response(request, response)
        # Deliberately return None instead of a response object.
        return None
class NoResponseMiddleware(TestMiddleware):
    """Misbehaving middleware: ``process_response`` returns nothing."""

    def process_response(self, request, response):
        super(NoResponseMiddleware, self).process_response(request, response)
        # Deliberately return None instead of a response object.
        return None
@override_settings(ROOT_URLCONF='middleware_exceptions.urls')
class BaseMiddlewareExceptionTest(SimpleTestCase):
    """Shared harness for the middleware-exception tests.

    Collects exceptions reported through the ``got_request_exception``
    signal and provides helpers to install middleware directly on the
    test client's handler and to assert exactly which middleware hooks
    were invoked.
    """

    def setUp(self):
        # Accumulates sys.exc_info() triples, one per exception the
        # handler reports while a test request is being processed.
        self.exceptions = []
        got_request_exception.connect(self._on_request_exception)
        self.client.handler.load_middleware()

    def tearDown(self):
        got_request_exception.disconnect(self._on_request_exception)
        self.exceptions = []

    def _on_request_exception(self, sender, request, **kwargs):
        # Signal receiver: capture the exception currently being handled.
        self.exceptions.append(sys.exc_info())

    def _add_middleware(self, middleware):
        # Request/view hooks are prepended while response-side hooks are
        # appended — mirrors handler ordering (request phase runs top-down,
        # response phase bottom-up). NOTE(review): reaches into the
        # handler's private middleware lists on purpose.
        self.client.handler._request_middleware.insert(0, middleware.process_request)
        self.client.handler._view_middleware.insert(0, middleware.process_view)
        self.client.handler._template_response_middleware.append(middleware.process_template_response)
        self.client.handler._response_middleware.append(middleware.process_response)
        self.client.handler._exception_middleware.append(middleware.process_exception)

    def assert_exceptions_handled(self, url, errors, extra_error=None):
        """GET ``url`` and check the recorded exception messages.

        ``errors`` is the ordered list of expected exception message
        strings; ``extra_error`` is an exception instance whose exact
        type is also tolerated when re-raised by the test client.
        """
        try:
            self.client.get(url)
        except TestException:
            # Test client intentionally re-raises any exceptions being raised
            # during request handling. Hence actual testing that exception was
            # properly handled is done by relying on got_request_exception
            # signal being sent.
            pass
        except Exception as e:
            # Exact-type comparison is deliberate: a subclass of the
            # expected exception would still be a test failure.
            if type(extra_error) != type(e):
                self.fail("Unexpected exception: %s" % e)
        self.assertEqual(len(self.exceptions), len(errors))
        for i, error in enumerate(errors):
            exception, value, tb = self.exceptions[i]
            self.assertEqual(value.args, (error, ))

    def assert_middleware_usage(self, middleware, request, view, template_response, response, exception):
        """Assert exactly which hooks were invoked on ``middleware``.

        The boolean arguments correspond, in order, to process_request,
        process_view, process_template_response, process_response and
        process_exception (the ``*_called`` flags set by TestMiddleware).
        """
        self.assertEqual(middleware.process_request_called, request)
        self.assertEqual(middleware.process_view_called, view)
        self.assertEqual(middleware.process_template_response_called, template_response)
        self.assertEqual(middleware.process_response_called, response)
        self.assertEqual(middleware.process_exception_called, exception)
class MiddlewareTests(BaseMiddlewareExceptionTest):
    """Exercise well-behaved sample middleware against several view outcomes.

    Each test installs a plain TestMiddleware before and after the
    middleware under test, performs one request, and then asserts — via
    ``assert_middleware_usage(mw, request, view, template_response,
    response, exception)`` — exactly which hooks ran on each of the three
    middleware objects.
    """

    def test_process_request_middleware(self):
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)

    def test_process_view_middleware(self):
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)

    def test_process_response_middleware(self):
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)

    def test_process_template_response_middleware(self):
        pre_middleware = TestMiddleware()
        middleware = TemplateResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/template_response/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, True, True, False)
        self.assert_middleware_usage(middleware, True, True, True, True, False)
        self.assert_middleware_usage(post_middleware, True, True, True, True, False)

    def test_process_exception_middleware(self):
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)

    def test_process_request_middleware_not_found(self):
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)

    def test_process_view_middleware_not_found(self):
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)

    def test_process_template_response_middleware_not_found(self):
        pre_middleware = TestMiddleware()
        middleware = TemplateResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)

    def test_process_response_middleware_not_found(self):
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)

    def test_process_exception_middleware_not_found(self):
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)

    def test_process_request_middleware_exception(self):
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)

    def test_process_view_middleware_exception(self):
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)

    def test_process_response_middleware_exception(self):
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view'], Exception())
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)

    def test_process_exception_middleware_exception(self):
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)

    def test_process_request_middleware_null_view(self):
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)

    def test_process_view_middleware_null_view(self):
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)

    def test_process_response_middleware_null_view(self):
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead.",
        ],
            ValueError())
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)

    def test_process_exception_middleware_null_view(self):
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead."
        ],
            ValueError())
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, True, False, True, False)

    def test_process_request_middleware_permission_denied(self):
        pre_middleware = TestMiddleware()
        middleware = RequestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
        self.assert_middleware_usage(middleware, True, False, False, True, False)
        self.assert_middleware_usage(post_middleware, False, False, False, True, False)

    def test_process_view_middleware_permission_denied(self):
        pre_middleware = TestMiddleware()
        middleware = ViewMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, False)
        self.assert_middleware_usage(post_middleware, True, False, False, True, False)

    def test_process_response_middleware_permission_denied(self):
        pre_middleware = TestMiddleware()
        middleware = ResponseMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)

    def test_process_exception_middleware_permission_denied(self):
        pre_middleware = TestMiddleware()
        middleware = ExceptionMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(middleware)
        self._add_middleware(pre_middleware)
        self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
        self.assert_middleware_usage(middleware, True, True, False, True, True)
        self.assert_middleware_usage(post_middleware, True, True, False, True, True)

    def test_process_template_response_error(self):
        middleware = TestMiddleware()
        self._add_middleware(middleware)
        self.assert_exceptions_handled('/middleware_exceptions/template_response_error/', [])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(middleware, True, True, True, True, False)

    @override_settings(
        MIDDLEWARE_CLASSES=['middleware_exceptions.middleware.ProcessExceptionMiddleware'],
    )
    def test_exception_in_render_passed_to_process_exception(self):
        # Repopulate the list of middlewares since it's already been populated
        # by setUp() before the MIDDLEWARE_CLASSES setting got overridden
        self.client.handler.load_middleware()
        response = self.client.get('/middleware_exceptions/exception_in_render/')
        self.assertEqual(response.content, b'Exception caught')
class BadMiddlewareTests(BaseMiddlewareExceptionTest):
def test_process_request_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_template_response_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadTemplateResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/template_response/', ['Test Template Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, True, True, False)
self.assert_middleware_usage(post_middleware, True, True, True, True, False)
def test_process_response_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', ['Test Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_exception_bad_middleware(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', [])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_request_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_not_found(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/not_found/', ['Test Exception Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_request_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view', 'Test Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_exception(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/error/', ['Test Exception Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_request_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead.",
'Test Response Exception'
])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_exception_bad_middleware_null_view(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead."
],
ValueError())
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_request_bad_middleware_permission_denied(self):
pre_middleware = TestMiddleware()
bad_middleware = BadRequestMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Request Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
self.assert_middleware_usage(bad_middleware, True, False, False, True, False)
self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_bad_middleware_permission_denied(self):
pre_middleware = TestMiddleware()
bad_middleware = BadViewMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test View Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_bad_middleware_permission_denied(self):
pre_middleware = TestMiddleware()
bad_middleware = BadResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Response Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, True)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_exception_bad_middleware_permission_denied(self):
pre_middleware = TestMiddleware()
bad_middleware = BadExceptionMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', ['Test Exception Exception'])
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
self.assert_middleware_usage(bad_middleware, True, True, False, True, True)
self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_response_no_response_middleware(self):
pre_middleware = TestMiddleware()
middleware = NoResponseMiddleware()
post_middleware = TestMiddleware()
self._add_middleware(post_middleware)
self._add_middleware(middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/view/', [
"NoResponseMiddleware.process_response didn't return an HttpResponse object. It returned None instead."
],
ValueError())
# Check that the right middleware methods have been invoked
self.assert_middleware_usage(pre_middleware, True, True, False, False, False)
self.assert_middleware_usage(middleware, True, True, False, True, False)
self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_template_response_no_response_middleware(self):
    """A middleware whose process_template_response returns None must
    produce a descriptive ValueError instead of failing silently."""
    pre_middleware = TestMiddleware()
    middleware = NoTemplateResponseMiddleware()
    post_middleware = TestMiddleware()
    # _add_middleware prepends, so the effective order is
    # pre_middleware -> middleware -> post_middleware.
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/template_response/', [
        "NoTemplateResponseMiddleware.process_template_response didn't return an HttpResponse object. It returned None instead."
    ],
        ValueError())
    # Check that the right middleware methods have been invoked
    # (flags: request, view, template_response, response, exception).
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, True, True, False)
    self.assert_middleware_usage(post_middleware, True, True, True, True, False)
_missing = object()
@override_settings(ROOT_URLCONF='middleware_exceptions.urls')
class RootUrlconfTests(SimpleTestCase):
    """Tests for request handling when ROOT_URLCONF is misconfigured."""

    @override_settings(ROOT_URLCONF=None)
    def test_missing_root_urlconf(self):
        """A request made while ROOT_URLCONF is entirely absent should
        surface as an AttributeError rather than being swallowed."""
        # Removing ROOT_URLCONF is safe, as override_settings will restore
        # the previously defined settings.
        del settings.ROOT_URLCONF
        self.assertRaises(AttributeError, self.client.get, "/middleware_exceptions/view/")
class MyMiddleware(object):
    """Middleware stub that opts out of the stack at construction time."""

    def __init__(self):
        # Raising here tells the middleware loader to skip this middleware
        # for the lifetime of the process.
        raise MiddlewareNotUsed

    def process_request(self, request):
        # Never reached: __init__ always raises before this hook can run.
        return None
class MyMiddlewareWithExceptionMessage(object):
    """Middleware stub that opts out of the stack with an explanatory message."""

    def __init__(self):
        # Same opt-out as MyMiddleware, but carrying a reason string that
        # the loader is expected to log.
        raise MiddlewareNotUsed('spam eggs')

    def process_request(self, request):
        # Never reached: __init__ always raises before this hook can run.
        return None
@override_settings(
    DEBUG=True,
    ROOT_URLCONF='middleware_exceptions.urls',
)
class MiddlewareNotUsedTests(SimpleTestCase):
    """Tests for how MiddlewareNotUsed raised in __init__ is logged."""

    # shared factory for building requests outside the test client
    rf = RequestFactory()

    def test_raise_exception(self):
        """Calling the middleware directly propagates MiddlewareNotUsed."""
        request = self.rf.get('middleware_exceptions/view/')
        with self.assertRaises(MiddlewareNotUsed):
            MyMiddleware().process_request(request)

    @override_settings(MIDDLEWARE_CLASSES=[
        'middleware_exceptions.tests.MyMiddleware',
    ])
    def test_log(self):
        """With DEBUG on, a skipped middleware is logged once by dotted path."""
        with patch_logger('django.request', 'debug') as calls:
            self.client.get('/middleware_exceptions/view/')
        self.assertEqual(len(calls), 1)
        self.assertEqual(
            calls[0],
            "MiddlewareNotUsed: 'middleware_exceptions.tests.MyMiddleware'"
        )

    @override_settings(MIDDLEWARE_CLASSES=[
        'middleware_exceptions.tests.MyMiddlewareWithExceptionMessage',
    ])
    def test_log_custom_message(self):
        """The message passed to MiddlewareNotUsed is included in the log."""
        with patch_logger('django.request', 'debug') as calls:
            self.client.get('/middleware_exceptions/view/')
        self.assertEqual(len(calls), 1)
        self.assertEqual(
            calls[0],
            "MiddlewareNotUsed('middleware_exceptions.tests.MyMiddlewareWithExceptionMessage'): spam eggs"
        )

    @override_settings(DEBUG=False)
    def test_do_not_log_when_debug_is_false(self):
        """With DEBUG off, skipped middleware produces no debug log entries."""
        with patch_logger('django.request', 'debug') as calls:
            self.client.get('/middleware_exceptions/view/')
        self.assertEqual(len(calls), 0)
|
bsd-3-clause
|
Serg09/socorro
|
socorro/external/hbase/connection_context.py
|
11
|
8757
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import threading
import contextlib
from socorro.external.hbase import hbase_client
from configman.config_manager import RequiredConfig
from configman import Namespace
class HBaseSingleConnectionContext(RequiredConfig):
    """a configman compliant class for setup of HBase connections

    DO NOT SHARE HBASE CONNECTIONS BETWEEN THREADS

    Every call to ``connection()`` (or use of the instance as a context
    manager via ``__call__``) opens a brand new Thrift connection; nothing
    is cached at this level.  See ``HBaseConnectionContextPooled`` for a
    pooling variant."""
    #--------------------------------------------------------------------------
    # configman parameter definition section
    # here we're setting up the minimal parameters required for connecting
    required_config = Namespace()
    required_config.add_option(
        'number_of_retries',
        doc='Max. number of retries when fetching from hbaseClient',
        default=0,
        reference_value_from='resource.hbase'
    )
    required_config.add_option(
        'hbase_host',
        doc='Host to HBase server',
        default='localhost',
        reference_value_from='resource.hbase',
    )
    required_config.add_option(
        'hbase_port',
        doc='Port to HBase server',
        default=9090,
        reference_value_from='resource.hbase',
    )
    required_config.add_option(
        'hbase_timeout',
        doc='timeout in milliseconds for an HBase connection',
        default=5000,
        reference_value_from='resource.hbase',
    )
    required_config.add_option(
        'temporary_file_system_storage_path',
        doc='a local filesystem path where dumps temporarily '
            'during processing',
        default='/home/socorro/temp',
        reference_value_from='resource.hbase',
    )
    required_config.add_option(
        'dump_file_suffix',
        doc='the suffix used to identify a dump file (for use in temp files)',
        default='.dump',
        reference_value_from='resource.hbase',
    )

    #--------------------------------------------------------------------------
    def __init__(self, config, local_config=None):
        """Initialize the parts needed to start making database connections

        parameters:
            config - the complete config for the app.  If a real app, this
                     would be where a logger or other resources could be
                     found.
            local_config - this is the namespace within the complete config
                           where the actual database parameters are found"""
        super(HBaseSingleConnectionContext, self).__init__()
        self.config = config
        if local_config is None:
            local_config = config
        # Open (and immediately close) a throwaway connection purely to
        # discover which Thrift exception classes the client library
        # considers retryable ("operational") errors.
        dummy_connection = hbase_client.HBaseConnectionForCrashReports(
            local_config.hbase_host,
            local_config.hbase_port,
            local_config.hbase_timeout,
            logger=self.config.logger
        )
        dummy_connection.close()
        self.operational_exceptions = \
            dummy_connection.hbaseThriftExceptions
        # a missing connection is treated as operational (retryable) too
        self.operational_exceptions += \
            (hbase_client.NoConnectionException,)
        # no exceptions whose classification depends on their message
        self.conditional_exceptions = ()

    #--------------------------------------------------------------------------
    def connection(self, name_unused=None):
        """return a new database connection

        parameters:
            name_unused - optional named connections.  Used by the
                          derived class
        """
        #self.config.logger.debug('creating new HBase connection')
        return hbase_client.HBaseConnectionForCrashReports(
            self.config.hbase_host,
            self.config.hbase_port,
            self.config.hbase_timeout,
            logger=self.config.logger
        )

    #--------------------------------------------------------------------------
    @contextlib.contextmanager
    def __call__(self, name=None):
        """returns a database connection wrapped in a contextmanager.

        The context manager will assure that the connection is closed but will
        not try to commit or rollback lingering transactions.

        parameters:
            name - an optional name for the database connection"""
        conn = self.connection(name)
        try:
            #self.config.logger.debug('connection HBase acquired')
            yield conn
        finally:
            # always close, even if the body raised
            self.close_connection(conn)

    #--------------------------------------------------------------------------
    def close_connection(self, connection, force=False):
        """close the connection passed in.

        This function exists to allow derived classes to override the closing
        behavior.

        parameters:
            connection - the database connection object
            force - unused boolean to force closure; used in derived classes
        """
        #self.config.logger.debug('connection HBase closed')
        connection.close()

    #--------------------------------------------------------------------------
    def close(self):
        """close any pooled or cached connections.  Since this base class
        object does no caching, there is no implementation required.  Derived
        classes may implement it."""
        pass

    #--------------------------------------------------------------------------
    def is_operational_exception(self, msg):
        """return True if a conditional exception is actually an operational
        error.  Return False if it's a genuine error that should probably be
        raised and propagate up.

        Some conditional exceptions might be actually be some form of
        operational exception "labelled" wrong by the psycopg2 code error
        handler.
        """
        # this context declares no conditional exceptions, so nothing is
        # ever reclassified
        return False

    #--------------------------------------------------------------------------
    def force_reconnect(self):
        # no cached state to discard in the single-connection case
        pass
#==============================================================================
class HBaseConnectionContextPooled(HBaseSingleConnectionContext):
    """a configman compliant class that pools HBase database connections

    Connections are cached by name (defaulting to the identity of the
    executing thread) and reused across transaction contexts."""
    #--------------------------------------------------------------------------
    def __init__(self, config, local_config=None):
        """see HBaseSingleConnectionContext.__init__ for the parameters"""
        super(HBaseConnectionContextPooled, self).__init__(config,
                                                           local_config)
        #self.config.logger.debug("HBaseConnectionContextPooled - "
        #                         "setting up connection pool")
        # mapping of connection name -> live HBase connection
        self.pool = {}

    #--------------------------------------------------------------------------
    def connection(self, name=None):
        """return a named connection.

        This function will return a named connection by either finding one
        in its pool by the name or creating a new one.  If no name is given,
        it will use the name of the current executing thread as the name of
        the connection.

        parameters:
            name - a name as a string
        """
        if not name:
            name = self.config.executor_identity()
        if name in self.pool:
            #self.config.logger.debug('connection: %s', name)
            return self.pool[name]
        self.pool[name] = \
            super(HBaseConnectionContextPooled, self).connection(name)
        return self.pool[name]

    #--------------------------------------------------------------------------
    def close_connection(self, connection, force=False):
        """overriding the baseclass function, this routine will decline to
        close a connection at the end of a transaction context.  This allows
        for reuse of connections."""
        if force:
            try:
                (super(HBaseConnectionContextPooled, self)
                    .close_connection(connection, force))
            except self.operational_exceptions:
                self.config.logger.error('HBaseConnectionContextPooled - '
                                         'failed closing')
            # Evict the connection from the pool only if it is actually
            # pooled.  The previous implementation deleted whichever key
            # the loop happened to stop on, so a non-pooled connection
            # evicted an unrelated live connection (and an empty pool
            # raised NameError).
            for name, conn in list(self.pool.items()):
                if conn is connection:
                    del self.pool[name]
                    break

    #--------------------------------------------------------------------------
    def close(self):
        """close all pooled connections"""
        self.config.logger.debug("HBasePooled - "
                                 "shutting down connection pool")
        for name, conn in list(self.pool.items()):
            conn.close()
            self.config.logger.debug("HBasePooled - connection %s closed"
                                     % name)
        # forget the closed connections so a later connection() call cannot
        # hand out a connection that has already been closed
        self.pool = {}

    #--------------------------------------------------------------------------
    def force_reconnect(self):
        # pooling variant does not implement forced reconnection
        pass
|
mpl-2.0
|
kenshay/ImageScript
|
ProgramData/SystemFiles/opencv/sources/samples/python2/hist.py
|
9
|
3575
|
#!/usr/bin/env python
''' This is a sample for histogram plotting for RGB images and grayscale images for better understanding of colour distribution
Benefit : Learn how to draw histogram of images
Get familier with cv2.calcHist, cv2.equalizeHist,cv2.normalize and some drawing functions
Level : Beginner or Intermediate
Functions : 1) hist_curve : returns histogram of an image drawn as curves
2) hist_lines : return histogram of an image drawn as bins ( only for grayscale images )
Usage : python hist.py <image_file>
Abid Rahman 3/14/12 debug Gary Bradski
'''
import cv2
import numpy as np
bins = np.arange(256).reshape(256,1)
def hist_curve(im):
    """Render the histogram of ``im`` as poly-line curves on a 300x256 canvas.

    A 2-D (grayscale) input gets a single white curve; a 3-channel input
    gets one curve per channel in blue, green and red.  The canvas is
    flipped vertically so the origin sits at the bottom-left.
    """
    canvas = np.zeros((300, 256, 3))
    if len(im.shape) == 2:
        channel_colors = [(255, 255, 255)]
    elif im.shape[2] == 3:
        channel_colors = [(255, 0, 0), (0, 255, 0), (0, 0, 255)]
    for channel, colour in enumerate(channel_colors):
        counts = cv2.calcHist([im], [channel], None, [256], [0, 256])
        # scale the bin counts into the canvas height range [0, 255]
        cv2.normalize(counts, counts, 0, 255, cv2.NORM_MINMAX)
        rounded = np.int32(np.around(counts))
        # pair each bin index (module-level `bins`) with its scaled count
        polyline = np.int32(np.column_stack((bins, rounded)))
        cv2.polylines(canvas, [polyline], False, colour)
    return np.flipud(canvas)
def hist_lines(im):
    """Return the histogram of a grayscale image drawn as vertical bars
    on a 300x256 canvas (flipped so the origin is bottom-left).  Colour
    input is converted to grayscale first (with a warning)."""
    h = np.zeros((300,256,3))
    if len(im.shape)!=2:
        print "hist_lines applicable only for grayscale images"
        #print "so converting image to grayscale for representation"
        im = cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)
    hist_item = cv2.calcHist([im],[0],None,[256],[0,256])
    # scale bin counts into the canvas height range [0, 255]
    cv2.normalize(hist_item,hist_item,0,255,cv2.NORM_MINMAX)
    hist=np.int32(np.around(hist_item))
    # one vertical line per bin, height proportional to the bin count
    for x,y in enumerate(hist):
        cv2.line(h,(x,0),(x,y),(255,255,255))
    y = np.flipud(h)
    return y
if __name__ == '__main__':
    import sys

    # image path from the command line, falling back to the sample image
    if len(sys.argv)>1:
        fname = sys.argv[1]
    else :
        fname = '../data/lena.jpg'
        print "usage : python hist.py <image_file>"

    im = cv2.imread(fname)

    if im is None:
        print 'Failed to load image file:', fname
        sys.exit(1)

    gray = cv2.cvtColor(im,cv2.COLOR_BGR2GRAY)

    print ''' Histogram plotting \n
    Keymap :\n
    a - show histogram for color image in curve mode \n
    b - show histogram in bin mode \n
    c - show equalized histogram (always in bin mode) \n
    d - show histogram for color image in curve mode \n
    e - show histogram for a normalized image in curve mode \n
    Esc - exit \n
    '''

    cv2.imshow('image',im)
    # interactive loop: each key redraws the histogram window in a
    # different mode until Esc is pressed
    while True:
        k = cv2.waitKey(0)&0xFF
        if k == ord('a'):
            curve = hist_curve(im)
            cv2.imshow('histogram',curve)
            cv2.imshow('image',im)
            print 'a'
        elif k == ord('b'):
            print 'b'
            lines = hist_lines(im)
            cv2.imshow('histogram',lines)
            cv2.imshow('image',gray)
        elif k == ord('c'):
            print 'c'
            equ = cv2.equalizeHist(gray)
            lines = hist_lines(equ)
            cv2.imshow('histogram',lines)
            cv2.imshow('image',equ)
        elif k == ord('d'):
            print 'd'
            curve = hist_curve(gray)
            cv2.imshow('histogram',curve)
            cv2.imshow('image',gray)
        elif k == ord('e'):
            print 'e'
            norm = cv2.normalize(gray,alpha = 0,beta = 255,norm_type = cv2.NORM_MINMAX)
            lines = hist_lines(norm)
            cv2.imshow('histogram',lines)
            cv2.imshow('image',norm)
        elif k == 27:
            print 'ESC'
            cv2.destroyAllWindows()
            break
    cv2.destroyAllWindows()
|
gpl-3.0
|
pauloschilling/sentry
|
tests/sentry/web/frontend/test_organization_home.py
|
13
|
2199
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import AuthProvider
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationHomePermissionTest(PermissionTestCase):
    """Access-control matrix for the organization home page."""

    def setUp(self):
        super(OrganizationHomePermissionTest, self).setUp()
        # URL under test, resolved once for all assertions below
        self.path = reverse('sentry-organization-home', args=[self.organization.slug])

    def test_teamless_member_can_load(self):
        # membership alone (no team) is enough to view the page
        self.assert_teamless_member_can_access(self.path)

    def test_org_member_can_load(self):
        self.assert_org_member_can_access(self.path)

    def test_non_member_cannot_load(self):
        self.assert_non_member_cannot_access(self.path)
class OrganizationHomeTest(TestCase):
    """Rendering behavior of the organization home page."""

    # this test isn't really specific to OrganizationHome, but it needs to
    # guarantee this behavior so we stuff it here
    def test_redirects_unlinked_sso_member(self):
        """A member of an SSO-enabled org without a linked identity is
        redirected to the link-identity flow instead of the home page."""
        user = self.create_user('not-a-superuser@example.com')
        organization = self.create_organization(name='foo', owner=user)
        team = self.create_team(organization=organization)
        # created for fixture completeness; the view is what is under test
        project = self.create_project(team=team)
        auth_provider = AuthProvider.objects.create(organization=organization)
        path = reverse('sentry-organization-home', args=[organization.slug])

        self.login_as(user)

        resp = self.client.get(path)

        assert resp.status_code == 302
        assert resp['Location'] == 'http://testserver{}'.format(
            reverse('sentry-auth-link-identity', args=[organization.slug]),
        )

    def test_renders_with_context(self):
        """A normal member gets a 200 with the expected template and
        context (organization plus its teams/projects)."""
        organization = self.create_organization(name='foo', owner=self.user)
        team = self.create_team(organization=organization)
        project = self.create_project(team=team)
        path = reverse('sentry-organization-home', args=[organization.slug])

        self.login_as(self.user)

        resp = self.client.get(path)

        assert resp.status_code == 200
        self.assertTemplateUsed(resp, 'sentry/organization-home.html')
        assert resp.context['organization'] == organization
        assert resp.context['active_teams'] == [(team, [project])]
|
bsd-3-clause
|
CoDEmanX/ArangoDB
|
3rdParty/V8-4.3.61/third_party/python_26/Lib/uuid.py
|
59
|
20331
|
r"""UUID objects (universally unique identifiers) according to RFC 4122.
This module provides immutable UUID objects (class UUID) and the functions
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
UUIDs as specified in RFC 4122.
If all you want is a unique ID, you should probably call uuid1() or uuid4().
Note that uuid1() may compromise privacy since it creates a UUID containing
the computer's network address. uuid4() creates a random UUID.
Typical usage:
>>> import uuid
# make a UUID based on the host ID and current time
>>> uuid.uuid1()
UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
# make a UUID using an MD5 hash of a namespace UUID and a name
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
# make a random UUID
>>> uuid.uuid4()
UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
# make a UUID using a SHA-1 hash of a namespace UUID and a name
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
# make a UUID from a string of hex digits (braces and hyphens ignored)
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
# convert a UUID to a string of hex digits in standard form
>>> str(x)
'00010203-0405-0607-0809-0a0b0c0d0e0f'
# get the raw 16 bytes of the UUID
>>> x.bytes
'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
# make a UUID from a 16-byte string
>>> uuid.UUID(bytes=x.bytes)
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
"""
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
'reserved for NCS compatibility', 'specified in RFC 4122',
'reserved for Microsoft compatibility', 'reserved for future definition']
class UUID(object):
"""Instances of the UUID class represent UUIDs as specified in RFC 4122.
UUID objects are immutable, hashable, and usable as dictionary keys.
Converting a UUID to a string with str() yields something in the form
'12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts
five possible forms: a similar string of hexadecimal digits, or a tuple
of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
48-bit values respectively) as an argument named 'fields', or a string
of 16 bytes (with all the integer fields in big-endian order) as an
argument named 'bytes', or a string of 16 bytes (with the first three
fields in little-endian order) as an argument named 'bytes_le', or a
single 128-bit integer as an argument named 'int'.
UUIDs have these read-only attributes:
bytes the UUID as a 16-byte string (containing the six
integer fields in big-endian byte order)
bytes_le the UUID as a 16-byte string (with time_low, time_mid,
and time_hi_version in little-endian byte order)
fields a tuple of the six integer fields of the UUID,
which are also available as six individual attributes
and two derived attributes:
time_low the first 32 bits of the UUID
time_mid the next 16 bits of the UUID
time_hi_version the next 16 bits of the UUID
clock_seq_hi_variant the next 8 bits of the UUID
clock_seq_low the next 8 bits of the UUID
node the last 48 bits of the UUID
time the 60-bit timestamp
clock_seq the 14-bit sequence number
hex the UUID as a 32-character hexadecimal string
int the UUID as a 128-bit integer
urn the UUID as a URN as specified in RFC 4122
variant the UUID variant (one of the constants RESERVED_NCS,
RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)
version the UUID version number (1 through 5, meaningful only
when the variant is RFC_4122)
"""
def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None,
int=None, version=None):
r"""Create a UUID from either a string of 32 hexadecimal digits,
a string of 16 bytes as the 'bytes' argument, a string of 16 bytes
in little-endian order as the 'bytes_le' argument, a tuple of six
integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
the 'fields' argument, or a single 128-bit integer as the 'int'
argument. When a string of hex digits is given, curly braces,
hyphens, and a URN prefix are all optional. For example, these
expressions all yield the same UUID:
UUID('{12345678-1234-5678-1234-567812345678}')
UUID('12345678123456781234567812345678')
UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
UUID(bytes='\x12\x34\x56\x78'*4)
UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' +
'\x12\x34\x56\x78\x12\x34\x56\x78')
UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
UUID(int=0x12345678123456781234567812345678)
Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must
be given. The 'version' argument is optional; if given, the resulting
UUID will have its variant and version set according to RFC 4122,
overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'.
"""
if [hex, bytes, bytes_le, fields, int].count(None) != 4:
raise TypeError('need one of hex, bytes, bytes_le, fields, or int')
if hex is not None:
hex = hex.replace('urn:', '').replace('uuid:', '')
hex = hex.strip('{}').replace('-', '')
if len(hex) != 32:
raise ValueError('badly formed hexadecimal UUID string')
int = long(hex, 16)
if bytes_le is not None:
if len(bytes_le) != 16:
raise ValueError('bytes_le is not a 16-char string')
bytes = (bytes_le[3] + bytes_le[2] + bytes_le[1] + bytes_le[0] +
bytes_le[5] + bytes_le[4] + bytes_le[7] + bytes_le[6] +
bytes_le[8:])
if bytes is not None:
if len(bytes) != 16:
raise ValueError('bytes is not a 16-char string')
int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
if fields is not None:
if len(fields) != 6:
raise ValueError('fields is not a 6-tuple')
(time_low, time_mid, time_hi_version,
clock_seq_hi_variant, clock_seq_low, node) = fields
if not 0 <= time_low < 1<<32L:
raise ValueError('field 1 out of range (need a 32-bit value)')
if not 0 <= time_mid < 1<<16L:
raise ValueError('field 2 out of range (need a 16-bit value)')
if not 0 <= time_hi_version < 1<<16L:
raise ValueError('field 3 out of range (need a 16-bit value)')
if not 0 <= clock_seq_hi_variant < 1<<8L:
raise ValueError('field 4 out of range (need an 8-bit value)')
if not 0 <= clock_seq_low < 1<<8L:
raise ValueError('field 5 out of range (need an 8-bit value)')
if not 0 <= node < 1<<48L:
raise ValueError('field 6 out of range (need a 48-bit value)')
clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low
int = ((time_low << 96L) | (time_mid << 80L) |
(time_hi_version << 64L) | (clock_seq << 48L) | node)
if int is not None:
if not 0 <= int < 1<<128L:
raise ValueError('int is out of range (need a 128-bit value)')
if version is not None:
if not 1 <= version <= 5:
raise ValueError('illegal version number')
# Set the variant to RFC 4122.
int &= ~(0xc000 << 48L)
int |= 0x8000 << 48L
# Set the version number.
int &= ~(0xf000 << 64L)
int |= version << 76L
self.__dict__['int'] = int
def __cmp__(self, other):
if isinstance(other, UUID):
return cmp(self.int, other.int)
return NotImplemented
def __hash__(self):
return hash(self.int)
def __int__(self):
return self.int
def __repr__(self):
return 'UUID(%r)' % str(self)
def __setattr__(self, name, value):
raise TypeError('UUID objects are immutable')
def __str__(self):
hex = '%032x' % self.int
return '%s-%s-%s-%s-%s' % (
hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:])
def get_bytes(self):
bytes = ''
for shift in range(0, 128, 8):
bytes = chr((self.int >> shift) & 0xff) + bytes
return bytes
bytes = property(get_bytes)
def get_bytes_le(self):
bytes = self.bytes
return (bytes[3] + bytes[2] + bytes[1] + bytes[0] +
bytes[5] + bytes[4] + bytes[7] + bytes[6] + bytes[8:])
bytes_le = property(get_bytes_le)
def get_fields(self):
return (self.time_low, self.time_mid, self.time_hi_version,
self.clock_seq_hi_variant, self.clock_seq_low, self.node)
fields = property(get_fields)
def get_time_low(self):
return self.int >> 96L
time_low = property(get_time_low)
def get_time_mid(self):
return (self.int >> 80L) & 0xffff
time_mid = property(get_time_mid)
def get_time_hi_version(self):
return (self.int >> 64L) & 0xffff
time_hi_version = property(get_time_hi_version)
def get_clock_seq_hi_variant(self):
return (self.int >> 56L) & 0xff
clock_seq_hi_variant = property(get_clock_seq_hi_variant)
def get_clock_seq_low(self):
return (self.int >> 48L) & 0xff
clock_seq_low = property(get_clock_seq_low)
def get_time(self):
return (((self.time_hi_version & 0x0fffL) << 48L) |
(self.time_mid << 32L) | self.time_low)
time = property(get_time)
def get_clock_seq(self):
return (((self.clock_seq_hi_variant & 0x3fL) << 8L) |
self.clock_seq_low)
clock_seq = property(get_clock_seq)
def get_node(self):
return self.int & 0xffffffffffff
node = property(get_node)
def get_hex(self):
return '%032x' % self.int
hex = property(get_hex)
def get_urn(self):
return 'urn:uuid:' + str(self)
urn = property(get_urn)
def get_variant(self):
if not self.int & (0x8000 << 48L):
return RESERVED_NCS
elif not self.int & (0x4000 << 48L):
return RFC_4122
elif not self.int & (0x2000 << 48L):
return RESERVED_MICROSOFT
else:
return RESERVED_FUTURE
variant = property(get_variant)
def get_version(self):
# The version bits are only meaningful for RFC 4122 UUIDs.
if self.variant == RFC_4122:
return int((self.int >> 76L) & 0xf)
version = property(get_version)
def _find_mac(command, args, hw_identifiers, get_index):
import os
for dir in ['', '/sbin/', '/usr/sbin']:
executable = os.path.join(dir, command)
if not os.path.exists(executable):
continue
try:
# LC_ALL to get English output, 2>/dev/null to
# prevent output on stderr
cmd = 'LC_ALL=C %s %s 2>/dev/null' % (executable, args)
pipe = os.popen(cmd)
except IOError:
continue
for line in pipe:
words = line.lower().split()
for i in range(len(words)):
if words[i] in hw_identifiers:
return int(words[get_index(i)].replace(':', ''), 16)
return None
def _ifconfig_getnode():
"""Get the hardware address on Unix by running ifconfig."""
# This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes.
for args in ('', '-a', '-av'):
mac = _find_mac('ifconfig', args, ['hwaddr', 'ether'], lambda i: i+1)
if mac:
return mac
import socket
ip_addr = socket.gethostbyname(socket.gethostname())
# Try getting the MAC addr from arp based on our IP address (Solaris).
mac = _find_mac('arp', '-an', [ip_addr], lambda i: -1)
if mac:
return mac
# This might work on HP-UX.
mac = _find_mac('lanscan', '-ai', ['lan0'], lambda i: 0)
if mac:
return mac
return None
def _ipconfig_getnode():
"""Get the hardware address on Windows by running ipconfig.exe."""
import os, re
dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
try:
import ctypes
buffer = ctypes.create_string_buffer(300)
ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300)
dirs.insert(0, buffer.value.decode('mbcs'))
except:
pass
for dir in dirs:
try:
pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all')
except IOError:
continue
for line in pipe:
value = line.split(':')[-1].strip().lower()
if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
return int(value.replace('-', ''), 16)
def _netbios_getnode():
"""Get the hardware address on Windows using NetBIOS calls.
See http://support.microsoft.com/kb/118623 for details."""
import win32wnet, netbios
ncb = netbios.NCB()
ncb.Command = netbios.NCBENUM
ncb.Buffer = adapters = netbios.LANA_ENUM()
adapters._pack()
if win32wnet.Netbios(ncb) != 0:
return
adapters._unpack()
for i in range(adapters.length):
ncb.Reset()
ncb.Command = netbios.NCBRESET
ncb.Lana_num = ord(adapters.lana[i])
if win32wnet.Netbios(ncb) != 0:
continue
ncb.Reset()
ncb.Command = netbios.NCBASTAT
ncb.Lana_num = ord(adapters.lana[i])
ncb.Callname = '*'.ljust(16)
ncb.Buffer = status = netbios.ADAPTER_STATUS()
if win32wnet.Netbios(ncb) != 0:
continue
status._unpack()
bytes = map(ord, status.adapter_address)
return ((bytes[0]<<40L) + (bytes[1]<<32L) + (bytes[2]<<24L) +
(bytes[3]<<16L) + (bytes[4]<<8L) + bytes[5])
# Thanks to Thomas Heller for ctypes and for his help with its use here.
# If ctypes is available, use it to find system routines for UUID generation.
_uuid_generate_random = _uuid_generate_time = _UuidCreate = None
try:
import ctypes, ctypes.util
# The uuid_generate_* routines are provided by libuuid on at least
# Linux and FreeBSD, and provided by libc on Mac OS X.
for libname in ['uuid', 'c']:
try:
lib = ctypes.CDLL(ctypes.util.find_library(libname))
except:
continue
if hasattr(lib, 'uuid_generate_random'):
_uuid_generate_random = lib.uuid_generate_random
if hasattr(lib, 'uuid_generate_time'):
_uuid_generate_time = lib.uuid_generate_time
# On Windows prior to 2000, UuidCreate gives a UUID containing the
# hardware address. On Windows 2000 and later, UuidCreate makes a
# random UUID and UuidCreateSequential gives a UUID containing the
# hardware address. These routines are provided by the RPC runtime.
# NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last
# 6 bytes returned by UuidCreateSequential are fixed, they don't appear
# to bear any relationship to the MAC address of any network device
# on the box.
try:
lib = ctypes.windll.rpcrt4
except:
lib = None
_UuidCreate = getattr(lib, 'UuidCreateSequential',
getattr(lib, 'UuidCreate', None))
except:
pass
def _unixdll_getnode():
"""Get the hardware address on Unix using ctypes."""
_buffer = ctypes.create_string_buffer(16)
_uuid_generate_time(_buffer)
return UUID(bytes=_buffer.raw).node
def _windll_getnode():
"""Get the hardware address on Windows using ctypes."""
_buffer = ctypes.create_string_buffer(16)
if _UuidCreate(_buffer) == 0:
return UUID(bytes=_buffer.raw).node
def _random_getnode():
"""Get a random node ID, with eighth bit set as suggested by RFC 4122."""
import random
return random.randrange(0, 1<<48L) | 0x010000000000L
_node = None
def getnode():
"""Get the hardware address as a 48-bit positive integer.
The first time this runs, it may launch a separate program, which could
be quite slow. If all attempts to obtain the hardware address fail, we
choose a random 48-bit number with its eighth bit set to 1 as recommended
in RFC 4122.
"""
global _node
if _node is not None:
return _node
import sys
if sys.platform == 'win32':
getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode]
else:
getters = [_unixdll_getnode, _ifconfig_getnode]
for getter in getters + [_random_getnode]:
try:
_node = getter()
except:
continue
if _node is not None:
return _node
_last_timestamp = None
def uuid1(node=None, clock_seq=None):
"""Generate a UUID from a host ID, sequence number, and the current time.
If 'node' is not given, getnode() is used to obtain the hardware
address. If 'clock_seq' is given, it is used as the sequence number;
otherwise a random 14-bit sequence number is chosen."""
# When the system provides a version-1 UUID generator, use it (but don't
# use UuidCreate here because its UUIDs don't conform to RFC 4122).
if _uuid_generate_time and node is clock_seq is None:
_buffer = ctypes.create_string_buffer(16)
_uuid_generate_time(_buffer)
return UUID(bytes=_buffer.raw)
global _last_timestamp
import time
nanoseconds = int(time.time() * 1e9)
# 0x01b21dd213814000 is the number of 100-ns intervals between the
# UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
timestamp = int(nanoseconds/100) + 0x01b21dd213814000L
if timestamp <= _last_timestamp:
timestamp = _last_timestamp + 1
_last_timestamp = timestamp
if clock_seq is None:
import random
clock_seq = random.randrange(1<<14L) # instead of stable storage
time_low = timestamp & 0xffffffffL
time_mid = (timestamp >> 32L) & 0xffffL
time_hi_version = (timestamp >> 48L) & 0x0fffL
clock_seq_low = clock_seq & 0xffL
clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL
if node is None:
node = getnode()
return UUID(fields=(time_low, time_mid, time_hi_version,
clock_seq_hi_variant, clock_seq_low, node), version=1)
def uuid3(namespace, name):
    """Generate a UUID from the MD5 hash of a namespace UUID and a name."""
    import hashlib
    # hash the namespace's raw 16 bytes followed by the name, per RFC 4122
    digest = hashlib.md5(namespace.bytes + name).digest()
    return UUID(bytes=digest[:16], version=3)
def uuid4():
"""Generate a random UUID."""
# When the system provides a version-4 UUID generator, use it.
if _uuid_generate_random:
_buffer = ctypes.create_string_buffer(16)
_uuid_generate_random(_buffer)
return UUID(bytes=_buffer.raw)
# Otherwise, get randomness from urandom or the 'random' module.
try:
import os
return UUID(bytes=os.urandom(16), version=4)
except:
import random
bytes = [chr(random.randrange(256)) for i in range(16)]
return UUID(bytes=bytes, version=4)
def uuid5(namespace, name):
    """Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
    import hashlib
    # hash the namespace's raw 16 bytes followed by the name, per RFC 4122;
    # SHA-1 digests are 20 bytes, so only the first 16 are used
    digest = hashlib.sha1(namespace.bytes + name).digest()
    return UUID(bytes=digest[:16], version=5)
# The following standard UUIDs are for use with uuid3() or uuid5().
NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')
|
apache-2.0
|
guijomatos/SickRage
|
lib/pysrt/commands.py
|
71
|
8471
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable-all
import os
import re
import sys
import codecs
import shutil
import argparse
from textwrap import dedent
from chardet import detect
from pysrt import SubRipFile, SubRipTime, VERSION_STRING
def underline(string):
    """Wrap *string* in ANSI escape codes so terminals render it underlined."""
    return "\033[4m{0}\033[0m".format(string)
class TimeAwareArgumentParser(argparse.ArgumentParser):
    """ArgumentParser that accepts negative time offsets as positionals.

    A value like ``-1s500ms`` would normally be mistaken for an option by
    argparse; this parser inserts a ``--`` separator before the first
    argument that looks like a time representation so option parsing stops
    there.
    """
    RE_TIME_REPRESENTATION = re.compile(r'^\-?(\d+[hms]{0,2}){1,4}$')
    def parse_args(self, args=None, namespace=None):
        if args is None:
            # Mirror argparse's own default; the original crashed here
            # (iterating None) when called without an explicit args list.
            args = sys.argv[1:]
        else:
            # Work on a copy so the caller's list is not mutated.
            args = list(args)
        time_index = -1
        for index, arg in enumerate(args):
            if self.RE_TIME_REPRESENTATION.match(arg):
                time_index = index
                break
        if time_index >= 0:
            args.insert(time_index, '--')
        return super(TimeAwareArgumentParser, self).parse_args(args, namespace)
class SubRipShifter(object):
    """Implements the ``srt`` command-line tool: shift, rate, split and
    break operations on SubRip (.srt) subtitle files, built on pysrt."""
    BACKUP_EXTENSION = '.bak'
    # Matches "<digits><unit>" groups in a time string, e.g. "1m12s".
    RE_TIME_STRING = re.compile(r'(\d+)([hms]{0,2})')
    # Milliseconds per unit; a bare number (empty unit) is taken as seconds.
    UNIT_RATIOS = {
        'ms': 1,
        '': SubRipTime.SECONDS_RATIO,
        's': SubRipTime.SECONDS_RATIO,
        'm': SubRipTime.MINUTES_RATIO,
        'h': SubRipTime.HOURS_RATIO,
    }
    DESCRIPTION = dedent("""\
        Srt subtitle editor
        It can either shift, split or change the frame rate.
    """)
    TIMESTAMP_HELP = "A timestamp in the form: [-][Hh][Mm]S[s][MSms]"
    SHIFT_EPILOG = dedent("""\
        Examples:
            1 minute and 12 seconds foreward (in place):
                $ srt -i shift 1m12s movie.srt
            half a second foreward:
                $ srt shift 500ms movie.srt > othername.srt
            1 second and half backward:
                $ srt -i shift -1s500ms movie.srt
            3 seconds backward:
                $ srt -i shift -3 movie.srt
    """)
    RATE_EPILOG = dedent("""\
        Examples:
            Convert 23.9fps subtitles to 25fps:
                $ srt -i rate 23.9 25 movie.srt
    """)
    LIMITS_HELP = "Each parts duration in the form: [Hh][Mm]S[s][MSms]"
    SPLIT_EPILOG = dedent("""\
        Examples:
            For a movie in 2 parts with the first part 48 minutes and 18 seconds long:
                $ srt split 48m18s movie.srt
            => creates movie.1.srt and movie.2.srt
            For a movie in 3 parts of 20 minutes each:
                $ srt split 20m 20m movie.srt
            => creates movie.1.srt, movie.2.srt and movie.3.srt
    """)
    FRAME_RATE_HELP = "A frame rate in fps (commonly 23.9 or 25)"
    ENCODING_HELP = dedent("""\
        Change file encoding. Useful for players accepting only latin1 subtitles.
        List of supported encodings: http://docs.python.org/library/codecs.html#standard-encodings
    """)
    BREAK_EPILOG = dedent("""\
        Break lines longer than defined length
    """)
    LENGTH_HELP = "Maximum number of characters per line"
    def __init__(self):
        # Set by create_backup(); when not None, output is written to this
        # path instead of stdout (in-place editing).
        self.output_file_path = None
    def build_parser(self):
        """Build the argparse parser with one sub-command per operation."""
        parser = TimeAwareArgumentParser(description=self.DESCRIPTION, formatter_class=argparse.RawTextHelpFormatter)
        parser.add_argument('-i', '--in-place', action='store_true', dest='in_place',
                            help="Edit file in-place, saving a backup as file.bak (do not works for the split command)")
        parser.add_argument('-e', '--output-encoding', metavar=underline('encoding'), action='store', dest='output_encoding',
                            type=self.parse_encoding, help=self.ENCODING_HELP)
        parser.add_argument('-v', '--version', action='version', version='%%(prog)s %s' % VERSION_STRING)
        subparsers = parser.add_subparsers(title='commands')
        shift_parser = subparsers.add_parser('shift', help="Shift subtitles by specified time offset", epilog=self.SHIFT_EPILOG, formatter_class=argparse.RawTextHelpFormatter)
        shift_parser.add_argument('time_offset', action='store', metavar=underline('offset'),
                                  type=self.parse_time, help=self.TIMESTAMP_HELP)
        shift_parser.set_defaults(action=self.shift)
        rate_parser = subparsers.add_parser('rate', help="Convert subtitles from a frame rate to another", epilog=self.RATE_EPILOG, formatter_class=argparse.RawTextHelpFormatter)
        rate_parser.add_argument('initial', action='store', type=float, help=self.FRAME_RATE_HELP)
        rate_parser.add_argument('final', action='store', type=float, help=self.FRAME_RATE_HELP)
        rate_parser.set_defaults(action=self.rate)
        split_parser = subparsers.add_parser('split', help="Split a file in multiple parts", epilog=self.SPLIT_EPILOG, formatter_class=argparse.RawTextHelpFormatter)
        split_parser.add_argument('limits', action='store', nargs='+', type=self.parse_time, help=self.LIMITS_HELP)
        split_parser.set_defaults(action=self.split)
        break_parser = subparsers.add_parser('break', help="Break long lines", epilog=self.BREAK_EPILOG, formatter_class=argparse.RawTextHelpFormatter)
        break_parser.add_argument('length', action='store', type=int, help=self.LENGTH_HELP)
        break_parser.set_defaults(action=self.break_lines)
        parser.add_argument('file', action='store')
        return parser
    def run(self, args):
        """Parse *args* and execute the selected sub-command action."""
        self.arguments = self.build_parser().parse_args(args)
        if self.arguments.in_place:
            self.create_backup()
        self.arguments.action()
    def parse_time(self, time_string):
        """Convert a time string like '-1s500ms' to signed milliseconds."""
        negative = time_string.startswith('-')
        if negative:
            time_string = time_string[1:]
        ordinal = sum(int(value) * self.UNIT_RATIOS[unit] for value, unit
                      in self.RE_TIME_STRING.findall(time_string))
        return -ordinal if negative else ordinal
    def parse_encoding(self, encoding_name):
        """Validate an encoding name for argparse, returning it unchanged."""
        try:
            codecs.lookup(encoding_name)
        except LookupError as error:
            # NOTE(review): `error.message` is Python 2 only — confirm this
            # code never runs under Python 3 (where it raises AttributeError).
            raise argparse.ArgumentTypeError(error.message)
        return encoding_name
    def shift(self):
        """Shift every subtitle by the parsed millisecond offset."""
        self.input_file.shift(milliseconds=self.arguments.time_offset)
        self.input_file.write_into(self.output_file)
    def rate(self):
        """Rescale subtitle times by the final/initial frame-rate ratio."""
        ratio = self.arguments.final / self.arguments.initial
        self.input_file.shift(ratio=ratio)
        self.input_file.write_into(self.output_file)
    def split(self):
        """Write one '<name>.<N>.srt' file per requested time slice."""
        # Sentinel end limit is one ms past the last subtitle so the final
        # slice always includes it.
        limits = [0] + self.arguments.limits + [self.input_file[-1].end.ordinal + 1]
        base_name, extension = os.path.splitext(self.arguments.file)
        for index, (start, end) in enumerate(zip(limits[:-1], limits[1:])):
            file_name = '%s.%s%s' % (base_name, index + 1, extension)
            part_file = self.input_file.slice(ends_after=start, starts_before=end)
            part_file.shift(milliseconds=-start)
            part_file.clean_indexes()
            part_file.save(path=file_name, encoding=self.output_encoding)
    def create_backup(self):
        """Copy the input to '<file>.bak' (once) and arrange in-place output:
        subsequent reads use the backup, writes go to the original path."""
        backup_file = self.arguments.file + self.BACKUP_EXTENSION
        if not os.path.exists(backup_file):
            shutil.copy2(self.arguments.file, backup_file)
        self.output_file_path = self.arguments.file
        self.arguments.file = backup_file
    def break_lines(self):
        """Re-wrap each subtitle's text to at most the given line length."""
        split_re = re.compile(r'(.{,%i})(?:\s+|$)' % self.arguments.length)
        for item in self.input_file:
            item.text = '\n'.join(split_re.split(item.text)[1::2])
        self.input_file.write_into(self.output_file)
    @property
    def output_encoding(self):
        # Explicit -e option wins; otherwise reuse the detected input encoding.
        return self.arguments.output_encoding or self.input_file.encoding
    @property
    def input_file(self):
        """Lazily open the input SRT, detecting its encoding with chardet."""
        if not hasattr(self, '_source_file'):
            with open(self.arguments.file, 'rb') as f:
                content = f.read()
                encoding = detect(content).get('encoding')
                encoding = self.normalize_encoding(encoding)
            self._source_file = SubRipFile.open(self.arguments.file,
                encoding=encoding, error_handling=SubRipFile.ERROR_LOG)
        return self._source_file
    @property
    def output_file(self):
        """Lazily open the output stream: the in-place target or stdout."""
        if not hasattr(self, '_output_file'):
            if self.output_file_path:
                self._output_file = codecs.open(self.output_file_path, 'w+', encoding=self.output_encoding)
            else:
                self._output_file = sys.stdout
        return self._output_file
    def normalize_encoding(self, encoding):
        # chardet returns names like 'UTF-8'; codecs prefers 'utf_8'.
        return encoding.lower().replace('-', '_')
def main():
    """Console entry point: apply the requested srt command to argv."""
    shifter = SubRipShifter()
    shifter.run(sys.argv[1:])
if __name__ == '__main__':
    main()
|
gpl-3.0
|
simplegeo/shapely
|
shapely/geometry/base.py
|
1
|
17241
|
"""Base geometry class and utilities
"""
from functools import wraps
import sys
import warnings
from shapely.coords import CoordinateSequence
from shapely.geos import lgeos
from shapely.impl import DefaultImplementation
from shapely import wkb, wkt
# Geometry type names indexed by GEOS geometry type id, so that
# GEOMETRY_TYPES[lgeos.GEOSGeomTypeId(g)] yields the class name for g.
GEOMETRY_TYPES = [
    'Point',
    'LineString',
    'LinearRing',
    'Polygon',
    'MultiPoint',
    'MultiLineString',
    'MultiPolygon',
    'GeometryCollection'
]
def geometry_type_name(g):
    """Return the Shapely type name (e.g. 'Point') of GEOS geometry *g*.

    Raises ValueError when *g* is null.
    """
    if g is None:
        raise ValueError("Null geometry has no type")
    type_id = lgeos.GEOSGeomTypeId(g)
    return GEOMETRY_TYPES[type_id]
def geom_factory(g, parent=None):
    # Abstract geometry factory for use with topological methods below
    # Wraps GEOS geometry pointer *g* in the concrete Shapely class matching
    # its type; *parent*, when given, is stored to keep an owner alive.
    if not g:
        raise ValueError("No Shapely geometry can be created from null value")
    ob = BaseGeometry()
    geom_type = geometry_type_name(g)
    # TODO: check cost of dynamic import by profiling
    mod = __import__(
        'shapely.geometry',
        globals(),
        locals(),
        [geom_type],
        )
    # Swap in the concrete class (Point, Polygon, ...) resolved by name.
    ob.__class__ = getattr(mod, geom_type)
    ob.__geom__ = g
    ob.__p__ = parent
    ob._ndim = 2 # callers should be all from 2D worlds
    return ob
def exceptNull(func):
    """Decorator which helps avoid GEOS operations on null pointers."""
    @wraps(func)
    def wrapper(*args, **kwargs):
        geom = args[0]
        if not geom._geom or geom.is_empty:
            raise ValueError("Null/empty geometry supports no operations")
        return func(*args, **kwargs)
    return wrapper
# Canonical empty GeometryCollection (WKB hex) used as the null placeholder.
# binascii.unhexlify works on both Python 2 and 3, unlike the removed
# str.decode('hex') used previously.
import binascii
EMPTY = wkb.deserialize(binascii.unhexlify('010700000000000000'))
class BaseGeometry(object):
    """
    Provides GEOS spatial predicates and topological operations.
    """
    # Attributes
    # ----------
    # __geom__ : c_void_p
    # Cached ctypes pointer to GEOS geometry. Not to be accessed.
    # _geom : c_void_p
    # Property by which the GEOS geometry is accessed.
    # __p__ : object
    # Parent (Shapely) geometry
    # _ctypes_data : object
    # Cached ctypes data buffer
    # _ndim : int
    # Number of dimensions (2 or 3, generally)
    # _crs : object
    # Coordinate reference system. Available for Shapely extensions, but
    # not implemented here.
    # _owned : bool
    # True if this object's GEOS geometry is owned by another as in the case
    # of a multipart geometry member.
    __geom__ = EMPTY
    __p__ = None
    _ctypes_data = None
    _ndim = None
    _crs = None
    _owned = False
    # Backend config
    impl = DefaultImplementation
    @property
    def _is_empty(self):
        # True when this object holds no real GEOS geometry.
        return self.__geom__ in [EMPTY, None]
    def empty(self):
        """Destroy the underlying GEOS geometry (if owned) and reset to EMPTY."""
        if not (self._owned or self._is_empty):
            from shapely.geos import lgeos
            lgeos.GEOSGeom_destroy(self.__geom__)
        self.__geom__ = EMPTY
    def __del__(self):
        # Release GEOS resources before dropping references.
        self.empty()
        self.__geom__ = None
        self.__p__ = None
    def __str__(self):
        return self.to_wkt()
    # To support pickling
    def __reduce__(self):
        return (self.__class__, (), self.to_wkb())
    def __setstate__(self, state):
        # Restore from the WKB string produced by __reduce__.
        self.empty()
        self.__geom__ = wkb.deserialize(state)
    # The _geom property
    def _get_geom(self):
        return self.__geom__
    def _set_geom(self, val):
        # Destroy any currently-held geometry before taking the new pointer.
        self.empty()
        self.__geom__ = val
    _geom = property(_get_geom, _set_geom)
    # Array and ctypes interfaces
    # ---------------------------
    @property
    def ctypes(self):
        """Return ctypes buffer"""
        raise NotImplementedError
    @property
    def array_interface_base(self):
        # Shared skeleton of the __array_interface__ dict; 'f8' is a C double.
        if sys.byteorder == 'little':
            typestr = '<f8'
        elif sys.byteorder == 'big':
            typestr = '>f8'
        else:
            raise ValueError(
                "Unsupported byteorder: neither little nor big-endian")
        return {
            'version': 3,
            'typestr': typestr,
            'data': self.ctypes,
        }
    @property
    def __array_interface__(self):
        """Provide the Numpy array protocol."""
        raise NotImplementedError
    # Coordinate access
    # -----------------
    @exceptNull
    def _get_coords(self):
        """Access to geometry's coordinates (CoordinateSequence)"""
        return CoordinateSequence(self)
    def _set_coords(self, ob):
        raise NotImplementedError(
            "set_coords must be provided by derived classes")
    coords = property(_get_coords, _set_coords)
    @property
    def xy(self):
        """Separate arrays of X and Y coordinate values"""
        raise NotImplementedError
    # Python feature protocol
    @property
    def __geo_interface__(self):
        """Dictionary representation of the geometry"""
        raise NotImplementedError
    # Type of geometry and its representations
    # ----------------------------------------
    @exceptNull
    def geometryType(self):
        """Return the name of this geometry's type, e.g. 'Point'."""
        return geometry_type_name(self._geom)
    @property
    def type(self):
        return self.geometryType()
    def to_wkb(self):
        """Serialize to Well-Known Binary."""
        return wkb.dumps(self)
    def to_wkt(self):
        """Serialize to Well-Known Text."""
        return wkt.dumps(self)
    geom_type = property(geometryType,
        doc="""Name of the geometry's type, such as 'Point'"""
        )
    wkt = property(to_wkt,
        doc="""WKT representation of the geometry""")
    wkb = property(to_wkb,
        doc="""WKB representation of the geometry""")
    # Real-valued properties and methods
    # ----------------------------------
    @property
    def area(self):
        """Unitless area of the geometry (float)"""
        return self.impl['area'](self)
    def distance(self, other):
        """Unitless distance to other geometry (float)"""
        return self.impl['distance'](self, other)
    @property
    def length(self):
        """Unitless length of the geometry (float)"""
        return self.impl['length'](self)
    # Topological properties
    # ----------------------
    @property
    def boundary(self):
        """Returns a lower dimension geometry that bounds the object
        The boundary of a polygon is a line, the boundary of a line is a
        collection of points. The boundary of a point is an empty (null)
        collection.
        """
        return geom_factory(self.impl['boundary'](self))
    @property
    def bounds(self):
        """Returns minimum bounding region (minx, miny, maxx, maxy)"""
        if self.is_empty:
            return ()
        else:
            return self.impl['bounds'](self)
    @property
    def centroid(self):
        """Returns the geometric center of the polygon"""
        return geom_factory(self.impl['centroid'](self))
    @property
    def convex_hull(self):
        """Imagine an elastic band stretched around the geometry: that's a
        convex hull, more or less
        The convex hull of a three member multipoint, for example, is a
        triangular polygon.
        """
        return geom_factory(self.impl['convex_hull'](self))
    @property
    def envelope(self):
        """A figure that envelopes the geometry"""
        return geom_factory(self.impl['envelope'](self))
    def buffer(self, distance, resolution=16, quadsegs=None):
        """Returns a geometry with an envelope at a distance from the object's
        envelope
        A negative distance has a "shrink" effect. A zero distance may be used
        to "tidy" a polygon. The resolution of the buffer around each vertex of
        the object increases by increasing the resolution keyword parameter
        or second positional parameter. Note: the use of a `quadsegs` parameter
        is deprecated and will be gone from the next major release.
        Example:
        >>> from shapely.wkt import loads
        >>> g = loads('POINT (0.0 0.0)')
        >>> g.buffer(1.0).area # 16-gon approx of a unit radius circle
        3.1365484905459389
        >>> g.buffer(1.0, 128).area # 128-gon approximation
        3.1415138011443009
        >>> g.buffer(1.0, 3).area # triangle approximation
        3.0
        """
        if quadsegs is not None:
            warnings.warn(
                "The `quadsegs` argument is deprecated. Use `resolution`.",
                DeprecationWarning)
            res = quadsegs
        else:
            res = resolution
        return geom_factory(self.impl['buffer'](self, distance, res))
    def simplify(self, tolerance, preserve_topology=True):
        """Returns a simplified geometry produced by the Douglas-Puecker
        algorithm
        Coordinates of the simplified geometry will be no more than the
        tolerance distance from the original. Unless the topology preserving
        option is used, the algorithm may produce self-intersecting or
        otherwise invalid geometries.
        """
        if preserve_topology:
            op = self.impl['topology_preserve_simplify']
        else:
            op = self.impl['simplify']
        return geom_factory(op(self, tolerance))
    # Binary operations
    # -----------------
    def difference(self, other):
        """Returns the difference of the geometries"""
        return geom_factory(self.impl['difference'](self, other))
    def intersection(self, other):
        """Returns the intersection of the geometries"""
        return geom_factory(self.impl['intersection'](self, other))
    def symmetric_difference(self, other):
        """Returns the symmetric difference of the geometries
        (Shapely geometry)"""
        return geom_factory(self.impl['symmetric_difference'](self, other))
    def union(self, other):
        """Returns the union of the geometries (Shapely geometry)"""
        return geom_factory(self.impl['union'](self, other))
    # Unary predicates
    # ----------------
    @property
    def has_z(self):
        """True if the geometry's coordinate sequence(s) have z values (are
        3-dimensional)"""
        return bool(self.impl['has_z'](self))
    @property
    def is_empty(self):
        """True if the set of points in this geometry is empty, else False"""
        return bool(self.impl['is_empty'](self)) or (self._geom is None)
    @property
    def is_ring(self):
        """True if the geometry is a closed ring, else False"""
        return bool(self.impl['is_ring'](self))
    @property
    def is_simple(self):
        """True if the geometry is simple, meaning that any self-intersections
        are only at boundary points, else False"""
        return bool(self.impl['is_simple'](self))
    @property
    def is_valid(self):
        """True if the geometry is valid (definition depends on sub-class),
        else False"""
        return bool(self.impl['is_valid'](self))
    # Binary predicates
    # -----------------
    def relate(self, other):
        """Returns the DE-9IM intersection matrix for the two geometries
        (string)"""
        return self.impl['relate'](self, other)
    def contains(self, other):
        """Returns True if the geometry contains the other, else False"""
        return bool(self.impl['contains'](self, other))
    def crosses(self, other):
        """Returns True if the geometries cross, else False"""
        return bool(self.impl['crosses'](self, other))
    def disjoint(self, other):
        """Returns True if geometries are disjoint, else False"""
        return bool(self.impl['disjoint'](self, other))
    def equals(self, other):
        """Returns True if geometries are equal, else False"""
        return bool(self.impl['equals'](self, other))
    def intersects(self, other):
        """Returns True if geometries intersect, else False"""
        return bool(self.impl['intersects'](self, other))
    def overlaps(self, other):
        """Returns True if geometries overlap, else False"""
        return bool(self.impl['overlaps'](self, other))
    def touches(self, other):
        """Returns True if geometries touch, else False"""
        return bool(self.impl['touches'](self, other))
    def within(self, other):
        """Returns True if geometry is within the other, else False"""
        return bool(self.impl['within'](self, other))
    def equals_exact(self, other, tolerance):
        """Returns True if geometries are equal to within a specified
        tolerance"""
        # return BinaryPredicateOp('equals_exact', self)(other, tolerance)
        return bool(self.impl['equals_exact'](self, other, tolerance))
    def almost_equals(self, other, decimal=6):
        """Returns True if geometries are equal at all coordinates to a
        specified decimal place"""
        return self.equals_exact(other, 0.5 * 10**(-decimal))
    # Linear referencing
    # ------------------
    def project(self, other, normalized=False):
        """Returns the distance along this geometry to a point nearest the
        specified point
        If the normalized arg is True, return the distance normalized to the
        length of the linear geometry.
        """
        if normalized:
            op = self.impl['project_normalized']
        else:
            op = self.impl['project']
        return op(self, other)
    def interpolate(self, distance, normalized=False):
        """Return a point at the specified distance along a linear geometry
        If the normalized arg is True, the distance will be interpreted as a
        fraction of the geometry's length.
        """
        if normalized:
            op = self.impl['interpolate_normalized']
        else:
            op = self.impl['interpolate']
        return geom_factory(op(self, distance))
class BaseMultipartGeometry(BaseGeometry):
    """Base class for multi-part geometries and geometry collections.

    Direct coordinate access is unsupported; member geometries are reached
    through ``geoms``, iteration, indexing, and ``len()``.
    """
    def shape_factory(self, *args):
        # Factory for part instances, usually a geometry class
        raise NotImplementedError("To be implemented by derived classes")
    @property
    def ctypes(self):
        raise NotImplementedError(
            "Multi-part geometries have no ctypes representations")
    @property
    def __array_interface__(self):
        """Provide the Numpy array protocol."""
        raise NotImplementedError(
            "Multi-part geometries do not themselves provide the array interface")
    def _get_coords(self):
        raise NotImplementedError(
            "Sub-geometries may have coordinate sequences, but collections do not")
    def _set_coords(self, ob):
        raise NotImplementedError(
            "Sub-geometries may have coordinate sequences, but collections do not")
    @property
    def coords(self):
        raise NotImplementedError(
            "Multi-part geometries do not provide a coordinate sequence")
    @property
    @exceptNull
    def geoms(self):
        # Sequence view over member geometries; raises on null/empty parents.
        return GeometrySequence(self, self.shape_factory)
    def __iter__(self):
        if not self.is_empty:
            return iter(self.geoms)
        else:
            return iter([])
    def __len__(self):
        if not self.is_empty:
            return len(self.geoms)
        else:
            return 0
    def __getitem__(self, index):
        if not self.is_empty:
            return self.geoms[index]
        else:
            # Empty collection: index an empty tuple to raise IndexError.
            return ()[index]
class GeometrySequence(object):
    """
    Iterative access to members of a homogeneous multipart geometry.
    """
    # Attributes
    # ----------
    # shape_factory : callable
    # Returns instances of Shapely geometries
    # _geom : c_void_p
    # Ctypes pointer to the parent's GEOS geometry
    # _ndim : int
    # Number of dimensions (2 or 3, generally)
    # __p__ : object
    # Parent (Shapely) geometry
    shape_factory = None
    _geom = None
    __p__ = None
    _ndim = None
    def __init__(self, parent, type):
        self.shape_factory = type
        self.__p__ = parent
    def _update(self):
        # Refresh the cached GEOS pointer and dimensionality from the parent,
        # which may have been mutated since this sequence was created.
        self._geom = self.__p__._geom
        self._ndim = self.__p__._ndim
    def _get_geom_item(self, i):
        """Return member *i* wrapped in a Shapely geometry (not owned)."""
        g = self.shape_factory()
        g._owned = True
        g._geom = lgeos.GEOSGetGeometryN(self._geom, i)
        g._ndim = self._ndim
        g.__p__ = self
        return g
    def __iter__(self):
        self._update()
        for i in range(self.__len__()):
            yield self._get_geom_item(i)
    def __len__(self):
        self._update()
        return lgeos.GEOSGetNumGeometries(self._geom)
    def __getitem__(self, i):
        """Support positive and negative indexing like a list."""
        self._update()
        M = self.__len__()
        if i + M < 0 or i >= M:
            raise IndexError("index out of range")
        if i < 0:
            ii = M + i
        else:
            ii = i
        # Bug fix: use the normalized index; the original passed the raw
        # (possibly negative) index to GEOS after computing ii.
        return self._get_geom_item(ii)
    @property
    def _longest(self):
        """Length of the longest member's coordinate sequence (0 if empty)."""
        # Bug fix: the original computed the maximum but never returned it
        # (always yielding None) and shadowed the builtin ``max``.
        longest = 0
        for g in self:
            longest = max(longest, len(g.coords))
        return longest
class HeterogeneousGeometrySequence(GeometrySequence):
    """
    Iterative access to a heterogeneous sequence of geometries.
    """
    def __init__(self, parent):
        # No single shape factory applies: each member's concrete class is
        # resolved per item by geom_factory in _get_geom_item.
        super(HeterogeneousGeometrySequence, self).__init__(parent, None)
    def _get_geom_item(self, i):
        # geom_factory picks the concrete class from the GEOS type id.
        sub = lgeos.GEOSGetGeometryN(self._geom, i)
        g = geom_factory(sub)
        g._owned = True
        return g
# Test runner
def _test():
    """Run this module's doctests."""
    import doctest
    doctest.testmod()
if __name__ == "__main__":
    _test()
|
bsd-3-clause
|
betoesquivel/CIE
|
flask/lib/python2.7/site-packages/tornado/httpclient.py
|
22
|
25772
|
"""Blocking and non-blocking HTTP client interfaces.
This module defines a common interface shared by two implementations,
``simple_httpclient`` and ``curl_httpclient``. Applications may either
instantiate their chosen implementation class directly or use the
`AsyncHTTPClient` class from this module, which selects an implementation
that can be overridden with the `AsyncHTTPClient.configure` method.
The default implementation is ``simple_httpclient``, and this is expected
to be suitable for most users' needs. However, some applications may wish
to switch to ``curl_httpclient`` for reasons such as the following:
* ``curl_httpclient`` has some features not found in ``simple_httpclient``,
including support for HTTP proxies and the ability to use a specified
network interface.
* ``curl_httpclient`` is more likely to be compatible with sites that are
not-quite-compliant with the HTTP spec, or sites that use little-exercised
features of HTTP.
* ``curl_httpclient`` is faster.
* ``curl_httpclient`` was the default prior to Tornado 2.0.
Note that if you are using ``curl_httpclient``, it is highly
recommended that you use a recent version of ``libcurl`` and
``pycurl``. Currently the minimum supported version of libcurl is
7.21.1, and the minimum version of pycurl is 7.18.2. It is highly
recommended that your ``libcurl`` installation is built with
asynchronous DNS resolver (threaded or c-ares), otherwise you may
encounter various problems with request timeouts (for more
information, see
http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
and comments in curl_httpclient.py).
To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
"""
from __future__ import absolute_import, division, print_function, with_statement
import functools
import time
import weakref
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str
from tornado import httputil, stack_context
from tornado.ioloop import IOLoop
from tornado.util import Configurable
class HTTPClient(object):
    """A blocking HTTP client.
    This interface is provided for convenience and testing; most applications
    that are running an IOLoop will want to use `AsyncHTTPClient` instead.
    Typical usage looks like this::
        http_client = httpclient.HTTPClient()
        try:
            response = http_client.fetch("http://www.google.com/")
            print response.body
        except httpclient.HTTPError as e:
            print "Error:", e
        http_client.close()
    """
    def __init__(self, async_client_class=None, **kwargs):
        # A private IOLoop is created so fetch() can run it to completion
        # synchronously without interfering with any application loop.
        self._io_loop = IOLoop()
        if async_client_class is None:
            async_client_class = AsyncHTTPClient
        self._async_client = async_client_class(self._io_loop, **kwargs)
        self._closed = False
    def __del__(self):
        # Best-effort cleanup; close() is idempotent.
        self.close()
    def close(self):
        """Closes the HTTPClient, freeing any resources used."""
        if not self._closed:
            self._async_client.close()
            self._io_loop.close()
            self._closed = True
    def fetch(self, request, **kwargs):
        """Executes a request, returning an `HTTPResponse`.
        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``
        If an error occurs during the fetch, we raise an `HTTPError`.
        """
        # Run the async fetch to completion on the private loop, then
        # surface any HTTP error as an exception via rethrow().
        response = self._io_loop.run_sync(functools.partial(
            self._async_client.fetch, request, **kwargs))
        response.rethrow()
        return response
class AsyncHTTPClient(Configurable):
    """An non-blocking HTTP client.
    Example usage::
        def handle_request(response):
            if response.error:
                print "Error:", response.error
            else:
                print response.body
        http_client = AsyncHTTPClient()
        http_client.fetch("http://www.google.com/", handle_request)
    The constructor for this class is magic in several respects: It
    actually creates an instance of an implementation-specific
    subclass, and instances are reused as a kind of pseudo-singleton
    (one per `.IOLoop`). The keyword argument ``force_instance=True``
    can be used to suppress this singleton behavior. Unless
    ``force_instance=True`` is used, no arguments other than
    ``io_loop`` should be passed to the `AsyncHTTPClient` constructor.
    The implementation subclass as well as arguments to its
    constructor can be set with the static method `configure()`
    All `AsyncHTTPClient` implementations support a ``defaults``
    keyword argument, which can be used to set default values for
    `HTTPRequest` attributes. For example::
        AsyncHTTPClient.configure(
            None, defaults=dict(user_agent="MyUserAgent"))
        # or with force_instance:
        client = AsyncHTTPClient(force_instance=True,
            defaults=dict(user_agent="MyUserAgent"))
    """
    @classmethod
    def configurable_base(cls):
        # Configurable hook: configuration applies to this base class.
        return AsyncHTTPClient
    @classmethod
    def configurable_default(cls):
        # Configurable hook: implementation used when none is configured.
        from tornado.simple_httpclient import SimpleAsyncHTTPClient
        return SimpleAsyncHTTPClient
    @classmethod
    def _async_clients(cls):
        # Lazily-created, per-subclass WeakKeyDictionary mapping IOLoop ->
        # client instance; weak keys let dead loops drop their clients.
        attr_name = '_async_client_dict_' + cls.__name__
        if not hasattr(cls, attr_name):
            setattr(cls, attr_name, weakref.WeakKeyDictionary())
        return getattr(cls, attr_name)
    def __new__(cls, io_loop=None, force_instance=False, **kwargs):
        # Pseudo-singleton: reuse the cached instance for this IOLoop
        # unless force_instance is set.
        io_loop = io_loop or IOLoop.current()
        if force_instance:
            instance_cache = None
        else:
            instance_cache = cls._async_clients()
        if instance_cache is not None and io_loop in instance_cache:
            return instance_cache[io_loop]
        instance = super(AsyncHTTPClient, cls).__new__(cls, io_loop=io_loop,
                                                       **kwargs)
        # Make sure the instance knows which cache to remove itself from.
        # It can't simply call _async_clients() because we may be in
        # __new__(AsyncHTTPClient) but instance.__class__ may be
        # SimpleAsyncHTTPClient.
        instance._instance_cache = instance_cache
        if instance_cache is not None:
            instance_cache[instance.io_loop] = instance
        return instance
    def initialize(self, io_loop, defaults=None):
        # Called by Configurable in lieu of __init__ on the chosen subclass.
        self.io_loop = io_loop
        self.defaults = dict(HTTPRequest._DEFAULTS)
        if defaults is not None:
            self.defaults.update(defaults)
        self._closed = False
    def close(self):
        """Destroys this HTTP client, freeing any file descriptors used.
        This method is **not needed in normal use** due to the way
        that `AsyncHTTPClient` objects are transparently reused.
        ``close()`` is generally only necessary when either the
        `.IOLoop` is also being closed, or the ``force_instance=True``
        argument was used when creating the `AsyncHTTPClient`.
        No other methods may be called on the `AsyncHTTPClient` after
        ``close()``.
        """
        if self._closed:
            return
        self._closed = True
        if self._instance_cache is not None:
            # Remove ourselves from the per-IOLoop cache, guarding against
            # a stale or inconsistent entry.
            if self._instance_cache.get(self.io_loop) is not self:
                raise RuntimeError("inconsistent AsyncHTTPClient cache")
            del self._instance_cache[self.io_loop]
    def fetch(self, request, callback=None, **kwargs):
        """Executes a request, asynchronously returning an `HTTPResponse`.
        The request may be either a string URL or an `HTTPRequest` object.
        If it is a string, we construct an `HTTPRequest` using any additional
        kwargs: ``HTTPRequest(request, **kwargs)``
        This method returns a `.Future` whose result is an
        `HTTPResponse`. The ``Future`` will raise an `HTTPError` if
        the request returned a non-200 response code.
        If a ``callback`` is given, it will be invoked with the `HTTPResponse`.
        In the callback interface, `HTTPError` is not automatically raised.
        Instead, you must check the response's ``error`` attribute or
        call its `~HTTPResponse.rethrow` method.
        """
        if self._closed:
            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
        if not isinstance(request, HTTPRequest):
            request = HTTPRequest(url=request, **kwargs)
        # We may modify this (to add Host, Accept-Encoding, etc),
        # so make sure we don't modify the caller's object. This is also
        # where normal dicts get converted to HTTPHeaders objects.
        request.headers = httputil.HTTPHeaders(request.headers)
        request = _RequestProxy(request, self.defaults)
        future = TracebackFuture()
        if callback is not None:
            callback = stack_context.wrap(callback)
            def handle_future(future):
                # Bridge the future's outcome into an HTTPResponse and
                # deliver it to the user callback on the IOLoop.
                exc = future.exception()
                if isinstance(exc, HTTPError) and exc.response is not None:
                    response = exc.response
                elif exc is not None:
                    response = HTTPResponse(
                        request, 599, error=exc,
                        request_time=time.time() - request.start_time)
                else:
                    response = future.result()
                self.io_loop.add_callback(callback, response)
            future.add_done_callback(handle_future)
        def handle_response(response):
            # Resolve the future: errors become exceptions on the future.
            if response.error:
                future.set_exception(response.error)
            else:
                future.set_result(response)
        self.fetch_impl(request, handle_response)
        return future
    def fetch_impl(self, request, callback):
        # Subclass responsibility: perform the request and invoke callback.
        raise NotImplementedError()
    @classmethod
    def configure(cls, impl, **kwargs):
        """Configures the `AsyncHTTPClient` subclass to use.
        ``AsyncHTTPClient()`` actually creates an instance of a subclass.
        This method may be called with either a class object or the
        fully-qualified name of such a class (or ``None`` to use the default,
        ``SimpleAsyncHTTPClient``)
        If additional keyword arguments are given, they will be passed
        to the constructor of each subclass instance created. The
        keyword argument ``max_clients`` determines the maximum number
        of simultaneous `~AsyncHTTPClient.fetch()` operations that can
        execute in parallel on each `.IOLoop`. Additional arguments
        may be supported depending on the implementation class in use.
        Example::
            AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
        """
        super(AsyncHTTPClient, cls).configure(impl, **kwargs)
class HTTPRequest(object):
    """HTTP client request object."""

    # Default values for HTTPRequest parameters.
    # Merged with the values on the request object by AsyncHTTPClient
    # implementations.
    _DEFAULTS = dict(
        connect_timeout=20.0,
        request_timeout=20.0,
        follow_redirects=True,
        max_redirects=5,
        decompress_response=True,
        proxy_password='',
        allow_nonstandard_methods=False,
        validate_cert=True)

    def __init__(self, url, method="GET", headers=None, body=None,
                 auth_username=None, auth_password=None, auth_mode=None,
                 connect_timeout=None, request_timeout=None,
                 if_modified_since=None, follow_redirects=None,
                 max_redirects=None, user_agent=None, use_gzip=None,
                 network_interface=None, streaming_callback=None,
                 header_callback=None, prepare_curl_callback=None,
                 proxy_host=None, proxy_port=None, proxy_username=None,
                 proxy_password=None, allow_nonstandard_methods=None,
                 validate_cert=None, ca_certs=None,
                 allow_ipv6=None,
                 client_key=None, client_cert=None, body_producer=None,
                 expect_100_continue=False, decompress_response=None):
        r"""All parameters except ``url`` are optional.

        :arg string url: URL to fetch
        :arg string method: HTTP method, e.g. "GET" or "POST"
        :arg headers: Additional HTTP headers to pass on the request
        :type headers: `~tornado.httputil.HTTPHeaders` or `dict`
        :arg body: HTTP request body as a string (byte or unicode; if unicode
           the utf-8 encoding will be used)
        :arg body_producer: Callable used for lazy/asynchronous request bodies.
           It is called with one argument, a ``write`` function, and should
           return a `.Future`.  It should call the write function with new
           data as it becomes available.  The write function returns a
           `.Future` which can be used for flow control.
           Only one of ``body`` and ``body_producer`` may
           be specified.  ``body_producer`` is not supported on
           ``curl_httpclient``.  When using ``body_producer`` it is recommended
           to pass a ``Content-Length`` in the headers as otherwise chunked
           encoding will be used, and many servers do not support chunked
           encoding on requests.  New in Tornado 4.0
        :arg string auth_username: Username for HTTP authentication
        :arg string auth_password: Password for HTTP authentication
        :arg string auth_mode: Authentication mode; default is "basic".
           Allowed values are implementation-defined; ``curl_httpclient``
           supports "basic" and "digest"; ``simple_httpclient`` only supports
           "basic"
        :arg float connect_timeout: Timeout for initial connection in seconds
        :arg float request_timeout: Timeout for entire request in seconds
        :arg if_modified_since: Timestamp for ``If-Modified-Since`` header
        :type if_modified_since: `datetime` or `float`
        :arg bool follow_redirects: Should redirects be followed automatically
           or return the 3xx response?
        :arg int max_redirects: Limit for ``follow_redirects``
        :arg string user_agent: String to send as ``User-Agent`` header
        :arg bool decompress_response: Request a compressed response from
           the server and decompress it after downloading.  Default is True.
           New in Tornado 4.0.
        :arg bool use_gzip: Deprecated alias for ``decompress_response``
           since Tornado 4.0.
        :arg string network_interface: Network interface to use for request.
           ``curl_httpclient`` only; see note below.
        :arg callable streaming_callback: If set, ``streaming_callback`` will
           be run with each chunk of data as it is received, and
           ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in
           the final response.
        :arg callable header_callback: If set, ``header_callback`` will
           be run with each header line as it is received (including the
           first line, e.g. ``HTTP/1.0 200 OK\r\n``, and a final line
           containing only ``\r\n``.  All lines include the trailing newline
           characters).  ``HTTPResponse.headers`` will be empty in the final
           response.  This is most useful in conjunction with
           ``streaming_callback``, because it's the only way to get access to
           header data while the request is in progress.
        :arg callable prepare_curl_callback: If set, will be called with
           a ``pycurl.Curl`` object to allow the application to make additional
           ``setopt`` calls.
        :arg string proxy_host: HTTP proxy hostname.  To use proxies,
           ``proxy_host`` and ``proxy_port`` must be set; ``proxy_username`` and
           ``proxy_pass`` are optional.  Proxies are currently only supported
           with ``curl_httpclient``.
        :arg int proxy_port: HTTP proxy port
        :arg string proxy_username: HTTP proxy username
        :arg string proxy_password: HTTP proxy password
        :arg bool allow_nonstandard_methods: Allow unknown values for ``method``
           argument?
        :arg bool validate_cert: For HTTPS requests, validate the server's
           certificate?
        :arg string ca_certs: filename of CA certificates in PEM format,
           or None to use defaults.  See note below when used with
           ``curl_httpclient``.
        :arg bool allow_ipv6: Use IPv6 when available?  Default is false in
           ``simple_httpclient`` and true in ``curl_httpclient``
        :arg string client_key: Filename for client SSL key, if any.  See
           note below when used with ``curl_httpclient``.
        :arg string client_cert: Filename for client SSL certificate, if any.
           See note below when used with ``curl_httpclient``.
        :arg bool expect_100_continue: If true, send the
           ``Expect: 100-continue`` header and wait for a continue response
           before sending the request body.  Only supported with
           simple_httpclient.

        .. note::

            When using ``curl_httpclient`` certain options may be
            inherited by subsequent fetches because ``pycurl`` does
            not allow them to be cleanly reset.  This applies to the
            ``ca_certs``, ``client_key``, ``client_cert``, and
            ``network_interface`` arguments.  If you use these
            options, you should pass them on every request (you don't
            have to always use the same values, but it's not possible
            to mix requests that specify these options with ones that
            use the defaults).

        .. versionadded:: 3.1
           The ``auth_mode`` argument.

        .. versionadded:: 4.0
           The ``body_producer`` and ``expect_100_continue`` arguments.
        """
        # Note that some of these attributes go through property setters
        # defined below.
        # 'headers' must be assigned first: its setter normalizes None to an
        # empty HTTPHeaders, which the If-Modified-Since injection relies on.
        self.headers = headers
        if if_modified_since:
            self.headers["If-Modified-Since"] = httputil.format_timestamp(
                if_modified_since)
        self.proxy_host = proxy_host
        self.proxy_port = proxy_port
        self.proxy_username = proxy_username
        self.proxy_password = proxy_password
        self.url = url
        self.method = method
        self.body = body
        self.body_producer = body_producer
        self.auth_username = auth_username
        self.auth_password = auth_password
        self.auth_mode = auth_mode
        self.connect_timeout = connect_timeout
        self.request_timeout = request_timeout
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        self.user_agent = user_agent
        # An explicit decompress_response wins over the deprecated use_gzip
        # alias; when neither is given both are None and defaults apply.
        if decompress_response is not None:
            self.decompress_response = decompress_response
        else:
            self.decompress_response = use_gzip
        self.network_interface = network_interface
        self.streaming_callback = streaming_callback
        self.header_callback = header_callback
        self.prepare_curl_callback = prepare_curl_callback
        self.allow_nonstandard_methods = allow_nonstandard_methods
        self.validate_cert = validate_cert
        self.ca_certs = ca_certs
        self.allow_ipv6 = allow_ipv6
        self.client_key = client_key
        self.client_cert = client_cert
        self.expect_100_continue = expect_100_continue
        # Used by AsyncHTTPClient to compute HTTPResponse.request_time.
        self.start_time = time.time()

    # The properties below normalize their values on assignment.  The
    # callback-valued ones are wrapped via stack_context.wrap (tornado's
    # mechanism for preserving the caller's exception-handling context).

    @property
    def headers(self):
        return self._headers

    @headers.setter
    def headers(self, value):
        # Normalize None to an empty HTTPHeaders collection.
        if value is None:
            self._headers = httputil.HTTPHeaders()
        else:
            self._headers = value

    @property
    def body(self):
        return self._body

    @body.setter
    def body(self, value):
        # Body is stored in normalized byte form via utf8().
        self._body = utf8(value)

    @property
    def body_producer(self):
        return self._body_producer

    @body_producer.setter
    def body_producer(self, value):
        self._body_producer = stack_context.wrap(value)

    @property
    def streaming_callback(self):
        return self._streaming_callback

    @streaming_callback.setter
    def streaming_callback(self, value):
        self._streaming_callback = stack_context.wrap(value)

    @property
    def header_callback(self):
        return self._header_callback

    @header_callback.setter
    def header_callback(self, value):
        self._header_callback = stack_context.wrap(value)

    @property
    def prepare_curl_callback(self):
        return self._prepare_curl_callback

    @prepare_curl_callback.setter
    def prepare_curl_callback(self, value):
        self._prepare_curl_callback = stack_context.wrap(value)
class HTTPResponse(object):
    """HTTP Response object.

    Attributes:

    * request: the originating HTTPRequest object

    * code: numeric HTTP status code, e.g. 200 or 404

    * reason: human-readable reason phrase describing the status code

    * headers: `tornado.httputil.HTTPHeaders` object

    * effective_url: final location of the resource after following any
      redirects

    * buffer: ``cStringIO`` object for response body

    * body: response body as string (created on demand from ``self.buffer``)

    * error: Exception object, if any

    * request_time: seconds from request start to finish

    * time_info: dictionary of diagnostic timing information from the request.
      Available data are subject to change, but currently uses timings
      available from http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html,
      plus ``queue``, which is the delay (if any) introduced by waiting for
      a slot under `AsyncHTTPClient`'s ``max_clients`` setting.
    """
    def __init__(self, request, code, headers=None, buffer=None,
                 effective_url=None, error=None, request_time=None,
                 time_info=None, reason=None):
        # Unwrap the internal proxy so .request is the caller's object.
        if isinstance(request, _RequestProxy):
            request = request.request
        self.request = request
        self.code = code
        self.reason = reason or httputil.responses.get(code, "Unknown")
        self.headers = headers if headers is not None else httputil.HTTPHeaders()
        self.buffer = buffer
        self._body = None  # lazily materialized from buffer
        self.effective_url = request.url if effective_url is None else effective_url
        if error is not None:
            self.error = error
        elif not 200 <= self.code < 300:
            # Synthesize an HTTPError for any non-2xx status.
            self.error = HTTPError(self.code, message=self.reason,
                                   response=self)
        else:
            self.error = None
        self.request_time = request_time
        self.time_info = time_info or {}

    @property
    def body(self):
        """Response body bytes, read lazily from ``buffer`` and cached."""
        if self.buffer is None:
            return None
        if self._body is None:
            self._body = self.buffer.getvalue()
        return self._body

    def rethrow(self):
        """If there was an error on the request, raise an `HTTPError`."""
        if self.error:
            raise self.error

    def __repr__(self):
        args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items()))
        return "%s(%s)" % (self.__class__.__name__, args)
class HTTPError(Exception):
    """Exception thrown for an unsuccessful HTTP request.

    Attributes:

    * ``code`` - HTTP error integer error code, e.g. 404.  Error code 599 is
      used when no HTTP response was received, e.g. for a timeout.

    * ``message`` - the resolved reason phrase used in the exception text.

    * ``response`` - `HTTPResponse` object, if any.

    Note that if ``follow_redirects`` is False, redirects become HTTPErrors,
    and you can look at ``error.response.headers['Location']`` to see the
    destination of the redirect.
    """
    def __init__(self, code, message=None, response=None):
        self.code = code
        # Fall back to the standard reason phrase for the status code.
        message = message or httputil.responses.get(code, "Unknown")
        # Fix: keep the resolved message on the instance so callers can
        # inspect it without parsing str(self).
        self.message = message
        self.response = response
        Exception.__init__(self, "HTTP %d: %s" % (self.code, message))
class _RequestProxy(object):
"""Combines an object with a dictionary of defaults.
Used internally by AsyncHTTPClient implementations.
"""
def __init__(self, request, defaults):
self.request = request
self.defaults = defaults
def __getattr__(self, name):
request_attr = getattr(self.request, name)
if request_attr is not None:
return request_attr
elif self.defaults is not None:
return self.defaults.get(name, None)
else:
return None
def main():
    """Fetch each URL given on the command line and print the result."""
    from tornado.options import define, options, parse_command_line
    define("print_headers", type=bool, default=False)
    define("print_body", type=bool, default=True)
    define("follow_redirects", type=bool, default=True)
    define("validate_cert", type=bool, default=True)
    args = parse_command_line()
    client = HTTPClient()
    for url in args:
        try:
            response = client.fetch(
                url,
                follow_redirects=options.follow_redirects,
                validate_cert=options.validate_cert,
            )
        except HTTPError as e:
            # An error with an attached response is still printable;
            # anything else is fatal.
            if e.response is None:
                raise
            response = e.response
        if options.print_headers:
            print(response.headers)
        if options.print_body:
            print(native_str(response.body))
    client.close()
# Allow use as a simple command-line HTTP client.
if __name__ == "__main__":
    main()
|
mit
|
xxsergzzxx/python-for-android
|
python3-alpha/python3-src/Lib/importlib/test/test_util.py
|
51
|
3794
|
from importlib import util
from . import util as test_util
import imp
import sys
import types
import unittest
class ModuleForLoaderTests(unittest.TestCase):

    """Tests for importlib.util.module_for_loader."""

    def return_module(self, name):
        # Wrap a loader that simply hands back the module it was given.
        wrapped = util.module_for_loader(lambda self, module: module)
        return wrapped(self, name)

    def raise_exception(self, name):
        # Wrap a loader that always fails, swallowing the ImportError so the
        # caller can inspect sys.modules afterwards.
        def failing_loader(self, module):
            raise ImportError
        wrapped = util.module_for_loader(failing_loader)
        try:
            wrapped(self, name)
        except ImportError:
            pass

    def test_new_module(self):
        # When the module is absent from sys.modules a fresh one is created
        # and registered.
        module_name = 'a.b.c'
        with test_util.uncache(module_name):
            created = self.return_module(module_name)
            self.assertTrue(module_name in sys.modules)
        self.assertTrue(isinstance(created, types.ModuleType))
        self.assertEqual(created.__name__, module_name)

    def test_reload(self):
        # A module already present in sys.modules is reused as-is.
        name = 'a.b.c'
        preexisting = imp.new_module('a.b.c')
        with test_util.uncache(name):
            sys.modules[name] = preexisting
            self.assertIs(self.return_module(name), sys.modules[name])

    def test_new_module_failure(self):
        # If the wrapped loader raises, the freshly added module must be
        # removed from sys.modules again.
        name = 'a.b.c'
        with test_util.uncache(name):
            self.raise_exception(name)
            self.assertTrue(name not in sys.modules)

    def test_reload_failure(self):
        # A failing reload must leave the pre-existing module untouched.
        name = 'a.b.c'
        preexisting = imp.new_module(name)
        with test_util.uncache(name):
            sys.modules[name] = preexisting
            self.raise_exception(name)
            self.assertIs(preexisting, sys.modules[name])
class SetPackageTests(unittest.TestCase):

    """Tests for importlib.util.set_package."""

    def verify(self, module, expect):
        """Run *module* through set_package and check __package__."""
        wrapped = util.set_package(lambda: module)
        wrapped()
        self.assertTrue(hasattr(module, '__package__'))
        self.assertEqual(expect, module.__package__)

    def test_top_level(self):
        # Top-level modules get the empty string (a None value is replaced).
        module = imp.new_module('module')
        module.__package__ = None
        self.verify(module, '')

    def test_package(self):
        # A package's __package__ is its own name.
        pkg = imp.new_module('pkg')
        pkg.__path__ = ['<path>']
        pkg.__package__ = None
        self.verify(pkg, 'pkg')

    def test_submodule(self):
        # A module inside a package gets the parent package's name.
        mod = imp.new_module('pkg.mod')
        mod.__package__ = None
        self.verify(mod, 'pkg')

    def test_setting_if_missing(self):
        # A missing __package__ attribute is created.
        mod = imp.new_module('mod')
        if hasattr(mod, '__package__'):
            delattr(mod, '__package__')
        self.verify(mod, '')

    def test_leaving_alone(self):
        # Any non-None value (even False) is preserved untouched.
        for value in (True, False):
            mod = imp.new_module('mod')
            mod.__package__ = value
            self.verify(mod, value)
def test_main():
    # Entry point used by CPython's regrtest machinery.
    from test import support
    support.run_unittest(ModuleForLoaderTests, SetPackageTests)


if __name__ == '__main__':
    test_main()
|
apache-2.0
|
zhmcclient/zhmccli
|
zhmccli/_cmd_storagegroup.py
|
1
|
24874
|
# Copyright 2020 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Commands for storage groups on CPCs in DPM mode.
"""
from __future__ import absolute_import
from __future__ import print_function
import click
import zhmcclient
from .zhmccli import cli
from ._cmd_cpc import find_cpc
from ._cmd_port import find_port
from ._helper import print_properties, print_resources, abort_if_false, \
options_to_properties, original_options, COMMAND_OPTIONS_METAVAR, \
click_exception, add_options, LIST_OPTIONS, EMAIL_OPTIONS, \
ASYNC_TIMEOUT_OPTIONS
# Valid values for the --type option of 'storagegroup create'.
ALL_TYPES = ['fcp', 'fc']

# Valid partition 'status' values, shown in the help text of the --status
# filter of 'storagegroup list-partitions'.
ALL_PARTITION_STATUSES = [
    "communications-not-active",
    "status-check",
    "stopped",
    "terminated",
    "starting",
    "active",
    "stopping",
    "degraded",
    "reservation-error",
    "paused",
]

# Defaults for storage group creation unless created from storage template
DEFAULT_TYPE = 'fcp'
DEFAULT_CONNECTIVITY = 2
DEFAULT_SHARED = True
DEFAULT_MAX_PARTITIONS = 2
DEFAULT_DIRECT_CONNECTION_COUNT = 0
def find_storagegroup(cmd_ctx, client, stogrp_name):
    """
    Return the resource object of the storage group named *stogrp_name*,
    converting any zhmcclient error into a CLI error.
    """
    sg_manager = client.consoles.console.storage_groups
    try:
        return sg_manager.find(name=stogrp_name)
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
# Top-level click command group; the 'storagegroup *' subcommands below
# attach themselves to it via decorators.
@cli.group('storagegroup', options_metavar=COMMAND_OPTIONS_METAVAR)
def storagegroup_group():
    """
    Command group for managing storage groups (DPM mode only).

    Storage groups are definitions in the HMC that simplify the management of
    storage attached to partitions.

    The commands in this group work only on z14 and later CPCs that are in DPM
    mode.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
# CLI wrapper: 'zhmc storagegroup list'; work is done in
# cmd_storagegroup_list().
@storagegroup_group.command('list', options_metavar=COMMAND_OPTIONS_METAVAR)
@add_options(LIST_OPTIONS)
@click.pass_obj
def storagegroup_list(cmd_ctx, **options):
    """
    List the storage groups defined in the HMC.

    Storage groups for which the authenticated user does not have
    object-access permission will not be included.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(lambda: cmd_storagegroup_list(cmd_ctx, options))
# CLI wrapper: 'zhmc storagegroup show'; work is done in
# cmd_storagegroup_show().
@storagegroup_group.command('show', options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.pass_obj
def storagegroup_show(cmd_ctx, storagegroup):
    """
    Show the details of a storage group.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(lambda: cmd_storagegroup_show(cmd_ctx, storagegroup))
# CLI wrapper: 'zhmc storagegroup create'; work is done in
# cmd_storagegroup_create().
@storagegroup_group.command('create', options_metavar=COMMAND_OPTIONS_METAVAR)
@click.option('--name', type=str, required=True,
              help='The name of the new storage group.')
@click.option('--cpc', type=str, required=True,
              help='The name of the CPC associated with the new storage group.')
@click.option('--type', type=click.Choice(ALL_TYPES),
              required=False, default=DEFAULT_TYPE,
              help='The type of the new storage group. '
              'Mutually exclusive with --template; one of them is required.')
@click.option('--template', type=str, required=False,
              help='The name of the storage template on which the new storage '
              'group is to be based. '
              'Mutually exclusive with --type; one of them is required.')
@click.option('--description', type=str, required=False,
              help='The description of the new storage group. '
              'Default: Empty, or from template')
@click.option('--shared', type=bool, required=False,
              help='Indicates whether the storage group can be attached to '
              'more than one partition. '
              'Default: {d}, or from template'.
              format(d=DEFAULT_SHARED))
@click.option('--connectivity', type=int, required=False,
              help='The number of adapters to utilize for the new storage '
              'group. '
              'Default: {d}, or from template'.
              format(d=DEFAULT_CONNECTIVITY))
@click.option('--max-partitions', type=int, required=False,
              help='The maximum number of partitions to which the new storage '
              'group can be attached. '
              'Default: {d}, or from template'.
              format(d=DEFAULT_MAX_PARTITIONS))
@click.option('--direct-connection-count', type=int, required=False,
              help='The number of additional virtual storage resource '
              'connections for the host that can be directly assigned to a '
              'guest virtual machine. A value of 0 indicates this feature is '
              'disabled. '
              'Default: {d}, or from template'.
              format(d=DEFAULT_DIRECT_CONNECTION_COUNT))
@add_options(EMAIL_OPTIONS)
@click.pass_obj
def storagegroup_create(cmd_ctx, **options):
    """
    Create a storage group.

    When created using --type, the new storage group will have no storage
    volumes. Storage volumes can be created and added to the storage group
    with the 'storagevolume' command.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(lambda: cmd_storagegroup_create(cmd_ctx, options))
# CLI wrapper: 'zhmc storagegroup update'; work is done in
# cmd_storagegroup_update().
@storagegroup_group.command('update', options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.option('--name', type=str, required=False,
              help='The new name of the storage group.')
@click.option('--description', type=str, required=False,
              help='The new description of the storage group.')
@click.option('--shared', type=bool, required=False,
              help='Indicates whether the storage group can be attached to '
              'more than one partition.')
@click.option('--connectivity', type=int, required=False,
              help='The number of adapters to utilize for the new storage '
              'group.')
@click.option('--max-partitions', type=int, required=False,
              help='The maximum number of partitions to which the new storage '
              'group can be attached.')
@click.option('--direct-connection-count', type=int, required=False,
              help='The number of additional virtual storage resource '
              'connections for the host that can be directly assigned to a '
              'guest virtual machine. A value of 0 indicates this feature is '
              'disabled.')
@add_options(EMAIL_OPTIONS)
@click.pass_obj
def storagegroup_update(cmd_ctx, storagegroup, **options):
    """
    Update the properties of a storage group.

    Only the properties will be changed for which a corresponding option is
    specified, so the default for all options is not to change properties.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(
        lambda: cmd_storagegroup_update(cmd_ctx, storagegroup, options))
# CLI wrapper: 'zhmc storagegroup delete'; work is done in
# cmd_storagegroup_delete().  The -y/--yes flag bypasses the confirmation
# prompt via the abort_if_false callback.
@storagegroup_group.command('delete', options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.option('-y', '--yes', is_flag=True, callback=abort_if_false,
              expose_value=False,
              help='Skip prompt to confirm deletion of the storage group.',
              prompt='Are you sure you want to delete this storage group ?')
@add_options(EMAIL_OPTIONS)
@click.pass_obj
def storagegroup_delete(cmd_ctx, storagegroup, **options):
    """
    Delete a storage group.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(
        lambda: cmd_storagegroup_delete(cmd_ctx, storagegroup, options))
# CLI wrapper: 'zhmc storagegroup list-partitions'; work is done in
# cmd_storagegroup_list_partitions().
@storagegroup_group.command('list-partitions',
                            options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.option('--name', type=str, required=False,
              help='Regular expression filter to limit the returned partitions '
              'to those with a matching name.')
@click.option('--status', type=str, required=False,
              help='Filter to limit the returned partitions to those with a '
              'matching status. Valid status values are: {sv}.'.
              format(sv=', '.join(ALL_PARTITION_STATUSES)))
@click.pass_obj
def storagegroup_list_partitions(cmd_ctx, storagegroup, **options):
    """
    List the partitions to which a storage group is attached.

    Partitions for which the authenticated user does not have object-access
    permission will not be included.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(
        lambda: cmd_storagegroup_list_partitions(cmd_ctx, storagegroup,
                                                 options))
# CLI wrapper: 'zhmc storagegroup list-ports'; work is done in
# cmd_storagegroup_list_ports().
@storagegroup_group.command('list-ports',
                            options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.pass_obj
def storagegroup_list_ports(cmd_ctx, storagegroup):
    """
    List the candidate adapter ports of a storage group.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(
        lambda: cmd_storagegroup_list_ports(cmd_ctx, storagegroup))
# CLI wrapper: 'zhmc storagegroup add-ports'; work is done in
# cmd_storagegroup_add_ports().  --adapter and --port are parallel
# multi-value options matched by position.
@storagegroup_group.command('add-ports',
                            options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.option('--adapter', type=str, metavar='NAME',
              required=False, multiple=True,
              help='The name of the storage adapter with the new port to be '
              'added. '
              'The --adapter and --port options can be specified multiple '
              'times and correspond to each other via their order.')
@click.option('--port', type=str, metavar='NAME',
              required=False, multiple=True,
              help='The name of the storage adapter port to be added. '
              'The --adapter and --port options can be specified multiple '
              'times and correspond to each other via their order.')
@click.pass_obj
def storagegroup_add_ports(cmd_ctx, storagegroup, **options):
    """
    Add storage adapter ports to the candidate adapter port list of a storage
    group.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(
        lambda: cmd_storagegroup_add_ports(cmd_ctx, storagegroup, options))
# CLI wrapper: 'zhmc storagegroup remove-ports'; work is done in
# cmd_storagegroup_remove_ports().  --adapter and --port are parallel
# multi-value options matched by position.
@storagegroup_group.command('remove-ports',
                            options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.option('--adapter', type=str, metavar='NAME',
              required=False, multiple=True,
              # Fix: help text was copy-pasted from 'add-ports' and wrongly
              # said the port would be "added".
              help='The name of the storage adapter with the port to be '
              'removed. '
              'The --adapter and --port options can be specified multiple '
              'times and correspond to each other via their order.')
@click.option('--port', type=str, metavar='NAME',
              required=False, multiple=True,
              help='The name of the storage adapter port to be removed. '
              'The --adapter and --port options can be specified multiple '
              'times and correspond to each other via their order.')
@click.pass_obj
def storagegroup_remove_ports(cmd_ctx, storagegroup, **options):
    """
    Remove ports from the candidate adapter port list of a storage group.

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(
        lambda: cmd_storagegroup_remove_ports(cmd_ctx, storagegroup, options))
# CLI wrapper: 'zhmc storagegroup discover-fcp'; work is done in
# cmd_storagegroup_discover_fcp().
@storagegroup_group.command('discover-fcp',
                            options_metavar=COMMAND_OPTIONS_METAVAR)
@click.argument('STORAGEGROUP', type=str, metavar='STORAGEGROUP')
@click.option('--force-restart', type=bool, required=False, default=False,
              help='Indicates if there is an in-progress discovery operation '
              'for the specified storage group, it should be terminated and '
              'started again.')
@add_options(ASYNC_TIMEOUT_OPTIONS)
@click.pass_obj
def storagegroup_discover_fcp(cmd_ctx, storagegroup, **options):
    """
    Perform Logical Unit Number (LUN) discovery for an FCP storage group.

    This command only applies to storage groups of type "fcp".

    In addition to the command-specific options shown in this help text, the
    general options (see 'zhmc --help') can also be specified right after the
    'zhmc' command name.
    """
    cmd_ctx.execute_cmd(
        lambda: cmd_storagegroup_discover_fcp(cmd_ctx, storagegroup, options))
def cmd_storagegroup_list(cmd_ctx, options):
    """Implementation of 'storagegroup list' (see storagegroup_list())."""
    client = zhmcclient.Client(cmd_ctx.session)
    console = client.consoles.console
    try:
        stogrps = console.storage_groups.list()
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    show_list = [
        'name',
    ]
    if not options['names_only']:
        show_list.extend([
            'device-number',
            'type',
            'shared',
            'fulfillment-state',
            'cpc',  # CPC name, as additional property
        ])
    if options['uri']:
        show_list.extend([
            'object-uri',
        ])
    # Resolve each storage group's CPC name.  Cache by CPC URI so that many
    # storage groups on the same CPC cause only one 'find' request instead of
    # one per storage group.
    cpc_names_by_uri = {}
    cpc_additions = {}
    for sg in stogrps:
        try:
            cpc_uri = sg.prop('cpc-uri')
            if cpc_uri not in cpc_names_by_uri:
                cpc = client.cpcs.find(**{'object-uri': cpc_uri})
                cpc_names_by_uri[cpc_uri] = cpc.name
        except zhmcclient.Error as exc:
            raise click_exception(exc, cmd_ctx.error_format)
        cpc_additions[sg.uri] = cpc_names_by_uri[cpc_uri]
    additions = {
        'cpc': cpc_additions,
    }
    print_resources(cmd_ctx, stogrps, cmd_ctx.output_format, show_list,
                    additions, all=options['all'])
def cmd_storagegroup_show(cmd_ctx, stogrp_name):
    """Display all properties of one storage group."""
    client = zhmcclient.Client(cmd_ctx.session)
    group = find_storagegroup(cmd_ctx, client, stogrp_name)
    try:
        # Retrieve the complete property set before printing.
        group.pull_full_properties()
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    print_properties(cmd_ctx, group.properties, cmd_ctx.output_format)
def cmd_storagegroup_create(cmd_ctx, options):
    """Create a storage group from the command-line options."""
    client = zhmcclient.Client(cmd_ctx.session)
    console = client.consoles.console

    # Options translated manually below, rather than generically:
    name_map = {
        'cpc': None,
        'email-to-address': None,
        'email-cc-address': None,
    }
    org_options = original_options(options)
    properties = options_to_properties(org_options, name_map)

    # --cpc is required; resolve its name to the CPC URI.
    cpc = find_cpc(cmd_ctx, client, org_options['cpc'])
    properties['cpc-uri'] = cpc.uri

    # The email options are multi-valued; only pass them when present.
    for opt_name, prop_name in (('email-to-address', 'email-to-addresses'),
                                ('email-cc-address', 'email-cc-addresses')):
        value = org_options[opt_name]
        if value:
            properties[prop_name] = value

    try:
        new_stogrp = console.storage_groups.create(properties)
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    cmd_ctx.spinner.stop()
    click.echo("New storage group {sg} has been created.".
               format(sg=new_stogrp.properties['name']))
def cmd_storagegroup_update(cmd_ctx, stogrp_name, options):
    """Update properties of a storage group from the command-line options."""
    client = zhmcclient.Client(cmd_ctx.session)
    stogrp = find_storagegroup(cmd_ctx, client, stogrp_name)

    # Options translated manually below, rather than generically:
    name_map = {
        'email-to-address': None,
        'email-cc-address': None,
    }
    org_options = original_options(options)
    properties = options_to_properties(org_options, name_map)

    # The email options are multi-valued; only pass them when present.
    for opt_name, prop_name in (('email-to-address', 'email-to-addresses'),
                                ('email-cc-address', 'email-cc-addresses')):
        value = org_options[opt_name]
        if value:
            properties[prop_name] = value

    if not properties:
        # Nothing to do; tell the user instead of issuing an empty update.
        cmd_ctx.spinner.stop()
        click.echo("No properties specified for updating storage group {sg}.".
                   format(sg=stogrp_name))
        return

    try:
        stogrp.update_properties(properties)
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    cmd_ctx.spinner.stop()
    was_renamed = 'name' in properties and properties['name'] != stogrp_name
    if was_renamed:
        click.echo("Storage group {sg} has been renamed to {sgn} and was "
                   "updated.".
                   format(sg=stogrp_name, sgn=properties['name']))
    else:
        click.echo("Storage group {sg} has been updated.".
                   format(sg=stogrp_name))
def cmd_storagegroup_delete(cmd_ctx, stogrp_name, options):
    """Delete a storage group, optionally sending notification emails."""
    client = zhmcclient.Client(cmd_ctx.session)
    stogrp = find_storagegroup(cmd_ctx, client, stogrp_name)
    org_options = original_options(options)
    try:
        # Empty tuples from the multi-value email options become None.
        stogrp.delete(
            email_to_addresses=org_options['email-to-address'] or None,
            email_cc_addresses=org_options['email-cc-address'] or None,
            email_insert=org_options['email-insert'])
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    cmd_ctx.spinner.stop()
    click.echo("Storage group {sg} has been deleted.".format(sg=stogrp_name))
def cmd_storagegroup_list_partitions(cmd_ctx, stogrp_name, options):
    """List the partitions to which a storage group is attached."""
    client = zhmcclient.Client(cmd_ctx.session)
    stogrp = find_storagegroup(cmd_ctx, client, stogrp_name)
    try:
        partitions = stogrp.list_attached_partitions(
            name=options['name'], status=options['status'])
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    # The CPC name is not a partition property, so it is supplied as an
    # additional column keyed by partition URI.
    cpc_by_uri = {part.uri: part.manager.parent.name for part in partitions}
    show_list = [
        'cpc',  # CPC name, as additional property
        'name',
        'type',
        'status',
    ]
    print_resources(cmd_ctx, partitions, cmd_ctx.output_format, show_list,
                    {'cpc': cpc_by_uri})
def cmd_storagegroup_list_ports(cmd_ctx, stogrp_name):
    """List the candidate adapter ports of a storage group."""
    client = zhmcclient.Client(cmd_ctx.session)
    stogrp = find_storagegroup(cmd_ctx, client, stogrp_name)
    try:
        ports = stogrp.list_candidate_adapter_ports()
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    # CPC and adapter names are not port properties; provide them as
    # additional columns keyed by port URI.
    cpc_by_uri = {}
    adapter_by_uri = {}
    for port in ports:
        adapter = port.manager.parent
        adapter_by_uri[port.uri] = adapter.name
        cpc_by_uri[port.uri] = adapter.manager.parent.name
    show_list = [
        'cpc',  # CPC name, as additional property
        'adapter',  # Adapter name, as additional property
        'name',
        'index',
        'fabric-id',
    ]
    additions = {
        'cpc': cpc_by_uri,
        'adapter': adapter_by_uri,
    }
    print_resources(cmd_ctx, ports, cmd_ctx.output_format, show_list, additions)
def cmd_storagegroup_add_ports(cmd_ctx, stogrp_name, options):
    """Add candidate adapter ports to a storage group."""
    client = zhmcclient.Client(cmd_ctx.session)
    stogrp = find_storagegroup(cmd_ctx, client, stogrp_name)
    cpc = stogrp.cpc
    adapter_names = options['adapter']  # repeatable option -> list
    port_names = options['port']  # repeatable option -> list
    if len(adapter_names) != len(port_names):
        raise click_exception(
            "The --adapter and --port options must be specified the same "
            "number of times, but have been specified {na} and {np} times.".
            format(na=len(adapter_names), np=len(port_names)),
            cmd_ctx.error_format)
    # Pair the adapter/port names positionally and resolve each to a port.
    ports = [find_port(cmd_ctx, client, cpc, a_name, p_name)
             for a_name, p_name in zip(adapter_names, port_names)]
    if not ports:
        cmd_ctx.spinner.stop()
        click.echo("No ports specified for adding to the candidate list "
                   "of storage group {sg}.".format(sg=stogrp_name))
        return
    try:
        stogrp.add_candidate_adapter_ports(ports)
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    cmd_ctx.spinner.stop()
    click.echo("The specified ports have been added to the candidate list "
               "of storage group {sg}.".format(sg=stogrp_name))
def cmd_storagegroup_remove_ports(cmd_ctx, stogrp_name, options):
    """Remove candidate adapter ports from a storage group."""
    client = zhmcclient.Client(cmd_ctx.session)
    stogrp = find_storagegroup(cmd_ctx, client, stogrp_name)
    cpc = stogrp.cpc
    adapter_names = options['adapter']  # repeatable option -> list
    port_names = options['port']  # repeatable option -> list
    if len(adapter_names) != len(port_names):
        raise click_exception(
            "The --adapter and --port options must be specified the same "
            "number of times, but have been specified {na} and {np} times.".
            format(na=len(adapter_names), np=len(port_names)),
            cmd_ctx.error_format)
    # Pair the adapter/port names positionally and resolve each to a port.
    ports = [find_port(cmd_ctx, client, cpc, a_name, p_name)
             for a_name, p_name in zip(adapter_names, port_names)]
    if not ports:
        cmd_ctx.spinner.stop()
        click.echo("No ports specified for removing from the candidate list "
                   "of storage group {sg}.".format(sg=stogrp_name))
        return
    try:
        stogrp.remove_candidate_adapter_ports(ports)
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    cmd_ctx.spinner.stop()
    click.echo("The specified ports have been removed from the candidate list "
               "of storage group {sg}.".format(sg=stogrp_name))
def cmd_storagegroup_discover_fcp(cmd_ctx, stogrp_name, options):
    """Trigger LUN discovery for an FCP storage group and wait for it."""
    client = zhmcclient.Client(cmd_ctx.session)
    stogrp = find_storagegroup(cmd_ctx, client, stogrp_name)
    try:
        stogrp.discover_fcp(
            force_restart=options['force_restart'],
            wait_for_completion=True,
            operation_timeout=options['operation_timeout'])
    except zhmcclient.Error as exc:
        raise click_exception(exc, cmd_ctx.error_format)
    cmd_ctx.spinner.stop()
    click.echo("LUN discovery has been completed for FCP storage group {sg}.".
               format(sg=stogrp_name))
|
apache-2.0
|
rzr/synapse
|
synapse/rest/client/v1/base.py
|
4
|
1585
|
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains base REST classes for constructing client v1 servlets.
"""
from synapse.http.servlet import RestServlet
from synapse.api.urls import CLIENT_PREFIX
from .transactions import HttpTransactionStore
import re
import logging
logger = logging.getLogger(__name__)
def client_path_pattern(path_regex):
    """Creates a regex compiled client path with the correct client path
    prefix.
    Args:
        path_regex (str): The regex string to match. This should NOT have a ^
        as this will be prefixed.
    Returns:
        SRE_Pattern
    """
    # Anchor at the start and prepend the API prefix before compiling.
    return re.compile("^%s%s" % (CLIENT_PREFIX, path_regex))
class ClientV1RestServlet(RestServlet):
    """A base Synapse REST Servlet for the client version 1 API.
    """
    def __init__(self, hs):
        # Cache the server object and the collaborators that v1 servlets use.
        self.hs = hs
        self.handlers = hs.get_handlers()
        self.builder_factory = hs.get_event_builder_factory()
        self.auth = hs.get_v1auth()
        # Per-servlet store of HTTP transactions (see .transactions);
        # presumably used for transaction-ID deduplication -- confirm there.
        self.txns = HttpTransactionStore()
|
apache-2.0
|
CantemoInternal/django-comments-xtd
|
django_comments_xtd/demos/simple/settings.py
|
1
|
3973
|
#-*- coding: utf-8 -*-
from __future__ import unicode_literals
# Demo Django settings for the 'simple' django-comments-xtd example project.
import os
PRJ_PATH = os.path.abspath(os.path.curdir)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    ('Alice Bloggs', 'alice@example.com'),
)
MANAGERS = ADMINS
# SQLite file in the project directory; no credentials required.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'django_comments_xtd_demo.db',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Brussels'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PRJ_PATH, "media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
# ADMIN_MEDIA_PREFIX = '/media/'
STATIC_ROOT = os.path.join(PRJ_PATH, "static")
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    #os.path.join(PRJ_PATH, "static"),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# NOTE(review): hard-coded secret key is acceptable only for this demo;
# never commit a real project's SECRET_KEY to source control.
SECRET_KEY = 'v2824l&2-n+4zznbsk9c-ap5i)b3e8b+%*a=dxqlahm^%)68jn'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'simple.urls'
TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), "templates"),
)
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.comments',
    'simple.articles',
    'django_comments_xtd',
    'south',
)
# EMAIL_HOST = "smtp.gmail.com"
# EMAIL_PORT = "587"
# EMAIL_HOST_USER = "username@gmail.com"
# EMAIL_HOST_PASSWORD = ""
# EMAIL_USE_TLS = True # Yes for Gmail
# DEFAULT_FROM_EMAIL = "Alice Bloggs <alice@example.com>"
# SERVER_EMAIL = DEFAULT_FROM_EMAIL
# Fill in actual EMAIL settings above, and comment out the
# following line to let this django demo sending emails
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
COMMENTS_APP = "django_comments_xtd"
COMMENTS_XTD_CONFIRM_EMAIL = False
COMMENTS_XTD_SALT = b"es-war-einmal-una-bella-princesa-in-a-beautiful-castle"
#COMMENTS_XTD_MAX_THREAD_LEVEL = 0  # Default value
|
bsd-2-clause
|
tangyibin/goblin-core
|
riscv/llvm/3.5/cfe-3.5.0.src/bindings/python/examples/cindex/cindex-includes.py
|
110
|
1644
|
#!/usr/bin/env python
#===- cindex-includes.py - cindex/Python Inclusion Graph -----*- python -*--===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
"""
A simple command line tool for dumping a Graphviz description (dot) that
describes include dependencies.
"""
def main():
    """Parse a source file with libclang and dump its include-dependency
    graph to stdout in Graphviz dot format.

    Exits via OptionParser.error() when no input arguments are given or the
    translation unit cannot be loaded.
    """
    import sys
    from clang.cindex import Index
    from optparse import OptionParser

    parser = OptionParser("usage: %prog [options] (unknown) [clang-args*]")
    parser.disable_interspersed_args()
    (opts, args) = parser.parse_args()
    if len(args) == 0:
        # BUG FIX: message previously read 'invalid number arguments'.
        parser.error('invalid number of arguments')

    # FIXME: Add an output file option
    out = sys.stdout

    index = Index.create()
    tu = index.parse(None, args)
    if not tu:
        parser.error("unable to load input")

    # A helper function for generating the node name.
    def name(f):
        # NOTE(review): returns None (rendered as the text "None" in the
        # graph) when f is falsy -- confirm whether that is intended.
        if f:
            return "\"" + f.name + "\""

    # Generate the include graph
    out.write("digraph G {\n")
    for i in tu.get_includes():
        line = " "
        if i.is_input_file:
            # Always write the input file as a node just in case it doesn't
            # actually include anything. This would generate a 1 node graph.
            line += name(i.include)
        else:
            line += '%s->%s' % (name(i.source), name(i.include))
        line += "\n"
        out.write(line)
    out.write("}\n")

if __name__ == '__main__':
    main()
|
bsd-3-clause
|
gsnedders/presto-testo
|
core/standards/scripts/opjsunit/harness/winprocess.py
|
14
|
9554
|
# A module to expose various thread/process/job related structures and
# methods from kernel32
#
# The MIT License
#
# Copyright (c) 2006 the Mozilla Foundation <http://www.mozilla.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from ctypes import c_void_p, POINTER, sizeof, Structure, windll, WinError, WINFUNCTYPE
from ctypes.wintypes import BOOL, BYTE, DWORD, HANDLE, LPCWSTR, LPWSTR, UINT, WORD
# Pointer-type aliases that ctypes.wintypes does not provide directly.
LPVOID = c_void_p
LPBYTE = POINTER(BYTE)
LPDWORD = POINTER(DWORD)
def ErrCheckBool(result, func, args):
    """ctypes errcheck callback for Win32 APIs that return a BOOL.

    A zero/False result signals failure and raises WinError (which picks up
    GetLastError); otherwise the original argument tuple is passed through.
    """
    if result:
        return args
    raise WinError()
# CloseHandle()
# Prototype: BOOL CloseHandle(HANDLE).  Failures raise via ErrCheckBool.
CloseHandleProto = WINFUNCTYPE(BOOL, HANDLE)
CloseHandle = CloseHandleProto(("CloseHandle", windll.kernel32))
CloseHandle.errcheck = ErrCheckBool
# AutoHANDLE
class AutoHANDLE(HANDLE):
    """Subclass of HANDLE which will call CloseHandle() on deletion."""
    def Close(self):
        # Close only once: a zero value marks an already-closed handle.
        if self.value:
            CloseHandle(self)
            self.value = 0
    def __del__(self):
        # Ensure the OS handle is released when the wrapper is collected.
        self.Close()
    def __int__(self):
        # Expose the raw handle value for APIs that want an integer.
        return self.value
def ErrCheckHandle(result, func, args):
    """errcheck function for Windows functions that return a HANDLE.

    A null handle means failure (raises WinError); a valid handle is wrapped
    in AutoHANDLE so it is closed automatically when garbage-collected.
    """
    if result:
        return AutoHANDLE(result)
    raise WinError()
# PROCESS_INFORMATION structure
class PROCESS_INFORMATION(Structure):
    # Mirrors the Win32 PROCESS_INFORMATION struct returned by CreateProcess.
    _fields_ = [("hProcess", HANDLE),
                ("hThread", HANDLE),
                ("dwProcessID", DWORD),
                ("dwThreadID", DWORD)]
    def __init__(self):
        Structure.__init__(self)
        # NOTE(review): PROCESS_INFORMATION has no 'cb' field; this line
        # looks copy-pasted from a STARTUPINFO initializer.  It only sets a
        # plain instance attribute and is harmless -- confirm before removing.
        self.cb = sizeof(self)
LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION)
# STARTUPINFO structure
class STARTUPINFO(Structure):
    # Mirrors the Win32 STARTUPINFOW struct passed to CreateProcessW.
    # Per the Win32 API docs, the caller must set 'cb' to sizeof(STARTUPINFO)
    # before use; this class does not do it automatically.
    _fields_ = [("cb", DWORD),
                ("lpReserved", LPWSTR),
                ("lpDesktop", LPWSTR),
                ("lpTitle", LPWSTR),
                ("dwX", DWORD),
                ("dwY", DWORD),
                ("dwXSize", DWORD),
                ("dwYSize", DWORD),
                ("dwXCountChars", DWORD),
                ("dwYCountChars", DWORD),
                ("dwFillAttribute", DWORD),
                ("dwFlags", DWORD),
                ("wShowWindow", WORD),
                ("cbReserved2", WORD),
                ("lpReserved2", LPBYTE),
                ("hStdInput", HANDLE),
                ("hStdOutput", HANDLE),
                ("hStdError", HANDLE)
                ]
LPSTARTUPINFO = POINTER(STARTUPINFO)
# STARTF_* bits for STARTUPINFO.dwFlags: each bit tells CreateProcess which
# of the optional STARTUPINFO fields to honour.
STARTF_USESHOWWINDOW = 0x01
STARTF_USESIZE = 0x02
STARTF_USEPOSITION = 0x04
STARTF_USECOUNTCHARS = 0x08
STARTF_USEFILLATTRIBUTE = 0x10
STARTF_RUNFULLSCREEN = 0x20
STARTF_FORCEONFEEDBACK = 0x40
STARTF_FORCEOFFFEEDBACK = 0x80
STARTF_USESTDHANDLES = 0x100
# EnvironmentBlock
class EnvironmentBlock:
    """An object which can be passed as the lpEnv parameter of CreateProcess.

    It is initialized with a dictionary.  The block is built as a single
    wide string of "KEY=VALUE" entries separated by NULs, with a trailing
    empty entry so the final buffer carries the double-NUL terminator that
    CreateProcessW requires.
    """
    def __init__(self, dict):
        # NOTE: the parameter name 'dict' shadows the builtin but is kept
        # for backward compatibility with existing keyword callers.
        if not dict:
            # NULL environment -> child inherits the parent's environment.
            self._as_parameter_ = None
        else:
            # BUG FIX: use .items() instead of the Python-2-only
            # .iteritems() so the class works on both Python 2 and 3.
            values = ["%s=%s" % (key, value)
                      for (key, value) in dict.items()]
            values.append("")
            self._as_parameter_ = LPCWSTR("\0".join(values))
# CreateProcess()
# paramflags tuples below: direction 1 = input parameter (third element is
# its default value), direction 2 = output parameter returned to the caller.
CreateProcessProto = WINFUNCTYPE(BOOL,                 # Return type
                                 LPCWSTR,              # lpApplicationName
                                 LPWSTR,               # lpCommandLine
                                 LPVOID,               # lpProcessAttributes
                                 LPVOID,               # lpThreadAttributes
                                 BOOL,                 # bInheritHandles
                                 DWORD,                # dwCreationFlags
                                 LPVOID,               # lpEnvironment
                                 LPCWSTR,              # lpCurrentDirectory
                                 LPSTARTUPINFO,        # lpStartupInfo
                                 LPPROCESS_INFORMATION # lpProcessInformation
                                 )
CreateProcessFlags = ((1, "lpApplicationName", None),
                      (1, "lpCommandLine"),
                      (1, "lpProcessAttributes", None),
                      (1, "lpThreadAttributes", None),
                      (1, "bInheritHandles", True),
                      (1, "dwCreationFlags", 0),
                      (1, "lpEnvironment", None),
                      (1, "lpCurrentDirectory", None),
                      (1, "lpStartupInfo"),
                      (2, "lpProcessInformation"))
def ErrCheckCreateProcess(result, func, args):
    """errcheck for CreateProcess.

    Raises WinError (via ErrCheckBool) on failure; on success unpacks the
    output PROCESS_INFORMATION into a tuple
    (hProcess, hThread, dwProcessID, dwThreadID) with both handles wrapped
    in AutoHANDLE for automatic closing.
    """
    ErrCheckBool(result, func, args)
    info = args[9]
    return (AutoHANDLE(info.hProcess), AutoHANDLE(info.hThread),
            info.dwProcessID, info.dwThreadID)
CreateProcess = CreateProcessProto(("CreateProcessW", windll.kernel32),
                                   CreateProcessFlags)
CreateProcess.errcheck = ErrCheckCreateProcess
# dwCreationFlags values accepted by CreateProcess.
CREATE_BREAKAWAY_FROM_JOB = 0x01000000
CREATE_DEFAULT_ERROR_MODE = 0x04000000
CREATE_NEW_CONSOLE = 0x00000010
CREATE_NEW_PROCESS_GROUP = 0x00000200
CREATE_NO_WINDOW = 0x08000000
CREATE_SUSPENDED = 0x00000004
CREATE_UNICODE_ENVIRONMENT = 0x00000400
DEBUG_ONLY_THIS_PROCESS = 0x00000002
DEBUG_PROCESS = 0x00000001
DETACHED_PROCESS = 0x00000008
# CreateJobObject()
# Returns a job-object HANDLE; wrapped in AutoHANDLE by ErrCheckHandle.
CreateJobObjectProto = WINFUNCTYPE(HANDLE,  # Return type
                                   LPVOID,  # lpJobAttributes
                                   LPCWSTR  # lpName
                                   )
CreateJobObjectFlags = ((1, "lpJobAttributes", None),
                        (1, "lpName", None))
CreateJobObject = CreateJobObjectProto(("CreateJobObjectW", windll.kernel32),
                                       CreateJobObjectFlags)
CreateJobObject.errcheck = ErrCheckHandle
# AssignProcessToJobObject()
# Puts an existing process into a job object.
AssignProcessToJobObjectProto = WINFUNCTYPE(BOOL,   # Return type
                                            HANDLE, # hJob
                                            HANDLE  # hProcess
                                            )
AssignProcessToJobObjectFlags = ((1, "hJob"),
                                 (1, "hProcess"))
AssignProcessToJobObject = AssignProcessToJobObjectProto(
    ("AssignProcessToJobObject", windll.kernel32),
    AssignProcessToJobObjectFlags)
AssignProcessToJobObject.errcheck = ErrCheckBool
# ResumeThread()
def ErrCheckResumeThread(result, func, args):
    """errcheck for ResumeThread.

    ResumeThread returns the previous suspend count, so any value other
    than the sentinel -1 is success; -1 raises WinError.
    """
    if result != -1:
        return args
    raise WinError()
ResumeThreadProto = WINFUNCTYPE(DWORD,  # Return type
                                HANDLE  # hThread
                                )
ResumeThreadFlags = ((1, "hThread"),)
ResumeThread = ResumeThreadProto(("ResumeThread", windll.kernel32),
                                 ResumeThreadFlags)
ResumeThread.errcheck = ErrCheckResumeThread
# TerminateJobObject()
# Kills every process currently assigned to the job with uExitCode.
TerminateJobObjectProto = WINFUNCTYPE(BOOL,   # Return type
                                      HANDLE, # hJob
                                      UINT    # uExitCode
                                      )
TerminateJobObjectFlags = ((1, "hJob"),
                           (1, "uExitCode", 127))
TerminateJobObject = TerminateJobObjectProto(
    ("TerminateJobObject", windll.kernel32),
    TerminateJobObjectFlags)
TerminateJobObject.errcheck = ErrCheckBool
# WaitForSingleObject()
# dwMilliseconds defaults to -1 (INFINITE below).
WaitForSingleObjectProto = WINFUNCTYPE(DWORD,  # Return type
                                       HANDLE, # hHandle
                                       DWORD,  # dwMilliseconds
                                       )
WaitForSingleObjectFlags = ((1, "hHandle"),
                            (1, "dwMilliseconds", -1))
WaitForSingleObject = WaitForSingleObjectProto(
    ("WaitForSingleObject", windll.kernel32),
    WaitForSingleObjectFlags)
# Return values of WaitForSingleObject.
INFINITE = -1
WAIT_TIMEOUT = 0x0102
WAIT_OBJECT_0 = 0x0
WAIT_ABANDONED = 0x0080
# GetExitCodeProcess()
# lpExitCode is an output parameter (direction 2), returned to the caller.
GetExitCodeProcessProto = WINFUNCTYPE(BOOL,    # Return type
                                      HANDLE,  # hProcess
                                      LPDWORD, # lpExitCode
                                      )
GetExitCodeProcessFlags = ((1, "hProcess"),
                           (2, "lpExitCode"))
GetExitCodeProcess = GetExitCodeProcessProto(
    ("GetExitCodeProcess", windll.kernel32),
    GetExitCodeProcessFlags)
GetExitCodeProcess.errcheck = ErrCheckBool
|
bsd-3-clause
|
Livit/Livit.Learn.EdX
|
pavelib/utils/envs.py
|
9
|
7028
|
"""
Helper functions for loading environment settings.
"""
from __future__ import print_function
import os
import sys
import json
from lazy import lazy
from path import Path as path
import memcache
class Env(object):
    """
    Load information about the execution environment.

    Note: the class body runs at import time; it reads environment
    variables, lists directories under common/lib, and creates a memcache
    client (no network traffic until the client is used).
    """
    # Root of the git repository (edx-platform)
    REPO_ROOT = path(__file__).abspath().parent.parent.parent
    # Reports Directory
    REPORT_DIR = REPO_ROOT / 'reports'
    METRICS_DIR = REPORT_DIR / 'metrics'
    # Python unittest dirs
    PYTHON_COVERAGERC = REPO_ROOT / ".coveragerc"
    # Bok_choy dirs
    BOK_CHOY_DIR = REPO_ROOT / "common" / "test" / "acceptance"
    BOK_CHOY_LOG_DIR = REPO_ROOT / "test_root" / "log"
    BOK_CHOY_REPORT_DIR = REPORT_DIR / "bok_choy"
    BOK_CHOY_A11Y_REPORT_DIR = REPORT_DIR / "a11y"
    BOK_CHOY_COVERAGERC = BOK_CHOY_DIR / ".coveragerc"
    BOK_CHOY_A11Y_COVERAGERC = BOK_CHOY_DIR / ".a11ycoveragerc"
    BOK_CHOY_A11Y_CUSTOM_RULES_FILE = (
        REPO_ROOT / "node_modules" / "edx-custom-a11y-rules" /
        "lib" / "custom_a11y_rules.js"
    )
    PA11YCRAWLER_REPORT_DIR = REPORT_DIR / "pa11ycrawler"
    PA11YCRAWLER_COVERAGERC = BOK_CHOY_DIR / ".pa11ycrawlercoveragerc"
    # If set, put reports for run in "unique" directories.
    # The main purpose of this is to ensure that the reports can be 'slurped'
    # in the main jenkins flow job without overwriting the reports from other
    # build steps. For local development/testing, this shouldn't be needed.
    if os.environ.get("SHARD", None):
        shard_str = "shard_{}".format(os.environ.get("SHARD"))
        BOK_CHOY_REPORT_DIR = BOK_CHOY_REPORT_DIR / shard_str
        BOK_CHOY_LOG_DIR = BOK_CHOY_LOG_DIR / shard_str
    # For the time being, stubs are used by both the bok-choy and lettuce acceptance tests
    # For this reason, the stubs package is currently located in the Django app called "terrain"
    # where other lettuce configuration is stored.
    BOK_CHOY_STUB_DIR = REPO_ROOT / "common" / "djangoapps" / "terrain"
    # Directory that videos are served from
    VIDEO_SOURCE_DIR = REPO_ROOT / "test_root" / "data" / "video"
    # Ports and log locations for the LMS/CMS servers under bok-choy tests.
    BOK_CHOY_SERVERS = {
        'lms': {
            'port': 8003,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_lms.log"
        },
        'cms': {
            'port': 8031,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_studio.log"
        }
    }
    # One entry per stub service: its port, log file and optional extra
    # configuration string passed to the stub on startup.
    BOK_CHOY_STUBS = {
        'xqueue': {
            'port': 8040,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_xqueue.log",
            'config': 'register_submission_url=http://0.0.0.0:8041/test/register_submission',
        },
        'ora': {
            'port': 8041,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_ora.log",
            'config': '',
        },
        'comments': {
            'port': 4567,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_comments.log",
        },
        'video': {
            'port': 8777,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_video_sources.log",
            'config': "root_dir={}".format(VIDEO_SOURCE_DIR),
        },
        'youtube': {
            'port': 9080,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_youtube.log",
        },
        'edxnotes': {
            'port': 8042,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_edxnotes.log",
        },
        'programs': {
            'port': 8090,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_programs.log",
        },
        'ecommerce': {
            'port': 8043,
            'log': BOK_CHOY_LOG_DIR / "bok_choy_ecommerce.log",
        }
    }
    # Mongo databases that will be dropped before/after the tests run
    BOK_CHOY_MONGO_DATABASE = "test"
    # Client object is created at import time; it connects lazily.
    BOK_CHOY_CACHE = memcache.Client(['0.0.0.0:11211'], debug=0)
    # Test Ids Directory
    TEST_DIR = REPO_ROOT / ".testids"
    # Files used to run each of the js test suites
    # TODO: Store this as a dict. Order seems to matter for some
    # reason. See issue TE-415.
    KARMA_CONFIG_FILES = [
        REPO_ROOT / 'cms/static/karma_cms.conf.js',
        REPO_ROOT / 'cms/static/karma_cms_squire.conf.js',
        REPO_ROOT / 'lms/static/karma_lms.conf.js',
        REPO_ROOT / 'lms/static/karma_lms_coffee.conf.js',
        REPO_ROOT / 'common/lib/xmodule/xmodule/js/karma_xmodule.conf.js',
        REPO_ROOT / 'common/static/karma_common.conf.js',
        REPO_ROOT / 'common/static/karma_common_requirejs.conf.js',
    ]
    # Suite keys; positions correspond to KARMA_CONFIG_FILES above.
    JS_TEST_ID_KEYS = [
        'cms',
        'cms-squire',
        'lms',
        'lms-coffee',
        'xmodule',
        'common',
        'common-requirejs'
    ]
    JS_REPORT_DIR = REPORT_DIR / 'javascript'
    # Directories used for common/lib/ tests
    LIB_TEST_DIRS = []
    for item in (REPO_ROOT / "common/lib").listdir():
        if (REPO_ROOT / 'common/lib' / item).isdir():
            LIB_TEST_DIRS.append(path("common/lib") / item.basename())
    LIB_TEST_DIRS.append(path("pavelib/paver_tests"))
    # Directory for i18n test reports
    I18N_REPORT_DIR = REPORT_DIR / 'i18n'
    # Service variant (lms, cms, etc.) configured with an environment variable
    # We use this to determine which envs.json file to load.
    SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
    # If service variant not configured in env, then pass the correct
    # environment for lms / cms
    if not SERVICE_VARIANT:  # this will intentionally catch "";
        if any(i in sys.argv[1:] for i in ('cms', 'studio')):
            SERVICE_VARIANT = 'cms'
        else:
            SERVICE_VARIANT = 'lms'
    @lazy
    def env_tokens(self):
        """
        Return a dict of environment settings.
        If we couldn't find the JSON file, issue a warning and return an empty dict.
        """
        # Find the env JSON file
        if self.SERVICE_VARIANT:
            env_path = self.REPO_ROOT.parent / "{service}.env.json".format(service=self.SERVICE_VARIANT)
        else:
            env_path = path("env.json").abspath()
        # If the file does not exist, here or one level up,
        # issue a warning and return an empty dict
        if not env_path.isfile():
            env_path = env_path.parent.parent / env_path.basename()
        if not env_path.isfile():
            print(
                "Warning: could not find environment JSON file "
                "at '{path}'".format(path=env_path),
                file=sys.stderr,
            )
            return dict()
        # Otherwise, load the file as JSON and return the resulting dict
        try:
            with open(env_path) as env_file:
                return json.load(env_file)
        except ValueError:
            # Invalid JSON is fatal: exit rather than run with bad config.
            print(
                "Error: Could not parse JSON "
                "in {path}".format(path=env_path),
                file=sys.stderr,
            )
            sys.exit(1)
    @lazy
    def feature_flags(self):
        """
        Return a dictionary of feature flags configured by the environment.
        """
        return self.env_tokens.get('FEATURES', dict())
|
agpl-3.0
|
szilveszter/django
|
django/utils/importlib.py
|
98
|
1547
|
# Taken from Python 2.7 with permission from/by the original author.
import warnings
import sys
from django.utils import six
from django.utils.deprecation import RemovedInDjango19Warning
# Emitted once at import time so callers migrate off this shim in time.
warnings.warn("django.utils.importlib will be removed in Django 1.9.",
              RemovedInDjango19Warning, stacklevel=2)
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level package")
return "%s.%s" % (package[:dot], name)
if six.PY3:
    # On Python 3, simply re-export the standard library implementation.
    from importlib import import_module
else:
    def import_module(name, package=None):
        """Import a module.
        The 'package' argument is required when performing a relative import. It
        specifies the package to use as the anchor point from which to resolve the
        relative import to an absolute import.
        """
        if name.startswith('.'):
            if not package:
                raise TypeError("relative imports require the 'package' argument")
            # Count the leading dots to determine the relative level.
            level = 0
            for character in name:
                if character != '.':
                    break
                level += 1
            name = _resolve_name(name[level:], package, level)
        __import__(name)
        # __import__ returns the top-level package; look up the actual module.
        return sys.modules[name]
|
bsd-3-clause
|
wimberosa/samba
|
buildtools/wafadmin/Tools/tex.py
|
16
|
7234
|
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"TeX/LaTeX/PDFLaTeX support"
import os, re
import Utils, TaskGen, Task, Runner, Build
from TaskGen import feature, before
from Logs import error, warn, debug
# Matches \include{...}, \input{...}, \import{...}, \bringin{...} and
# \lstinputlisting{...} so the scanner can discover file dependencies.
re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M)
def scan(self):
    """Dependency scanner for (pdf)latex tasks.

    Parses the main .tex input for \\include/\\input/... references and
    returns (nodes, names): nodes resolved in the source tree, and raw
    names that could not be resolved.
    """
    node = self.inputs[0]
    env = self.env
    nodes = []
    names = []
    if not node: return (nodes, names)
    code = Utils.readf(node.abspath(env))
    curdirnode = self.curdirnode
    abs = curdirnode.abspath()
    for match in re_tex.finditer(code):
        path = match.group('file')
        if path:
            # Try the bare name first, then the usual TeX extensions.
            for k in ['', '.tex', '.ltx']:
                # add another loop for the tex include paths?
                debug('tex: trying %s%s' % (path, k))
                try:
                    os.stat(abs+os.sep+path+k)
                except OSError:
                    continue
                found = path+k
                node = curdirnode.find_resource(found)
                if node:
                    nodes.append(node)
                else:
                    debug('tex: could not find %s' % path)
                    names.append(path)
    debug("tex: found the following : %s and names %s" % (nodes, names))
    return (nodes, names)
# Pre-compiled command runners for the TeX toolchain.
latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
# BUG FIX: the makeindex runner was registered under the name 'bibtex'
# (copy-paste slip); it executes ${MAKEINDEX} and must be named 'makeindex'.
makeindex_fun, _ = Task.compile_fun('makeindex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
# Detects \bibdata references inside a generated .aux file.
g_bibtex_re = re.compile('bibdata', re.M)
def tex_build(task, command='LATEX'):
    """Run the (pdf)latex toolchain on the task's main .tex input.

    Performs a first compiler pass, then runs bibtex if the generated .aux
    file references a bibliography and makeindex if a .idx file exists,
    then repeats the compiler pass (at most 10 times) until the .aux file
    stops changing.

    Args:
        task: waf task whose inputs[0] is the main .tex node.
        command: 'LATEX' for DVI output or 'PDFLATEX' for PDF output.
    Returns:
        None on success, or the failing command's non-zero exit status.
    """
    env = task.env

    if not env['PROMPT_LATEX']:
        # Non-interactive build: never stop at a TeX error prompt.
        env.append_value('LATEXFLAGS', '-interaction=batchmode')
        env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')

    fun = latex_fun
    if command == 'PDFLATEX':
        fun = pdflatex_fun

    node = task.inputs[0]
    reldir = node.bld_dir(env)

    srcfile = node.abspath(env)
    # TEXINPUTS must cover both the source and the build directory.
    sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep

    aux_node = node.change_ext('.aux')
    idx_node = node.change_ext('.idx')

    nm = aux_node.name
    docuname = nm[:len(nm) - 4]  # strip the trailing ".aux"

    # important, set the cwd for everybody
    task.cwd = task.inputs[0].parent.abspath(task.env)

    warn('first pass on %s' % command)
    task.env.env = {'TEXINPUTS': sr2}
    task.env.SRCFILE = srcfile
    ret = fun(task)
    if ret:
        return ret

    # look in the .aux file if there is a bibfile to process
    try:
        ct = Utils.readf(aux_node.abspath(env))
    except (OSError, IOError):
        error('error bibtex scan')
    else:
        fo = g_bibtex_re.findall(ct)
        # there is a .aux file to process
        if fo:
            warn('calling bibtex')
            task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
            task.env.SRCFILE = docuname
            ret = bibtex_fun(task)
            if ret:
                error('error when calling bibtex %s' % docuname)
                return ret

    # look on the filesystem if there is a .idx file to process
    try:
        idx_path = idx_node.abspath(env)
        os.stat(idx_path)
    except OSError:
        error('error file.idx scan')
    else:
        warn('calling makeindex')
        task.env.SRCFILE = idx_node.name
        task.env.env = {}
        ret = makeindex_fun(task)
        if ret:
            error('error when calling makeindex %s' % idx_path)
            return ret

    # Re-run the compiler until the .aux file stabilizes; cap the number of
    # iterations to prevent against infinite loops - one never knows.
    aux_hash = ''
    for _ in range(10):
        prev_hash = aux_hash
        try:
            aux_hash = Utils.h_file(aux_node.abspath(env))
        except KeyError:
            error('could not read aux.h -> %s' % aux_node.abspath(env))

        # stop if file.aux does not change anymore
        if aux_hash and aux_hash == prev_hash:
            break

        # run the command
        warn('calling %s' % command)
        task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
        task.env.SRCFILE = srcfile
        ret = fun(task)
        if ret:
            # BUG FIX: this message referenced the undefined name
            # 'latex_compile_cmd', raising NameError on compiler failure.
            error('error when calling %s %s' % (command, srcfile))
            return ret

    return None  # ok
# Tasks are re-run when any of these environment variables change.
latex_vardeps = ['LATEX', 'LATEXFLAGS']
def latex_build(task):
    # Thin wrapper selecting the DVI-producing latex toolchain.
    return tex_build(task, 'LATEX')
pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS']
def pdflatex_build(task):
    # Thin wrapper selecting the PDF-producing pdflatex toolchain.
    return tex_build(task, 'PDFLATEX')
class tex_taskgen(TaskGen.task_gen):
    # Task generator for the 'tex' feature; it adds no behaviour of its own
    # beyond the base constructor.
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@feature('tex')
@before('apply_core')
def apply_tex(self):
    """Create latex/pdflatex tasks for each source file of a 'tex' task
    generator, wire up manual dependencies, and create the requested
    post-processing tasks (dvips/dvipdf/pdf2ps) per the 'outs' attribute."""
    # Default to pdflatex when no valid 'type' attribute was given.
    if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
        self.type = 'pdflatex'
    tree = self.bld
    outs = Utils.to_list(getattr(self, 'outs', []))
    # prompt for incomplete files (else the batchmode is used)
    self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
    # Resolve the explicitly declared extra dependencies, if any.
    deps_lst = []
    if getattr(self, 'deps', None):
        deps = self.to_list(self.deps)
        for filename in deps:
            n = self.path.find_resource(filename)
            if not n in deps_lst: deps_lst.append(n)
    self.source = self.to_list(self.source)
    for filename in self.source:
        base, ext = os.path.splitext(filename)
        node = self.path.find_resource(filename)
        if not node: raise Utils.WafError('cannot find %s' % filename)
        if self.type == 'latex':
            task = self.create_task('latex', node, node.change_ext('.dvi'))
        elif self.type == 'pdflatex':
            task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
        task.env = self.env
        task.curdirnode = self.path
        # add the manual dependencies
        if deps_lst:
            variant = node.variant(self.env)
            try:
                lst = tree.node_deps[task.unique_id()]
                for n in deps_lst:
                    if not n in lst:
                        lst.append(n)
            except KeyError:
                tree.node_deps[task.unique_id()] = deps_lst
        # Chain the conversion tasks requested via 'outs'.
        if self.type == 'latex':
            if 'ps' in outs:
                tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
                tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
            if 'pdf' in outs:
                tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
                tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
        elif self.type == 'pdflatex':
            if 'ps' in outs:
                self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
    # Sources were all consumed above; prevent apply_core from re-processing.
    self.source = []
def detect(conf):
    """Locate the TeX tool-chain programs and initialise their FLAGS vars."""
    env = conf.env
    programs = 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'
    for prog in programs.split():
        var_name = prog.upper()
        conf.find_program(prog, var=var_name)
        env[var_name + 'FLAGS'] = ''
    # dvips defaults to the pdf-oriented configuration
    env['DVIPSFLAGS'] = '-Ppdf'
# Register the simple command-line conversion tasks.
b = Task.simple_task_type
b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)
# latex/pdflatex run through Python build functions so that reruns and
# bibtex/makeindex passes can be orchestrated; both share the same scanner.
b = Task.task_type_from_func
cls = b('latex', latex_build, vars=latex_vardeps)
cls.scan = scan
cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
cls.scan = scan
|
gpl-3.0
|
JustasB/MitralSuite
|
Models/Migliore2014/mkmitral.py
|
1
|
1261
|
from neuron import h
h.load_file('mitral.hoc')
from getmitral import getmitral
def mkmitral(gid):
    """Build an h.Mitral model cell for mitral cell *gid*, shaped from the
    detailed reconstruction returned by getmitral(gid)."""
    nrn = getmitral(gid)
    m = h.Mitral()
    m.createsec(len(nrn.dend), len(nrn.tuft))
    m.subsets()
    m.topol(0) # need to connect secondary dendrites explicitly
    for i, d in enumerate(nrn.dend):
        # A secondary dendrite attaches either to the soma midpoint or to
        # the distal end of its parent secondary dendrite.
        if(d.parent == nrn.soma): # <<< changed name
            m.secden[i].connect(m.soma(.5))
        else:
            m.secden[i].connect(m.secden[d.parent.index](1)) # <<< changed name
    m.geometry()
    m.segments() # depends on geometry
    m.geometry() # again to get the hillock stylized shape
    fillall(nrn, m)
    m.segments() # again to get the proper number of segments for tuft and secden
    # Record the soma origin coordinates on the model cell.
    m.soma.push()
    m.x = h.x3d(0)
    m.y = h.y3d(0)
    m.z = h.z3d(0)
    h.pop_section()
    m.memb()
    return m
def fillall(n, m):
    """Copy the 3-d point data of every section of reconstruction *n* onto
    the corresponding sections of model cell *m*."""
    fillshape(n.soma, m.soma)
    fillshape(n.apic, m.priden)
    for i,s in enumerate(n.dend):
        fillshape(s, m.secden[i])
    for i,s in enumerate(n.tuft):
        fillshape(s, m.tuftden[i])
def fillshape(s1, s2):
    """Replace the 3-d shape of section *s2* with the (x, y, z, diam)
    points of *s1*."""
    s2.push()
    h.pt3dclear()
    for x in s1.points:
        h.pt3dadd(x[0], x[1], x[2], x[3])
    h.pop_section()
if __name__ == "__main__":
    # Build every mitral cell, printing the gid as a progress indicator.
    # Parenthesized print works under both Python 2 and Python 3; the
    # original bare `print mgid` statement is a syntax error on Python 3.
    for mgid in range(635):
        print(mgid)
        mkmitral(mgid)
|
mit
|
pataquets/phantomjs
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/environment_unittest.py
|
124
|
1853
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from .environment import Environment
class EnvironmentTest(unittest.TestCase):
    """Unit tests for the Environment wrapper."""

    def test_disable_gcc_smartquotes(self):
        # disable_gcc_smartquotes must force the C locale in the dictionary
        # handed to subprocesses.
        env_obj = Environment({})
        env_obj.disable_gcc_smartquotes()
        as_dict = env_obj.to_dictionary()
        self.assertEqual(as_dict['LC_ALL'], 'C')
|
bsd-3-clause
|
ryfeus/lambda-packs
|
LightGBM_sklearn_scipy_numpy/source/scipy/optimize/_lsq/bvls.py
|
15
|
4994
|
"""Bounded-Variable Least-Squares algorithm."""
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.linalg import norm, lstsq
from scipy.optimize import OptimizeResult
from .common import print_header_linear, print_iteration_linear
def compute_kkt_optimality(g, on_bound):
    """Return the maximum violation of the KKT conditions.

    For a variable on a bound the violation is g * sign(bound); for a free
    variable (on_bound == 0) it is |g|.
    """
    violation = on_bound * g
    free_mask = on_bound == 0
    violation[free_mask] = np.abs(g[free_mask])
    return violation.max()
def bvls(A, b, x_lsq, lb, ub, tol, max_iter, verbose):
    """Solve min ||A x - b||^2 subject to lb <= x <= ub by the BVLS method.

    Parameters mirror scipy.optimize.lsq_linear's internals: x_lsq is the
    unconstrained least-squares solution used as the starting point.
    Returns an OptimizeResult whose active_mask entry is -1/0/1 per
    variable for lower bound / free / upper bound.
    """
    m, n = A.shape

    x = x_lsq.copy()
    on_bound = np.zeros(n)

    # Clip the unconstrained solution into the box, recording which bound
    # each clipped variable sits on (-1 lower, +1 upper, 0 free).
    mask = x < lb
    x[mask] = lb[mask]
    on_bound[mask] = -1

    mask = x > ub
    x[mask] = ub[mask]
    on_bound[mask] = 1

    free_set = on_bound == 0
    active_set = ~free_set
    free_set, = np.where(free_set)

    r = A.dot(x) - b
    cost = 0.5 * np.dot(r, r)
    initial_cost = cost
    g = A.T.dot(r)

    cost_change = None
    step_norm = None
    iteration = 0

    if verbose == 2:
        print_header_linear()

    # This is the initialization loop. The requirement is that the
    # least-squares solution on free variables is feasible before BVLS starts.
    # One possible initialization is to set all variables to lower or upper
    # bounds, but many iterations may be required from this state later on.
    # The implemented ad-hoc procedure which intuitively should give a better
    # initial state: find the least-squares solution on current free variables,
    # if its feasible then stop, otherwise set violating variables to
    # corresponding bounds and continue on the reduced set of free variables.
    while free_set.size > 0:
        if verbose == 2:
            optimality = compute_kkt_optimality(g, on_bound)
            print_iteration_linear(iteration, cost, cost_change, step_norm,
                                   optimality)

        iteration += 1
        x_free_old = x[free_set].copy()

        # Least-squares solve restricted to the current free variables.
        A_free = A[:, free_set]
        b_free = b - A.dot(x * active_set)
        z = lstsq(A_free, b_free, rcond=-1)[0]

        # Free variables whose solution violates a bound get fixed there.
        lbv = z < lb[free_set]
        ubv = z > ub[free_set]
        v = lbv | ubv

        if np.any(lbv):
            ind = free_set[lbv]
            x[ind] = lb[ind]
            active_set[ind] = True
            on_bound[ind] = -1

        if np.any(ubv):
            ind = free_set[ubv]
            x[ind] = ub[ind]
            active_set[ind] = True
            on_bound[ind] = 1

        # Non-violating free variables take their least-squares values.
        ind = free_set[~v]
        x[ind] = z[~v]

        r = A.dot(x) - b
        cost_new = 0.5 * np.dot(r, r)
        cost_change = cost - cost_new
        cost = cost_new
        g = A.T.dot(r)
        step_norm = norm(x[free_set] - x_free_old)

        if np.any(v):
            free_set = free_set[~v]
        else:
            # Fully feasible on the free set: initialization complete.
            break

    if max_iter is None:
        max_iter = n
    max_iter += iteration

    termination_status = None

    # Main BVLS loop.

    optimality = compute_kkt_optimality(g, on_bound)
    for iteration in range(iteration, max_iter):
        if verbose == 2:
            print_iteration_linear(iteration, cost, cost_change,
                                   step_norm, optimality)

        if optimality < tol:
            termination_status = 1

        if termination_status is not None:
            break

        # Free the active variable with the largest KKT violation.
        move_to_free = np.argmax(g * on_bound)
        on_bound[move_to_free] = 0
        free_set = on_bound == 0
        active_set = ~free_set
        free_set, = np.nonzero(free_set)

        x_free = x[free_set]
        x_free_old = x_free.copy()
        lb_free = lb[free_set]
        ub_free = ub[free_set]

        A_free = A[:, free_set]
        b_free = b - A.dot(x * active_set)
        z = lstsq(A_free, b_free, rcond=-1)[0]

        lbv, = np.nonzero(z < lb_free)
        ubv, = np.nonzero(z > ub_free)
        v = np.hstack((lbv, ubv))

        if v.size > 0:
            # Step from x_free towards z, stopping at the first bound hit;
            # the variable hitting the bound becomes active.
            alphas = np.hstack((
                lb_free[lbv] - x_free[lbv],
                ub_free[ubv] - x_free[ubv])) / (z[v] - x_free[v])

            i = np.argmin(alphas)
            i_free = v[i]
            alpha = alphas[i]

            x_free *= 1 - alpha
            x_free += alpha * z

            if i < lbv.size:
                on_bound[free_set[i_free]] = -1
            else:
                on_bound[free_set[i_free]] = 1
        else:
            # The restricted solution is feasible: accept it entirely.
            x_free = z

        x[free_set] = x_free
        step_norm = norm(x_free - x_free_old)

        r = A.dot(x) - b
        cost_new = 0.5 * np.dot(r, r)
        cost_change = cost - cost_new

        if cost_change < tol * cost:
            termination_status = 2
        cost = cost_new

        g = A.T.dot(r)
        optimality = compute_kkt_optimality(g, on_bound)

    if termination_status is None:
        termination_status = 0

    return OptimizeResult(
        x=x, fun=r, cost=cost, optimality=optimality, active_mask=on_bound,
        nit=iteration + 1, status=termination_status,
        initial_cost=initial_cost)
|
mit
|
freedryk/cloudtracker
|
cloudtracker/generate_cloudlets.py
|
1
|
6967
|
#!/usr/bin/env python
"""
This program generates a pkl file containing a list of dictionaries.
Each dictionary in the list represents a condensedlet.
The dictionaries have the structure:
{'core': array of ints of core points,
'condensed': array of ints of condensed points,
'plume': array of ints of plume points,
'u_condensed': ,
'v_condensed': ,
'w_condensed': ,
'u_plume': ,
'v_plume': ,
'w_plume': }
pkl files are saved in pkl/ subdirectory indexed by time
"""
from __future__ import print_function
from __future__ import absolute_import
import numpy
from .utility_functions import index_to_zyx, expand_indexes
#-------------------
def expand_cloudlet(cloudlet, indexes, MC):
    """Grow a cloudlet by one shell of neighbouring points.

    Given an array of indexes composing a cloudlet and a boolean mask
    array indicating if each model index may be expanded into (True) or
    not (False), expand the cloudlet into the permissible indexes
    adjacent to it.

    Returns an array of the indexes newly added to the cloudlet, and the
    mask updated so those indexes can no longer be expanded into.
    """
    # Expand the cloudlet indexes into their nearest neighbours
    expanded_cloudlet = expand_indexes(cloudlet, MC)

    # Find the mask values of the expanded indexes
    mask = indexes[expanded_cloudlet]

    # Select the expanded cloudlet indexes that may be expanded into
    new_points = expanded_cloudlet[mask]

    # Remove the indicies that have been added to the cloudlet
    indexes[new_points] = False

    return new_points, indexes
#---------------------
def expand_current_cloudlets(key, cloudlets, mask, MC):
    """Iteratively grow every cloudlet's `key` point set into the True
    entries of `mask` until no cloudlet can expand any further.

    Returns the updated cloudlets and the consumed mask.
    """
    # Each cloudlet keeps a list of index arrays, one per expansion shell.
    cloudlet_points = []
    for cloudlet in cloudlets:
        cloudlet_points.append( [cloudlet[key]] )

    cloudlet_expand_indexes = range(len(cloudlet_points))

    while cloudlet_expand_indexes:
        next_loop_cloudlet_list = []

        # Go through the current list of cloudlets
        for n in cloudlet_expand_indexes:
            expanded_points, mask = expand_cloudlet(cloudlet_points[n][-1],
                                                    mask,
                                                    MC)

            if len(expanded_points) > 0:
                cloudlet_points[n].append(expanded_points)
                # Only cloudlets that grew this round are retried.
                next_loop_cloudlet_list.append(n)

        cloudlet_expand_indexes = next_loop_cloudlet_list

    # Merge each cloudlet's expansion shells into one index array.
    for n, cloudlet in enumerate(cloudlet_points):
        cloudlets[n][key] = numpy.hstack(cloudlet)

    return cloudlets, mask
#---------------------
def make_new_cloudlets(key, mask, MC):
    """Partition the True points of `mask` into connected cloudlets.

    Returns a list of single-entry dicts {key: index array}, one per
    connected region; `mask` is consumed (set to False) in the process.
    """
    indexes = numpy.arange(MC['nx']*MC['ny']*MC['nz'])[mask]
    cloudlets = []

    for n in indexes:
        if mask[n]:
            mask[n] = False
            cloudlet_indexes = [numpy.array((n,))]

            # add_new_cloudlet: grow from the seed point until no
            # neighbouring point can be added.
            done = False
            while not done:
                new_indexes, mask = expand_cloudlet(cloudlet_indexes[-1], mask, MC)

                if len(new_indexes) > 0:
                    cloudlet_indexes.append( new_indexes )
                else:
                    # If the number of points in the cloudlet has not changed, we are done
                    done = True

            cloudlet = {}
            cloudlet[key] = numpy.hstack(cloudlet_indexes)
            cloudlets.append( cloudlet )

    return cloudlets
#-----------------
def find_mean_cloudlet_velocity(cloudlets,
                                u, v, w,
                                MC):
    """Attach the mean advection velocity of the condensed and plume
    regions to each cloudlet, in grid cells per time step.

    The geostrophic wind (ug, vg) is subtracted from the horizontal means.
    Cloudlets without condensed points get zero condensed velocity.
    """
    dx, dy, dz, dt = MC['dx'], MC['dy'], MC['dz'], MC['dt']
    ug, vg = MC['ug'], MC['vg']
    for cloudlet in cloudlets:
        if len(cloudlet['condensed']) > 0:
            K, J, I = index_to_zyx( cloudlet['condensed'], MC )
            # find the mean motion of the cloudlet
            u_mean = u[K, J, I].mean()-ug
            v_mean = v[K, J, I].mean()-vg
            w_mean = w[K, J, I].mean()
            # Convert m/s to grid cells per model time step.
            cloudlet['u_condensed'] = round(u_mean*dt/dx)
            cloudlet['v_condensed'] = round(v_mean*dt/dy)
            cloudlet['w_condensed'] = round(w_mean*dt/dz)
        else:
            cloudlet['u_condensed'] = 0.
            cloudlet['v_condensed'] = 0.
            cloudlet['w_condensed'] = 0.

        K, J, I = index_to_zyx( cloudlet['plume'], MC )
        # find the mean motion of the cloudlet
        u_mean = u[K, J, I].mean()-ug
        v_mean = v[K, J, I].mean()-vg
        w_mean = w[K, J, I].mean()
        cloudlet['u_plume'] = round(u_mean*dt/dx)
        cloudlet['v_plume'] = round(v_mean*dt/dy)
        cloudlet['w_plume'] = round(w_mean*dt/dz)

    return cloudlets
#----------------------------
def generate_cloudlets(core, condensed, plume, u, v, w, MC):
    """Partition the core/condensed/plume boolean fields into cloudlets
    and attach mean velocities to each.

    Fix: ``numpy.int`` was a deprecated alias of the builtin ``int`` and
    was removed in NumPy 1.24; ``dtype=int`` is the equivalent, portable
    spelling.
    """
    # find the indexes of all the core and plume points
    core = core.flatten()
    condensed = condensed.flatten()
    plume = plume.flatten()

    # Make the masks mutually exclusive: condensed excludes core points,
    # plume excludes condensed points.
    plume[condensed] = False
    condensed[core] = False

    # Create the list that will hold the cloudlets
    cloudlets = make_new_cloudlets('core', core, MC)

    for cloudlet in cloudlets:
        cloudlet['condensed'] = cloudlet['core'][:]

    ncore = len(cloudlets)
    print("\t%d core cloudlets" % ncore)

    cloudlets, condensed = expand_current_cloudlets('condensed',
                                                    cloudlets,
                                                    condensed,
                                                    MC)

    # Add any remaining points that have not been added to cloudlets
    # as new cloudlets.
    condensed_cloudlets = make_new_cloudlets('condensed', condensed, MC)

    for cloudlet in condensed_cloudlets:
        cloudlet['core'] = numpy.array([], dtype=int)
        cloudlets.append(cloudlet)

    for cloudlet in cloudlets:
        cloudlet['plume'] = cloudlet['condensed'][:]

    ncondensed = len(cloudlets)
    print("\t%d condensed cloudlets" % (ncondensed-ncore))

    cloudlets, plume = expand_current_cloudlets('plume',
                                                cloudlets,
                                                plume,
                                                MC)

    # Add any remaining points that have not been added to cloudlets
    # as new cloudlets.
    plume_cloudlets = make_new_cloudlets('plume', plume, MC)

    for cloudlet in plume_cloudlets:
        cloudlet['core'] = numpy.array([], dtype=int)
        cloudlet['condensed'] = numpy.array([], dtype=int)
        cloudlets.append(cloudlet)

    nplume = len(cloudlets)
    print("\t%d plume cloudlets" % (nplume-ncondensed))

    cloudlets = find_mean_cloudlet_velocity(cloudlets,
                                            u, v, w,
                                            MC)

    return cloudlets
if __name__ == "__main__":
    # Run the embedded doctests when executed as a script.
    import doctest
    doctest.testmod()
|
bsd-2-clause
|
Ahuge/ShortcutEditorNuke
|
keyboard/keys/base_key.py
|
1
|
1369
|
import weakref
from PySide import QtGui, QtCore
from resources import HEX_COLOUR_NONE, HEX_COLOUR_RED, HEX_COLOUR_GREEN, HEX_COLOUR_BLUE
class BaseKey(QtGui.QPushButton):
    """A single checkable key button of the keyboard widget.

    Emits KEY_CLICKED with a weak reference to itself so listeners do not
    keep the key alive.
    """
    KEY_TYPE = "default"
    KEY_CLICKED = QtCore.Signal(weakref.ReferenceType)
    BG_COLOUR = HEX_COLOUR_NONE

    def __init__(self, *args, **kwargs):
        super(BaseKey, self).__init__(*args, **kwargs)
        self.colour = self.BG_COLOUR
        self.type = self.KEY_TYPE
        self.setMinimumWidth(32)
        self.setMinimumHeight(32)
        self.setCheckable(True)
        self.clicked.connect(self.emit_clicked)

    def emit_clicked(self, *args, **kwargs):
        """Re-emit the Qt clicked signal as KEY_CLICKED carrying a weakref."""
        weak_self = weakref.ref(self)
        self.KEY_CLICKED.emit(weak_self)

    def set_colour(self):
        """Apply self.colour as the button background via a stylesheet.

        Fixes: the original compared with ``is not`` (identity) against an
        int constant, which relies on interning; use value inequality.
        Also removed a leftover Python-2-only debug ``print`` statement
        that made the module unimportable on Python 3.
        """
        self.setStyleSheet("")
        if self.colour != HEX_COLOUR_NONE:
            # Zero-pad the hex value to a 6-digit CSS colour, e.g. "#00ff00".
            css_colour = "#{message:{fill}{align}{width}}".format(message=hex(self.colour)[2:], fill='0',
                                                                  align='>', width=6)
            self.setStyleSheet("""
QPushButton {
background-color: %s;
}
""" % css_colour)

    def click(self, set_to=None):
        """Programmatic click; optionally force the checked state.

        NOTE(review): a falsy set_to (e.g. False) is ignored, so the
        checked state cannot be forced off here — confirm this is intended.
        """
        r = super(BaseKey, self).click()
        if set_to:
            self.setChecked(set_to)
        self.set_colour()
        return r
|
gpl-3.0
|
geraldinepascal/FROGS
|
assessment/bin/its_assessRealMock.py
|
1
|
14370
|
#!/usr/bin/env python2.7
#
# Copyright (C) 2016 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__author__ = 'Plateforme bioinformatique Toulouse / Sigenae Jouy en Josas'
__copyright__ = 'Copyright (C) 2016 INRA'
__license__ = 'GNU General Public License'
__version__ = '1.0.0'
__email__ = 'frogs-support@inrae.fr'
__status__ = 'prod'
import re
import sys
import argparse
import warnings
from frogsBiom import BiomIO
##################################################################################################################################################
#
# FUNCTIONS
#
##################################################################################################################################################
def getCleanedTaxonomy( taxonomy ):
    """
    @summary: Returns the cleaned list of taxonomic ranks.
    @param taxonomy: [str|list|tuple] The raw taxonomy metadata.
    @return: [list] The list of taxonomic ranks.
    @note: Unfortunately some BIOM have a non-canonical format for store the taxonomy metadata. This method manages the below formats.
    - list or tuple:
    ["d:Bacteria", "Proteobacteria", "Epsilonproteobacteria", "Campylobacterales", "Helicobacteraceae", "Helicobacter"]
    - string:
    "Bacteria;Proteobacteria;Epsilonproteobacteria;Campylobacterales;Helicobacteraceae;Helicobacter"
    - string ended by rank separator:
    "Bacteria;Proteobacteria;Epsilonproteobacteria;Campylobacterales;Helicobacteraceae;Helicobacter;"
    - string with bootstrap:
    "Bacteria(1.0000);Proteobacteria(0.9997);Epsilonproteobacteria(1.0000);Campylobacterales(1.0000);Helicobacteraceae(0.9898);Helicobacter(0.9912)"
    - string with bootstrap and ended by rank separator:
    "Bacteria(1.0000);Proteobacteria(0.9997);Epsilonproteobacteria(1.0000);Campylobacterales(1.0000);Helicobacteraceae(0.9898);Helicobacter(0.9912);"
    """
    cleaned_taxonomy = list()
    first_rank = "d:"
    # Get taxonomy as a r/w list
    if isinstance(taxonomy, list) or isinstance(taxonomy, tuple): # Copy the list
        cleaned_taxonomy = [taxon.strip() for taxon in taxonomy]
    else: # Convert taxonomy in list
        cleaned_taxonomy = taxonomy
        if cleaned_taxonomy.strip().endswith(";"):
            cleaned_taxonomy = cleaned_taxonomy.strip()[:-1]
        if len(cleaned_taxonomy.split(";")) <= 3: # The tax separator is ","
            cleaned_taxonomy = cleaned_taxonomy.replace(",", ";")
        cleaned_taxonomy = [taxon.strip() for taxon in cleaned_taxonomy.split(";")]
    # Remove bootstrap information if its exist
    boostrap_regexp = re.compile("^(.+)\(\d+(\.\d+)?\)$")
    if len(cleaned_taxonomy) != 0 and boostrap_regexp.match(cleaned_taxonomy[0]) is not None: # Taxonomy contains bootstrap values
        for rank, taxon in enumerate(cleaned_taxonomy):
            matches = boostrap_regexp.search(taxon)
            cleaned_taxonomy[rank] = matches.group(1).strip()
    # Remove IDs
    cleaned_taxonomy = [taxon.split('[id:')[0].strip() for taxon in cleaned_taxonomy] # remove "[id: .....]"
    # Remove quotes
    for rank, taxon in enumerate(cleaned_taxonomy):
        cleaned_taxonomy[rank] = cleaned_taxonomy[rank].replace('\"', "").replace('"', "")
    # Deal with OTUs without affiliation
    if len(cleaned_taxonomy) == 0:
        cleaned_taxonomy = ["d:unknown_taxa", "p:unknown_taxa", "c:unknown_taxa", "o:unknown_taxa", "f:unknown_taxa", "g:unknown_taxa","s:unknown_taxa"]
    # Remove root
    if cleaned_taxonomy[0].lower() == "root" or cleaned_taxonomy[0].lower() == "rootrank" or cleaned_taxonomy[0].lower() == "r:root":
        cleaned_taxonomy = cleaned_taxonomy[1:]
    # Complete taxonomy for uparse db
    if cleaned_taxonomy[0].startswith("k:"):
        first_rank = "k:"
    if cleaned_taxonomy[0].startswith(first_rank):
        tmp_tax = list()
        rank_idx = 0
        ranks = [first_rank, "p:", "c:", "o:", "f:", "g:","s:"]
        for taxa in cleaned_taxonomy:
            # Insert placeholder ranks for any levels missing before this taxon.
            while not taxa.startswith(ranks[rank_idx]) and taxa != "Multi-affiliation" and taxa != "unclassified" and taxa != "NA":
                tmp_tax.append(ranks[rank_idx] + "unknown_taxa")
                rank_idx += 1
            tmp_tax.append(taxa)
            rank_idx += 1
        # Pad the tail with unknown_taxa down to the species level.
        while rank_idx != len(ranks):
            tmp_tax.append(ranks[rank_idx] + "unknown_taxa")
            rank_idx += 1
        cleaned_taxonomy = tmp_tax
    return cleaned_taxonomy
def cmpTaxAbund( expected, checked, depth ):
    """Compare expected vs checked abundances at one taxonomic depth.

    @param expected: [dict] depth -> {taxonomy: abundance} for the mock.
    @param checked: [dict] depth -> {taxonomy: abundance} observed.
    @param depth: [int] the taxonomic depth to compare.
    @return: [dict] divergence (%), numbers of common/specific taxa and
             the per-taxon expected/checked percentages.

    Fix: replaced ``dict.has_key`` (Python 2 only, removed in Python 3)
    with the ``in`` operator, which works on both; removed a large block
    of commented-out code.
    """
    identity = 0
    divergence = 0
    common_taxa = 0
    expected_specific = 0
    checked_specific = 0
    detailed_cmp = dict()
    already_processed = list()
    total_expected = sum([abund for taxon, abund in expected[depth].items()])
    total_checked = sum([abund for taxon, abund in checked[depth].items()])
    for taxon, abund in expected[depth].items():
        already_processed.append( taxon )
        if taxon in checked[depth]:
            common_taxa += 1
            prct_expected = (float(abund)*100)/total_expected
            prct_checked = (float(checked[depth][taxon])*100)/total_checked
            # The shared fraction of both profiles contributes to identity.
            identity += min( prct_expected, prct_checked )
            detailed_cmp[taxon] = {"expected": prct_expected, "checked": prct_checked}
        else:
            expected_specific += 1
            prct_expected = (float(abund)*100)/total_expected
            detailed_cmp[taxon] = {"expected": prct_expected, "checked": 0}
    # Taxa observed but never expected.
    for taxon, abund in checked[depth].items():
        if not taxon in already_processed:
            checked_specific += 1
            prct_checked = (float(checked[depth][taxon])*100)/total_checked
            detailed_cmp[taxon] = {"expected": 0, "checked": prct_checked}
    divergence = 100 - identity
    return {
        'divergence': divergence,
        'common_taxa': common_taxa,
        'expected_specific': expected_specific,
        'checked_specific': checked_specific,
        'detailed_cmp': detailed_cmp
    }
def get_expected( abund_file ):
    """Parse the expected-abundance TSV into {depth: {taxonomy: count}}.

    Each line holds "taxonomy<TAB>count"; counts are accumulated for every
    partial taxonomy prefix, keyed by its depth.
    """
    expected_by_depth = dict()
    handle = open(abund_file)
    for line in handle:
        taxonomy, count = line.strip().split("\t")
        clean_taxonomy = getCleanedTaxonomy(taxonomy)
        for depth in range(len(clean_taxonomy)):
            partial_taxonomy = ";".join(clean_taxonomy[:depth + 1])
            depth_counts = expected_by_depth.setdefault(depth, dict())
            depth_counts[partial_taxonomy] = depth_counts.get(partial_taxonomy, 0) + float(count)
    handle.close()
    return expected_by_depth
def get_checked( abund_file, checked_sample, taxonomy_key, expected_by_depth ):
    """Build {depth: {taxonomy: count}} for checked_sample from a BIOM file.

    Multi-affiliation observations are resolved against the expected
    taxonomies; when several expected taxonomies match, the count is split
    evenly between them. With no expected match, the first affiliation is
    used.
    """
    checked_by_depth = dict()
    biom = BiomIO.from_json(abund_file)
    for current_obs in biom.get_observations():
        # Taxonomy of the observation (placeholder ranks when missing).
        clean_taxonomy = getCleanedTaxonomy(current_obs["metadata"][taxonomy_key]) if current_obs["metadata"][taxonomy_key] is not None else ["unknown_taxa"]*len(expected_by_depth)
        # Abundance of the observation in the checked sample.
        count = biom.get_count(current_obs["id"], checked_sample)
        if count > 0:
            # check multiaffi
            if clean_taxonomy[len(clean_taxonomy)-1] == "Multi-affiliation":
                selected = list()
                taxonomies = list()
                expected_taxonomies = expected_by_depth[len(clean_taxonomy)-1]
                # For each alternative blast affiliation of the observation.
                for affi_idx in range(len(current_obs["metadata"]["blast_affiliations"])):
                    # Cleaned taxonomy as a ";"-joined string.
                    affi_taxonomy = ";".join(getCleanedTaxonomy(current_obs["metadata"]["blast_affiliations"][affi_idx]["taxonomy"]))
                    # Keep the (deduplicated) affiliations that are expected.
                    if affi_taxonomy not in taxonomies:
                        taxonomies.append(affi_taxonomy)
                        if affi_taxonomy in expected_taxonomies:
                            selected.append((getCleanedTaxonomy(current_obs["metadata"]["blast_affiliations"][affi_idx]["taxonomy"])))
                if len(selected) > 0:
                    # Split the count evenly across the expected matches.
                    for multi in selected:
                        for rank_depth in range(len(multi)):
                            rank_taxonomy = ";".join(multi[:rank_depth + 1])
                            if rank_depth not in checked_by_depth:
                                checked_by_depth[rank_depth] = dict()
                            if rank_taxonomy not in checked_by_depth[rank_depth]:
                                checked_by_depth[rank_depth][rank_taxonomy] = 0
                            checked_by_depth[rank_depth][rank_taxonomy] += count/len(selected)
                else:
                    # No expected match: fall back to the first affiliation.
                    clean_taxonomy = taxonomies[0].split(";")
                    for rank_depth in range(len(clean_taxonomy)):
                        rank_taxonomy = ";".join(clean_taxonomy[:rank_depth + 1])
                        if rank_depth not in checked_by_depth:
                            checked_by_depth[rank_depth] = dict()
                        if rank_taxonomy not in checked_by_depth[rank_depth]:
                            checked_by_depth[rank_depth][rank_taxonomy] = 0
                        checked_by_depth[rank_depth][rank_taxonomy] += count
            else:
                # Single affiliation: accumulate every taxonomy prefix.
                for rank_depth in range(len(clean_taxonomy)):
                    rank_taxonomy = ";".join(clean_taxonomy[:rank_depth + 1])
                    if rank_depth not in checked_by_depth:
                        checked_by_depth[rank_depth] = dict()
                    if rank_taxonomy not in checked_by_depth[rank_depth]:
                        checked_by_depth[rank_depth][rank_taxonomy] = 0
                    checked_by_depth[rank_depth][rank_taxonomy] += count
    return checked_by_depth
##################################################################################################################################################
#
# MAIN
#
##################################################################################################################################################
if __name__ == "__main__":
    # Manage parameters
    parser = argparse.ArgumentParser( description='Compare expected abundance by taxon (in INPUT_TSV) with obtained abudance for the sample (sample in INPUT_BIOM).' )
    parser.add_argument( '-v', '--version', action='version', version=__version__ )
    parser.add_argument( '-e', '--expected-abund', required=True, help='Path to the expected abundance by taxon (format: TSV).' )
    parser.add_argument( '-c', '--checked-abund', required=True, help='Path to the checked abundance file (format: BIOM).' )
    parser.add_argument( '-s', '--checked-sample', required=True, help='Name of checked sample.' )
    parser.add_argument( '-k', '--taxonomy-key', default="taxonomy", help='The used tag to store taxonomy in the BIOM file. [Default: taxonomy].' )
    parser.add_argument( '-r', '--checked-ranks', nargs='*', default=["Domain", "Phylum", "Class", "Order", "Family", "Genus", "Species"], help='Evaluated ranks.' )
    args = parser.parse_args()

    # Expected abundances from the reference TSV.
    expected_by_depth = get_expected(args.expected_abund)

    # Observed abundances for the checked sample from the BIOM file.
    checked_by_depth = get_checked(args.checked_abund, args.checked_sample, args.taxonomy_key, expected_by_depth)

    # Comparison: one summary line per taxonomic rank.
    details = dict()
    print "#Rank\tDivergence (%)\tCommon\tExpected specific\tChecked specific"
    for depth, rank in enumerate(args.checked_ranks):
        metrics = cmpTaxAbund( expected_by_depth, checked_by_depth, depth )
        details[depth] = metrics["detailed_cmp"]
        print rank + "\t" + str(metrics['divergence']) + "\t" + str(metrics['common_taxa']) + "\t" + str(metrics['expected_specific']) + "\t" + str(metrics['checked_specific'])
    print ""
    # Per-rank detailed expected/checked percentages.
    try:
        for depth, rank in enumerate(args.checked_ranks):
            print "#Rank " + rank
            print "#Taxon\tExpected (%)\tChecked (%)"
            for taxon in sorted(details[depth]):
                print taxon + "\t" + str(details[depth][taxon]["expected"]) + "\t" + str(details[depth][taxon]["checked"])
            print ""
    except KeyError:
        pass
|
gpl-3.0
|
tinkerinestudio/Tinkerine-Suite
|
TinkerineSuite/python/Lib/OpenGL/raw/GL/EXT/pixel_transform.py
|
3
|
2932
|
'''OpenGL extension EXT.pixel_transform
Automatically generated by the get_gl_extensions script, do not edit!
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions
from OpenGL.GL import glget
import ctypes
# Extension identification and enumerant constants for EXT_pixel_transform.
EXTENSION_NAME = 'GL_EXT_pixel_transform'
_DEPRECATED = False
GL_PIXEL_TRANSFORM_2D_EXT = constant.Constant( 'GL_PIXEL_TRANSFORM_2D_EXT', 0x8330 )
GL_PIXEL_MAG_FILTER_EXT = constant.Constant( 'GL_PIXEL_MAG_FILTER_EXT', 0x8331 )
GL_PIXEL_MIN_FILTER_EXT = constant.Constant( 'GL_PIXEL_MIN_FILTER_EXT', 0x8332 )
GL_PIXEL_CUBIC_WEIGHT_EXT = constant.Constant( 'GL_PIXEL_CUBIC_WEIGHT_EXT', 0x8333 )
GL_CUBIC_EXT = constant.Constant( 'GL_CUBIC_EXT', 0x8334 )
GL_AVERAGE_EXT = constant.Constant( 'GL_AVERAGE_EXT', 0x8335 )
GL_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT = constant.Constant( 'GL_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT', 0x8336 )
glget.addGLGetConstant( GL_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT, (1,) )
GL_MAX_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT = constant.Constant( 'GL_MAX_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT', 0x8337 )
glget.addGLGetConstant( GL_MAX_PIXEL_TRANSFORM_2D_STACK_DEPTH_EXT, (1,) )
GL_PIXEL_TRANSFORM_2D_MATRIX_EXT = constant.Constant( 'GL_PIXEL_TRANSFORM_2D_MATRIX_EXT', 0x8338 )
# Auto-generated wrappers for the extension's entry points.
glPixelTransformParameteriEXT = platform.createExtensionFunction(
'glPixelTransformParameteriEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLint,),
doc='glPixelTransformParameteriEXT(GLenum(target), GLenum(pname), GLint(param)) -> None',
argNames=('target','pname','param',),
deprecated=_DEPRECATED,
)
glPixelTransformParameterfEXT = platform.createExtensionFunction(
'glPixelTransformParameterfEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,constants.GLfloat,),
doc='glPixelTransformParameterfEXT(GLenum(target), GLenum(pname), GLfloat(param)) -> None',
argNames=('target','pname','param',),
deprecated=_DEPRECATED,
)
glPixelTransformParameterivEXT = platform.createExtensionFunction(
'glPixelTransformParameterivEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,arrays.GLintArray,),
doc='glPixelTransformParameterivEXT(GLenum(target), GLenum(pname), GLintArray(params)) -> None',
argNames=('target','pname','params',),
deprecated=_DEPRECATED,
)
glPixelTransformParameterfvEXT = platform.createExtensionFunction(
'glPixelTransformParameterfvEXT',dll=platform.GL,
extension=EXTENSION_NAME,
resultType=None,
argTypes=(constants.GLenum,constants.GLenum,arrays.GLfloatArray,),
doc='glPixelTransformParameterfvEXT(GLenum(target), GLenum(pname), GLfloatArray(params)) -> None',
argNames=('target','pname','params',),
deprecated=_DEPRECATED,
)
def glInitPixelTransformEXT():
    '''Return boolean indicating whether the GL_EXT_pixel_transform extension is available'''
    return extensions.hasGLExtension( EXTENSION_NAME )
|
agpl-3.0
|
doismellburning/django
|
django/conf/locale/de_CH/formats.py
|
115
|
1445
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
from __future__ import unicode_literals
# Display formats (Django date-format syntax, see link above).
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
# Compact numeric date variants.
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d.%m.%Y', '%d.%m.%y',  # '25.10.2006', '25.10.06'
    # '%d. %B %Y', '%d. %b. %Y',  # '25. October 2006', '25. Oct. 2006'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y %H:%M:%S',     # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',  # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',        # '25.10.2006 14:30'
    '%d.%m.%Y',              # '25.10.2006'
)

# these are the separators for non-monetary numbers. For monetary numbers,
# the DECIMAL_SEPARATOR is a . (decimal point) and the THOUSAND_SEPARATOR is a
# ' (single quote).
# For details, please refer to http://www.bk.admin.ch/dokumentation/sprachen/04915/05016/index.html?lang=de
# (in German) and the documentation
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
|
bsd-3-clause
|
nttks/edx-platform
|
cms/djangoapps/contentstore/tests/test_libraries.py
|
1
|
45755
|
"""
Content library unit tests that require the CMS runtime.
"""
from django.test.client import RequestFactory
from django.test.utils import override_settings
from contentstore.tests.utils import AjaxEnabledTestClient, parse_json, switch_ga_global_course_creator
from contentstore.utils import reverse_url, reverse_usage_url, reverse_course_url
from contentstore.views.item import _duplicate_item
from contentstore.views.library import _list_libraries
from contentstore.views.preview import _load_preview_module
import ddt
from mock import patch
from student import auth
from student.auth import has_studio_read_access, has_studio_write_access
from student.roles import (
CourseInstructorRole, CourseStaffRole, CourseCreatorRole, LibraryUserRole,
OrgStaffRole, OrgInstructorRole, OrgLibraryUserRole,
)
from xblock.reference.user_service import XBlockUser
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from mock import Mock
from opaque_keys.edx.locator import CourseKey, LibraryLocator
from openedx.core.djangoapps.content.course_structures.tests import SignalDisconnectTestMixin
from openedx.core.djangoapps.ga_optional.models import CourseOptionalConfiguration
from xmodule.x_module import STUDIO_VIEW
class LibraryTestCase(ModuleStoreTestCase):
    """
    Common functionality for content libraries tests.

    Creates a course, enables the library feature for it, creates a linked
    library, and logs in as the staff user; provides REST-API helpers used
    by the concrete test classes below.
    """
    def setUp(self):
        """Create a course plus a library linked to it, logged in as staff."""
        # ModuleStoreTestCase.setUp() returns the created user's password.
        self.user_password = super(LibraryTestCase, self).setUp()
        self.course = CourseFactory.create()
        self.client = AjaxEnabledTestClient()
        # Already logged out, so skip the redundant logout on first login.
        self._login_as_staff_user(logout_first=False)
        # The library feature must be enabled for the course before a
        # library can be created for it.
        self._setting_library_option(self.course)
        self.lib_key = self._create_library(self.course)
        self.library = modulestore().get_library(self.lib_key)
        self.session_data = {}  # Used by _bind_module

    def _setting_library_option(self, course):
        """Enable the 'library-for-settings' optional feature for `course`."""
        CourseOptionalConfiguration(
            id=1,
            change_date="2015-06-18 11:02:13",
            enabled=True,
            key='library-for-settings',
            course_key=course.id,
            changed_by_id=self.user.id
        ).save()

    def _login_as_staff_user(self, logout_first=True):
        """ Login as a staff user """
        if logout_first:
            self.client.logout()
        self.client.login(username=self.user.username, password=self.user_password)

    def _create_library(self, course, org='org', library='lib', display_name='Test Library'):
        """
        Helper method used to create a library. Uses the REST API.

        Also appends the new library key to the course's 'target_library'
        field so the course is linked to the library.
        """
        lib_url = reverse_course_url('course_library_handler', course.id)
        response = self.client.ajax_post(lib_url, {
            'org': org,
            'library': library,
            'display_name': display_name,
        })
        self.assertEqual(response.status_code, 200)
        lib_info = parse_json(response)
        lib_key = CourseKey.from_string(lib_info['library_key'])
        self.assertIsInstance(lib_key, LibraryLocator)
        # Link the course to the new library via its 'target_library' list.
        libraries = getattr(course, 'target_library', [])
        libraries.append(unicode(lib_key))
        setattr(course, 'target_library', libraries)
        modulestore().update_item(course, self.user.id)
        return lib_key

    def _add_library_content_block(self, course, library_key, other_settings=None):
        """
        Helper method to add a LibraryContent block to a course.
        The block will be configured to select content from the library
        specified by library_key.
        other_settings can be a dict of Scope.settings fields to set on the block.
        """
        return ItemFactory.create(
            category='library_content',
            parent_location=course.location,
            user_id=self.user.id,
            publish_item=False,
            source_library_id=unicode(library_key),
            **(other_settings or {})
        )

    def _add_simple_content_block(self):
        """ Adds simple HTML block to library """
        return ItemFactory.create(
            category="html", parent_location=self.library.location,
            user_id=self.user.id, publish_item=False
        )

    def _refresh_children(self, lib_content_block, status_code_expected=200):
        """
        Helper method: Uses the REST API to call the 'refresh_children' handler
        of a LibraryContent block

        Returns the reloaded block after the handler has run.
        """
        # The handler requires a 'user' runtime service; install a mock one
        # if the block's runtime does not already provide it.
        if 'user' not in lib_content_block.runtime._services:  # pylint: disable=protected-access
            mocked_user_service = Mock(user_id=self.user.id)
            mocked_user_service.get_current_user.return_value = XBlockUser(is_current_user=True)
            lib_content_block.runtime._services['user'] = mocked_user_service  # pylint: disable=protected-access
        handler_url = reverse_usage_url(
            'component_handler',
            lib_content_block.location,
            kwargs={'handler': 'refresh_children'}
        )
        response = self.client.ajax_post(handler_url)
        self.assertEqual(response.status_code, status_code_expected)
        return modulestore().get_item(lib_content_block.location)

    def _bind_module(self, descriptor, user=None):
        """
        Helper to use the CMS's module system so we can access student-specific fields.
        """
        if user is None:
            user = self.user
        # Reuse one session dict per user so repeated bindings see the same
        # student state (e.g. the randomized child selection).
        if user not in self.session_data:
            self.session_data[user] = {}
        request = Mock(user=user, session=self.session_data[user])
        _load_preview_module(request, descriptor)

    def _update_item(self, usage_key, metadata):
        """
        Helper method: Uses the REST API to update the fields of an XBlock.
        This will result in the XBlock's editor_saved() method being called.
        """
        update_url = reverse_usage_url("xblock_handler", usage_key)
        return self.client.ajax_post(
            update_url,
            data={
                'metadata': metadata,
            }
        )

    def _list_libraries(self, course_key):
        """
        Use the REST API to get a list of libraries visible to the current user.
        """
        self.request_factory = RequestFactory()
        self.request = self.request_factory.get('foo')
        self.request.user = self.user
        response = _list_libraries(self.request, course_key)
        self.assertEqual(response.status_code, 200)
        return parse_json(response)
@ddt.ddt
class TestLibraries(LibraryTestCase):
    """
    High-level tests for libraries
    """
    @ddt.data(
        (2, 1, 1),
        (2, 2, 2),
        (2, 20, 2),  # max_count above the library size caps at the library size
    )
    @ddt.unpack
    def test_max_items(self, num_to_create, num_to_select, num_expected):
        """
        Test the 'max_count' property of LibraryContent blocks.
        """
        for _ in range(num_to_create):
            self._add_simple_content_block()
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        lc_block = self._add_library_content_block(course, self.lib_key, {'max_count': num_to_select})
        self.assertEqual(len(lc_block.children), 0)
        lc_block = self._refresh_children(lc_block)
        # Now, we want to make sure that .children has the total # of potential
        # children, and that get_child_descriptors() returns the actual children
        # chosen for a given student.
        # In order to be able to call get_child_descriptors(), we must first
        # call bind_for_student:
        self._bind_module(lc_block)
        self.assertEqual(len(lc_block.children), num_to_create)
        self.assertEqual(len(lc_block.get_child_descriptors()), num_expected)

    def test_consistent_children(self):
        """
        Test that the same student will always see the same selected child block
        """
        # Create many blocks in the library and add them to a course:
        for num in range(8):
            ItemFactory.create(
                data="This is #{}".format(num + 1),
                category="html", parent_location=self.library.location, user_id=self.user.id, publish_item=False
            )
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        lc_block = self._add_library_content_block(course, self.lib_key, {'max_count': 1})
        lc_block_key = lc_block.location
        lc_block = self._refresh_children(lc_block)

        def get_child_of_lc_block(block):
            """
            Fetch the child shown to the current user.
            """
            children = block.get_child_descriptors()
            self.assertEqual(len(children), 1)
            return children[0]

        # Check which child a student will see:
        self._bind_module(lc_block)
        chosen_child = get_child_of_lc_block(lc_block)
        chosen_child_defn_id = chosen_child.definition_locator.definition_id
        lc_block.save()
        modulestore().update_item(lc_block, self.user.id)

        # Now re-load the block and try again:
        def check():
            """
            Confirm that chosen_child is still the child seen by the test student
            """
            for _ in range(6):  # Repeat many times b/c blocks are randomized
                lc_block = modulestore().get_item(lc_block_key)  # Reload block from the database
                self._bind_module(lc_block)
                current_child = get_child_of_lc_block(lc_block)
                self.assertEqual(current_child.location, chosen_child.location)
                self.assertEqual(current_child.data, chosen_child.data)
                self.assertEqual(current_child.definition_locator.definition_id, chosen_child_defn_id)
        check()
        # Refresh the children:
        lc_block = self._refresh_children(lc_block)
        # Now re-load the block and try yet again, in case refreshing the children changed anything:
        check()

    def test_definition_shared_with_library(self):
        """
        Test that the same block definition is used for the library and course[s]
        """
        block1 = self._add_simple_content_block()
        def_id1 = block1.definition_locator.definition_id
        block2 = self._add_simple_content_block()
        def_id2 = block2.definition_locator.definition_id
        self.assertNotEqual(def_id1, def_id2)
        # Next, create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        lc_block = self._add_library_content_block(course, self.lib_key)
        lc_block = self._refresh_children(lc_block)
        # Every copied child must share its definition with one of the
        # original library blocks:
        for child_key in lc_block.children:
            child = modulestore().get_item(child_key)
            def_id = child.definition_locator.definition_id
            self.assertIn(def_id, (def_id1, def_id2))

    def test_fields(self):
        """
        Test that blocks used from a library have the same field values as
        defined by the library author.
        """
        data_value = "A Scope.content value"
        name_value = "A Scope.settings value"
        lib_block = ItemFactory.create(
            category="html",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
            display_name=name_value,
            data=data_value,
        )
        self.assertEqual(lib_block.data, data_value)
        self.assertEqual(lib_block.display_name, name_value)
        # Next, create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        lc_block = self._add_library_content_block(course, self.lib_key)
        lc_block = self._refresh_children(lc_block)
        course_block = modulestore().get_item(lc_block.children[0])
        self.assertEqual(course_block.data, data_value)
        self.assertEqual(course_block.display_name, name_value)

    def test_block_with_children(self):
        """
        Test that blocks used from a library can have children.
        """
        data_value = "A Scope.content value"
        name_value = "A Scope.settings value"
        # In the library, create a vertical block with a child:
        vert_block = ItemFactory.create(
            category="vertical",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
        )
        child_block = ItemFactory.create(
            category="html",
            parent_location=vert_block.location,
            user_id=self.user.id,
            publish_item=False,
            display_name=name_value,
            data=data_value,
        )
        self.assertEqual(child_block.data, data_value)
        self.assertEqual(child_block.display_name, name_value)
        # Next, create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        lc_block = self._add_library_content_block(course, self.lib_key)
        lc_block = self._refresh_children(lc_block)
        # The whole vertical subtree must be copied into the course:
        self.assertEqual(len(lc_block.children), 1)
        course_vert_block = modulestore().get_item(lc_block.children[0])
        self.assertEqual(len(course_vert_block.children), 1)
        course_child_block = modulestore().get_item(course_vert_block.children[0])
        self.assertEqual(course_child_block.data, data_value)
        self.assertEqual(course_child_block.display_name, name_value)

    def test_change_after_first_sync(self):
        """
        Check that nothing goes wrong if we (A) Set up a LibraryContent block
        and use it successfully, then (B) Give it an invalid configuration.
        No children should be deleted until the configuration is fixed.
        """
        # Add a block to the library:
        data_value = "Hello world!"
        ItemFactory.create(
            category="html",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
            display_name="HTML BLock",
            data=data_value,
        )
        # Create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        lc_block = self._add_library_content_block(course, self.lib_key)
        lc_block = self._refresh_children(lc_block)
        self.assertEqual(len(lc_block.children), 1)
        # Now, change the block settings to have an invalid library key:
        resp = self._update_item(
            lc_block.location,
            {"source_library_id": "library-v1:NOT+FOUND"},
        )
        self.assertEqual(resp.status_code, 200)
        lc_block = modulestore().get_item(lc_block.location)
        self.assertEqual(len(lc_block.children), 1)  # Children should not be deleted due to a bad setting.
        html_block = modulestore().get_item(lc_block.children[0])
        self.assertEqual(html_block.data, data_value)

    def test_refreshes_children_if_libraries_change(self):
        """ Tests that children are automatically refreshed if libraries list changes """
        library2key = self._create_library(self.course, 'org2', 'lib2', 'Library2')
        library2 = modulestore().get_library(library2key)
        data1, data2 = "Hello world!", "Hello other world!"
        ItemFactory.create(
            category="html",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
            display_name="Lib1: HTML BLock",
            data=data1,
        )
        ItemFactory.create(
            category="html",
            parent_location=library2.location,
            user_id=self.user.id,
            publish_item=False,
            display_name="Lib 2: HTML BLock",
            data=data2,
        )
        # Create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        lc_block = self._add_library_content_block(course, self.lib_key)
        lc_block = self._refresh_children(lc_block)
        self.assertEqual(len(lc_block.children), 1)
        # Now, switch the block to source its content from the second library:
        resp = self._update_item(
            lc_block.location,
            {"source_library_id": str(library2key)},
        )
        self.assertEqual(resp.status_code, 200)
        # The children should have been refreshed automatically to library2's
        # content, without an explicit refresh_children call:
        lc_block = modulestore().get_item(lc_block.location)
        self.assertEqual(len(lc_block.children), 1)
        html_block = modulestore().get_item(lc_block.children[0])
        self.assertEqual(html_block.data, data2)

    @patch("xmodule.library_tools.SearchEngine.get_search_engine", Mock(return_value=None, autospec=True))
    def test_refreshes_children_if_capa_type_change(self):
        """ Tests that children are automatically refreshed if capa type field changes """
        name1, name2 = "Option Problem", "Multiple Choice Problem"
        ItemFactory.create(
            category="problem",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
            display_name=name1,
            data="<problem><optionresponse></optionresponse></problem>",
        )
        ItemFactory.create(
            category="problem",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
            display_name=name2,
            data="<problem><multiplechoiceresponse></multiplechoiceresponse></problem>",
        )
        # Create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        lc_block = self._add_library_content_block(course, self.lib_key)
        lc_block = self._refresh_children(lc_block)
        # With no capa_type filter, both problems are selected:
        self.assertEqual(len(lc_block.children), 2)
        # Restricting capa_type narrows the children to the matching problem:
        resp = self._update_item(
            lc_block.location,
            {"capa_type": 'optionresponse'},
        )
        self.assertEqual(resp.status_code, 200)
        lc_block = modulestore().get_item(lc_block.location)
        self.assertEqual(len(lc_block.children), 1)
        # NOTE: despite the variable name, this child is a problem block.
        html_block = modulestore().get_item(lc_block.children[0])
        self.assertEqual(html_block.display_name, name1)
        resp = self._update_item(
            lc_block.location,
            {"capa_type": 'multiplechoiceresponse'},
        )
        self.assertEqual(resp.status_code, 200)
        lc_block = modulestore().get_item(lc_block.location)
        self.assertEqual(len(lc_block.children), 1)
        html_block = modulestore().get_item(lc_block.children[0])
        self.assertEqual(html_block.display_name, name2)

    def test_refresh_fails_for_unknown_library(self):
        """ Tests that refresh children fails if unknown library is configured """
        # Create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        lc_block = self._add_library_content_block(course, self.lib_key)
        lc_block = self._refresh_children(lc_block)
        self.assertEqual(len(lc_block.children), 0)
        # Now, change the block settings to have an invalid library key:
        resp = self._update_item(
            lc_block.location,
            {"source_library_id": "library-v1:NOT+FOUND"},
        )
        self.assertEqual(resp.status_code, 200)
        # Refreshing against the missing library must raise and return HTTP 400:
        with self.assertRaises(ValueError):
            self._refresh_children(lc_block, status_code_expected=400)
class TestLibrariesWithGaGlobalCourseCreator(TestLibraries):
    """
    Re-run all TestLibraries tests with the user switched to a
    GaGlobalCourseCreator, to verify that role has the same library access.
    """
    def setUp(self):
        super(TestLibrariesWithGaGlobalCourseCreator, self).setUp()
        switch_ga_global_course_creator(self.user)
@ddt.ddt
@patch('django.conf.settings.SEARCH_ENGINE', None)
class TestLibraryAccess(SignalDisconnectTestMixin, LibraryTestCase):
    """
    Test Roles and Permissions related to Content Libraries
    """
    def setUp(self):
        """ Create a library, staff user, and non-staff user """
        super(TestLibraryAccess, self).setUp()
        self.non_staff_user, self.non_staff_user_password = self.create_non_staff_user()

    def _login_as_non_staff_user(self, logout_first=True):
        """ Login as a user that starts out with no roles/permissions granted. """
        if logout_first:
            self.client.logout()  # We start logged in as a staff user
        self.client.login(username=self.non_staff_user.username, password=self.non_staff_user_password)

    def _assert_cannot_create_library(self, org="org", library="libfail", expected_code=403):
        """ Ensure the current user is not able to create a library. """
        self.assertTrue(expected_code >= 300)  # must be a failure status
        lib_url = reverse_course_url('course_library_handler', self.course.id)
        response = self.client.ajax_post(
            lib_url,
            {'org': org, 'library': library, 'display_name': "Irrelevant"}
        )
        self.assertEqual(response.status_code, expected_code)
        # And double-check that the library really was not created:
        key = LibraryLocator(org=org, library=library)
        self.assertEqual(modulestore().get_library(key), None)

    def _can_access_library(self, course, library):
        """
        Use the normal studio library URL to check if we have access

        `library` can be a LibraryLocator or the library's root XBlock
        """
        if isinstance(library, (basestring, LibraryLocator)):
            lib_key = library
        else:
            lib_key = library.location.library_key
        lib_url = reverse_course_url('course_library_handler', course.id, kwargs={'library_key_string': unicode(lib_key)})
        response = self.client.get(lib_url)
        # Any other status would indicate a server error, not a permission result:
        self.assertIn(response.status_code, (200, 302, 403, 404))
        return response.status_code == 200

    def tearDown(self):
        """
        Log out when done each test
        """
        self.client.logout()
        super(TestLibraryAccess, self).tearDown()

    def test_creation(self):
        """
        The user that creates a library should have instructor (admin) and staff permissions
        """
        instructor_role = CourseInstructorRole(self.course.id)
        auth.add_users(self.user, instructor_role, self.non_staff_user)
        CourseInstructorRole(self.course.location.course_key).add_users(self.non_staff_user)
        # self.library has been auto-created by the staff user.
        self.assertTrue(has_studio_write_access(self.user, self.lib_key))
        self.assertTrue(has_studio_read_access(self.user, self.lib_key))
        # Make sure the user was actually assigned the instructor role and not just using is_staff superpowers:
        self.assertTrue(CourseInstructorRole(self.lib_key).has_user(self.user))
        # Now log out and ensure we are forbidden from creating a library:
        self.client.logout()
        self._assert_cannot_create_library(expected_code=302)  # 302 redirect to login expected
        # Now check that logged-in users without CourseCreator role can still create libraries
        self._login_as_non_staff_user(logout_first=False)
        self.assertFalse(CourseCreatorRole().has_user(self.non_staff_user))
        with patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True}):
            lib_key2 = self._create_library(self.course, library='lib2', display_name='Test Library 2')
            library2 = modulestore().get_library(lib_key2)
        self.assertIsNotNone(library2)

    @ddt.data(
        CourseInstructorRole,
        CourseStaffRole,
        LibraryUserRole,
    )
    def test_acccess(self, access_role):
        """
        Test whether the library is correctly linked to the course
        """
        # At this point, one library exists, created by the currently-logged-in staff user.
        # Create another library as staff:
        library2_key = self._create_library(self.course, library='lib2')
        # Login as staff_user:
        self._login_as_staff_user()
        course2 = CourseFactory.create(org='org2', course='course2', run='run2')
        # Library is not linked to course2
        lib_list = self._list_libraries(unicode(course2.id))
        self.assertEqual(len(lib_list), 0)
        self.assertFalse(self._can_access_library(course2, self.library))
        self.assertFalse(self._can_access_library(course2, library2_key))
        # Two libraries are linked to self.course
        lib_list = self._list_libraries(unicode(self.course.id))
        self.assertEqual(len(lib_list), 2)
        self.assertIn(lib_list[0]["library_key"], [unicode(library2_key), unicode(self.lib_key)])
        self.assertTrue(self._can_access_library(self.course, library2_key))
        self.assertTrue(self._can_access_library(self.course, self.library))

    @ddt.data(
        OrgStaffRole,
        OrgInstructorRole,
        OrgLibraryUserRole,
    )
    def test_org_based_access(self, org_access_role):
        """
        Test the various roles that allow viewing all of an organization's
        libraries are working correctly.
        """
        # Create some libraries as the staff user:
        course_pacific = CourseFactory.create(org='PacificX', course='courseP', run='runP')
        self._setting_library_option(course_pacific)
        lib_key_pacific = self._create_library(course_pacific, org='PacificX', library='lib')
        course_atlantic = CourseFactory.create(org='AtlanticX', course='courseA', run='runA')
        self._setting_library_option(course_atlantic)
        lib_key_atlantic = self._create_library(course_atlantic, org='AtlanticX', library='libA')
        # Now manually intervene to give non_staff_user access to all "PacificX" libraries:
        org_access_role(lib_key_pacific.org).add_users(self.non_staff_user)
        CourseStaffRole(course_pacific.location.course_key).add_users(self.non_staff_user)
        # Login as a non-staff:
        self._login_as_non_staff_user()
        # Now non_staff_user should be able to access lib_key_pacific only:
        lib_list = self._list_libraries(unicode(course_pacific.id))
        self.assertEqual(len(lib_list), 1)
        self.assertEqual(lib_list[0]["library_key"], unicode(lib_key_pacific))
        # Only OrgInstructorRole grants direct studio access to the library page:
        if org_access_role == OrgInstructorRole:
            self.assertTrue(self._can_access_library(course_pacific, lib_key_pacific))
        else:
            self.assertFalse(self._can_access_library(course_pacific, lib_key_pacific))
        self.assertFalse(self._can_access_library(course_atlantic, lib_key_atlantic))
        self.assertFalse(self._can_access_library(self.course, self.lib_key))

    @ddt.data(True, False)
    def test_read_only_role(self, use_org_level_role):
        """
        Test the read-only role (LibraryUserRole and its org-level equivalent)
        """
        # As staff user, add a block to self.library:
        block = self._add_simple_content_block()
        # Login as a non_staff_user:
        self._login_as_non_staff_user()
        self.assertFalse(self._can_access_library(self.course, self.library))
        block_url = reverse_usage_url('xblock_handler', block.location)

        def can_read_block():
            """ Check if studio lets us view the XBlock in the library """
            response = self.client.get_json(block_url)
            self.assertIn(response.status_code, (200, 403))  # 400 would be ambiguous
            return response.status_code == 200

        def can_edit_block():
            """ Check if studio lets us edit the XBlock in the library """
            response = self.client.ajax_post(block_url)
            self.assertIn(response.status_code, (200, 403))  # 400 would be ambiguous
            return response.status_code == 200

        def can_delete_block():
            """ Check if studio lets us delete the XBlock in the library """
            response = self.client.delete(block_url)
            self.assertIn(response.status_code, (200, 403))  # 400 would be ambiguous
            return response.status_code == 200

        def can_copy_block():
            """ Check if studio lets us duplicate the XBlock in the library """
            response = self.client.ajax_post(reverse_url('xblock_handler'), {
                'parent_locator': unicode(self.library.location),
                'duplicate_source_locator': unicode(block.location),
            })
            self.assertIn(response.status_code, (200, 403))  # 400 would be ambiguous
            return response.status_code == 200

        def can_create_block():
            """ Check if studio lets us make a new XBlock in the library """
            response = self.client.ajax_post(reverse_url('xblock_handler'), {
                'parent_locator': unicode(self.library.location), 'category': 'html',
            })
            self.assertIn(response.status_code, (200, 403))  # 400 would be ambiguous
            return response.status_code == 200

        # Check that we do not have read or write access to block:
        self.assertFalse(can_read_block())
        self.assertFalse(can_edit_block())
        self.assertFalse(can_delete_block())
        self.assertFalse(can_copy_block())
        self.assertFalse(can_create_block())
        # Give non_staff_user read-only permission:
        if use_org_level_role:
            OrgLibraryUserRole(self.lib_key.org).add_users(self.non_staff_user)
        else:
            LibraryUserRole(self.lib_key).add_users(self.non_staff_user)
        CourseStaffRole(self.course.location.course_key).add_users(self.non_staff_user)
        self.assertFalse(self._can_access_library(self.course, self.library))
        # Read is now allowed, all writes are still forbidden:
        self.assertTrue(can_read_block())
        self.assertFalse(can_edit_block())
        self.assertFalse(can_delete_block())
        self.assertFalse(can_copy_block())
        self.assertFalse(can_create_block())

    @ddt.data(
        (LibraryUserRole, CourseStaffRole, True),
        (CourseStaffRole, CourseStaffRole, True),
        (None, CourseStaffRole, False),
        (LibraryUserRole, None, False),
    )
    @ddt.unpack
    def test_duplicate_across_courses(self, library_role, course_role, expected_result):
        """
        Test that the REST API will correctly allow/refuse when copying
        from a library with (write, read, or no) access to a course with (write or no) access.
        """
        # As staff user, add a block to self.library:
        block = self._add_simple_content_block()
        # And create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        self._login_as_non_staff_user()
        # Assign roles:
        if library_role:
            library_role(self.lib_key).add_users(self.non_staff_user)
        if course_role:
            course_role(course.location.course_key).add_users(self.non_staff_user)
        # Copy block to the course:
        response = self.client.ajax_post(reverse_url('xblock_handler'), {
            'parent_locator': unicode(course.location),
            'duplicate_source_locator': unicode(block.location),
        })
        self.assertIn(response.status_code, (200, 403))  # 400 would be ambiguous
        duplicate_action_allowed = (response.status_code == 200)
        self.assertEqual(duplicate_action_allowed, expected_result)

    @ddt.data(
        (LibraryUserRole, CourseStaffRole, True),
        (CourseStaffRole, CourseStaffRole, True),
        (None, CourseStaffRole, True),
        (LibraryUserRole, None, False),
    )
    @ddt.unpack
    def test_refresh_library_content_permissions(self, library_role, course_role, expected_result):
        """
        Test that the LibraryContent block's 'refresh_children' handler will correctly
        handle permissions and allow/refuse when updating its content with the latest
        version of a library. We try updating from a library with (write, read, or no)
        access to a course with (write or no) access.
        """
        # As staff user, add a block to self.library:
        self._add_simple_content_block()
        # And create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        self._login_as_non_staff_user()
        # Assign roles:
        if library_role:
            library_role(self.lib_key).add_users(self.non_staff_user)
        if course_role:
            course_role(course.location.course_key).add_users(self.non_staff_user)
        # Try updating our library content block:
        lc_block = self._add_library_content_block(course, self.lib_key)
        # We must use the CMS's module system in order to get permissions checks.
        self._bind_module(lc_block, user=self.non_staff_user)
        lc_block = self._refresh_children(lc_block, status_code_expected=200 if expected_result else 403)
        self.assertEqual(len(lc_block.children), 1 if expected_result else 0)

    def test_studio_user_permissions(self):
        """
        Test that user could attach to the problem only libraries that he has access (or which were created by him).
        This test was created on the basis of bug described in the pull requests on github:
        https://github.com/edx/edx-platform/pull/11331
        https://github.com/edx/edx-platform/pull/11611
        """
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            course = CourseFactory.create()
        self._setting_library_option(course)
        self._create_library(course, org='admin_org_1', library='lib_adm_1', display_name='admin_lib_1')
        self._create_library(course, org='admin_org_2', library='lib_adm_2', display_name='admin_lib_2')
        self._login_as_non_staff_user()
        instructor_role = CourseInstructorRole(course.id)
        auth.add_users(self.user, instructor_role, self.non_staff_user)
        self._create_library(course, org='staff_org_1', library='lib_staff_1', display_name='staff_lib_1')
        self._create_library(course, org='staff_org_2', library='lib_staff_2', display_name='staff_lib_2')
        lib_block = ItemFactory.create(
            category='library_content',
            parent_location=course.location,
            user_id=self.non_staff_user.id,
            publish_item=False
        )

        def _get_settings_html():
            """
            Helper function to get block settings HTML
            Used to check the available libraries.
            """
            edit_view_url = reverse_usage_url('xblock_view_handler', lib_block.location, {'view_name': STUDIO_VIEW})
            resp = self.client.get_json(edit_view_url)
            self.assertEquals(resp.status_code, 200)
            return parse_json(resp)['html']

        # All four libraries linked to the course are offered to the staff user:
        self._login_as_staff_user()
        staff_settings_html = _get_settings_html()
        self.assertIn('staff_lib_1', staff_settings_html)
        self.assertIn('staff_lib_2', staff_settings_html)
        self.assertIn('admin_lib_1', staff_settings_html)
        self.assertIn('admin_lib_2', staff_settings_html)
        # The non-staff course instructor sees the same four libraries:
        self._login_as_non_staff_user()
        staff_libs = self._list_libraries(unicode(course.id))
        self.assertEqual(len(staff_libs), 4)
        non_staff_settings_html = _get_settings_html()
        self.assertIn('staff_lib_1', non_staff_settings_html)
        self.assertIn('staff_lib_2', non_staff_settings_html)
        self.assertIn('admin_lib_1', non_staff_settings_html)
        self.assertIn('admin_lib_2', non_staff_settings_html)
@ddt.ddt
@override_settings(SEARCH_ENGINE=None)
class TestOverrides(LibraryTestCase):
"""
Test that overriding block Scope.settings fields from a library in a specific course works
"""
    def setUp(self):
        """
        Create a library containing one problem block (whose display_name and
        weight are remembered for later comparison), plus a course with a
        LibraryContent block whose children have been synced from the library.
        """
        super(TestOverrides, self).setUp()
        self.original_display_name = "A Problem Block"
        self.original_weight = 1
        # Create a problem block in the library:
        self.problem = ItemFactory.create(
            category="problem",
            parent_location=self.library.location,
            display_name=self.original_display_name,  # display_name is a Scope.settings field
            weight=self.original_weight,  # weight is also a Scope.settings field
            user_id=self.user.id,
            publish_item=False,
        )
        # Refresh library now that we've added something.
        self.library = modulestore().get_library(self.lib_key)
        # Also create a course:
        with modulestore().default_store(ModuleStoreEnum.Type.split):
            self.course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        self.lc_block = self._add_library_content_block(self.course, self.lib_key)
        self.lc_block = self._refresh_children(self.lc_block)
        # The course-local copy of the library problem:
        self.problem_in_course = modulestore().get_item(self.lc_block.children[0])
    def test_overrides(self):
        """
        Test that we can override Scope.settings values in a course.
        """
        new_display_name = "Modified Problem Title"
        new_weight = 10
        self.problem_in_course.display_name = new_display_name
        self.problem_in_course.weight = new_weight
        modulestore().update_item(self.problem_in_course, self.user.id)
        # Add a second LibraryContent block to the course, with no override:
        lc_block2 = self._add_library_content_block(self.course, self.lib_key)
        lc_block2 = self._refresh_children(lc_block2)
        # Re-load the two problem blocks - one with and one without an override:
        self.problem_in_course = modulestore().get_item(self.lc_block.children[0])
        problem2_in_course = modulestore().get_item(lc_block2.children[0])
        # The overridden copy keeps the course-local values...
        self.assertEqual(self.problem_in_course.display_name, new_display_name)
        self.assertEqual(self.problem_in_course.weight, new_weight)
        # ...while the untouched copy still inherits the library values.
        self.assertEqual(problem2_in_course.display_name, self.original_display_name)
        self.assertEqual(problem2_in_course.weight, self.original_weight)
    def test_reset_override(self):
        """
        If we override a setting and then reset it, we should get the library value.
        """
        new_display_name = "Modified Problem Title"
        new_weight = 10
        self.problem_in_course.display_name = new_display_name
        self.problem_in_course.weight = new_weight
        modulestore().update_item(self.problem_in_course, self.user.id)
        self.problem_in_course = modulestore().get_item(self.problem_in_course.location)
        self.assertEqual(self.problem_in_course.display_name, new_display_name)
        self.assertEqual(self.problem_in_course.weight, new_weight)
        # Reset: deleting the field values removes the course-level override,
        # so the block falls back to the inherited library values.
        for field_name in ["display_name", "weight"]:
            self.problem_in_course.fields[field_name].delete_from(self.problem_in_course)
        # Save, reload, and verify:
        modulestore().update_item(self.problem_in_course, self.user.id)
        self.problem_in_course = modulestore().get_item(self.problem_in_course.location)
        self.assertEqual(self.problem_in_course.display_name, self.original_display_name)
        self.assertEqual(self.problem_in_course.weight, self.original_weight)
    def test_consistent_definitions(self):
        """
        Make sure that the new child of the LibraryContent block
        shares its definition with the original (self.problem).
        This test is specific to split mongo.
        """
        definition_id = self.problem.definition_locator.definition_id
        self.assertEqual(self.problem_in_course.definition_locator.definition_id, definition_id)
        # Now even if we change some Scope.settings fields and refresh, the definition should be unchanged
        # (Scope.settings changes do not create a new definition; only content changes would.)
        self.problem.weight = 20
        self.problem.display_name = "NEW"
        modulestore().update_item(self.problem, self.user.id)
        self.lc_block = self._refresh_children(self.lc_block)
        self.problem_in_course = modulestore().get_item(self.problem_in_course.location)
        self.assertEqual(self.problem.definition_locator.definition_id, definition_id)
        self.assertEqual(self.problem_in_course.definition_locator.definition_id, definition_id)
    @ddt.data(False, True)
    def test_persistent_overrides(self, duplicate):
        """
        Test that when we override Scope.settings values in a course,
        the override values persist even when the block is refreshed
        with updated blocks from the library.

        Runs twice (via ddt): once against the original LibraryContent
        block and once against a duplicate of it.
        """
        new_display_name = "Modified Problem Title"
        new_weight = 15
        self.problem_in_course.display_name = new_display_name
        self.problem_in_course.weight = new_weight
        modulestore().update_item(self.problem_in_course, self.user.id)
        if duplicate:
            # Check that this also works when the RCB is duplicated.
            self.lc_block = modulestore().get_item(
                _duplicate_item(self.course.location, self.lc_block.location, self.user)
            )
            self.problem_in_course = modulestore().get_item(self.lc_block.children[0])
        else:
            self.problem_in_course = modulestore().get_item(self.problem_in_course.location)
        self.assertEqual(self.problem_in_course.display_name, new_display_name)
        self.assertEqual(self.problem_in_course.weight, new_weight)
        # Change the settings in the library version:
        self.problem.display_name = "X"
        self.problem.weight = 99
        new_data_value = "<problem><p>Changed data to check that non-overriden fields *do* get updated.</p></problem>"
        self.problem.data = new_data_value
        modulestore().update_item(self.problem, self.user.id)
        self.lc_block = self._refresh_children(self.lc_block)
        self.problem_in_course = modulestore().get_item(self.problem_in_course.location)
        # Overridden settings survive the refresh...
        self.assertEqual(self.problem_in_course.display_name, new_display_name)
        self.assertEqual(self.problem_in_course.weight, new_weight)
        # ...but the non-overridden content field picks up the library update.
        self.assertEqual(self.problem_in_course.data, new_data_value)
    def test_duplicated_version(self):
        """
        Test that if a library is updated, and the content block is duplicated,
        the new block will use the old library version and not the new one.
        """
        store = modulestore()
        self.assertEqual(len(self.library.children), 1)
        self.assertEqual(len(self.lc_block.children), 1)
        # Edit the only problem in the library:
        self.problem.display_name = "--changed in library--"
        store.update_item(self.problem, self.user.id)
        # Create an additional problem block in the library:
        ItemFactory.create(
            category="problem",
            parent_location=self.library.location,
            user_id=self.user.id,
            publish_item=False,
        )
        # Refresh our reference to the library
        self.library = store.get_library(self.lib_key)
        # Refresh our reference to the block
        # (note: the LibraryContent block itself is deliberately NOT refreshed
        # via _refresh_children here — it should stay pinned to the old version)
        self.lc_block = store.get_item(self.lc_block.location)
        self.problem_in_course = store.get_item(self.problem_in_course.location)
        # The library has changed...
        self.assertEqual(len(self.library.children), 2)
        # But the block hasn't.
        self.assertEqual(len(self.lc_block.children), 1)
        self.assertEqual(self.problem_in_course.location, self.lc_block.children[0])
        self.assertEqual(self.problem_in_course.display_name, self.original_display_name)
        # Duplicate self.lc_block:
        duplicate = store.get_item(
            _duplicate_item(self.course.location, self.lc_block.location, self.user)
        )
        # The duplicate should have identical children to the original:
        self.assertEqual(len(duplicate.children), 1)
        # Both blocks must reference the same (old) library version:
        self.assertTrue(self.lc_block.source_library_version)
        self.assertEqual(self.lc_block.source_library_version, duplicate.source_library_version)
        problem2_in_course = store.get_item(duplicate.children[0])
        self.assertEqual(problem2_in_course.display_name, self.original_display_name)
class TestOverridesWithGaGlobalCourseCreator(TestOverrides):
    """
    Re-run the full TestOverrides suite with the acting user switched to a
    GA global course creator, verifying that Scope.settings overrides from a
    library behave identically for that role.
    """
    def setUp(self):
        super(TestOverridesWithGaGlobalCourseCreator, self).setUp()
        # Promote the test user before any of the inherited tests run.
        switch_ga_global_course_creator(self.user)
class TestIncompatibleModuleStore(LibraryTestCase):
    """
    Tests for proper validation errors with an incompatible course modulestore.
    """
    def setUp(self):
        super(TestIncompatibleModuleStore, self).setUp()
        # Create a course in an incompatible modulestore.
        # (Old mongo does not support content libraries; split mongo does.)
        with modulestore().default_store(ModuleStoreEnum.Type.mongo):
            self.course = CourseFactory.create()
        # Add a LibraryContent block to the course:
        self.lc_block = self._add_library_content_block(self.course, self.lib_key)

    def test_incompatible_modulestore(self):
        """
        Verifies that, if a user is using a modulestore that doesn't support libraries,
        a validation error will be produced.
        """
        validation = self.lc_block.validate()
        self.assertEqual(validation.summary.type, validation.summary.ERROR)
        self.assertIn(
            "This course does not support content libraries.", validation.summary.text)
|
agpl-3.0
|
b-cube/OwsCapable
|
owscapable/map/wms130.py
|
1
|
25313
|
# -*- coding: iso-8859-15 -*-
# =============================================================================
# Copyright (c) 2004, 2006 Sean C. Gillies
# Copyright (c) 2005 Nuxeo SARL <http://nuxeo.com>
#
# Authors : Sean Gillies <sgillies@frii.com>
# Julien Anguenot <ja@nuxeo.com>
#
# Contact email: sgillies@frii.com
# =============================================================================
"""
API for Web Map Service (WMS) methods and metadata.
Support for version 1.3.0 of the WMS protocol.
"""
from __future__ import (absolute_import, division, print_function)
import cgi
import urllib2
from urllib import urlencode
import warnings
from owscapable.etree import etree
from owscapable.util import openURL, testXMLValue, extract_xml_list, xmltag_split, nspath
from owscapable.fgdc import Metadata
from owscapable.iso import MD_Metadata
WMS_NAMESPACE = 'http://www.opengis.net/wms'
def strip_ns(tag):
    """Return *tag* with its leading '{namespace}' prefix removed.

    Raises ValueError (from str.index) when *tag* contains no '}'.
    """
    close_brace = tag.index('}')
    return tag[close_brace + 1:]
class ServiceException(Exception):
    """Raised when the WMS server returns a ServiceException report.

    Attributes:
        message -- short error message
        xml -- full xml error message from server
    """

    def __init__(self, message, xml):
        self.message, self.xml = message, xml

    def __str__(self):
        # repr() so the quoted message matches the original formatting.
        return repr(self.message)
class WebMapService_1_3_0(object):
    """Abstraction for OGC Web Map Service (WMS)

    Implements IWebMapService for protocol version 1.3.0: parses a
    GetCapabilities document (fetched from ``url`` or supplied as ``xml``)
    and exposes service metadata via ``identification``, ``provider``,
    ``operations``, ``contents`` and ``exceptions``, plus a ``getmap()``
    request helper.

    NOTE(review): this module uses Python 2-only names (``urllib2``,
    ``unicode``) — it will not run under Python 3 as-is.
    """

    def __getitem__(self, name):
        ''' check contents dictionary to allow dict like access to service layers'''
        if name in self.__getattribute__('contents').keys():
            return self.__getattribute__('contents')[name]
        else:
            raise KeyError("No content named %s" % name)

    def __init__(self, url, version='1.3.0', xml=None,
                 username=None, password=None, parse_remote_metadata=False):
        """Initialize.

        url -- base URL of the WMS endpoint
        version -- protocol version string (only 1.3.0 is handled here)
        xml -- optional stored capabilities document; when given, no
               network request is made
        username, password -- credentials forwarded to the capabilities reader
        parse_remote_metadata -- when True, linked MetadataURL documents
               are downloaded and parsed for each layer
        """
        self.url = url
        self.username = username
        self.password = password
        self.version = version
        self._capabilities = None
        # Authentication handled by Reader
        reader = WMSCapabilitiesReader(self.version, url=self.url,
                                       un=self.username, pw=self.password)
        if xml:  # read from stored xml
            self._capabilities = reader.readString(xml)
        else:  # read from server
            self._capabilities = reader.read(self.url)
        # avoid building capabilities metadata if the response is a ServiceExceptionReport
        se = self._capabilities.find(nspath('ServiceException', WMS_NAMESPACE))
        if se is not None:
            err_message = str(se.text).strip()
            raise ServiceException(err_message, xml)
        # build metadata objects
        self._buildMetadata(parse_remote_metadata)

    def _buildMetadata(self, parse_remote_metadata=False):
        ''' set up capabilities metadata objects '''
        # serviceIdentification metadata
        serviceelem = self._capabilities.find(nspath('Service', WMS_NAMESPACE))
        self.identification = ServiceIdentification(serviceelem, self.version)
        # serviceProvider metadata
        self.provider = ServiceProvider(serviceelem)
        # serviceOperations metadata
        self.operations = []
        for elem in self._capabilities.find(nspath('Capability/Request', WMS_NAMESPACE))[:]:
            self.operations.append(OperationMetadata(elem))
        # serviceContents metadata: our assumption is that services use a top-level
        # layer as a metadata organizer, nothing more.
        self.contents = {}
        caps = self._capabilities.find(nspath('Capability', WMS_NAMESPACE))

        # recursively gather content metadata for all layer elements.
        # To the WebMapService.contents store only metadata of named layers.
        def gather_layers(parent_elem, parent_metadata):
            # Depth-first walk; `index` is 1-based within each parent layer.
            # When two layers share an id, the later (child) one wins, with
            # a warning.
            for index, elem in enumerate(parent_elem.findall(nspath('Layer', WMS_NAMESPACE))):
                cm = ContentMetadata(elem, parent=parent_metadata, index=index + 1,
                                     parse_remote_metadata=parse_remote_metadata)
                if cm.id:
                    if cm.id in self.contents:
                        warnings.warn('Content metadata for layer "%s" already exists. Using child layer' % cm.id)
                    self.contents[cm.id] = cm
                gather_layers(elem, cm)
        gather_layers(caps, None)
        # exceptions
        self.exceptions = [f.text for f
                           in self._capabilities.findall(nspath('Capability/Exception/Format',
                                                                WMS_NAMESPACE))]

    def items(self):
        '''supports dict-like items() access'''
        items = []
        for item in self.contents:
            items.append((item, self.contents[item]))
        return items

    def getcapabilities(self):
        # Capabilities are fetched eagerly in __init__; intentionally a no-op.
        pass

    def getmap(self, layers=None, styles=None, srs=None, bbox=None,
               format=None, size=None, time=None, transparent=False,
               bgcolor='#FFFFFF',
               exceptions='XML',
               method='Get',
               **kwargs
               ):
        """Request and return an image from the WMS as a file-like object.

        Parameters
        ----------
        layers : list
            List of content layer names.
        styles : list
            Optional list of named styles, must be the same length as the
            layers list.
        srs : string
            A spatial reference system identifier.
        bbox : tuple
            (left, bottom, right, top) in srs units.
        format : string
            Output image format such as 'image/jpeg'.
        size : tuple
            (width, height) in pixels.
        time : string
            Optional. Time value for temporal layers.
        transparent : bool
            Optional. Transparent background if True.
        bgcolor : string
            Optional. Image background color.
        exceptions : string
            Optional. Exception reporting format requested from the server.
        method : string
            Optional. HTTP DCP method name: Get or Post.
        **kwargs : extra arguments
            anything else e.g. vendor specific parameters

        Raises ServiceException when the server responds with an XML
        exception report instead of image data.

        Example
        -------
        >>> wms = WebMapService('http://webservices.nationalatlas.gov/wms/1million',
                                version='1.3.0')
        >>> img = wms.getmap(layers=['airports1m'],\
                             styles=['default'],\
                             srs='EPSG:4326',\
                             bbox=(-176.646, 17.7016, -64.8017, 71.2854),\
                             size=(300, 300),\
                             format='image/jpeg',\
                             transparent=True)
        >>> out = open('example.jpg.jpg', 'wb')
        >>> out.write(img.read())
        >>> out.close()
        """
        # Prefer the DCP endpoint advertised for the requested HTTP method;
        # fall back to the service base URL if none is advertised.
        try:
            base_url = next((m.get('url') for m in self.getOperationByName('GetMap').methods if
                             m.get('type').lower() == method.lower()))
        except StopIteration:
            base_url = self.url
        request = {'version': self.version, 'request': 'GetMap'}
        # check layers and styles
        assert len(layers) > 0
        request['layers'] = ','.join(layers)
        if styles:
            assert len(styles) == len(layers)
            request['styles'] = ','.join(styles)
        else:
            request['styles'] = ''
        # size
        request['width'] = str(size[0])
        request['height'] = str(size[1])
        # WMS 1.3.0 uses 'crs' (1.1.1 used 'srs')
        request['crs'] = str(srs)
        request['bbox'] = ','.join([repr(x) for x in bbox])
        request['format'] = str(format)
        request['transparent'] = str(transparent).upper()
        request['bgcolor'] = '0x' + bgcolor[1:7]
        request['exceptions'] = str(exceptions)
        if time is not None:
            request['time'] = str(time)
        if kwargs:
            # vendor-specific parameters are passed through untouched
            for kw in kwargs:
                request[kw] = kwargs[kw]
        data = urlencode(request)
        print (data)  # NOTE(review): leftover debug output
        u = openURL(base_url, data, method, username=self.username, password=self.password)
        # check for service exceptions, and return
        # error: AttributeError: 'RereadableURL' object has no attribute 'info'
        if u.info()['Content-Type'] in ['application/vnd.ogc.se_xml', 'text/xml']:
            se_xml = u.read()
            print (se_xml)  # NOTE(review): leftover debug output
            se_tree = etree.fromstring(se_xml)
            # TODO: add the ogc namespace for this
            err_message = unicode(se_tree.find('{http://www.opengis.net/ogc}ServiceExceptionReport').text).strip()
            raise ServiceException(err_message, se_xml)
        return u

    def getServiceXML(self):
        # Serialize the cached capabilities tree back to a string (or None
        # when no capabilities were loaded).
        xml = None
        if self._capabilities is not None:
            xml = etree.tostring(self._capabilities)
        return xml

    def getfeatureinfo(self):
        # GetFeatureInfo is not implemented for this version wrapper.
        raise NotImplementedError

    def getOperationByName(self, name):
        """Return a named content item."""
        for item in self.operations:
            if item.name == name:
                return item
        raise KeyError("No operation named %s" % name)
class ServiceIdentification(object):
    ''' Implements IServiceIdentificationMetadata

    Thin read-only view over the capabilities <Service> element: each
    attribute is the text of the corresponding child element, or None
    when absent (via testXMLValue).
    '''
    def __init__(self, infoset, version):
        # infoset is the <Service> element of the capabilities document.
        self._root = infoset
        self.type = testXMLValue(self._root.find(nspath('Name', WMS_NAMESPACE)))
        self.version = version
        self.title = testXMLValue(self._root.find(nspath('Title', WMS_NAMESPACE)))
        self.abstract = testXMLValue(self._root.find(nspath('Abstract', WMS_NAMESPACE)))
        self.keywords = extract_xml_list(self._root.findall(nspath('KeywordList/Keyword', WMS_NAMESPACE)))
        self.accessconstraints = testXMLValue(self._root.find(nspath('AccessConstraints', WMS_NAMESPACE)))
        self.fees = testXMLValue(self._root.find(nspath('Fees', WMS_NAMESPACE)))
class ServiceProvider(object):
    ''' Implements IServiceProviderMetatdata

    Parses provider name, online resource URL and contact details from the
    capabilities <Service> element.
    '''
    def __init__(self, infoset):
        self._root = infoset
        name = self._root.find(nspath('ContactInformation/ContactPersonPrimary/ContactOrganization', WMS_NAMESPACE))
        if name is not None:
            self.name = name.text
        else:
            self.name = None
        # xlink:href of the service's OnlineResource; raises AttributeError
        # if the (mandatory) OnlineResource element is missing.
        self.url = self._root.find(nspath('OnlineResource', WMS_NAMESPACE)).attrib.get('{http://www.w3.org/1999/xlink}href', '')
        # contact metadata
        contact = self._root.find(nspath('ContactInformation', WMS_NAMESPACE))
        # sometimes there is a contact block that is empty, so make
        # sure there are children to parse
        if contact is not None and contact[:] != []:
            self.contact = ContactMetadata(contact)
        else:
            self.contact = None

    def getContentByName(self, name):
        """Return a named content item.

        NOTE(review): this references ``self.contents``, which ServiceProvider
        never defines — calling it will raise AttributeError. Looks like a
        copy-paste from the service class; confirm before relying on it.
        """
        for item in self.contents:
            if item.name == name:
                return item
        raise KeyError("No content named %s" % name)

    def getOperationByName(self, name):
        """Return a named content item.

        NOTE(review): same concern as getContentByName — ``self.operations``
        is never set on this class.
        """
        for item in self.operations:
            if item.name == name:
                return item
        raise KeyError("No operation named %s" % name)
class ContentMetadata:
    """
    Abstraction for WMS layer metadata.

    Implements IContentMetadata.

    Parses one <Layer> element. Several properties (bounding boxes, CRS
    options, styles) are inherited from ``parent`` per the WMS layer
    inheritance rules. ``parse_remote_metadata`` triggers download of any
    MetadataURL documents (FGDC / TC211), with ``timeout`` seconds allowed
    per fetch.
    """
    def __init__(self, elem, parent=None, index=0, parse_remote_metadata=False, timeout=30):
        if strip_ns(elem.tag) != 'Layer':
            raise ValueError('%s should be a Layer' % (elem,))
        self.parent = parent
        # Hierarchical index, e.g. "1.2" for the 2nd child of the 1st layer.
        if parent:
            self.index = "%s.%d" % (parent.index, index)
        else:
            self.index = str(index)
        # Named layers have a <Name>; unnamed (organizer) layers get id=None.
        self.id = self.name = testXMLValue(elem.find(nspath('Name', WMS_NAMESPACE)))
        # layer attributes
        self.queryable = int(elem.attrib.get('queryable', 0))
        self.cascaded = int(elem.attrib.get('cascaded', 0))
        self.opaque = int(elem.attrib.get('opaque', 0))
        self.noSubsets = int(elem.attrib.get('noSubsets', 0))
        self.fixedWidth = int(elem.attrib.get('fixedWidth', 0))
        self.fixedHeight = int(elem.attrib.get('fixedHeight', 0))
        # title is mandatory property
        self.title = None
        title = testXMLValue(elem.find(nspath('Title', WMS_NAMESPACE)))
        if title is not None:
            self.title = title.strip()
        self.abstract = testXMLValue(elem.find(nspath('Abstract', WMS_NAMESPACE)))
        # bboxes
        boxes = elem.findall(nspath('BoundingBox', WMS_NAMESPACE))
        self.boundingBoxes = []
        for b in boxes:
            try:
                # sometimes the SRS attribute is (wrongly) not provided
                srs = b.attrib['CRS']
            except KeyError:
                srs = None
            # 5-tuple: (minx, miny, maxx, maxy, crs-or-None)
            self.boundingBoxes.append((
                float(b.attrib['minx']),
                float(b.attrib['miny']),
                float(b.attrib['maxx']),
                float(b.attrib['maxy']),
                srs,
            ))
        if self.parent:
            if hasattr(self.parent, 'boundingBox'):
                self.boundingBoxes.append(self.parent.boundingBox)
        # de-duplicate inherited + own boxes (order is not preserved)
        self.boundingBoxes = list(set(self.boundingBoxes))
        # ScaleHint
        sh = elem.find(nspath('ScaleHint', WMS_NAMESPACE))
        self.scaleHint = None
        if sh is not None:
            if 'min' in sh.attrib and 'max' in sh.attrib:
                self.scaleHint = {'min': sh.attrib['min'], 'max': sh.attrib['max']}
        # Attribution (title / url / logo), all optional.
        attribution = elem.find(nspath('Attribution', WMS_NAMESPACE))
        self.attribution = {}
        if attribution is not None:
            title = attribution.find(nspath('Title', WMS_NAMESPACE))
            url = attribution.find(nspath('OnlineResource', WMS_NAMESPACE))
            logo = attribution.find(nspath('LogoURL', WMS_NAMESPACE))
            if title is not None:
                self.attribution['title'] = title.text
            if url is not None:
                self.attribution['url'] = url.attrib['{http://www.w3.org/1999/xlink}href']
            if logo is not None:
                self.attribution['logo_size'] = (
                    int(logo.attrib['width']),
                    int(logo.attrib['height'])
                )
                self.attribution['logo_url'] = logo.find(
                    nspath('OnlineResource', WMS_NAMESPACE)
                ).attrib['{http://www.w3.org/1999/xlink}href']
        # WGS84 bounding box, own or inherited from parent.
        # NOTE(review): float('') raises ValueError when a child element is
        # missing — the `is not None` checks here only guard the .text access.
        b = elem.find(nspath('EX_GeographicBoundingBox', WMS_NAMESPACE))
        if b is not None:
            west_longitude = b.find(nspath('westBoundLongitude', WMS_NAMESPACE))
            east_longitude = b.find(nspath('eastBoundLongitude', WMS_NAMESPACE))
            south_latitude = b.find(nspath('southBoundLatitude', WMS_NAMESPACE))
            north_latitude = b.find(nspath('northBoundLatitude', WMS_NAMESPACE))
            self.boundingBoxWGS84 = (
                float(west_longitude.text if west_longitude is not None else ''),
                float(south_latitude.text if south_latitude is not None else ''),
                float(east_longitude.text if east_longitude is not None else ''),
                float(north_latitude.text if north_latitude is not None else ''),
            )
        elif self.parent:
            self.boundingBoxWGS84 = self.parent.boundingBoxWGS84
        else:
            self.boundingBoxWGS84 = None
        # TODO: get this from the bbox attributes instead (deal with parents)
        # SRS options
        self.crsOptions = []
        # Copy any parent SRS options (they are inheritable properties)
        if self.parent:
            self.crsOptions = list(self.parent.crsOptions)
        # Look for SRS option attached to this layer
        if elem.find(nspath('CRS', WMS_NAMESPACE)) is not None:
            # some servers found in the wild use a single SRS
            # tag containing a whitespace separated list of SRIDs
            # instead of several SRS tags. hence the inner loop
            for srslist in map(lambda x: x.text, elem.findall(nspath('CRS', WMS_NAMESPACE))):
                if srslist:
                    for srs in srslist.split():
                        self.crsOptions.append(srs)
        # Get rid of duplicate entries
        self.crsOptions = list(set(self.crsOptions))
        # Set self.crsOptions to None if the layer (and parents) had no SRS options
        if len(self.crsOptions) == 0:
            # raise ValueError('%s no SRS available!?' % (elem,))
            # Comment by D Lowe.
            # Do not raise ValueError as it is possible that a layer is purely a parent layer and does not have SRS specified. Instead set crsOptions to None
            # Comment by Jachym:
            # Do not set it to None, but to [], which will make the code
            # work further. Fixed by anthonybaxter
            self.crsOptions = []
        # Styles
        self.styles = {}
        # Copy any parent styles (they are inheritable properties)
        if self.parent:
            self.styles = self.parent.styles.copy()
        # Get the styles for this layer (items with the same name are replaced)
        for s in elem.findall(nspath('Style', WMS_NAMESPACE)):
            name = s.find(nspath('Name', WMS_NAMESPACE))
            title = s.find(nspath('Title', WMS_NAMESPACE))
            if name is None or title is None:
                raise ValueError('%s missing name or title' % (s,))
            style = {'title': title.text}
            # legend url
            legend = s.find(nspath('LegendURL/OnlineResource', WMS_NAMESPACE))
            if legend is not None:
                style['legend'] = legend.attrib['{http://www.w3.org/1999/xlink}href']
            self.styles[name.text] = style
        # keywords
        self.keywords = [f.text for f in elem.findall(nspath('KeywordList/Keyword', WMS_NAMESPACE))]
        # timepositions - times for which data is available.
        self.timepositions = None
        self.defaulttimeposition = None
        for extent in elem.findall(nspath('Extent', WMS_NAMESPACE)):
            if extent.attrib.get("name").lower() == 'time':
                if extent.text:
                    self.timepositions = extent.text.split(',')
                self.defaulttimeposition = extent.attrib.get("default")
                break
        # Elevations - available vertical levels
        self.elevations = None
        for extent in elem.findall(nspath('Extent', WMS_NAMESPACE)):
            if extent.attrib.get("name").lower() == 'elevation':
                if extent.text:
                    self.elevations = extent.text.split(',')
                break
        # MetadataURLs
        self.metadataUrls = []
        for m in elem.findall(nspath('MetadataURL', WMS_NAMESPACE)):
            metadataUrl = {
                'type': testXMLValue(m.attrib['type'], attrib=True),
                'format': testXMLValue(m.find(nspath('Format', WMS_NAMESPACE))),
                'url': testXMLValue(m.find(nspath('OnlineResource', WMS_NAMESPACE)).attrib['{http://www.w3.org/1999/xlink}href'], attrib=True)
            }
            if metadataUrl['url'] is not None and parse_remote_metadata:  # download URL
                try:
                    content = urllib2.urlopen(metadataUrl['url'], timeout=timeout)
                    doc = etree.parse(content)
                    if metadataUrl['type'] is not None:
                        if metadataUrl['type'] == 'FGDC':
                            metadataUrl['metadata'] = Metadata(doc)
                        if metadataUrl['type'] == 'TC211':
                            metadataUrl['metadata'] = MD_Metadata(doc)
                except Exception:
                    # best-effort: a failed remote fetch/parse leaves
                    # 'metadata' as None rather than aborting the layer parse
                    metadataUrl['metadata'] = None
            self.metadataUrls.append(metadataUrl)
        # DataURLs
        self.dataUrls = []
        for m in elem.findall(nspath('DataURL', WMS_NAMESPACE)):
            dataUrl = {
                'format': m.find(nspath('Format', WMS_NAMESPACE)).text.strip(),
                'url': m.find(nspath('OnlineResource', WMS_NAMESPACE)).attrib['{http://www.w3.org/1999/xlink}href']
            }
            self.dataUrls.append(dataUrl)
        # Child layers.
        # NOTE(review): parse_remote_metadata/timeout are not forwarded here,
        # and `index` defaults to 0 for every child — confirm intended.
        self.layers = []
        for child in elem.findall(nspath('Layer', WMS_NAMESPACE)):
            self.layers.append(ContentMetadata(child, self))

    def __str__(self):
        return 'Layer Name: %s Title: %s' % (self.name, self.title)
class OperationMetadata:
    """Abstraction for WMS OperationMetadata.

    Implements IOperationMetadata.

    Built from one child of <Capability><Request> (e.g. <GetMap>): ``name``
    is the operation tag, ``formatOptions`` the advertised output formats,
    and ``methods`` a list of {'type': 'Get'|'Post', 'url': ...} DCP
    endpoints.
    """
    def __init__(self, elem):
        """."""
        self.name = xmltag_split(elem.tag)
        # formatOptions
        self.formatOptions = [f.text for f in elem.findall(nspath('Format', WMS_NAMESPACE))]
        self.methods = []
        # Each <DCPType><HTTP> child (<Get>/<Post>) carries an OnlineResource.
        for verb in elem.findall(nspath('DCPType/HTTP/*', WMS_NAMESPACE)):
            url = verb.find(nspath('OnlineResource', WMS_NAMESPACE)).attrib['{http://www.w3.org/1999/xlink}href']
            self.methods.append({'type': xmltag_split(verb.tag), 'url': url})
class ContactMetadata:
    """Abstraction for contact details advertised in GetCapabilities.

    Parses a WMS <ContactInformation> element. Every attribute (name,
    email, address, city, region, postcode, country, organization,
    position) is None when the corresponding child element is absent.
    """
    def __init__(self, elem):
        name = elem.find(nspath('ContactPersonPrimary/ContactPerson', WMS_NAMESPACE))
        if name is not None:
            self.name = name.text
        else:
            self.name = None
        # Fix: this lookup previously used a namespace-less find
        # (elem.find('ContactElectronicMailAddress')), unlike every sibling
        # lookup in this class, so it could never match an element in a
        # namespaced WMS 1.3.0 capabilities document.
        email = elem.find(nspath('ContactElectronicMailAddress', WMS_NAMESPACE))
        if email is not None:
            self.email = email.text
        else:
            self.email = None
        self.address = self.city = self.region = None
        self.postcode = self.country = None
        address = elem.find(nspath('ContactAddress', WMS_NAMESPACE))
        if address is not None:
            street = address.find(nspath('Address', WMS_NAMESPACE))
            if street is not None:
                self.address = street.text
            city = address.find(nspath('City', WMS_NAMESPACE))
            if city is not None:
                self.city = city.text
            region = address.find(nspath('StateOrProvince', WMS_NAMESPACE))
            if region is not None:
                self.region = region.text
            postcode = address.find(nspath('PostCode', WMS_NAMESPACE))
            if postcode is not None:
                self.postcode = postcode.text
            country = address.find(nspath('Country', WMS_NAMESPACE))
            if country is not None:
                self.country = country.text
        organization = elem.find(nspath('ContactPersonPrimary/ContactOrganization', WMS_NAMESPACE))
        if organization is not None:
            self.organization = organization.text
        else:
            self.organization = None
        position = elem.find(nspath('ContactPosition', WMS_NAMESPACE))
        if position is not None:
            self.position = position.text
        else:
            self.position = None
class WMSCapabilitiesReader:
    """Read and parse capabilities document into a lxml.etree infoset

    Builds a GetCapabilities URL (filling in any of service/request/version
    that the caller's URL lacks), fetches it via openURL with optional
    basic-auth credentials, and returns the parsed element tree.
    """
    def __init__(self, version='1.3.0', url=None, un=None, pw=None):
        """Initialize

        version -- WMS version to request
        url -- service base URL
        un, pw -- optional username/password forwarded to openURL
        """
        self.version = version
        self._infoset = None
        self.url = url
        self.username = un
        self.password = pw

        #if self.username and self.password:
            ## Provide login information in order to use the WMS server
            ## Create an OpenerDirector with support for Basic HTTP
            ## Authentication...
            #passman = HTTPPasswordMgrWithDefaultRealm()
            #passman.add_password(None, self.url, self.username, self.password)
            #auth_handler = HTTPBasicAuthHandler(passman)
            #opener = build_opener(auth_handler)
            #self._open = opener.open

    def capabilities_url(self, service_url):
        """Return a capabilities url

        Preserves any query parameters already present in service_url and
        appends service/request/version only when missing.
        """
        qs = []
        if service_url.find('?') != -1:
            qs = cgi.parse_qsl(service_url.split('?')[1])
        params = [x[0] for x in qs]
        if 'service' not in params:
            qs.append(('service', 'WMS'))
        if 'request' not in params:
            qs.append(('request', 'GetCapabilities'))
        if 'version' not in params:
            qs.append(('version', self.version))
        urlqs = urlencode(tuple(qs))
        return service_url.split('?')[0] + '?' + urlqs

    def read(self, service_url):
        """Get and parse a WMS capabilities document, returning an
        elementtree instance

        service_url is the base url, to which is appended the service,
        version, and request parameters
        """
        getcaprequest = self.capabilities_url(service_url)
        # now split it up again to use the generic openURL function...
        spliturl = getcaprequest.split('?')
        u = openURL(
            spliturl[0],
            spliturl[1],
            method='Get',
            username=self.username,
            password=self.password
        )
        return etree.fromstring(u.read())

    def readString(self, st):
        """Parse a WMS capabilities document, returning an elementtree instance

        string should be an XML capabilities document
        """
        # NOTE(review): under Python 2 this rejects `unicode` input since the
        # check is against `str` only.
        if not isinstance(st, str):
            raise ValueError("String must be of type string, not %s" % type(st))
        return etree.fromstring(st)
|
bsd-3-clause
|
jakirkham/dask-distance
|
dask_distance/_utils.py
|
1
|
1884
|
import functools
import itertools
import numpy
import dask
import dask.array
from . import _compat
from . import _pycompat
def _broadcast_uv(u, v):
    # Promote u and v to 2-D stacks of vectors and insert singleton axes so
    # that every row of u pairs with every row of v under broadcasting:
    # U becomes (m, 1, d) and V becomes (1, n, d).
    U = _compat._atleast_2d(u)
    V = _compat._atleast_2d(v)
    if U.ndim != 2:
        raise ValueError("u must be a 1-D or 2-D array.")
    if V.ndim != 2:
        raise ValueError("v must be a 1-D or 2-D array.")
    U = U[:, None]
    V = V[None, :]
    return U, V
def _unbroadcast_uv(u, v, result):
    # Inverse of _broadcast_uv for the result: drop the axes that were only
    # introduced because the corresponding input was 1-D, so a pair of 1-D
    # inputs yields a scalar-like result.
    u = _compat._asarray(u)
    v = _compat._asarray(v)
    if v.ndim == 1:
        result = result[:, 0]
    if u.ndim == 1:
        result = result[0]
    return result
def _broadcast_uv_wrapper(func):
    """Decorate *func* so it accepts 1-D or 2-D ``u``/``v``.

    Inputs are broadcast to paired stacks before calling *func* and the
    result is squeezed back to match the original dimensionality.
    """
    @functools.wraps(func)
    def _wrapped_broadcast_uv(u, v):
        U, V = _broadcast_uv(u, v)
        return _unbroadcast_uv(u, v, func(U, V))

    return _wrapped_broadcast_uv
def _cdist_apply(U, V, metric):
U = U[:, 0, :]
V = V[0, :, :]
result = numpy.empty((len(U), len(V)), dtype=float)
for i, j in numpy.ndindex(result.shape):
result[i, j] = metric(U[i], V[j])
return result
def _bool_cmp_cnts(U, V):
    # Compute the 2x2 boolean contingency counts between the broadcast
    # stacks U and V: entry (i, j) counts positions where U == bool(i)
    # and V == bool(j), summed over the last (feature) axis as floats.
    U = _compat._asarray(U)
    V = _compat._asarray(V)
    U = U.astype(bool)
    V = V.astype(bool)
    # Index 0 holds the negated array, index 1 the array itself.
    U_01 = [~U, U]
    V_01 = [~V, V]
    # Object array so each cell can hold a lazy dask array of counts.
    UV_cmp_cnts = numpy.empty((2, 2), dtype=object)
    UV_ranges = [_pycompat.irange(e) for e in UV_cmp_cnts.shape]
    for i, j in itertools.product(*UV_ranges):
        UV_cmp_cnts[i, j] = (U_01[i] & V_01[j]).sum(axis=-1, dtype=float)
    # Collapse the 2x2 object array into one dask array with two extra
    # leading axes of length 2, stacking one object-array dimension at a
    # time (innermost dimension first).
    for i in _pycompat.irange(UV_cmp_cnts.ndim - 1, -1, -1):
        UV_cmp_cnts2 = UV_cmp_cnts[..., 0]
        for j in itertools.product(*(UV_ranges[:i])):
            UV_cmp_cnts2[j] = dask.array.stack(UV_cmp_cnts[j].tolist(), axis=0)
        UV_cmp_cnts = UV_cmp_cnts2
    # 0-d object array -> the dask array it wraps.
    UV_cmp_cnts = UV_cmp_cnts[()]
    return UV_cmp_cnts
|
bsd-3-clause
|
lrowe/rdflib
|
rdflib/plugins/parsers/pyRdfa/transform/OpenID.py
|
26
|
2119
|
# -*- coding: utf-8 -*-
"""
Simple transformer: handle OpenID elements. I.e., an openid namespace is added and the usual
'link' elements for openid are exchanged against a namespaced version.
@summary: OpenID transformer module.
@requires: U{RDFLib package<http://rdflib.net>}
@organization: U{World Wide Web Consortium<http://www.w3.org>}
@author: U{Ivan Herman<a href="http://www.w3.org/People/Ivan/">}
@license: This software is available for use under the
U{W3C® SOFTWARE NOTICE AND LICENSE<href="http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231">}
@contact: Ivan Herman, ivan@w3.org
@var OPENID_NS: the OpenID URI used in the package
"""
"""
$Id: OpenID.py,v 1.4 2012-01-18 14:16:44 ivan Exp $
$Date: 2012-01-18 14:16:44 $
"""
OPENID_NS = "http://xmlns.openid.net/auth#"
def OpenID_transform(html, options, state):
    """
    Replace C{openid.XXX} type C{@rel} attribute values in C{<link>} elements by C{openid:XXX}. The openid URI is also
    added to the top level namespaces with the C{openid:} local name.
    @param html: a DOM node for the top level html element
    @param options: invocation options
    @type options: L{Options<pyRdfa.options>}
    @param state: top level execution state
    @type state: L{State<pyRdfa.state>}
    """
    from ..host import HostLanguage
    # OpenID links are only meaningful in the (X)HTML host languages.
    if not (options.host_language in [HostLanguage.xhtml, HostLanguage.html5, HostLanguage.xhtml5]):
        return
    # the head element is necessary; to be sure, the namespaces are set
    # on that level only
    head = None
    try:
        head = html.getElementsByTagName("head")[0]
    except (AttributeError, IndexError):
        # no head element (IndexError), or `html` is not a DOM element
        # (AttributeError) — nothing to transform. Narrowed from a bare
        # `except`, which also silently hid unrelated programming errors.
        return
    foundOpenId = False
    for link in html.getElementsByTagName("link"):
        if link.hasAttribute("rel"):
            rel = link.getAttribute("rel")
            newProp = ""
            # @rel is whitespace-separated; rewrite only the openid.* tokens
            # (e.g. "openid.server" -> "openid:server").
            for n in rel.strip().split():
                if n.startswith("openid."):
                    newProp += " " + n.replace("openid.", "openid:")
                    foundOpenId = True
                else:
                    newProp += " " + n
            link.setAttribute("rel", newProp.strip())
    # Add the OpenId namespace if necessary
    if foundOpenId and not head.hasAttribute("xmlns:openid"):
        head.setAttributeNS("", "xmlns:openid", OPENID_NS)
|
bsd-3-clause
|
zh021589/robotframework-restlibrary
|
RestLibrary.py
|
2
|
7284
|
# -*- coding: UTF-8 -*-
"""
This is a HTTP/REST client library, primarily designed for use as a
`Robot Framework <http://robotframework.org/>`_ test library. It provides
keywords for calling REST-style services and inspecting the response.
Copyright (c) 2008 Niklas Lindström <lindstream@gmail.com>, all rights
reserved.
"""
__author__ = "Niklas Lindström"
__version__ = "0.1a"
import re
from urlparse import urljoin
import httplib
from warnings import warn
import hashlib
# Prefer httplib2, with fallback to (std) httplib.
# Both branches define http_request(url, method, data=None, headers=None)
# returning a (response, content) pair where response exposes .status,
# .reason and a dict-style .get for headers.
try:
    from httplib2 import Http

    def http_request(url, method, data=None, headers=None):
        # Redirects are NOT followed here so that the `follow` keyword can
        # inspect Location/Content-Location headers itself.
        h = Http()
        h.follow_redirects = False
        return h.request(url, method, data, headers)
except ImportError:
    from urlparse import urlsplit
    from httplib import HTTPConnection, HTTPSConnection

    def http_request(url, method, data=None, headers=None):
        # Minimal stand-in built on httplib. Note: the fragment is dropped
        # and "?" is always sent, even when the query string is empty.
        scheme, netloc, path, query, fragment = urlsplit(url)
        if scheme == 'https':
            conn = HTTPSConnection(netloc)
        else:
            conn = HTTPConnection(netloc)
        conn.request(method, "%s?%s" % (path, query), data, headers=headers)
        response = conn.getresponse()
        response.get = response.getheader  # NOTE: monkey-patch
        return response, response.read()
# Assertion utilities.
def expect(key, expected, value):
    """Assert that *value* equals *expected*, naming *key* in the failure."""
    message = "Expected %s to be %r but was %r" % (key, expected, value)
    assert expected == value, message
def expect_regexp(key, regexp, value):
    """Assert that *value* matches *regexp* (anchored at the start)."""
    message = "Expected %s to match regexp %r but was %r" % (key, regexp, value)
    assert re.match(regexp, value), message
def expect_exists(what, value):
    """Assert that *value* is truthy (i.e. present)."""
    message = "Expected %s to be present (got %r)." % (what, value)
    assert value, message
def expect_not_exists(what, value):
    """Assert that *value* is falsy (i.e. absent)."""
    message = "Expected no value for %s (got %r)." % (what, value)
    assert not value, message
# The library core.
class CoreRestClient(object):
    """Holds the request/response state shared by all keyword methods."""

    def __init__(self):
        self._status = None
        self._reason = None
        self._baseurl = None
        self._send_headers = {}
        self._reset()

    def _reset(self):
        # Clear per-request state; called before every new request.
        self._current_url = None
        self._response = None
        self._content = None

    def _do_request(self, method, url, data=None):
        self._reset()
        # Relative URLs are resolved against the configured base URL.
        self._current_url = url = urljoin(self._baseurl, url)
        response, content = http_request(
            url, method, data, headers=self._send_headers)
        self._status, self._reason = response.status, response.reason
        self._response = response
        self._content = content

    def _get_header(self, header):
        # Case-insensitive response-header lookup; `get` is either
        # httplib2's dict-style get or the monkey-patched getheader.
        return self._response.get(header.lower())

    def get_current_url(self):
        return self._current_url
class CoreRestLibrary(CoreRestClient):
    """Robot Framework keywords: HTTP verbs plus response assertions."""

    def base_url(self, url):
        # Subsequent relative URLs are resolved against this base.
        self._baseurl = url

    def set_header(self, header, value):
        # Header sent with every subsequent request.
        self._send_headers[header] = value

    def get(self, url):
        self._do_request("GET", url)

    def head(self, url):
        self._do_request("HEAD", url)

    def post(self, url, data):
        self._do_request("POST", url, data)

    def put(self, url, data):
        self._do_request("PUT", url, data)

    def delete(self, url):
        self._do_request("DELETE", url)

    def options(self, url):
        self._do_request("OPTIONS", url)

    def trace(self, url):
        self._do_request("TRACE", url)

    def patch(self, url, data):
        self._do_request("PATCH", url, data)

    def response_status(self, expected_status):
        # Accepts either a numeric status ("200") or a reason phrase
        # ("OK"): non-digit input is compared against httplib's standard
        # reason text for the actual status code.
        if not expected_status.isdigit():
            status = httplib.responses[int(self._status)]
        else:
            status = "%d" % self._status
        expect("status", expected_status, status)

    def response(self, expected_response):
        # Full status line check, e.g. "200 OK".
        response = "%d %s" % (self._status, self._reason)
        expect("status", expected_response, response)

    def follow(self):
        # GET the URL named by Location (preferred) or Content-Location.
        header = 'Location'
        c_loc = self._get_header(header)
        # FIXME: should only use Location (and check for 30x), right?
        if not c_loc:
            header = 'Content-Location'
            c_loc = self._get_header(header)
        expect_exists("response header '%s'" % header, c_loc)
        self.get(c_loc)

    def header(self, header, expected=None):
        value = self._get_header(header)
        expect(header, expected, value)

    def body_is(self, expected):
        expect("response body", expected, self._content)

    def has_body(self):
        # Returns the body so callers can chain further checks on it.
        expect_exists("response body", self._content)
        return self._content

    def no_body(self):
        expect_not_exists("response body", self._content)

    def body_checksum_is(self, algorithm, expected):
        # `algorithm` is any name accepted by hashlib.new (e.g. "md5").
        checksum = hashlib.new(algorithm, self._content).hexdigest()
        expect("body checksum", expected, checksum)
# Final library class (with ad hoc mixin support).
class RestLibrary(CoreRestLibrary):
    """Concrete REST keyword library; features are grafted on via mixin()."""

    @classmethod
    def mixin(cls, klass):
        # Ad hoc mixin mechanism: append the feature class to our bases so
        # its keywords become available on the library class itself.
        cls.__bases__ += (klass,)

    def __init__(self):
        self.__base_call('__init__')

    def _reset(self):
        self.__base_call('_reset')

    def __base_call(self, name, *args, **kwargs):
        # Dispatch *name* to every direct base that defines it, so each
        # mixin initializes/resets its own state.  This deliberately avoids
        # cooperative super(): mixins are written without super() chains.
        for base in type(self).__bases__:
            if hasattr(base, name):
                getattr(base, name)(self, *args, **kwargs)
# Additional feature mixins.
class XmlMixinSupport(object):
    """XML keyword interface.

    Defines the XML/XPath keywords and their bookkeeping.  Actual parsing
    and XPath evaluation are left to a subclass (see XmlMixin below); the
    base implementations raise NotImplementedError when no parser is
    available.
    """

    def __init__(self):
        # Prefix -> URI map used for XPath evaluation.
        self._namespaces = {}

    def _reset(self):
        # Cached parsed document for the current response body.
        self._lazy_xml = None

    def xmlns(self, pfx, uri):
        # Register a namespace prefix for later XPath expressions.
        self._namespaces[pfx] = uri

    def xpath_value(self, expr, expected):
        value = self.find_xpath(expr)
        expect("xpath %r" % expr, expected, value)
        return value

    def xpath_regexp(self, expr, regexp):
        value = self.find_xpath(expr)
        expect_regexp("xpath %r" % expr, regexp, value)
        return value

    def find_xpath_nodes(self, expr):
        # Like find_xpath but returns the raw node list, not a string.
        return self.find_xpath(expr, False)

    def find_xpath(self, expr, tostr=True):
        """Evaluate *expr*, assert it matched, optionally stringify result."""
        value = self._eval_xpath(expr)
        expect_exists("xpath %r" % expr, value)
        if tostr:
            value = "".join(str(v) for v in value)
        return value

    def no_xpath(self, expr):
        value = self._eval_xpath(expr)
        expect_not_exists("xpath %r" % expr, value)

    def _get_parsed_xml(self):
        # Overridden by a parser-backed subclass.
        raise NotImplementedError("No XML parser available.")

    def _eval_xpath(self, expr):
        # Overridden by a parser-backed subclass.
        raise NotImplementedError("No XPath processor available.")
try:
    from lxml import etree

    class XmlMixin(XmlMixinSupport):
        """XML support backed by lxml: real parsing and XPath evaluation."""

        def _get_parsed_xml(self):
            # Parse lazily and cache; _reset() clears _lazy_xml per request.
            if self._lazy_xml is None:
                self._lazy_xml = etree.fromstring(self._content)
            return self._lazy_xml

        def _eval_xpath(self, expr):
            doc = self._get_parsed_xml()
            return doc.xpath(expr, namespaces=self._namespaces)

    RestLibrary.mixin(XmlMixin)
except ImportError:
    warn("Cannot parse XML responses. Missing module: lxml")
    # TODO: Try some more options (4Suite, elementtree, javax.xml).
    # If none can be found, disable XML keywords by removing the following line:
    RestLibrary.mixin(XmlMixinSupport)
# TODO: No features yet.
class JsonMixin(object):
    """Placeholder for JSON response support; currently only resets its cache."""

    def _reset(self):
        # Cleared on every new request (invoked via RestLibrary.__base_call).
        self._lazy_json = None

RestLibrary.mixin(JsonMixin)
|
apache-2.0
|
apache/incubator-allura
|
ForgeSVN/forgesvn/tests/test_tasks.py
|
2
|
2803
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import shutil
import unittest
import os
import tg
import mock
from pylons import tmpl_context as c
from paste.deploy.converters import asbool
from alluratest.controller import setup_basic_test
from allura import model as M
from allura.lib import helpers as h
from allura.tasks import repo_tasks
from forgesvn.tests import with_svn
class TestRepoTasks(unittest.TestCase):
    """Exercise the repo_tasks module against an SVN-backed test project."""

    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()
        # Patch out SMTP when the test config asks for a mock mail server.
        if asbool(tg.config.get('smtp.mock')):
            self.smtp_mock = mock.patch('allura.lib.mail_util.smtplib.SMTP')
            self.smtp_mock.start()

    def tearDown(self):
        if asbool(tg.config.get('smtp.mock')):
            self.smtp_mock.stop()

    @with_svn
    def setup_with_tools(self):
        h.set_context('test', 'src', neighborhood='Projects')

    def test_init(self):
        ns = M.Notification.query.find().count()
        with mock.patch.object(c.app.repo, 'init') as f:
            repo_tasks.init()
            M.main_orm_session.flush()
            # BUG FIX: the original `assert f.called_with()` accessed a
            # nonexistent mock attribute, which returns a (truthy) child
            # Mock, so the assertion could never fail.  Use the real mock
            # assertion method, matching test_clone/test_refresh below.
            f.assert_called_with()
            # init() should have sent exactly one notification.
            assert ns + 1 == M.Notification.query.find().count()

    def test_clone(self):
        ns = M.Notification.query.find().count()
        with mock.patch.object(c.app.repo, 'init_as_clone') as f:
            repo_tasks.clone('foo', 'bar', 'baz')
            M.main_orm_session.flush()
            f.assert_called_with('foo', 'bar', 'baz')
            # clone() should have sent exactly one notification.
            assert ns + 1 == M.Notification.query.find().count()

    def test_refresh(self):
        with mock.patch.object(c.app.repo, 'refresh') as f:
            repo_tasks.refresh()
            f.assert_called_with()

    def test_uninstall(self):
        with mock.patch.object(shutil, 'rmtree') as f:
            repo_tasks.uninstall()
            # Uninstalling must remove the on-disk repository, best-effort.
            f.assert_called_with(
                os.path.join(tg.config['scm.repos.root'], 'svn/p/test/src'),
                ignore_errors=True)
|
apache-2.0
|
alimony/django
|
django/db/migrations/operations/fields.py
|
18
|
12979
|
from django.db.models.fields import NOT_PROVIDED
from django.utils.functional import cached_property
from .base import Operation
class FieldOperation(Operation):
    """Common base for migration operations acting on one field of one model."""

    def __init__(self, model_name, name):
        self.model_name = model_name
        self.name = name

    @cached_property
    def model_name_lower(self):
        return self.model_name.lower()

    @cached_property
    def name_lower(self):
        return self.name.lower()

    def is_same_model_operation(self, operation):
        # Comparisons are case-insensitive: model references in migrations
        # are not guaranteed to match the model's declared casing.
        return self.model_name_lower == operation.model_name_lower

    def is_same_field_operation(self, operation):
        return self.is_same_model_operation(operation) and self.name_lower == operation.name_lower

    def references_model(self, name, app_label=None):
        return name.lower() == self.model_name_lower

    def references_field(self, model_name, name, app_label=None):
        return self.references_model(model_name) and name.lower() == self.name_lower

    def reduce(self, operation, in_between, app_label=None):
        # In addition to the base reductions, an operation that does not
        # reference this field at all can be optimized "through" (truthy
        # non-list result tells the optimizer the two operations commute).
        return (
            super().reduce(operation, in_between, app_label=app_label) or
            not operation.references_field(self.model_name, self.name, app_label)
        )
class AddField(FieldOperation):
    """Add a field to a model."""

    def __init__(self, model_name, name, field, preserve_default=True):
        self.field = field
        # When False, the provided default only exists to populate existing
        # rows during the migration and is stripped from future state.
        self.preserve_default = preserve_default
        super().__init__(model_name, name)

    def deconstruct(self):
        # Returns (class name, args, kwargs) for serialization into a
        # migration file; preserve_default is only emitted when non-default.
        kwargs = {
            'model_name': self.model_name,
            'name': self.name,
            'field': self.field,
        }
        if self.preserve_default is not True:
            kwargs['preserve_default'] = self.preserve_default
        return (
            self.__class__.__name__,
            [],
            kwargs
        )

    def state_forwards(self, app_label, state):
        # If preserve default is off, don't use the default for future state
        if not self.preserve_default:
            field = self.field.clone()
            field.default = NOT_PROVIDED
        else:
            field = self.field
        state.models[app_label, self.model_name_lower].fields.append((self.name, field))
        # Delay rendering of relationships if it's not a relational field
        delay = not field.is_relation
        state.reload_model(app_label, self.model_name_lower, delay=delay)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            field = to_model._meta.get_field(self.name)
            if not self.preserve_default:
                # Temporarily restore the default so existing rows get a
                # value when the column is added, then strip it again.
                field.default = self.field.default
            schema_editor.add_field(
                from_model,
                field,
            )
            if not self.preserve_default:
                field.default = NOT_PROVIDED

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        from_model = from_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, from_model):
            schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))

    def describe(self):
        return "Add field %s to %s" % (self.name, self.model_name)

    def reduce(self, operation, in_between, app_label=None):
        # Optimizer hooks: fold a later operation on this same field into
        # this AddField where possible.
        if isinstance(operation, FieldOperation) and self.is_same_field_operation(operation):
            if isinstance(operation, AlterField):
                # Add-then-alter collapses into adding the altered field.
                return [
                    AddField(
                        model_name=self.model_name,
                        name=operation.name,
                        field=operation.field,
                    ),
                ]
            elif isinstance(operation, RemoveField):
                # Add-then-remove cancels out entirely.
                return []
            elif isinstance(operation, RenameField):
                # Add-then-rename collapses into adding under the new name.
                return [
                    AddField(
                        model_name=self.model_name,
                        name=operation.new_name,
                        field=self.field,
                    ),
                ]
        return super().reduce(operation, in_between, app_label=app_label)
class RemoveField(FieldOperation):
    """Remove a field from a model."""

    def deconstruct(self):
        # (class name, args, kwargs) for serialization into a migration file.
        kwargs = {
            'model_name': self.model_name,
            'name': self.name,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )

    def state_forwards(self, app_label, state):
        # Rebuild the field list without the removed field, keeping a
        # reference to the removed instance to decide on reload delay.
        new_fields = []
        old_field = None
        for name, instance in state.models[app_label, self.model_name_lower].fields:
            if name != self.name:
                new_fields.append((name, instance))
            else:
                old_field = instance
        state.models[app_label, self.model_name_lower].fields = new_fields
        # Delay rendering of relationships if it's not a relational field
        delay = not old_field.is_relation
        state.reload_model(app_label, self.model_name_lower, delay=delay)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        from_model = from_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, from_model):
            schema_editor.remove_field(from_model, from_model._meta.get_field(self.name))

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Reversing a removal re-adds the field as defined in the old state.
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            schema_editor.add_field(from_model, to_model._meta.get_field(self.name))

    def describe(self):
        return "Remove field %s from %s" % (self.name, self.model_name)
class AlterField(FieldOperation):
    """
    Alter a field's database column (e.g. null, max_length) to the provided
    new field.
    """

    def __init__(self, model_name, name, field, preserve_default=True):
        self.field = field
        # When False, the default only exists to populate existing rows
        # during the migration and is stripped from future state.
        self.preserve_default = preserve_default
        super().__init__(model_name, name)

    def deconstruct(self):
        # (class name, args, kwargs) for serialization into a migration file.
        kwargs = {
            'model_name': self.model_name,
            'name': self.name,
            'field': self.field,
        }
        if self.preserve_default is not True:
            kwargs['preserve_default'] = self.preserve_default
        return (
            self.__class__.__name__,
            [],
            kwargs
        )

    def state_forwards(self, app_label, state):
        if not self.preserve_default:
            field = self.field.clone()
            field.default = NOT_PROVIDED
        else:
            field = self.field
        # Swap in the new field definition, preserving field order.
        state.models[app_label, self.model_name_lower].fields = [
            (n, field if n == self.name else f)
            for n, f in
            state.models[app_label, self.model_name_lower].fields
        ]
        # TODO: investigate if old relational fields must be reloaded or if it's
        # sufficient if the new field is (#27737).
        # Delay rendering of relationships if it's not a relational field
        delay = not field.is_relation
        state.reload_model(app_label, self.model_name_lower, delay=delay)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            from_field = from_model._meta.get_field(self.name)
            to_field = to_model._meta.get_field(self.name)
            if not self.preserve_default:
                # Temporarily restore the default so the alteration can
                # fill existing rows, then strip it again.
                to_field.default = self.field.default
            schema_editor.alter_field(from_model, from_field, to_field)
            if not self.preserve_default:
                to_field.default = NOT_PROVIDED

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Altering is symmetric: run forwards with the states swapped by
        # the migration executor.
        self.database_forwards(app_label, schema_editor, from_state, to_state)

    def describe(self):
        return "Alter field %s on %s" % (self.name, self.model_name)

    def reduce(self, operation, in_between, app_label=None):
        if isinstance(operation, RemoveField) and self.is_same_field_operation(operation):
            # Alter-then-remove: the alteration is pointless, keep removal.
            return [operation]
        elif isinstance(operation, RenameField) and self.is_same_field_operation(operation):
            # Alter-then-rename: rename first, then alter under the new name.
            return [
                operation,
                AlterField(
                    model_name=self.model_name,
                    name=operation.new_name,
                    field=self.field,
                ),
            ]
        return super().reduce(operation, in_between, app_label=app_label)
class RenameField(FieldOperation):
    """Rename a field on the model. Might affect db_column too."""

    def __init__(self, model_name, old_name, new_name):
        self.old_name = old_name
        self.new_name = new_name
        # The base class tracks the field under its old (pre-rename) name.
        super().__init__(model_name, old_name)

    @cached_property
    def old_name_lower(self):
        return self.old_name.lower()

    @cached_property
    def new_name_lower(self):
        return self.new_name.lower()

    def deconstruct(self):
        # (class name, args, kwargs) for serialization into a migration file.
        kwargs = {
            'model_name': self.model_name,
            'old_name': self.old_name,
            'new_name': self.new_name,
        }
        return (
            self.__class__.__name__,
            [],
            kwargs
        )

    def state_forwards(self, app_label, state):
        # Rename the field
        state.models[app_label, self.model_name_lower].fields = [
            (self.new_name if n == self.old_name else n, f)
            for n, f in state.models[app_label, self.model_name_lower].fields
        ]
        # Fix index/unique_together to refer to the new field
        options = state.models[app_label, self.model_name_lower].options
        for option in ('index_together', 'unique_together'):
            if option in options:
                options[option] = [
                    [self.new_name if n == self.old_name else n for n in together]
                    for together in options[option]
                ]
        # Locate the renamed field instance to decide on reload delay.
        for n, f in state.models[app_label, self.model_name_lower].fields:
            if n == self.new_name:
                field = f
                break
        # Delay rendering of relationships if it's not a relational field
        delay = not field.is_relation
        state.reload_model(app_label, self.model_name_lower, delay=delay)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            schema_editor.alter_field(
                from_model,
                from_model._meta.get_field(self.old_name),
                to_model._meta.get_field(self.new_name),
            )

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        # Reverse rename: new name back to old name.
        to_model = to_state.apps.get_model(app_label, self.model_name)
        if self.allow_migrate_model(schema_editor.connection.alias, to_model):
            from_model = from_state.apps.get_model(app_label, self.model_name)
            schema_editor.alter_field(
                from_model,
                from_model._meta.get_field(self.new_name),
                to_model._meta.get_field(self.old_name),
            )

    def describe(self):
        return "Rename field %s on %s to %s" % (self.old_name, self.model_name, self.new_name)

    def references_field(self, model_name, name, app_label=None):
        # A rename references the field under both its old and new names.
        return self.references_model(model_name) and (
            name.lower() == self.old_name_lower or
            name.lower() == self.new_name_lower
        )

    def reduce(self, operation, in_between, app_label=None):
        if (isinstance(operation, RenameField) and
                self.is_same_model_operation(operation) and
                self.new_name_lower == operation.old_name_lower):
            # Chained renames (a -> b, b -> c) collapse into one (a -> c).
            return [
                RenameField(
                    self.model_name,
                    self.old_name,
                    operation.new_name,
                ),
            ]
        # Skip `FieldOperation.reduce` as we want to run `references_field`
        # against self.new_name.
        return (
            super(FieldOperation, self).reduce(operation, in_between, app_label=app_label) or
            not operation.references_field(self.model_name, self.new_name, app_label)
        )
|
bsd-3-clause
|
manics/openmicroscopy
|
components/tools/OmeroWeb/omeroweb/webclient/webclient_utils.py
|
12
|
2113
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright (C) 2011 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
import datetime
logger = logging.getLogger(__name__)
def getDateTime(timeString):
    """Parse a 'YYYY-MM-DD HH:MM:SS' string into a naive datetime object."""
    time_format = "%Y-%m-%d %H:%M:%S"
    return datetime.datetime.strptime(timeString, time_format)
def formatPercentFraction(value):
    """ Formats a fraction as a percentage for display """
    percent = value * 100
    if percent < 1:
        # Keep one decimal place for sub-1% values.
        return "%.1f" % round(percent, 1)
    # Whole-number percentage otherwise.
    return "%s" % int(round(percent))
def _formatReport(callback):
    """
    Added as workaround to the changes made in #3006.
    """
    rsp = callback.getResponse()
    if not rsp:
        # Unfinished operation: nothing to report yet.
        return
    import omero
    if not isinstance(rsp, omero.cmd.ERR):
        # Delete2Response, etc include no warnings
        # Might want to take advantage of other feedback here
        return
    err = rsp.parameters.get("Error", "")
    warn = rsp.parameters.get("Warning", "")
    logger.error('Format report: %r' % {'error': err, 'warning': warn})
    return "Operation could not be completed successfully"
def _purgeCallback(request):
callbacks = request.session.get('callback').keys()
if len(callbacks) > 200:
for (cbString, count) in zip(request.session.get('callback').keys(),
range(0, len(callbacks)-200)):
del request.session['callback'][cbString]
|
gpl-2.0
|
KiChjang/servo
|
components/script/dom/bindings/codegen/parser/tests/test_method.py
|
15
|
9920
|
import WebIDL
def WebIDLTest(parser, harness):
    """Exercise WebIDL method parsing: signatures, modifiers, argument
    properties, and extended-attribute validation rules."""
    # Parse an interface covering the main method shapes: static, simple
    # args, optional/nullable args, variadics, object/any/float types.
    parser.parse("""
        interface TestMethods {
          void basic();
          static void basicStatic();
          void basicWithSimpleArgs(boolean arg1, byte arg2, unsigned long arg3);
          boolean basicBoolean();
          static boolean basicStaticBoolean();
          boolean basicBooleanWithSimpleArgs(boolean arg1, byte arg2, unsigned long arg3);
          void optionalArg(optional byte? arg1, optional sequence<byte> arg2);
          void variadicArg(byte?... arg1);
          object getObject();
          void setObject(object arg1);
          void setAny(any arg1);
          float doFloats(float arg1);
        };
    """)

    results = parser.finish()

    harness.ok(True, "TestMethods interface parsed without error.")
    harness.check(len(results), 1, "Should be one production.")
    iface = results[0]
    harness.ok(isinstance(iface, WebIDL.IDLInterface),
               "Should be an IDLInterface")
    harness.check(iface.identifier.QName(), "::TestMethods", "Interface has the right QName")
    harness.check(iface.identifier.name, "TestMethods", "Interface has the right name")
    harness.check(len(iface.members), 12, "Expect 12 members")

    methods = iface.members

    def checkArgument(argument, QName, name, type, optional, variadic):
        # Verify one parsed argument against its expected QName, name,
        # type string, and optional/variadic flags.
        harness.ok(isinstance(argument, WebIDL.IDLArgument),
                   "Should be an IDLArgument")
        harness.check(argument.identifier.QName(), QName, "Argument has the right QName")
        harness.check(argument.identifier.name, name, "Argument has the right name")
        harness.check(str(argument.type), type, "Argument has the right return type")
        harness.check(argument.optional, optional, "Argument has the right optional value")
        harness.check(argument.variadic, variadic, "Argument has the right variadic value")

    def checkMethod(method, QName, name, signatures,
                    static=False, getter=False, setter=False,
                    deleter=False, legacycaller=False, stringifier=False):
        # Verify a parsed method: identity, modifier flags, and every
        # (return type, arguments) signature via checkArgument.
        harness.ok(isinstance(method, WebIDL.IDLMethod),
                   "Should be an IDLMethod")
        harness.ok(method.isMethod(), "Method is a method")
        harness.ok(not method.isAttr(), "Method is not an attr")
        harness.ok(not method.isConst(), "Method is not a const")
        harness.check(method.identifier.QName(), QName, "Method has the right QName")
        harness.check(method.identifier.name, name, "Method has the right name")
        harness.check(method.isStatic(), static, "Method has the correct static value")
        harness.check(method.isGetter(), getter, "Method has the correct getter value")
        harness.check(method.isSetter(), setter, "Method has the correct setter value")
        harness.check(method.isDeleter(), deleter, "Method has the correct deleter value")
        harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value")
        harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value")
        harness.check(len(method.signatures()), len(signatures), "Method has the correct number of signatures")

        sigpairs = zip(method.signatures(), signatures)
        for (gotSignature, expectedSignature) in sigpairs:
            (gotRetType, gotArgs) = gotSignature
            (expectedRetType, expectedArgs) = expectedSignature

            harness.check(str(gotRetType), expectedRetType,
                          "Method has the expected return type.")

            for i in range(0, len(gotArgs)):
                (QName, name, type, optional, variadic) = expectedArgs[i]
                checkArgument(gotArgs[i], QName, name, type, optional, variadic)

    # One check per member, in declaration order.
    checkMethod(methods[0], "::TestMethods::basic", "basic", [("Void", [])])
    checkMethod(methods[1], "::TestMethods::basicStatic", "basicStatic",
                [("Void", [])], static=True)
    checkMethod(methods[2], "::TestMethods::basicWithSimpleArgs",
                "basicWithSimpleArgs",
                [("Void",
                  [("::TestMethods::basicWithSimpleArgs::arg1", "arg1", "Boolean", False, False),
                   ("::TestMethods::basicWithSimpleArgs::arg2", "arg2", "Byte", False, False),
                   ("::TestMethods::basicWithSimpleArgs::arg3", "arg3", "UnsignedLong", False, False)])])
    checkMethod(methods[3], "::TestMethods::basicBoolean", "basicBoolean", [("Boolean", [])])
    checkMethod(methods[4], "::TestMethods::basicStaticBoolean", "basicStaticBoolean", [("Boolean", [])], static=True)
    checkMethod(methods[5], "::TestMethods::basicBooleanWithSimpleArgs",
                "basicBooleanWithSimpleArgs",
                [("Boolean",
                  [("::TestMethods::basicBooleanWithSimpleArgs::arg1", "arg1", "Boolean", False, False),
                   ("::TestMethods::basicBooleanWithSimpleArgs::arg2", "arg2", "Byte", False, False),
                   ("::TestMethods::basicBooleanWithSimpleArgs::arg3", "arg3", "UnsignedLong", False, False)])])
    checkMethod(methods[6], "::TestMethods::optionalArg",
                "optionalArg",
                [("Void",
                  [("::TestMethods::optionalArg::arg1", "arg1", "ByteOrNull", True, False),
                   ("::TestMethods::optionalArg::arg2", "arg2", "ByteSequence", True, False)])])
    checkMethod(methods[7], "::TestMethods::variadicArg",
                "variadicArg",
                [("Void",
                  [("::TestMethods::variadicArg::arg1", "arg1", "ByteOrNull", True, True)])])
    checkMethod(methods[8], "::TestMethods::getObject",
                "getObject", [("Object", [])])
    checkMethod(methods[9], "::TestMethods::setObject",
                "setObject",
                [("Void",
                  [("::TestMethods::setObject::arg1", "arg1", "Object", False, False)])])
    checkMethod(methods[10], "::TestMethods::setAny",
                "setAny",
                [("Void",
                  [("::TestMethods::setAny::arg1", "arg1", "Any", False, False)])])
    checkMethod(methods[11], "::TestMethods::doFloats",
                "doFloats",
                [("Float",
                  [("::TestMethods::doFloats::arg1", "arg1", "Float", False, False)])])

    # Negative/positive validation cases below: each resets the parser,
    # parses a small interface, and checks whether an error was raised.
    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            void foo(optional float bar = 1);
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(not threw, "Should allow integer to float type corecion")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            [GetterThrows] void foo();
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(threw, "Should not allow [GetterThrows] on methods")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            [SetterThrows] void foo();
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(threw, "Should not allow [SetterThrows] on methods")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            [Throw] void foo();
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(threw, "Should spell [Throws] correctly on methods")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            void __noSuchMethod__();
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(threw, "Should not allow __noSuchMethod__ methods")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            [Throws, LenientFloat]
            void foo(float myFloat);
            [Throws]
            void foo();
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(not threw, "Should allow LenientFloat to be only in a specific overload")

    parser = parser.reset()
    parser.parse("""
      interface A {
        [Throws]
        void foo();
        [Throws, LenientFloat]
        void foo(float myFloat);
      };
    """)
    results = parser.finish()
    iface = results[0]
    methods = iface.members
    lenientFloat = methods[0].getExtendedAttribute("LenientFloat")
    harness.ok(lenientFloat is not None, "LenientFloat in overloads must be added to the method")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            [Throws, LenientFloat]
            void foo(float myFloat);
            [Throws]
            void foo(float myFloat, float yourFloat);
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(threw, "Should prevent overloads from getting different restricted float behavior")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            [Throws]
            void foo(float myFloat, float yourFloat);
            [Throws, LenientFloat]
            void foo(float myFloat);
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(threw, "Should prevent overloads from getting different restricted float behavior (2)")

    parser = parser.reset()
    threw = False
    try:
        parser.parse("""
          interface A {
            [Throws, LenientFloat]
            void foo(float myFloat);
            [Throws, LenientFloat]
            void foo(short myShort);
          };
        """)
        results = parser.finish()
    except Exception as x:
        threw = True

    harness.ok(threw, "Should prevent overloads from getting redundant [LenientFloat]")
|
mpl-2.0
|
alexlib/Qt-Python-Binding-Examples
|
style_and_theme/disable_highlight.py
|
1
|
1096
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
disable highlight focused widget
Tested environment:
Mac OS X 10.6.8
http://stackoverflow.com/questions/1987546/qt4-stylesheets-and-focus-rect
"""
import sys
try:
from PySide import QtCore
from PySide import QtGui
except ImportError:
from PyQt4 import QtCore
from PyQt4 import QtGui
class Demo(QtGui.QWidget):
    """Side-by-side demo: a tree view with the default focus highlight and
    one with the Mac focus rectangle disabled."""

    def __init__(self):
        super(Demo, self).__init__()
        # Window position and size.
        x, y, w, h = 500, 200, 300, 400
        self.setGeometry(x, y, w, h)

        # highlight
        tv = QtGui.QTreeView(self)
        tv.setGeometry(10, 10, 100, 100)

        # disable highlight
        tv2 = QtGui.QTreeView(self)
        tv2.setGeometry(10, 110, 100, 100)
        tv2.setFrameShape(QtGui.QFrame.NoFrame)
        tv2.setFrameShadow(QtGui.QFrame.Plain)
        # WA_MacShowFocusRect=0 removes the blue focus ring drawn around
        # the focused widget on Mac OS X (see the linked SO question above).
        tv2.setAttribute(QtCore.Qt.WA_MacShowFocusRect, 0)

    def show_and_raise(self):
        """Show the window and bring it to the front of the window stack."""
        self.show()
        self.raise_()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
demo = Demo()
demo.show_and_raise()
sys.exit(app.exec_())
|
bsd-3-clause
|
unseenlaser/machinekit
|
src/machinetalk/messagebus/python-mockup/commandsubmitter.py
|
8
|
2327
|
# Mock-up command submitter for the machinetalk message bus.
# NOTE: Python 2 script (print statements; str, not bytes, zmq frames).
import os, time
import zmq
from message_pb2 import Container
from types_pb2 import *
from optparse import OptionParser

# Command-line options: bus URIs, actor naming, batching/iteration counts.
parser = OptionParser()
parser.add_option("-c", "--cmd", dest="cmduri", default="tcp://127.0.0.1:5571",
                  help="command URI")
parser.add_option("-r", "--response", dest="responseuri",
                  default="tcp://127.0.0.1:5573",
                  help="response URI")
parser.add_option("-n", "--name", dest="actor", default="task",
                  help="use this as actor name")
parser.add_option("-d", "--destination", dest="destination", default="component",
                  help="use this actor as command destination")
# NOTE(review): the help strings for -b and -i look copy-pasted; they
# presumably mean "commands per batch" and "number of main-loop iterations".
parser.add_option("-b", "--batch", dest="batch", default=1,type="int",
                  help="use this actor as command destination")
parser.add_option("-i", "--iterations", dest="iter", default=1,type="int",
                  help="to run main loop")
parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                  help="print actions as they happen")
parser.add_option("-F", "--fast", action="store_true", dest="fast",
                  help="do not sleep after an iteration")

(options, args) = parser.parse_args()
me = options.actor

context = zmq.Context()
cmd = context.socket(zmq.XSUB)
cmd.connect(options.cmduri)
# subscribe XSUB-style by sending a message \001<topic>
cmd.send("\001%s" % (me))

response = context.socket(zmq.XSUB)
response.connect(options.responseuri)
response.send("\001%s" % (me))

i = 0
# Protobuf containers: tx is serialized once and reused as the payload;
# rx deserializes incoming response frames.
tx = Container()
rx = Container()
tx.type = MT_EMCMOT_SET_LINE
c = tx.SerializeToString()

time.sleep(1)  # let subscriptions stabilize

for j in range(options.iter):
    # Send one batch of commands...
    for n in range(options.batch):
        msg = "cmd %d" % i
        i += 1
        # Multipart frame layout: [origin, destination, text, payload, payload].
        mp = [me, options.destination,msg,c, c]
        if options.verbose:
            print "---%s msg %s" % (me,mp)
        cmd.send_multipart(mp)
    # ...then collect one response per command sent.
    for n in range(options.batch):
        msg = response.recv_multipart()
        if options.verbose:
            #print "---%s receive response: %s" %(me, msg)
            print "---%s receive response: %s" %(me, msg)
            for m in msg[3:]:
                rx.ParseFromString(m)
                print str(rx)
    if not options.fast:
        time.sleep(1)

time.sleep(1)
context.destroy(linger=0)
|
lgpl-2.1
|
tshrjn/courses
|
cs/udacity/cs101-intro-cs/code/lesson6/search_engine.py
|
4
|
2795
|
import subprocess, sys
# gets the first link on any webpage
def get_next_target(page):
    """Find the first <a href=...> link in *page*.

    Returns (url, end_position) where end_position is just past the URL in
    the original string, or None when no link is present.
    """
    normalized = page
    # Collapse spacing around '=' so "href=" is found uniformly.
    for spaced in ('= ', ' =', ' = '):
        normalized = normalized.replace(spaced, '=')
    link_start = normalized.find("<a")
    if link_start == -1:
        return None
    href_pos = normalized.find("href=", link_start) + len("href=")
    url_start = href_pos + 1
    # The URL is delimited by whichever quote character follows href=.
    quote = '"' if normalized[href_pos] == '"' else "'"
    url = normalized[url_start:normalized.find(quote, url_start)]
    end_marker = page.find(url) + len(url) + 1
    return url, end_marker
# prints out all of the links on a webpage
def get_all_links(page):
    """Collect every link URL found in *page*, in order of appearance."""
    found = []
    remainder = page
    while True:
        target = get_next_target(remainder)
        if target is None:
            return found
        url, marker = target
        found.append(url)
        # Continue scanning after the URL just consumed.
        remainder = remainder[marker:]
# joins two lists, ignoring shared items
def union(a, b):
    """Append to *a* (in place) each element of *b* not already in *a*."""
    # Use a plain loop: the original abused a list comprehension purely for
    # its side effect, building and discarding a throwaway list of Nones.
    for item in b:
        if item not in a:
            a.append(item)
def get_page(url):
    """Fetch *url* via curl and return its body; exit the program on failure."""
    try:
        return subprocess.check_output(['curl', '-s', url])
    except subprocess.CalledProcessError as err:
        sys.exit('\n\033[91mfatal: %s\n\033[0m' % err)
def lookup(index, keyword):
    """Return the list of URLs indexed under *keyword*, or None if absent."""
    # dict.get already returns None for missing keys: one lookup instead of
    # a membership test followed by a subscript.
    return index.get(keyword)
def add_to_index(index, keyword, url):
    """Record that *url* contains *keyword* in the inverted *index*."""
    # setdefault collapses the test-then-append pattern into a single
    # dictionary lookup.
    index.setdefault(keyword, []).append(url)
def add_page_to_index(index, url, content):
    """Index every whitespace-separated word of *content* under *url*."""
    for token in content.split():
        add_to_index(index, token, url)
# crawls the web for all links reachable by a seed page
def crawl_web(seed):
    """Crawl every page reachable from *seed*.

    Returns (index, graph): an inverted keyword index (word -> [urls]) and
    the link graph (url -> list of outgoing link urls).
    """
    to_crawl = [seed]
    crawled = []
    index = {}
    graph = {}
    while to_crawl:
        # pop() takes from the end of the list, so the crawl is depth-first.
        page = to_crawl.pop()
        if page not in crawled:
            content = get_page(page)
            add_page_to_index(index, page, content)
            outlinks = get_all_links(content)
            graph[page] = outlinks
            # Queue only links not already scheduled (union mutates to_crawl).
            union(to_crawl, outlinks)
            crawled.append(page)
    return index, graph
def compute_ranks(graph):
    """Run a fixed-iteration PageRank over *graph* (page -> outlink list)."""
    damping = 0.8     # damping factor
    iterations = 10   # arbitrary, but decent
    npages = len(graph)  # nodes in the graph

    # Every page starts with an equal share of rank.
    ranks = dict.fromkeys(graph, 1.0 / npages)

    for _ in range(iterations):
        updated = {}
        for page in graph:
            rank = (1 - damping) / npages
            # Gather contributions from every node linking to this page.
            for node in graph:
                if page in graph[node]:
                    rank += damping * ranks[node] / len(graph[node])
            updated[page] = rank
        ranks = updated
    return ranks
# Driver: build the index and link graph from the seed page, then rank pages.
index, graph = crawl_web('https://www.udacity.com/cs101x/urank/index.html')
ranks = compute_ranks(graph)
# NOTE: Python 2 print statement; this module predates Python 3.
print ranks
|
mit
|
pongem/python-bot-project
|
appengine/standard/botapp/lib/django/templatetags/static.py
|
91
|
4391
|
from django import template
from django.apps import apps
from django.utils.encoding import iri_to_uri
from django.utils.six.moves.urllib.parse import urljoin
register = template.Library()
class PrefixNode(template.Node):
    """Template node that renders (or stores) a URL-prefix setting such as
    STATIC_URL or MEDIA_URL."""

    def __repr__(self):
        return "<PrefixNode for %r>" % self.name

    def __init__(self, varname=None, name=None):
        if name is None:
            raise template.TemplateSyntaxError(
                "Prefix nodes must be given a name to return.")
        self.varname = varname
        self.name = name

    @classmethod
    def handle_token(cls, parser, token, name):
        """
        Class method to parse prefix node and return a Node.
        """
        # token.split_contents() isn't useful here because tags using this method don't accept variable as arguments
        tokens = token.contents.split()
        if len(tokens) > 1 and tokens[1] != 'as':
            raise template.TemplateSyntaxError(
                "First argument in '%s' must be 'as'" % tokens[0])
        if len(tokens) > 1:
            # NOTE(review): assumes a third token follows 'as'; a bare
            # "{% tag as %}" would raise IndexError here — confirm upstream
            # handling.
            varname = tokens[2]
        else:
            varname = None
        return cls(varname, name)

    @classmethod
    def handle_simple(cls, name):
        # Fall back to an empty prefix when settings cannot be imported;
        # otherwise read the named setting and make it URI-safe.
        try:
            from django.conf import settings
        except ImportError:
            prefix = ''
        else:
            prefix = iri_to_uri(getattr(settings, name, ''))
        return prefix

    def render(self, context):
        prefix = self.handle_simple(self.name)
        if self.varname is None:
            return prefix
        # "as varname" form: store in the context and render nothing.
        context[self.varname] = prefix
        return ''
@register.tag
def get_static_prefix(parser, token):
    """
    Populates a template variable with the static prefix,
    ``settings.STATIC_URL``.
    Usage::
        {% get_static_prefix [as varname] %}
    Examples::
        {% get_static_prefix %}
        {% get_static_prefix as static_prefix %}
    """
    # All parsing is delegated to PrefixNode; only the settings attribute
    # name differs from get_media_prefix below.
    return PrefixNode.handle_token(parser, token, "STATIC_URL")
@register.tag
def get_media_prefix(parser, token):
    """
    Populates a template variable with the media prefix,
    ``settings.MEDIA_URL``.
    Usage::
        {% get_media_prefix [as varname] %}
    Examples::
        {% get_media_prefix %}
        {% get_media_prefix as media_prefix %}
    """
    # Mirrors get_static_prefix; delegates all parsing to PrefixNode.
    return PrefixNode.handle_token(parser, token, "MEDIA_URL")
class StaticNode(template.Node):
    """Template node resolving a static-asset path to its URL: via the
    staticfiles storage backend when that app is installed, otherwise by
    joining the path onto STATIC_URL.
    """
    def __init__(self, varname=None, path=None):
        if path is None:
            raise template.TemplateSyntaxError(
                "Static template nodes must be given a path to return.")
        self.path = path
        self.varname = varname
    def url(self, context):
        # self.path is a compiled FilterExpression; resolve it against the
        # render context before building the URL.
        path = self.path.resolve(context)
        return self.handle_simple(path)
    def render(self, context):
        url = self.url(context)
        if self.varname is None:
            return url
        # ``as varname`` form: store the URL and render nothing.
        context[self.varname] = url
        return ''
    @classmethod
    def handle_simple(cls, path):
        if apps.is_installed('django.contrib.staticfiles'):
            # Deferred import: staticfiles may not be importable when the
            # app isn't installed.
            from django.contrib.staticfiles.storage import staticfiles_storage
            return staticfiles_storage.url(path)
        else:
            return urljoin(PrefixNode.handle_simple("STATIC_URL"), path)
    @classmethod
    def handle_token(cls, parser, token):
        """
        Class method to parse prefix node and return a Node.

        Accepts ``{% static path %}`` or ``{% static path as varname %}``.
        """
        bits = token.split_contents()
        if len(bits) < 2:
            raise template.TemplateSyntaxError(
                "'%s' takes at least one argument (path to file)" % bits[0])
        path = parser.compile_filter(bits[1])
        if len(bits) >= 2 and bits[-2] == 'as':
            # NOTE(review): assumes the tag is exactly
            # ``{% static path as varname %}`` (4 bits); with extra tokens
            # before 'as', bits[3] would not be the varname -- confirm.
            varname = bits[3]
        else:
            varname = None
        return cls(varname, path)
@register.tag('static')
def do_static(parser, token):
    """
    Joins the given path with the STATIC_URL setting.
    Usage::
        {% static path [as varname] %}
    Examples::
        {% static "myapp/css/base.css" %}
        {% static variable_with_path %}
        {% static "myapp/css/base.css" as admin_base_css %}
        {% static variable_with_path as varname %}
    """
    # All parsing lives in StaticNode.handle_token.
    return StaticNode.handle_token(parser, token)
def static(path):
    """
    Given a relative path to a static asset, return the absolute path to the
    asset.

    Plain-Python counterpart of the {% static %} tag, sharing StaticNode's
    resolution logic (staticfiles storage or STATIC_URL join).
    """
    return StaticNode.handle_simple(path)
|
apache-2.0
|
alfonsodev/ansible-modules-extras
|
monitoring/stackdriver.py
|
58
|
6848
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
module: stackdriver
short_description: Send code deploy and annotation events to stackdriver
description:
- Send code deploy and annotation events to Stackdriver
version_added: "1.6"
author: "Ben Whaley (@bwhaley)"
options:
key:
description:
- API key.
required: true
default: null
event:
description:
- The type of event to send, either annotation or deploy
choices: ['annotation', 'deploy']
required: false
default: null
revision_id:
description:
- The revision of the code that was deployed. Required for deploy events
required: false
default: null
deployed_by:
description:
- The person or robot responsible for deploying the code
required: false
default: "Ansible"
deployed_to:
description:
- "The environment code was deployed to. (ie: development, staging, production)"
required: false
default: null
repository:
description:
- The repository (or project) deployed
required: false
default: null
msg:
description:
- The contents of the annotation message, in plain text. Limited to 256 characters. Required for annotation.
required: false
default: null
annotated_by:
description:
- The person or robot who the annotation should be attributed to.
required: false
default: "Ansible"
level:
description:
- one of INFO/WARN/ERROR, defaults to INFO if not supplied. May affect display.
choices: ['INFO', 'WARN', 'ERROR']
required: false
default: 'INFO'
instance_id:
description:
- id of an EC2 instance that this event should be attached to, which will limit the contexts where this event is shown
required: false
default: null
event_epoch:
description:
- "Unix timestamp of where the event should appear in the timeline, defaults to now. Be careful with this."
required: false
default: null
'''
EXAMPLES = '''
- stackdriver: key=AAAAAA event=deploy deployed_to=production deployed_by=leeroyjenkins repository=MyWebApp revision_id=abcd123
- stackdriver: key=AAAAAA event=annotation msg="Greetings from Ansible" annotated_by=leeroyjenkins level=WARN instance_id=i-abcd1234
'''
# ===========================================
# Stackdriver module specific support methods.
#
try:
import json
except ImportError:
import simplejson as json
def send_deploy_event(module, key, revision_id, deployed_by='Ansible', deployed_to=None, repository=None):
    """Send a deploy event to Stackdriver"""
    deploy_api = "https://event-gateway.stackdriver.com/v1/deployevent"
    # revision_id and deployed_by are always sent; the rest only when given.
    payload = {
        'revision_id': revision_id,
        'deployed_by': deployed_by,
    }
    if deployed_to:
        payload['deployed_to'] = deployed_to
    if repository:
        payload['repository'] = repository
    return do_send_request(module, deploy_api, payload, key)
def send_annotation_event(module, key, msg, annotated_by='Ansible', level=None, instance_id=None, event_epoch=None):
    """Send an annotation event to Stackdriver"""
    annotation_api = "https://event-gateway.stackdriver.com/v1/annotationevent"
    payload = {'message': msg}
    # Include each optional field only when a truthy value was supplied.
    optional_fields = (
        ('annotated_by', annotated_by),
        ('level', level),
        ('instance_id', instance_id),
        ('event_epoch', event_epoch),
    )
    for field_name, field_value in optional_fields:
        if field_value:
            payload[field_name] = field_value
    return do_send_request(module, annotation_api, payload, key)
def do_send_request(module, url, params, key):
    """POST *params* as JSON to a Stackdriver gateway endpoint.

    Fails the Ansible module on any non-200 response; returns None on success.
    """
    headers = {
        'Content-Type': 'application/json',
        'x-stackdriver-apikey': key
    }
    body = json.dumps(params)
    response, info = fetch_url(module, url, headers=headers, data=body, method='POST')
    if info['status'] != 200:
        module.fail_json(msg="Unable to send msg: %s" % info['msg'])
# ===========================================
# Module execution.
#
def main():
    """Entry point: parse module arguments and send a deploy or annotation event."""
    module = AnsibleModule(
        argument_spec=dict(
            key=dict(required=True),
            event=dict(required=True, choices=['deploy', 'annotation']),
            msg=dict(),
            revision_id=dict(),
            annotated_by=dict(default='Ansible'),
            level=dict(default='INFO', choices=['INFO', 'WARN', 'ERROR']),
            instance_id=dict(),
            event_epoch=dict(),
            deployed_by=dict(default='Ansible'),
            deployed_to=dict(),
            repository=dict(),
        ),
        supports_check_mode=True
    )
    key = module.params["key"]
    event = module.params["event"]
    # Annotation params
    msg = module.params["msg"]
    annotated_by = module.params["annotated_by"]
    level = module.params["level"]
    instance_id = module.params["instance_id"]
    event_epoch = module.params["event_epoch"]
    # Deploy params
    revision_id = module.params["revision_id"]
    deployed_by = module.params["deployed_by"]
    deployed_to = module.params["deployed_to"]
    repository = module.params["repository"]
    ##################################################################
    # deploy requires revision_id
    # annotation requires msg
    # We verify these manually
    ##################################################################
    if event == 'deploy':
        if not revision_id:
            module.fail_json(msg="revision_id required for deploy events")
        try:
            send_deploy_event(module, key, revision_id, deployed_by, deployed_to, repository)
        except Exception as e:
            # `except Exception, e` was Python-2-only syntax; `as` works on
            # Python 2.6+ and 3.x.  Also fixed the "sent" typo in the message.
            module.fail_json(msg="unable to send deploy event: %s" % e)
    if event == 'annotation':
        if not msg:
            module.fail_json(msg="msg required for annotation events")
        try:
            send_annotation_event(module, key, msg, annotated_by, level, instance_id, event_epoch)
        except Exception as e:
            module.fail_json(msg="unable to send annotation event: %s" % e)
    changed = True
    module.exit_json(changed=changed, deployed_by=deployed_by)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
|
gpl-3.0
|
Motaku/ansible
|
contrib/inventory/serf.py
|
395
|
3032
|
#!/usr/bin/env python
# (c) 2015, Marc Abramowitz <marca@surveymonkey.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Dynamic inventory script which lets you use nodes discovered by Serf
# (https://serfdom.io/).
#
# Requires the `serfclient` Python module from
# https://pypi.python.org/pypi/serfclient
#
# Environment variables
# ---------------------
# - `SERF_RPC_ADDR`
# - `SERF_RPC_AUTH`
#
# These variables are described at https://www.serfdom.io/docs/commands/members.html#_rpc_addr
import argparse
import collections
import os
import sys
# https://pypi.python.org/pypi/serfclient
from serfclient import SerfClient, EnvironmentConfig
try:
import json
except ImportError:
import simplejson as json
_key = 'serf'
def _serf_client():
    """Build a SerfClient from the SERF_RPC_ADDR / SERF_RPC_AUTH environment."""
    config = EnvironmentConfig()
    client = SerfClient(host=config.host, port=config.port, rpc_auth=config.auth_key)
    return client
def get_serf_members_data():
    """Return the raw member records reported by the local Serf agent."""
    response = _serf_client().members()
    return response.body['Members']
def get_nodes(data):
    """Extract the node names from the raw Serf member records."""
    return list(map(lambda member: member['Name'], data))
def get_groups(data):
    """Group node names by each tag value they carry.

    Returns a defaultdict mapping tag value -> list of node names; a node
    appears once per distinct tag it has.
    """
    groups = collections.defaultdict(list)
    for node in data:
        # Tag keys are not used for grouping, so iterate values() directly
        # instead of unpacking items() and discarding the key.
        for value in node['Tags'].values():
            groups[value].append(node['Name'])
    return groups
def get_meta(data):
    """Build the Ansible ``_meta`` block: per-host variables from Serf tags."""
    hostvars = {}
    for member in data:
        hostvars[member['Name']] = member['Tags']
    return {'hostvars': hostvars}
def print_list():
    """Emit the full inventory (top group, tag groups and host vars) as JSON."""
    data = get_serf_members_data()
    inventory = {_key: get_nodes(data), '_meta': get_meta(data)}
    # Each tag value becomes its own top-level group.
    inventory.update(get_groups(data))
    print(json.dumps(inventory))
def print_host(host):
    """Emit the tag-derived variables for a single *host* as JSON."""
    meta = get_meta(get_serf_members_data())
    print(json.dumps(meta['hostvars'][host]))
def get_args(args_list):
    """Parse the inventory CLI: exactly one of --list / --host is required."""
    parser = argparse.ArgumentParser(
        description='ansible inventory script reading from serf cluster')
    mutex_group = parser.add_mutually_exclusive_group(required=True)
    mutex_group.add_argument('--list', action='store_true',
                             help='list all hosts from serf cluster')
    mutex_group.add_argument('--host',
                             help='display variables for a host')
    return parser.parse_args(args_list)
def main(args_list):
    """Dispatch to full-inventory or single-host output based on the CLI."""
    parsed = get_args(args_list)
    # The mutually exclusive group guarantees at most one branch runs.
    if parsed.list:
        print_list()
    if parsed.host:
        print_host(parsed.host)
if __name__ == '__main__':
main(sys.argv[1:])
|
gpl-3.0
|
Schevo/kiwi
|
kiwi/desktopparser.py
|
3
|
5236
|
#
# Kiwi: a Framework and Enhanced Widgets for Python
#
# Copyright (C) 2006 Johan Dahlin <jdahlin@async.com.br>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
# Author(s): Johan Dahlin <jdahlin@async.com.br>
#
# Based on gkeyfile.c from glib, written by
#
# Ray Strode <rstrode@redhat.com>
# Matthias Clasen <mclasen@redhat.com>
#
from ConfigParser import ConfigParser
# Private
def _localize(option, locale):
if locale:
option = option + '[%s]' % locale
return option
def _tobool(s):
if s == 'true':
return True
return False
def _frombool(s):
if s:
return 'true'
return 'false'
class DesktopParser(ConfigParser):
    """
    A DesktopParser for GNOME/KDE .desktop files.
    The API is similar to GKeyFile from glib.
    Example:
    >>> parser = DesktopParser()
    >>> parser.read('/usr/share/applications/gnome-terminal.desktop')
    >>> parser.get_locale('Desktop Entry', 'Comment', 'pt')
    """
    # NOTE: Python 2 module (``from ConfigParser import ...`` and
    # list-returning map()); porting to Python 3 would need configparser
    # and list(map(...)) in the *_list getters.
    def __init__(self, defaults=None):
        ConfigParser.__init__(self, defaults)
        # .desktop list values are separated by ';' unless overridden.
        self._list_separator = ';'
    # ConfigParser overrides
    def optionxform(self, optionstr):
        # .desktop files are case sensitive
        # The default implementation makes it lowercase,
        # so override to just use it as it was read
        return optionstr
    # Public
    def set_list_separator(self, separator):
        """
        Sets the character which is used to separate
        values in lists. Typically ';' or ',' are used
        as separators. The default list separator is ';'.
        @param separator: the separator
        """
        self._list_separator = separator
    def set_locale(self, section, option, locale, value):
        """
        Set the value of a localized option (stored as ``option[locale]``).
        @param section: section name
        @param option: an option
        @param locale: a locale
        @param value: value to set
        """
        self.set(section, _localize(option, locale), value)
    def get_locale(self, section, option, locale):
        """
        Get the value of a localized option (read from ``option[locale]``).
        @param section: section name
        @param option: an option
        @param locale: a locale
        """
        return self.get(section, _localize(option, locale))
    def get_string_list(self, section, option):
        """
        Get an option value as a list of strings, split on the
        configured list separator.
        @param section: section name
        @param option: an option
        """
        return self.get(section, option).split(self._list_separator)
    def set_string_list(self, section, option, values):
        """
        Set a list of string values.
        @param section: section name
        @param option: an option
        @param values: list of string values
        """
        value = self._list_separator.join(values)
        self.set(section, option, value)
    def get_integer_list(self, section, option):
        """
        Get an option value as a list of integers.
        @param section: section name
        @param option: an option
        """
        return map(int, self.get_string_list(section, option))
    def set_integer_list(self, section, option, values):
        """
        Set a list with integer values.
        @param section: section name
        @param option: an option
        @param values: list of integer values
        """
        self.set_string_list(section, option, map(str, values))
    def get_boolean_list(self, section, option):
        """
        Get an option value as a list of booleans.
        @param section: section name
        @param option: an option
        """
        return map(_tobool, self.get_string_list(section, option))
    def set_boolean_list(self, section, option, values):
        """
        Set a list with boolean values.
        @param section: section name
        @param option: an option
        @param values: list of boolean values
        """
        self.set_string_list(section, option, map(_frombool, values))
    def set_string_list_locale(self, section, option, locale, values):
        """
        Set a localized string-list option.
        @param section: section name
        @param option: an option
        @param locale: a locale
        @param values: list of string values
        """
        self.set_string_list(section, _localize(option, locale), values)
    def get_string_list_locale(self, section, option, locale):
        """
        Get a localized option value as a list of strings.
        @param section: section name
        @param option: an option
        @param locale: a locale
        """
        return self.get_string_list(section, _localize(option, locale))
|
lgpl-2.1
|
cristianquaglio/odoo
|
addons/website_mail/controllers/main.py
|
149
|
4279
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.web.http import request
class WebsiteMail(http.Controller):
    """Public website JSON endpoints to follow/unfollow a record by email
    and to query the current visitor's follower state."""
    @http.route(['/website_mail/follow'], type='json', auth="public", website=True)
    def website_message_subscribe(self, id=0, object=None, message_is_follower="on", email=False, **post):
        # Toggle follower state on record *id* of model *object*.
        # message_is_follower appears to carry the widget's CURRENT state:
        # when it is 'on' this handler unsubscribes and returns False,
        # otherwise it subscribes and returns True (the new state).
        cr, uid, context = request.cr, request.uid, request.context
        partner_obj = request.registry['res.partner']
        user_obj = request.registry['res.users']
        _id = int(id)
        _message_is_follower = message_is_follower == 'on'
        _object = request.registry[object]
        # search partner_id
        public_id = request.website.user_id.id
        if uid != public_id:
            # Logged-in visitor: use their own partner record.
            partner_ids = [user_obj.browse(cr, uid, uid, context).partner_id.id]
        else:
            # mail_thread method
            partner_ids = _object._find_partner_from_emails(
                cr, SUPERUSER_ID, _id, [email], context=context, check_followers=True)
            if not partner_ids or not partner_ids[0]:
                # Unknown email: create a minimal partner named after the
                # local part of the address.
                name = email.split('@')[0]
                partner_ids = [partner_obj.create(cr, SUPERUSER_ID, {'name': name, 'email': email}, context=context)]
        # add or remove follower
        if _message_is_follower:
            _object.check_access_rule(cr, uid, [_id], 'read', context)
            _object.message_unsubscribe(cr, SUPERUSER_ID, [_id], partner_ids, context=context)
            return False
        else:
            _object.check_access_rule(cr, uid, [_id], 'read', context)
            # add partner to session
            request.session['partner_id'] = partner_ids[0]
            _object.message_subscribe(cr, SUPERUSER_ID, [_id], partner_ids, context=context)
            return True
    @http.route(['/website_mail/is_follower'], type='json', auth="public", website=True)
    def call(self, model, id, **post):
        # Report whether the current visitor follows record *id* of *model*,
        # plus their email and login state (used to render the follow widget).
        id = int(id)
        cr, uid, context = request.cr, request.uid, request.context
        partner_obj = request.registry.get('res.partner')
        users_obj = request.registry.get('res.users')
        obj = request.registry.get(model)
        partner_id = None
        public_id = request.website.user_id.id
        if uid != public_id:
            partner_id = users_obj.browse(cr, SUPERUSER_ID, uid, context).partner_id
        elif request.session.get('partner_id'):
            # Anonymous visitor who subscribed earlier in this session.
            partner_id = partner_obj.browse(cr, SUPERUSER_ID, request.session.get('partner_id'), context)
        email = partner_id and partner_id.email or ""
        values = {
            'is_user': uid != public_id,
            'email': email,
            'is_follower': False,
            'alias_name': False,
        }
        if not obj:
            return values
        obj_ids = obj.exists(cr, SUPERUSER_ID, [id], context=context)
        if obj_ids:
            if partner_id:
                # Follower iff exactly one mail.followers row links this
                # partner to the record.
                values['is_follower'] = len(
                    request.registry['mail.followers'].search(
                        cr, SUPERUSER_ID, [
                            ('res_model', '=', model),
                            ('res_id', '=', obj_ids[0]),
                            ('partner_id', '=', partner_id.id)
                        ], context=context)) == 1
        return values
|
apache-2.0
|
chrishas35/django-travis-ci
|
django/db/models/sql/subqueries.py
|
87
|
8259
|
"""
Query subclasses which provide extra functionality beyond simple data retrieval.
"""
from django.core.exceptions import FieldError
from django.db.models.fields import DateField, FieldDoesNotExist
from django.db.models.sql.constants import *
from django.db.models.sql.datastructures import Date
from django.db.models.sql.query import Query
from django.db.models.sql.where import AND, Constraint
from django.utils.functional import Promise
from django.utils.encoding import force_unicode
__all__ = ['DeleteQuery', 'UpdateQuery', 'InsertQuery', 'DateQuery',
'AggregateQuery']
class DeleteQuery(Query):
    """
    Delete queries are done through this class, since they are more constrained
    than general queries.
    """
    compiler = 'SQLDeleteCompiler'
    def do_query(self, table, where, using):
        # Point this query at a single table with the given WHERE tree and
        # execute it on the *using* database alias.
        self.tables = [table]
        self.where = where
        self.get_compiler(using).execute_sql(None)
    def delete_batch(self, pk_list, using, field=None):
        """
        Set up and execute delete queries for all the objects in pk_list.
        More than one physical query may be executed if there are a
        lot of values in pk_list.
        """
        if not field:
            field = self.model._meta.pk
        # Chunk the IN clause by GET_ITERATOR_CHUNK_SIZE, issuing one
        # DELETE per chunk with a fresh WHERE tree.
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            where = self.where_class()
            where.add((Constraint(None, field.column, field), 'in',
                    pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]), AND)
            self.do_query(self.model._meta.db_table, where, using=using)
class UpdateQuery(Query):
    """
    Represents an "update" SQL query.
    """
    compiler = 'SQLUpdateCompiler'
    def __init__(self, *args, **kwargs):
        super(UpdateQuery, self).__init__(*args, **kwargs)
        self._setup_query()
    def _setup_query(self):
        """
        Runs on initialization and after cloning. Any attributes that would
        normally be set in __init__ should go in here, instead, so that they
        are also set up after a clone() call.
        """
        self.values = []
        self.related_ids = None
        if not hasattr(self, 'related_updates'):
            self.related_updates = {}
    def clone(self, klass=None, **kwargs):
        # Copy related_updates so the clone can diverge without mutating us.
        return super(UpdateQuery, self).clone(klass,
                related_updates=self.related_updates.copy(), **kwargs)
    def update_batch(self, pk_list, values, using):
        # Apply *values* to the rows in pk_list, chunked by
        # GET_ITERATOR_CHUNK_SIZE so each IN clause stays bounded.
        pk_field = self.model._meta.pk
        self.add_update_values(values)
        for offset in range(0, len(pk_list), GET_ITERATOR_CHUNK_SIZE):
            self.where = self.where_class()
            self.where.add((Constraint(None, pk_field.column, pk_field), 'in',
                    pk_list[offset:offset + GET_ITERATOR_CHUNK_SIZE]),
                    AND)
            self.get_compiler(using).execute_sql(None)
    def add_update_values(self, values):
        """
        Convert a dictionary of field name to value mappings into an update
        query. This is the entry point for the public update() method on
        querysets.
        """
        values_seq = []
        for name, val in values.iteritems():
            field, model, direct, m2m = self.model._meta.get_field_by_name(name)
            if not direct or m2m:
                raise FieldError('Cannot update model field %r (only non-relations and foreign keys permitted).' % field)
            if model:
                # Field lives on an ancestor model; route it to a separate
                # per-ancestor update query instead of this one.
                self.add_related_update(model, field, val)
                continue
            values_seq.append((field, model, val))
        return self.add_update_fields(values_seq)
    def add_update_fields(self, values_seq):
        """
        Turn a sequence of (field, model, value) triples into an update query.
        Used by add_update_values() as well as the "fast" update path when
        saving models.
        """
        # Check that no Promise object passes to the query. Refs #10498.
        values_seq = [(value[0], value[1], force_unicode(value[2]))
                      if isinstance(value[2], Promise) else value
                      for value in values_seq]
        self.values.extend(values_seq)
    def add_related_update(self, model, field, value):
        """
        Adds (name, value) to an update query for an ancestor model.
        Updates are coalesced so that we only run one update query per ancestor.
        """
        try:
            self.related_updates[model].append((field, None, value))
        except KeyError:
            self.related_updates[model] = [(field, None, value)]
    def get_related_updates(self):
        """
        Returns a list of query objects: one for each update required to an
        ancestor model. Each query will have the same filtering conditions as
        the current query but will only update a single table.
        """
        if not self.related_updates:
            return []
        result = []
        for model, values in self.related_updates.iteritems():
            query = UpdateQuery(model)
            query.values = values
            if self.related_ids is not None:
                query.add_filter(('pk__in', self.related_ids))
            result.append(query)
        return result
class InsertQuery(Query):
    """INSERT query: holds the fields to write and the objects supplying
    their values until the compiler renders the SQL."""
    compiler = 'SQLInsertCompiler'
    def __init__(self, *args, **kwargs):
        super(InsertQuery, self).__init__(*args, **kwargs)
        self.fields = []
        self.objs = []
    def clone(self, klass=None, **kwargs):
        # Shallow-copy fields/objs so the clone owns its own lists.
        extras = {
            'fields': self.fields[:],
            'objs': self.objs[:],
            'raw': self.raw,
        }
        extras.update(kwargs)
        return super(InsertQuery, self).clone(klass, **extras)
    def insert_values(self, fields, objs, raw=False):
        """
        Set up the insert query from *fields* (the model fields to write)
        and *objs* (the model instances supplying the values).
        If *raw* is True, the values are inserted directly into the query,
        rather than passed as SQL parameters. This provides a way to insert
        NULL and DEFAULT keywords into the query, for example.
        """
        self.fields = fields
        # Check that no Promise object reaches the DB. Refs #10498.
        for field in fields:
            for obj in objs:
                value = getattr(obj, field.attname)
                if isinstance(value, Promise):
                    setattr(obj, field.attname, force_unicode(value))
        self.objs = objs
        self.raw = raw
class DateQuery(Query):
    """
    A DateQuery is a normal query, except that it specifically selects a single
    date field. This requires some special handling when converting the results
    back to Python objects, so we put it in a separate class.
    """
    compiler = 'SQLDateCompiler'
    def add_date_select(self, field_name, lookup_type, order='ASC'):
        """
        Converts the query into a date extraction query.
        """
        try:
            result = self.setup_joins(
                field_name.split(LOOKUP_SEP),
                self.get_meta(),
                self.get_initial_alias(),
                False
            )
        except FieldError:
            # Re-raise with a clearer message naming the model and field.
            raise FieldDoesNotExist("%s has no field named '%s'" % (
                self.model._meta.object_name, field_name
            ))
        field = result[0]
        assert isinstance(field, DateField), "%r isn't a DateField." \
                % field.name
        # Last join alias is the table actually holding the date column.
        alias = result[3][-1]
        select = Date((alias, field.column), lookup_type)
        self.select = [select]
        self.select_fields = [None]
        self.select_related = False # See #7097.
        self.set_extra_mask([])
        # DISTINCT so each truncated date appears once in the result.
        self.distinct = True
        self.order_by = order == 'ASC' and [1] or [-1]
        if field.null:
            # NULL dates are meaningless for extraction; filter them out.
            self.add_filter(("%s__isnull" % field_name, False))
class AggregateQuery(Query):
    """
    An AggregateQuery takes another query as a parameter to the FROM
    clause and only selects the elements in the provided list.
    """
    compiler = 'SQLAggregateCompiler'
    def add_subquery(self, query, using):
        # Pre-render the inner query's SQL and params; the compiler embeds
        # it verbatim in the FROM clause (column aliases avoid collisions).
        self.subquery, self.sub_params = query.get_compiler(using).as_sql(with_col_aliases=True)
|
bsd-3-clause
|
ragnarokkrr/rgn_vm_containers
|
rgn_k8s_kafka_python/snippets/avro_producer_plain.py
|
1
|
1038
|
from confluent_kafka import avro
from confluent_kafka.avro import AvroProducer
from lipsum import generate_words
import os
import random
# Endpoints for the schema registry and Kafka broker (container IPs).
SCHEMA_REGISTRY_URL = 'http://172.17.0.5:8081'
BOOTSTRAP_SERVERS = '172.17.0.4'
AVSC_DIR = os.path.dirname(os.path.realpath(__file__))
KEY_SCHEMA = avro.load(os.path.join(AVSC_DIR, 'primitive_string.avsc'))
VALUE_SCHEMA = avro.load(os.path.join(AVSC_DIR, 'basic_schema.avsc'))
TOPIC = 'avrotopic'
KEY = "mykey"
# Producer built once; schemas attached as defaults so each produce()
# call below doesn't have to repeat them.
avroProducer = AvroProducer({'bootstrap.servers': BOOTSTRAP_SERVERS,
                             'schema.registry.url': SCHEMA_REGISTRY_URL},
                            default_key_schema=KEY_SCHEMA,
                            default_value_schema=VALUE_SCHEMA)
# range/print() instead of the Python-2-only xrange/`print str(...)`
# statement; identical behavior on Python 2 and valid on Python 3.
for i in range(100):
    value = {"name": generate_words(count=1),
             "surname": generate_words(count=2),
             "number": random.randint(0, 100)}
    print(str(value))
    avroProducer.produce(topic=TOPIC,
                         value=value,
                         key=KEY)
avroProducer.flush()
|
apache-2.0
|
abaditsegay/arangodb
|
3rdParty/V8-4.3.61/build/gyp/test/subdirectory/gyptest-SYMROOT-default.py
|
399
|
1260
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a target and a subsidiary dependent target from a
.gyp file in a subdirectory, without specifying an explicit output build
directory, and using the generated solution or project file at the top
of the tree as the entry point.
The configuration sets the Xcode SYMROOT variable and uses --depth=
to make Xcode behave like the other build tools--that is, put all
built targets in a single output build directory at the top of the tree.
"""
import TestGyp
test = TestGyp.TestGyp()
# Generate build files from the subdirectory; --depth=. makes all build
# tools put their outputs in one directory at the top of the tree.
test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
# Move the tree before building -- presumably to verify the generated
# files carry no absolute paths; confirm against other gyp tests.
test.relocate('src', 'relocate/src')
# Suppress the test infrastructure's setting SYMROOT on the command line.
test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
# Both the entry target and its dependent must build and run correctly.
test.run_built_executable('prog1',
                          stdout="Hello from prog1.c\n",
                          chdir='relocate/src')
test.run_built_executable('prog2',
                          stdout="Hello from prog2.c\n",
                          chdir='relocate/src')
test.pass_test()
|
apache-2.0
|
Changaco/oh-mainline
|
vendor/packages/celery/celery/tests/test_events/__init__.py
|
18
|
6120
|
from __future__ import absolute_import
from __future__ import with_statement
import socket
from celery import events
from celery.app import app_or_default
from celery.tests.utils import unittest
class MockProducer(object):
    """In-memory stand-in for an event publisher: records every published
    message, and can be armed (via ``raise_on_publish``) to fail instead."""
    raise_on_publish = False
    def __init__(self, *args, **kwargs):
        self.sent = []
    def publish(self, msg, *args, **kwargs):
        """Record *msg*, or raise KeyError when armed to simulate a failure."""
        if self.raise_on_publish:
            raise KeyError()
        self.sent.append(msg)
    def close(self):
        """No-op; present only to satisfy the producer interface."""
        pass
    def has_event(self, kind):
        """Return the first sent event whose type is *kind*, or False."""
        matches = (message for message in self.sent if message["type"] == kind)
        return next(matches, False)
class TestEvent(unittest.TestCase):
    def test_constructor(self):
        # events.Event() must tag the message with its type and stamp it
        # with a (truthy) creation timestamp.
        event = events.Event("world war II")
        self.assertEqual(event["type"], "world war II")
        self.assertTrue(event["timestamp"])
class TestEventDispatcher(unittest.TestCase):
    def setUp(self):
        self.app = app_or_default()
    def test_send(self):
        producer = MockProducer()
        eventer = self.app.events.Dispatcher(object(), enabled=False)
        # Swap in the mock so no real broker connection is needed.
        eventer.publisher = producer
        eventer.enabled = True
        eventer.send("World War II", ended=True)
        self.assertTrue(producer.has_event("World War II"))
        # A disabled dispatcher must drop events silently.
        eventer.enabled = False
        eventer.send("World War III")
        self.assertFalse(producer.has_event("World War III"))
        evs = ("Event 1", "Event 2", "Event 3")
        eventer.enabled = True
        eventer.publisher.raise_on_publish = True
        # Without offline buffering a publish failure must propagate.
        eventer.buffer_while_offline = False
        with self.assertRaises(KeyError):
            eventer.send("Event X")
        # With buffering, failed sends are queued and delivered by flush().
        eventer.buffer_while_offline = True
        for ev in evs:
            eventer.send(ev)
        eventer.publisher.raise_on_publish = False
        eventer.flush()
        for ev in evs:
            self.assertTrue(producer.has_event(ev))
    def test_enabled_disable(self):
        connection = self.app.broker_connection()
        channel = connection.channel()
        try:
            dispatcher = self.app.events.Dispatcher(connection,
                                                    enabled=True)
            dispatcher2 = self.app.events.Dispatcher(connection,
                                                     enabled=True,
                                                     channel=channel)
            self.assertTrue(dispatcher.enabled)
            self.assertTrue(dispatcher.publisher.channel)
            self.assertEqual(dispatcher.publisher.serializer,
                             self.app.conf.CELERY_EVENT_SERIALIZER)
            created_channel = dispatcher.publisher.channel
            dispatcher.disable()
            dispatcher.disable()  # Disable with no active publisher
            dispatcher2.disable()
            self.assertFalse(dispatcher.enabled)
            self.assertIsNone(dispatcher.publisher)
            # A channel the dispatcher created itself must be closed on
            # disable(); a caller-provided channel must be left open.
            self.assertTrue(created_channel.closed)
            self.assertFalse(dispatcher2.channel.closed,
                             "does not close manually provided channel")
            dispatcher.enable()
            self.assertTrue(dispatcher.enabled)
            self.assertTrue(dispatcher.publisher)
        finally:
            channel.close()
            connection.close()
class TestEventReceiver(unittest.TestCase):
    def setUp(self):
        self.app = app_or_default()
    def test_process(self):
        # An instance-level handler registered for an event type must be
        # invoked when a matching message is received.
        message = {"type": "world-war"}
        got_event = [False]
        def my_handler(event):
            got_event[0] = True
        r = events.EventReceiver(object(),
                                 handlers={"world-war": my_handler},
                                 node_id="celery.tests",
                                 )
        r._receive(message, object())
        self.assertTrue(got_event[0])
    def test_catch_all_event(self):
        message = {"type": "world-war"}
        got_event = [False]
        def my_handler(event):
            got_event[0] = True
        r = events.EventReceiver(object(), node_id="celery.tests")
        # Install a wildcard handler on the CLASS attribute; restore it in
        # the finally block so other tests aren't affected.
        events.EventReceiver.handlers["*"] = my_handler
        try:
            r._receive(message, object())
            self.assertTrue(got_event[0])
        finally:
            events.EventReceiver.handlers = {}
    def test_itercapture(self):
        connection = self.app.broker_connection()
        try:
            r = self.app.events.Receiver(connection, node_id="celery.tests")
            it = r.itercapture(timeout=0.0001, wakeup=False)
            # First yield is the consumer itself (Python 2 iterator API).
            consumer = it.next()
            self.assertTrue(consumer.queues)
            self.assertEqual(consumer.callbacks[0], r._receive)
            # With no events on the wire, both iteration and capture()
            # must time out rather than block.
            with self.assertRaises(socket.timeout):
                it.next()
            with self.assertRaises(socket.timeout):
                r.capture(timeout=0.00001)
        finally:
            connection.close()
    def test_itercapture_limit(self):
        connection = self.app.broker_connection()
        channel = connection.channel()
        try:
            events_received = [0]
            def handler(event):
                events_received[0] += 1
            producer = self.app.events.Dispatcher(connection,
                                                  enabled=True,
                                                  channel=channel)
            r = self.app.events.Receiver(connection,
                                         handlers={"*": handler},
                                         node_id="celery.tests")
            evs = ["ev1", "ev2", "ev3", "ev4", "ev5"]
            for ev in evs:
                producer.send(ev)
            it = r.itercapture(limit=4, wakeup=True)
            it.next()  # skip consumer (see itercapture)
            list(it)
            # Only `limit` events may be consumed even though 5 were sent.
            self.assertEqual(events_received[0], 4)
        finally:
            channel.close()
            connection.close()
class test_misc(unittest.TestCase):
    """Miscellaneous tests for the events module."""

    def setUp(self):
        self.app = app_or_default()

    def test_State(self):
        # A freshly constructed State knows about no workers.
        state = self.app.events.State()
        self.assertDictEqual(dict(state.workers), {})
|
agpl-3.0
|
joachimmetz/dfvfs
|
dfvfs/resolver_helpers/os_resolver_helper.py
|
2
|
1201
|
# -*- coding: utf-8 -*-
"""The operating system path specification resolver helper implementation."""
from dfvfs.file_io import os_file_io
from dfvfs.lib import definitions
from dfvfs.resolver_helpers import manager
from dfvfs.resolver_helpers import resolver_helper
from dfvfs.vfs import os_file_system
class OSResolverHelper(resolver_helper.ResolverHelper):
  """Resolver helper for operating system path specifications."""

  TYPE_INDICATOR = definitions.TYPE_INDICATOR_OS

  def NewFileObject(self, resolver_context, path_spec):
    """Creates a new file input/output (IO) object.

    Args:
      resolver_context (Context): resolver context.
      path_spec (PathSpec): a path specification.

    Returns:
      FileIO: file input/output (IO) object.
    """
    file_object = os_file_io.OSFile(resolver_context, path_spec)
    return file_object

  def NewFileSystem(self, resolver_context, path_spec):
    """Creates a new file system object.

    Args:
      resolver_context (Context): resolver context.
      path_spec (PathSpec): a path specification.

    Returns:
      FileSystem: file system.
    """
    file_system = os_file_system.OSFileSystem(resolver_context, path_spec)
    return file_system
manager.ResolverHelperManager.RegisterHelper(OSResolverHelper())
|
apache-2.0
|
Nagriar/mongo-connector
|
tests/test_gridfs_file.py
|
31
|
3939
|
# Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import gridfs
sys.path[0:0] = [""]
from mongo_connector.gridfs_file import GridFSFile
from mongo_connector import errors
from tests import unittest
from tests.setup_cluster import ReplicaSet
class MockGridFSFile:
    """In-memory stand-in for a GridFS file.

    Mirrors the subset of the GridFSFile interface used by the tests:
    metadata access, ``len()`` and cursor-based ``read()``.
    """

    def __init__(self, doc, data):
        # Copy the identifying metadata out of the files-collection document.
        self._id = doc['_id']
        self.filename = doc['filename']
        self.upload_date = doc['upload_date']
        self.md5 = doc['md5']
        # Raw payload plus a read cursor.
        self.data = data
        self.length = len(data)
        self.pos = 0

    def get_metadata(self):
        """Return the file's metadata as a plain dict."""
        return dict(
            _id=self._id,
            filename=self.filename,
            upload_date=self.upload_date,
            md5=self.md5,
        )

    def __len__(self):
        return self.length

    def read(self, n=-1):
        """Read up to ``n`` items from the cursor; ``n < 0`` reads the rest."""
        if n < 0 or self.pos + n > self.length:
            end = self.length
        else:
            end = self.pos + n
        chunk = self.data[self.pos:end]
        self.pos = end
        return chunk
class TestGridFSFile(unittest.TestCase):
    """Integration tests for GridFSFile against a live MongoDB replica set."""

    @classmethod
    def setUpClass(cls):
        # Start up a replica set and connect to it
        cls.repl_set = ReplicaSet().start()
        cls.main_connection = cls.repl_set.client()

    @classmethod
    def tearDownClass(cls):
        cls.main_connection.close()
        cls.repl_set.stop()

    def setUp(self):
        # clear existing data
        self.main_connection.drop_database("test")
        self.collection = self.main_connection.test.fs
        self.fs = gridfs.GridFS(self.main_connection.test)

    def get_file(self, doc):
        """Wrap a files-collection document in a GridFSFile."""
        return GridFSFile(self.collection, doc)

    def test_insert(self):
        def test_insert_file(data, filename, read_size):
            # insert file
            id = self.fs.put(data, filename=filename, encoding='utf8')
            doc = self.collection.files.find_one(id)
            f = self.get_file(doc)
            # test metadata
            self.assertEqual(id, f._id)
            self.assertEqual(filename, f.filename)
            # test data: read in read_size chunks until exhausted and
            # verify the reassembled content matches what was inserted.
            result = []
            while True:
                s = f.read(read_size)
                if len(s) > 0:
                    result.append(s.decode('utf8'))
                    if read_size >= 0:
                        self.assertLessEqual(len(s), read_size)
                else:
                    break
            result = "".join(result)
            self.assertEqual(f.length, len(result))
            self.assertEqual(data, result)

        # test with 1-chunk files
        test_insert_file("hello world", "hello.txt", -1)
        test_insert_file("hello world 2", "hello.txt", 10)
        test_insert_file("hello world 3", "hello.txt", 100)

        # test with multiple-chunk files (4 MiB of cycling letters)
        size = 4 * 1024 * 1024
        bigger = "".join([chr(ord('a') + (n % 26)) for n in range(size)])
        test_insert_file(bigger, "bigger.txt", -1)
        test_insert_file(bigger, "bigger.txt", 1024)
        test_insert_file(bigger, "bigger.txt", 1024 * 1024)

    def test_missing_chunk(self):
        # Reading a file whose chunks were deleted must raise OperationFailed.
        data = "test data"
        id = self.fs.put(data, encoding='utf8')
        doc = self.collection.files.find_one(id)
        f = self.get_file(doc)
        self.main_connection['test']['fs.chunks'].remove({
            'files_id': id
        })
        self.assertRaises(errors.OperationFailed, f.read)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
apache-2.0
|
leki75/ansible
|
lib/ansible/module_utils/cloudstack.py
|
30
|
23809
|
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import time
from ansible.module_utils._text import to_text
try:
from cs import CloudStack, CloudStackException, read_config
HAS_LIB_CS = True
except ImportError:
HAS_LIB_CS = False
# Hypervisor names accepted by the CloudStack modules; both the CamelCase
# API spelling and the all-lowercase variant are allowed.
CS_HYPERVISORS = [
    'KVM', 'kvm',
    'VMware', 'vmware',
    'BareMetal', 'baremetal',
    'XenServer', 'xenserver',
    'LXC', 'lxc',
    'HyperV', 'hyperv',
    'UCS', 'ucs',
    'OVM', 'ovm',
    'Simulator', 'simulator',
]
# Python 3 removed the `long` type; alias it to `int` so the isinstance
# checks in AnsibleCloudStack.has_changed() keep working on both versions.
if sys.version_info > (3,):
    long = int
def cs_argument_spec():
    """Return the shared Ansible argument spec for CloudStack modules.

    Defaults are pulled from the ``CLOUDSTACK_*`` environment variables
    when they are set.
    """
    env = os.environ.get
    return dict(
        api_key=dict(default=env('CLOUDSTACK_KEY')),
        api_secret=dict(default=env('CLOUDSTACK_SECRET'), no_log=True),
        api_url=dict(default=env('CLOUDSTACK_ENDPOINT')),
        api_http_method=dict(choices=['get', 'post'],
                             default=env('CLOUDSTACK_METHOD') or 'get'),
        api_timeout=dict(type='int', default=env('CLOUDSTACK_TIMEOUT') or 10),
        api_region=dict(default=env('CLOUDSTACK_REGION') or 'cloudstack'),
    )
def cs_required_together():
    """Return argument names that must always be supplied together."""
    return [['api_key', 'api_secret']]
class AnsibleCloudStack(object):
    """Base class shared by the Ansible CloudStack modules.

    Wraps the ``cs`` API client, caches commonly resolved entities
    (zone, project, account, domain, VPC, network, VM, ...) on the
    instance and maintains the ``changed``/``diff`` result dict.
    """

    def __init__(self, module):
        if not HAS_LIB_CS:
            module.fail_json(msg="python library cs required: pip install cs")

        self.result = {
            'changed': False,
            'diff': {
                'before': dict(),
                'after': dict()
            }
        }

        # Common returns, will be merged with self.returns
        # search_for_key: replace_with_key
        self.common_returns = {
            'id': 'id',
            'name': 'name',
            'created': 'created',
            'zonename': 'zone',
            'state': 'state',
            'project': 'project',
            'account': 'account',
            'domain': 'domain',
            'displaytext': 'display_text',
            'displayname': 'display_name',
            'description': 'description',
        }

        # Init returns dict for use in subclasses
        self.returns = {}
        # these values will be casted to int
        self.returns_to_int = {}
        # these keys will be compared case sensitive in self.has_changed()
        self.case_sensitive_keys = [
            'id',
            'displaytext',
            'displayname',
            'description',
        ]

        self.module = module
        self._connect()

        # Helper for VPCs
        self._vpc_networks_ids = None

        # Caches for entities resolved lazily by the get_* methods below.
        self.domain = None
        self.account = None
        self.project = None
        self.ip_address = None
        self.network = None
        self.vpc = None
        self.zone = None
        self.vm = None
        self.vm_default_nic = None
        self.os_type = None
        self.hypervisor = None
        self.capabilities = None
        self.network_acl = None

    def _connect(self):
        """Build the ``cs`` client from module params, config file or env."""
        api_region = self.module.params.get('api_region') or os.environ.get('CLOUDSTACK_REGION')
        try:
            config = read_config(api_region)
        except KeyError:
            config = {}

        # Module params take precedence over the cloudstack.ini config file.
        api_config = {
            'endpoint': self.module.params.get('api_url') or config.get('endpoint'),
            'key': self.module.params.get('api_key') or config.get('key'),
            'secret': self.module.params.get('api_secret') or config.get('secret'),
            'timeout': self.module.params.get('api_timeout') or config.get('timeout'),
            'method': self.module.params.get('api_http_method') or config.get('method'),
        }
        self.result.update({
            'api_region': api_region,
            'api_url': api_config['endpoint'],
            'api_key': api_config['key'],
            'api_timeout': api_config['timeout'],
            'api_http_method': api_config['method'],
        })
        if not all([api_config['endpoint'], api_config['key'], api_config['secret']]):
            self.fail_json(msg="Missing api credentials: can not authenticate")
        self.cs = CloudStack(**api_config)

    def fail_json(self, **kwargs):
        """Fail the module, merging the accumulated result into the output."""
        self.result.update(kwargs)
        self.module.fail_json(**self.result)

    def get_or_fallback(self, key=None, fallback_key=None):
        """Return param ``key``, falling back to ``fallback_key`` if unset."""
        value = self.module.params.get(key)
        if not value:
            value = self.module.params.get(fallback_key)
        return value

    def has_changed(self, want_dict, current_dict, only_keys=None):
        """Return True if ``want_dict`` differs from ``current_dict``.

        Records every difference in ``self.result['diff']``. Numeric values
        are coerced to the wanted type before comparison; string values are
        compared case-insensitively unless the key is in
        ``self.case_sensitive_keys``.
        """
        result = False
        for key, value in want_dict.items():

            # Optionally limit by a list of keys
            if only_keys and key not in only_keys:
                continue

            # Skip None values
            if value is None:
                continue

            if key in current_dict:
                if isinstance(value, (int, float, long, complex)):

                    # ensure we compare the same type
                    if isinstance(value, int):
                        current_dict[key] = int(current_dict[key])
                    elif isinstance(value, float):
                        current_dict[key] = float(current_dict[key])
                    elif isinstance(value, long):
                        current_dict[key] = long(current_dict[key])
                    elif isinstance(value, complex):
                        current_dict[key] = complex(current_dict[key])

                    if value != current_dict[key]:
                        self.result['diff']['before'][key] = current_dict[key]
                        self.result['diff']['after'][key] = value
                        result = True
                else:
                    before_value = to_text(current_dict[key])
                    after_value = to_text(value)

                    if self.case_sensitive_keys and key in self.case_sensitive_keys:
                        if before_value != after_value:
                            self.result['diff']['before'][key] = before_value
                            self.result['diff']['after'][key] = after_value
                            result = True

                    # Test for diff in case insensitive way
                    elif before_value.lower() != after_value.lower():
                        self.result['diff']['before'][key] = before_value
                        self.result['diff']['after'][key] = after_value
                        result = True
            else:
                self.result['diff']['before'][key] = None
                self.result['diff']['after'][key] = to_text(value)
                result = True
        return result

    def _get_by_key(self, key=None, my_dict=None):
        """Return ``my_dict[key]``, or the whole dict when ``key`` is None."""
        if my_dict is None:
            my_dict = {}
        if key:
            if key in my_dict:
                return my_dict[key]
            self.fail_json(msg="Something went wrong: %s not found" % key)
        return my_dict

    def query_api(self, command, **args):
        """Call a CloudStack API command, failing the module on API errors."""
        try:
            res = getattr(self.cs, command)(**args)

            if 'errortext' in res:
                self.fail_json(msg="Failed: '%s'" % res['errortext'])

        except CloudStackException as e:
            self.fail_json(msg='CloudStackException: %s' % str(e))

        return res

    def get_network_acl(self, key=None):
        """Resolve and cache the network ACL given by the module params."""
        if self.network_acl is None:
            args = {
                'name': self.module.params.get('network_acl'),
                'vpcid': self.get_vpc(key='id'),
            }
            network_acls = self.query_api('listNetworkACLLists', **args)
            if network_acls:
                self.network_acl = network_acls['networkacllist'][0]
                self.result['network_acl'] = self.network_acl['name']
        if self.network_acl:
            return self._get_by_key(key, self.network_acl)
        else:
            self.fail_json(msg="Network ACL %s not found" % self.module.params.get('network_acl'))

    def get_vpc(self, key=None):
        """Return a VPC dictionary or the value of given key of."""
        if self.vpc:
            return self._get_by_key(key, self.vpc)

        vpc = self.module.params.get('vpc')
        if not vpc:
            vpc = os.environ.get('CLOUDSTACK_VPC')
        if not vpc:
            return None

        args = {
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id'),
            'projectid': self.get_project(key='id'),
            'zoneid': self.get_zone(key='id'),
        }
        vpcs = self.query_api('listVPCs', **args)
        if not vpcs:
            self.fail_json(msg="No VPCs available.")

        for v in vpcs['vpc']:
            if vpc in [v['name'], v['displaytext'], v['id']]:
                # Fail if the identifyer matches more than one VPC
                if self.vpc:
                    self.fail_json(msg="More than one VPC found with the provided identifyer '%s'" % vpc)
                else:
                    self.vpc = v
                    self.result['vpc'] = v['name']
        if self.vpc:
            return self._get_by_key(key, self.vpc)
        self.fail_json(msg="VPC '%s' not found" % vpc)

    def is_vpc_network(self, network_id):
        """Returns True if network is in VPC."""
        # This is an efficient way to query a lot of networks at a time
        if self._vpc_networks_ids is None:
            args = {
                'account': self.get_account(key='name'),
                'domainid': self.get_domain(key='id'),
                'projectid': self.get_project(key='id'),
                'zoneid': self.get_zone(key='id'),
            }
            vpcs = self.query_api('listVPCs', **args)
            self._vpc_networks_ids = []
            if vpcs:
                for vpc in vpcs['vpc']:
                    for n in vpc.get('network', []):
                        self._vpc_networks_ids.append(n['id'])
        return network_id in self._vpc_networks_ids

    def get_network(self, key=None):
        """Return a network dictionary or the value of given key of."""
        if self.network:
            return self._get_by_key(key, self.network)

        network = self.module.params.get('network')
        if not network:
            vpc_name = self.get_vpc(key='name')
            if vpc_name:
                self.fail_json(msg="Could not find network for VPC '%s' due missing argument: network" % vpc_name)
            return None

        args = {
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id'),
            'projectid': self.get_project(key='id'),
            'zoneid': self.get_zone(key='id'),
            'vpcid': self.get_vpc(key='id')
        }
        networks = self.query_api('listNetworks', **args)
        if not networks:
            self.fail_json(msg="No networks available.")

        for n in networks['network']:
            # ignore any VPC network if vpc param is not given
            if 'vpcid' in n and not self.get_vpc(key='id'):
                continue
            if network in [n['displaytext'], n['name'], n['id']]:
                self.result['network'] = n['name']
                self.network = n
                return self._get_by_key(key, self.network)
        self.fail_json(msg="Network '%s' not found" % network)

    def get_project(self, key=None):
        """Resolve and cache the project from params or CLOUDSTACK_PROJECT."""
        if self.project:
            return self._get_by_key(key, self.project)

        project = self.module.params.get('project')
        if not project:
            project = os.environ.get('CLOUDSTACK_PROJECT')
        if not project:
            return None
        args = {
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id')
        }
        projects = self.query_api('listProjects', **args)
        if projects:
            for p in projects['project']:
                if project.lower() in [p['name'].lower(), p['id']]:
                    self.result['project'] = p['name']
                    self.project = p
                    return self._get_by_key(key, self.project)
        self.fail_json(msg="project '%s' not found" % project)

    def get_ip_address(self, key=None):
        """Resolve and cache the public IP address given in the params."""
        if self.ip_address:
            return self._get_by_key(key, self.ip_address)

        ip_address = self.module.params.get('ip_address')
        if not ip_address:
            self.fail_json(msg="IP address param 'ip_address' is required")

        args = {
            'ipaddress': ip_address,
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id'),
            'projectid': self.get_project(key='id'),
            'vpcid': self.get_vpc(key='id'),
        }

        ip_addresses = self.query_api('listPublicIpAddresses', **args)

        if not ip_addresses:
            self.fail_json(msg="IP address '%s' not found" % args['ipaddress'])

        self.ip_address = ip_addresses['publicipaddress'][0]
        return self._get_by_key(key, self.ip_address)

    def get_vm_guest_ip(self):
        """Return the requested guest IP, validated against the default NIC."""
        vm_guest_ip = self.module.params.get('vm_guest_ip')
        default_nic = self.get_vm_default_nic()

        if not vm_guest_ip:
            return default_nic['ipaddress']

        for secondary_ip in default_nic['secondaryip']:
            if vm_guest_ip == secondary_ip['ipaddress']:
                return vm_guest_ip
        self.fail_json(msg="Secondary IP '%s' not assigned to VM" % vm_guest_ip)

    def get_vm_default_nic(self):
        """Return (and cache) the default NIC of the VM."""
        if self.vm_default_nic:
            return self.vm_default_nic

        nics = self.query_api('listNics', virtualmachineid=self.get_vm(key='id'))
        if nics:
            for n in nics['nic']:
                if n['isdefault']:
                    self.vm_default_nic = n
                    return self.vm_default_nic
        self.fail_json(msg="No default IP address of VM '%s' found" % self.module.params.get('vm'))

    def get_vm(self, key=None):
        """Resolve and cache the VM by name, display name or id."""
        if self.vm:
            return self._get_by_key(key, self.vm)

        vm = self.module.params.get('vm')
        if not vm:
            self.fail_json(msg="Virtual machine param 'vm' is required")

        args = {
            'account': self.get_account(key='name'),
            'domainid': self.get_domain(key='id'),
            'projectid': self.get_project(key='id'),
            'zoneid': self.get_zone(key='id'),
        }
        vms = self.query_api('listVirtualMachines', **args)
        if vms:
            for v in vms['virtualmachine']:
                if vm.lower() in [v['name'].lower(), v['displayname'].lower(), v['id']]:
                    self.vm = v
                    return self._get_by_key(key, self.vm)
        self.fail_json(msg="Virtual machine '%s' not found" % vm)

    def get_zone(self, key=None):
        """Resolve and cache the zone; defaults to the first zone listed."""
        if self.zone:
            return self._get_by_key(key, self.zone)

        zone = self.module.params.get('zone')
        if not zone:
            zone = os.environ.get('CLOUDSTACK_ZONE')
        zones = self.query_api('listZones')

        if not zones:
            self.fail_json(msg="No zones available. Please create a zone first")

        # use the first zone if no zone param given
        if not zone:
            self.zone = zones['zone'][0]
            self.result['zone'] = self.zone['name']
            return self._get_by_key(key, self.zone)

        if zones:
            for z in zones['zone']:
                if zone.lower() in [z['name'].lower(), z['id']]:
                    self.result['zone'] = z['name']
                    self.zone = z
                    return self._get_by_key(key, self.zone)
        self.fail_json(msg="zone '%s' not found" % zone)

    def get_os_type(self, key=None):
        """Resolve and cache the OS type by description or id."""
        if self.os_type:
            # BUGFIX: previously returned from self.zone, which handed back
            # the cached zone (or crashed) instead of the cached OS type.
            return self._get_by_key(key, self.os_type)

        os_type = self.module.params.get('os_type')
        if not os_type:
            return None

        os_types = self.query_api('listOsTypes')
        if os_types:
            for o in os_types['ostype']:
                if os_type in [o['description'], o['id']]:
                    self.os_type = o
                    return self._get_by_key(key, self.os_type)
        self.fail_json(msg="OS type '%s' not found" % os_type)

    def get_hypervisor(self):
        """Resolve and cache the hypervisor name, defaulting to the first."""
        if self.hypervisor:
            return self.hypervisor

        hypervisor = self.module.params.get('hypervisor')
        hypervisors = self.query_api('listHypervisors')

        # use the first hypervisor if no hypervisor param given
        if not hypervisor:
            self.hypervisor = hypervisors['hypervisor'][0]['name']
            return self.hypervisor

        for h in hypervisors['hypervisor']:
            if hypervisor.lower() == h['name'].lower():
                self.hypervisor = h['name']
                return self.hypervisor
        self.fail_json(msg="Hypervisor '%s' not found" % hypervisor)

    def get_account(self, key=None):
        """Resolve and cache the account; requires a domain when given."""
        if self.account:
            return self._get_by_key(key, self.account)

        account = self.module.params.get('account')
        if not account:
            account = os.environ.get('CLOUDSTACK_ACCOUNT')
        if not account:
            return None

        domain = self.module.params.get('domain')
        if not domain:
            self.fail_json(msg="Account must be specified with Domain")

        args = {
            'name': account,
            'domainid': self.get_domain(key='id'),
            'listall': True
        }
        accounts = self.query_api('listAccounts', **args)
        if accounts:
            self.account = accounts['account'][0]
            self.result['account'] = self.account['name']
            return self._get_by_key(key, self.account)
        self.fail_json(msg="Account '%s' not found" % account)

    def get_domain(self, key=None):
        """Resolve and cache the domain by path (with or without ROOT/)."""
        if self.domain:
            return self._get_by_key(key, self.domain)

        domain = self.module.params.get('domain')
        if not domain:
            domain = os.environ.get('CLOUDSTACK_DOMAIN')
        if not domain:
            return None

        args = {
            'listall': True,
        }
        domains = self.query_api('listDomains', **args)
        if domains:
            for d in domains['domain']:
                if d['path'].lower() in [domain.lower(), "root/" + domain.lower(), "root" + domain.lower()]:
                    self.domain = d
                    self.result['domain'] = d['path']
                    return self._get_by_key(key, self.domain)
        self.fail_json(msg="Domain '%s' not found" % domain)

    def query_tags(self, resource, resource_type):
        """Fetch the current tags of a resource from the API."""
        args = {
            'resourceids': resource['id'],
            'resourcetype': resource_type,
        }
        tags = self.query_api('listTags', **args)
        return self.get_tags(resource=tags, key='tag')

    def get_tags(self, resource=None, key='tags'):
        """Return the resource's tags as a list of {'key', 'value'} dicts."""
        existing_tags = []
        for tag in resource.get(key) or []:
            existing_tags.append({'key': tag['key'], 'value': tag['value']})
        return existing_tags

    def _process_tags(self, resource, resource_type, tags, operation="create"):
        """Create or delete the given tags on the resource (check-mode aware)."""
        if tags:
            self.result['changed'] = True
            if not self.module.check_mode:
                args = {
                    'resourceids': resource['id'],
                    'resourcetype': resource_type,
                    'tags': tags,
                }
                if operation == "create":
                    response = self.query_api('createTags', **args)
                else:
                    response = self.query_api('deleteTags', **args)
                self.poll_job(response)

    def _tags_that_should_exist_or_be_updated(self, resource, tags):
        existing_tags = self.get_tags(resource)
        return [tag for tag in tags if tag not in existing_tags]

    def _tags_that_should_not_exist(self, resource, tags):
        existing_tags = self.get_tags(resource)
        return [tag for tag in existing_tags if tag not in tags]

    def ensure_tags(self, resource, resource_type=None):
        """Reconcile the resource's tags with the module's 'tags' param."""
        if not resource_type or not resource:
            self.fail_json(msg="Error: Missing resource or resource_type for tags.")

        if 'tags' in resource:
            tags = self.module.params.get('tags')
            if tags is not None:
                self._process_tags(resource, resource_type, self._tags_that_should_not_exist(resource, tags), operation="delete")
                self._process_tags(resource, resource_type, self._tags_that_should_exist_or_be_updated(resource, tags))
                resource['tags'] = self.query_tags(resource=resource, resource_type=resource_type)
        return resource

    def get_capabilities(self, key=None):
        """Return (and cache) the cloud's capabilities."""
        if self.capabilities:
            return self._get_by_key(key, self.capabilities)
        capabilities = self.query_api('listCapabilities')
        self.capabilities = capabilities['capability']
        return self._get_by_key(key, self.capabilities)

    def poll_job(self, job=None, key=None):
        """Poll an async job until completion; return job or jobresult[key]."""
        if 'jobid' in job:
            while True:
                res = self.query_api('queryAsyncJobResult', jobid=job['jobid'])
                if res['jobstatus'] != 0 and 'jobresult' in res:
                    if 'errortext' in res['jobresult']:
                        self.fail_json(msg="Failed: '%s'" % res['jobresult']['errortext'])
                    if key and key in res['jobresult']:
                        job = res['jobresult'][key]
                    break
                time.sleep(2)
        return job

    def get_result(self, resource):
        """Map API resource keys into self.result per the returns tables."""
        if resource:
            returns = self.common_returns.copy()
            returns.update(self.returns)
            for search_key, return_key in returns.items():
                if search_key in resource:
                    self.result[return_key] = resource[search_key]

            # Bad bad API does not always return int when it should.
            for search_key, return_key in self.returns_to_int.items():
                if search_key in resource:
                    self.result[return_key] = int(resource[search_key])

            if 'tags' in resource:
                self.result['tags'] = resource['tags']
        return self.result
|
gpl-3.0
|
duythanhphan/gumbo
|
python/gumbo/gumboc.py
|
2
|
12639
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""CTypes bindings for the Gumbo HTML5 parser.
This exports the raw interface of the library as a set of very thin ctypes
wrappers. It's intended to be wrapped by other libraries to provide a more
Pythonic API.
"""
__author__ = 'jdtang@google.com (Jonathan Tang)'
import contextlib
import ctypes
# Load the shared gumbo library: try the Linux soname first, then fall
# back to the MacOS X dylib name.
try:
    _dll = ctypes.cdll.LoadLibrary('libgumbo.so')
except OSError:
    # MacOS X
    _dll = ctypes.cdll.LoadLibrary('libgumbo.dylib')

# Some aliases for common types.
_bitvector = ctypes.c_uint
_Ptr = ctypes.POINTER
class Enum(ctypes.c_uint):
    """Base class for C enums exposed through ctypes.

    Subclasses define a ``_values_`` list of constant names; the metaclass
    turns each name into a class attribute holding the corresponding value.
    NOTE(review): the ``__metaclass__`` attribute is Python 2 syntax; under
    Python 3 it is ignored, so the constants would not be generated — confirm
    the intended Python version before reuse.
    """
    class __metaclass__(type(ctypes.c_uint)):
        def __new__(metaclass, name, bases, cls_dict):
            cls = type(ctypes.c_uint).__new__(metaclass, name, bases, cls_dict)
            # The base Enum class itself has no _values_; skip it.
            if name == 'Enum':
                return cls
            try:
                for i, value in enumerate(cls_dict['_values_']):
                    setattr(cls, value, cls.from_param(i))
            except KeyError:
                raise ValueError('No _values_ list found inside enum type.')
            except TypeError:
                raise ValueError('_values_ must be a list of names of enum constants.')
            return cls

    @classmethod
    def from_param(cls, param):
        # Accept either an existing enum instance (of the same type) or a
        # raw integer within range.
        if isinstance(param, Enum):
            if param.__class__ != cls:
                raise ValueError("Can't mix enums of different types")
            return param
        if param < 0 or param > len(cls._values_):
            raise ValueError('%d is out of range for enum type %s; max %d.' %
                             (param, cls.__name__, len(cls._values_)))
        return cls(param)

    def __eq__(self, other):
        return self.value == other.value

    def __ne__(self, other):
        return self.value != other.value

    def __hash__(self):
        return hash(self.value)

    def __repr__(self):
        try:
            return self._values_[self.value]
        except IndexError:
            raise IndexError('Value %d is out of range for %r' %
                             (self.value, self._values_))
class StringPiece(ctypes.Structure):
    """A (pointer, length) view into unowned character data (GumboStringPiece)."""
    _fields_ = [
        ('data', _Ptr(ctypes.c_char)),
        ('length', ctypes.c_size_t),
    ]

    def __len__(self):
        return self.length

    def __str__(self):
        # NOTE(review): ctypes.string_at returns bytes on Python 3, where
        # returning it from __str__ would raise — this code targets Python 2.
        return ctypes.string_at(self.data, self.length)
class SourcePosition(ctypes.Structure):
    """Line/column/byte-offset position within the parsed source."""
    _fields_ = [
        ('line', ctypes.c_uint),
        ('column', ctypes.c_uint),
        ('offset', ctypes.c_uint)
    ]

# Sentinel "no position" constant exported by the C library.
SourcePosition.EMPTY = SourcePosition.in_dll(_dll, 'kGumboEmptySourcePosition')
class AttributeNamespace(Enum):
    """Namespace of an attribute (none, xlink, xml or xmlns)."""
    # URLS is indexed by enum value; index 0 corresponds to NONE.
    URLS = [
        'http://www.w3.org/1999/xhtml',
        'http://www.w3.org/1999/xlink',
        'http://www.w3.org/XML/1998/namespace',
        'http://www.w3.org/2000/xmlns',
    ]
    _values_ = ['NONE', 'XLINK', 'XML', 'XMLNS']

    def to_url(self):
        """Return the namespace URL for this enum value."""
        return self.URLS[self.value]
class Attribute(ctypes.Structure):
    """A single HTML attribute: name, value and their source locations."""
    _fields_ = [
        ('namespace', AttributeNamespace),
        ('name', ctypes.c_char_p),
        # original_* preserve the exact source text before normalization.
        ('original_name', StringPiece),
        ('value', ctypes.c_char_p),
        ('original_value', StringPiece),
        ('name_start', SourcePosition),
        ('name_end', SourcePosition),
        ('value_start', SourcePosition),
        ('value_end', SourcePosition)
    ]
class Vector(ctypes.Structure):
    """A GumboVector: a growable array of pointers to ``_type_`` elements.

    Subclasses override ``_type_`` with the element structure type.
    NOTE(review): this code is Python 2 — ``Iter.next`` and the use of
    ``long`` in ``__getitem__`` would need changes for Python 3.
    """
    _type_ = ctypes.c_void_p
    _fields_ = [
        ('data', _Ptr(ctypes.c_void_p)),
        ('length', ctypes.c_uint),
        ('capacity', ctypes.c_uint)
    ]

    class Iter(object):
        """Forward iterator over a Vector (Python 2 iterator protocol)."""
        def __init__(self, vector):
            self.current = 0
            self.vector = vector

        def __iter__(self):
            return self

        def next(self):
            if self.current >= self.vector.length:
                raise StopIteration
            obj = self.vector[self.current]
            self.current += 1
            return obj

    def __len__(self):
        return self.length

    def __getitem__(self, i):
        # Integer index: negative indices wrap; slices fall through to the
        # list() branch below.
        if isinstance(i, (int, long)):
            if i < 0:
                i += self.length
            if i > self.length:
                raise IndexError
            array_type = _Ptr(_Ptr(self._type_))
            return ctypes.cast(self.data, array_type)[i].contents
        return list(self)[i]

    def __iter__(self):
        return Vector.Iter(self)

# Sentinel empty vector exported by the C library.
Vector.EMPTY = Vector.in_dll(_dll, 'kGumboEmptyVector')
class AttributeVector(Vector):
    """Vector whose elements are Attribute structures."""
    _type_ = Attribute
class NodeVector(Vector):
    """Vector of Node structures."""
    # _type_ assigned later, to avoid circular references with Node
    pass
class QuirksMode(Enum):
    """Document rendering quirks mode derived from the doctype."""
    _values_ = ['NO_QUIRKS', 'QUIRKS', 'LIMITED_QUIRKS']
class Document(ctypes.Structure):
    """The document node: children plus doctype information."""
    _fields_ = [
        ('children', NodeVector),
        ('has_doctype', ctypes.c_bool),
        ('name', ctypes.c_char_p),
        ('public_identifier', ctypes.c_char_p),
        ('system_identifier', ctypes.c_char_p),
        ('doc_type_quirks_mode', QuirksMode),
    ]

    def __repr__(self):
        return 'Document'
class Namespace(Enum):
    """Element namespace: HTML, SVG or MathML."""
    # URLS is indexed by enum value; index 0 corresponds to HTML.
    URLS = [
        'http://www.w3.org/1999/xhtml',
        'http://www.w3.org/2000/svg',
        'http://www.w3.org/1998/Math/MathML',
    ]
    _values_ = ['HTML', 'SVG', 'MATHML']

    def to_url(self):
        """Return the namespace URL for this enum value."""
        return self.URLS[self.value]
class Tag(Enum):
    """Known HTML tag names, mirroring the GumboTag C enum.

    The order must match the C enum exactly; UNKNOWN is last and is used
    for any tag not in this list.
    """
    _values_ = [
        'HTML',
        'HEAD',
        'TITLE',
        'BASE',
        'LINK',
        'META',
        'STYLE',
        'SCRIPT',
        'NOSCRIPT',
        'TEMPLATE',
        'BODY',
        'ARTICLE',
        'SECTION',
        'NAV',
        'ASIDE',
        'H1',
        'H2',
        'H3',
        'H4',
        'H5',
        'H6',
        'HGROUP',
        'HEADER',
        'FOOTER',
        'ADDRESS',
        'P',
        'HR',
        'PRE',
        'BLOCKQUOTE',
        'OL',
        'UL',
        'LI',
        'DL',
        'DT',
        'DD',
        'FIGURE',
        'FIGCAPTION',
        'MAIN',
        'DIV',
        'A',
        'EM',
        'STRONG',
        'SMALL',
        'S',
        'CITE',
        'Q',
        'DFN',
        'ABBR',
        'DATA',
        'TIME',
        'CODE',
        'VAR',
        'SAMP',
        'KBD',
        'SUB',
        'SUP',
        'I',
        'B',
        'U',
        'MARK',
        'RUBY',
        'RT',
        'RP',
        'BDI',
        'BDO',
        'SPAN',
        'BR',
        'WBR',
        'INS',
        'DEL',
        'IMAGE',
        'IMG',
        'IFRAME',
        'EMBED',
        'OBJECT',
        'PARAM',
        'VIDEO',
        'AUDIO',
        'SOURCE',
        'TRACK',
        'CANVAS',
        'MAP',
        'AREA',
        'MATH',
        'MI',
        'MO',
        'MN',
        'MS',
        'MTEXT',
        'MGLYPH',
        'MALIGNMARK',
        'ANNOTATION_XML',
        'SVG',
        'FOREIGNOBJECT',
        'DESC',
        'TABLE',
        'CAPTION',
        'COLGROUP',
        'COL',
        'TBODY',
        'THEAD',
        'TFOOT',
        'TR',
        'TD',
        'TH',
        'FORM',
        'FIELDSET',
        'LEGEND',
        'LABEL',
        'INPUT',
        'BUTTON',
        'SELECT',
        'DATALIST',
        'OPTGROUP',
        'OPTION',
        'TEXTAREA',
        'KEYGEN',
        'OUTPUT',
        'PROGRESS',
        'METER',
        'DETAILS',
        'SUMMARY',
        'MENU',
        'MENUITEM',
        'APPLET',
        'ACRONYM',
        'BGSOUND',
        'DIR',
        'FRAME',
        'FRAMESET',
        'NOFRAMES',
        'ISINDEX',
        'LISTING',
        'XMP',
        'NEXTID',
        'NOEMBED',
        'PLAINTEXT',
        'RB',
        'STRIKE',
        'BASEFONT',
        'BIG',
        'BLINK',
        'CENTER',
        'FONT',
        'MARQUEE',
        'MULTICOL',
        'NOBR',
        'SPACER',
        'TT',
        'UNKNOWN',
    ]
class Element(ctypes.Structure):
    """An element node: tag, namespace, attributes, children, positions."""
    _fields_ = [
        ('children', NodeVector),
        ('tag', Tag),
        ('tag_namespace', Namespace),
        ('original_tag', StringPiece),
        ('original_end_tag', StringPiece),
        ('start_pos', SourcePosition),
        ('end_pos', SourcePosition),
        ('attributes', AttributeVector),
    ]

    @property
    def tag_name(self):
        """Return the element's tag name as a string.

        NOTE(review): _tag_from_original_text, _normalize_svg_tagname and
        _tagname are presumably bound from the C library elsewhere in this
        module (outside this chunk) — confirm before modifying.
        """
        # Work on a copy so we don't mutate the parse tree's StringPiece.
        original_tag = StringPiece.from_buffer_copy(self.original_tag)
        _tag_from_original_text(ctypes.byref(original_tag))
        if self.tag_namespace == Namespace.SVG:
            # SVG tag names are case-normalized by the library.
            svg_tagname = _normalize_svg_tagname(ctypes.byref(original_tag))
            if svg_tagname is not None:
                return str(svg_tagname)
        if self.tag == Tag.UNKNOWN:
            if original_tag.data is None:
                return ''
            return str(original_tag).lower()
        return _tagname(self.tag)

    def __repr__(self):
        return ('<%r>\n' % self.tag +
                '\n'.join(repr(child) for child in self.children) +
                '</%r>' % self.tag)
class Text(ctypes.Structure):
    """A text-like node (text, CDATA, comment or whitespace contents)."""
    _fields_ = [
        ('text', ctypes.c_char_p),
        # original_text preserves the exact source bytes.
        ('original_text', StringPiece),
        ('start_pos', SourcePosition)
    ]

    def __repr__(self):
        return 'Text(%r)' % self.text
class NodeType(Enum):
    """Discriminator for the Node union below."""
    _values_ = ['DOCUMENT', 'ELEMENT', 'TEXT', 'CDATA', 'COMMENT', 'WHITESPACE']
class NodeUnion(ctypes.Union):
    """Union of the possible node payloads; selected by NodeType."""
    _fields_ = [
        ('document', Document),
        ('element', Element),
        ('text', Text),
    ]
class Node(ctypes.Structure):
    """A parse-tree node: a tagged union of Document, Element, or Text.

    The concrete payload lives in the `v` union field and `type` says
    which member is active.  Attribute access is forwarded to the active
    payload, so e.g. `node.children` works directly on an element node.
    """
    # _fields_ set later (after the class body) to avoid a circular
    # reference: the struct contains pointers to Node itself.
    @property
    def contents(self):
        """The active union member, selected by self.type."""
        if self.type == NodeType.DOCUMENT:
            return self.v.document
        elif self.type == NodeType.ELEMENT:
            return self.v.element
        else:
            # TEXT, CDATA, COMMENT, and WHITESPACE all carry a Text payload.
            return self.v.text
    def __getattr__(self, name):
        # Only reached for names not found on Node itself (the ctypes field
        # descriptors `type`/`parent`/`v`/... are found first); everything
        # else is delegated to the active payload.
        return getattr(self.contents, name)
    def __setattr__(self, name, value):
        # NOTE(review): this forwards *all* attribute writes to the payload,
        # including writes to Node's own ctypes fields -- confirm this is
        # intended before assigning to e.g. node.type.
        return setattr(self.contents, name, value)
    def __repr__(self):
        return repr(self.contents)
# Node's fields are assigned after the class definition because the struct
# refers to itself (the `parent` pointer) and to NodeUnion, whose members
# in turn contain Node vectors.
Node._fields_ = [
    ('type', NodeType),
    # Set the type to Node later to avoid a circular dependency.
    ('parent', _Ptr(Node)),
    ('index_within_parent', ctypes.c_size_t),
    # TODO(jdtang): Make a real list of enum constants for this.
    ('parse_flags', _bitvector),
    ('v', NodeUnion)
]
# Now that Node is fully defined, tell NodeVector what its elements point to.
NodeVector._type_ = Node
class Options(ctypes.Structure):
    """Parse options, mirroring the C GumboOptions struct.

    Instances are filled from kGumboDefaultOptions and overridden per call
    by keyword arguments to parse().  Field order must match the C struct.
    """
    _fields_ = [
        # TODO(jdtang): Allow the Python API to set the allocator/deallocator
        # function. Right now these are treated as opaque void pointers.
        ('allocator', ctypes.c_void_p),
        ('deallocator', ctypes.c_void_p),
        ('tab_stop', ctypes.c_int),
        ('stop_on_first_error', ctypes.c_bool),
        ('max_utf8_decode_errors', ctypes.c_int),
        # The following two options will likely be removed from the C API, and
        # should be removed from the Python API when that happens too.
        ('verbatim_mode', ctypes.c_bool),
        ('preserve_entities', ctypes.c_bool),
    ]
class Output(ctypes.Structure):
    """Result of a parse, mirroring the C GumboOutput struct.

    Holds the document node, the root element node, and the vector of
    parse errors.  Owned by the C library; freed by _destroy_output.
    """
    _fields_ = [
        ('document', _Ptr(Node)),
        ('root', _Ptr(Node)),
        # TODO(jdtang): Error type.
        ('errors', Vector),
    ]
@contextlib.contextmanager
def parse(text, **kwargs):
    """Parse `text` as HTML5 and yield a pointer to the resulting Output.

    Args:
        text: the HTML to parse, as bytes.  A (unicode) str is also
            accepted and is transparently UTF-8 encoded, since the C
            parser consumes raw bytes; note that source positions then
            refer to offsets in the encoded byte string.
        **kwargs: per-call overrides for Options fields (e.g. tab_stop,
            stop_on_first_error); unspecified fields fall back to the
            library defaults in kGumboDefaultOptions.

    Yields:
        A ctypes pointer to the parse Output.  The Output and everything
        reachable from it are only valid inside the `with` block; they
        are destroyed on exit.
    """
    options = Options()
    for field_name, _ in Options._fields_:
        # Per-call overrides win; otherwise use the library defaults.
        setattr(options, field_name,
                kwargs.get(field_name, getattr(_DEFAULT_OPTIONS, field_name)))
    # ctypes.c_char_p requires bytes; encode unicode input once, up front.
    if not isinstance(text, bytes):
        text = text.encode('utf-8')
    # We have to manually take a reference to the input text here so that it
    # outlives the parse output. If we let ctypes do it automatically on
    # function call, it creates a temporary buffer which is destroyed when the
    # call completes, and then the original_text pointers point into invalid
    # memory.
    text_ptr = ctypes.c_char_p(text)
    output = _parse_with_options(ctypes.byref(options), text_ptr, len(text))
    try:
        yield output
    finally:
        _destroy_output(ctypes.byref(options), output)
# Default options baked into the gumbo shared library.
_DEFAULT_OPTIONS = Options.in_dll(_dll, 'kGumboDefaultOptions')
# GumboOutput* gumbo_parse_with_options(const GumboOptions*, const char*, size_t)
_parse_with_options = _dll.gumbo_parse_with_options
_parse_with_options.argtypes = [_Ptr(Options), ctypes.c_char_p, ctypes.c_size_t]
_parse_with_options.restype = _Ptr(Output)
# Trims a StringPiece in place down to just the tag-name text.
_tag_from_original_text = _dll.gumbo_tag_from_original_text
_tag_from_original_text.argtypes = [_Ptr(StringPiece)]
_tag_from_original_text.restype = None
# Returns the properly-cased SVG tag name, or NULL if not a known SVG tag.
_normalize_svg_tagname = _dll.gumbo_normalize_svg_tagname
_normalize_svg_tagname.argtypes = [_Ptr(StringPiece)]
_normalize_svg_tagname.restype = ctypes.c_char_p
# Frees an Output and everything it owns; must be paired with each parse.
_destroy_output = _dll.gumbo_destroy_output
_destroy_output.argtypes = [_Ptr(Options), _Ptr(Output)]
_destroy_output.restype = None
# Canonical lowercase name for a Tag enum value.
_tagname = _dll.gumbo_normalized_tagname
_tagname.argtypes = [Tag]
_tagname.restype = ctypes.c_char_p
# Public API of this module.
__all__ = ['StringPiece', 'SourcePosition', 'AttributeNamespace', 'Attribute',
           'Vector', 'AttributeVector', 'NodeVector', 'QuirksMode', 'Document',
           'Namespace', 'Tag', 'Element', 'Text', 'NodeType', 'Node',
           'Options', 'Output', 'parse']
|
apache-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.