from __future__ import unicode_literals
from __future__ import print_function
from mock_dbus import MockDBusInterface
import unittest
import pyconnman
import mock
import dbus
class ConnTechnologyTest(unittest.TestCase):
def setUp(self):
patcher = mock.patch('dbus.Interface', MockDBusInterface)
patcher.start()
self.addCleanup(patcher.stop)
patcher = mock.patch('dbus.SystemBus')
patched_system_bus = patcher.start()
self.addCleanup(patcher.stop)
mock_system_bus = mock.MagicMock()
patched_system_bus.return_value = mock_system_bus
mock_system_bus.get_object.return_value = dbus.ObjectPath('/')
self.mock_system_bus = mock_system_bus
def test_scan(self):
manager = pyconnman.ConnManager()
technologies = manager.get_technologies()
tech = pyconnman.ConnTechnology(technologies[0][0])
print(repr(tech))
print('=========================================================')
print(tech)
print('=========================================================')
added = 'Added'
removed = 'Removed'
signal = pyconnman.ConnManager.SIGNAL_SERVICES_CHANGED
user = mock.MagicMock()
manager.add_signal_receiver(user.callback_fn, signal, self)
tech.scan()
self.mock_system_bus.add_signal_receiver.assert_called()
cb = self.mock_system_bus.add_signal_receiver.call_args_list[0][0][0]
cb(added, removed)
user.callback_fn.assert_called_with(signal, self, added, removed)
user.callback_fn.assert_called()
manager.remove_signal_receiver(signal)
self.mock_system_bus.remove_signal_receiver.assert_called()
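# Standard unittest entry point, added so the module can be run directly
# (not part of the original snippet):
if __name__ == '__main__':
    unittest.main()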
# ---------------------------------------------------------------------------
import mock
from oslo_serialization import jsonutils
import webob
from jacket import context
from jacket import db
from jacket.objects import storage
from jacket.storage import test
from jacket.tests.storage.unit.api import fakes
from jacket.tests.storage.unit import utils
class VolumeUnmanageTest(test.TestCase):
"""Test cases for storage/api/contrib/volume_unmanage.py
The API extension adds an action to volumes, "os-unmanage", which will
effectively issue a delete operation on the volume, but with a flag set
that means that a different method will be invoked on the driver, so that
the volume is not actually deleted in the storage backend.
In this set of test cases, we are ensuring that the code correctly parses
the request structure and raises the correct exceptions when things are not
right, and calls down into storage.volume.api.API.delete with the correct
arguments.
"""
def setUp(self):
super(VolumeUnmanageTest, self).setUp()
self.ctxt = context.RequestContext('admin', 'fake_project', True)
api = fakes.router.APIRouter()
self.app = fakes.urlmap.URLMap()
self.app['/v2'] = api
def _get_resp(self, volume_id):
"""Helper to build an os-unmanage req for the specified volume_id."""
req = webob.Request.blank('/v2/%s/volumes/%s/action' %
(self.ctxt.project_id, volume_id))
req.method = 'POST'
req.headers['Content-Type'] = 'application/json'
req.environ['storage.context'] = self.ctxt
body = {'os-unmanage': ''}
req.body = jsonutils.dump_as_bytes(body)
res = req.get_response(self.app)
return res
@mock.patch('storage.volume.rpcapi.VolumeAPI.delete_volume')
def test_unmanage_volume_ok(self, mock_rpcapi):
"""Return success for valid and unattached volume."""
vol = utils.create_volume(self.ctxt)
res = self._get_resp(vol.id)
self.assertEqual(202, res.status_int, res)
mock_rpcapi.assert_called_once_with(self.ctxt, mock.ANY, True, False)
vol = storage.volume.Volume.get_by_id(self.ctxt, vol.id)
self.assertEqual('deleting', vol.status)
storage.volume_destroy(self.ctxt, vol.id)
def test_unmanage_volume_bad_volume_id(self):
"""Return 404 if the volume does not exist."""
res = self._get_resp('nonexistent-volume-id')
self.assertEqual(404, res.status_int, res)
def test_unmanage_volume_attached(self):
"""Return 400 if the volume exists but is attached."""
vol = utils.create_volume(self.ctxt, status='in-use',
attach_status='attached')
res = self._get_resp(vol.id)
self.assertEqual(400, res.status_int, res)
storage.volume_destroy(self.ctxt, vol.id)
def test_unmanage_volume_with_snapshots(self):
"""Return 400 if the volume exists but has snapshots."""
vol = utils.create_volume(self.ctxt)
snap = utils.create_snapshot(self.ctxt, vol.id)
res = self._get_resp(vol.id)
self.assertEqual(400, res.status_int, res)
storage.volume_destroy(self.ctxt, vol.id)
storage.snapshot_destroy(self.ctxt, snap.id)
# ---------------------------------------------------------------------------
"""This module create the GUI
"""
import sys
import os
import PyQt4.QtGui as QtGui
import PyQt4.QtCore as QtCore
import pyfinance.files as pyfiles
import pyfinance.pyf as pyf
class SQLiteAcctChooser(QtGui.QWidget):
"""SQLite account chooser
"""
def __init__(self, parent=None):
super(SQLiteAcctChooser, self).__init__(parent)
self.parent = parent
label = QtGui.QLabel("SQLite Account")
self.combo = QtGui.QComboBox()
grid = QtGui.QHBoxLayout()
grid.addWidget(label)
grid.addWidget(self.combo)
self.setLayout(grid)
self.connect(self.combo,
QtCore.SIGNAL("currentIndexChanged(const QString&)"),
self.parent.load)
class SQLiteChooser(QtGui.QWidget):
"""SQLite file chooser
"""
def __init__(self, acct, parent=None):
super(SQLiteChooser, self).__init__(parent)
self.acct = acct
label = QtGui.QLabel("SQLite DB")
self.filename = QtGui.QLineEdit()
button = QtGui.QPushButton("Choose", self)
grid = QtGui.QHBoxLayout()
grid.addWidget(label)
grid.addWidget(self.filename)
grid.addWidget(button)
self.setLayout(grid)
self.connect(button, QtCore.SIGNAL("clicked()"), self.choose)
def choose(self):
"""Choose SQLite file
"""
self.filename.setText(\
QtGui.QFileDialog.getOpenFileName(self, 'Open file',
os.getcwd()))
self.load()
def load(self):
parser = pyfiles.dataParser()
acct = parser.getData("SQLite:MoneyDroid:"+\
str(self.filename.text()))
if isinstance(acct, pyf.accounts):
self.acct.combo.clear()
self.acct.combo.addItem("All")
for (a,i) in acct.accounts.items():
self.acct.combo.addItem(a)
class AcctChooser(QtGui.QWidget):
"""Account file chooser
"""
def __init__(self, parent=None):
super(AcctChooser, self).__init__(parent)
self.parent = parent
self.filename = QtGui.QLineEdit()
button = QtGui.QPushButton("Choose", self)
grid = QtGui.QHBoxLayout()
grid.addWidget(self.filename)
grid.addWidget(button)
self.setLayout(grid)
self.connect(button, QtCore.SIGNAL("clicked()"), self.choose)
def choose(self):
"""Choose account file
"""
self.filename.setText(\
QtGui.QFileDialog.getOpenFileName(self, 'Open file',
os.getcwd()))
self.parent.load()
class LeftPanel(QtGui.QWidget):
"""Left panel
"""
def __init__(self, parent=None):
"""Initialize
"""
super(LeftPanel, self).__init__(parent)
self.sqliteacct = SQLiteAcctChooser(self)
self.sqlitechooser = SQLiteChooser(self.sqliteacct, self)
self.masterlist=QtGui.QListWidget(self)
grid = QtGui.QVBoxLayout()
grid.addWidget(self.sqlitechooser)
grid.addWidget(self.sqliteacct)
grid.addWidget(self.masterlist)
self.setLayout(grid)
def account(self):
"""Get account name
"""
sText = str(self.sqliteacct.combo.itemText(\
self.sqliteacct.combo.currentIndex()))
acctname = "SQLite:MoneyDroid:"+\
str(self.sqlitechooser.filename.text())
if (sText != "All"):
acctname += ":"+sText
return acctname
def load(self):
"""Load account
"""
parser = pyfiles.dataParser()
acct = parser.getData(self.account())
self.masterlist.clear()
for t in acct.transactions:
self.masterlist.addItem(str(t))
self.masterlist.sortItems()
class RightPanel(QtGui.QWidget):
"""Right panel
"""
def __init__(self, parent=None):
"""Initialize
"""
super(RightPanel, self).__init__(parent)
self.combo = QtGui.QComboBox()
self.combo.addItem("---Choose Account Type---")
self.combo.addItem("QFX:CitiCard")
self.combo.addItem("QFX:Chase")
self.combo.addItem("CSV:CitiCard")
self.combo.addItem("CSV:Chase")
self.acctchooser = AcctChooser(self)
self.accountlist=QtGui.QListWidget(self)
grid = QtGui.QVBoxLayout()
grid.addWidget(self.combo)
grid.addWidget(self.acctchooser)
grid.addWidget(self.accountlist)
self.setLayout(grid)
def account(self):
"""Get account name
"""
return str(self.combo.itemText(\
self.combo.currentIndex()))+":"+\
str(self.acctchooser.filename.text())
def load(self):
"""Load account
"""
parser = pyfiles.dataParser()
acct = parser.getData(self.account())
if acct is not None:
self.accountlist.clear()
for t in acct.transactions:
self.accountlist.addItem(str(t))
self.accountlist.sortItems()
class MainPanel(QtGui.QWidget):
"""Main panel
"""
def __init__(self, parent=None):
"""Initialize
"""
super(MainPanel, self).__init__(parent)
self.left = LeftPanel(self)
self.right = RightPanel(self)
grid = QtGui.QHBoxLayout()
grid.addWidget(self.left)
grid.addWidget(self.right)
self.setLayout(grid)
def get_left_account(self):
"""Get account on left
"""
return self.left.account()
def get_right_account(self):
"""Get account on right
"""
return self.right.account()
class BottomPanel(QtGui.QWidget):
"""Main panel
"""
def __init__(self, parent=None):
"""Initialize
"""
super(BottomPanel, self).__init__(parent)
self.parent = parent
quitbutton = QtGui.QPushButton("Quit", self)
checkbutton = QtGui.QPushButton("Cross-check", self)
matchbutton = QtGui.QPushButton("Match and Check-off", self)
grid = QtGui.QHBoxLayout()
grid.addWidget(checkbutton)
grid.addWidget(matchbutton)
grid.addWidget(quitbutton)
self.setLayout(grid)
self.connect(quitbutton, QtCore.SIGNAL("clicked()"), QtGui.qApp,
QtCore.SLOT('quit()'))
self.connect(checkbutton, QtCore.SIGNAL("clicked()"),
self.parent.crosscheck)
self.connect(matchbutton, QtCore.SIGNAL("clicked()"),
self.parent.match)
class MainWindow(QtGui.QDialog):
"""Main window of GUI application
"""
def __init__(self, parent=None):
"""Initialize
"""
super(MainWindow, self).__init__(parent)
self.resize(1000,600)
self.setWindowTitle("PyFinance")
self.mainpanel = MainPanel(self)
bottompanel = BottomPanel(self)
grid = QtGui.QVBoxLayout()
grid.addWidget(self.mainpanel)
grid.addWidget(bottompanel)
self.setLayout(grid)
def crosscheck(self):
"""Cross check accounts
"""
parser = pyfiles.dataParser()
checkagainst = parser.getData(self.mainpanel.get_left_account())
tocheck = parser.getData(self.mainpanel.get_right_account())
(ok, problems, leftover) = tocheck.crosscheck(checkagainst)
self.mainpanel.left.masterlist.clear()
for t in leftover:
self.mainpanel.left.masterlist.addItem(str(t))
self.mainpanel.left.masterlist.sortItems()
self.mainpanel.right.accountlist.clear()
for (p, pdesc) in problems.items():
self.mainpanel.right.accountlist.addItem(str(p))
self.mainpanel.right.accountlist.sortItems()
def match(self):
"""Declare entries as matched
"""
left = self.mainpanel.left.masterlist
right = self.mainpanel.right.accountlist
if ((len(left.selectedItems()) == 1) and
(len(right.selectedItems()) == 1)):
left.takeItem(left.currentRow())
right.takeItem(right.currentRow())
class GUI(QtGui.QApplication):
"""Main Qt Application
@author ykk
@date Dec 2010
"""
def __init__(self, argv):
"""Initailize
"""
QtGui.QApplication.__init__(self,argv)
def run(self):
"""Execute application
"""
main = MainWindow()
main.show()
sys.exit(self.exec_())
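# Minimal entry point so the GUI can be launched as a script (a usage
# sketch added here, not part of the original module):
if __name__ == '__main__':
    GUI(sys.argv).run()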
# ---------------------------------------------------------------------------
"""Provides service object for T1."""
from __future__ import absolute_import, division
try:
    from collections.abc import Iterator
except ImportError:  # Python 2
    from collections import Iterator
from types import GeneratorType
from .models import ACL
from .t1mappings import SINGULAR, CLASSES, CHILD_PATHS, MODEL_PATHS
from .connection import Connection
from .entity import Entity
from .errors import ClientError
from .reports import Report
from .utils import filters
from .vendor import six
def _detect_auth_method(username, password, session_id,
api_key, client_secret, token):
if api_key is None:
raise ClientError('API Key is required!')
if token is not None:
return 'oauth2'
if username is not None and password is not None:
return 'cookie'
if session_id is not None:
return 'cookie'
if client_secret is not None:
return 'oauth2'
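# Note: if only an API key is supplied, none of the branches above match and
# the function implicitly returns None; T1.authenticate later rejects that.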
class T1(Connection):
"""Service class for ALL other T1 entities, e.g.: t1 = T1(auth)
Accepts authentication parameters. Supports get methods to retrieve
collections or a single entity, and a find method for inner-join-like queries.
"""
def __init__(self,
username=None,
password=None,
api_key=None,
client_secret=None,
auth_method=None,
session_id=None,
environment='production',
api_base=None,
json=False,
redirect_uri=None,
token=None,
token_updater=None,
**kwargs):
"""Set up session for main service object.
:param username: str T1 Username
:param password: str T1 Password
:param api_key: str API Key approved in Developer Portal
:param client_secret: str Client Secret for use with OAuth2
:param session_id: str API-provided prior session cookie.
For instance, if you have a session ID provided by browser cookie,
you can use that to authenticate a server-side connection.
:param auth_method: enum('cookie', 'oauth2') method for authentication.
:param environment: str to look up API Base to use. e.g. 'production'
for https://api.mediamath.com/api/v2.0
:param api_base: str API domain. should be the qualified domain name
without trailing slash. e.g. "api.mediamath.com".
:param json: bool use JSON header for serialization. Currently
for internal experimentation, JSON will become the default in a
future version.
:param redirect_uri: str redirect URI for OAuth2 authentication.
Must match the redirect URI set in the application settings.
:param token: dict OAuth2 token as generated by OAuth2Session.
If you have a web app, you can store the token in the browser
session, and then use that to generate a new T1 session.
See the documentation for examples.
:param token_updater: function with one argument, token, to be used to
update your token database on automatic token refresh. If not
set, a TokenUpdated warning will be raised when a token
has been refreshed. This warning will carry the token
in its token argument.
"""
self.auth_params = {}
if auth_method is None:
auth_method = _detect_auth_method(username, password, session_id,
api_key, client_secret, token)
self.auth_params['method'] = auth_method
self.auth_params['api_key'] = api_key
if auth_method == 'oauth2':
self.auth_params.update({
'client_secret': client_secret,
'redirect_uri': redirect_uri,
'token': token,
'token_updater': token_updater,
})
else:
self.auth_params.update({
'username': username,
'password': password,
})
super(T1, self).__init__(environment, api_base=api_base,
json=json,
auth_params=self.auth_params,
_create_session=True, **kwargs)
self._authenticated = False
self._auth = (username, password, api_key, client_secret)
self.environment = environment
self.json = json
self.api_key = api_key
if auth_method != 'oauth2' and auth_method != 'delayed':
self.authenticate(auth_method, session_id=session_id)
def authenticate(self, auth_method, **kwargs):
"""Authenticate using method given."""
if auth_method == 'cookie':
session_id = kwargs.get('session_id')
if session_id is not None:
return super(T1, self)._auth_session_id(
session_id,
self.auth_params['api_key']
)
return super(T1, self)._auth_cookie(self.auth_params['username'],
self.auth_params['password'],
self.auth_params['api_key'])
elif auth_method == 'basic':
raise ClientError('basic authentication is no longer supported - use cookie or oauth')
else:
raise AttributeError('No authentication method for {0!r}'.format(auth_method))
def new(self, collection, report=None, properties=None, *args, **kwargs):
"""Returns a fresh class instance for a new entity.
ac = t1.new('atomic_creative') OR
ac = t1.new('atomic_creatives') OR even
ac = t1.new(terminalone.models.AtomicCreative)
"""
if isinstance(collection, type) and issubclass(collection, Entity):
ret = collection
elif '_acl' in collection:
ret = ACL
else:
try:
ret = SINGULAR[collection]
except KeyError:
ret = CLASSES[collection]
if ret == Report:
return ret(self.session,
report=report,
environment=self.environment,
api_base=self.api_base,
**kwargs)
return ret(self.session,
environment=self.environment,
api_base=self.api_base,
properties=properties,
json=self.json,
*args, **kwargs)
def _return_class(self, ent_dict,
child=None, child_id=None, entity_id=None, collection=None):
"""Generate item for new class instantiation"""
ent_type = ent_dict.get('_type', ent_dict.get('type'))
relations = ent_dict.get('relations')
if child is not None:
# Child can be either a target dimension (with an ID) or
# a bare child, like concepts or permissions. These should not
# have an ID passed in.anyway i'm at
if child_id is not None:
ent_dict['id'] = child_id
ent_dict['parent_id'] = entity_id
ent_dict['parent'] = collection
if relations is not None:
for rel_name, data in six.iteritems(relations):
if isinstance(data, list):
ent_dict[rel_name] = []
for cls in data:
ent_dict[rel_name].append(self._return_class(cls))
else:
ent_dict[rel_name] = self._return_class(data)
ent_dict.pop('relations', None)
return self.new(ent_type, properties=ent_dict)
def _gen_classes(self, entities, child, child_id, entity_id, collection):
"""Iterate over entities, returning objects for each"""
for entity in entities:
e = self._return_class(entity, child, child_id, entity_id, collection)
yield e
@staticmethod
def _construct_params(entity, **kwargs):
"""Construct URL params"""
if entity is not None:
params = {}
else:
params = {'page_limit': kwargs.get('page_limit'),
'page_offset': kwargs.get('page_offset'),
'sort_by': kwargs.get('sort_by'),
'parent': kwargs.get('parent'),
'q': kwargs.get('query'), }
# include can be either a string (e.g. 'advertiser'),
# list of *non-traversable* relations (e.g. ['vendor', 'concept']),
# or a list of lists/strings of traversable elements, e.g.
# [['advertiser', 'agency'], 'vendor'],
# [['advertiser', 'agency'], ['vendor', 'vendor_domains']]
# If we're given a string, leave it as-is
# If we're given a list, for each element:
# -> If the item is a string, leave it as-is
# -> If the item is a list, comma-join it
# Examples from above:
# include='advertiser' -> with=advertiser
# include=['vendor', 'concept'] -> with=vendor&with=concept
# include=[['advertiser', 'agency'], 'vendor']
# -> with=advertiser,agency&with=vendor
# include=[['advertiser', 'agency'], ['vendor', 'vendor_domains']]
# -> with=advertiser,agency&with=vendor,vendor_domains
include = kwargs.get('include')
if include:
if isinstance(include, list):
for i, item in enumerate(include):
if isinstance(item, list):
include[i] = ','.join(item)
params['with'] = include
full = kwargs.get('full')
if isinstance(full, list):
params['full'] = ','.join(full)
elif full is True:
params['full'] = '*'
elif full is not None:
params['full'] = full
params.update(kwargs.get('other_params', {}))
return params
@staticmethod
def _construct_url(collection, entity, child, limit):
"""Construct URL"""
url = [collection, ]
if entity is not None:
url.append(str(entity)) # str so that we can use join
child_id = None
if child is not None:
try:
child_path = CHILD_PATHS[child.lower()]
except AttributeError:
raise ClientError("`child` must be a string of the entity to retrieve")
except KeyError:
raise ClientError("`child` must correspond to an entity in T1")
# child_path should always be a tuple of (path, id). For children
# that do not have IDs, like concepts and permissions, ID is 0
if child_path[1]:
child_id = child_path[1]
url.append(child_path[0])
# All values need to be strings for join
url.append(str(child_path[1]))
else:
url.append(child_path[0])
if isinstance(limit, dict):
if len(limit) != 1:
raise ClientError('Limit must consist of one parent collection '
'(or chained parent collection) and a single '
'value for it (e.g. {"advertiser": 1}, or '
'{"advertiser.agency": 2)')
url.extend(['limit',
'{0!s}={1:d}'.format(*next(six.iteritems(limit)))])
return '/'.join(url), child_id
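# Worked example (derived from the code above): with collection='campaigns',
# entity=None, child=None and limit={'advertiser': 1}, the URL pieces become
# ['campaigns', 'limit', 'advertiser=1'], so the method returns
# ('campaigns/limit/advertiser=1', None).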
def get(self,
collection,
entity=None,
child=None,
limit=None,
include=None,
full=None,
page_limit=100,
page_offset=0,
sort_by='id',
get_all=False,
parent=None,
query=None,
other_params={},
count=False,
_url=None,
_params=None):
"""Main retrieval method for T1 Entities.
:param collection: str T1 collection, e.g. "advertisers", "agencies"
:param entity: int ID of entity being retrieved from T1
:param child: str child, e.g. "dma", "acl"
:param limit: dict[str]int query for relation entity, e.g. {"advertiser": 123456}
:param include: str/list of relations to include, e.g. "advertiser",
["campaign", "advertiser"]
:param full: str/bool when retrieving multiple entities, specifies which
types to return the full record for.
e.g. "campaign", True, ["campaign", "advertiser"]
:param page_limit: int number of entities to return per query, 100 max
:param page_offset: int offset for results returned.
:param sort_by: str sort order. Default "id". e.g. "-id", "name"
:param get_all: bool whether to retrieve all results for a query or just a single page
:param parent: only return entities with this parent id
:param query: str search parameter. Invoked by `find`
:param other_params: optional dict of additional service specific params
:param count: bool return the number of entities as a second parameter
:param _url: str shortcut to bypass URL determination.
:param _params: dict query string parameters to bypass query determination
:return: If:
Collection is requested => generator over collection of entity objects
Entity ID is provided => Entity object
`count` is True => number of entities as second return val
:raise ClientError: if page_limit > 100
"""
if isinstance(collection, type) and issubclass(collection, Entity):
collection = MODEL_PATHS[collection]
if page_limit > 100:
raise ClientError('page_limit parameter must not exceed 100')
child_id = None
if _url is None:
_url, child_id = self._construct_url(collection, entity, child, limit)
if get_all:
gen = self._get_all(collection,
entity=entity,
child=child,
include=include,
full=full,
sort_by=sort_by,
parent=parent,
query=query,
count=count,
other_params=other_params,
_params=_params,
_url=_url)
if count:
ent_count = next(gen)
return gen, ent_count
else:
return gen
if _params is None:
_params = self._construct_params(entity,
include=include,
full=full,
page_limit=page_limit,
page_offset=page_offset,
sort_by=sort_by,
parent=parent,
query=query,
other_params=other_params)
entities, ent_count = super(T1, self)._get(self._get_service_path(collection), _url, params=_params)
if not isinstance(entities, GeneratorType) and not isinstance(entities, Iterator):
return self._return_class(entities, child, child_id, entity, collection)
ent_gen = self._gen_classes(entities, child, child_id, entity, collection)
if count:
return ent_gen, ent_count
else:
return ent_gen
def get_all(self, collection, **kwargs):
"""Retrieves all entities in a collection. Has same signature as .get."""
kwargs.pop('get_all', None)
return self.get(collection, get_all=True, **kwargs)
def _get_all(self, collection, **kwargs):
"""Construct iterator to get all entities in a collection.
Pages over 100 entities.
This method should not be called directly: it's called from T1.get.
"""
params = {
'page_limit': 1,
'parent': kwargs.get('parent'),
'q': kwargs.get('query'),
}
if kwargs.get('other_params'):
params.update(kwargs.get('other_params'))
_, num_recs = super(T1, self)\
._get(self._get_service_path(collection), kwargs['_url'], params=params)
if kwargs.get('count'):
yield num_recs
for page_offset in six.moves.range(0, num_recs, 100):
# get_all=False, otherwise we could go in a loop
gen = self.get(collection,
_url=kwargs['_url'],
entity=kwargs.get('entity'),
include=kwargs.get('include'),
full=kwargs.get('full'),
page_offset=page_offset,
sort_by=kwargs.get('sort_by'),
parent=kwargs.get('parent'),
query=kwargs.get('query'),
other_params=kwargs.get('other_params'),
get_all=False)
if not isinstance(gen, GeneratorType):
gen = iter([gen])
for item in gen:
yield item
# def get_sub(self, collection, entity, sub, *args):
# pass
@staticmethod
def _parse_candidate(candidate):
"""Parse filter candidates so that you can use None, True, False."""
val = candidate
if candidate is None:
val = "null"
elif candidate is True:
val = "1"
elif candidate is False:
val = "0"
return val
def find(self, collection, variable, operator, candidates, **kwargs):
"""Find objects based on query criteria. Helper method for T1.get,
with same return values.
:param collection: str T1 collection, e.g. "advertisers", "agencies"
:param variable: str Field to query for, e.g. "name". If operator is
terminalone.filters.IN, this is ignored and None can be provided
:param operator: str Arithmetic operator, e.g. "=:". Package provides
helper object filters to help, e.g. terminalone.filters.IN or
terminalone.filters.CASE_INS_STRING
:param candidates: str/int/list values to search for. list only if
operator is IN.
:param kwargs: additional keyword args to pass on to T1.get. See that
method's signature for details.
:return: generator over collection of objects matching query
:raise TypeError: if operator is IN and candidates not provided as list
"""
if operator == filters.IN:
if not isinstance(candidates, list):
raise TypeError('`candidates` must be list of entities for `IN`')
query = '(' + ','.join(str(c) for c in candidates) + ')'
else:
query = operator.join([variable, self._parse_candidate(candidates)])
return self.get(collection, query=query, **kwargs)
T1Service = T1
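# Usage sketch (credentials and the wildcard query are placeholders;
# attribute access on returned entities is assumed from the Entity model):
#   t1 = T1(username='user', password='pass', api_key='key')
#   for adv in t1.find('advertisers', 'name', filters.CASE_INS_STRING,
#                      'acme*'):
#       print(adv.id, adv.name)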
# ---------------------------------------------------------------------------
import re
import time
import binascii
from collections import defaultdict
from . import wcwidth
from .displaying import colorme, FormattedValue, DEFAULT_VALUE_COLORS
from cql import cqltypes
unicode_controlchars_re = re.compile(r'[\x00-\x31\x7f-\xa0]')
controlchars_re = re.compile(r'[\x00-\x31\x7f-\xff]')
def _show_control_chars(match):
txt = repr(match.group(0))
if txt.startswith('u'):
txt = txt[2:-1]
else:
txt = txt[1:-1]
return txt
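# Example: for a match on '\x1b', repr() gives "'\\x1b'" (with a leading u
# for unicode strings); stripping the prefix and quotes leaves '\\x1b',
# which is what gets displayed in place of the control character.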
bits_to_turn_red_re = re.compile(r'\\([^uUx]|u[0-9a-fA-F]{4}|x[0-9a-fA-F]{2}|U[0-9a-fA-F]{8})')
def _make_turn_bits_red_f(color1, color2):
def _turn_bits_red(match):
txt = match.group(0)
if txt == '\\\\':
return '\\'
return color1 + txt + color2
return _turn_bits_red
default_null_placeholder = 'null'
default_time_format = ''
default_float_precision = 3
default_colormap = DEFAULT_VALUE_COLORS
empty_colormap = defaultdict(lambda: '')
def format_by_type(cqltype, val, encoding, colormap=None, addcolor=False,
nullval=None, time_format=None, float_precision=None):
if nullval is None:
nullval = default_null_placeholder
if val is None:
return colorme(nullval, colormap, 'error')
if addcolor is False:
colormap = empty_colormap
elif colormap is None:
colormap = default_colormap
if time_format is None:
time_format = default_time_format
if float_precision is None:
float_precision = default_float_precision
return format_value(cqltype, val, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval)
def color_text(bval, colormap, displaywidth=None):
# note that here, we render natural backslashes as just backslashes,
# in the same color as surrounding text, when using color. When not
# using color, we need to double up the backslashes so it's not
# ambiguous. This introduces the unique difficulty of having different
# display widths for the colored and non-colored versions. To avoid
# adding the smarts to handle that in to FormattedValue, we just
# make an explicit check to see if a null colormap is being used or
# not.
if displaywidth is None:
displaywidth = len(bval)
tbr = _make_turn_bits_red_f(colormap['blob'], colormap['text'])
coloredval = colormap['text'] + bits_to_turn_red_re.sub(tbr, bval) + colormap['reset']
if colormap['text']:
displaywidth -= bval.count(r'\\')
return FormattedValue(bval, coloredval, displaywidth)
def format_value_default(val, colormap, **_):
val = str(val)
escapedval = val.replace('\\', '\\\\')
bval = controlchars_re.sub(_show_control_chars, escapedval)
return color_text(bval, colormap)
_formatters = {}
def format_value(cqltype, val, **kwargs):
if val == '' and not cqltype.empty_binary_ok:
return format_value_default(val, **kwargs)
formatter = _formatters.get(cqltype.typename, format_value_default)
return formatter(val, subtypes=cqltype.subtypes, **kwargs)
def formatter_for(typname):
def registrator(f):
_formatters[typname] = f
return f
return registrator
@formatter_for('blob')
def format_value_blob(val, colormap, **_):
bval = '0x' + ''.join('%02x' % ord(c) for c in val)
return colorme(bval, colormap, 'blob')
def format_python_formatted_type(val, colormap, color):
bval = str(val)
return colorme(bval, colormap, color)
@formatter_for('decimal')
def format_value_decimal(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'decimal')
@formatter_for('uuid')
def format_value_uuid(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'uuid')
formatter_for('timeuuid')(format_value_uuid)
@formatter_for('inet')
def formatter_value_inet(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'inet')
@formatter_for('boolean')
def format_value_boolean(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'boolean')
def format_floating_point_type(val, colormap, float_precision, **_):
bval = '%.*g' % (float_precision, val)
return colorme(bval, colormap, 'float')
formatter_for('float')(format_floating_point_type)
formatter_for('double')(format_floating_point_type)
def format_integer_type(val, colormap, **_):
# base-10 only for now; support others?
bval = str(val)
return colorme(bval, colormap, 'int')
formatter_for('bigint')(format_integer_type)
formatter_for('int')(format_integer_type)
formatter_for('varint')(format_integer_type)
formatter_for('counter')(format_integer_type)
@formatter_for('timestamp')
def format_value_timestamp(val, colormap, time_format, **_):
bval = strftime(time_format, val)
return colorme(bval, colormap, 'timestamp')
def strftime(time_format, seconds):
local = time.localtime(seconds)
formatted = time.strftime(time_format, local)
if local.tm_isdst != 0:
offset = -time.altzone
else:
offset = -time.timezone
if formatted[-4:] != '0000' or time_format[-2:] != '%z' or offset == 0:
return formatted
# deal with %z on platforms where it isn't supported. see CASSANDRA-4746.
if offset < 0:
sign = '-'
else:
sign = '+'
hours, minutes = divmod(abs(offset) // 60, 60)
return formatted[:-5] + sign + '{0:0=2}{1:0=2}'.format(hours, minutes)
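# Example: on a platform where %z always renders as '+0000', a local offset
# of -5 hours (offset == -18000) turns '2013-01-01 12:00:00+0000' into
# '2013-01-01 12:00:00-0500' via the branch above.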
@formatter_for('text')
def format_value_text(val, encoding, colormap, **_):
escapedval = val.replace(u'\\', u'\\\\')
escapedval = unicode_controlchars_re.sub(_show_control_chars, escapedval)
bval = escapedval.encode(encoding, 'backslashreplace')
displaywidth = wcwidth.wcswidth(bval.decode(encoding))
return color_text(bval, colormap, displaywidth)
formatter_for('varchar')(format_value_text)
def format_simple_collection(subtype, val, lbracket, rbracket, encoding,
colormap, time_format, float_precision, nullval):
subs = [format_value(subtype, sval, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval)
for sval in val]
bval = lbracket + ', '.join(sval.strval for sval in subs) + rbracket
lb, sep, rb = [colormap['collection'] + s + colormap['reset']
for s in (lbracket, ', ', rbracket)]
coloredval = lb + sep.join(sval.coloredval for sval in subs) + rb
displaywidth = 2 * len(subs) + sum(sval.displaywidth for sval in subs)
return FormattedValue(bval, coloredval, displaywidth)
@formatter_for('list')
def format_value_list(val, encoding, colormap, time_format, float_precision, subtypes, nullval, **_):
return format_simple_collection(subtypes[0], val, '[', ']', encoding, colormap,
time_format, float_precision, nullval)
@formatter_for('set')
def format_value_set(val, encoding, colormap, time_format, float_precision, subtypes, nullval, **_):
return format_simple_collection(subtypes[0], sorted(val), '{', '}', encoding, colormap,
time_format, float_precision, nullval)
@formatter_for('map')
def format_value_map(val, encoding, colormap, time_format, float_precision, subtypes, nullval, **_):
def subformat(v, subtype):
return format_value(subtype, v, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval)
subkeytype, subvaltype = subtypes
subs = [(subformat(k, subkeytype), subformat(v, subvaltype)) for (k, v) in sorted(val.items())]
bval = '{' + ', '.join(k.strval + ': ' + v.strval for (k, v) in subs) + '}'
lb, comma, colon, rb = [colormap['collection'] + s + colormap['reset']
for s in ('{', ', ', ': ', '}')]
coloredval = lb \
+ comma.join(k.coloredval + colon + v.coloredval for (k, v) in subs) \
+ rb
displaywidth = 4 * len(subs) + sum(k.displaywidth + v.displaywidth for (k, v) in subs)
return FormattedValue(bval, coloredval, displaywidth)
# ---------------------------------------------------------------------------
from __future__ import print_function
import sys
import json
def banner():
ban = '====' * 30
print("{}\nSAMPLE INP:\n{}\n{}".format(ban,ban,open(ip, 'r').read()))
print("{}\nSAMPLE OUT:\n{}\n{}".format(ban,ban,open(op, 'r').read()))
print("{}\nSTART:\n{}".format(ban,ban))
sys.stdin = open(ip, 'r')
cnt = -1
def comp(inp,ln):
outl = output_arr[ln]
if str(inp) != outl:
raise Exception("Error input output: line {}, file: {}\ngot: {} expected: {}".format(ln,op,inp,outl))
ip = "./challenge_sample_input"
op = "./challenge_sample_output"
output_arr = map(str,open(op,'r').read().split('\n'))
banner()
def wrapper(f):
def fun(l):
r = []
for n in l:
c = n[::-1][:10][::-1]
tarr = ['+91',c[:5],c[5:10]]
r.append(" ".join(tarr))
return f(r)
return fun
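# n[::-1][:10][::-1] is just n[-10:]: keep the last ten digits, then format
# as '+91 XXXXX XXXXX'. Example: '919875641230' -> '+91 98756 41230'.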
@wrapper
def sort_phone(l):
print('\n'.join(sorted(l)))
if __name__ == '__main__':
l = [raw_input() for _ in range(int(input()))]
sort_phone(l)
# ---------------------------------------------------------------------------
from neutron_lib.plugins import constants
from neutron_lib.plugins import directory
from gbpservice.common import utils
from gbpservice.neutron.services.grouppolicy.drivers import resource_mapping
from gbpservice.neutron.services.servicechain.plugins.ncp import model
def get_gbp_plugin():
return directory.get_plugin("GROUP_POLICY")
def get_node_driver_context(sc_plugin, context, sc_instance,
current_node, original_node=None,
management_group=None, service_targets=None):
admin_context = utils.admin_context(context)
specs = sc_plugin.get_servicechain_specs(
admin_context, filters={'id': sc_instance['servicechain_specs']})
position = _calculate_node_position(specs, current_node['id'])
provider, _ = _get_ptg_or_ep(
admin_context, sc_instance['provider_ptg_id'])
consumer, is_consumer_external = _get_ptg_or_ep(
admin_context, sc_instance['consumer_ptg_id'])
management, _ = _get_ptg_or_ep(admin_context,
sc_instance['management_ptg_id'])
classifier = get_gbp_plugin().get_policy_classifier(
admin_context, sc_instance['classifier_id'])
current_profile = sc_plugin.get_service_profile(
admin_context, current_node['service_profile_id'])
original_profile = sc_plugin.get_service_profile(
admin_context,
original_node['service_profile_id']) if original_node else None
if not service_targets:
service_targets = model.get_service_targets(
admin_context.session, servicechain_instance_id=sc_instance['id'],
position=position, servicechain_node_id=current_node['id'])
return NodeDriverContext(sc_plugin=sc_plugin,
context=context,
service_chain_instance=sc_instance,
service_chain_specs=specs,
current_service_chain_node=current_node,
current_service_profile=current_profile,
provider_group=provider,
consumer_group=consumer,
management_group=management,
original_service_chain_node=original_node,
original_service_profile=original_profile,
service_targets=service_targets,
position=position,
classifier=classifier,
is_consumer_external=is_consumer_external)
def _get_ptg_or_ep(context, group_id):
if group_id == resource_mapping.SCI_CONSUMER_NOT_AVAILABLE:
return None, False
group = None
is_group_external = False
# skipping policy target group status call to avoid loop while
# getting servicechain instance status
fields = ['consumed_policy_rule_sets', 'description',
'enforce_service_chains', 'id', 'l2_policy_id', 'name',
'network_service_policy_id', 'policy_targets',
'provided_policy_rule_sets', 'proxied_group_id',
'proxy_group_id', 'proxy_type', 'service_management', 'shared',
'subnets', 'tenant_id']
if group_id:
groups = get_gbp_plugin().get_policy_target_groups(
context, filters={'id': [group_id]}, fields=fields)
if not groups:
groups = get_gbp_plugin().get_external_policies(
context, filters={'id': [group_id]})
if groups:
is_group_external = True
if groups:
group = groups[0]
return (group, is_group_external)
def _calculate_node_position(specs, node_id):
for spec in specs:
pos = 0
for node in spec['nodes']:
pos += 1
if node_id == node:
return pos
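# Positions are 1-based within a spec; if node_id appears in no spec, the
# loop falls through and the function implicitly returns None.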
class NodeDriverContext(object):
"""Context passed down to NCP Node Drivers."""
def __init__(self, sc_plugin, context, service_chain_instance,
service_chain_specs, current_service_chain_node, position,
current_service_profile, provider_group, consumer_group=None,
management_group=None, original_service_chain_node=None,
original_service_profile=None, service_targets=None,
classifier=None, is_consumer_external=False):
self._gbp_plugin = get_gbp_plugin()
self._sc_plugin = sc_plugin
self._plugin_context = context
self._admin_context = None
self._service_chain_instance = service_chain_instance
self._current_service_chain_node = current_service_chain_node
self._current_service_profile = current_service_profile
self._original_service_chain_node = original_service_chain_node
self._original_service_profile = original_service_profile
self._service_targets = service_targets
self._service_chain_specs = service_chain_specs
self._provider_group = provider_group
self._consumer_group = consumer_group
self._management_group = management_group
self._classifier = classifier
self._is_consumer_external = is_consumer_external
self._relevant_specs = None
self._core_plugin = directory.get_plugin()
self._l3_plugin = directory.get_plugin(constants.L3)
self._position = position
@property
def gbp_plugin(self):
return self._gbp_plugin
@property
def sc_plugin(self):
return self._sc_plugin
@property
def core_plugin(self):
return self._core_plugin
@property
def l3_plugin(self):
return self._l3_plugin
@property
def plugin_context(self):
return self._plugin_context
@property
def plugin_session(self):
return self._plugin_context.session
@property
def session(self):
return self.plugin_session
@property
def admin_context(self):
if not self._admin_context:
self._admin_context = utils.admin_context(self.plugin_context)
return self._admin_context
@property
def admin_session(self):
return self.admin_context.session
@property
def instance(self):
return self._service_chain_instance
@property
def current_node(self):
return self._current_service_chain_node
@property
def current_profile(self):
return self._current_service_profile
@property
def current_position(self):
return self._position
@property
def original_node(self):
return self._original_service_chain_node
@property
def original_profile(self):
return self._original_service_profile
@property
def is_consumer_external(self):
return self._is_consumer_external
@property
def relevant_specs(self):
"""Get specs on the SCI containing this particular Node."""
if not self._relevant_specs:
self._relevant_specs = [x for x in self._service_chain_specs if
self.current_node['id'] in x['nodes']]
return self._relevant_specs
@property
def provider(self):
return self._provider_group
@property
def consumer(self):
return self._consumer_group
@property
def management(self):
return self._management_group
@property
def classifier(self):
return self._classifier
def get_service_targets(self, update=False):
""" Returns the service targets assigned for this service if any.
The result looks like the following:
{
"provider": [pt_uuids],
"consumer": [pt_uuids],
"management": [pt_uuids],
}
"""
if update:
self._service_targets = model.get_service_targets(
self.session, servicechain_instance_id=self.instance['id'],
position=self.current_position,
servicechain_node_id=self.current_node['id'])
return self._service_targets
# ---------------------------------------------------------------------------
import json
from http import HTTPStatus
from twisted.test.proto_helpers import MemoryReactor
import synapse.rest.admin
from synapse.rest.client import login, room
from synapse.server import HomeServer
from synapse.util import Clock
from tests import unittest
class IdentityTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,
login.register_servlets,
]
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
config = self.default_config()
config["enable_3pid_lookup"] = False
self.hs = self.setup_test_homeserver(config=config)
return self.hs
def test_3pid_lookup_disabled(self) -> None:
self.hs.config.registration.enable_3pid_lookup = False
self.register_user("kermit", "monkey")
tok = self.login("kermit", "monkey")
channel = self.make_request(b"POST", "/createRoom", b"{}", access_token=tok)
self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
room_id = channel.json_body["room_id"]
params = {
"id_server": "testis",
"medium": "email",
"address": "test@example.com",
}
request_data = json.dumps(params)
request_url = ("/rooms/%s/invite" % (room_id)).encode("ascii")
channel = self.make_request(
b"POST", request_url, request_data, access_token=tok
)
self.assertEqual(channel.code, HTTPStatus.FORBIDDEN, channel.result)
# ---------------------------------------------------------------------------
"""
Provides functionality to interact with climate devices.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/climate/
"""
from datetime import timedelta
import logging
import functools as ft
import voluptuous as vol
from homeassistant.loader import bind_hass
from homeassistant.helpers.temperature import display_temp as show_temp
from homeassistant.util.temperature import convert as convert_temperature
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_TEMPERATURE, SERVICE_TURN_ON, SERVICE_TURN_OFF,
STATE_ON, STATE_OFF, STATE_UNKNOWN, TEMP_CELSIUS, PRECISION_WHOLE,
PRECISION_TENTHS, )
DEFAULT_MIN_TEMP = 7
DEFAULT_MAX_TEMP = 35
DEFAULT_MIN_HUMIDITY = 30
DEFAULT_MAX_HUMIDITY = 99
DOMAIN = 'climate'
ENTITY_ID_FORMAT = DOMAIN + '.{}'
SCAN_INTERVAL = timedelta(seconds=60)
SERVICE_SET_AWAY_MODE = 'set_away_mode'
SERVICE_SET_AUX_HEAT = 'set_aux_heat'
SERVICE_SET_TEMPERATURE = 'set_temperature'
SERVICE_SET_FAN_MODE = 'set_fan_mode'
SERVICE_SET_HOLD_MODE = 'set_hold_mode'
SERVICE_SET_OPERATION_MODE = 'set_operation_mode'
SERVICE_SET_SWING_MODE = 'set_swing_mode'
SERVICE_SET_HUMIDITY = 'set_humidity'
STATE_HEAT = 'heat'
STATE_COOL = 'cool'
STATE_IDLE = 'idle'
STATE_AUTO = 'auto'
STATE_MANUAL = 'manual'
STATE_DRY = 'dry'
STATE_FAN_ONLY = 'fan_only'
STATE_ECO = 'eco'
STATE_ELECTRIC = 'electric'
STATE_PERFORMANCE = 'performance'
STATE_HIGH_DEMAND = 'high_demand'
STATE_HEAT_PUMP = 'heat_pump'
STATE_GAS = 'gas'
SUPPORT_TARGET_TEMPERATURE = 1
SUPPORT_TARGET_TEMPERATURE_HIGH = 2
SUPPORT_TARGET_TEMPERATURE_LOW = 4
SUPPORT_TARGET_HUMIDITY = 8
SUPPORT_TARGET_HUMIDITY_HIGH = 16
SUPPORT_TARGET_HUMIDITY_LOW = 32
SUPPORT_FAN_MODE = 64
SUPPORT_OPERATION_MODE = 128
SUPPORT_HOLD_MODE = 256
SUPPORT_SWING_MODE = 512
SUPPORT_AWAY_MODE = 1024
SUPPORT_AUX_HEAT = 2048
SUPPORT_ON_OFF = 4096
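# The SUPPORT_* constants form a bitmask: a device advertising target
# temperature plus operation mode reports
# SUPPORT_TARGET_TEMPERATURE | SUPPORT_OPERATION_MODE == 129.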
ATTR_CURRENT_TEMPERATURE = 'current_temperature'
ATTR_MAX_TEMP = 'max_temp'
ATTR_MIN_TEMP = 'min_temp'
ATTR_TARGET_TEMP_HIGH = 'target_temp_high'
ATTR_TARGET_TEMP_LOW = 'target_temp_low'
ATTR_TARGET_TEMP_STEP = 'target_temp_step'
ATTR_AWAY_MODE = 'away_mode'
ATTR_AUX_HEAT = 'aux_heat'
ATTR_FAN_MODE = 'fan_mode'
ATTR_FAN_LIST = 'fan_list'
ATTR_CURRENT_HUMIDITY = 'current_humidity'
ATTR_HUMIDITY = 'humidity'
ATTR_MAX_HUMIDITY = 'max_humidity'
ATTR_MIN_HUMIDITY = 'min_humidity'
ATTR_HOLD_MODE = 'hold_mode'
ATTR_OPERATION_MODE = 'operation_mode'
ATTR_OPERATION_LIST = 'operation_list'
ATTR_SWING_MODE = 'swing_mode'
ATTR_SWING_LIST = 'swing_list'
CONVERTIBLE_ATTRIBUTE = [
ATTR_TEMPERATURE,
ATTR_TARGET_TEMP_LOW,
ATTR_TARGET_TEMP_HIGH,
]
_LOGGER = logging.getLogger(__name__)
ON_OFF_SERVICE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
})
SET_AWAY_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_AWAY_MODE): cv.boolean,
})
SET_AUX_HEAT_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_AUX_HEAT): cv.boolean,
})
SET_TEMPERATURE_SCHEMA = vol.Schema(vol.All(
cv.has_at_least_one_key(
ATTR_TEMPERATURE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW),
{
vol.Exclusive(ATTR_TEMPERATURE, 'temperature'): vol.Coerce(float),
vol.Inclusive(ATTR_TARGET_TEMP_HIGH, 'temperature'): vol.Coerce(float),
vol.Inclusive(ATTR_TARGET_TEMP_LOW, 'temperature'): vol.Coerce(float),
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(ATTR_OPERATION_MODE): cv.string,
}
))
SET_FAN_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_FAN_MODE): cv.string,
})
SET_HOLD_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_HOLD_MODE): cv.string,
})
SET_OPERATION_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_OPERATION_MODE): cv.string,
})
SET_HUMIDITY_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_HUMIDITY): vol.Coerce(float),
})
SET_SWING_MODE_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_SWING_MODE): cv.string,
})
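# Example payload accepted by SET_TEMPERATURE_SCHEMA (values illustrative):
#   {"entity_id": "climate.living_room",
#    "target_temp_low": 20.0, "target_temp_high": 24.0,
#    "operation_mode": "heat"}
# ATTR_TEMPERATURE is mutually exclusive with the high/low pair, and the
# high/low keys must appear together (vol.Exclusive/vol.Inclusive above).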
@bind_hass
def set_away_mode(hass, away_mode, entity_id=None):
"""Turn all or specified climate devices away mode on."""
data = {
ATTR_AWAY_MODE: away_mode
}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_AWAY_MODE, data)
@bind_hass
def set_hold_mode(hass, hold_mode, entity_id=None):
"""Set new hold mode."""
data = {
ATTR_HOLD_MODE: hold_mode
}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_HOLD_MODE, data)
@bind_hass
def set_aux_heat(hass, aux_heat, entity_id=None):
"""Turn all or specified climate devices auxiliary heater on."""
data = {
ATTR_AUX_HEAT: aux_heat
}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_AUX_HEAT, data)
@bind_hass
def set_temperature(hass, temperature=None, entity_id=None,
target_temp_high=None, target_temp_low=None,
operation_mode=None):
"""Set new target temperature."""
kwargs = {
key: value for key, value in [
(ATTR_TEMPERATURE, temperature),
(ATTR_TARGET_TEMP_HIGH, target_temp_high),
(ATTR_TARGET_TEMP_LOW, target_temp_low),
(ATTR_ENTITY_ID, entity_id),
(ATTR_OPERATION_MODE, operation_mode)
] if value is not None
}
_LOGGER.debug("set_temperature start data=%s", kwargs)
hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, kwargs)
@bind_hass
def set_humidity(hass, humidity, entity_id=None):
"""Set new target humidity."""
data = {ATTR_HUMIDITY: humidity}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_HUMIDITY, data)
@bind_hass
def set_fan_mode(hass, fan, entity_id=None):
"""Set all or specified climate devices fan mode on."""
data = {ATTR_FAN_MODE: fan}
if entity_id:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, data)
@bind_hass
def set_operation_mode(hass, operation_mode, entity_id=None):
"""Set new target operation mode."""
data = {ATTR_OPERATION_MODE: operation_mode}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_OPERATION_MODE, data)
@bind_hass
def set_swing_mode(hass, swing_mode, entity_id=None):
"""Set new target swing mode."""
data = {ATTR_SWING_MODE: swing_mode}
if entity_id is not None:
data[ATTR_ENTITY_ID] = entity_id
hass.services.call(DOMAIN, SERVICE_SET_SWING_MODE, data)
async def async_setup(hass, config):
"""Set up climate devices."""
component = hass.data[DOMAIN] = \
EntityComponent(_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
await component.async_setup(config)
component.async_register_entity_service(
SERVICE_SET_AWAY_MODE, SET_AWAY_MODE_SCHEMA,
async_service_away_mode
)
component.async_register_entity_service(
SERVICE_SET_HOLD_MODE, SET_HOLD_MODE_SCHEMA,
'async_set_hold_mode'
)
component.async_register_entity_service(
SERVICE_SET_AUX_HEAT, SET_AUX_HEAT_SCHEMA,
async_service_aux_heat
)
component.async_register_entity_service(
SERVICE_SET_TEMPERATURE, SET_TEMPERATURE_SCHEMA,
async_service_temperature_set
)
component.async_register_entity_service(
SERVICE_SET_HUMIDITY, SET_HUMIDITY_SCHEMA,
'async_set_humidity'
)
component.async_register_entity_service(
SERVICE_SET_FAN_MODE, SET_FAN_MODE_SCHEMA,
'async_set_fan_mode'
)
component.async_register_entity_service(
SERVICE_SET_OPERATION_MODE, SET_OPERATION_MODE_SCHEMA,
'async_set_operation_mode'
)
component.async_register_entity_service(
SERVICE_SET_SWING_MODE, SET_SWING_MODE_SCHEMA,
'async_set_swing_mode'
)
component.async_register_entity_service(
SERVICE_TURN_OFF, ON_OFF_SERVICE_SCHEMA,
'async_turn_off'
)
component.async_register_entity_service(
SERVICE_TURN_ON, ON_OFF_SERVICE_SCHEMA,
'async_turn_on'
)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
class ClimateDevice(Entity):
"""Representation of a climate device."""
@property
def state(self):
"""Return the current state."""
if self.is_on is False:
return STATE_OFF
if self.current_operation:
return self.current_operation
if self.is_on:
return STATE_ON
return STATE_UNKNOWN
@property
def precision(self):
"""Return the precision of the system."""
if self.hass.config.units.temperature_unit == TEMP_CELSIUS:
return PRECISION_TENTHS
return PRECISION_WHOLE
@property
def state_attributes(self):
"""Return the optional state attributes."""
data = {
ATTR_CURRENT_TEMPERATURE: show_temp(
self.hass, self.current_temperature, self.temperature_unit,
self.precision),
ATTR_MIN_TEMP: show_temp(
self.hass, self.min_temp, self.temperature_unit,
self.precision),
ATTR_MAX_TEMP: show_temp(
self.hass, self.max_temp, self.temperature_unit,
self.precision),
ATTR_TEMPERATURE: show_temp(
self.hass, self.target_temperature, self.temperature_unit,
self.precision),
}
supported_features = self.supported_features
if self.target_temperature_step is not None:
data[ATTR_TARGET_TEMP_STEP] = self.target_temperature_step
if supported_features & SUPPORT_TARGET_TEMPERATURE_HIGH:
data[ATTR_TARGET_TEMP_HIGH] = show_temp(
self.hass, self.target_temperature_high, self.temperature_unit,
self.precision)
if supported_features & SUPPORT_TARGET_TEMPERATURE_LOW:
data[ATTR_TARGET_TEMP_LOW] = show_temp(
self.hass, self.target_temperature_low, self.temperature_unit,
self.precision)
if supported_features & SUPPORT_TARGET_HUMIDITY:
data[ATTR_HUMIDITY] = self.target_humidity
data[ATTR_CURRENT_HUMIDITY] = self.current_humidity
if supported_features & SUPPORT_TARGET_HUMIDITY_LOW:
data[ATTR_MIN_HUMIDITY] = self.min_humidity
if supported_features & SUPPORT_TARGET_HUMIDITY_HIGH:
data[ATTR_MAX_HUMIDITY] = self.max_humidity
if supported_features & SUPPORT_FAN_MODE:
data[ATTR_FAN_MODE] = self.current_fan_mode
if self.fan_list:
data[ATTR_FAN_LIST] = self.fan_list
if supported_features & SUPPORT_OPERATION_MODE:
data[ATTR_OPERATION_MODE] = self.current_operation
if self.operation_list:
data[ATTR_OPERATION_LIST] = self.operation_list
if supported_features & SUPPORT_HOLD_MODE:
data[ATTR_HOLD_MODE] = self.current_hold_mode
if supported_features & SUPPORT_SWING_MODE:
data[ATTR_SWING_MODE] = self.current_swing_mode
if self.swing_list:
data[ATTR_SWING_LIST] = self.swing_list
if supported_features & SUPPORT_AWAY_MODE:
is_away = self.is_away_mode_on
data[ATTR_AWAY_MODE] = STATE_ON if is_away else STATE_OFF
if supported_features & SUPPORT_AUX_HEAT:
is_aux_heat = self.is_aux_heat_on
data[ATTR_AUX_HEAT] = STATE_ON if is_aux_heat else STATE_OFF
return data
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
raise NotImplementedError
@property
def current_humidity(self):
"""Return the current humidity."""
return None
@property
def target_humidity(self):
"""Return the humidity we try to reach."""
return None
@property
def current_operation(self):
"""Return current operation ie. heat, cool, idle."""
return None
@property
def operation_list(self):
"""Return the list of available operation modes."""
return None
@property
def current_temperature(self):
"""Return the current temperature."""
return None
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return None
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return None
@property
def target_temperature_high(self):
"""Return the highbound target temperature we try to reach."""
return None
@property
def target_temperature_low(self):
"""Return the lowbound target temperature we try to reach."""
return None
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
return None
@property
def current_hold_mode(self):
"""Return the current hold mode, e.g., home, away, temp."""
return None
@property
def is_on(self):
"""Return true if on."""
return None
@property
def is_aux_heat_on(self):
"""Return true if aux heater."""
return None
@property
def current_fan_mode(self):
"""Return the fan setting."""
return None
@property
def fan_list(self):
"""Return the list of available fan modes."""
return None
@property
def current_swing_mode(self):
"""Return the fan setting."""
return None
@property
def swing_list(self):
"""Return the list of available swing modes."""
return None
def set_temperature(self, **kwargs):
"""Set new target temperature."""
raise NotImplementedError()
def async_set_temperature(self, **kwargs):
"""Set new target temperature.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(
ft.partial(self.set_temperature, **kwargs))
def set_humidity(self, humidity):
"""Set new target humidity."""
raise NotImplementedError()
def async_set_humidity(self, humidity):
"""Set new target humidity.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.set_humidity, humidity)
def set_fan_mode(self, fan_mode):
"""Set new target fan mode."""
raise NotImplementedError()
def async_set_fan_mode(self, fan_mode):
"""Set new target fan mode.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.set_fan_mode, fan_mode)
def set_operation_mode(self, operation_mode):
"""Set new target operation mode."""
raise NotImplementedError()
def async_set_operation_mode(self, operation_mode):
"""Set new target operation mode.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.set_operation_mode, operation_mode)
def set_swing_mode(self, swing_mode):
"""Set new target swing operation."""
raise NotImplementedError()
def async_set_swing_mode(self, swing_mode):
"""Set new target swing operation.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.set_swing_mode, swing_mode)
def turn_away_mode_on(self):
"""Turn away mode on."""
raise NotImplementedError()
def async_turn_away_mode_on(self):
"""Turn away mode on.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.turn_away_mode_on)
def turn_away_mode_off(self):
"""Turn away mode off."""
raise NotImplementedError()
def async_turn_away_mode_off(self):
"""Turn away mode off.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.turn_away_mode_off)
def set_hold_mode(self, hold_mode):
"""Set new target hold mode."""
raise NotImplementedError()
def async_set_hold_mode(self, hold_mode):
"""Set new target hold mode.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.set_hold_mode, hold_mode)
def turn_aux_heat_on(self):
"""Turn auxiliary heater on."""
raise NotImplementedError()
def async_turn_aux_heat_on(self):
"""Turn auxiliary heater on.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.turn_aux_heat_on)
def turn_aux_heat_off(self):
"""Turn auxiliary heater off."""
raise NotImplementedError()
def async_turn_aux_heat_off(self):
"""Turn auxiliary heater off.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.turn_aux_heat_off)
def turn_on(self):
"""Turn device on."""
raise NotImplementedError()
def async_turn_on(self):
"""Turn device on.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.turn_on)
def turn_off(self):
"""Turn device off."""
raise NotImplementedError()
def async_turn_off(self):
"""Turn device off.
This method must be run in the event loop and returns a coroutine.
"""
return self.hass.async_add_job(self.turn_off)
@property
def supported_features(self):
"""Return the list of supported features."""
raise NotImplementedError()
@property
def min_temp(self):
"""Return the minimum temperature."""
return convert_temperature(DEFAULT_MIN_TEMP, TEMP_CELSIUS,
self.temperature_unit)
@property
def max_temp(self):
"""Return the maximum temperature."""
return convert_temperature(DEFAULT_MAX_TEMP, TEMP_CELSIUS,
self.temperature_unit)
@property
def min_humidity(self):
"""Return the minimum humidity."""
        return DEFAULT_MIN_HUMIDITY
@property
def max_humidity(self):
"""Return the maximum humidity."""
return DEFAULT_MAX_HUMIDITY
async def async_service_away_mode(entity, service):
"""Handle away mode service."""
if service.data[ATTR_AWAY_MODE]:
await entity.async_turn_away_mode_on()
else:
await entity.async_turn_away_mode_off()
async def async_service_aux_heat(entity, service):
"""Handle aux heat service."""
if service.data[ATTR_AUX_HEAT]:
await entity.async_turn_aux_heat_on()
else:
await entity.async_turn_aux_heat_off()
async def async_service_temperature_set(entity, service):
"""Handle set temperature service."""
hass = entity.hass
kwargs = {}
for value, temp in service.data.items():
if value in CONVERTIBLE_ATTRIBUTE:
kwargs[value] = convert_temperature(
temp,
hass.config.units.temperature_unit,
entity.temperature_unit
)
else:
kwargs[value] = temp
await entity.async_set_temperature(**kwargs)
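# A minimal synchronous implementation sketch (hypothetical platform; assumes
# the methods above belong to the ClimateDevice entity class defined earlier
# in this file, and that ATTR_TEMPERATURE is the usual service-data key):
#
#     class DemoThermostat(ClimateDevice):
#         """Thermostat that just stores its target temperature."""
#
#         def __init__(self):
#             self._target = 21.0
#
#         @property
#         def temperature_unit(self):
#             return TEMP_CELSIUS
#
#         def set_temperature(self, **kwargs):
#             if ATTR_TEMPERATURE in kwargs:
#                 self._target = kwargs[ATTR_TEMPERATURE]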
|
__author__ = 'cooper'
assert __name__ != '__main__'
import logging.config
from utils.injection import ApplicationContextBuilder, value, factory_bean
logging.config.fileConfig('logging.conf')
appctx = ApplicationContextBuilder([
('mysql_url', value('mysql://python_dev:vao8Je1o@localhost/stock_dev')),
('mongo_url', value('mongodb://localhost:27017/')),
('working_dir', value('/Users/cooper/tmp/work')),
('sql_engine', factory_bean('providers.database.sql_engine')),
('sql_meta_data', factory_bean('providers.database.sql_meta_data')),
('mongo_client', factory_bean('providers.database.mongo_client')),
('mongo_db', factory_bean('providers.database.mongo_db')),
('spider_container', factory_bean('providers.spider.ScrapySpider')),
('web_container', factory_bean('providers.web.TornadoWeb')),
])
inject = appctx.injector()
autowired = appctx.auto_injector()
beans = appctx.beans
appctx.configure()
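# Usage sketch (hypothetical call sites; the exact utils.injection API is
# assumed): downstream modules import this module for its side effects and
# then pull dependencies from the configured context, e.g.
#     from appcontext import beans, inject
#     engine = beans['sql_engine']
# or have them supplied via the ``inject``/``autowired`` decorators.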
|
from __future__ import print_function, unicode_literals
import argparse
import collections
import logging
import os
import sys
import glib
import gobject
from mopidy import config as config_lib, exceptions
from mopidy.audio import Audio
from mopidy.core import Core
from mopidy.utils import deps, process, versioning
logger = logging.getLogger(__name__)
_default_config = []
for base in glib.get_system_config_dirs() + (glib.get_user_config_dir(),):
_default_config.append(os.path.join(base, b'mopidy', b'mopidy.conf'))
DEFAULT_CONFIG = b':'.join(_default_config)
def config_files_type(value):
return value.split(b':')
def config_override_type(value):
try:
section, remainder = value.split(b'/', 1)
key, value = remainder.split(b'=', 1)
return (section.strip(), key.strip(), value.strip())
except ValueError:
raise argparse.ArgumentTypeError(
'%s must have the format section/key=value' % value)
class _ParserError(Exception):
pass
class _HelpError(Exception):
pass
class _ArgumentParser(argparse.ArgumentParser):
def error(self, message):
raise _ParserError(message)
class _HelpAction(argparse.Action):
def __init__(self, option_strings, dest=None, help=None):
super(_HelpAction, self).__init__(
option_strings=option_strings,
dest=dest or argparse.SUPPRESS,
default=argparse.SUPPRESS,
nargs=0,
help=help)
def __call__(self, parser, namespace, values, option_string=None):
raise _HelpError()
class Command(object):
"""Command parser and runner for building trees of commands.
    This class provides a wrapper around :class:`argparse.ArgumentParser`
    for handling this type of command line application in a better way than
    argparse's own sub-parser handling.
"""
    #: Help text to display in help output.
    help = None
def __init__(self):
self._children = collections.OrderedDict()
self._arguments = []
self._overrides = {}
def _build(self):
actions = []
parser = _ArgumentParser(add_help=False)
parser.register('action', 'help', _HelpAction)
for args, kwargs in self._arguments:
actions.append(parser.add_argument(*args, **kwargs))
parser.add_argument('_args', nargs=argparse.REMAINDER,
help=argparse.SUPPRESS)
return parser, actions
def add_child(self, name, command):
"""Add a child parser to consider using.
:param name: name to use for the sub-command that is being added.
:type name: string
"""
self._children[name] = command
def add_argument(self, *args, **kwargs):
"""Add an argument to the parser.
This method takes all the same arguments as the
:class:`argparse.ArgumentParser` version of this method.
"""
self._arguments.append((args, kwargs))
def set(self, **kwargs):
"""Override a value in the finaly result of parsing."""
self._overrides.update(kwargs)
def exit(self, status_code=0, message=None, usage=None):
"""Optionally print a message and exit."""
print('\n\n'.join(m for m in (usage, message) if m))
sys.exit(status_code)
def format_usage(self, prog=None):
"""Format usage for current parser."""
actions = self._build()[1]
prog = prog or os.path.basename(sys.argv[0])
return self._usage(actions, prog) + '\n'
def _usage(self, actions, prog):
formatter = argparse.HelpFormatter(prog)
formatter.add_usage(None, actions, [])
return formatter.format_help().strip()
def format_help(self, prog=None):
"""Format help for current parser and children."""
actions = self._build()[1]
prog = prog or os.path.basename(sys.argv[0])
formatter = argparse.HelpFormatter(prog)
formatter.add_usage(None, actions, [])
if self.help:
formatter.add_text(self.help)
if actions:
formatter.add_text('OPTIONS:')
formatter.start_section(None)
formatter.add_arguments(actions)
formatter.end_section()
subhelp = []
for name, child in self._children.items():
child._subhelp(name, subhelp)
if subhelp:
formatter.add_text('COMMANDS:')
subhelp.insert(0, '')
return formatter.format_help() + '\n'.join(subhelp)
def _subhelp(self, name, result):
actions = self._build()[1]
if self.help or actions:
formatter = argparse.HelpFormatter(name)
formatter.add_usage(None, actions, [], '')
formatter.start_section(None)
formatter.add_text(self.help)
formatter.start_section(None)
formatter.add_arguments(actions)
formatter.end_section()
formatter.end_section()
result.append(formatter.format_help())
for childname, child in self._children.items():
child._subhelp(' '.join((name, childname)), result)
def parse(self, args, prog=None):
"""Parse command line arguments.
Will recursively parse commands until a final parser is found or an
error occurs. In the case of errors we will print a message and exit.
Otherwise, any overrides are applied and the current parser stored
in the command attribute of the return value.
:param args: list of arguments to parse
:type args: list of strings
:param prog: name to use for program
:type prog: string
:rtype: :class:`argparse.Namespace`
"""
prog = prog or os.path.basename(sys.argv[0])
try:
return self._parse(
args, argparse.Namespace(), self._overrides.copy(), prog)
except _HelpError:
self.exit(0, self.format_help(prog))
def _parse(self, args, namespace, overrides, prog):
overrides.update(self._overrides)
parser, actions = self._build()
try:
result = parser.parse_args(args, namespace)
except _ParserError as e:
self.exit(1, e.message, self._usage(actions, prog))
if not result._args:
for attr, value in overrides.items():
setattr(result, attr, value)
delattr(result, '_args')
result.command = self
return result
child = result._args.pop(0)
if child not in self._children:
usage = self._usage(actions, prog)
self.exit(1, 'unrecognized command: %s' % child, usage)
return self._children[child]._parse(
result._args, result, overrides, ' '.join([prog, child]))
def run(self, *args, **kwargs):
"""Run the command.
Must be implemented by sub-classes that are not simply an intermediate
in the command namespace.
"""
raise NotImplementedError
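# A minimal usage sketch (not part of Mopidy itself): build a two-level
# command tree and parse arguments for a sub-command. The names used here
# are illustrative only.
def _example_command_tree():
    root = Command()
    root.add_argument('--verbose', action='store_true')
    hello = Command()
    hello.help = 'Greet someone by name.'
    hello.add_argument('name')
    root.add_child('hello', hello)
    # Returns an argparse.Namespace with ``command`` set to the ``hello``
    # instance, ``verbose`` and ``name`` populated, and ``_args`` removed.
    return root.parse(['hello', 'world'], prog='example')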
class RootCommand(Command):
def __init__(self):
super(RootCommand, self).__init__()
self.set(base_verbosity_level=0)
self.add_argument(
'-h', '--help',
action='help', help='Show this message and exit')
self.add_argument(
'--version', action='version',
version='Mopidy %s' % versioning.get_version())
self.add_argument(
'-q', '--quiet',
action='store_const', const=-1, dest='verbosity_level',
help='less output (warning level)')
self.add_argument(
'-v', '--verbose',
action='count', dest='verbosity_level', default=0,
help='more output (repeat up to 3 times for even more)')
self.add_argument(
'--save-debug-log',
action='store_true', dest='save_debug_log',
help='save debug log to "./mopidy.log"')
self.add_argument(
'--config',
action='store', dest='config_files', type=config_files_type,
default=DEFAULT_CONFIG, metavar='FILES',
            help='config files to use, colon separated, later files override')
self.add_argument(
'-o', '--option',
action='append', dest='config_overrides',
type=config_override_type, metavar='OPTIONS',
help='`section/key=value` values to override config options')
def run(self, args, config):
loop = gobject.MainLoop()
mixer_class = self.get_mixer_class(config, args.registry['mixer'])
backend_classes = args.registry['backend']
frontend_classes = args.registry['frontend']
exit_status_code = 0
try:
mixer = self.start_mixer(config, mixer_class)
audio = self.start_audio(config, mixer)
backends = self.start_backends(config, backend_classes, audio)
core = self.start_core(mixer, backends)
self.start_frontends(config, frontend_classes, core)
loop.run()
except (exceptions.BackendError,
exceptions.FrontendError,
exceptions.MixerError):
logger.info('Initialization error. Exiting...')
exit_status_code = 1
except KeyboardInterrupt:
logger.info('Interrupted. Exiting...')
except Exception:
logger.exception('Uncaught exception')
finally:
loop.quit()
self.stop_frontends(frontend_classes)
self.stop_core()
self.stop_backends(backend_classes)
self.stop_audio()
self.stop_mixer(mixer_class)
process.stop_remaining_actors()
return exit_status_code
def get_mixer_class(self, config, mixer_classes):
logger.debug(
'Available Mopidy mixers: %s',
', '.join(m.__name__ for m in mixer_classes) or 'none')
selected_mixers = [
m for m in mixer_classes if m.name == config['audio']['mixer']]
if len(selected_mixers) != 1:
logger.error(
'Did not find unique mixer "%s". Alternatives are: %s',
config['audio']['mixer'],
', '.join([m.name for m in mixer_classes]))
process.exit_process()
return selected_mixers[0]
def start_mixer(self, config, mixer_class):
try:
logger.info('Starting Mopidy mixer: %s', mixer_class.__name__)
mixer = mixer_class.start(config=config).proxy()
self.configure_mixer(config, mixer)
return mixer
except exceptions.MixerError as exc:
logger.error(
'Mixer (%s) initialization error: %s',
mixer_class.__name__, exc.message)
raise
def configure_mixer(self, config, mixer):
volume = config['audio']['mixer_volume']
if volume is not None:
mixer.set_volume(volume)
logger.info('Mixer volume set to %d', volume)
else:
logger.debug('Mixer volume left unchanged')
def start_audio(self, config, mixer):
logger.info('Starting Mopidy audio')
return Audio.start(config=config, mixer=mixer).proxy()
def start_backends(self, config, backend_classes, audio):
logger.info(
'Starting Mopidy backends: %s',
', '.join(b.__name__ for b in backend_classes) or 'none')
backends = []
for backend_class in backend_classes:
try:
backend = backend_class.start(
config=config, audio=audio).proxy()
backends.append(backend)
except exceptions.BackendError as exc:
logger.error(
'Backend (%s) initialization error: %s',
backend_class.__name__, exc.message)
raise
return backends
def start_core(self, mixer, backends):
logger.info('Starting Mopidy core')
return Core.start(mixer=mixer, backends=backends).proxy()
def start_frontends(self, config, frontend_classes, core):
logger.info(
'Starting Mopidy frontends: %s',
', '.join(f.__name__ for f in frontend_classes) or 'none')
for frontend_class in frontend_classes:
try:
frontend_class.start(config=config, core=core)
except exceptions.FrontendError as exc:
logger.error(
'Frontend (%s) initialization error: %s',
frontend_class.__name__, exc.message)
raise
def stop_frontends(self, frontend_classes):
logger.info('Stopping Mopidy frontends')
for frontend_class in frontend_classes:
process.stop_actors_by_class(frontend_class)
def stop_core(self):
logger.info('Stopping Mopidy core')
process.stop_actors_by_class(Core)
def stop_backends(self, backend_classes):
logger.info('Stopping Mopidy backends')
for backend_class in backend_classes:
process.stop_actors_by_class(backend_class)
def stop_audio(self):
logger.info('Stopping Mopidy audio')
process.stop_actors_by_class(Audio)
def stop_mixer(self, mixer_class):
logger.info('Stopping Mopidy mixer')
process.stop_actors_by_class(mixer_class)
class ConfigCommand(Command):
help = 'Show currently active configuration.'
def __init__(self):
super(ConfigCommand, self).__init__()
self.set(base_verbosity_level=-1)
def run(self, config, errors, extensions):
print(config_lib.format(config, extensions, errors))
return 0
class DepsCommand(Command):
help = 'Show dependencies and debug information.'
def __init__(self):
super(DepsCommand, self).__init__()
self.set(base_verbosity_level=-1)
def run(self):
print(deps.format_dependency_list())
return 0
|
from maintenance.views import RestartFramework, validate_data_files_view
from publications.views import EditPublication
__author__ = 'Ahmed G. Ali'
from django.conf.urls import patterns, url
from accounts import views as accounts_views
from publications import views as publications_views
urlpatterns = patterns(
'',
url(r'^me/?$', accounts_views.MeView.as_view()),
url(r'^users/?$', accounts_views.UserListView.as_view()),
url(r'^users/(?P<pk>\d+)/?$',
accounts_views.UserDetailView.as_view()),
# url(r'^publications/?$', publications_views.PublicationList.as_view()),
# url(r'^publications/(?P<pk>\d+)/$',
# publications_views.PublicationDetail.as_view()),
# url(r'^publications/(?P<pk>[0-9]+)/$', publications_views.PublicationDetail.as_view(), name='publication-detail')
url(r'^publications/?$', publications_views.AssociationList.as_view()),
url(r'^publications/(?P<pk>[0-9]+)/$', publications_views.AssociationDetail.as_view(), name='association-detail'),
url(r'^publications/edit$', EditPublication.as_view()),
url(r'^maintenance/framework/restart$', RestartFramework.as_view()),
url(r'^validate/(?P<job_id>[\w\-]+)/$', validate_data_files_view),
url(r'^validate/$', validate_data_files_view)
# url(r'^validate/?$', validate_data_files_view)
)
|
'''
Main modules for summarizer package.
Copyright, 2015.
Authors:
Luis Perez (luis.perez.live@gmail.com)
Kevin Eskici (keskici@college.harvard.edu)
'''
import os
import traceback
import nltk
import argparse
import sys
from . import grasshopper
from . import baselines
from . import textrank
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
argsToAlgo = {
'baseline': baselines.baseline,
'geomprior': baselines.geomPriorBaseline,
'firstgeomprior': baselines.modifiedGeomPriorBaseline,
'multiplegeomprior': baselines.multipleGeomPrior,
'frequency': baselines.wordFreqBaseline,
'textrank': textrank.textRank,
'modifiedtextrank': textrank.modifiedTextRank,
'grasshopper': grasshopper.run_grassHopper,
'modifiedgrasshopper': grasshopper.run_modified_grasshopper
}
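# Example invocations (module path and data layout are illustrative):
#     python -m summarizer -d DUC2004/ -a grasshopper -k 5 -s True
#     cat article.txt | python -m summarizer -a textrank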
def parseArgs(parser):
parser.add_argument("-d", "--data_dir", default=None,
help="Base Directory containing summarization documents" +
" and model summaries. Summarization documents should be" +
" contained in the docs/ subdirectory. See README for" +
" details. If no directory is provided, input is streamed" +
" from STDIN (or provided text file) and results are " +
"output to STDOUT. ROUGE analysis is not performed.")
parser.add_argument("-a", "--algorithm", default="frequency",
help="Algorithm to use for summarization. Output" +
" summaries are saved under the DATA_DIR/ALGORITHM/" +
" directory if a data_dir parameter is provided." +
"Current options are {}".format(argsToAlgo.keys()))
parser.add_argument("-s", "--rouge_score", default="False",
help="The parameter is ignored in the case where DATA_DIR " +
"is not set. Otherwise, if ROUGE_SCORE, then the model " +
"and system summaries are scored using the ROUGE metrics " +
"and results are printed to STDOUT.")
parser.add_argument("--debug", default="False",
help="Prints helpful debugging information.")
parser.add_argument("--summarize", default="True",
help="If true, performs the summarization using the " +
"specified ALGORITHM. Otherwise, does not summarize.")
parser.add_argument("-k", "--summary_length", default=5,
help="Sentence length of output summary. Note that a summary" +
" might be truncated to be shorter than this length.")
parser.add_argument("-b", "--bytes", default=-1,
help="Byte length of output summary. All output summaries" +
" will be truncated at this length if written out to a file." +
"A value of -1 will avoid almost all truncation (except " +
"the last character). If you're setting this, you likely also " +
"want to set SUMMARY_LENGTH to some large value.")
parser.add_argument("--rouge_folder", default="cs182_data/programs/RELEASE-1.5.5",
help="Folder Containing the ROUGE Perl Executables. " +
"It must be provided if ROUGE is to be used.")
parser.add_argument("--sort_sents", default="True",
help="Boolean parameter specifying whether sentences " +
"should be sorted or not. When using ROUGE, we recommend " +
"that this parameter be set to False in order to allow " +
"for correct determination of the ROUGE score.")
def processSummary(sort_sents, sentences, D, mapping):
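    """Optionally sort the selected sentence indices into document order,
    then map each index back to its sentence via the algorithm-provided
    mapping."""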
if sort_sents:
sentences = sorted(sentences)
return [D[mapping[i]] for i in sentences]
def createSummaries(sum_algo, abs_path, out_path, sort_sents, k=5, bytes=665, multiDocument=False):
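    """Summarize every parsed document under abs_path with sum_algo and
    write the (possibly truncated) summaries into out_path."""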
# Extracted from the input folder name
setID = abs_path.split('/')[-1]
# Stores DocumentIDs
docIDs = []
# Create document set that we wish to evaluate
D = []
for filename in os.listdir(abs_path):
# Only take files that we've parsed before!
tmp = filename.split('.')
if tmp[0] == 'Parsed':
docIDs.append(tmp[1])
filepath = os.path.join(abs_path, filename)
with open(filepath) as inputDoc:
text = inputDoc.read().strip()
sentences = tokenizer.tokenize(text)
D.append([s.split(" ") for s in sentences])
# Pass this to the algorithm which should return the summary as
# a list of sentences.
if multiDocument:
summary = processSummary(sort_sents, *sum_algo(D, k))
# Write out the summary
filepath = os.path.join(out_path, "SetSummary.{}.txt".format(setID))
with open(filepath, 'w+') as out:
res = "\n".join([" ".join(s).strip() for s in summary])
out.write(res[:bytes])
else:
for i in range(len(D)):
            summary = processSummary(sort_sents, *sum_algo([D[i]], k))
filepath = os.path.join(
out_path, "Summary.{}.txt".format(docIDs[i]))
with open(filepath, 'w+') as out:
res = "\n".join([" ".join(s).strip() for s in summary])
out.write(res[:bytes])
def run(opts):
'''
Runs our summarization software based on user options.
'''
base = None if opts.data_dir is None else os.path.abspath(opts.data_dir)
debug = opts.debug.lower() == 'true'
bytes = int(opts.bytes)
sort_sents = opts.sort_sents.lower() == 'true'
k = int(opts.summary_length)
if opts.summarize.lower() == 'true':
try:
algorithm = argsToAlgo[opts.algorithm.lower()]
except KeyError:
raise Exception(
"{} is not an available algorithm!".format(opts.algorithm))
else:
algorithm = opts.algorithm
inputParams = "_sorted={}_k={}_bytes={}".format(sort_sents, k, bytes)
outpath = None if base is None else os.path.join(
base, opts.algorithm + inputParams)
if opts.summarize.lower() == 'true':
if base is None:
summary = processSummary(
False, *algorithm([sys.stdin.readlines()], k))
print "\n".join([s.strip() for s in summary])
return
# Create directory if it does not exist
if not os.path.exists(outpath):
os.makedirs(outpath)
inbase = os.path.join(base, 'docs')
        folders = [d for d in os.listdir(inbase)
                   if os.path.isdir(os.path.join(inbase, d))]
for folder in folders:
inpath = os.path.join(inbase, folder)
try:
createSummaries(algorithm, inpath, outpath, sort_sents, bytes=bytes,
k=k, multiDocument=True)
except Exception as e:
print "Failed with {}".format(inpath)
if debug:
                print traceback.format_exc()
# If rouge score is input, attempt to score the results with pyrouge
# Currently only handles multiple documents!
if base is not None and opts.rouge_score == 'True':
import pyrouge
        # NOTE THAT WE MUST CONSTRUCT THE ARGUMENTS TO ROUGE OURSELVES
rouge_dir = os.path.abspath(opts.rouge_folder)
options = [
'-e', os.path.join(rouge_dir, 'data'),
'-b', bytes,
'-c', 95,
'-n', 4,
'-w', 1.2,
'-a',
'-f', 'A',
'-p', 0.5,
'-t', 0
]
args = " ".join(map(str, options))
r = pyrouge.Rouge155(rouge_dir=rouge_dir, rouge_args=args)
r.system_dir = outpath
if debug:
print "System Directory: {}.".format(r.system_dir)
r.model_dir = os.path.join(base, 'model_multi')
if debug:
print "Model Directory: {}.".format(r.model_dir)
r.system_filename_pattern = 'SetSummary.(\d+).txt'
r.model_filename_pattern = 'SetSummary.#ID#.[A-Z].txt'
output = r.convert_and_evaluate()
print output
|
import pretend
from pyramid.httpexceptions import HTTPMovedPermanently
from warehouse.legacy.api import simple
from ....common.db.accounts import UserFactory
from ....common.db.packaging import (
ProjectFactory, ReleaseFactory, FileFactory, JournalEntryFactory,
)
class TestSimpleIndex:
def test_no_results_no_serial(self, db_request):
assert simple.simple_index(db_request) == {"projects": []}
assert db_request.response.headers["X-PyPI-Last-Serial"] == "0"
def test_no_results_with_serial(self, db_request):
user = UserFactory.create()
je = JournalEntryFactory.create(submitted_by=user)
assert simple.simple_index(db_request) == {"projects": []}
assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id)
def test_with_results_no_serial(self, db_request):
projects = [
(x.name, x.normalized_name)
for x in [ProjectFactory.create() for _ in range(3)]
]
assert simple.simple_index(db_request) == {
"projects": sorted(projects, key=lambda x: x[1]),
}
assert db_request.response.headers["X-PyPI-Last-Serial"] == "0"
def test_with_results_with_serial(self, db_request):
projects = [
(x.name, x.normalized_name)
for x in [ProjectFactory.create() for _ in range(3)]
]
user = UserFactory.create()
je = JournalEntryFactory.create(submitted_by=user)
assert simple.simple_index(db_request) == {
"projects": sorted(projects, key=lambda x: x[1]),
}
assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id)
class TestSimpleDetail:
def test_redirects(self, pyramid_request):
project = pretend.stub(normalized_name="foo")
pyramid_request.matchdict["name"] = "Foo"
pyramid_request.current_route_path = pretend.call_recorder(
lambda name: "/foobar/"
)
resp = simple.simple_detail(project, pyramid_request)
assert isinstance(resp, HTTPMovedPermanently)
assert resp.headers["Location"] == "/foobar/"
assert pyramid_request.current_route_path.calls == [
pretend.call(name="foo"),
]
def test_no_files_no_serial(self, db_request):
project = ProjectFactory.create()
db_request.matchdict["name"] = project.normalized_name
user = UserFactory.create()
JournalEntryFactory.create(submitted_by=user)
assert simple.simple_detail(project, db_request) == {
"project": project,
"files": [],
}
assert db_request.response.headers["X-PyPI-Last-Serial"] == "0"
def test_no_files_with_serial(self, db_request):
project = ProjectFactory.create()
db_request.matchdict["name"] = project.normalized_name
user = UserFactory.create()
je = JournalEntryFactory.create(name=project.name, submitted_by=user)
# Make sure that we get any changes made since the JournalEntry was
# saved.
db_request.db.refresh(project)
assert simple.simple_detail(project, db_request) == {
"project": project,
"files": [],
}
assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id)
def test_with_files_no_serial(self, db_request):
project = ProjectFactory.create()
releases = [
ReleaseFactory.create(project=project)
for _ in range(3)
]
files = [
FileFactory.create(
release=r,
filename="{}-{}.tar.gz".format(project.name, r.version),
)
for r in releases
]
# let's assert the result is ordered by string comparison of filename
files = sorted(files, key=lambda key: key.filename)
db_request.matchdict["name"] = project.normalized_name
user = UserFactory.create()
JournalEntryFactory.create(submitted_by=user)
# Make sure that we get any changes made since the JournalEntry was
# saved.
db_request.db.refresh(project)
assert simple.simple_detail(project, db_request) == {
"project": project,
"files": files,
}
assert db_request.response.headers["X-PyPI-Last-Serial"] == "0"
def test_with_files_with_serial(self, db_request):
project = ProjectFactory.create()
releases = [
ReleaseFactory.create(project=project)
for _ in range(3)
]
files = [
FileFactory.create(
release=r,
filename="{}-{}.tar.gz".format(project.name, r.version),
)
for r in releases
]
# let's assert the result is ordered by string comparison of filename
files = sorted(files, key=lambda key: key.filename)
db_request.matchdict["name"] = project.normalized_name
user = UserFactory.create()
je = JournalEntryFactory.create(name=project.name, submitted_by=user)
# Make sure that we get any changes made since the JournalEntry was
# saved.
db_request.db.refresh(project)
assert simple.simple_detail(project, db_request) == {
"project": project,
"files": files,
}
assert db_request.response.headers["X-PyPI-Last-Serial"] == str(je.id)
|
from hashlib import md5
import filecmp
import os
import urllib
import txweb2.dav.test.util
from txweb2 import responsecode
from txweb2.test.test_server import SimpleRequest
from txweb2.dav.test.util import dircmp, serialize
from txweb2.dav.fileop import rmdir
class COPY(txweb2.dav.test.util.TestCase):
"""
COPY request
"""
# FIXME:
# Check that properties are being copied
def test_COPY_create(self):
"""
COPY to new resource.
"""
def test(response, path, isfile, sum, uri, depth, dst_path):
if response.code != responsecode.CREATED:
self.fail("Incorrect response code for COPY %s (depth=%r): %s != %s"
% (uri, depth, response.code, responsecode.CREATED))
if response.headers.getHeader("location") is None:
self.fail("Reponse to COPY %s (depth=%r) with CREATE status is missing location: header."
% (uri, depth))
if os.path.isfile(path):
if not os.path.isfile(dst_path):
self.fail("COPY %s (depth=%r) produced no output file" % (uri, depth))
                if not filecmp.cmp(path, dst_path, shallow=False):
                    self.fail("COPY %s (depth=%r) produced different file" % (uri, depth))
os.remove(dst_path)
elif os.path.isdir(path):
if not os.path.isdir(dst_path):
self.fail("COPY %s (depth=%r) produced no output directory" % (uri, depth))
if depth in ("infinity", None):
if dircmp(path, dst_path):
self.fail("COPY %s (depth=%r) produced different directory" % (uri, depth))
elif depth == "0":
for filename in os.listdir(dst_path):
self.fail("COPY %s (depth=%r) shouldn't copy directory contents (eg. %s)" % (uri, depth, filename))
else:
raise AssertionError("Unknown depth: %r" % (depth,))
rmdir(dst_path)
else:
self.fail("Source %s is neither a file nor a directory"
% (path,))
return serialize(self.send, work(self, test))
def test_COPY_exists(self):
"""
COPY to existing resource.
"""
def test(response, path, isfile, sum, uri, depth, dst_path):
if response.code != responsecode.PRECONDITION_FAILED:
self.fail("Incorrect response code for COPY without overwrite %s: %s != %s"
% (uri, response.code, responsecode.PRECONDITION_FAILED))
else:
# FIXME: Check XML error code (2518bis)
pass
return serialize(self.send, work(self, test, overwrite=False))
def test_COPY_overwrite(self):
"""
COPY to existing resource with overwrite header.
"""
def test(response, path, isfile, sum, uri, depth, dst_path):
if response.code != responsecode.NO_CONTENT:
self.fail("Incorrect response code for COPY with overwrite %s: %s != %s"
% (uri, response.code, responsecode.NO_CONTENT))
else:
# FIXME: Check XML error code (2518bis)
pass
self.failUnless(os.path.exists(dst_path), "COPY didn't produce file: %s" % (dst_path,))
return serialize(self.send, work(self, test, overwrite=True))
def test_COPY_no_parent(self):
"""
COPY to resource with no parent.
"""
def test(response, path, isfile, sum, uri, depth, dst_path):
if response.code != responsecode.CONFLICT:
self.fail("Incorrect response code for COPY with no parent %s: %s != %s"
% (uri, response.code, responsecode.CONFLICT))
else:
# FIXME: Check XML error code (2518bis)
pass
return serialize(self.send, work(self, test, dst=os.path.join(self.docroot, "elvislives!")))
def work(self, test, overwrite=None, dst=None, depths=("0", "infinity", None)):
if dst is None:
dst = os.path.join(self.docroot, "dst")
os.mkdir(dst)
for basename in os.listdir(self.docroot):
if basename == "dst":
continue
uri = urllib.quote("/" + basename)
path = os.path.join(self.docroot, basename)
isfile = os.path.isfile(path)
sum = sumFile(path)
dst_path = os.path.join(dst, basename)
dst_uri = urllib.quote("/dst/" + basename)
if not isfile:
uri += "/"
dst_uri += "/"
if overwrite is not None:
# Create a file at dst_path to create a conflict
file(dst_path, "w").close()
for depth in depths:
def do_test(response, path=path, isfile=isfile, sum=sum, uri=uri, depth=depth, dst_path=dst_path):
test(response, path, isfile, sum, uri, depth, dst_path)
request = SimpleRequest(self.site, self.__class__.__name__, uri)
request.headers.setHeader("destination", dst_uri)
if depth is not None:
request.headers.setHeader("depth", depth)
if overwrite is not None:
request.headers.setHeader("overwrite", overwrite)
yield (request, do_test)
def sumFile(path):
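    """Return an md5 digest over a file's contents, or over the names and
    contents of everything in a directory tree."""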
m = md5()
if os.path.isfile(path):
f = file(path)
try:
m.update(f.read())
finally:
f.close()
elif os.path.isdir(path):
for dir, subdirs, files in os.walk(path):
for filename in files:
m.update(filename)
f = file(os.path.join(dir, filename))
try:
m.update(f.read())
finally:
f.close()
for dirname in subdirs:
m.update(dirname + "/")
else:
raise AssertionError()
return m.digest()
|
import sys, os, time, paramiko
from vnc_api import vnc_api
from novaclient.v2 import client
def wait():
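    """Render a simple in-place textual progress bar over about 20 seconds."""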
for i in range(21):
sys.stdout.write('\r')
sys.stdout.write("[%-20s] %d%%" % ('='*i, 5*i))
sys.stdout.flush()
time.sleep(1)
def test_Connectivity(host1, host2, username, password):
""" FUNCTION TO TEST CONNECTIVITY """
print "Checking connectivity\n"
wait()
print "\n"
try:
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(host1, 22, username, password)
cmd = 'ping -c 3 ' + host2 + '; echo $?'
stdin, stdout, stderr = client.exec_command(cmd)
out = stdout.readlines()[-1].strip()
if str(out) == "0":
print "Ping successful\n"
else:
print "Ping unsuccessful\n"
    except Exception as exc:
        print "Connectivity check failed: {}\n".format(exc)
def delete_Env(nova, vm_name_1, vm_name_2, policy_name, left_network_name, right_network_name, vnc, domain, project_name):
""" FUNCTION TO DELETE THE ENVIRONMENT """
project = vnc.project_read(fq_name = [domain, project_name])
try:
fip_list = open("fip_uuid.txt").read().strip().split("\n")
for fip in fip_list:
time.sleep(2)
nova.floating_ips.delete(fip)
print 'Floating IP {} deleted successfully\n'.format(fip)
os.remove("fip_uuid.txt")
except:
pass
vm_delete_list = [vm_name_1, vm_name_2]
for vm in vm_delete_list:
try:
nova.servers.find(name=vm).delete()
print 'Server "{}" deleted successfully\n'.format(vm)
except:
print 'Server "{}" does not exist\n'.format(vm)
try:
fip_list = open("fip_uuid.txt").read().strip().split("\n")
for fip in fip_list:
nova.floating_ips.delete(fip)
            print 'Floating IP {} successfully deleted\n'.format(fip)
os.remove("fip_uuid.txt")
except:
pass
time.sleep(5)
vn_delete_list = [left_network_name, right_network_name]
for vn in vn_delete_list:
try:
vnc.virtual_network_delete(fq_name= [domain, project_name, vn])
print 'Virtual Network "{}" deleted successfully\n'.format(vn)
except:
print 'Virtual Network "{}" does not exist\n'.format(vn)
try:
vnc.network_policy_delete(fq_name= [domain, project_name, policy_name])
print 'Network Policy "{}" deleted successfully\n'.format(policy_name)
except:
print 'Network Policy "{}" does not exist\n'.format(policy_name)
def launch_VM(nova, project_name, vm_name, image_name, flavor_type, network):
""" FUNCTION TO LAUNCH A VIRTUAL MACHINE """
image = nova.images.find(name = image_name)
flavor = nova.flavors.find(name = flavor_type)
network = nova.networks.find(label = network)
    floating_ip = nova.floating_ips.create(nova.floating_ip_pools.list()[0].name)
nova.servers.create(name=vm_name, image=image.id, flavor=flavor.id, nics=[{'net-id': network.id}])
    with open('fip_uuid.txt', 'a') as fip_file:
        fip_file.write(floating_ip.id + '\n')
print 'Server "{}" created successfully\n'.format(vm_name)
time.sleep(2)
nova.servers.find(name=vm_name).add_floating_ip(floating_ip)
print 'Floating IP "{}" attached to "{}"\n'.format(floating_ip.ip, vm_name)
return floating_ip.ip
def create_NetworkPolicy(policy_name, left_network_name, right_network_name, vnc, domain, project_name):
""" FUNCTION TO CREATE NETWORK POLICY """
project = vnc.project_read(fq_name = [domain, project_name])
rule = vnc_api.PolicyRuleType(direction = '<>', protocol = 'any',
action_list = vnc_api.ActionListType(simple_action = 'pass'),
src_addresses = [vnc_api.AddressType(virtual_network = left_network_name)],
src_ports = [vnc_api.PortType(start_port = -1, end_port = -1)],
dst_addresses = [vnc_api.AddressType(virtual_network = right_network_name)],
dst_ports = [vnc_api.PortType(start_port = -1, end_port = -1)])
policy = vnc_api.NetworkPolicy(name = policy_name, parent_obj = project, network_policy_entries = vnc_api.PolicyEntriesType([rule]))
vnc.network_policy_create(policy)
print 'Policy "{}" created between "{}" & "{}"\n'.format(policy_name, left_network_name, right_network_name)
def add_NetworkPolicy(policy_name, network, vnc, domain, project_name):
""" FUNCTION TO ATTACH NETWORK POLICY TO VIRTUAL_NETWORKS """
policy = vnc.network_policy_read(fq_name = [domain, project_name, policy_name])
policy_type = vnc_api.VirtualNetworkPolicyType(sequence = vnc_api.SequenceType(major = 0, minor = 0))
vn = vnc.virtual_network_read(fq_name = [domain, project_name, network])
vn.add_network_policy(ref_obj = policy, ref_data = policy_type)
vnc.virtual_network_update(vn)
print 'Policy "{}" attached to "{}"\n'.format(policy_name, network)
def create_VirtualNetwork(network_name, network_subnet, network_mask, network_gateway, vnc, domain, project_name):
""" FUNCTION TO CREATE VIRTUAL-NETWORK """
project = vnc.project_read(fq_name = [domain, project_name])
vn_obj = vnc_api.VirtualNetwork(name=network_name, parent_obj=project)
vn_obj.add_network_ipam(vnc_api.NetworkIpam(),
vnc_api.VnSubnetsType([vnc_api.IpamSubnetType(subnet = vnc_api.SubnetType(network_subnet,network_mask), default_gateway = network_gateway)]))
vnc.virtual_network_create(vn_obj)
print 'Network "{}" created successfully\n'.format(network_name)
def main():
""" MAIN/AUTHENTICATE """
project_name = 'admin'
domain = 'default-domain'
username = 'admin'
password = 'xyz123'
api_server = '10.84.18.1'
auth_url = "http://10.84.18.1:5000/v2.0/"
left_network_name = 'left_VN'
left_network_subnet = '1.1.1.0'
left_network_mask = 24
left_network_gateway = '1.1.1.1'
right_network_name = 'right_VN'
right_network_subnet = '2.2.2.0'
right_network_mask = 24
right_network_gateway = '2.2.2.1'
vm_name_1 = "vm_1"
vm_name_2 = "vm_2"
image = "ubuntu"
flavor = "m1.tiny"
vm_username = "ubuntu"
vm_password = "ubuntu"
policy_name = 'red-to-blue'
vnc = vnc_api.VncApi(username=username, password=password, api_server_host = api_server, tenant_name=project_name)
nova = client.Client(username, password, project_name, auth_url, service_type="compute")
if len(sys.argv) == 2 and (sys.argv[1]) == "-d":
delete_Env(nova, vm_name_1, vm_name_2, policy_name, left_network_name, right_network_name, vnc, domain, project_name)
elif len(sys.argv) == 2 and (sys.argv[1]) == "-c":
create_VirtualNetwork(left_network_name, left_network_subnet, left_network_mask, left_network_gateway, vnc, domain, project_name)
create_VirtualNetwork(right_network_name, right_network_subnet, right_network_mask, right_network_gateway, vnc, domain, project_name)
create_NetworkPolicy(policy_name, left_network_name, right_network_name, vnc, domain, project_name)
add_NetworkPolicy(policy_name, left_network_name, vnc, domain, project_name)
add_NetworkPolicy(policy_name, right_network_name, vnc, domain, project_name)
host1 = launch_VM(nova, project_name, vm_name=vm_name_1, image_name=image, flavor_type=flavor, network=left_network_name)
host2 = launch_VM(nova, project_name, vm_name=vm_name_2, image_name=image, flavor_type=flavor, network=right_network_name)
time.sleep(30)
test_Connectivity(host1, host2, vm_username, vm_password)
else:
print "Invalid argument\n\nUSAGE:\n\nTo create environment: python <file.py> -c\nTo delete environment: python <file.py> -d\n"
if __name__=="__main__":
main()
|
"""Forwarding utils for backwards compatibility."""
from tf_agents.specs import bandit_spec_utils as _utils
GLOBAL_FEATURE_KEY = _utils.GLOBAL_FEATURE_KEY
PER_ARM_FEATURE_KEY = _utils.PER_ARM_FEATURE_KEY
NUM_ACTIONS_FEATURE_KEY = _utils.NUM_ACTIONS_FEATURE_KEY
REWARD_SPEC_KEY = _utils.REWARD_SPEC_KEY
CONSTRAINTS_SPEC_KEY = _utils.CONSTRAINTS_SPEC_KEY
create_per_arm_observation_spec = _utils.create_per_arm_observation_spec
get_context_dims_from_spec = _utils.get_context_dims_from_spec
drop_arm_observation = _utils.drop_arm_observation
|
"""Utility functions supporting FAUCET/Gauge config parsing."""
import hashlib
import logging
import os
import yaml
from yaml.constructor import ConstructorError
try:
from yaml import CLoader as Loader # type: ignore
except ImportError:
from yaml import Loader
CONFIG_HASH_FUNC = 'sha256'
class UniqueKeyLoader(Loader): # pylint: disable=too-many-ancestors
"""YAML loader that will reject duplicate/overwriting keys."""
def construct_mapping(self, node, deep=False):
"""Check for duplicate YAML keys."""
try:
key_value_pairs = [
(self.construct_object(key_node, deep=deep),
self.construct_object(value_node, deep=deep))
for key_node, value_node in node.value]
except TypeError as err:
raise ConstructorError('invalid key type: %s' % err)
mapping = {}
for key, value in key_value_pairs:
try:
if key in mapping:
raise ConstructorError('duplicate key: %s' % key)
except TypeError:
raise ConstructorError('unhashable key: %s' % key)
mapping[key] = value
return mapping
yaml.SafeLoader.add_constructor(
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
UniqueKeyLoader.construct_mapping)
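# Example: with the constructor registered above, yaml.safe_load('a: 1\na: 2')
# raises ConstructorError('duplicate key: a') instead of silently keeping the
# last value, as the stock SafeLoader would.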
def get_logger(logname):
"""Return logger instance for config parsing."""
return logging.getLogger(logname + '.config')
def read_config(config_file, logname):
"""Return a parsed YAML config file or None."""
logger = get_logger(logname)
conf_txt = None
conf = None
try:
with open(config_file, 'r') as stream:
conf_txt = stream.read()
conf = yaml.safe_load(conf_txt)
except (yaml.YAMLError, UnicodeDecodeError,
PermissionError, ValueError) as err: # pytype: disable=name-error
logger.error('Error in file %s (%s)', config_file, str(err))
    except FileNotFoundError:  # pytype: disable=name-error
logger.error('Could not find requested file: %s', config_file)
return conf, conf_txt
def config_file_hash(config_file_name):
"""Return hash of YAML config file contents."""
    config_hash = getattr(hashlib, CONFIG_HASH_FUNC)
    with open(config_file_name) as config_file:
        config_text = config_file.read()
    return config_hash(config_text.encode('utf-8')).hexdigest()
def dp_config_path(config_file, parent_file=None):
"""Return full path to config file."""
if parent_file and not os.path.isabs(config_file):
return os.path.realpath(os.path.join(os.path.dirname(parent_file), config_file))
return os.path.realpath(config_file)
def dp_include(config_hashes, config_contents, config_file, logname, # pylint: disable=too-many-locals
top_confs):
"""Handles including additional config files"""
logger = get_logger(logname)
if not os.path.isfile(config_file):
logger.warning('not a regular file or does not exist: %s', config_file)
return False
conf, config_content = read_config(config_file, logname)
if not conf:
logger.warning('error loading config from file: %s', config_file)
return False
valid_conf_keys = set(top_confs.keys()).union({'include', 'include-optional', 'version'})
unknown_top_confs = set(conf.keys()) - valid_conf_keys
if unknown_top_confs:
logger.error('unknown top level config items: %s', unknown_top_confs)
return False
# Add the SHA256 hash for this configuration file, so FAUCET can determine
# whether or not this configuration file should be reloaded upon receiving
# a HUP signal.
new_config_hashes = config_hashes.copy()
new_config_hashes[config_file] = config_file_hash(config_file)
new_config_contents = config_contents.copy()
new_config_contents[config_file] = config_content
# Save the updated configuration state in separate dicts,
# so if an error is found, the changes can simply be thrown away.
new_top_confs = {}
for conf_name, curr_conf in top_confs.items():
new_top_confs[conf_name] = curr_conf.copy()
try:
new_top_confs[conf_name].update(conf.pop(conf_name, {}))
except (TypeError, ValueError):
logger.error('Invalid config for "%s"', conf_name)
return False
for include_directive, file_required in (
('include', True),
('include-optional', False)):
include_values = conf.pop(include_directive, [])
if not isinstance(include_values, list):
logger.error('Include directive is not in a valid format')
return False
for include_file in include_values:
if not isinstance(include_file, str):
include_file = str(include_file)
include_path = dp_config_path(include_file, parent_file=config_file)
logger.info('including file: %s', include_path)
if include_path in config_hashes:
logger.error(
'include file %s already loaded, include loop found in file: %s',
include_path, config_file,)
return False
            if not dp_include(
                    new_config_hashes, new_config_contents, include_path,
                    logname, new_top_confs):
if file_required:
logger.error('unable to load required include file: %s', include_path)
return False
new_config_hashes[include_path] = None
logger.warning('skipping optional include file: %s', include_path)
# Actually update the configuration data structures,
# now that this file has been successfully loaded.
config_hashes.update(new_config_hashes)
config_contents.update(new_config_contents)
for conf_name, new_conf in new_top_confs.items():
top_confs[conf_name].update(new_conf)
return True
def config_changed(top_config_file, new_top_config_file, config_hashes):
"""Return True if configuration has changed.
Args:
top_config_file (str): name of FAUCET config file
new_top_config_file (str): name, possibly new, of FAUCET config file.
config_hashes (dict): map of config file/includes and hashes of contents.
Returns:
bool: True if the file, or any file it includes, has changed.
"""
if new_top_config_file != top_config_file:
return True
if config_hashes is None or new_top_config_file is None:
return False
for config_file, config_hash in config_hashes.items():
config_file_exists = os.path.isfile(config_file)
# Config file not loaded but exists = reload.
if config_hash is None and config_file_exists:
return True
# Config file loaded but no longer exists = reload.
if config_hash and not config_file_exists:
return True
# Config file hash has changed = reload.
if config_file_exists:
new_config_hash = config_file_hash(config_file)
if new_config_hash != config_hash:
return True
return False
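# Usage sketch (file names and top-level keys are illustrative): a caller
# typically loads the top config with dp_include(), keeping the hash/content
# maps so config_changed() can later decide whether a HUP should reload:
#     hashes, contents, confs = {}, {}, {'dps': {}, 'vlans': {}}
#     dp_include(hashes, contents, '/etc/faucet/faucet.yaml', 'faucet', confs)
#     config_changed('/etc/faucet/faucet.yaml', '/etc/faucet/faucet.yaml', hashes)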
|
'''
The caller module is used as a front-end to manage direct calls to the salt
minion modules.
'''
from __future__ import absolute_import, print_function
import os
import sys
import time
import logging
import traceback
import multiprocessing
import salt
import salt.loader
import salt.minion
import salt.output
import salt.payload
import salt.transport
import salt.utils.args
import salt.utils.jid
import salt.defaults.exitcodes
from salt.log import LOG_LEVELS
from salt.utils import is_windows
from salt.utils import print_cli
from salt.utils import kinds
from salt.utils import activate_profile
from salt.utils import output_profile
from salt.cli import daemons
try:
from raet import raeting, nacling
from raet.lane.stacking import LaneStack
from raet.lane.yarding import RemoteYard, Yard
if is_windows():
import win32file
except ImportError:
# Don't die on missing transport libs since only one transport is required
pass
import salt.ext.six as six
from salt.exceptions import (
SaltClientError,
CommandNotFoundError,
CommandExecutionError,
SaltInvocationError,
)
log = logging.getLogger(__name__)
class Caller(object):
'''
Factory class to create salt-call callers for different transport
'''
@staticmethod
def factory(opts, **kwargs):
# Default to ZeroMQ for now
ttype = 'zeromq'
# determine the ttype
if 'transport' in opts:
ttype = opts['transport']
elif 'transport' in opts.get('pillar', {}).get('master', {}):
ttype = opts['pillar']['master']['transport']
# switch on available ttypes
if ttype in ('zeromq', 'tcp'):
return ZeroMQCaller(opts, **kwargs)
elif ttype == 'raet':
return RAETCaller(opts, **kwargs)
else:
raise Exception('Callers are only defined for ZeroMQ and raet')
# return NewKindOfCaller(opts, **kwargs)
class BaseCaller(object):
'''
Base class for caller transports
'''
def __init__(self, opts):
'''
Pass in command line opts
'''
self.opts = opts
self.opts['caller'] = True
self.serial = salt.payload.Serial(self.opts)
# Handle this here so other deeper code which might
# be imported as part of the salt api doesn't do a
# nasty sys.exit() and tick off our developer users
try:
self.minion = salt.minion.SMinion(opts)
except SaltClientError as exc:
raise SystemExit(str(exc))
def print_docs(self):
'''
Pick up the documentation for all of the modules and print it out.
'''
docs = {}
for name, func in six.iteritems(self.minion.functions):
if name not in docs:
if func.__doc__:
docs[name] = func.__doc__
for name in sorted(docs):
if name.startswith(self.opts.get('fun', '')):
print_cli('{0}:\n{1}\n'.format(name, docs[name]))
def print_grains(self):
'''
Print out the grains
'''
grains = salt.loader.grains(self.opts)
salt.output.display_output({'local': grains}, 'grains', self.opts)
def run(self):
'''
Execute the salt call logic
'''
profiling_enabled = self.opts.get('profiling_enabled', False)
try:
pr = activate_profile(profiling_enabled)
try:
ret = self.call()
finally:
output_profile(pr,
stats_path=self.opts.get('profiling_path',
'/tmp/stats'),
stop=True)
out = ret.get('out', 'nested')
if self.opts['metadata']:
print_ret = ret
out = 'nested'
else:
print_ret = ret.get('return', {})
salt.output.display_output(
{'local': print_ret},
out,
self.opts)
if self.opts.get('retcode_passthrough', False):
sys.exit(ret['retcode'])
except SaltInvocationError as err:
raise SystemExit(err)
def call(self):
'''
Call the module
'''
ret = {}
fun = self.opts['fun']
ret['jid'] = salt.utils.jid.gen_jid()
proc_fn = os.path.join(
salt.minion.get_proc_dir(self.opts['cachedir']),
ret['jid']
)
if fun not in self.minion.functions:
sys.stderr.write(self.minion.functions.missing_fun_string(fun))
mod_name = fun.split('.')[0]
if mod_name in self.minion.function_errors:
sys.stderr.write(' Possible reasons: {0}\n'.format(self.minion.function_errors[mod_name]))
else:
sys.stderr.write('\n')
sys.exit(-1)
try:
sdata = {
'fun': fun,
'pid': os.getpid(),
'jid': ret['jid'],
'tgt': 'salt-call'}
args, kwargs = salt.minion.load_args_and_kwargs(
self.minion.functions[fun],
salt.utils.args.parse_input(self.opts['arg']),
data=sdata)
try:
with salt.utils.fopen(proc_fn, 'w+b') as fp_:
fp_.write(self.serial.dumps(sdata))
except NameError:
# Don't require msgpack with local
pass
except IOError:
sys.stderr.write(
'Cannot write to process directory. '
'Do you have permissions to '
'write to {0} ?\n'.format(proc_fn))
func = self.minion.functions[fun]
try:
ret['return'] = func(*args, **kwargs)
except TypeError as exc:
sys.stderr.write('\nPassed invalid arguments: {0}.\n\nUsage:\n'.format(exc))
print_cli(func.__doc__)
active_level = LOG_LEVELS.get(
self.opts['log_level'].lower(), logging.ERROR)
if active_level <= logging.DEBUG:
trace = traceback.format_exc()
sys.stderr.write(trace)
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
try:
ret['retcode'] = sys.modules[
func.__module__].__context__.get('retcode', 0)
except AttributeError:
ret['retcode'] = 1
except (CommandExecutionError) as exc:
msg = 'Error running \'{0}\': {1}\n'
active_level = LOG_LEVELS.get(
self.opts['log_level'].lower(), logging.ERROR)
if active_level <= logging.DEBUG:
sys.stderr.write(traceback.format_exc())
sys.stderr.write(msg.format(fun, str(exc)))
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
except CommandNotFoundError as exc:
msg = 'Command required for \'{0}\' not found: {1}\n'
sys.stderr.write(msg.format(fun, str(exc)))
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
try:
os.remove(proc_fn)
except (IOError, OSError):
pass
if hasattr(self.minion.functions[fun], '__outputter__'):
oput = self.minion.functions[fun].__outputter__
if isinstance(oput, six.string_types):
ret['out'] = oput
is_local = self.opts['local'] or self.opts.get(
'file_client', False) == 'local'
returners = self.opts.get('return', '').split(',')
if (not is_local) or returners:
ret['id'] = self.opts['id']
ret['fun'] = fun
ret['fun_args'] = self.opts['arg']
for returner in returners:
if not returner: # if we got an empty returner somehow, skip
continue
try:
ret['success'] = True
self.minion.returners['{0}.returner'.format(returner)](ret)
except Exception:
pass
# return the job infos back up to the respective minion's master
if not is_local:
try:
mret = ret.copy()
mret['jid'] = 'req'
self.return_pub(mret)
except Exception:
pass
# close raet channel here
return ret
class ZeroMQCaller(BaseCaller):
'''
Object to wrap the calling of local salt modules for the salt-call command
'''
def __init__(self, opts):
'''
Pass in the command line options
'''
super(ZeroMQCaller, self).__init__(opts)
def return_pub(self, ret):
'''
Return the data up to the master
'''
channel = salt.transport.Channel.factory(self.opts, usage='salt_call')
load = {'cmd': '_return', 'id': self.opts['id']}
for key, value in six.iteritems(ret):
load[key] = value
channel.send(load)
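# Typical entry point (sketch): the salt-call CLI builds ``opts`` from the
# command line and then runs ``Caller.factory(opts).run()``.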
def raet_minion_run(cleanup_protecteds):
'''
Set up the minion caller. Should be run in its own process.
This function is intentionally left out of RAETCaller. This will avoid
needing to pickle the RAETCaller object on Windows.
'''
minion = daemons.Minion() # daemonizes here
minion.call(cleanup_protecteds=cleanup_protecteds) # caller minion.call_in uses caller.flo
class RAETCaller(BaseCaller):
'''
Object to wrap the calling of local salt modules for the salt-call command
when transport is raet
There are two operation modes.
1) Use a preexisting minion
2) Set up a special caller minion if no preexisting minion
The special caller minion is a subset whose only function is to perform
Salt-calls with raet as the transport
The essentials:
A RoadStack whose local estate name is of the form "role_kind" where:
        role is the minion id opts['id']
        kind is opts['__role'], which should be 'caller' (see APPL_KIND_NAMES)
    The RoadStack is for communication to/from a master
A LaneStack with manor yard so that RaetChannels created by the func Jobbers
can communicate through this manor yard then through the
RoadStack to/from a master
A Router to route between the stacks (Road and Lane)
These are all managed via a FloScript named caller.flo
'''
def __init__(self, opts):
'''
Pass in the command line options
'''
self.process = None
if not opts['local']:
self.stack = self._setup_caller_stack(opts)
salt.transport.jobber_stack = self.stack
if (opts.get('__role') ==
kinds.APPL_KIND_NAMES[kinds.applKinds.caller]):
# spin up and fork minion here
self.process = multiprocessing.Process(target=raet_minion_run,
kwargs={'cleanup_protecteds': [self.stack.ha], })
self.process.start()
# wait here until '/var/run/salt/minion/alpha_caller.manor.uxd' exists
self._wait_caller(opts)
super(RAETCaller, self).__init__(opts)
def run(self):
'''
Execute the salt call logic
'''
try:
ret = self.call()
if not self.opts['local']:
self.stack.server.close()
salt.transport.jobber_stack = None
if self.opts['metadata']:
print_ret = ret
else:
print_ret = ret.get('return', {})
if self.process:
self.process.terminate()
salt.output.display_output(
{'local': print_ret},
ret.get('out', 'nested'),
self.opts)
if self.opts.get('retcode_passthrough', False):
sys.exit(ret['retcode'])
except SaltInvocationError as err:
raise SystemExit(err)
def _setup_caller_stack(self, opts):
'''
        Set up and return the LaneStack and Yard used by the channel when a
        global stack is not already set up (such as in salt-call) to
        communicate to/from the minion
'''
role = opts.get('id')
if not role:
emsg = ("Missing role required to setup RAETChannel.")
log.error(emsg + "\n")
raise ValueError(emsg)
kind = opts.get('__role') # application kind 'master', 'minion', etc
if kind not in kinds.APPL_KINDS:
emsg = ("Invalid application kind = '{0}' for RAETChannel.".format(kind))
log.error(emsg + "\n")
raise ValueError(emsg)
if kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.minion],
kinds.APPL_KIND_NAMES[kinds.applKinds.caller], ]:
lanename = "{0}_{1}".format(role, kind)
else:
emsg = ("Unsupported application kind '{0}' for RAETChannel.".format(kind))
log.error(emsg + '\n')
raise ValueError(emsg)
sockdirpath = opts['sock_dir']
stackname = 'caller' + nacling.uuid(size=18)
stack = LaneStack(name=stackname,
lanename=lanename,
sockdirpath=sockdirpath)
stack.Pk = raeting.PackKind.pack.value
stack.addRemote(RemoteYard(stack=stack,
name='manor',
lanename=lanename,
dirpath=sockdirpath))
log.debug("Created Caller Jobber Stack {0}\n".format(stack.name))
return stack
def _wait_caller(self, opts):
'''
Returns when RAET Minion Yard is available
'''
yardname = 'manor'
dirpath = opts['sock_dir']
role = opts.get('id')
if not role:
emsg = ("Missing role required to setup RAET SaltCaller.")
log.error(emsg + "\n")
raise ValueError(emsg)
kind = opts.get('__role') # application kind 'master', 'minion', etc
if kind not in kinds.APPL_KINDS:
emsg = ("Invalid application kind = '{0}' for RAET SaltCaller.".format(kind))
log.error(emsg + "\n")
raise ValueError(emsg)
if kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.minion],
kinds.APPL_KIND_NAMES[kinds.applKinds.caller], ]:
lanename = "{0}_{1}".format(role, kind)
else:
emsg = ("Unsupported application kind '{0}' for RAET SaltCaller.".format(kind))
log.error(emsg + '\n')
raise ValueError(emsg)
ha, dirpath = Yard.computeHa(dirpath, lanename, yardname)
if is_windows():
# RAET lanes do not use files on Windows. Need to use win32file
# API to check for existence.
exists = False
while not exists:
try:
f = win32file.CreateFile(
ha,
win32file.GENERIC_WRITE | win32file.GENERIC_READ,
win32file.FILE_SHARE_READ,
None,
win32file.OPEN_EXISTING,
0,
None)
win32file.CloseHandle(f)
exists = True
except win32file.error:
time.sleep(0.1)
else:
while not ((os.path.exists(ha) and
not os.path.isfile(ha) and
not os.path.isdir(ha))):
time.sleep(0.1)
time.sleep(0.5)
|
from uploader.settings.test import *
DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'TEST_NAME': ':memory:',
},
}
|
"""
Cloudbreak API
    Cloudbreak is a powerful left surf that breaks over a coral reef, a mile southwest of the island of Tavarua, Fiji. Cloudbreak is a cloud-agnostic Hadoop as a Service API. It abstracts provisioning and eases the management and monitoring of on-demand clusters. SequenceIQ's Cloudbreak is a RESTful application development platform with the goal of helping developers to build solutions for deploying Hadoop YARN clusters in different environments. Once it is deployed in your favourite servlet container it exposes a REST API allowing clients to spin up Hadoop clusters of arbitrary size on different cloud providers. Provisioning Hadoop has never been easier. Cloudbreak is built on the foundation of cloud providers' APIs (Amazon AWS, Microsoft Azure, Google Cloud Platform, Openstack), Apache Ambari, Docker lightweight containers, Swarm and Consul. For further product documentation follow the link: <a href=\"http://hortonworks.com/apache/cloudbreak/\">http://hortonworks.com/apache/cloudbreak/</a>
OpenAPI spec version: 2.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class ImageResponse(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'_date': 'str',
'description': 'str',
'os': 'str',
'os_type': 'str',
'uuid': 'str',
'version': 'str',
'repo': 'dict(str, str)',
'images': 'dict(str, dict(str, str))',
'stack_details': 'StackDetailsJson',
'default_image': 'bool',
'package_versions': 'dict(str, str)'
}
attribute_map = {
'_date': 'date',
'description': 'description',
'os': 'os',
'os_type': 'osType',
'uuid': 'uuid',
'version': 'version',
'repo': 'repo',
'images': 'images',
'stack_details': 'stackDetails',
'default_image': 'defaultImage',
'package_versions': 'packageVersions'
}
def __init__(self, _date=None, description=None, os=None, os_type=None, uuid=None, version=None, repo=None, images=None, stack_details=None, default_image=False, package_versions=None):
"""
ImageResponse - a model defined in Swagger
"""
self.__date = None
self._description = None
self._os = None
self._os_type = None
self._uuid = None
self._version = None
self._repo = None
self._images = None
self._stack_details = None
self._default_image = None
self._package_versions = None
if _date is not None:
self._date = _date
if description is not None:
self.description = description
if os is not None:
self.os = os
if os_type is not None:
self.os_type = os_type
if uuid is not None:
self.uuid = uuid
if version is not None:
self.version = version
if repo is not None:
self.repo = repo
if images is not None:
self.images = images
if stack_details is not None:
self.stack_details = stack_details
if default_image is not None:
self.default_image = default_image
if package_versions is not None:
self.package_versions = package_versions
@property
def _date(self):
"""
Gets the _date of this ImageResponse.
:return: The _date of this ImageResponse.
:rtype: str
"""
return self.__date
@_date.setter
def _date(self, _date):
"""
Sets the _date of this ImageResponse.
:param _date: The _date of this ImageResponse.
:type: str
"""
self.__date = _date
@property
def description(self):
"""
Gets the description of this ImageResponse.
:return: The description of this ImageResponse.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this ImageResponse.
:param description: The description of this ImageResponse.
:type: str
"""
self._description = description
@property
def os(self):
"""
Gets the os of this ImageResponse.
:return: The os of this ImageResponse.
:rtype: str
"""
return self._os
@os.setter
def os(self, os):
"""
Sets the os of this ImageResponse.
:param os: The os of this ImageResponse.
:type: str
"""
self._os = os
@property
def os_type(self):
"""
Gets the os_type of this ImageResponse.
:return: The os_type of this ImageResponse.
:rtype: str
"""
return self._os_type
@os_type.setter
def os_type(self, os_type):
"""
Sets the os_type of this ImageResponse.
:param os_type: The os_type of this ImageResponse.
:type: str
"""
self._os_type = os_type
@property
def uuid(self):
"""
Gets the uuid of this ImageResponse.
:return: The uuid of this ImageResponse.
:rtype: str
"""
return self._uuid
@uuid.setter
def uuid(self, uuid):
"""
Sets the uuid of this ImageResponse.
:param uuid: The uuid of this ImageResponse.
:type: str
"""
self._uuid = uuid
@property
def version(self):
"""
Gets the version of this ImageResponse.
:return: The version of this ImageResponse.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this ImageResponse.
:param version: The version of this ImageResponse.
:type: str
"""
self._version = version
@property
def repo(self):
"""
Gets the repo of this ImageResponse.
:return: The repo of this ImageResponse.
:rtype: dict(str, str)
"""
return self._repo
@repo.setter
def repo(self, repo):
"""
Sets the repo of this ImageResponse.
:param repo: The repo of this ImageResponse.
:type: dict(str, str)
"""
self._repo = repo
@property
def images(self):
"""
Gets the images of this ImageResponse.
:return: The images of this ImageResponse.
:rtype: dict(str, dict(str, str))
"""
return self._images
@images.setter
def images(self, images):
"""
Sets the images of this ImageResponse.
:param images: The images of this ImageResponse.
:type: dict(str, dict(str, str))
"""
self._images = images
@property
def stack_details(self):
"""
Gets the stack_details of this ImageResponse.
:return: The stack_details of this ImageResponse.
:rtype: StackDetailsJson
"""
return self._stack_details
@stack_details.setter
def stack_details(self, stack_details):
"""
Sets the stack_details of this ImageResponse.
:param stack_details: The stack_details of this ImageResponse.
:type: StackDetailsJson
"""
self._stack_details = stack_details
@property
def default_image(self):
"""
Gets the default_image of this ImageResponse.
:return: The default_image of this ImageResponse.
:rtype: bool
"""
return self._default_image
@default_image.setter
def default_image(self, default_image):
"""
Sets the default_image of this ImageResponse.
:param default_image: The default_image of this ImageResponse.
:type: bool
"""
self._default_image = default_image
@property
def package_versions(self):
"""
Gets the package_versions of this ImageResponse.
:return: The package_versions of this ImageResponse.
:rtype: dict(str, str)
"""
return self._package_versions
@package_versions.setter
def package_versions(self, package_versions):
"""
Sets the package_versions of this ImageResponse.
:param package_versions: The package_versions of this ImageResponse.
:type: dict(str, str)
"""
self._package_versions = package_versions
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, ImageResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
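# A minimal usage sketch (illustrative values; this model is normally
# instantiated by the generated API client rather than by hand):
if __name__ == '__main__':
    image = ImageResponse(_date='2018-01-01',
                          description='base image',
                          os='amazonlinux',
                          os_type='redhat6',
                          uuid='00000000-0000-0000-0000-000000000000',
                          default_image=True)
    print(image.to_dict())
    print(image)  # __repr__ delegates to pformat(to_dict())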
|
class Solution:
def multiply(self, num1, num2):
"""
:type num1: str
:type num2: str
:rtype: str
"""
        # return str(int(num1) * int(num2))
        if '0' in (num1, num2):
            return '0'
        m, n = len(num1), len(num2)
        # Grade-school multiplication: digit i of num1 times digit j of num2
        # contributes to positions i+j (carry) and i+j+1 (unit) of the result.
        res = [0] * (m + n)
        for i in range(m):
            for j in range(n):
                d1 = ord(num1[i]) - ord('0')
                d2 = ord(num2[j]) - ord('0')
                res[i+j+1] += (d1*d2) % 10
                res[i+j] += (d1*d2) // 10
        # Normalize: propagate carries from the least significant digit up.
        for i in range(m + n - 1, 0, -1):
            res[i - 1] += res[i] // 10
            res[i] = res[i] % 10
        # The product has m+n digits, or m+n-1 when res[0] is zero.
        return ''.join(str(x) for x in res) if res[0] else ''.join(str(x) for x in res[1:])
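# Quick sanity checks for the solution above (illustrative, not part of the
# original submission):
if __name__ == '__main__':
    s = Solution()
    assert s.multiply('99', '99') == '9801'
    assert s.multiply('123', '456') == '56088'
    assert s.multiply('0', '12345') == '0'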
|
import wx
from cairis.core.armid import *
from datetime import datetime
from RevisionEntryDialog import RevisionEntryDialog
__author__ = 'Shamal Faily'
class RevisionListCtrl(wx.ListCtrl):
def __init__(self,parent):
wx.ListCtrl.__init__(self,parent,PROJECTSETTINGS_LISTREVISIONS_ID,size=wx.DefaultSize,style=wx.LC_REPORT | wx.LC_SORT_ASCENDING)
self.InsertColumn(0,'No')
self.SetColumnWidth(0,100)
self.InsertColumn(1,'Date')
self.SetColumnWidth(1,100)
self.InsertColumn(2,'Remarks')
self.SetColumnWidth(2,100)
self.theSelectedIdx = -1
self.theLastRevision = 0
self.theMenu = wx.Menu()
self.theMenu.Append(CONTRIBUTORLISTCTRL_MENUADD_ID,'Add')
self.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK,self.OnRightDown)
self.Bind(wx.EVT_LIST_ITEM_SELECTED,self.OnItemSelected)
self.Bind(wx.EVT_LIST_ITEM_DESELECTED,self.OnItemDeselected)
wx.EVT_MENU(self.theMenu,CONTRIBUTORLISTCTRL_MENUADD_ID,self.onAddEntry)
def OnItemSelected(self,evt):
self.theSelectedIdx = evt.GetIndex()
def OnItemDeselected(self,evt):
self.theSelectedIdx = -1
def OnRightDown(self,evt):
self.PopupMenu(self.theMenu)
def onAddEntry(self,evt):
dlg = RevisionEntryDialog(self)
if (dlg.ShowModal() == REVISIONENTRY_BUTTONCOMMIT_ID):
revRemarks = dlg.remarks()
self.theLastRevision += 1
revNo = self.theLastRevision
revDate = datetime.now().strftime("%y-%m-%d %H:%M:%S")
idx = self.GetItemCount()
self.InsertStringItem(idx,str(revNo))
self.SetStringItem(idx,1,revDate)
self.SetStringItem(idx,2,revRemarks)
  def load(self,entries):
    '''Populate the control from (revNo, revDate, revRemarks) tuples.'''
    for revNo,revDate,revRemarks in entries:
      idx = self.GetItemCount()
      self.InsertStringItem(idx,str(revNo))
      self.theLastRevision = max(self.theLastRevision,revNo)
      self.SetStringItem(idx,1,revDate)
      self.SetStringItem(idx,2,revRemarks)
  def dimensions(self):
    '''Return the listed revision entries as (number, date, remarks) tuples.'''
entries = []
for x in range(self.GetItemCount()):
revNo = self.GetItemText(x)
revDate = self.GetItem(x,1)
revRemarks = self.GetItem(x,2)
entries.append((int(revNo),revDate.GetText(),revRemarks.GetText()))
return entries
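# Hedged usage sketch (assumes a running wx app and a parent window; the
# values are illustrative):
#
#   ctrl = RevisionListCtrl(parentWindow)
#   ctrl.load([(1, '17-01-01 09:00:00', 'Initial revision')])
#   ctrl.dimensions()  # -> [(1, '17-01-01 09:00:00', 'Initial revision')]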
|
import tensorflow as tf
from preprocessing import crop
MEAN = tf.constant([123.68, 116.78, 103.94], dtype=tf.float32) # IMAGENET
def preprocess_image(image, output_height, output_width, is_training=False):
# Crop
img_crop = crop.preprocess_image(image, output_height, output_width, is_training)
# Subtract the imagenet mean (mean over all imagenet images)
imgnet_mean = tf.reshape(MEAN, [1, 1, 3])
img_cast = tf.cast(img_crop, dtype=tf.float32)
img_standardized = tf.subtract(img_cast, imgnet_mean)
return img_standardized
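# A minimal sketch of how this might be called (assumes the sibling
# preprocessing.crop module is importable; TF1-style graph code; the expected
# shape assumes crop returns an HWC tensor of the requested size):
if __name__ == '__main__':
    image = tf.random_uniform([256, 256, 3], maxval=255.0, dtype=tf.float32)
    standardized = preprocess_image(image, 224, 224, is_training=False)
    with tf.Session() as sess:
        print(sess.run(standardized).shape)  # expected: (224, 224, 3)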
|
from typing import Optional, Tuple, Union
from . import array_types
from iree.compiler import (
ir,
passmanager,
)
from iree.compiler.transforms import ireec
import jax.core
import jax.interpreters.mlir
import jax.numpy as jnp
from jax._src.lib.mlir import ir as jax_ir
from jax.interpreters.xla import abstractify as jax_abstractify
_JAX_CONTEXT = jax_ir.Context()
_JAX_LOC = jax_ir.Location.unknown(context=_JAX_CONTEXT)
def aval_to_ir_types(context: ir.Context,
                     aval: jax.core.AbstractValue) -> Tuple[ir.Type, ...]:
# We use a Jax internal to do this, since it has the best knowledge.
# However, this is very likely crossing a context/ABI boundary, so be
# mindful and trip through text.
# TODO: We could detect if these are actually the same instance and
# elide this.
with _JAX_LOC:
jax_types = jax.interpreters.mlir.aval_to_ir_types(aval)
def convert(jax_type: jax_ir.Type) -> ir.Type:
return ir.Type.parse(str(jax_type), context=context)
return tuple(convert(t) for t in jax_types)
def cleanup_mhlo_module(module: ir.Module):
with module.context:
pm = passmanager.PassManager()
ireec.build_xla_cleanup_pass_pipeline(pm)
# TODO: Don't lower it all the way here - but need to land bug fixes
# first.
#driver.build_mhlo_import_pass_pipeline(pm)
pm.run(module)
def abstractify(x) -> jax.core.AbstractValue:
# TODO: Ugh.
if isinstance(x, jax.core.ConcreteArray):
x = x.val
if isinstance(x, array_types.TracedArrayBase):
return x.aval
# Note that a ConcreteArray is an AbstractValue so we handle that above.
if isinstance(x, jax.core.AbstractValue):
return x
return jax_abstractify(x)
def unwrap_global_array(x) -> Optional[array_types.ExportedGlobalArray]:
# TODO: Ugh. Ugh.
if isinstance(x, jax.core.ConcreteArray):
x = x.val
if not isinstance(x, array_types.ExportedGlobalArray):
return None
return x
def import_module(context: ir.Context, module: Union[str, ir.Module]):
if isinstance(module, ir.Module):
if module.context is context:
return module
# TODO: Fix upstream so that parse can accept bytes and then enable
# binary=True.
module = module.operation.get_asm(enable_debug_info=True)
if not isinstance(module, str):
raise ValueError(
f"Attempted to import a non-module (did you enable MLIR in JAX?). "
f"Got {module}")
new_module = ir.Module.parse(module, context=context)
return new_module
def import_main_function(*,
target_module: ir.Module,
target_symbol_table: ir.SymbolTable,
source_module: Union[str, ir.Module],
main_symbol: str = "main",
visibility: str = "private") -> str:
"""Imports a named function from another module into this one.
Returns (imported symbol name, operation) of the found function (if
present).
TODO: This is horrible. Burn it.
"""
context = target_module.context
source_module = import_module(context, source_module)
cleanup_mhlo_module(source_module)
with context:
target_body = target_module.body
main_symbol_attr = ir.StringAttr.get(main_symbol)
found_function = None
found_name = None
for source_operation in source_module.body.operations:
source_operation = source_operation.detach_from_parent()
target_body.append(source_operation)
# TODO: Really should be checking for the Symbol trait.
# TODO: The builtin.func overrides provide a 'name' attribute which
# shadows the operation name.
found_it = False
if "sym_name" in source_operation.attributes:
if source_operation.attributes["sym_name"] == main_symbol_attr:
found_it = True
target_symbol_table.insert(source_operation)
if found_it:
found_name = ir.StringAttr(
source_operation.attributes["sym_name"]).value
found_function = source_operation
found_function.attributes["sym_visibility"] = ir.StringAttr.get(
visibility)
assert found_name, f"Imported function {main_symbol} not found"
return found_name
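# Hedged usage sketch for import_main_function (the module text and the exact
# func syntax depend on the MLIR version in use; illustrative only):
#
#   with ir.Context() as ctx:
#       target = ir.Module.parse('module {}', context=ctx)
#       symbol_table = ir.SymbolTable(target.operation)
#       name = import_main_function(
#           target_module=target,
#           target_symbol_table=symbol_table,
#           source_module='func.func @main() { return }')
#       assert name == 'main'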
|
import logging
LOG_LEVEL = logging.DEBUG
LOG_DIR = ''
ENVIRONMENT_PATH = ''
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
DEV_CLOUD_IP_ADDRESS = ''
DEV_CLOUD_DATA = {
'site_domain': DEV_CLOUD_IP_ADDRESS, # Web interface address for activation link
'site_name': 'Dev Cloud' # System name in emails
}
AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''
CLM_LOGIN = ''
CLM_PASSWORD = ''
CLM_ADDRESS = 'http://www.cloud.ifj.edu.pl:8000/'
REST_API_ADDRESS = ''
CELERY_IP_ADDRESS = ''
VM_IMAGE_NAME = 'Dev Cloud - JuJu environment'
VM_IMAGE_ROOT_PASSWORD = ''
SSH_KEY_PATH = ''
EMAIL = 'devcloudplatform@gmail.com'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'devcloudplatform@gmail.com'
EMAIL_HOST_PASSWORD = ''
FROM_EMAIL = 'devcloudplatform@gmail.com'
SECRET_KEY = ''
|
import sys
sys.path.insert(0, 'lib')
import endpoints
import os
from google.appengine.ext import ndb
from google.appengine.ext.ndb import msgprop
from protorpc import messages
from protorpc import remote
from endpoints_proto_datastore.ndb import EndpointsModel
from endpoints_proto_datastore.ndb import EndpointsAliasProperty
import httplib2
from apiclient.discovery import build
from oauth2client.client import AccessTokenCredentials
_CLIENT_IDs = [
endpoints.API_EXPLORER_CLIENT_ID,
'YOUR_CLIENT_ID'
]
class Line(messages.Enum):
AKSUMITE = 1
CAHOKIAN = 2
DONGHU = 3
HARAPPAN = 4
KOORI = 5
LATENE = 6
MINOAN = 7
MU = 8
NABATEAN = 9
OLMEC = 10
SHANG = 11
SUMERIAN = 12
class Student(EndpointsModel):
_message_fields_schema = ('id', 'name', 'link', 'image', 'line')
name = ndb.StringProperty(required=True)
link = ndb.StringProperty(required=True)
image = ndb.StringProperty(required=True)
line = msgprop.EnumProperty(Line, required=True)
def IdSet(self, value):
if not isinstance(value, basestring):
raise TypeError('ID must be a string.')
self.UpdateFromKey(ndb.Key(Student, value))
@EndpointsAliasProperty(setter=IdSet, required=True)
def id(self):
if self.key is not None:
return self.key.string_id()
def check_auth(id):
user = endpoints.get_current_user()
if user is None:
return False
    # We want to check whether the submitted user id matches the user the
    # request was authenticated as. Since endpoints.get_current_user() only
    # includes the email but not the ID, we have to use a workaround until
    # this issue is fixed:
    # https://code.google.com/p/googleappengine/issues/detail?id=8848
    # We could use the email address as the ID instead, but for privacy
    # reasons I didn't want to store it and used the public G+ ID instead.
if "HTTP_AUTHORIZATION" in os.environ:
(tokentype, token) = os.environ["HTTP_AUTHORIZATION"].split(" ")
else:
return False
credentials = AccessTokenCredentials(token, 'my-user-agent/1.0')
http = httplib2.Http()
http = credentials.authorize(http)
service = build('plus', 'v1', http=http)
    try:
        profile = service.people().get(userId='me', fields='id').execute()
    except Exception:
        # The token was invalid or the Google+ API call failed.
        return False
if id != profile['id']:
return False
return True
api_root = endpoints.api(name='ancientsocieties', version='v1', allowed_client_ids=_CLIENT_IDs)
@api_root.api_class(resource_name='student', path='students')
class StudentsService(remote.Service):
@Student.method(request_fields=('id',), path='/students/{id}',
http_method='GET', name='get')
def get(self, student):
if not student.from_datastore:
raise endpoints.NotFoundException('Student not found.')
return student
@Student.method(path='/students/{id}', http_method='POST',
name='insert')
def insert(self, student):
if not check_auth(student.id):
raise endpoints.UnauthorizedException('You may only enter or change your own data.')
student.put()
return student
@Student.method(request_fields=('id',), response_fields=('id',),
path='/students/{id}',
http_method='DELETE', name='delete')
def delete(self, student):
if not student.from_datastore:
raise endpoints.NotFoundException('Student not found.')
if not check_auth(student.id):
raise endpoints.UnauthorizedException('You may only enter or change your own data.')
student.key.delete()
return student
@Student.query_method(query_fields=('limit', 'pageToken'),
path='/students', name='list')
def list(self, query):
return query
server = endpoints.api_server([StudentsService], restricted=False)
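# Hedged usage note: once deployed, Cloud Endpoints serves this API under the
# /_ah/api root; an illustrative request against the list method would be
# (app id is a placeholder):
#
#   GET https://<your-app-id>.appspot.com/_ah/api/ancientsocieties/v1/students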
|
from __future__ import unicode_literals
import datetime
from six.moves.urllib.request import urlopen
from six.moves.urllib.error import HTTPError
from functools import wraps
from gzip import GzipFile
from io import BytesIO
import zlib
import json
import boto
import boto3
from botocore.client import ClientError
import botocore.exceptions
from boto.exception import S3CreateError, S3ResponseError
from botocore.handlers import disable_signing
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from freezegun import freeze_time
import six
import requests
import tests.backport_assert_raises # noqa
from nose.tools import assert_raises
import sure # noqa
from moto import settings, mock_s3, mock_s3_deprecated
import moto.s3.models as s3model
if settings.TEST_SERVER_MODE:
REDUCED_PART_SIZE = s3model.UPLOAD_PART_MIN_SIZE
EXPECTED_ETAG = '"140f92a6df9f9e415f74a1463bcee9bb-2"'
else:
REDUCED_PART_SIZE = 256
EXPECTED_ETAG = '"66d1a1a2ed08fd05c137f316af4ff255-2"'
def reduced_min_part_size(f):
""" speed up tests by temporarily making the multipart minimum part size
small
"""
orig_size = s3model.UPLOAD_PART_MIN_SIZE
@wraps(f)
def wrapped(*args, **kwargs):
try:
s3model.UPLOAD_PART_MIN_SIZE = REDUCED_PART_SIZE
return f(*args, **kwargs)
finally:
s3model.UPLOAD_PART_MIN_SIZE = orig_size
return wrapped
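# An equivalent sketch using unittest.mock.patch.object instead of the manual
# save/restore above (hypothetical helper, not used by the tests below; on
# py2 this would need the external mock backport):
def reduced_min_part_size_via_mock(f):
    from unittest import mock

    @wraps(f)
    def wrapped(*args, **kwargs):
        # patch.object restores the original constant even if the test raises.
        with mock.patch.object(s3model, 'UPLOAD_PART_MIN_SIZE',
                               REDUCED_PART_SIZE):
            return f(*args, **kwargs)
    return wrapped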
class MyModel(object):
def __init__(self, name, value):
self.name = name
self.value = value
def save(self):
s3 = boto3.client('s3', region_name='us-east-1')
s3.put_object(Bucket='mybucket', Key=self.name, Body=self.value)
@mock_s3
def test_my_model_save():
# Create Bucket so that test can run
conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket(Bucket='mybucket')
####################################
model_instance = MyModel('steve', 'is awesome')
model_instance.save()
body = conn.Object('mybucket', 'steve').get()['Body'].read().decode()
assert body == 'is awesome'
@mock_s3
def test_key_etag():
conn = boto3.resource('s3', region_name='us-east-1')
conn.create_bucket(Bucket='mybucket')
model_instance = MyModel('steve', 'is awesome')
model_instance.save()
conn.Bucket('mybucket').Object('steve').e_tag.should.equal(
'"d32bda93738f7e03adb22e66c90fbc04"')
@mock_s3_deprecated
def test_multipart_upload_too_small():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
multipart = bucket.initiate_multipart_upload("the-key")
multipart.upload_part_from_file(BytesIO(b'hello'), 1)
multipart.upload_part_from_file(BytesIO(b'world'), 2)
# Multipart with total size under 5MB is refused
multipart.complete_upload.should.throw(S3ResponseError)
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_upload():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
multipart = bucket.initiate_multipart_upload("the-key")
part1 = b'0' * REDUCED_PART_SIZE
multipart.upload_part_from_file(BytesIO(part1), 1)
# last part, can be less than 5 MB
part2 = b'1'
multipart.upload_part_from_file(BytesIO(part2), 2)
multipart.complete_upload()
# we should get both parts as the key contents
bucket.get_key(
"the-key").get_contents_as_string().should.equal(part1 + part2)
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_upload_out_of_order():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
multipart = bucket.initiate_multipart_upload("the-key")
# last part, can be less than 5 MB
part2 = b'1'
multipart.upload_part_from_file(BytesIO(part2), 4)
part1 = b'0' * REDUCED_PART_SIZE
multipart.upload_part_from_file(BytesIO(part1), 2)
multipart.complete_upload()
# we should get both parts as the key contents
bucket.get_key(
"the-key").get_contents_as_string().should.equal(part1 + part2)
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_upload_with_headers():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
multipart = bucket.initiate_multipart_upload(
"the-key", metadata={"foo": "bar"})
part1 = b'0' * 10
multipart.upload_part_from_file(BytesIO(part1), 1)
multipart.complete_upload()
key = bucket.get_key("the-key")
key.metadata.should.equal({"foo": "bar"})
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_upload_with_copy_key():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "original-key"
key.set_contents_from_string("key_value")
multipart = bucket.initiate_multipart_upload("the-key")
part1 = b'0' * REDUCED_PART_SIZE
multipart.upload_part_from_file(BytesIO(part1), 1)
multipart.copy_part_from_key("foobar", "original-key", 2, 0, 3)
multipart.complete_upload()
bucket.get_key(
"the-key").get_contents_as_string().should.equal(part1 + b"key_")
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_upload_cancel():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
multipart = bucket.initiate_multipart_upload("the-key")
part1 = b'0' * REDUCED_PART_SIZE
multipart.upload_part_from_file(BytesIO(part1), 1)
multipart.cancel_upload()
    # TODO we really need some sort of assertion here, but we don't currently
    # have the ability to list multipart uploads for a bucket.
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_etag():
# Create Bucket so that test can run
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('mybucket')
multipart = bucket.initiate_multipart_upload("the-key")
part1 = b'0' * REDUCED_PART_SIZE
multipart.upload_part_from_file(BytesIO(part1), 1)
# last part, can be less than 5 MB
part2 = b'1'
multipart.upload_part_from_file(BytesIO(part2), 2)
multipart.complete_upload()
# we should get both parts as the key contents
bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG)
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_invalid_order():
# Create Bucket so that test can run
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('mybucket')
multipart = bucket.initiate_multipart_upload("the-key")
part1 = b'0' * 5242880
etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag
# last part, can be less than 5 MB
part2 = b'1'
etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag
xml = "<Part><PartNumber>{0}</PartNumber><ETag>{1}</ETag></Part>"
xml = xml.format(2, etag2) + xml.format(1, etag1)
xml = "<CompleteMultipartUpload>{0}</CompleteMultipartUpload>".format(xml)
bucket.complete_multipart_upload.when.called_with(
multipart.key_name, multipart.id, xml).should.throw(S3ResponseError)
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_etag_quotes_stripped():
# Create Bucket so that test can run
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('mybucket')
multipart = bucket.initiate_multipart_upload("the-key")
part1 = b'0' * REDUCED_PART_SIZE
etag1 = multipart.upload_part_from_file(BytesIO(part1), 1).etag
# last part, can be less than 5 MB
part2 = b'1'
etag2 = multipart.upload_part_from_file(BytesIO(part2), 2).etag
# Strip quotes from etags
etag1 = etag1.replace('"','')
etag2 = etag2.replace('"','')
xml = "<Part><PartNumber>{0}</PartNumber><ETag>{1}</ETag></Part>"
xml = xml.format(1, etag1) + xml.format(2, etag2)
xml = "<CompleteMultipartUpload>{0}</CompleteMultipartUpload>".format(xml)
bucket.complete_multipart_upload.when.called_with(
multipart.key_name, multipart.id, xml).should_not.throw(S3ResponseError)
# we should get both parts as the key contents
bucket.get_key("the-key").etag.should.equal(EXPECTED_ETAG)
@mock_s3_deprecated
@reduced_min_part_size
def test_multipart_duplicate_upload():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
multipart = bucket.initiate_multipart_upload("the-key")
part1 = b'0' * REDUCED_PART_SIZE
multipart.upload_part_from_file(BytesIO(part1), 1)
# same part again
multipart.upload_part_from_file(BytesIO(part1), 1)
part2 = b'1' * 1024
multipart.upload_part_from_file(BytesIO(part2), 2)
multipart.complete_upload()
# We should get only one copy of part 1.
bucket.get_key(
"the-key").get_contents_as_string().should.equal(part1 + part2)
@mock_s3_deprecated
def test_list_multiparts():
# Create Bucket so that test can run
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('mybucket')
multipart1 = bucket.initiate_multipart_upload("one-key")
multipart2 = bucket.initiate_multipart_upload("two-key")
uploads = bucket.get_all_multipart_uploads()
uploads.should.have.length_of(2)
dict([(u.key_name, u.id) for u in uploads]).should.equal(
{'one-key': multipart1.id, 'two-key': multipart2.id})
multipart2.cancel_upload()
uploads = bucket.get_all_multipart_uploads()
uploads.should.have.length_of(1)
uploads[0].key_name.should.equal("one-key")
multipart1.cancel_upload()
uploads = bucket.get_all_multipart_uploads()
uploads.should.be.empty
@mock_s3_deprecated
def test_key_save_to_missing_bucket():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.get_bucket('mybucket', validate=False)
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string.when.called_with(
"foobar").should.throw(S3ResponseError)
@mock_s3_deprecated
def test_missing_key():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
bucket.get_key("the-key").should.equal(None)
@mock_s3_deprecated
def test_missing_key_urllib2():
conn = boto.connect_s3('the_key', 'the_secret')
conn.create_bucket("foobar")
urlopen.when.called_with(
"http://foobar.s3.amazonaws.com/the-key").should.throw(HTTPError)
@mock_s3_deprecated
def test_empty_key():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("")
key = bucket.get_key("the-key")
key.size.should.equal(0)
key.get_contents_as_string().should.equal(b'')
@mock_s3_deprecated
def test_empty_key_set_on_existing_key():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("foobar")
key = bucket.get_key("the-key")
key.size.should.equal(6)
key.get_contents_as_string().should.equal(b'foobar')
key.set_contents_from_string("")
bucket.get_key("the-key").get_contents_as_string().should.equal(b'')
@mock_s3_deprecated
def test_large_key_save():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("foobar" * 100000)
bucket.get_key(
"the-key").get_contents_as_string().should.equal(b'foobar' * 100000)
@mock_s3_deprecated
def test_copy_key():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("some value")
bucket.copy_key('new-key', 'foobar', 'the-key')
bucket.get_key(
"the-key").get_contents_as_string().should.equal(b"some value")
bucket.get_key(
"new-key").get_contents_as_string().should.equal(b"some value")
@mock_s3_deprecated
def test_copy_key_with_version():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
bucket.configure_versioning(versioning=True)
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("some value")
key.set_contents_from_string("another value")
bucket.copy_key('new-key', 'foobar', 'the-key', src_version_id='0')
bucket.get_key(
"the-key").get_contents_as_string().should.equal(b"another value")
bucket.get_key(
"new-key").get_contents_as_string().should.equal(b"some value")
@mock_s3_deprecated
def test_set_metadata():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = 'the-key'
key.set_metadata('md', 'Metadatastring')
key.set_contents_from_string("Testval")
bucket.get_key('the-key').get_metadata('md').should.equal('Metadatastring')
@mock_s3_deprecated
def test_copy_key_replace_metadata():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_metadata('md', 'Metadatastring')
key.set_contents_from_string("some value")
bucket.copy_key('new-key', 'foobar', 'the-key',
metadata={'momd': 'Mometadatastring'})
bucket.get_key("new-key").get_metadata('md').should.be.none
bucket.get_key(
"new-key").get_metadata('momd').should.equal('Mometadatastring')
@freeze_time("2012-01-01 12:00:00")
@mock_s3_deprecated
def test_last_modified():
# See https://github.com/boto/boto/issues/466
conn = boto.connect_s3()
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("some value")
rs = bucket.get_all_keys()
rs[0].last_modified.should.equal('2012-01-01T12:00:00.000Z')
bucket.get_key(
"the-key").last_modified.should.equal('Sun, 01 Jan 2012 12:00:00 GMT')
@mock_s3_deprecated
def test_missing_bucket():
conn = boto.connect_s3('the_key', 'the_secret')
conn.get_bucket.when.called_with('mybucket').should.throw(S3ResponseError)
@mock_s3_deprecated
def test_bucket_with_dash():
conn = boto.connect_s3('the_key', 'the_secret')
conn.get_bucket.when.called_with(
'mybucket-test').should.throw(S3ResponseError)
@mock_s3_deprecated
def test_create_existing_bucket():
"Trying to create a bucket that already exists should raise an Error"
conn = boto.s3.connect_to_region("us-west-2")
conn.create_bucket("foobar")
with assert_raises(S3CreateError):
conn.create_bucket('foobar')
@mock_s3_deprecated
def test_create_existing_bucket_in_us_east_1():
"Trying to create a bucket that already exists in us-east-1 returns the bucket"
""""
http://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
Your previous request to create the named bucket succeeded and you already
own it. You get this error in all AWS regions except US Standard,
us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if
bucket exists it Amazon S3 will not do anything).
"""
conn = boto.s3.connect_to_region("us-east-1")
conn.create_bucket("foobar")
bucket = conn.create_bucket("foobar")
bucket.name.should.equal("foobar")
@mock_s3_deprecated
def test_other_region():
conn = S3Connection(
'key', 'secret', host='s3-website-ap-southeast-2.amazonaws.com')
conn.create_bucket("foobar")
list(conn.get_bucket("foobar").get_all_keys()).should.equal([])
@mock_s3_deprecated
def test_bucket_deletion():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("some value")
# Try to delete a bucket that still has keys
conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)
bucket.delete_key("the-key")
conn.delete_bucket("foobar")
# Get non-existing bucket
conn.get_bucket.when.called_with("foobar").should.throw(S3ResponseError)
# Delete non-existant bucket
conn.delete_bucket.when.called_with("foobar").should.throw(S3ResponseError)
@mock_s3_deprecated
def test_get_all_buckets():
conn = boto.connect_s3('the_key', 'the_secret')
conn.create_bucket("foobar")
conn.create_bucket("foobar2")
buckets = conn.get_all_buckets()
buckets.should.have.length_of(2)
@mock_s3
@mock_s3_deprecated
def test_post_to_bucket():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
requests.post("https://foobar.s3.amazonaws.com/", {
'key': 'the-key',
'file': 'nothing'
})
bucket.get_key('the-key').get_contents_as_string().should.equal(b'nothing')
@mock_s3
@mock_s3_deprecated
def test_post_with_metadata_to_bucket():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
requests.post("https://foobar.s3.amazonaws.com/", {
'key': 'the-key',
'file': 'nothing',
'x-amz-meta-test': 'metadata'
})
bucket.get_key('the-key').get_metadata('test').should.equal('metadata')
@mock_s3_deprecated
def test_delete_missing_key():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('foobar')
deleted_key = bucket.delete_key("foobar")
deleted_key.key.should.equal("foobar")
@mock_s3_deprecated
def test_delete_keys():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('foobar')
Key(bucket=bucket, name='file1').set_contents_from_string('abc')
Key(bucket=bucket, name='file2').set_contents_from_string('abc')
Key(bucket=bucket, name='file3').set_contents_from_string('abc')
Key(bucket=bucket, name='file4').set_contents_from_string('abc')
result = bucket.delete_keys(['file2', 'file3'])
result.deleted.should.have.length_of(2)
result.errors.should.have.length_of(0)
keys = bucket.get_all_keys()
keys.should.have.length_of(2)
keys[0].name.should.equal('file1')
@mock_s3_deprecated
def test_delete_keys_with_invalid():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('foobar')
Key(bucket=bucket, name='file1').set_contents_from_string('abc')
Key(bucket=bucket, name='file2').set_contents_from_string('abc')
Key(bucket=bucket, name='file3').set_contents_from_string('abc')
Key(bucket=bucket, name='file4').set_contents_from_string('abc')
result = bucket.delete_keys(['abc', 'file3'])
result.deleted.should.have.length_of(1)
result.errors.should.have.length_of(1)
keys = bucket.get_all_keys()
keys.should.have.length_of(3)
keys[0].name.should.equal('file1')
@mock_s3_deprecated
def test_bucket_name_with_dot():
conn = boto.connect_s3()
bucket = conn.create_bucket('firstname.lastname')
k = Key(bucket, 'somekey')
k.set_contents_from_string('somedata')
@mock_s3_deprecated
def test_key_with_special_characters():
conn = boto.connect_s3()
bucket = conn.create_bucket('test_bucket_name')
key = Key(bucket, 'test_list_keys_2/x?y')
key.set_contents_from_string('value1')
key_list = bucket.list('test_list_keys_2/', '/')
keys = [x for x in key_list]
keys[0].name.should.equal("test_list_keys_2/x?y")
@mock_s3_deprecated
def test_unicode_key_with_slash():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "/the-key-unîcode/test"
key.set_contents_from_string("value")
key = bucket.get_key("/the-key-unîcode/test")
key.get_contents_as_string().should.equal(b'value')
@mock_s3_deprecated
def test_bucket_key_listing_order():
conn = boto.connect_s3()
bucket = conn.create_bucket('test_bucket')
prefix = 'toplevel/'
def store(name):
k = Key(bucket, prefix + name)
k.set_contents_from_string('somedata')
names = ['x/key', 'y.key1', 'y.key2', 'y.key3', 'x/y/key', 'x/y/z/key']
for name in names:
store(name)
delimiter = None
keys = [x.name for x in bucket.list(prefix, delimiter)]
keys.should.equal([
'toplevel/x/key', 'toplevel/x/y/key', 'toplevel/x/y/z/key',
'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3'
])
delimiter = '/'
keys = [x.name for x in bucket.list(prefix, delimiter)]
keys.should.equal([
'toplevel/y.key1', 'toplevel/y.key2', 'toplevel/y.key3', 'toplevel/x/'
])
# Test delimiter with no prefix
delimiter = '/'
keys = [x.name for x in bucket.list(prefix=None, delimiter=delimiter)]
keys.should.equal(['toplevel/'])
delimiter = None
keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
keys.should.equal(
[u'toplevel/x/key', u'toplevel/x/y/key', u'toplevel/x/y/z/key'])
delimiter = '/'
keys = [x.name for x in bucket.list(prefix + 'x', delimiter)]
keys.should.equal([u'toplevel/x/'])
@mock_s3_deprecated
def test_key_with_reduced_redundancy():
conn = boto.connect_s3()
bucket = conn.create_bucket('test_bucket_name')
key = Key(bucket, 'test_rr_key')
key.set_contents_from_string('value1', reduced_redundancy=True)
    # we use the bucket iterator because of:
    # https://github.com/boto/boto/issues/1173
list(bucket)[0].storage_class.should.equal('REDUCED_REDUNDANCY')
@mock_s3_deprecated
def test_copy_key_reduced_redundancy():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("some value")
bucket.copy_key('new-key', 'foobar', 'the-key',
storage_class='REDUCED_REDUNDANCY')
    # we use the bucket iterator because of:
    # https://github.com/boto/boto/issues/1173
keys = dict([(k.name, k) for k in bucket])
keys['new-key'].storage_class.should.equal("REDUCED_REDUNDANCY")
keys['the-key'].storage_class.should.equal("STANDARD")
@freeze_time("2012-01-01 12:00:00")
@mock_s3_deprecated
def test_restore_key():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("some value")
list(bucket)[0].ongoing_restore.should.be.none
key.restore(1)
key = bucket.get_key('the-key')
key.ongoing_restore.should_not.be.none
key.ongoing_restore.should.be.false
key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT")
key.restore(2)
key = bucket.get_key('the-key')
key.ongoing_restore.should_not.be.none
key.ongoing_restore.should.be.false
key.expiry_date.should.equal("Tue, 03 Jan 2012 12:00:00 GMT")
@freeze_time("2012-01-01 12:00:00")
@mock_s3_deprecated
def test_restore_key_headers():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket("foobar")
key = Key(bucket)
key.key = "the-key"
key.set_contents_from_string("some value")
key.restore(1, headers={'foo': 'bar'})
key = bucket.get_key('the-key')
key.ongoing_restore.should_not.be.none
key.ongoing_restore.should.be.false
key.expiry_date.should.equal("Mon, 02 Jan 2012 12:00:00 GMT")
@mock_s3_deprecated
def test_get_versioning_status():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('foobar')
d = bucket.get_versioning_status()
d.should.be.empty
bucket.configure_versioning(versioning=True)
d = bucket.get_versioning_status()
d.shouldnt.be.empty
d.should.have.key('Versioning').being.equal('Enabled')
bucket.configure_versioning(versioning=False)
d = bucket.get_versioning_status()
d.should.have.key('Versioning').being.equal('Suspended')
@mock_s3_deprecated
def test_key_version():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('foobar')
bucket.configure_versioning(versioning=True)
key = Key(bucket)
key.key = 'the-key'
key.version_id.should.be.none
key.set_contents_from_string('some string')
key.version_id.should.equal('0')
key.set_contents_from_string('some string')
key.version_id.should.equal('1')
key = bucket.get_key('the-key')
key.version_id.should.equal('1')
@mock_s3_deprecated
def test_list_versions():
conn = boto.connect_s3('the_key', 'the_secret')
bucket = conn.create_bucket('foobar')
bucket.configure_versioning(versioning=True)
key = Key(bucket, 'the-key')
key.version_id.should.be.none
key.set_contents_from_string("Version 1")
key.version_id.should.equal('0')
key.set_contents_from_string("Version 2")
key.version_id.should.equal('1')
versions = list(bucket.list_versions())
versions.should.have.length_of(2)
versions[0].name.should.equal('the-key')
versions[0].version_id.should.equal('0')
versions[0].get_contents_as_string().should.equal(b"Version 1")
versions[1].name.should.equal('the-key')
versions[1].version_id.should.equal('1')
versions[1].get_contents_as_string().should.equal(b"Version 2")
key = Key(bucket, 'the2-key')
key.set_contents_from_string("Version 1")
keys = list(bucket.list())
keys.should.have.length_of(2)
versions = list(bucket.list_versions(prefix='the2-'))
versions.should.have.length_of(1)
@mock_s3_deprecated
def test_acl_setting():
conn = boto.connect_s3()
bucket = conn.create_bucket('foobar')
content = b'imafile'
keyname = 'test.txt'
key = Key(bucket, name=keyname)
key.content_type = 'text/plain'
key.set_contents_from_string(content)
key.make_public()
key = bucket.get_key(keyname)
assert key.get_contents_as_string() == content
grants = key.get_acl().acl.grants
assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and
g.permission == 'READ' for g in grants), grants
@mock_s3_deprecated
def test_acl_setting_via_headers():
conn = boto.connect_s3()
bucket = conn.create_bucket('foobar')
content = b'imafile'
keyname = 'test.txt'
key = Key(bucket, name=keyname)
key.content_type = 'text/plain'
key.set_contents_from_string(content, headers={
'x-amz-grant-full-control': 'uri="http://acs.amazonaws.com/groups/global/AllUsers"'
})
key = bucket.get_key(keyname)
assert key.get_contents_as_string() == content
grants = key.get_acl().acl.grants
assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and
g.permission == 'FULL_CONTROL' for g in grants), grants
@mock_s3_deprecated
def test_acl_switching():
conn = boto.connect_s3()
bucket = conn.create_bucket('foobar')
content = b'imafile'
keyname = 'test.txt'
key = Key(bucket, name=keyname)
key.content_type = 'text/plain'
key.set_contents_from_string(content, policy='public-read')
key.set_acl('private')
grants = key.get_acl().acl.grants
assert not any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and
g.permission == 'READ' for g in grants), grants
@mock_s3_deprecated
def test_bucket_acl_setting():
conn = boto.connect_s3()
bucket = conn.create_bucket('foobar')
bucket.make_public()
grants = bucket.get_acl().acl.grants
assert any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and
g.permission == 'READ' for g in grants), grants
@mock_s3_deprecated
def test_bucket_acl_switching():
conn = boto.connect_s3()
bucket = conn.create_bucket('foobar')
bucket.make_public()
bucket.set_acl('private')
grants = bucket.get_acl().acl.grants
assert not any(g.uri == 'http://acs.amazonaws.com/groups/global/AllUsers' and
g.permission == 'READ' for g in grants), grants
@mock_s3
def test_s3_object_in_public_bucket():
s3 = boto3.resource('s3')
bucket = s3.Bucket('test-bucket')
bucket.create(ACL='public-read')
bucket.put_object(Body=b'ABCD', Key='file.txt')
s3_anonymous = boto3.resource('s3')
s3_anonymous.meta.client.meta.events.register('choose-signer.s3.*', disable_signing)
contents = s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()['Body'].read()
contents.should.equal(b'ABCD')
bucket.put_object(ACL='private', Body=b'ABCD', Key='file.txt')
with assert_raises(ClientError) as exc:
s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()
exc.exception.response['Error']['Code'].should.equal('403')
params = {'Bucket': 'test-bucket', 'Key': 'file.txt'}
presigned_url = boto3.client('s3').generate_presigned_url('get_object', params, ExpiresIn=900)
response = requests.get(presigned_url)
assert response.status_code == 200
@mock_s3
def test_s3_object_in_private_bucket():
s3 = boto3.resource('s3')
bucket = s3.Bucket('test-bucket')
bucket.create(ACL='private')
bucket.put_object(ACL='private', Body=b'ABCD', Key='file.txt')
s3_anonymous = boto3.resource('s3')
s3_anonymous.meta.client.meta.events.register('choose-signer.s3.*', disable_signing)
with assert_raises(ClientError) as exc:
s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()
exc.exception.response['Error']['Code'].should.equal('403')
bucket.put_object(ACL='public-read', Body=b'ABCD', Key='file.txt')
contents = s3_anonymous.Object(key='file.txt', bucket_name='test-bucket').get()['Body'].read()
contents.should.equal(b'ABCD')
@mock_s3_deprecated
def test_unicode_key():
conn = boto.connect_s3()
bucket = conn.create_bucket('mybucket')
key = Key(bucket)
key.key = u'こんにちは.jpg'
key.set_contents_from_string('Hello world!')
assert [listed_key.key for listed_key in bucket.list()] == [key.key]
fetched_key = bucket.get_key(key.key)
assert fetched_key.key == key.key
assert fetched_key.get_contents_as_string().decode("utf-8") == 'Hello world!'
@mock_s3_deprecated
def test_unicode_value():
conn = boto.connect_s3()
bucket = conn.create_bucket('mybucket')
key = Key(bucket)
key.key = 'some_key'
key.set_contents_from_string(u'こんにちは.jpg')
list(bucket.list())
key = bucket.get_key(key.key)
assert key.get_contents_as_string().decode("utf-8") == u'こんにちは.jpg'
@mock_s3_deprecated
def test_setting_content_encoding():
conn = boto.connect_s3()
bucket = conn.create_bucket('mybucket')
key = bucket.new_key("keyname")
key.set_metadata("Content-Encoding", "gzip")
compressed_data = "abcdef"
key.set_contents_from_string(compressed_data)
key = bucket.get_key("keyname")
key.content_encoding.should.equal("gzip")
@mock_s3_deprecated
def test_bucket_location():
conn = boto.s3.connect_to_region("us-west-2")
bucket = conn.create_bucket('mybucket')
bucket.get_location().should.equal("us-west-2")
@mock_s3_deprecated
def test_ranged_get():
conn = boto.connect_s3()
bucket = conn.create_bucket('mybucket')
key = Key(bucket)
key.key = 'bigkey'
rep = b"0123456789"
key.set_contents_from_string(rep * 10)
# Implicitly bounded range requests.
key.get_contents_as_string(
headers={'Range': 'bytes=0-'}).should.equal(rep * 10)
key.get_contents_as_string(
headers={'Range': 'bytes=50-'}).should.equal(rep * 5)
key.get_contents_as_string(
headers={'Range': 'bytes=99-'}).should.equal(b'9')
# Explicitly bounded range requests starting from the first byte.
key.get_contents_as_string(
headers={'Range': 'bytes=0-0'}).should.equal(b'0')
key.get_contents_as_string(
headers={'Range': 'bytes=0-49'}).should.equal(rep * 5)
key.get_contents_as_string(
headers={'Range': 'bytes=0-99'}).should.equal(rep * 10)
key.get_contents_as_string(
headers={'Range': 'bytes=0-100'}).should.equal(rep * 10)
key.get_contents_as_string(
headers={'Range': 'bytes=0-700'}).should.equal(rep * 10)
    # Explicitly bounded range requests starting from a middle byte.
key.get_contents_as_string(
headers={'Range': 'bytes=50-54'}).should.equal(rep[:5])
key.get_contents_as_string(
headers={'Range': 'bytes=50-99'}).should.equal(rep * 5)
key.get_contents_as_string(
headers={'Range': 'bytes=50-100'}).should.equal(rep * 5)
key.get_contents_as_string(
headers={'Range': 'bytes=50-700'}).should.equal(rep * 5)
# Explicitly bounded range requests starting from the last byte.
key.get_contents_as_string(
headers={'Range': 'bytes=99-99'}).should.equal(b'9')
key.get_contents_as_string(
headers={'Range': 'bytes=99-100'}).should.equal(b'9')
key.get_contents_as_string(
headers={'Range': 'bytes=99-700'}).should.equal(b'9')
# Suffix range requests.
key.get_contents_as_string(
headers={'Range': 'bytes=-1'}).should.equal(b'9')
key.get_contents_as_string(
headers={'Range': 'bytes=-60'}).should.equal(rep * 6)
key.get_contents_as_string(
headers={'Range': 'bytes=-100'}).should.equal(rep * 10)
key.get_contents_as_string(
headers={'Range': 'bytes=-101'}).should.equal(rep * 10)
key.get_contents_as_string(
headers={'Range': 'bytes=-700'}).should.equal(rep * 10)
key.size.should.equal(100)
@mock_s3_deprecated
def test_policy():
conn = boto.connect_s3()
bucket_name = 'mybucket'
bucket = conn.create_bucket(bucket_name)
policy = json.dumps({
"Version": "2012-10-17",
"Id": "PutObjPolicy",
"Statement": [
{
"Sid": "DenyUnEncryptedObjectUploads",
"Effect": "Deny",
"Principal": "*",
"Action": "s3:PutObject",
"Resource": "arn:aws:s3:::{bucket_name}/*".format(bucket_name=bucket_name),
"Condition": {
"StringNotEquals": {
"s3:x-amz-server-side-encryption": "aws:kms"
}
}
}
]
})
with assert_raises(S3ResponseError) as err:
bucket.get_policy()
ex = err.exception
ex.box_usage.should.be.none
ex.error_code.should.equal('NoSuchBucketPolicy')
ex.message.should.equal('The bucket policy does not exist')
ex.reason.should.equal('Not Found')
ex.resource.should.be.none
ex.status.should.equal(404)
ex.body.should.contain(bucket_name)
ex.request_id.should_not.be.none
bucket.set_policy(policy).should.be.true
bucket = conn.get_bucket(bucket_name)
bucket.get_policy().decode('utf-8').should.equal(policy)
bucket.delete_policy()
with assert_raises(S3ResponseError) as err:
bucket.get_policy()
@mock_s3_deprecated
def test_website_configuration_xml():
conn = boto.connect_s3()
bucket = conn.create_bucket('test-bucket')
bucket.set_website_configuration_xml(TEST_XML)
bucket.get_website_configuration_xml().should.equal(TEST_XML)
@mock_s3_deprecated
def test_key_with_trailing_slash_in_ordinary_calling_format():
conn = boto.connect_s3(
'access_key',
'secret_key',
calling_format=boto.s3.connection.OrdinaryCallingFormat()
)
bucket = conn.create_bucket('test_bucket_name')
key_name = 'key_with_slash/'
key = Key(bucket, key_name)
key.set_contents_from_string('some value')
[k.name for k in bucket.get_all_keys()].should.contain(key_name)
"""
boto3
"""
@mock_s3
def test_boto3_key_etag():
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome')
resp = s3.get_object(Bucket='mybucket', Key='steve')
resp['ETag'].should.equal('"d32bda93738f7e03adb22e66c90fbc04"')
@mock_s3
def test_website_redirect_location():
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome')
resp = s3.get_object(Bucket='mybucket', Key='steve')
resp.get('WebsiteRedirectLocation').should.be.none
url = 'https://github.com/spulec/moto'
s3.put_object(Bucket='mybucket', Key='steve', Body=b'is awesome', WebsiteRedirectLocation=url)
resp = s3.get_object(Bucket='mybucket', Key='steve')
resp['WebsiteRedirectLocation'].should.equal(url)
@mock_s3
def test_boto3_list_keys_xml_escaped():
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
key_name = 'Q&A.txt'
s3.put_object(Bucket='mybucket', Key=key_name, Body=b'is awesome')
resp = s3.list_objects_v2(Bucket='mybucket', Prefix=key_name)
assert resp['Contents'][0]['Key'] == key_name
assert resp['KeyCount'] == 1
assert resp['MaxKeys'] == 1000
assert resp['Prefix'] == key_name
assert resp['IsTruncated'] == False
assert 'Delimiter' not in resp
assert 'StartAfter' not in resp
assert 'NextContinuationToken' not in resp
assert 'Owner' not in resp['Contents'][0]
@mock_s3
def test_boto3_list_objects_v2_truncated_response():
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
s3.put_object(Bucket='mybucket', Key='one', Body=b'1')
s3.put_object(Bucket='mybucket', Key='two', Body=b'22')
s3.put_object(Bucket='mybucket', Key='three', Body=b'333')
# First list
resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1)
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'one'
assert resp['MaxKeys'] == 1
assert resp['Prefix'] == ''
assert resp['KeyCount'] == 1
assert resp['IsTruncated'] == True
assert 'Delimiter' not in resp
assert 'StartAfter' not in resp
assert 'Owner' not in listed_object # owner info was not requested
next_token = resp['NextContinuationToken']
# Second list
resp = s3.list_objects_v2(
Bucket='mybucket', MaxKeys=1, ContinuationToken=next_token)
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'three'
assert resp['MaxKeys'] == 1
assert resp['Prefix'] == ''
assert resp['KeyCount'] == 1
assert resp['IsTruncated'] == True
assert 'Delimiter' not in resp
assert 'StartAfter' not in resp
assert 'Owner' not in listed_object
next_token = resp['NextContinuationToken']
# Third list
resp = s3.list_objects_v2(
Bucket='mybucket', MaxKeys=1, ContinuationToken=next_token)
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'two'
assert resp['MaxKeys'] == 1
assert resp['Prefix'] == ''
assert resp['KeyCount'] == 1
assert resp['IsTruncated'] == False
assert 'Delimiter' not in resp
assert 'Owner' not in listed_object
assert 'StartAfter' not in resp
assert 'NextContinuationToken' not in resp
@mock_s3
def test_boto3_list_objects_v2_truncated_response_start_after():
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
s3.put_object(Bucket='mybucket', Key='one', Body=b'1')
s3.put_object(Bucket='mybucket', Key='two', Body=b'22')
s3.put_object(Bucket='mybucket', Key='three', Body=b'333')
# First list
resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1, StartAfter='one')
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'three'
assert resp['MaxKeys'] == 1
assert resp['Prefix'] == ''
assert resp['KeyCount'] == 1
assert resp['IsTruncated'] == True
assert resp['StartAfter'] == 'one'
assert 'Delimiter' not in resp
assert 'Owner' not in listed_object
next_token = resp['NextContinuationToken']
# Second list
# The ContinuationToken must take precedence over StartAfter.
resp = s3.list_objects_v2(Bucket='mybucket', MaxKeys=1, StartAfter='one',
ContinuationToken=next_token)
listed_object = resp['Contents'][0]
assert listed_object['Key'] == 'two'
assert resp['MaxKeys'] == 1
assert resp['Prefix'] == ''
assert resp['KeyCount'] == 1
assert resp['IsTruncated'] == False
# When ContinuationToken is given, StartAfter is ignored. This also means
# AWS does not return it in the response.
assert 'StartAfter' not in resp
assert 'Delimiter' not in resp
assert 'Owner' not in listed_object
@mock_s3
def test_boto3_list_objects_v2_fetch_owner():
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
s3.put_object(Bucket='mybucket', Key='one', Body=b'11')
resp = s3.list_objects_v2(Bucket='mybucket', FetchOwner=True)
owner = resp['Contents'][0]['Owner']
assert 'ID' in owner
assert 'DisplayName' in owner
assert len(owner.keys()) == 2
@mock_s3
def test_boto3_bucket_create():
s3 = boto3.resource('s3', region_name='us-east-1')
s3.create_bucket(Bucket="blah")
s3.Object('blah', 'hello.txt').put(Body="some text")
s3.Object('blah', 'hello.txt').get()['Body'].read().decode(
"utf-8").should.equal("some text")
@mock_s3
def test_bucket_create_duplicate():
s3 = boto3.resource('s3', region_name='us-west-2')
s3.create_bucket(Bucket="blah", CreateBucketConfiguration={
'LocationConstraint': 'us-west-2',
})
with assert_raises(ClientError) as exc:
s3.create_bucket(
Bucket="blah",
CreateBucketConfiguration={
'LocationConstraint': 'us-west-2',
}
)
exc.exception.response['Error']['Code'].should.equal('BucketAlreadyExists')
@mock_s3
def test_boto3_bucket_create_eu_central():
s3 = boto3.resource('s3', region_name='eu-central-1')
s3.create_bucket(Bucket="blah")
s3.Object('blah', 'hello.txt').put(Body="some text")
s3.Object('blah', 'hello.txt').get()['Body'].read().decode(
"utf-8").should.equal("some text")
@mock_s3
def test_boto3_head_object():
s3 = boto3.resource('s3', region_name='us-east-1')
s3.create_bucket(Bucket="blah")
s3.Object('blah', 'hello.txt').put(Body="some text")
s3.Object('blah', 'hello.txt').meta.client.head_object(
Bucket='blah', Key='hello.txt')
with assert_raises(ClientError) as e:
s3.Object('blah', 'hello2.txt').meta.client.head_object(
Bucket='blah', Key='hello_bad.txt')
e.exception.response['Error']['Code'].should.equal('404')
@mock_s3
def test_boto3_bucket_deletion():
cli = boto3.client('s3', region_name='us-east-1')
cli.create_bucket(Bucket="foobar")
cli.put_object(Bucket="foobar", Key="the-key", Body="some value")
# Try to delete a bucket that still has keys
cli.delete_bucket.when.called_with(Bucket="foobar").should.throw(
cli.exceptions.ClientError,
('An error occurred (BucketNotEmpty) when calling the DeleteBucket operation: '
'The bucket you tried to delete is not empty'))
cli.delete_object(Bucket="foobar", Key="the-key")
cli.delete_bucket(Bucket="foobar")
# Get non-existing bucket
cli.head_bucket.when.called_with(Bucket="foobar").should.throw(
cli.exceptions.ClientError,
"An error occurred (404) when calling the HeadBucket operation: Not Found")
# Delete non-existing bucket
cli.delete_bucket.when.called_with(Bucket="foobar").should.throw(cli.exceptions.NoSuchBucket)
@mock_s3
def test_boto3_get_object():
s3 = boto3.resource('s3', region_name='us-east-1')
s3.create_bucket(Bucket="blah")
s3.Object('blah', 'hello.txt').put(Body="some text")
s3.Object('blah', 'hello.txt').meta.client.head_object(
Bucket='blah', Key='hello.txt')
with assert_raises(ClientError) as e:
s3.Object('blah', 'hello2.txt').get()
e.exception.response['Error']['Code'].should.equal('NoSuchKey')
@mock_s3
def test_boto3_head_object_with_versioning():
s3 = boto3.resource('s3', region_name='us-east-1')
bucket = s3.create_bucket(Bucket='blah')
bucket.Versioning().enable()
old_content = 'some text'
new_content = 'some new text'
s3.Object('blah', 'hello.txt').put(Body=old_content)
s3.Object('blah', 'hello.txt').put(Body=new_content)
head_object = s3.Object('blah', 'hello.txt').meta.client.head_object(
Bucket='blah', Key='hello.txt')
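    # moto issues sequential integer VersionIds ('0', '1', ...); real S3
    # returns opaque strings, so the exact ids asserted below are a moto artifact.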
head_object['VersionId'].should.equal('1')
head_object['ContentLength'].should.equal(len(new_content))
old_head_object = s3.Object('blah', 'hello.txt').meta.client.head_object(
Bucket='blah', Key='hello.txt', VersionId='0')
old_head_object['VersionId'].should.equal('0')
old_head_object['ContentLength'].should.equal(len(old_content))
@mock_s3
def test_boto3_copy_object_with_versioning():
client = boto3.client('s3', region_name='us-east-1')
client.create_bucket(Bucket='blah', CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'})
client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'})
client.put_object(Bucket='blah', Key='test1', Body=b'test1')
client.put_object(Bucket='blah', Key='test2', Body=b'test2')
obj1_version = client.get_object(Bucket='blah', Key='test1')['VersionId']
obj2_version = client.get_object(Bucket='blah', Key='test2')['VersionId']
# Versions should be the same
obj1_version.should.equal(obj2_version)
client.copy_object(CopySource={'Bucket': 'blah', 'Key': 'test1'}, Bucket='blah', Key='test2')
obj2_version_new = client.get_object(Bucket='blah', Key='test2')['VersionId']
# Version should be different to previous version
obj2_version_new.should_not.equal(obj2_version)
@mock_s3
def test_boto3_deleted_versionings_list():
client = boto3.client('s3', region_name='us-east-1')
client.create_bucket(Bucket='blah')
client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'})
client.put_object(Bucket='blah', Key='test1', Body=b'test1')
client.put_object(Bucket='blah', Key='test2', Body=b'test2')
client.delete_objects(Bucket='blah', Delete={'Objects': [{'Key': 'test1'}]})
listed = client.list_objects_v2(Bucket='blah')
assert len(listed['Contents']) == 1
@mock_s3
def test_boto3_delete_versioned_bucket():
client = boto3.client('s3', region_name='us-east-1')
client.create_bucket(Bucket='blah')
client.put_bucket_versioning(Bucket='blah', VersioningConfiguration={'Status': 'Enabled'})
resp = client.put_object(Bucket='blah', Key='test1', Body=b'test1')
client.delete_object(Bucket='blah', Key='test1', VersionId=resp["VersionId"])
client.delete_bucket(Bucket='blah')
@mock_s3
def test_boto3_head_object_if_modified_since():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = "blah"
s3.create_bucket(Bucket=bucket_name)
key = 'hello.txt'
s3.put_object(
Bucket=bucket_name,
Key=key,
Body='test'
)
with assert_raises(botocore.exceptions.ClientError) as err:
s3.head_object(
Bucket=bucket_name,
Key=key,
IfModifiedSince=datetime.datetime.utcnow() + datetime.timedelta(hours=1)
)
e = err.exception
e.response['Error'].should.equal({'Code': '304', 'Message': 'Not Modified'})
@mock_s3
@reduced_min_part_size
def test_boto3_multipart_etag():
# Create Bucket so that test can run
s3 = boto3.client('s3', region_name='us-east-1')
s3.create_bucket(Bucket='mybucket')
upload_id = s3.create_multipart_upload(
Bucket='mybucket', Key='the-key')['UploadId']
part1 = b'0' * REDUCED_PART_SIZE
etags = []
etags.append(
s3.upload_part(Bucket='mybucket', Key='the-key', PartNumber=1,
UploadId=upload_id, Body=part1)['ETag'])
# last part, can be less than 5 MB
part2 = b'1'
etags.append(
s3.upload_part(Bucket='mybucket', Key='the-key', PartNumber=2,
UploadId=upload_id, Body=part2)['ETag'])
s3.complete_multipart_upload(
Bucket='mybucket', Key='the-key', UploadId=upload_id,
MultipartUpload={'Parts': [{'ETag': etag, 'PartNumber': i}
for i, etag in enumerate(etags, 1)]})
# we should get both parts as the key contents
resp = s3.get_object(Bucket='mybucket', Key='the-key')
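    # An S3-style multipart ETag is the MD5 of the concatenated binary part
    # MD5s plus a '-<part count>' suffix; EXPECTED_ETAG is assumed to be
    # defined earlier in this file alongside REDUCED_PART_SIZE.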
resp['ETag'].should.equal(EXPECTED_ETAG)
@mock_s3
def test_boto3_put_object_with_tagging():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = 'key-with-tags'
s3.create_bucket(Bucket=bucket_name)
s3.put_object(
Bucket=bucket_name,
Key=key,
Body='test',
Tagging='foo=bar',
)
resp = s3.get_object_tagging(Bucket=bucket_name, Key=key)
resp['TagSet'].should.contain({'Key': 'foo', 'Value': 'bar'})
@mock_s3
def test_boto3_put_bucket_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
# With 1 tag:
resp = s3.put_bucket_tagging(Bucket=bucket_name,
Tagging={
"TagSet": [
{
"Key": "TagOne",
"Value": "ValueOne"
}
]
})
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
# With multiple tags:
resp = s3.put_bucket_tagging(Bucket=bucket_name,
Tagging={
"TagSet": [
{
"Key": "TagOne",
"Value": "ValueOne"
},
{
"Key": "TagTwo",
"Value": "ValueTwo"
}
]
})
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
# No tags is also OK:
resp = s3.put_bucket_tagging(Bucket=bucket_name, Tagging={
"TagSet": []
})
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
@mock_s3
def test_boto3_get_bucket_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_tagging(Bucket=bucket_name,
Tagging={
"TagSet": [
{
"Key": "TagOne",
"Value": "ValueOne"
},
{
"Key": "TagTwo",
"Value": "ValueTwo"
}
]
})
# Get the tags for the bucket:
resp = s3.get_bucket_tagging(Bucket=bucket_name)
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
len(resp["TagSet"]).should.equal(2)
# With no tags:
s3.put_bucket_tagging(Bucket=bucket_name, Tagging={
"TagSet": []
})
with assert_raises(ClientError) as err:
s3.get_bucket_tagging(Bucket=bucket_name)
e = err.exception
e.response["Error"]["Code"].should.equal("NoSuchTagSet")
e.response["Error"]["Message"].should.equal("The TagSet does not exist")
@mock_s3
def test_boto3_delete_bucket_tagging():
s3 = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_tagging(Bucket=bucket_name,
Tagging={
"TagSet": [
{
"Key": "TagOne",
"Value": "ValueOne"
},
{
"Key": "TagTwo",
"Value": "ValueTwo"
}
]
})
resp = s3.delete_bucket_tagging(Bucket=bucket_name)
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(204)
with assert_raises(ClientError) as err:
s3.get_bucket_tagging(Bucket=bucket_name)
e = err.exception
e.response["Error"]["Code"].should.equal("NoSuchTagSet")
e.response["Error"]["Message"].should.equal("The TagSet does not exist")
@mock_s3
def test_boto3_put_bucket_cors():
s3 = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
resp = s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={
"CORSRules": [
{
"AllowedOrigins": [
"*"
],
"AllowedMethods": [
"GET",
"POST"
],
"AllowedHeaders": [
"Authorization"
],
"ExposeHeaders": [
"x-amz-request-id"
],
"MaxAgeSeconds": 123
},
{
"AllowedOrigins": [
"*"
],
"AllowedMethods": [
"PUT"
],
"AllowedHeaders": [
"Authorization"
],
"ExposeHeaders": [
"x-amz-request-id"
],
"MaxAgeSeconds": 123
}
]
})
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
with assert_raises(ClientError) as err:
s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={
"CORSRules": [
{
"AllowedOrigins": [
"*"
],
"AllowedMethods": [
"NOTREAL",
"POST"
]
}
]
})
e = err.exception
e.response["Error"]["Code"].should.equal("InvalidRequest")
e.response["Error"]["Message"].should.equal("Found unsupported HTTP method in CORS config. "
"Unsupported method is NOTREAL")
with assert_raises(ClientError) as err:
s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={
"CORSRules": []
})
e = err.exception
e.response["Error"]["Code"].should.equal("MalformedXML")
# And 101:
many_rules = [{"AllowedOrigins": ["*"], "AllowedMethods": ["GET"]}] * 101
with assert_raises(ClientError) as err:
s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={
"CORSRules": many_rules
})
e = err.exception
e.response["Error"]["Code"].should.equal("MalformedXML")
@mock_s3
def test_boto3_get_bucket_cors():
s3 = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
# Without CORS:
with assert_raises(ClientError) as err:
s3.get_bucket_cors(Bucket=bucket_name)
e = err.exception
e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration")
e.response["Error"]["Message"].should.equal("The CORS configuration does not exist")
s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={
"CORSRules": [
{
"AllowedOrigins": [
"*"
],
"AllowedMethods": [
"GET",
"POST"
],
"AllowedHeaders": [
"Authorization"
],
"ExposeHeaders": [
"x-amz-request-id"
],
"MaxAgeSeconds": 123
},
{
"AllowedOrigins": [
"*"
],
"AllowedMethods": [
"PUT"
],
"AllowedHeaders": [
"Authorization"
],
"ExposeHeaders": [
"x-amz-request-id"
],
"MaxAgeSeconds": 123
}
]
})
resp = s3.get_bucket_cors(Bucket=bucket_name)
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
len(resp["CORSRules"]).should.equal(2)
@mock_s3
def test_boto3_delete_bucket_cors():
s3 = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucket"
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_cors(Bucket=bucket_name, CORSConfiguration={
"CORSRules": [
{
"AllowedOrigins": [
"*"
],
"AllowedMethods": [
"GET"
]
}
]
})
resp = s3.delete_bucket_cors(Bucket=bucket_name)
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(204)
# Verify deletion:
with assert_raises(ClientError) as err:
s3.get_bucket_cors(Bucket=bucket_name)
e = err.exception
e.response["Error"]["Code"].should.equal("NoSuchCORSConfiguration")
e.response["Error"]["Message"].should.equal("The CORS configuration does not exist")
@mock_s3
def test_put_bucket_acl_body():
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="bucket")
bucket_owner = s3.get_bucket_acl(Bucket="bucket")["Owner"]
s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={
"Grants": [
{
"Grantee": {
"URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
"Type": "Group"
},
"Permission": "WRITE"
},
{
"Grantee": {
"URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
"Type": "Group"
},
"Permission": "READ_ACP"
}
],
"Owner": bucket_owner
})
result = s3.get_bucket_acl(Bucket="bucket")
assert len(result["Grants"]) == 2
for g in result["Grants"]:
assert g["Grantee"]["URI"] == "http://acs.amazonaws.com/groups/s3/LogDelivery"
assert g["Grantee"]["Type"] == "Group"
assert g["Permission"] in ["WRITE", "READ_ACP"]
# With one:
s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={
"Grants": [
{
"Grantee": {
"URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
"Type": "Group"
},
"Permission": "WRITE"
}
],
"Owner": bucket_owner
})
result = s3.get_bucket_acl(Bucket="bucket")
assert len(result["Grants"]) == 1
# With no owner:
with assert_raises(ClientError) as err:
s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={
"Grants": [
{
"Grantee": {
"URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
"Type": "Group"
},
"Permission": "WRITE"
}
]
})
assert err.exception.response["Error"]["Code"] == "MalformedACLError"
# With incorrect permission:
with assert_raises(ClientError) as err:
s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={
"Grants": [
{
"Grantee": {
"URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
"Type": "Group"
},
"Permission": "lskjflkasdjflkdsjfalisdjflkdsjf"
}
],
"Owner": bucket_owner
})
assert err.exception.response["Error"]["Code"] == "MalformedACLError"
# Clear the ACLs:
result = s3.put_bucket_acl(Bucket="bucket", AccessControlPolicy={"Grants": [], "Owner": bucket_owner})
assert not result.get("Grants")
@mock_s3
def test_put_bucket_notification():
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="bucket")
# With no configuration:
result = s3.get_bucket_notification(Bucket="bucket")
assert not result.get("TopicConfigurations")
assert not result.get("QueueConfigurations")
assert not result.get("LambdaFunctionConfigurations")
# Place proper topic configuration:
s3.put_bucket_notification_configuration(Bucket="bucket",
NotificationConfiguration={
"TopicConfigurations": [
{
"TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic",
"Events": [
"s3:ObjectCreated:*",
"s3:ObjectRemoved:*"
]
},
{
"TopicArn": "arn:aws:sns:us-east-1:012345678910:myothertopic",
"Events": [
"s3:ObjectCreated:*"
],
"Filter": {
"Key": {
"FilterRules": [
{
"Name": "prefix",
"Value": "images/"
},
{
"Name": "suffix",
"Value": "png"
}
]
}
}
}
]
})
# Verify to completion:
result = s3.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["TopicConfigurations"]) == 2
assert not result.get("QueueConfigurations")
assert not result.get("LambdaFunctionConfigurations")
assert result["TopicConfigurations"][0]["TopicArn"] == "arn:aws:sns:us-east-1:012345678910:mytopic"
assert result["TopicConfigurations"][1]["TopicArn"] == "arn:aws:sns:us-east-1:012345678910:myothertopic"
assert len(result["TopicConfigurations"][0]["Events"]) == 2
assert len(result["TopicConfigurations"][1]["Events"]) == 1
assert result["TopicConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*"
assert result["TopicConfigurations"][0]["Events"][1] == "s3:ObjectRemoved:*"
assert result["TopicConfigurations"][1]["Events"][0] == "s3:ObjectCreated:*"
assert result["TopicConfigurations"][0]["Id"]
assert result["TopicConfigurations"][1]["Id"]
assert not result["TopicConfigurations"][0].get("Filter")
assert len(result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"]) == 2
assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix"
assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/"
assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Name"] == "suffix"
assert result["TopicConfigurations"][1]["Filter"]["Key"]["FilterRules"][1]["Value"] == "png"
# Place proper queue configuration:
s3.put_bucket_notification_configuration(Bucket="bucket",
NotificationConfiguration={
"QueueConfigurations": [
{
"Id": "SomeID",
"QueueArn": "arn:aws:sqs:us-east-1:012345678910:myQueue",
"Events": ["s3:ObjectCreated:*"],
"Filter": {
"Key": {
"FilterRules": [
{
"Name": "prefix",
"Value": "images/"
}
]
}
}
}
]
})
result = s3.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["QueueConfigurations"]) == 1
assert not result.get("TopicConfigurations")
assert not result.get("LambdaFunctionConfigurations")
assert result["QueueConfigurations"][0]["Id"] == "SomeID"
assert result["QueueConfigurations"][0]["QueueArn"] == "arn:aws:sqs:us-east-1:012345678910:myQueue"
assert result["QueueConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*"
assert len(result["QueueConfigurations"][0]["Events"]) == 1
assert len(result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"]) == 1
assert result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix"
assert result["QueueConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/"
# Place proper Lambda configuration:
s3.put_bucket_notification_configuration(Bucket="bucket",
NotificationConfiguration={
"LambdaFunctionConfigurations": [
{
"LambdaFunctionArn":
"arn:aws:lambda:us-east-1:012345678910:function:lambda",
"Events": ["s3:ObjectCreated:*"],
"Filter": {
"Key": {
"FilterRules": [
{
"Name": "prefix",
"Value": "images/"
}
]
}
}
}
]
})
result = s3.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["LambdaFunctionConfigurations"]) == 1
assert not result.get("TopicConfigurations")
assert not result.get("QueueConfigurations")
assert result["LambdaFunctionConfigurations"][0]["Id"]
assert result["LambdaFunctionConfigurations"][0]["LambdaFunctionArn"] == \
"arn:aws:lambda:us-east-1:012345678910:function:lambda"
assert result["LambdaFunctionConfigurations"][0]["Events"][0] == "s3:ObjectCreated:*"
assert len(result["LambdaFunctionConfigurations"][0]["Events"]) == 1
assert len(result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"]) == 1
assert result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Name"] == "prefix"
assert result["LambdaFunctionConfigurations"][0]["Filter"]["Key"]["FilterRules"][0]["Value"] == "images/"
# And with all 3 set:
s3.put_bucket_notification_configuration(Bucket="bucket",
NotificationConfiguration={
"TopicConfigurations": [
{
"TopicArn": "arn:aws:sns:us-east-1:012345678910:mytopic",
"Events": [
"s3:ObjectCreated:*",
"s3:ObjectRemoved:*"
]
}
],
"LambdaFunctionConfigurations": [
{
"LambdaFunctionArn":
"arn:aws:lambda:us-east-1:012345678910:function:lambda",
"Events": ["s3:ObjectCreated:*"]
}
],
"QueueConfigurations": [
{
"QueueArn": "arn:aws:sqs:us-east-1:012345678910:myQueue",
"Events": ["s3:ObjectCreated:*"]
}
]
})
result = s3.get_bucket_notification_configuration(Bucket="bucket")
assert len(result["LambdaFunctionConfigurations"]) == 1
assert len(result["TopicConfigurations"]) == 1
assert len(result["QueueConfigurations"]) == 1
# And clear it out:
s3.put_bucket_notification_configuration(Bucket="bucket", NotificationConfiguration={})
result = s3.get_bucket_notification_configuration(Bucket="bucket")
assert not result.get("TopicConfigurations")
assert not result.get("QueueConfigurations")
assert not result.get("LambdaFunctionConfigurations")
@mock_s3
def test_put_bucket_notification_errors():
s3 = boto3.client("s3", region_name="us-east-1")
s3.create_bucket(Bucket="bucket")
# With incorrect ARNs:
for tech, arn in [("Queue", "sqs"), ("Topic", "sns"), ("LambdaFunction", "lambda")]:
with assert_raises(ClientError) as err:
s3.put_bucket_notification_configuration(Bucket="bucket",
NotificationConfiguration={
"{}Configurations".format(tech): [
{
"{}Arn".format(tech):
"arn:aws:{}:us-east-1:012345678910:lksajdfkldskfj",
"Events": ["s3:ObjectCreated:*"]
}
]
})
assert err.exception.response["Error"]["Code"] == "InvalidArgument"
assert err.exception.response["Error"]["Message"] == "The ARN is not well formed"
# Region not the same as the bucket:
with assert_raises(ClientError) as err:
s3.put_bucket_notification_configuration(Bucket="bucket",
NotificationConfiguration={
"QueueConfigurations": [
{
"QueueArn":
"arn:aws:sqs:us-west-2:012345678910:lksajdfkldskfj",
"Events": ["s3:ObjectCreated:*"]
}
]
})
assert err.exception.response["Error"]["Code"] == "InvalidArgument"
assert err.exception.response["Error"]["Message"] == \
"The notification destination service region is not valid for the bucket location constraint"
# Invalid event name:
with assert_raises(ClientError) as err:
s3.put_bucket_notification_configuration(Bucket="bucket",
NotificationConfiguration={
"QueueConfigurations": [
{
"QueueArn":
"arn:aws:sqs:us-east-1:012345678910:lksajdfkldskfj",
"Events": ["notarealeventname"]
}
]
})
assert err.exception.response["Error"]["Code"] == "InvalidArgument"
assert err.exception.response["Error"]["Message"] == "The event is not supported for notifications"
@mock_s3
def test_boto3_put_bucket_logging():
s3 = boto3.client("s3", region_name="us-east-1")
bucket_name = "mybucket"
log_bucket = "logbucket"
wrong_region_bucket = "wrongregionlogbucket"
s3.create_bucket(Bucket=bucket_name)
s3.create_bucket(Bucket=log_bucket) # Adding the ACL for log-delivery later...
s3.create_bucket(Bucket=wrong_region_bucket, CreateBucketConfiguration={"LocationConstraint": "us-west-2"})
# No logging config:
result = s3.get_bucket_logging(Bucket=bucket_name)
assert not result.get("LoggingEnabled")
# A log-bucket that doesn't exist:
with assert_raises(ClientError) as err:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": "IAMNOTREAL",
"TargetPrefix": ""
}
})
assert err.exception.response["Error"]["Code"] == "InvalidTargetBucketForLogging"
# A log-bucket that's missing the proper ACLs for LogDelivery:
with assert_raises(ClientError) as err:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": log_bucket,
"TargetPrefix": ""
}
})
assert err.exception.response["Error"]["Code"] == "InvalidTargetBucketForLogging"
assert "log-delivery" in err.exception.response["Error"]["Message"]
# Add the proper "log-delivery" ACL to the log buckets:
bucket_owner = s3.get_bucket_acl(Bucket=log_bucket)["Owner"]
for bucket in [log_bucket, wrong_region_bucket]:
s3.put_bucket_acl(Bucket=bucket, AccessControlPolicy={
"Grants": [
{
"Grantee": {
"URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
"Type": "Group"
},
"Permission": "WRITE"
},
{
"Grantee": {
"URI": "http://acs.amazonaws.com/groups/s3/LogDelivery",
"Type": "Group"
},
"Permission": "READ_ACP"
},
{
"Grantee": {
"Type": "CanonicalUser",
"ID": bucket_owner["ID"]
},
"Permission": "FULL_CONTROL"
}
],
"Owner": bucket_owner
})
# A log-bucket that's in the wrong region:
with assert_raises(ClientError) as err:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": wrong_region_bucket,
"TargetPrefix": ""
}
})
assert err.exception.response["Error"]["Code"] == "CrossLocationLoggingProhibitted"
# Correct logging:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": log_bucket,
"TargetPrefix": "{}/".format(bucket_name)
}
})
result = s3.get_bucket_logging(Bucket=bucket_name)
assert result["LoggingEnabled"]["TargetBucket"] == log_bucket
assert result["LoggingEnabled"]["TargetPrefix"] == "{}/".format(bucket_name)
assert not result["LoggingEnabled"].get("TargetGrants")
# And disabling:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={})
assert not s3.get_bucket_logging(Bucket=bucket_name).get("LoggingEnabled")
# And enabling with multiple target grants:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": log_bucket,
"TargetPrefix": "{}/".format(bucket_name),
"TargetGrants": [
{
"Grantee": {
"ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
"Type": "CanonicalUser"
},
"Permission": "READ"
},
{
"Grantee": {
"ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
"Type": "CanonicalUser"
},
"Permission": "WRITE"
}
]
}
})
result = s3.get_bucket_logging(Bucket=bucket_name)
assert len(result["LoggingEnabled"]["TargetGrants"]) == 2
assert result["LoggingEnabled"]["TargetGrants"][0]["Grantee"]["ID"] == \
"SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274"
# Test with just 1 grant:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": log_bucket,
"TargetPrefix": "{}/".format(bucket_name),
"TargetGrants": [
{
"Grantee": {
"ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
"Type": "CanonicalUser"
},
"Permission": "READ"
}
]
}
})
result = s3.get_bucket_logging(Bucket=bucket_name)
assert len(result["LoggingEnabled"]["TargetGrants"]) == 1
# With an invalid grant:
with assert_raises(ClientError) as err:
s3.put_bucket_logging(Bucket=bucket_name, BucketLoggingStatus={
"LoggingEnabled": {
"TargetBucket": log_bucket,
"TargetPrefix": "{}/".format(bucket_name),
"TargetGrants": [
{
"Grantee": {
"ID": "SOMEIDSTRINGHERE9238748923734823917498237489237409123840983274",
"Type": "CanonicalUser"
},
"Permission": "NOTAREALPERM"
}
]
}
})
assert err.exception.response["Error"]["Code"] == "MalformedXML"
@mock_s3
def test_boto3_put_object_tagging():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = 'key-with-tags'
s3.create_bucket(Bucket=bucket_name)
with assert_raises(ClientError) as err:
s3.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={'TagSet': [
{'Key': 'item1', 'Value': 'foo'},
{'Key': 'item2', 'Value': 'bar'},
]}
)
e = err.exception
e.response['Error'].should.equal({
'Code': 'NoSuchKey',
'Message': 'The specified key does not exist.',
'RequestID': '7a62c49f-347e-4fc4-9331-6e8eEXAMPLE',
})
s3.put_object(
Bucket=bucket_name,
Key=key,
Body='test'
)
resp = s3.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={'TagSet': [
{'Key': 'item1', 'Value': 'foo'},
{'Key': 'item2', 'Value': 'bar'},
]}
)
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
@mock_s3
def test_boto3_put_object_tagging_with_single_tag():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = 'key-with-tags'
s3.create_bucket(Bucket=bucket_name)
s3.put_object(
Bucket=bucket_name,
Key=key,
Body='test'
)
resp = s3.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={'TagSet': [
{'Key': 'item1', 'Value': 'foo'}
]}
)
resp['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
@mock_s3
def test_boto3_get_object_tagging():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = 'key-with-tags'
s3.create_bucket(Bucket=bucket_name)
s3.put_object(
Bucket=bucket_name,
Key=key,
Body='test'
)
resp = s3.get_object_tagging(Bucket=bucket_name, Key=key)
resp['TagSet'].should.have.length_of(0)
resp = s3.put_object_tagging(
Bucket=bucket_name,
Key=key,
Tagging={'TagSet': [
{'Key': 'item1', 'Value': 'foo'},
{'Key': 'item2', 'Value': 'bar'},
]}
)
resp = s3.get_object_tagging(Bucket=bucket_name, Key=key)
resp['TagSet'].should.have.length_of(2)
resp['TagSet'].should.contain({'Key': 'item1', 'Value': 'foo'})
resp['TagSet'].should.contain({'Key': 'item2', 'Value': 'bar'})
@mock_s3
def test_boto3_list_object_versions():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = 'key-with-versions'
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_versioning(
Bucket=bucket_name,
VersioningConfiguration={
'Status': 'Enabled'
}
)
items = (six.b('v1'), six.b('v2'))
for body in items:
s3.put_object(
Bucket=bucket_name,
Key=key,
Body=body
)
response = s3.list_object_versions(
Bucket=bucket_name
)
# Two object versions should be returned
len(response['Versions']).should.equal(2)
keys = set([item['Key'] for item in response['Versions']])
keys.should.equal({key})
# Test latest object version is returned
response = s3.get_object(Bucket=bucket_name, Key=key)
response['Body'].read().should.equal(items[-1])
@mock_s3
def test_boto3_bad_prefix_list_object_versions():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = 'key-with-versions'
bad_prefix = 'key-that-does-not-exist'
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_versioning(
Bucket=bucket_name,
VersioningConfiguration={
'Status': 'Enabled'
}
)
items = (six.b('v1'), six.b('v2'))
for body in items:
s3.put_object(
Bucket=bucket_name,
Key=key,
Body=body
)
response = s3.list_object_versions(
Bucket=bucket_name,
Prefix=bad_prefix,
)
response['ResponseMetadata']['HTTPStatusCode'].should.equal(200)
response.should_not.contain('Versions')
response.should_not.contain('DeleteMarkers')
@mock_s3
def test_boto3_delete_markers():
s3 = boto3.client('s3', region_name='us-east-1')
bucket_name = 'mybucket'
key = u'key-with-versions-and-unicode-ó'
s3.create_bucket(Bucket=bucket_name)
s3.put_bucket_versioning(
Bucket=bucket_name,
VersioningConfiguration={
'Status': 'Enabled'
}
)
items = (six.b('v1'), six.b('v2'))
for body in items:
s3.put_object(
Bucket=bucket_name,
Key=key,
Body=body
)
s3.delete_objects(Bucket=bucket_name, Delete={'Objects': [{'Key': key}]})
with assert_raises(ClientError) as e:
s3.get_object(
Bucket=bucket_name,
Key=key
)
e.response['Error']['Code'].should.equal('404')
s3.delete_object(
Bucket=bucket_name,
Key=key,
VersionId='2'
)
response = s3.get_object(
Bucket=bucket_name,
Key=key
)
response['Body'].read().should.equal(items[-1])
response = s3.list_object_versions(
Bucket=bucket_name
)
response['Versions'].should.have.length_of(2)
    # We've asserted there are only 2 records, so one is newest and one is oldest
latest = list(filter(lambda item: item['IsLatest'], response['Versions']))[0]
oldest = list(filter(lambda item: not item['IsLatest'], response['Versions']))[0]
# Double check ordering of version ID's
latest['VersionId'].should.equal('1')
oldest['VersionId'].should.equal('0')
# Double check the name is still unicode
latest['Key'].should.equal('key-with-versions-and-unicode-ó')
oldest['Key'].should.equal('key-with-versions-and-unicode-ó')
@mock_s3
def test_get_stream_gzipped():
payload = b"this is some stuff here"
s3_client = boto3.client("s3", region_name='us-east-1')
s3_client.create_bucket(Bucket='moto-tests')
buffer_ = BytesIO()
with GzipFile(fileobj=buffer_, mode='w') as f:
f.write(payload)
payload_gz = buffer_.getvalue()
s3_client.put_object(
Bucket='moto-tests',
Key='keyname',
Body=payload_gz,
ContentEncoding='gzip',
)
obj = s3_client.get_object(
Bucket='moto-tests',
Key='keyname',
)
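    # wbits=16+MAX_WBITS tells zlib to expect gzip framing (header and CRC)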
res = zlib.decompress(obj['Body'].read(), 16 + zlib.MAX_WBITS)
assert res == payload
TEST_XML = """\
<?xml version="1.0" encoding="UTF-8"?>
<ns0:WebsiteConfiguration xmlns:ns0="http://s3.amazonaws.com/doc/2006-03-01/">
<ns0:IndexDocument>
<ns0:Suffix>index.html</ns0:Suffix>
</ns0:IndexDocument>
<ns0:RoutingRules>
<ns0:RoutingRule>
<ns0:Condition>
<ns0:KeyPrefixEquals>test/testing</ns0:KeyPrefixEquals>
</ns0:Condition>
<ns0:Redirect>
<ns0:ReplaceKeyWith>test.txt</ns0:ReplaceKeyWith>
</ns0:Redirect>
</ns0:RoutingRule>
</ns0:RoutingRules>
</ns0:WebsiteConfiguration>
"""
|
class TreeNode(object):
    """Minimal binary-tree node (normally supplied by the judge harness)."""
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution(object):
    def sortedArrayToBST(self, nums):
        """
        :type nums: List[int]
        :rtype: TreeNode
        """
        if not nums:
            return None
        # The middle element becomes the root, which keeps the tree balanced
        mid = len(nums) // 2
        root = TreeNode(nums[mid])
        root.left = self.sortedArrayToBST(nums[:mid])
        root.right = self.sortedArrayToBST(nums[mid + 1:])
        return root
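# Quick check (illustrative, not part of the original solution): the middle
# element 0 becomes the root of a height-balanced BST.
if __name__ == '__main__':
    root = Solution().sortedArrayToBST([-10, -3, 0, 5, 9])
    assert root.val == 0
    assert root.left.val == -3 and root.right.val == 9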
|
"""
blast2gff.py [options] <blast file>
"""
import sys
from optparse import OptionParser
from blast import BlastFile
import gff
usage = "%prog [options] <blast file>"
parser = OptionParser(usage=usage)
parser.add_option(
"-s", "--source",
dest="source",
help="GFF source (Default: match)",
default='match')
parser.add_option(
"-n", "--note",
dest="note",
help="Note",
default=None)
options, args = parser.parse_args()
output = []
extrema = []
scores = []
for hsp in BlastFile(args[0]):
tokens = hsp.queryId.split('|')
    if len(tokens) > 4:
name = tokens[3]
else:
name = hsp.queryId
g = gff.Feature(
reference=hsp.subjectId.split(':')[0],
source=options.source,
type='HSP',
start=hsp.sStart,
end=hsp.sEnd,
score=hsp.bitScore,
strand=hsp.strand(),
group='Match %s ; Evalue %g' % (name, hsp.eValue)
)
if options.note:
g.group += ' ; Note "%s"' % options.note
output.append(g)
extrema.append(g.start)
extrema.append(g.end)
scores.append(hsp.bitScore)
output.sort(key=lambda x: x.start)
if output:
g = output[0]
match = gff.Feature(
reference=g.reference,
source=options.source,
type='match',
start=min(extrema),
end=max(extrema),
score=sum(scores),
strand=g.strand,
group='Match %s' % name
)
if options.note:
match.group += ' ; Note "%s"' % options.note
    output.insert(0, match)
for g in output:
    print(g)
|
from setuptools import setup, find_packages
requirements = [
"sphinx",
"requests",
]
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Topic :: Software Development :: Documentation",
"Topic :: Documentation"
]
setup(
name='sphinxcontrib-menesbuilder',
version="0.3.1",
    description='menesbuilder is a Sphinx extension for the menes PDF-generating web application',
long_description=open("README.rst").read(),
classifiers=classifiers,
keywords=['sphinx', 'pdf'],
author='WAKAYAMA shirou',
author_email='shirou.faw at gmail.com',
zip_safe=False,
url='http://menes-pdf.info',
download_url='http://pypi.python.org/pypi/sphinxcontrib-menesbuilder',
license='BSD License',
packages=find_packages(),
install_requires=requirements,
namespace_packages=['sphinxcontrib'],
include_package_data=True
)
|
def id_generator():
id_ = 0
while True:
yield id_
id_ += 1
class PersistentIDManager:
def __init__(self):
self._id_generator = id_generator()
self._persistent_to_temporary = {}
self._temporary_to_persistent = {}
def create_persistent_id(self, temporary_id):
persistent_id = next(self._id_generator)
self._persistent_to_temporary[persistent_id] = temporary_id
self._temporary_to_persistent[temporary_id] = persistent_id
return persistent_id
def change_temporary_with_persistent_id(self, persistent_id, temporary_id):
old_temporary = self._persistent_to_temporary[persistent_id]
self._persistent_to_temporary[persistent_id] = temporary_id
self._temporary_to_persistent[temporary_id] = persistent_id
self._temporary_to_persistent.pop(old_temporary)
def change_temporary_with_temporary_id(self, old_temporary, temporary_id):
persistent_id = self._temporary_to_persistent.pop(old_temporary)
self._persistent_to_temporary[persistent_id] = temporary_id
self._temporary_to_persistent[temporary_id] = persistent_id
def get_temporary_id(self, persistent_id):
return self._persistent_to_temporary[persistent_id]
def get_persistent_id(self, temporary_id):
return self._temporary_to_persistent[temporary_id]
def remove_with_persistent_id(self, persistent_id):
temporary = self._persistent_to_temporary.pop(persistent_id)
self._temporary_to_persistent.pop(temporary)
def remove_with_temporary_id(self, temporary_id):
persistent = self._temporary_to_persistent.pop(temporary_id)
self._persistent_to_temporary.pop(persistent)
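# Usage sketch (illustrative, not part of the original module): persistent ids
# stay stable while the temporary id they map to is swapped out.
if __name__ == '__main__':
    manager = PersistentIDManager()
    pid = manager.create_persistent_id('tmp-a')  # first id from the generator is 0
    manager.change_temporary_with_persistent_id(pid, 'tmp-b')
    assert manager.get_temporary_id(pid) == 'tmp-b'
    assert manager.get_persistent_id('tmp-b') == pid
    manager.remove_with_temporary_id('tmp-b')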
|
from jira.client import GreenHopper
options = {
'server': 'https://jira.atlassian.com'}
gh = GreenHopper(options)
boards = gh.boards()
board_id = 441
print("GreenHopper board: %s (%s)" % (boards[0].name, board_id))
sprints = gh.sprints(board_id)
for sprint in sprints:
sprint_id = sprint.id
print("Sprint: %s" % sprint.name)
incompleted_issues = gh.incompleted_issues(board_id, sprint_id)
print("Incomplete issues: %s" %
', '.join(issue.key for issue in incompleted_issues))
|
__all__ = [
'ExecutionError',
'NoDispatch',
'InvalidLibraryDefinition',
'CustomSyntaxError'
]
class ExecutionError(Exception):
"""
Raised when we are unable to execute a certain lazy or immediate
expression.
"""
class NoDispatch(Exception):
def __init__(self, aterm):
self.aterm = aterm
def __str__(self):
return "No implementation for '%r'" % self.aterm
class InvalidLibraryDefinition(Exception):
pass
syntax_error = """
File {filename}, line {lineno}
{line}
{pointer}
{error}: {msg}
"""
class CustomSyntaxError(Exception):
"""
Makes datashape parse errors look like Python SyntaxError.
"""
def __init__(self, lexpos, filename, text, msg=None):
self.lexpos = lexpos
self.filename = filename
self.text = text
self.msg = msg or 'invalid syntax'
self.lineno = text.count('\n', 0, lexpos) + 1
# Get the extent of the line with the error
linestart = text.rfind('\n', 0, lexpos)
if linestart < 0:
linestart = 0
else:
linestart = linestart + 1
lineend = text.find('\n', lexpos)
if lineend < 0:
lineend = len(text)
self.line = text[linestart:lineend]
self.col_offset = lexpos - linestart
    def __str__(self):
        pointer = ' ' * self.col_offset + '^'
        return syntax_error.format(
            filename=self.filename,
            lineno=self.lineno,
            line=self.line,
            pointer=pointer,
            msg=self.msg,
            error=self.__class__.__name__,
        )
def __repr__(self):
return str(self)
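# Rendering sketch (illustrative): CustomSyntaxError(8, '<test>', 'var x = @bad')
# reports "File <test>, line 1", echoes the offending line, and places the
# caret at column 8 (under the '@'), followed by
# "CustomSyntaxError: invalid syntax".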
|
import os
import sys
from barf.barf import BARF
if __name__ == "__main__":
#
# Open file
#
try:
filename = os.path.abspath("../../bin/x86/branch4")
barf = BARF(filename)
except Exception as err:
        print(err)
        print("[-] Error opening file: %s" % filename)
sys.exit(1)
#
# Translate to REIL
#
print("[+] Translating x86 to REIL...")
for addr, asm_instr, reil_instrs in barf.translate():
print("0x{0:08x} : {1}".format(addr, asm_instr))
for reil_instr in reil_instrs:
print("{0:14}{1}".format("", reil_instr))
|
import pytest
from fabric.api import run
from fabtools.utils import run_as_root
pytestmark = pytest.mark.network
@pytest.fixture(scope='module', autouse=True)
def check_for_debian_family():
from fabtools.system import distrib_family
if distrib_family() != 'debian':
pytest.skip("Skipping apt-key test on non-Debian distrib")
def test_add_apt_key_with_key_id_from_url():
from fabtools.deb import add_apt_key
try:
add_apt_key(keyid='A750EDCD', url='https://packagecloud.io/varnishcache/varnish60lts/gpgkey')
run_as_root('apt-key finger | grep -q -E "A750 ?EDCD"')
finally:
run_as_root('apt-key del A750EDCD', quiet=True)
def test_add_apt_key_with_key_id_from_specific_key_server():
from fabtools.deb import add_apt_key
try:
add_apt_key(keyid='7BD9BF62', keyserver='keyserver.ubuntu.com')
run_as_root('apt-key finger | grep -q -E "7BD9 ?BF62"')
finally:
run_as_root('apt-key del 7BD9BF62', quiet=True)
def test_add_apt_key_with_key_id_from_file():
from fabtools.deb import add_apt_key
try:
run('wget https://packagecloud.io/varnishcache/varnish60lts/gpgkey -O /tmp/tmp.fabtools.test.key')
add_apt_key(keyid='A750EDCD', filename='/tmp/tmp.fabtools.test.key')
run_as_root('apt-key finger | grep -q -E "A750 ?EDCD"')
finally:
run_as_root('apt-key del A750EDCD', quiet=True)
def test_add_apt_key_without_key_id_from_url():
from fabtools.deb import add_apt_key
try:
add_apt_key(url='https://packagecloud.io/varnishcache/varnish60lts/gpgkey')
run_as_root('apt-key finger | grep -q -E "A750 ?EDCD"')
finally:
run_as_root('apt-key del A750EDCD', quiet=True)
def test_add_apt_key_without_key_id_from_file():
from fabtools.deb import add_apt_key
try:
run('wget https://packagecloud.io/varnishcache/varnish60lts/gpgkey -O /tmp/tmp.fabtools.test.key')
add_apt_key(filename='/tmp/tmp.fabtools.test.key')
run_as_root('apt-key finger | grep -q -E "A750 ?EDCD"')
finally:
run_as_root('apt-key del A750EDCD', quiet=True)
def test_require_deb_key_from_url():
from fabtools.require.deb import key as require_key
try:
require_key(keyid='A750EDCD', url='https://packagecloud.io/varnishcache/varnish60lts/gpgkey')
run_as_root('apt-key finger | grep -q -E "A750 ?EDCD"')
finally:
run_as_root('apt-key del A750EDCD', quiet=True)
def test_require_deb_key_from_specific_keyserver():
from fabtools.require.deb import key as require_key
try:
require_key(keyid='7BD9BF62', keyserver='keyserver.ubuntu.com')
run_as_root('apt-key finger | grep -q -E "7BD9 ?BF62"')
finally:
run_as_root('apt-key del 7BD9BF62', quiet=True)
def test_require_deb_key_from_file():
from fabtools.require.deb import key as require_key
try:
run('wget https://packagecloud.io/varnishcache/varnish60lts/gpgkey -O /tmp/tmp.fabtools.test.key')
require_key(keyid='A750EDCD', filename='/tmp/tmp.fabtools.test.key')
run_as_root('apt-key finger | grep -q -E "A750 ?EDCD"')
finally:
run_as_root('apt-key del A750EDCD', quiet=True)
|
from JumpScale import j
parentclass = j.core.osis.getOsisImplementationParentClass("_arakoonmodelobjects")  # "_arakoonmodelobjects" is the namespace name
class mainclass(parentclass):
"""
"""
    def getObject(self, ddict={}):
        obj = j.core.grid.zobjects.getModelObject(ddict=ddict)
return obj
|
"""Utility for converting a bunch of #defined (integer!) constants
into a Python dictionary
To use this, you'll want to create a new header file that just
contains lines such as::
#define FOO 0
Every line that doesn't start with #define will be ignored.
"""
from __future__ import print_function
def convert(defines):
"""Convert the string defines into a dict."""
constants = {}
for line in defines:
line = line.split()
try:
if line[0] != '#define':
continue
constants[line[1]] = int(line[2], base=0)
except IndexError:
pass
return constants
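# Example (illustrative):
#   convert(['#define FOO 0x10', '// not a define', '#define BAR 2'])
#   -> {'FOO': 16, 'BAR': 2}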
if __name__ == "__main__":
import sys, json
try:
fname = sys.argv[1]
with open(fname, 'r') as f:
constants = convert(f.readlines())
except IndexError:
print("Usage: python pdef2dict.py <filename>")
sys.exit(1)
with open('outfile.py', 'w') as outfile:
out = json.dumps(constants, indent=4, sort_keys=True)
outfile.write('constants = ' + out)
|
"""
Maintain a ``global'' dict stack
def doit():
Context.curr()['a'] = 10
...
ctx = Context()
with ctx:
doit()
ctx => { 'a' : 10 }
"""
import threading
class Context(dict):
__thstat__ = threading.local()
__thstat__.curr = None
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
self.__prev = None
    @classmethod
    def curr(cls):
        """
        Get current context in this thread of execution
        """
        # thread-local attributes assigned at class-definition time exist only
        # in the defining thread, so default to None in any other thread
        return getattr(cls.__thstat__, 'curr', None)
    @classmethod
    def switch(cls, ctx):
        """
        Switch context in this thread of execution, return the old current context
        """
        ret = getattr(cls.__thstat__, 'curr', None)
        cls.__thstat__.curr = ctx
        return ret
def __enter__(self):
"""
        In this thread of execution, use this context as current context
        """
        if self.__prev is not None:
            raise ValueError('Already in stack')
self.__prev = Context.switch(self)
def __exit__(self, exc_type, exc_value, traceback):
"""
Exit the scope of context, restore prev context
"""
if self.__class__.curr() != self:
            raise ValueError('Invalid thread of execution')
self.__class__.switch(self.__prev)
self.__prev = None
curr = Context.curr
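# Usage sketch (illustrative, not part of the original module), mirroring the
# module docstring:
if __name__ == '__main__':
    def doit():
        Context.curr()['a'] = 10
    ctx = Context()
    with ctx:
        doit()
    assert ctx == {'a': 10}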
|
import os
from uliweb import manage
from uliweb.orm import *
from uliweb.manage import make_simple_application
os.chdir('test_multidb')
manage.call('uliweb syncdb -v')
manage.call('uliweb syncdb -v --engine=b')
manage.call('uliweb syncdb -v --engine=c')
def test_1():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> print '__models__', orm.__models__
__models__ {'blog': {'config': {}, 'model_path': 'blog.models.Blog', 'engines': ['default', 'b'], 'appname': 'blog'}, 'category': {'config': {}, 'model_path': 'blog.models.Category', 'engines': ['b'], 'appname': 'blog'}}
>>> print engine_manager['default'].models
{'blog': {'model': <class 'blog.models.Blog'>, 'created': None, 'model_path': 'blog.models.Blog', 'appname': 'blog'}}
>>> print engine_manager['b'].models
{'blog': {'model': <class 'uliweb.orm.Blog'>, 'created': None, 'model_path': 'blog.models.Blog', 'appname': 'blog'}, 'category': {'model': <class 'blog.models.Category'>, 'created': None, 'model_path': 'blog.models.Category', 'appname': 'blog'}}
>>> Blog1 = get_model('blog')
>>> Blog2 = get_model('blog', 'b')
>>> print 'blog2', Blog2, Blog2.table, Blog2.tablename, Blog2.get_engine_name(), Blog2.get_session()
blog2 <class 'uliweb.orm.Blog'> blog blog b <Session engine_name:b, auto_transaction=None, auto_close=True>
>>> print 'blog1', Blog1, Blog1.table, Blog1.tablename, Blog1.get_engine_name(), Blog1.get_session()
blog1 <class 'blog.models.Blog'> blog blog default <Session engine_name:default, auto_transaction=None, auto_close=True>
>>> r = Blog2.all().remove()
>>> r = Blog1.all().remove()
>>> b2 = Blog2(title='1', content='1')
>>> b2.save()
True
>>> b1 = Blog1(title='2', content='2')
>>> b1.save()
True
>>> print 'blog2 all', list(Blog2.all())
blog2 all [<Blog {'title':u'1','content':u'1','id':1}>]
>>> print 'blog1 all', list(Blog1.all())
blog1 all [<Blog {'title':u'2','content':u'2','id':1}>]
>>> b3 = Blog2(title='3', content='3')
>>> b3.save()
True
>>> print 'blog2 all', list(Blog2.all())
blog2 all [<Blog {'title':u'1','content':u'1','id':1}>, <Blog {'title':u'3','content':u'3','id':2}>]
"""
def test_2():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> C = get_model('category')
>>> r = C.all().remove()
>>> a = C(name='python')
>>> a.save()
True
>>> C.get(1)
<Category {'name':u'python','id':1}>
"""
def test_3():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> B = get_model('blog')
>>> r = B.all().remove()
>>> a = B(title='1', content='1')
>>> a.save()
True
>>> B1 = get_model('blog', 'b')
>>> B2 = B.use('b')
>>> id(B1) == id(B2)
True
>>> b = B.use('b')(title='2', content='2')
>>> b.save()
True
>>> B.get(1)
<Blog {'title':u'1','content':u'1','id':1}>
"""
def test_4():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> B = get_model('blog')
>>> r = B.all().remove()
>>> a = B(title='1', content='1')
>>> a.save()
True
>>> r = B.use('b').remove()
>>> b = B.use('b')(title='2', content='2')
>>> b.save()
True
>>> print list(B.all().use('b'))
[<Blog {'title':u'2','content':u'2','id':1}>]
>>> print list(B.use('b').all())
[<Blog {'title':u'2','content':u'2','id':1}>]
"""
def test_5():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> session = Session()
>>> B = get_model('blog')
>>> r = B.all().remove()
>>> B1 = B.use(session)
>>> print session.in_transaction()
False
>>> r = B1.all().remove()
>>> trans = session.begin()
>>> print session.in_transaction()
True
>>> a = B1(title='1', content='1')
>>> a.save()
True
>>> session.commit()
>>> B.get(1)
<Blog {'title':u'1','content':u'1','id':1}>
>>> from sqlalchemy.sql import select
>>> print list(session.do_(select([B.table])))
[(u'1', u'1', 1)]
>>> r = B.get(1)
"""
def test_local_cache():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> session = Session()
>>> session.get_local_cache('111')
>>> session.get_local_cache('111', '222')
'222'
>>> B = get_model('blog')
>>> r = B.all().remove()
>>> a = B(title='1', content='1')
>>> a.save()
True
>>> set_echo(True)
>>> get_cached_object('blog', 1) # doctest:+ELLIPSIS, +NORMALIZE_WHITESPACE
<BLANKLINE>
===>>>>> [default] (...)
SELECT blog.title, blog.content, blog.id FROM blog WHERE blog.id = 1 LIMIT 1 OFFSET 0;
===<<<<< time used ...s
<BLANKLINE>
<Blog {'title':u'1','content':u'1','id':1}>
>>> s = get_session()
>>> s.local_cache
{'OC:default:blog:1': <Blog {'title':u'1','content':u'1','id':1}>}
>>> get_cached_object('blog', 1)
<Blog {'title':u'1','content':u'1','id':1}>
>>> s.close()
>>> s.local_cache
{}
>>> set_echo(False)
"""
def test_rollback():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> session = Session()
>>> B = get_model('blog')
>>> r = B.all().remove()
>>> B1 = B.use(session)
>>> print session.in_transaction()
False
>>> r = B1.all().remove()
>>> trans = session.begin()
>>> print session.in_transaction()
True
>>> a = B1(title='1', content='1')
>>> a.save()
True
>>> session.rollback()
>>> B.count()
0
"""
def test_blog_add():
"""
>>> app = make_simple_application(project_dir='.')
>>> from uliweb import is_in_web
>>> from uliweb.utils.test import client
>>> Blog = get_model('blog')
>>> r = Blog.all().remove()
>>> c = client('.')
>>> r = c.get('/test_add')
>>> print r.data
ok
>>> print list(Blog.all())
[<Blog {'title':u'test_add','content':u'test_add','id':1}>]
>>> b = Blog.get(Blog.c.title=='test_add')
>>> b
<Blog {'title':u'test_add','content':u'test_add','id':1}>
>>> b.delete()
>>> Blog.count()
0
"""
def test_blog_rollback():
"""
>>> app = make_simple_application(project_dir='.')
>>> from uliweb import is_in_web
>>> from uliweb.utils.test import client
>>> Blog = get_model('blog')
>>> r = Blog.all().remove()
>>> c = client('.')
>>> try:
... r = c.get('/test_rollback')
... except Exception:
... print 'fail'
fail
>>> print list(Blog.all())
[]
"""
def test_blog_manual_commit():
"""
>>> app = make_simple_application(project_dir='.')
>>> from uliweb import is_in_web
>>> from uliweb.utils.test import client
>>> Blog = get_model('blog')
>>> r = Blog.all().remove()
>>> c = client('.')
>>> r = c.get('/test_manual_commit')
>>> print list(Blog.all())
[<Blog {'title':u'test_add','content':u'test_add','id':1}>]
>>> r = Blog.all().remove()
>>> r = c.get('/test_manual_rollback')
>>> print list(Blog.all())
[]
"""
def test_patch_none():
"""
>>> app = make_simple_application(project_dir='.')
>>> Blog = get_model('blog')
>>> cond = None
>>> print (Blog.c.id == 1) & cond
blog.id = :id_1
"""
def test_connection_duplication():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> print engine_manager['c'].models
{'blog': {'model': <class 'blog.models.Blog'>, 'created': None, 'model_path': 'blog.models.Blog', 'appname': 'blog'}}
>>> Blog1 = get_model('blog', 'c')
>>> print 'blog1', Blog1, Blog1.table, Blog1.tablename, Blog1.get_engine_name(), Blog1.get_session()
blog1 <class 'uliweb.orm.Blog'> blog blog c <Session engine_name:c, auto_transaction=None, auto_close=True>
"""
def test_set_session():
"""
>>> app = make_simple_application(project_dir='.')
>>> import uliweb.orm as orm
>>> s = Session('c')
>>> set_session(s, 'default') # doctest: +ELLIPSIS
<uliweb.orm.Session object at ...>
>>> Blog1 = get_model('blog')
>>> print 'blog1', Blog1, Blog1.table, Blog1.tablename, Blog1.get_engine_name(), Blog1.get_session()
blog1 <class 'blog.models.Blog'> blog blog default <Session engine_name:c, auto_transaction=None, auto_close=True>
>>> s = get_session('default')
>>> print s
<Session engine_name:c, auto_transaction=None, auto_close=True>
>>> b1 = Blog1(title='2', content='2')
>>> b1.save()
True
"""
|
import sys
import uvint
import io
from pathlib import Path
def main():
path = Path(sys.argv[1])
out = Path(sys.argv[2])
with open(out, 'wb') as f:
decompress(f, path.read_bytes())
def decompress(file, buf):
input = io.BytesIO(buf)
def read_uvint():
return uvint.decode_from_file(input)
def read_string(length):
s = input.read(length)
#assert len(s) == length
return s
# read symbol table
symbols = []
while True:
length = read_uvint()
if length == 0:
break
symbols.append(read_string(length))
# decode message
n = len(buf)
while input.tell() < n:
        proc = 100 * input.tell() / n  # decoding progress in percent (not currently displayed)
cmd = read_uvint()
if cmd < 128:
# raw string of length 0..127
file.write(read_string(cmd))
else:
# symbol
symbol = cmd - 128
file.write(symbols[symbol])
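# Stream layout (inferred from the decoder above): a symbol table of
# uvint-length-prefixed strings terminated by a zero length, then a command
# stream where a uvint below 128 means "copy that many raw bytes" and
# 128 + i means "emit symbol i from the table".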
if __name__ == '__main__':
main()
|
try:
import bluetooth
BT_address = '00:12:02:09:05:16'
BT_port = 1
BT_socket = bluetooth.BluetoothSocket(bluetooth.RFCOMM)
BT_socket.connect((BT_address, BT_port))
BT_available = True
except:
BT_available = False
print 'No bluetooth device is available'
class RoMIE:
def _wait_ack(self):
if not BT_available: return
received = ''
while 'ACK' not in received and 'NAK' not in received:
received += BT_socket.recv(1024)
if 'NAK' in received:
# TODO finish experiment
pass
return received
def forward(self):
if not BT_available: return 'forward'
BT_socket.send('F')
response = self._wait_ack()
return response
def turn_left(self):
if not BT_available: return 'left'
BT_socket.send('L')
response = self._wait_ack()
return response
def turn_right(self):
if not BT_available: return 'right'
BT_socket.send('R')
response = self._wait_ack()
return response
def check_wall(self):
if not BT_available: return 'wall'
BT_socket.send('S')
response = self._wait_ack()
return response
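# Protocol summary (from the handlers above): single-byte commands go out over
# RFCOMM -- 'F' drive forward, 'L' turn left, 'R' turn right, 'S' probe for a
# wall -- and the robot answers with a payload terminated by ACK (or NAK on
# failure).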
import BaseHTTPServer
import urlparse
class RoMIEHandler(BaseHTTPServer.BaseHTTPRequestHandler):
def __init__(self,req,client_addr,server):
BaseHTTPServer.BaseHTTPRequestHandler.__init__(self,req,client_addr,server)
def do_GET(self):
parsedParams = urlparse.urlparse(self.path)
queryParsed = urlparse.parse_qs(parsedParams.query)
        global romie
        response = ''
if parsedParams.path == "/l":
response = romie.turn_left()
elif parsedParams.path == "/r":
response = romie.turn_right()
elif parsedParams.path == "/f":
response = romie.forward()
elif parsedParams.path == "/s":
response = romie.check_wall()
        # Always answer 200; unknown paths simply return an empty body
        self.send_response(200)
        self.send_header('Content-Type', 'text/html')
        self.end_headers()
        self.wfile.write(response)
romie = RoMIE()
httpd = BaseHTTPServer.HTTPServer(('', 8000), RoMIEHandler)
print "RoMIE server listening"
httpd.serve_forever()
|
"""
Multiobjective Optimization with Femag
"""
import sys
import json
import femagtools.bch
import femagtools.opt
import logging
import glob
import pathlib
import os
from femagtools.multiproc import Engine
opt = {
"objective_vars": [
{"desc": "Torque / Nm", "name": "dqPar.torque[-1]", "sign": -1},
{"desc": "Torque Ripple / Nm", "name": "torque[-1].ripple"},
{"desc": "Iron Loss / W", "name": "machine.plfe[-1]"}
],
"population_size": 16,
"decision_vars": [
{"desc": "Magn width", "bounds": [0.75, 0.85],
"name": "magnet.magnetSector.magn_width_pct"},
{"desc": "Magn height", "bounds": [3e-3, 5e-3],
"name": "magnet.magnetSector.magn_height"}
]
}
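# Note: "sign": -1 flips the torque objective so that a minimizing optimizer
# effectively maximizes torque (assuming femagtools' optimizer minimizes the
# objective_vars by default).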
operatingConditions = {
"angl_i_up": -10.0,
"calculationMode": "pm_sym_fast",
"wind_temp": 60.0,
"magn_temp": 60.0,
"current": 10.0,
"eval_force": 0,
"speed": 50.0,
"period_frac": 6,
"optim_i_up": 0
}
magnetMat = [{
"name": "M395",
"remanenc": 1.17,
"temcoefbr": -0.001,
"spmaweight": 7.5,
"magntemp": 20.0,
"temcoefhc": -0.001,
"hcb": 810000.4,
"relperm": 1.05}
]
magnetizingCurve = "../magnetcurves"
machine = dict(
name="PM 130 L4",
lfe=0.1,
poles=4,
outer_diam=0.13,
bore_diam=0.07,
inner_diam=0.015,
airgap=0.001,
stator=dict(
num_slots=12,
mcvkey_yoke="M270-35A",
num_slots_gen=3,
nodedist=1.5,
rlength=1.0,
statorRotor3=dict(
slot_h1=0.002,
slot_h2=0.004,
middle_line=0,
tooth_width=0.009,
wedge_width2=0.0,
wedge_width1=0.0,
slot_top_sh=0,
slot_r2=0.002,
slot_height=0.02,
slot_r1=0.003,
slot_width=0.003)
),
magnet=dict(
nodedist=1.0,
mcvkey_shaft="M270-35A",
mcvkey_yoke="M270-35A",
material="M395",
magnetSector=dict(
magn_num=1,
magn_width_pct=0.8,
magn_height=0.004,
magn_shape=0.0,
bridge_height=0.0,
magn_type=1,
condshaft_r=0.02,
magn_ori=2,
magn_rfe=0.0,
bridge_width=0.0,
magn_len=1.0)
),
windings=dict(
num_phases=3,
num_wires=100,
coil_span=3.0,
slot_indul=1,
cufilfact=0.45,
culength=1.4,
num_layers=1)
)
def get_losses(task):
"""adds extra results for optimization:
total losses and efficiency"""
bch = femagtools.bch.read(
glob.glob(os.path.join(task.directory, '*.B*CH'))[-1])
pltot = sum((bch.losses[-1]['winding'],
bch.losses[-1]['staza'], bch.losses[-1]['stajo'],
bch.losses[-1]['rotfe'],
bch.losses[-1]['magnetJ']))
eff = bch.machine['p2']/(bch.machine['p2'] + pltot)
bch.machine['pltotal'] = [pltot]
bch.machine['eff'] = eff
return bch
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(message)s')
logger = logging.getLogger('pmopt')
engine = Engine()
workdir = pathlib.Path.home() / 'opti'
workdir.mkdir(parents=True, exist_ok=True)
o = femagtools.opt.Optimizer(workdir,
magnetizingCurve, magnetMat,
result_func=get_losses)
num_generations = 3
results = o.optimize(num_generations,
opt, machine, operatingConditions, engine)
json.dump(results, sys.stdout)
|
"""Command line tool for advanced search on Python Package Index."""
__author__ = 'Radosław Ganczarek'
__email__ = 'radoslaw@ganczarek.in'
__version__ = '0.1.0'
|
from django.db import models
from . import settings
class PageElement(models.Model):
"""
Elements of an editable HTML page.
"""
slug = models.CharField(max_length=50)
text = models.TextField(blank=True)
account = models.ForeignKey(
settings.ACCOUNT_MODEL, related_name='account_page_element', null=True)
def __unicode__(self):
return unicode(self.slug)
class MediaTag(models.Model):
location = models.CharField(max_length=250)
tag = models.CharField(max_length=50)
def __unicode__(self):
return unicode(self.tag)
class UploadedTemplate(models.Model):
"""
    This model allows recording an uploaded template.
"""
account = models.ForeignKey(
settings.ACCOUNT_MODEL,
related_name='account_template', null=True, blank=True)
name = models.CharField(max_length=150)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=False)
def __unicode__(self):
if self.account:
return '%s-%s' % (self.account, self.name)
else:
return self.name
|
"""
ExternalInstance class module
"""
from pyxform.survey_element import SurveyElement
class ExternalInstance(SurveyElement):
def xml_control(self):
"""
No-op since there is no associated form control to place under <body/>.
Exists here because there's a soft abstractmethod in SurveyElement.
"""
pass
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import re
from indra.statements import *
from indra.assemblers.html.assembler import HtmlAssembler, template_path, \
tag_text
def make_stmt():
    src = Agent('SRC', db_refs={'HGNC': '11283'})
    ras = Agent('RAS', db_refs={'FPLX': 'RAS'})
ev = Evidence(text="We noticed that the Src kinase was able to "
"phosphorylate Ras proteins.",
source_api='test', pmid='1234567',
annotations={'agents': {'raw_text': ['Src kinase',
'Ras proteins']}})
st = Phosphorylation(src, ras, 'tyrosine', '32', evidence=[ev])
return st
def test_format_evidence_text():
stmt = make_stmt()
ev_list = HtmlAssembler._format_evidence_text(stmt)
assert len(ev_list) == 1
ev = ev_list[0]
assert isinstance(ev, dict)
assert set(ev.keys()) == {'source_api', 'text_refs', 'text', 'source_hash',
'pmid'}
assert ev['source_api'] == 'test'
assert ev['text_refs']['PMID'] == '1234567'
assert ev['text'] == ('We noticed that the '
'<span class="badge badge-subject">Src kinase</span> '
'was able to phosphorylate '
'<span class="badge badge-object">'
'Ras proteins</span>.'), ev['text']
def test_assembler():
stmt = make_stmt()
ha = HtmlAssembler([stmt])
result = ha.make_model()
assert isinstance(result, str)
# Read from the template file and make sure the beginning and end of the
# content matches
with open(template_path, 'rt') as f:
template = f.read().strip()
assert result.startswith(template[0:100])
assert result.strip().endswith(template[-10:])
def test_tag_text():
"""If there are overlapping or nested matches, show only one."""
text = 'FooBarBaz binds Foo.'
indices = []
for span in ('FooBarBaz', 'Foo'):
tag_start = "<%s>" % span
tag_close = "</%s>" % span
indices += [(m.start(), m.start() + len(span), span,
tag_start, tag_close)
for m in re.finditer(re.escape(span), text)]
tagged_text = tag_text(text, indices)
print(tagged_text)
assert tagged_text == '<FooBarBaz>FooBarBaz</FooBarBaz> binds ' \
'<Foo>Foo</Foo>.'
|
from nephoria.baseops.botobaseops import BotoBaseOps
import boto
from boto.ec2.regioninfo import RegionInfo
from boto.sts import STSConnection
class STSops(BotoBaseOps):
SERVICE_PREFIX = 'ec2'
EUCARC_URL_NAME = 'sts_url'
CONNECTION_CLASS = STSConnection
    def get_session_token(self, duration=None):
        """
        Get a possibly cached session token; if a new token is issued, request the given duration.
        Options:
            duration - The desired duration for the token in seconds (applies only if a new
                       token is issued; None for the default duration)
        """
        return self.connection.get_session_token(duration)
    def issue_session_token(self, duration=None):
        """
        Get a newly issued session token with the given (or default) duration.
        Options:
            duration - The desired duration for the token in seconds (None for the default duration)
        """
        return self.connection.get_session_token(duration, force_new=True)
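    # A minimal usage sketch (illustrative; the construction arguments and the
    # credentials the nephoria test controller passes to STSops are assumptions):
    #     sts = STSops(...)                      # hypothetical wiring
    #     token = sts.get_session_token()        # possibly cached, default duration
    #     fresh = sts.issue_session_token(3600)  # force a newly issued 1-hour token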
|
import os
from mi.logging import config
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.driver.cg_cpm_eng.cpm.cg_cpm_eng_cpm_common_driver import CgCpmEngCpmDriver
from mi.core.versioning import version
@version("15.7.0")
def parse(basePythonCodePath, sourceFilePath, particleDataHdlrObj):
config.add_configuration(os.path.join(basePythonCodePath, 'res', 'config', 'mi-logging.yml'))
parser_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.cg_cpm_eng_cpm',
DataSetDriverConfigKeys.PARTICLE_CLASS: 'CgCpmEngCpmRecoveredDataParticle'
}
driver = CgCpmEngCpmDriver(sourceFilePath, particleDataHdlrObj, parser_config)
return driver.process()
|
import warnings
from django.template import Library
from django.template import defaulttags
from django.utils.deprecation import RemovedInDjango19Warning
register = Library()
@register.tag
def ssi(parser, token):
warnings.warn(
"Loading the `ssi` tag from the `future` library is deprecated and "
"will be removed in Django 1.9. Use the default `ssi` tag instead.",
RemovedInDjango19Warning)
return defaulttags.ssi(parser, token)
@register.tag
def url(parser, token):
warnings.warn(
"Loading the `url` tag from the `future` library is deprecated and "
"will be removed in Django 1.9. Use the default `url` tag instead.",
RemovedInDjango19Warning)
return defaulttags.url(parser, token)
@register.tag
def cycle(parser, token):
"""
This is the future version of `cycle` with auto-escaping.
The deprecation is now complete and this version is no different
from the non-future version so this can be deprecated (#22306)
By default all strings are escaped.
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% cycle var1 var2 var3 as somecycle %}
        {% endautoescape %}
Or if only some variables should be escaped, you can use::
{% cycle var1 var2|safe var3|safe as somecycle %}
"""
return defaulttags.cycle(parser, token)
@register.tag
def firstof(parser, token):
"""
This is the future version of `firstof` with auto-escaping.
The deprecation is now complete and this version is no different
from the non-future version so this can be deprecated (#22306)
This is equivalent to::
{% if var1 %}
{{ var1 }}
{% elif var2 %}
{{ var2 }}
{% elif var3 %}
{{ var3 }}
{% endif %}
If you want to disable auto-escaping of variables you can use::
{% autoescape off %}
{% firstof var1 var2 var3 "<strong>fallback value</strong>" %}
        {% endautoescape %}
Or if only some variables should be escaped, you can use::
{% firstof var1 var2|safe var3 "<strong>fallback value</strong>"|safe %}
"""
return defaulttags.firstof(parser, token)
|
from datetime import timedelta
import operator
from typing import Any, Callable, List, Optional, Sequence, Type, Union
import numpy as np
from pandas._libs.tslibs import (
NaT,
NaTType,
Timedelta,
delta_to_nanoseconds,
frequencies as libfrequencies,
iNaT,
period as libperiod,
to_offset,
)
from pandas._libs.tslibs.fields import isleapyear_arr
from pandas._libs.tslibs.offsets import Tick, delta_to_tick
from pandas._libs.tslibs.period import (
DIFFERENT_FREQ,
IncompatibleFrequency,
Period,
get_period_field_arr,
period_asfreq_arr,
)
from pandas._typing import AnyArrayLike
from pandas.util._decorators import cache_readonly
from pandas.core.dtypes.common import (
TD64NS_DTYPE,
ensure_object,
is_datetime64_dtype,
is_float_dtype,
is_period_dtype,
pandas_dtype,
)
from pandas.core.dtypes.dtypes import PeriodDtype
from pandas.core.dtypes.generic import (
ABCIndexClass,
ABCPeriodIndex,
ABCSeries,
ABCTimedeltaArray,
)
from pandas.core.dtypes.missing import isna, notna
import pandas.core.algorithms as algos
from pandas.core.arrays import datetimelike as dtl
import pandas.core.common as com
from pandas.tseries.offsets import DateOffset
def _field_accessor(name: str, docstring=None):
def f(self):
base, _ = libfrequencies.get_freq_code(self.freq)
result = get_period_field_arr(name, self.asi8, base)
return result
f.__name__ = name
f.__doc__ = docstring
return property(f)
class PeriodArray(dtl.DatetimeLikeArrayMixin, dtl.DatelikeOps):
"""
Pandas ExtensionArray for storing Period data.
Users should use :func:`period_array` to create new instances.
Parameters
----------
values : Union[PeriodArray, Series[period], ndarray[int], PeriodIndex]
The data to store. These should be arrays that can be directly
converted to ordinals without inference or copy (PeriodArray,
ndarray[int64]), or a box around such an array (Series[period],
PeriodIndex).
freq : str or DateOffset
The `freq` to use for the array. Mostly applicable when `values`
is an ndarray of integers, when `freq` is required. When `values`
is a PeriodArray (or box around), it's checked that ``values.freq``
matches `freq`.
dtype : PeriodDtype, optional
A PeriodDtype instance from which to extract a `freq`. If both
`freq` and `dtype` are specified, then the frequencies must match.
copy : bool, default False
Whether to copy the ordinals before storing.
Attributes
----------
None
Methods
-------
None
See Also
--------
period_array : Create a new PeriodArray.
PeriodIndex : Immutable Index for period data.
Notes
-----
There are two components to a PeriodArray
- ordinals : integer ndarray
- freq : pd.tseries.offsets.Offset
The values are physically stored as a 1-D ndarray of integers. These are
called "ordinals" and represent some kind of offset from a base.
The `freq` indicates the span covered by each element of the array.
All elements in the PeriodArray have the same `freq`.
"""
# array priority higher than numpy scalars
__array_priority__ = 1000
_typ = "periodarray" # ABCPeriodArray
_scalar_type = Period
_recognized_scalars = (Period,)
_is_recognized_dtype = is_period_dtype
# Names others delegate to us
_other_ops: List[str] = []
_bool_ops = ["is_leap_year"]
_object_ops = ["start_time", "end_time", "freq"]
_field_ops = [
"year",
"month",
"day",
"hour",
"minute",
"second",
"weekofyear",
"weekday",
"week",
"dayofweek",
"dayofyear",
"quarter",
"qyear",
"days_in_month",
"daysinmonth",
]
_datetimelike_ops = _field_ops + _object_ops + _bool_ops
_datetimelike_methods = ["strftime", "to_timestamp", "asfreq"]
# --------------------------------------------------------------------
# Constructors
def __init__(self, values, freq=None, dtype=None, copy=False):
freq = validate_dtype_freq(dtype, freq)
if freq is not None:
freq = Period._maybe_convert_freq(freq)
if isinstance(values, ABCSeries):
values = values._values
if not isinstance(values, type(self)):
raise TypeError("Incorrect dtype")
elif isinstance(values, ABCPeriodIndex):
values = values._values
if isinstance(values, type(self)):
if freq is not None and freq != values.freq:
raise raise_on_incompatible(values, freq)
values, freq = values._data, values.freq
values = np.array(values, dtype="int64", copy=copy)
self._data = values
if freq is None:
raise ValueError("freq is not specified and cannot be inferred")
self._dtype = PeriodDtype(freq)
@classmethod
def _simple_new(cls, values: np.ndarray, freq=None, **kwargs) -> "PeriodArray":
# alias for PeriodArray.__init__
assertion_msg = "Should be numpy array of type i8"
assert isinstance(values, np.ndarray) and values.dtype == "i8", assertion_msg
return cls(values, freq=freq, **kwargs)
@classmethod
def _from_sequence(
cls: Type["PeriodArray"],
scalars: Union[Sequence[Optional[Period]], AnyArrayLike],
dtype: Optional[PeriodDtype] = None,
copy: bool = False,
) -> "PeriodArray":
if dtype:
freq = dtype.freq
else:
freq = None
if isinstance(scalars, cls):
validate_dtype_freq(scalars.dtype, freq)
if copy:
scalars = scalars.copy()
return scalars
periods = np.asarray(scalars, dtype=object)
if copy:
periods = periods.copy()
freq = freq or libperiod.extract_freq(periods)
ordinals = libperiod.extract_ordinals(periods, freq)
return cls(ordinals, freq=freq)
@classmethod
def _from_sequence_of_strings(
cls, strings, dtype=None, copy=False
) -> "PeriodArray":
return cls._from_sequence(strings, dtype, copy)
@classmethod
def _from_datetime64(cls, data, freq, tz=None) -> "PeriodArray":
"""
Construct a PeriodArray from a datetime64 array
Parameters
----------
data : ndarray[datetime64[ns], datetime64[ns, tz]]
freq : str or Tick
tz : tzinfo, optional
Returns
-------
PeriodArray[freq]
"""
data, freq = dt64arr_to_periodarr(data, freq, tz)
return cls(data, freq=freq)
@classmethod
def _generate_range(cls, start, end, periods, freq, fields):
periods = dtl.validate_periods(periods)
if freq is not None:
freq = Period._maybe_convert_freq(freq)
field_count = len(fields)
if start is not None or end is not None:
if field_count > 0:
raise ValueError(
"Can either instantiate from fields or endpoints, but not both"
)
subarr, freq = _get_ordinal_range(start, end, periods, freq)
elif field_count > 0:
subarr, freq = _range_from_fields(freq=freq, **fields)
else:
raise ValueError("Not enough parameters to construct Period range")
return subarr, freq
# -----------------------------------------------------------------
# DatetimeLike Interface
def _unbox_scalar(self, value: Union[Period, NaTType]) -> int:
if value is NaT:
return value.value
elif isinstance(value, self._scalar_type):
self._check_compatible_with(value)
return value.ordinal
else:
raise ValueError(f"'value' should be a Period. Got '{value}' instead.")
def _scalar_from_string(self, value: str) -> Period:
return Period(value, freq=self.freq)
def _check_compatible_with(self, other, setitem: bool = False):
if other is NaT:
return
if self.freqstr != other.freqstr:
raise raise_on_incompatible(self, other)
# --------------------------------------------------------------------
# Data / Attributes
@cache_readonly
def dtype(self) -> PeriodDtype:
return self._dtype
# error: Read-only property cannot override read-write property [misc]
@property # type: ignore
def freq(self) -> DateOffset:
"""
Return the frequency object for this PeriodArray.
"""
return self.dtype.freq
def __array__(self, dtype=None) -> np.ndarray:
if dtype == "i8":
return self.asi8
elif dtype == bool:
return ~self._isnan
# This will raise TypeError for non-object dtypes
return np.array(list(self), dtype=object)
def __arrow_array__(self, type=None):
"""
Convert myself into a pyarrow Array.
"""
import pyarrow
from pandas.core.arrays._arrow_utils import ArrowPeriodType
if type is not None:
if pyarrow.types.is_integer(type):
return pyarrow.array(self._data, mask=self.isna(), type=type)
elif isinstance(type, ArrowPeriodType):
# ensure we have the same freq
if self.freqstr != type.freq:
raise TypeError(
"Not supported to convert PeriodArray to array with different "
f"'freq' ({self.freqstr} vs {type.freq})"
)
else:
raise TypeError(
f"Not supported to convert PeriodArray to '{type}' type"
)
period_type = ArrowPeriodType(self.freqstr)
storage_array = pyarrow.array(self._data, mask=self.isna(), type="int64")
return pyarrow.ExtensionArray.from_storage(period_type, storage_array)
# --------------------------------------------------------------------
# Vectorized analogues of Period properties
year = _field_accessor(
"year",
"""
The year of the period.
""",
)
month = _field_accessor(
"month",
"""
The month as January=1, December=12.
""",
)
day = _field_accessor(
"day",
"""
The days of the period.
""",
)
hour = _field_accessor(
"hour",
"""
The hour of the period.
""",
)
minute = _field_accessor(
"minute",
"""
The minute of the period.
""",
)
second = _field_accessor(
"second",
"""
The second of the period.
""",
)
weekofyear = _field_accessor(
"week",
"""
The week ordinal of the year.
""",
)
week = weekofyear
dayofweek = _field_accessor(
"weekday",
"""
The day of the week with Monday=0, Sunday=6.
""",
)
weekday = dayofweek
dayofyear = day_of_year = _field_accessor(
"day_of_year",
"""
The ordinal day of the year.
""",
)
quarter = _field_accessor(
"quarter",
"""
The quarter of the date.
""",
)
qyear = _field_accessor("qyear")
days_in_month = _field_accessor(
"days_in_month",
"""
The number of days in the month.
""",
)
daysinmonth = days_in_month
@property
def is_leap_year(self) -> np.ndarray:
"""
Logical indicating if the date belongs to a leap year.
"""
return isleapyear_arr(np.asarray(self.year))
@property
def start_time(self):
return self.to_timestamp(how="start")
@property
def end_time(self):
return self.to_timestamp(how="end")
def to_timestamp(self, freq=None, how="start"):
"""
Cast to DatetimeArray/Index.
Parameters
----------
freq : str or DateOffset, optional
Target frequency. The default is 'D' for week or longer,
'S' otherwise.
how : {'s', 'e', 'start', 'end'}
Whether to use the start or end of the time period being converted.
Returns
-------
DatetimeArray/Index
"""
from pandas.core.arrays import DatetimeArray
how = libperiod.validate_end_alias(how)
end = how == "E"
if end:
if freq == "B" or self.freq == "B":
# roll forward to ensure we land on B date
adjust = Timedelta(1, "D") - Timedelta(1, "ns")
return self.to_timestamp(how="start") + adjust
else:
adjust = Timedelta(1, "ns")
return (self + self.freq).to_timestamp(how="start") - adjust
if freq is None:
base, mult = libfrequencies.get_freq_code(self.freq)
freq = libfrequencies.get_to_timestamp_base(base)
else:
freq = Period._maybe_convert_freq(freq)
base, mult = libfrequencies.get_freq_code(freq)
new_data = self.asfreq(freq, how=how)
new_data = libperiod.periodarr_to_dt64arr(new_data.asi8, base)
return DatetimeArray(new_data)._with_freq("infer")
# --------------------------------------------------------------------
def _time_shift(self, periods, freq=None):
"""
Shift each value by `periods`.
Note this is different from ExtensionArray.shift, which
shifts the *position* of each element, padding the end with
missing values.
Parameters
----------
periods : int
Number of periods to shift by.
freq : pandas.DateOffset, pandas.Timedelta, or str
Frequency increment to shift by.
"""
if freq is not None:
raise TypeError(
"`freq` argument is not supported for "
f"{type(self).__name__}._time_shift"
)
values = self.asi8 + periods * self.freq.n
if self._hasnans:
values[self._isnan] = iNaT
return type(self)(values, freq=self.freq)
@property
def _box_func(self):
return lambda x: Period._from_ordinal(ordinal=x, freq=self.freq)
def asfreq(self, freq=None, how: str = "E") -> "PeriodArray":
"""
Convert the Period Array/Index to the specified frequency `freq`.
Parameters
----------
freq : str
A frequency.
how : str {'E', 'S'}
Whether the elements should be aligned to the end
            or start within the period.
* 'E', 'END', or 'FINISH' for end,
* 'S', 'START', or 'BEGIN' for start.
January 31st ('END') vs. January 1st ('START') for example.
Returns
-------
Period Array/Index
Constructed with the new frequency.
Examples
--------
>>> pidx = pd.period_range('2010-01-01', '2015-01-01', freq='A')
>>> pidx
PeriodIndex(['2010', '2011', '2012', '2013', '2014', '2015'],
dtype='period[A-DEC]', freq='A-DEC')
>>> pidx.asfreq('M')
PeriodIndex(['2010-12', '2011-12', '2012-12', '2013-12', '2014-12',
'2015-12'], dtype='period[M]', freq='M')
>>> pidx.asfreq('M', how='S')
PeriodIndex(['2010-01', '2011-01', '2012-01', '2013-01', '2014-01',
'2015-01'], dtype='period[M]', freq='M')
"""
how = libperiod.validate_end_alias(how)
freq = Period._maybe_convert_freq(freq)
base1, mult1 = libfrequencies.get_freq_code(self.freq)
base2, mult2 = libfrequencies.get_freq_code(freq)
asi8 = self.asi8
# mult1 can't be negative or 0
end = how == "E"
if end:
ordinal = asi8 + mult1 - 1
else:
ordinal = asi8
new_data = period_asfreq_arr(ordinal, base1, base2, end)
if self._hasnans:
new_data[self._isnan] = iNaT
return type(self)(new_data, freq=freq)
# ------------------------------------------------------------------
# Rendering Methods
def _formatter(self, boxed: bool = False):
if boxed:
return str
return "'{}'".format
def _format_native_types(self, na_rep="NaT", date_format=None, **kwargs):
"""
actually format my specific types
"""
values = self.astype(object)
if date_format:
formatter = lambda dt: dt.strftime(date_format)
else:
formatter = lambda dt: str(dt)
if self._hasnans:
mask = self._isnan
values[mask] = na_rep
imask = ~mask
values[imask] = np.array([formatter(dt) for dt in values[imask]])
else:
values = np.array([formatter(dt) for dt in values])
return values
# ------------------------------------------------------------------
def astype(self, dtype, copy: bool = True):
# We handle Period[T] -> Period[U]
# Our parent handles everything else.
dtype = pandas_dtype(dtype)
if is_period_dtype(dtype):
return self.asfreq(dtype.freq)
return super().astype(dtype, copy=copy)
# ------------------------------------------------------------------
# Arithmetic Methods
def _sub_datelike(self, other):
assert other is not NaT
return NotImplemented
def _sub_period(self, other):
# If the operation is well-defined, we return an object-Index
# of DateOffsets. Null entries are filled with pd.NaT
self._check_compatible_with(other)
asi8 = self.asi8
new_data = asi8 - other.ordinal
new_data = np.array([self.freq * x for x in new_data])
if self._hasnans:
new_data[self._isnan] = NaT
return new_data
def _sub_period_array(self, other):
"""
Subtract a Period Array/Index from self. This is only valid if self
is itself a Period Array/Index, raises otherwise. Both objects must
have the same frequency.
Parameters
----------
other : PeriodIndex or PeriodArray
Returns
-------
result : np.ndarray[object]
Array of DateOffset objects; nulls represented by NaT.
"""
if self.freq != other.freq:
msg = DIFFERENT_FREQ.format(
cls=type(self).__name__, own_freq=self.freqstr, other_freq=other.freqstr
)
raise IncompatibleFrequency(msg)
new_values = algos.checked_add_with_arr(
self.asi8, -other.asi8, arr_mask=self._isnan, b_mask=other._isnan
)
new_values = np.array([self.freq.base * x for x in new_values])
if self._hasnans or other._hasnans:
mask = (self._isnan) | (other._isnan)
new_values[mask] = NaT
return new_values
def _addsub_int_array(
self, other: np.ndarray, op: Callable[[Any, Any], Any],
) -> "PeriodArray":
"""
Add or subtract array of integers; equivalent to applying
`_time_shift` pointwise.
Parameters
----------
other : np.ndarray[integer-dtype]
op : {operator.add, operator.sub}
Returns
-------
result : PeriodArray
"""
assert op in [operator.add, operator.sub]
if op is operator.sub:
other = -other
res_values = algos.checked_add_with_arr(self.asi8, other, arr_mask=self._isnan)
res_values = res_values.view("i8")
res_values[self._isnan] = iNaT
return type(self)(res_values, freq=self.freq)
def _add_offset(self, other: DateOffset):
assert not isinstance(other, Tick)
if other.base != self.freq.base:
raise raise_on_incompatible(self, other)
# Note: when calling parent class's _add_timedeltalike_scalar,
# it will call delta_to_nanoseconds(delta). Because delta here
# is an integer, delta_to_nanoseconds will return it unchanged.
result = super()._add_timedeltalike_scalar(other.n)
return type(self)(result, freq=self.freq)
def _add_timedeltalike_scalar(self, other):
"""
Parameters
----------
other : timedelta, Tick, np.timedelta64
Returns
-------
PeriodArray
"""
if not isinstance(self.freq, Tick):
# We cannot add timedelta-like to non-tick PeriodArray
raise raise_on_incompatible(self, other)
if notna(other):
# special handling for np.timedelta64("NaT"), avoid calling
# _check_timedeltalike_freq_compat as that would raise TypeError
other = self._check_timedeltalike_freq_compat(other)
# Note: when calling parent class's _add_timedeltalike_scalar,
# it will call delta_to_nanoseconds(delta). Because delta here
# is an integer, delta_to_nanoseconds will return it unchanged.
return super()._add_timedeltalike_scalar(other)
def _add_timedelta_arraylike(self, other):
"""
Parameters
----------
other : TimedeltaArray or ndarray[timedelta64]
Returns
-------
result : ndarray[int64]
"""
if not isinstance(self.freq, Tick):
# We cannot add timedelta-like to non-tick PeriodArray
raise TypeError(
f"Cannot add or subtract timedelta64[ns] dtype from {self.dtype}"
)
if not np.all(isna(other)):
delta = self._check_timedeltalike_freq_compat(other)
else:
# all-NaT TimedeltaIndex is equivalent to a single scalar td64 NaT
return self + np.timedelta64("NaT")
ordinals = self._addsub_int_array(delta, operator.add).asi8
return type(self)(ordinals, dtype=self.dtype)
def _check_timedeltalike_freq_compat(self, other):
"""
Arithmetic operations with timedelta-like scalars or array `other`
are only valid if `other` is an integer multiple of `self.freq`.
If the operation is valid, find that integer multiple. Otherwise,
raise because the operation is invalid.
Parameters
----------
other : timedelta, np.timedelta64, Tick,
ndarray[timedelta64], TimedeltaArray, TimedeltaIndex
Returns
-------
multiple : int or ndarray[int64]
Raises
------
IncompatibleFrequency
"""
assert isinstance(self.freq, Tick) # checked by calling function
base_nanos = self.freq.base.nanos
if isinstance(other, (timedelta, np.timedelta64, Tick)):
nanos = delta_to_nanoseconds(other)
elif isinstance(other, np.ndarray):
# numpy timedelta64 array; all entries must be compatible
assert other.dtype.kind == "m"
if other.dtype != TD64NS_DTYPE:
# i.e. non-nano unit
# TODO: disallow unit-less timedelta64
other = other.astype(TD64NS_DTYPE)
nanos = other.view("i8")
else:
# TimedeltaArray/Index
nanos = other.asi8
if np.all(nanos % base_nanos == 0):
            # nanos being added is an integer multiple of the
            # base frequency of self.freq
delta = nanos // base_nanos
# delta is the integer (or integer-array) number of periods
            # by which self will be shifted.
return delta
raise raise_on_incompatible(self, other)
def raise_on_incompatible(left, right):
"""
Helper function to render a consistent error message when raising
IncompatibleFrequency.
Parameters
----------
left : PeriodArray
right : None, DateOffset, Period, ndarray, or timedelta-like
Returns
-------
IncompatibleFrequency
Exception to be raised by the caller.
"""
# GH#24283 error message format depends on whether right is scalar
if isinstance(right, (np.ndarray, ABCTimedeltaArray)) or right is None:
other_freq = None
elif isinstance(right, (ABCPeriodIndex, PeriodArray, Period, DateOffset)):
other_freq = right.freqstr
else:
other_freq = delta_to_tick(Timedelta(right)).freqstr
msg = DIFFERENT_FREQ.format(
cls=type(left).__name__, own_freq=left.freqstr, other_freq=other_freq
)
return IncompatibleFrequency(msg)
def period_array(
data: Union[Sequence[Optional[Period]], AnyArrayLike],
freq: Optional[Union[str, Tick]] = None,
copy: bool = False,
) -> PeriodArray:
"""
Construct a new PeriodArray from a sequence of Period scalars.
Parameters
----------
data : Sequence of Period objects
A sequence of Period objects. These are required to all have
        the same ``freq``. Missing values can be indicated by ``None``
or ``pandas.NaT``.
freq : str, Tick, or Offset
The frequency of every element of the array. This can be specified
to avoid inferring the `freq` from `data`.
copy : bool, default False
Whether to ensure a copy of the data is made.
Returns
-------
PeriodArray
See Also
--------
PeriodArray
pandas.PeriodIndex
Examples
--------
>>> period_array([pd.Period('2017', freq='A'),
... pd.Period('2018', freq='A')])
<PeriodArray>
['2017', '2018']
Length: 2, dtype: period[A-DEC]
>>> period_array([pd.Period('2017', freq='A'),
... pd.Period('2018', freq='A'),
... pd.NaT])
<PeriodArray>
['2017', '2018', 'NaT']
Length: 3, dtype: period[A-DEC]
Integers that look like years are handled
>>> period_array([2000, 2001, 2002], freq='D')
<PeriodArray>
['2000-01-01', '2001-01-01', '2002-01-01']
Length: 3, dtype: period[D]
Datetime-like strings may also be passed
>>> period_array(['2000-Q1', '2000-Q2', '2000-Q3', '2000-Q4'], freq='Q')
<PeriodArray>
['2000Q1', '2000Q2', '2000Q3', '2000Q4']
Length: 4, dtype: period[Q-DEC]
"""
data_dtype = getattr(data, "dtype", None)
if is_datetime64_dtype(data_dtype):
return PeriodArray._from_datetime64(data, freq)
if is_period_dtype(data_dtype):
return PeriodArray(data, freq)
# other iterable of some kind
if not isinstance(data, (np.ndarray, list, tuple, ABCSeries)):
data = list(data)
data = np.asarray(data)
dtype: Optional[PeriodDtype]
if freq:
dtype = PeriodDtype(freq)
else:
dtype = None
if is_float_dtype(data) and len(data) > 0:
raise TypeError("PeriodIndex does not allow floating point in construction")
data = ensure_object(data)
return PeriodArray._from_sequence(data, dtype=dtype)
def validate_dtype_freq(dtype, freq):
"""
If both a dtype and a freq are available, ensure they match. If only
dtype is available, extract the implied freq.
Parameters
----------
dtype : dtype
freq : DateOffset or None
Returns
-------
freq : DateOffset
Raises
------
ValueError : non-period dtype
IncompatibleFrequency : mismatch between dtype and freq
"""
if freq is not None:
freq = to_offset(freq)
if dtype is not None:
dtype = pandas_dtype(dtype)
if not is_period_dtype(dtype):
raise ValueError("dtype must be PeriodDtype")
if freq is None:
freq = dtype.freq
elif freq != dtype.freq:
raise IncompatibleFrequency("specified freq and dtype are different")
return freq
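# Illustrative: validate_dtype_freq(PeriodDtype("D"), None) extracts and returns
# the daily offset from the dtype, while validate_dtype_freq(PeriodDtype("D"), "M")
# raises IncompatibleFrequency because the two frequencies disagree.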
def dt64arr_to_periodarr(data, freq, tz=None):
"""
    Convert a datetime-like array to an array of Period ordinals.
Parameters
----------
data : Union[Series[datetime64[ns]], DatetimeIndex, ndarray[datetime64ns]]
freq : Optional[Union[str, Tick]]
Must match the `freq` on the `data` if `data` is a DatetimeIndex
or Series.
tz : Optional[tzinfo]
Returns
-------
ordinals : ndarray[int]
freq : Tick
The frequency extracted from the Series or DatetimeIndex if that's
used.
"""
if data.dtype != np.dtype("M8[ns]"):
raise ValueError(f"Wrong dtype: {data.dtype}")
if freq is None:
if isinstance(data, ABCIndexClass):
data, freq = data._values, data.freq
elif isinstance(data, ABCSeries):
data, freq = data._values, data.dt.freq
freq = Period._maybe_convert_freq(freq)
if isinstance(data, (ABCIndexClass, ABCSeries)):
data = data._values
base, mult = libfrequencies.get_freq_code(freq)
return libperiod.dt64arr_to_periodarr(data.view("i8"), base, tz), freq
def _get_ordinal_range(start, end, periods, freq, mult=1):
if com.count_not_none(start, end, periods) != 2:
raise ValueError(
"Of the three parameters: start, end, and periods, "
"exactly two must be specified"
)
if freq is not None:
_, mult = libfrequencies.get_freq_code(freq)
if start is not None:
start = Period(start, freq)
if end is not None:
end = Period(end, freq)
is_start_per = isinstance(start, Period)
is_end_per = isinstance(end, Period)
if is_start_per and is_end_per and start.freq != end.freq:
raise ValueError("start and end must have same freq")
if start is NaT or end is NaT:
raise ValueError("start and end must not be NaT")
if freq is None:
if is_start_per:
freq = start.freq
elif is_end_per:
freq = end.freq
else: # pragma: no cover
raise ValueError("Could not infer freq from start/end")
if periods is not None:
periods = periods * mult
if start is None:
data = np.arange(
end.ordinal - periods + mult, end.ordinal + 1, mult, dtype=np.int64
)
else:
data = np.arange(
start.ordinal, start.ordinal + periods, mult, dtype=np.int64
)
else:
data = np.arange(start.ordinal, end.ordinal + 1, mult, dtype=np.int64)
return data, freq
def _range_from_fields(
year=None,
month=None,
quarter=None,
day=None,
hour=None,
minute=None,
second=None,
freq=None,
):
if hour is None:
hour = 0
if minute is None:
minute = 0
if second is None:
second = 0
if day is None:
day = 1
ordinals = []
if quarter is not None:
if freq is None:
freq = "Q"
base = libfrequencies.FreqGroup.FR_QTR
else:
base, mult = libfrequencies.get_freq_code(freq)
if base != libfrequencies.FreqGroup.FR_QTR:
raise AssertionError("base must equal FR_QTR")
year, quarter = _make_field_arrays(year, quarter)
for y, q in zip(year, quarter):
y, m = libperiod.quarter_to_myear(y, q, freq)
val = libperiod.period_ordinal(y, m, 1, 1, 1, 1, 0, 0, base)
ordinals.append(val)
else:
base, mult = libfrequencies.get_freq_code(freq)
arrays = _make_field_arrays(year, month, day, hour, minute, second)
for y, mth, d, h, mn, s in zip(*arrays):
ordinals.append(libperiod.period_ordinal(y, mth, d, h, mn, s, 0, 0, base))
return np.array(ordinals, dtype=np.int64), freq
def _make_field_arrays(*fields):
length = None
for x in fields:
if isinstance(x, (list, np.ndarray, ABCSeries)):
if length is not None and len(x) != length:
raise ValueError("Mismatched Period array lengths")
elif length is None:
length = len(x)
arrays = [
np.asarray(x)
if isinstance(x, (np.ndarray, list, ABCSeries))
else np.repeat(x, length)
for x in fields
]
return arrays
|
import os
print('/'.join(os.path.abspath(__file__).split('/')[:-2]))
|
from django.conf import settings
from django.contrib.auth import login as auth_login, REDIRECT_FIELD_NAME
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import resolve, Resolver404, reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.utils.cache import patch_cache_control
from django.utils.http import is_safe_url, urlquote
from django.utils.timezone import now
from django.utils.translation import get_language
from django.views.decorators.http import require_POST
from cms.apphook_pool import apphook_pool
from cms.appresolver import get_app_urls
from cms.cache.page import get_page_cache
from cms.forms.login import CMSToolbarLoginForm
from cms.page_rendering import _handle_no_page, render_page, render_object_structure, _render_welcome_page
from cms.utils import get_language_code, get_language_from_request, get_cms_setting
from cms.utils.i18n import (get_fallback_languages, force_language, get_public_languages,
get_redirect_on_fallback, get_language_list,
is_language_prefix_patterns_used)
from cms.utils.page import get_pages_queryset
from cms.utils.page_permissions import user_can_change_page
from cms.utils.page_resolver import get_page_from_request
def details(request, slug):
"""
The main view of the Django-CMS! Takes a request and a slug, renders the
page.
"""
response_timestamp = now()
if get_cms_setting("PAGE_CACHE") and (
not hasattr(request, 'toolbar') or (
not request.toolbar.edit_mode_active and
not request.toolbar.show_toolbar and
not request.user.is_authenticated()
)
):
cache_content = get_page_cache(request)
if cache_content is not None:
content, headers, expires_datetime = cache_content
response = HttpResponse(content)
response._headers = headers
# Recalculate the max-age header for this cached response
max_age = int(
(expires_datetime - response_timestamp).total_seconds() + 0.5)
patch_cache_control(response, max_age=max_age)
return response
# Get a Page model object from the request
page = get_page_from_request(request, use_path=slug)
draft_pages = get_pages_queryset(draft=True)
if not page and not slug and not draft_pages.exists():
# render the welcome page if the requested path is root "/"
# and there's no pages
return _render_welcome_page(request)
if not page:
# raise 404
_handle_no_page(request)
request.current_page = page
current_language = request.GET.get('language', None)
if not current_language:
current_language = request.POST.get('language', None)
if current_language:
current_language = get_language_code(current_language)
if current_language not in get_language_list(page.site_id):
current_language = None
if current_language is None:
current_language = get_language_code(getattr(request, 'LANGUAGE_CODE', None))
if current_language:
current_language = get_language_code(current_language)
if current_language not in get_language_list(page.site_id):
current_language = None
if current_language is None:
current_language = get_language_code(get_language())
# Check that the current page is available in the desired (current) language
available_languages = []
# this will return all languages in draft mode, and published only in live mode
page_languages = list(page.get_published_languages())
if hasattr(request, 'user') and request.user.is_staff:
user_languages = get_language_list()
else:
user_languages = get_public_languages()
for frontend_lang in user_languages:
if frontend_lang in page_languages:
available_languages.append(frontend_lang)
# Check that the language is in FRONTEND_LANGUAGES:
own_urls = [
'http%s://%s%s' % ('s' if request.is_secure() else '', request.get_host(), request.path),
'/%s' % request.path,
request.path,
]
if current_language not in user_languages:
        # are we on root?
if not slug:
            # redirect to supported language
languages = []
for language in available_languages:
languages.append((language, language))
if languages:
# get supported language
new_language = get_language_from_request(request)
if new_language in get_public_languages():
with force_language(new_language):
pages_root = reverse('pages-root')
if (hasattr(request, 'toolbar') and request.user.is_staff and request.toolbar.edit_mode_active):
request.toolbar.redirect_url = pages_root
elif pages_root not in own_urls:
return HttpResponseRedirect(pages_root)
elif not hasattr(request, 'toolbar') or not request.toolbar.redirect_url:
# raise 404
_handle_no_page(request)
else:
# raise 404
_handle_no_page(request)
if current_language not in available_languages:
# If we didn't find the required page in the requested (current)
# language, let's try to find a fallback
found = False
for alt_lang in get_fallback_languages(current_language):
if alt_lang in available_languages:
if get_redirect_on_fallback(current_language) or slug == "":
with force_language(alt_lang):
path = page.get_absolute_url(language=alt_lang, fallback=True)
# In the case where the page is not available in the
# preferred language, *redirect* to the fallback page. This
                        # is a design decision (instead of rendering in place).
if (hasattr(request, 'toolbar') and request.user.is_staff
and request.toolbar.edit_mode_active):
request.toolbar.redirect_url = path
elif path not in own_urls:
return HttpResponseRedirect(path)
else:
found = True
if not found and (not hasattr(request, 'toolbar') or not request.toolbar.redirect_url):
            # There is a page object but we can't find a proper language to render it
_handle_no_page(request)
else:
page_path = page.get_absolute_url(language=current_language)
page_slug = page.get_path(language=current_language) or page.get_slug(language=current_language)
if slug and slug != page_slug and request.path[:len(page_path)] != page_path:
            # The current language does not match its slug.
# Redirect to the current language.
if hasattr(request, 'toolbar') and request.user.is_staff and request.toolbar.edit_mode_active:
request.toolbar.redirect_url = page_path
else:
return HttpResponseRedirect(page_path)
if apphook_pool.get_apphooks():
# There are apphooks in the pool. Let's see if there is one for the
# current page
# since we always have a page at this point, applications_page_check is
# pointless
# page = applications_page_check(request, page, slug)
# Check for apphooks! This time for real!
app_urls = page.get_application_urls(current_language, False)
skip_app = False
if (not page.is_published(current_language) and hasattr(request, 'toolbar')
and request.toolbar.edit_mode_active):
skip_app = True
if app_urls and not skip_app:
app = apphook_pool.get_apphook(app_urls)
pattern_list = []
if app:
for urlpatterns in get_app_urls(app.get_urls(page, current_language)):
pattern_list += urlpatterns
try:
view, args, kwargs = resolve('/', tuple(pattern_list))
return view(request, *args, **kwargs)
except Resolver404:
pass
# Check if the page has a redirect url defined for this language.
redirect_url = page.get_redirect(language=current_language)
if redirect_url:
if (is_language_prefix_patterns_used() and redirect_url[0] == "/"
and not redirect_url.startswith('/%s/' % current_language)):
# add language prefix to url
redirect_url = "/%s/%s" % (current_language, redirect_url.lstrip("/"))
# prevent redirect to self
if hasattr(request, 'toolbar') and request.user.is_staff and request.toolbar.edit_mode_active:
request.toolbar.redirect_url = redirect_url
elif redirect_url not in own_urls:
return HttpResponseRedirect(redirect_url)
# permission checks
if page.login_required and not request.user.is_authenticated():
return redirect_to_login(urlquote(request.get_full_path()), settings.LOGIN_URL)
if hasattr(request, 'toolbar'):
request.toolbar.set_object(page)
structure_requested = get_cms_setting('CMS_TOOLBAR_URL__BUILD') in request.GET
if user_can_change_page(request.user, page) and structure_requested:
return render_object_structure(request, page)
response = render_page(request, page, current_language=current_language, slug=slug)
return response
@require_POST
def login(request):
redirect_to = request.GET.get(REDIRECT_FIELD_NAME)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = reverse("pages-root")
if request.user.is_authenticated():
return HttpResponseRedirect(redirect_to)
form = CMSToolbarLoginForm(request=request, data=request.POST)
if form.is_valid():
auth_login(request, form.user_cache)
else:
redirect_to += u'?cms_toolbar_login_error=1'
return HttpResponseRedirect(redirect_to)
|
from . import BOTH
# NOTE: import the module under an alias so the `time` filter function defined
# below does not shadow it (the original `from time import time` binding was
# rebound by the `def time(...)` statement, breaking both filters).
import time as _time
def time_no_fracs(target, offset=0, when=BOTH):
    def func(dizzy_iterator):
        dizzy_iterator[target] = int(_time.time() + offset)
    return (func, when)
def time(target, offset=0, when=BOTH):
    def func(dizzy_iterator):
        now = _time.time() + offset
        secs = int(now)
        fracs = int((now - secs) * 65536)
        # secs is packed in 48 bits (2 ** 48 - 1 = 281474976710655) and fracs is packed in 18 bits (2 ** 18 - 1 = 262143)
        dizzy_iterator[target] = ((secs & 281474976710655) << 18) | (fracs & 262143)
    return (func, when)
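# Illustrative round-trip for the 48/18-bit packing above (hypothetical values):
#     packed = ((1600000000 & 281474976710655) << 18) | (32768 & 262143)
#     packed >> 18     -> 1600000000  (the seconds)
#     packed & 262143  -> 32768       (the fraction)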
|
"""Clean up all docker caches."""
import subprocess
print(subprocess.check_output(['docker', 'system', 'prune', '-fa']))
|
import datetime
import os
from collections import defaultdict
from django import forms
from django.conf import settings
import basket
import happyforms
import waffle
from tower import ugettext as _, ugettext_lazy as _lazy
import amo
from amo.utils import slug_validator
from mkt.comm.utils import create_comm_note
from mkt.constants import APP_FEATURES, comm, FREE_PLATFORMS, PAID_PLATFORMS
from mkt.developers.forms import verify_app_domain
from mkt.files.models import FileUpload
from mkt.files.utils import parse_addon
from mkt.reviewers.models import RereviewQueue
from mkt.translations.fields import TransField
from mkt.translations.forms import TranslationFormMixin
from mkt.translations.widgets import TransInput, TransTextarea
from mkt.users.models import UserNotification
from mkt.users.notifications import app_surveys
from mkt.webapps.models import Addon, AppFeatures, BlacklistedSlug, Webapp
def mark_for_rereview(addon, added_devices, removed_devices):
msg = _(u'Device(s) changed: {0}').format(', '.join(
[_(u'Added {0}').format(unicode(amo.DEVICE_TYPES[d].name))
for d in added_devices] +
[_(u'Removed {0}').format(unicode(amo.DEVICE_TYPES[d].name))
for d in removed_devices]))
RereviewQueue.flag(addon, amo.LOG.REREVIEW_DEVICES_ADDED, msg)
def mark_for_rereview_features_change(addon, added_features, removed_features):
# L10n: {0} is the list of requirements changes.
msg = _(u'Requirements changed: {0}').format(', '.join(
[_(u'Added {0}').format(f) for f in added_features] +
[_(u'Removed {0}').format(f) for f in removed_features]))
RereviewQueue.flag(addon, amo.LOG.REREVIEW_FEATURES_CHANGED, msg)
class DeviceTypeForm(happyforms.Form):
ERRORS = {
'both': _lazy(u'Cannot be free and paid.'),
'none': _lazy(u'Please select a device.'),
'packaged': _lazy(u'Packaged apps are not yet supported for those '
u'platforms.'),
}
free_platforms = forms.MultipleChoiceField(
choices=FREE_PLATFORMS(), required=False)
paid_platforms = forms.MultipleChoiceField(
choices=PAID_PLATFORMS(), required=False)
def save(self, addon, is_paid):
data = self.cleaned_data[
'paid_platforms' if is_paid else 'free_platforms']
submitted_data = self.get_devices(t.split('-', 1)[1] for t in data)
new_types = set(dev.id for dev in submitted_data)
old_types = set(amo.DEVICE_TYPES[x.id].id for x in addon.device_types)
added_devices = new_types - old_types
removed_devices = old_types - new_types
for d in added_devices:
addon.addondevicetype_set.create(device_type=d)
for d in removed_devices:
addon.addondevicetype_set.filter(device_type=d).delete()
# Send app to re-review queue if public and new devices are added.
if added_devices and addon.status in amo.WEBAPPS_APPROVED_STATUSES:
mark_for_rereview(addon, added_devices, removed_devices)
def _add_error(self, msg):
self._errors['free_platforms'] = self._errors['paid_platforms'] = (
self.ERRORS[msg])
def _get_combined(self):
devices = (self.cleaned_data.get('free_platforms', []) +
self.cleaned_data.get('paid_platforms', []))
return set(d.split('-', 1)[1] for d in devices)
def _set_packaged_errors(self):
"""Add packaged-app submission errors for incompatible platforms."""
devices = self._get_combined()
bad_android = (
not waffle.flag_is_active(self.request, 'android-packaged') and
('android-mobile' in devices or 'android-tablet' in devices)
)
bad_desktop = (
not waffle.flag_is_active(self.request, 'desktop-packaged') and
'desktop' in devices
)
if bad_android or bad_desktop:
self._errors['free_platforms'] = self._errors['paid_platforms'] = (
self.ERRORS['packaged'])
def clean(self):
data = self.cleaned_data
paid = data.get('paid_platforms', [])
free = data.get('free_platforms', [])
# Check that they didn't select both.
if free and paid:
self._add_error('both')
return data
# Check that they selected one.
if not free and not paid:
self._add_error('none')
return data
return super(DeviceTypeForm, self).clean()
def get_devices(self, source=None):
"""Returns a device based on the requested free or paid."""
if source is None:
source = self._get_combined()
platforms = {'firefoxos': amo.DEVICE_GAIA,
'desktop': amo.DEVICE_DESKTOP,
'android-mobile': amo.DEVICE_MOBILE,
'android-tablet': amo.DEVICE_TABLET}
return map(platforms.get, source)
def is_paid(self):
return bool(self.cleaned_data.get('paid_platforms', False))
def get_paid(self):
"""Returns the premium type. Should not be used if the form is used to
modify an existing app.
"""
return amo.ADDON_PREMIUM if self.is_paid() else amo.ADDON_FREE
class DevAgreementForm(happyforms.Form):
read_dev_agreement = forms.BooleanField(label=_lazy(u'Agree and Continue'),
widget=forms.HiddenInput)
newsletter = forms.BooleanField(required=False, label=app_surveys.label,
widget=forms.CheckboxInput)
def __init__(self, *args, **kw):
self.instance = kw.pop('instance')
self.request = kw.pop('request')
super(DevAgreementForm, self).__init__(*args, **kw)
def save(self):
self.instance.read_dev_agreement = datetime.datetime.now()
self.instance.save()
if self.cleaned_data.get('newsletter'):
UserNotification.update_or_create(user=self.instance,
notification_id=app_surveys.id, update={'enabled': True})
basket.subscribe(self.instance.email,
'app-dev',
format='H',
country=self.request.REGION.slug,
lang=self.request.LANG,
source_url=os.path.join(settings.SITE_URL,
'developers/submit'))
class NewWebappVersionForm(happyforms.Form):
upload_error = _lazy(u'There was an error with your upload. '
u'Please try again.')
upload = forms.ModelChoiceField(widget=forms.HiddenInput,
queryset=FileUpload.objects.filter(valid=True),
error_messages={'invalid_choice': upload_error})
def __init__(self, *args, **kw):
request = kw.pop('request', None)
self.addon = kw.pop('addon', None)
self._is_packaged = kw.pop('is_packaged', False)
super(NewWebappVersionForm, self).__init__(*args, **kw)
if (not waffle.flag_is_active(request, 'allow-b2g-paid-submission')
and 'paid_platforms' in self.fields):
del self.fields['paid_platforms']
def clean(self):
data = self.cleaned_data
if 'upload' not in self.cleaned_data:
self._errors['upload'] = self.upload_error
return
if self.is_packaged():
# Now run the packaged app check, done in clean, because
# clean_packaged needs to be processed first.
try:
pkg = parse_addon(data['upload'], self.addon)
except forms.ValidationError, e:
self._errors['upload'] = self.error_class(e.messages)
return
# Collect validation errors so we can display them at once.
errors = []
ver = pkg.get('version')
if (ver and self.addon and
self.addon.versions.filter(version=ver).exists()):
errors.append(_(u'Version %s already exists.') % ver)
origin = pkg.get('origin')
if origin:
try:
verify_app_domain(origin, packaged=True,
exclude=self.addon)
except forms.ValidationError, e:
errors.append(e.message)
if self.addon and origin != self.addon.app_domain:
errors.append(_('Changes to "origin" are not allowed.'))
if errors:
self._errors['upload'] = self.error_class(errors)
return
else:
# Throw an error if this is a dupe.
# (JS sets manifest as `upload.name`.)
try:
verify_app_domain(data['upload'].name)
except forms.ValidationError, e:
self._errors['upload'] = self.error_class(e.messages)
return
return data
def is_packaged(self):
return self._is_packaged
class NewWebappForm(DeviceTypeForm, NewWebappVersionForm):
upload = forms.ModelChoiceField(widget=forms.HiddenInput,
queryset=FileUpload.objects.filter(valid=True),
error_messages={'invalid_choice': _lazy(
u'There was an error with your upload. Please try again.')})
packaged = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request', None)
super(NewWebappForm, self).__init__(*args, **kwargs)
if 'paid_platforms' in self.fields:
self.fields['paid_platforms'].choices = PAID_PLATFORMS(
self.request)
def _add_error(self, msg):
self._errors['free_platforms'] = self._errors['paid_platforms'] = (
self.ERRORS[msg])
def clean(self):
data = super(NewWebappForm, self).clean()
if not data:
return
if self.is_packaged():
self._set_packaged_errors()
if self._errors.get('free_platforms'):
return
return data
def is_packaged(self):
return self._is_packaged or self.cleaned_data.get('packaged', False)
class AppDetailsBasicForm(TranslationFormMixin, happyforms.ModelForm):
"""Form for "Details" submission step."""
PUBLISH_CHOICES = (
(amo.PUBLISH_IMMEDIATE,
_lazy(u'Publish my app and make it visible to everyone in the '
u'Marketplace and include it in search results.')),
(amo.PUBLISH_PRIVATE,
_lazy(u'Do not publish my app. Notify me and I will adjust app '
u'visibility after it is approved.')),
)
app_slug = forms.CharField(max_length=30,
widget=forms.TextInput(attrs={'class': 'm'}))
description = TransField(
label=_lazy(u'Description:'),
help_text=_lazy(u'This description will appear on the details page.'),
widget=TransTextarea(attrs={'rows': 4}))
privacy_policy = TransField(
label=_lazy(u'Privacy Policy:'),
widget=TransTextarea(attrs={'rows': 6}),
help_text=_lazy(
u"A privacy policy that explains what data is transmitted from a "
u"user's computer and how it is used is required."))
homepage = TransField.adapt(forms.URLField)(
label=_lazy(u'Homepage:'), required=False,
widget=TransInput(attrs={'class': 'full'}),
help_text=_lazy(
u'If your app has another homepage, enter its address here.'))
support_url = TransField.adapt(forms.URLField)(
label=_lazy(u'Support Website:'), required=False,
widget=TransInput(attrs={'class': 'full'}),
help_text=_lazy(
u'If your app has a support website or forum, enter its address '
u'here.'))
support_email = TransField.adapt(forms.EmailField)(
label=_lazy(u'Support Email:'),
widget=TransInput(attrs={'class': 'full'}),
help_text=_lazy(
u'This email address will be listed publicly on the Marketplace '
u'and used by end users to contact you with support issues. This '
u'email address will be listed publicly on your app details page.'))
flash = forms.TypedChoiceField(
label=_lazy(u'Does your app require Flash support?'),
required=False, coerce=lambda x: bool(int(x)),
initial=0, widget=forms.RadioSelect,
choices=((1, _lazy(u'Yes')),
(0, _lazy(u'No'))))
notes = forms.CharField(
label=_lazy(u'Your comments for reviewers'), required=False,
widget=forms.Textarea(attrs={'rows': 2}),
help_text=_lazy(
u'Your app will be reviewed by Mozilla before it becomes publicly '
u'listed on the Marketplace. Enter any special instructions for '
u'the app reviewers here.'))
publish_type = forms.TypedChoiceField(
label=_lazy(u'Once your app is approved, choose a publishing option:'),
choices=PUBLISH_CHOICES, initial=amo.PUBLISH_IMMEDIATE,
widget=forms.RadioSelect())
class Meta:
model = Addon
fields = ('app_slug', 'description', 'privacy_policy', 'homepage',
'support_url', 'support_email', 'publish_type')
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(AppDetailsBasicForm, self).__init__(*args, **kwargs)
def clean_app_slug(self):
slug = self.cleaned_data['app_slug']
slug_validator(slug, lower=False)
if slug != self.instance.app_slug:
if Webapp.objects.filter(app_slug=slug).exists():
raise forms.ValidationError(
_('This slug is already in use. Please choose another.'))
if BlacklistedSlug.blocked(slug):
raise forms.ValidationError(
_('The slug cannot be "%s". Please choose another.'
% slug))
return slug.lower()
def save(self, *args, **kw):
if self.data['notes']:
create_comm_note(self.instance, self.instance.versions.latest(),
self.request.user, self.data['notes'],
note_type=comm.SUBMISSION)
self.instance = super(AppDetailsBasicForm, self).save(commit=True)
uses_flash = self.cleaned_data.get('flash')
af = self.instance.get_latest_file()
if af is not None:
af.update(uses_flash=bool(uses_flash))
return self.instance
class AppFeaturesForm(happyforms.ModelForm):
class Meta:
exclude = ['version']
model = AppFeatures
def __init__(self, *args, **kwargs):
super(AppFeaturesForm, self).__init__(*args, **kwargs)
if self.instance:
self.initial_features = sorted(self.instance.to_keys())
else:
self.initial_features = None
def all_fields(self):
"""
        Degeneratorizes self.__iter__() (the fields on the form) into a list.
        This allows further manipulation of the fields, e.g. to display a
        subset of fields or to order them in a specific way.
"""
return [f for f in self.__iter__()]
def required_api_fields(self):
"""
All fields on the form, alphabetically sorted by help text.
"""
return sorted(self.all_fields(), key=lambda x: x.help_text)
def get_tooltip(self, field):
field_id = field.name.split('_', 1)[1].upper()
return (unicode(APP_FEATURES[field_id].get('description') or '') if
field_id in APP_FEATURES else None)
def _changed_features(self):
old_features = defaultdict.fromkeys(self.initial_features, True)
old_features = set(unicode(f) for f
in AppFeatures(**old_features).to_list())
new_features = set(unicode(f) for f in self.instance.to_list())
added_features = new_features - old_features
removed_features = old_features - new_features
return added_features, removed_features
def save(self, *args, **kwargs):
mark_for_rereview = kwargs.pop('mark_for_rereview', True)
addon = self.instance.version.addon
rval = super(AppFeaturesForm, self).save(*args, **kwargs)
if (self.instance and mark_for_rereview and
addon.status in amo.WEBAPPS_APPROVED_STATUSES and
sorted(self.instance.to_keys()) != self.initial_features):
added_features, removed_features = self._changed_features()
mark_for_rereview_features_change(addon,
added_features,
removed_features)
return rval
|
"""
File for specifying the filters handled by mailhandler.
"""
import helpers as h
import sys
sys.path.append('/path/to/django/project')
from django.core.management import setup_environ
from django_project import settings
setup_environ(settings)
from django_app import models
def run_filter(instance):
"""
    The run_filter function should be retained; all your filters should go under it.
"""
# Sample filters
# We filter mail based on the sender and 'failure' text in the body
    # grep for lines matching certain keywords, split each line using the ':' delimiter,
    # add it to a list, and update a MySQL database using the Django libraries
t = []
if (h.contains(instance.sender, 'alerter@xyz.net') and
h.contains(instance.body, 'failure')):
        for each_line in h.line_match(instance.body, ['URL', 'Project']):
t.append(each_line.split(': ')[1])
print "Alert for %(url)s belonging to %(project)s\n" % \
{"url":t[0], "project":t[1]}
models.monitors(url=t[0], project=t[1]).save()
# Real example - We filter mails coming from Keynote, the site monitoring service.
    # We find the project, select its status based on the subject, and then print it
if (h.contains(instance.sender, 'alert@keynote.com')):
project = instance.body[instance.body.find('EST')+4:].split('\n')[0]
if h.contains(instance.subject, 'Now OK'):
status = "Now OK"
elif h.contains(instance.subject, 'Crit'):
status = "Critical"
else:
status = "Unknown"
keynote_url = h.line_match(instance.body, 'my.keynote.com')
performance = float(h.line_match(instance.body,
'secs').split(' ')[0])
date = instance.date_d
print "%(status)s for %(project)s : %(performance)f secs\n" % \
{"status":status, "project":project, "performance":performance}
# We print the entire body of the message sometimes based on the sender and subject
if (h.contains(instance.sender, 'servicedesk@xyz.com') and
h.contains(instance.subject, 'Notification')):
print instance.body
|
import os, sys
import shutil
from .utils import get_environment_label
NPATH = os.path.expanduser("~/Library/Services/")
PATH = "%s/bin"%sys.exec_prefix
CONDA_ENV_LABEL = get_environment_label()
def add_jupyter_here():
if not os.path.exists(NPATH):
print("Nothing done. User Library is unavailable, are you sure this is OSX?")
return
workflow_path = os.path.expandvars(os.path.join(
os.path.dirname(__file__), 'prototype.workflow/Contents'))
docu_prototype = os.path.join(workflow_path,'document.wflow')
info_prototype = os.path.join(workflow_path,'Info.plist')
with open(docu_prototype, "r") as f:
document = f.read()
with open(info_prototype, "r") as f:
info = f.read()
for terminal in ["qtconsole", "notebook", "lab"]:
script_dir = os.path.join(NPATH, "Jupyter %s here%s.workflow/Contents" % (
terminal, CONDA_ENV_LABEL))
if (not os.path.exists(script_dir) and
shutil.which("jupyter-%s" % terminal)):
os.makedirs(script_dir)
document_path = os.path.join(script_dir, "document.wflow")
info_path = os.path.join(script_dir, "Info.plist")
if not os.path.exists(document_path):
with open(document_path, "w") as f:
f.write(document % (PATH, terminal, PATH, terminal))
if not os.path.exists(info_path):
with open(info_path, "w") as f:
f.write(info % (terminal, CONDA_ENV_LABEL))
print('Jupyter %s here%s created.' % (terminal, CONDA_ENV_LABEL))
def remove_jupyter_here():
for terminal in ["qtconsole", "notebook", "lab"]:
script_path = os.path.join(NPATH, "Jupyter %s here%s.workflow" %(
terminal, CONDA_ENV_LABEL))
if os.path.exists(script_path):
shutil.rmtree(script_path)
print("Jupyter %s here%s removed." % (terminal, CONDA_ENV_LABEL))
'''
Guide for building this script using Automator.
This guide was originally based on this article: https://davidwalsh.name/mac-context-menu
1) Start Automator (either through the Applications folder, or spotlight search)
2) Open the "File" dropdown menu, and select "New"
3) Choose the "Quick Action" document type (this is represented by the cog icon,
and used to be called "Service")
4) In the "Library" menu on the left chose "Utilities", and then double-click on
"Run Shell Script" in the menu 2nd on the left. This will open a "Run Shell Script"
panel in the right pane.
5) Using the dropdown menus in the uppermost panel in the right pane set the workflow to
receive current "files or folders" in "finder".
6) In top-right corner of the "Run Shell Script" panel, set Pass Input "as arguments".
7) Write Bash script, using this template (substitute for the anaconda install path, and
swap out jupyter-notebook for your python application of choice):
if [[ -d $1 ]] ; then
cd "$1"
current_file="None"
else
current_path=$(dirname "$1")
current_file=$(basename "$1")
cd "$current_path"
fi
if [[ $current_file == *.ipynb ]] ; then
[path to anaconda install]/bin/jupyter-notebook "$current_file"
else
[path to anaconda install]/bin/jupyter-notebook
fi
8) Save the script (using a descriptive name), and test it as described in the README file.
'''
|
import qpy
import qpy_test
print(qpy.tr("hello"))
|
from django.db import models
class Topic(models.Model):
name = models.CharField(editable=True, null=False, max_length=255)
# order = models.IntegerField(null=False,
# default=0,
# editable=True,
# help_text='Position of this entry: 1-n')
def __str__(self):
return "{}".format(self.name)
@classmethod
def add(cls, new_topic):
"""
Add a new topic
:param new_topic: string topic to be added
:return:
"""
topic = cls()
topic.name = new_topic
        # topics_list = Topic.objects.all().values_list("order", flat=True)
# topic.order = max(topics_list if topics_list else [0]) + 1
topic.save()
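# Illustrative usage sketch (the topic name is made up):
#
#     Topic.add("Shipping")   # creates and saves a new Topic row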
|
import uuid
from copy import deepcopy
from django.test import SimpleTestCase, TestCase
from pillowtop.checkpoints.manager import PillowCheckpoint
from pillowtop.feed.interface import ChangeMeta
from pillowtop.pillow.interface import ConstructedPillow
from pillowtop.processors.sample import CountingProcessor
from corehq.apps.change_feed import topics
from corehq.apps.change_feed.consumer.feed import (
KafkaChangeFeed,
KafkaCheckpointEventHandler,
)
from corehq.apps.change_feed.exceptions import UnavailableKafkaOffset
from corehq.apps.change_feed.producer import producer
from corehq.apps.change_feed.topics import (
get_multi_topic_first_available_offsets,
)
class KafkaChangeFeedTest(SimpleTestCase):
def test_multiple_topics(self):
feed = KafkaChangeFeed(topics=[topics.FORM_SQL, topics.CASE_SQL], client_id='test-kafka-feed')
self.assertEqual(0, len(list(feed.iter_changes(since=None, forever=False))))
offsets = feed.get_latest_offsets()
expected_metas = [publish_stub_change(topics.FORM_SQL), publish_stub_change(topics.CASE_SQL)]
unexpected_metas = [publish_stub_change(topics.COMMCARE_USER), publish_stub_change(topics.WEB_USER)]
changes = list(feed.iter_changes(since=offsets, forever=False))
self.assertEqual(2, len(changes))
found_change_ids = set([change.id for change in changes])
self.assertEqual(set([meta.document_id for meta in expected_metas]), found_change_ids)
for unexpected in unexpected_metas:
self.assertTrue(unexpected.document_id not in found_change_ids)
def test_expired_checkpoint_iteration_strict(self):
feed = KafkaChangeFeed(topics=[topics.FORM_SQL, topics.CASE_SQL], client_id='test-kafka-feed', strict=True)
first_available_offsets = get_multi_topic_first_available_offsets([topics.FORM_SQL, topics.CASE_SQL])
since = {
topic_partition: offset - 1
for topic_partition, offset in first_available_offsets.items()
}
with self.assertRaises(UnavailableKafkaOffset):
next(feed.iter_changes(since=since, forever=False))
def test_non_expired_checkpoint_iteration_strict(self):
feed = KafkaChangeFeed(topics=[topics.FORM_SQL, topics.CASE_SQL], client_id='test-kafka-feed', strict=True)
first_available_offsets = get_multi_topic_first_available_offsets([topics.FORM_SQL, topics.CASE_SQL])
next(feed.iter_changes(since=first_available_offsets, forever=False))
class KafkaCheckpointTest(TestCase):
def test_checkpoint_with_multiple_topics(self):
feed = KafkaChangeFeed(topics=[topics.FORM_SQL, topics.CASE_SQL], client_id='test-kafka-feed')
pillow_name = 'test-multi-topic-checkpoints'
checkpoint = PillowCheckpoint(pillow_name, feed.sequence_format)
processor = CountingProcessor()
pillow = ConstructedPillow(
name=pillow_name,
checkpoint=checkpoint,
change_feed=feed,
processor=processor,
change_processed_event_handler=KafkaCheckpointEventHandler(
checkpoint=checkpoint, checkpoint_frequency=1, change_feed=feed
)
)
offsets = feed.get_latest_offsets()
self.assertEqual(set([(topics.FORM_SQL, 0), (topics.CASE_SQL, 0)]), set(offsets.keys()))
# send a few changes to kafka so they should be picked up by the pillow
publish_stub_change(topics.FORM_SQL)
publish_stub_change(topics.FORM_SQL)
publish_stub_change(topics.CASE_SQL)
publish_stub_change(topics.CASE_SQL)
publish_stub_change(topics.COMMCARE_USER)
pillow.process_changes(since=offsets, forever=False)
self.assertEqual(4, processor.count)
self.assertEqual(feed.get_current_checkpoint_offsets(), pillow.get_last_checkpoint_sequence())
publish_stub_change(topics.FORM_SQL)
publish_stub_change(topics.FORM_SQL)
publish_stub_change(topics.CASE_SQL)
publish_stub_change(topics.CASE_SQL)
publish_stub_change(topics.COMMCARE_USER)
pillow.process_changes(pillow.get_last_checkpoint_sequence(), forever=False)
self.assertEqual(8, processor.count)
self.assertEqual(feed.get_current_checkpoint_offsets(), pillow.get_last_checkpoint_sequence())
def test_dont_create_checkpoint_past_current(self):
pillow_name = 'test-checkpoint-reset'
# initialize change feed and pillow
feed = KafkaChangeFeed(topics=topics.USER_TOPICS, client_id='test-kafka-feed')
checkpoint = PillowCheckpoint(pillow_name, feed.sequence_format)
processor = CountingProcessor()
pillow = ConstructedPillow(
name=pillow_name,
checkpoint=checkpoint,
change_feed=feed,
processor=processor,
change_processed_event_handler=KafkaCheckpointEventHandler(
checkpoint=checkpoint, checkpoint_frequency=1, change_feed=feed
)
)
original_kafka_offsets = feed.get_latest_offsets()
current_kafka_offsets = deepcopy(original_kafka_offsets)
self.assertEqual(feed.get_current_checkpoint_offsets(), {})
self.assertEqual(pillow.get_last_checkpoint_sequence(), {})
publish_stub_change(topics.COMMCARE_USER)
# the following line causes tests to fail if you have multiple partitions
current_kafka_offsets[(topics.COMMCARE_USER, 0)] += 1
pillow.process_changes(since=original_kafka_offsets, forever=False)
self.assertEqual(1, processor.count)
self.assertEqual(feed.get_current_checkpoint_offsets(), current_kafka_offsets)
def publish_stub_change(topic):
meta = ChangeMeta(document_id=uuid.uuid4().hex, data_source_type='dummy-type', data_source_name='dummy-name')
producer.send_change(topic, meta)
return meta
|
import suffix_array
def crush(script):
escape_chars = [unichr(i).encode('utf-8') for i in range(1024)]
escape_chars[:] = (
escape_chars[32:128] +
escape_chars[0:32] +
escape_chars[128:1024])
used_chars = set()
used_chars.add('"')
for s in script:
used_chars.add(s)
for c in used_chars:
escape_chars.remove(c.decode('iso 8859-1').encode('utf-8'))
    # Greedily replace the most "valuable" repeated substring with each
    # available escape character; a substring's weight is the number of
    # bytes its substitution saves (see the formula below).
matches = _find_matches(script)
used_escapes = []
i = 0
while True:
if not escape_chars:
break
escape = escape_chars.pop(0)
escape_len = len(escape)
max_weight = 0
for needle in list(matches):
num_matches = script.count(needle)
length = len(needle)
weight = (
(length * num_matches) -
(length + (num_matches + 2) * escape_len)
)
if weight >= 1:
if weight > max_weight:
max_weight = weight
max_needle = needle
max_reps = num_matches
else:
matches.remove(needle)
if not matches:
break
script = escape.join(script.split(max_needle) + [max_needle])
matches = [
escape.join(needle.split(max_needle))
for needle in matches
if needle != max_needle
]
used_escapes.append(escape)
print '%2d: "%s" (%d)' % (i, max_needle, max_reps)
i += 1
quote = '"'
decoder = ['_=','%s',';for(Y in $=','%s',')with(_.split($[Y]))_=join(pop());eval(_)']
decoder = quote.join(decoder)
script = decoder % (script, ''.join(used_escapes)[::-1])
return script
def _find_matches(script):
suffix = suffix_array.SuffixArray(script)
dupes = suffix.find_all_duplicates()
matches = []
for min_length, max_length, indices in dupes:
start_pos = indices[0]
for l in xrange(min_length, max_length+1):
needle = script[start_pos:start_pos+l]
matches.append(needle)
return matches
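# Illustrative only -- a sketch of how crush() might be driven (the file
# name is hypothetical):
#
#     with open('app.js') as f:
#         packed = crush(f.read())
#     # `packed` is a self-extracting script: the embedded decoder splits on
#     # each escape character in reverse order and evals the restored source.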
|
"""
sentry.utils.javascript
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from collections import defaultdict
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.utils.html import escape
from sentry.app import env, tsdb
from sentry.constants import TAG_LABELS
from sentry.models import (
Group, GroupBookmark, GroupMeta, GroupTagKey, GroupSeen, GroupStatus,
ProjectOption
)
from sentry.templatetags.sentry_plugins import get_legacy_annotations
from sentry.utils import json
from sentry.utils.db import attach_foreignkey
from sentry.utils.http import absolute_uri
transformers = {}
def has_sourcemap(event):
if event.platform != 'javascript':
return False
data = event.data
if 'sentry.interfaces.Exception' not in data:
return False
exception = data['sentry.interfaces.Exception']
for value in exception['values']:
stacktrace = value.get('stacktrace', {})
for frame in stacktrace.get('frames', []):
if 'sourcemap' in frame.get('data', {}):
return True
return False
def transform(objects, request=None):
if request is None:
request = getattr(env, 'request', None)
if not objects:
return objects
elif not isinstance(objects, (list, tuple)):
return transform([objects], request=request)[0]
# elif isinstance(obj, dict):
# return dict((k, transform(v, request=request)) for k, v in obj.iteritems())
t = transformers.get(type(objects[0]))
if t:
t.attach_metadata(objects, request=request)
return [t(o, request=request) for o in objects]
return objects
def to_json(obj, request=None):
result = transform(obj, request=request)
return json.dumps_htmlsafe(result)
def register(type):
def wrapped(cls):
transformers[type] = cls()
return cls
return wrapped
class Transformer(object):
def __call__(self, obj, request=None):
return self.transform(obj, request)
def attach_metadata(self, objects, request=None):
pass
def transform(self, obj, request=None):
return {}
@register(Group)
class GroupTransformer(Transformer):
def attach_metadata(self, objects, request=None):
from sentry.templatetags.sentry_plugins import handle_before_events
attach_foreignkey(objects, Group.project, ['team'])
GroupMeta.objects.populate_cache(objects)
if request and objects:
handle_before_events(request, objects)
if request and request.user.is_authenticated() and objects:
bookmarks = set(GroupBookmark.objects.filter(
user=request.user,
group__in=objects,
).values_list('group_id', flat=True))
seen_groups = dict(GroupSeen.objects.filter(
user=request.user,
group__in=objects,
).values_list('group_id', 'last_seen'))
else:
bookmarks = set()
seen_groups = {}
if objects:
end = timezone.now()
start = end - timedelta(days=1)
historical_data = tsdb.get_range(
model=tsdb.models.group,
keys=[g.id for g in objects],
start=start,
end=end,
)
else:
historical_data = {}
project_list = set(o.project for o in objects)
tag_keys = set(['sentry:user'])
project_annotations = {}
for project in project_list:
enabled_annotations = ProjectOption.objects.get_value(
project, 'annotations', ['sentry:user'])
project_annotations[project] = enabled_annotations
tag_keys.update(enabled_annotations)
annotation_counts = defaultdict(dict)
annotation_results = GroupTagKey.objects.filter(
group__in=objects,
key__in=tag_keys,
).values_list('key', 'group', 'values_seen')
for key, group_id, values_seen in annotation_results:
annotation_counts[key][group_id] = values_seen
for g in objects:
g.is_bookmarked = g.pk in bookmarks
g.historical_data = [x[1] for x in historical_data.get(g.id, [])]
active_date = g.active_at or g.last_seen
g.has_seen = seen_groups.get(g.id, active_date) > active_date
g.annotations = []
for key in sorted(tag_keys):
                if key in project_annotations[g.project]:
label = TAG_LABELS.get(key, key.replace('_', ' ')).lower() + 's'
try:
value = annotation_counts[key].get(g.id, 0)
except KeyError:
value = 0
g.annotations.append({
'label': label,
'count': value,
})
def localize_datetime(self, dt, request=None):
if not request:
return dt.isoformat()
elif getattr(request, 'timezone', None):
return dt.astimezone(request.timezone).isoformat()
return dt.isoformat()
def transform(self, obj, request=None):
status = obj.get_status()
if status == GroupStatus.RESOLVED:
status_label = 'resolved'
elif status == GroupStatus.MUTED:
status_label = 'muted'
else:
status_label = 'unresolved'
version = obj.last_seen
if obj.resolved_at:
version = max(obj.resolved_at, obj.last_seen)
version = int(version.strftime('%s'))
d = {
'id': str(obj.id),
'count': str(obj.times_seen),
'title': escape(obj.title),
'message': escape(obj.message_short),
'level': obj.level,
'levelName': escape(obj.get_level_display()),
'logger': escape(obj.logger),
'permalink': absolute_uri(reverse('sentry-group', args=[obj.organization.slug, obj.project.slug, obj.id])),
'firstSeen': self.localize_datetime(obj.first_seen, request=request),
'lastSeen': self.localize_datetime(obj.last_seen, request=request),
'canResolve': request and request.user.is_authenticated(),
'status': status_label,
'isResolved': obj.get_status() == GroupStatus.RESOLVED,
'isPublic': obj.is_public,
'score': getattr(obj, 'sort_value', 0),
'project': {
'name': escape(obj.project.name),
'slug': obj.project.slug,
},
'version': version,
}
if hasattr(obj, 'is_bookmarked'):
d['isBookmarked'] = obj.is_bookmarked
if hasattr(obj, 'has_seen'):
d['hasSeen'] = obj.has_seen
if hasattr(obj, 'historical_data'):
d['historicalData'] = obj.historical_data
if hasattr(obj, 'annotations'):
d['annotations'] = obj.annotations
# TODO(dcramer): these aren't tags, and annotations aren't annotations
if request:
d['tags'] = get_legacy_annotations(obj, request)
return d
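# Illustrative only (the queryset and request below are hypothetical):
#
#     groups = list(Group.objects.all()[:10])
#     payload = to_json(groups, request)  # HTML-safe JSON via GroupTransformer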
|
from setuptools import setup, find_packages
import os
version = __import__('cms_themes').__version__
install_requires = [
'setuptools',
'django',
'django-cms',
]
setup(
name = "django-cms-themes",
version = version,
url = 'http://github.com/megamark16/django-cms-themes',
license = 'BSD',
platforms=['OS Independent'],
description = "Load prepackaged themes (templates and accompanying media) into Django CMS projects through the admin",
author = "Mark Ransom",
author_email = 'megamark16@gmail.com',
packages=find_packages(),
install_requires = install_requires,
include_package_data=True,
zip_safe=False,
classifiers = [
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
package_dir={
'cms_themes': 'cms_themes',
},
)
|
import requests
import pprint
import jsonschema
import json
import logging
import time
import urllib.request
import urllib.parse
import urllib.error
from requests.exceptions import ReadTimeout, ConnectTimeout
from pymacaron_core.exceptions import PyMacaronCoreException, ValidationError
from pymacaron_core.utils import get_function
from bravado_core.response import unmarshal_response
log = logging.getLogger(__name__)
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
def generate_client_callers(spec, timeout, error_callback, local, app):
"""Return a dict mapping method names to anonymous functions that
will call the server's endpoint of the corresponding name as
described in the api defined by the swagger dict and bravado spec"""
callers_dict = {}
def mycallback(endpoint):
if not endpoint.handler_client:
return
callers_dict[endpoint.handler_client] = _generate_client_caller(spec, endpoint, timeout, error_callback, local, app)
spec.call_on_each_endpoint(mycallback)
return callers_dict
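# A hedged usage sketch: the keys of the returned dict are the endpoints'
# handler_client names (the names and arguments below are hypothetical):
#
#     callers = generate_client_callers(spec, timeout=10,
#                                       error_callback=my_error_handler,
#                                       local=False, app=None)
#     profile = callers['get_user_profile'](user_id='bob')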
def _generate_request_arguments(url, spec, endpoint, headers, args, kwargs):
# Prepare (g)requests arguments
data = None
params = None
custom_url = url
if hasattr(stack.top, 'call_id'):
headers['PymCallID'] = stack.top.call_id
if hasattr(stack.top, 'call_path'):
headers['PymCallPath'] = stack.top.call_path
if endpoint.param_in_path:
# Fill url with values from kwargs, and remove those params from kwargs
custom_url = _format_flask_url(url, kwargs)
if endpoint.param_in_query:
# The query parameters are contained in **kwargs
params = kwargs
# TODO: validate params? or let the server do that...
elif endpoint.param_in_body:
# The body parameter is the first elem in *args
if len(args) != 1:
raise ValidationError("%s expects exactly 1 parameter" % endpoint.handler_client)
data = json.dumps(spec.model_to_json(args[0]))
# Prune undefined parameters that would otherwise be turned into '=None'
# query params
if params:
for k in list(params.keys()):
if params[k] is None:
del params[k]
return custom_url, params, data, headers
def _generate_client_caller(spec, endpoint, timeout, error_callback, local, app):
if local:
assert app
# Is the endpoint available locally?
if local:
url = endpoint.path.lstrip('/')
else:
url = "%s://%s:%s/%s" % (
spec.protocol,
spec.host,
spec.port,
endpoint.path.lstrip('/')
)
# Get eventual decorator and http method
decorator = None
if endpoint.decorate_request:
decorator = get_function(endpoint.decorate_request)
method = endpoint.method.lower()
if method not in ('get', 'post', 'patch', 'put', 'delete'):
raise PyMacaronCoreException("BUG: method %s for %s is not supported. Only get and post are." %
(endpoint.method, endpoint.path))
# Are we doing a local call?
if local:
def local_client(*args, **kwargs):
"""Just call the local method"""
log.info("Calling %s locally via flask test_client" % (endpoint.path))
headers = {'Content-Type': 'application/json'}
headers.update(kwargs.get('request_headers', {}))
# Remove magic client parameters before passing on
for k in ('max_attempts', 'read_timeout', 'connect_timeout', 'request_headers'):
if k in kwargs:
del kwargs[k]
custom_url, params, data, headers = _generate_request_arguments(url, spec, endpoint, headers, args, kwargs)
if '<' in custom_url:
# Some arguments were missing
return error_callback(ValidationError("Missing some arguments to format url: %s" % custom_url))
if params:
for k, v in params.items():
if isinstance(v, str):
params[k] = v.encode('utf-8')
custom_url = custom_url + '?' + urllib.parse.urlencode(params)
log.info("Calling with params [%s]" % params)
with app.test_client() as c:
requests_method = getattr(c, method)
if decorator:
requests_method = decorator(requests_method)
response = requests_method(
custom_url,
data=data,
headers=headers
)
return response_to_result(response, method, custom_url, endpoint.operation, error_callback)
return local_client
# Else call over HTTP/HTTPS
requests_method = getattr(requests, method)
if decorator:
requests_method = decorator(requests_method)
def client(*args, **kwargs):
"""Call the server endpoint and handle marshaling/unmarshaling of parameters/result.
client takes either a dict of query parameters, or an object representing the unique
body parameter.
"""
# Extract custom parameters from **kwargs
headers = {'Content-Type': 'application/json'}
max_attempts = 3
read_timeout = timeout
connect_timeout = timeout
if 'max_attempts' in kwargs:
max_attempts = kwargs['max_attempts']
del kwargs['max_attempts']
if 'read_timeout' in kwargs:
read_timeout = kwargs['read_timeout']
del kwargs['read_timeout']
if 'connect_timeout' in kwargs:
connect_timeout = kwargs['connect_timeout']
del kwargs['connect_timeout']
if 'request_headers' in kwargs:
headers.update(kwargs['request_headers'])
del kwargs['request_headers']
custom_url, params, data, headers = _generate_request_arguments(url, spec, endpoint, headers, args, kwargs)
if '<' in custom_url:
# Some arguments were missing
return error_callback(ValidationError("Missing some arguments to format url: %s" % custom_url))
# TODO: refactor this left-over from the time of async/grequests support and simplify!
return ClientCaller(requests_method, custom_url, data, params, headers, read_timeout, connect_timeout, endpoint.operation, endpoint.method, error_callback, max_attempts, spec.verify_ssl).call()
return client
def _format_flask_url(url, params):
# TODO: make this code more robust: error if some params are left unmatched
# or if url still contains placeholders after replacing
remove = []
for name, value in params.items():
if "<%s>" % name in url:
url = url.replace("<%s>" % name, str(value))
remove.append(name)
for name in remove:
del params[name]
return url
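# For illustration, how _format_flask_url behaves (values are made up):
#
#     params = {'user_id': 42, 'verbose': True}
#     _format_flask_url('/v1/users/<user_id>', params)
#     # => '/v1/users/42', and params is pruned in place to {'verbose': True}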
def response_to_result(response, method, url, operation, error_callback):
# Monkey patching flask test_client response if necessary
if not hasattr(response, 'text'):
data = response.data.decode("utf-8")
setattr(response, 'text', data)
j = json.loads(data)
def get_json():
return j
setattr(response, 'json', get_json)
# If the remote-server returned an error, raise it as a local PyMacaronCoreException
if str(response.status_code) != '200':
log.warn("Call to %s %s returns error: %s" % (method, url, response.text))
if 'error_description' in response.text:
            # We got a PyMacaronCoreException: unmarshal it and return it as a
            # valid return value. UGLY, FRAGILE CODE -- to be replaced by a
            # proper exception scheme.
pass
else:
# Unknown exception...
log.info("Unknown exception: " + response.text)
k = PyMacaronCoreException("Call to %s %s returned unknown exception: %s" % (method, url, response.text))
k.status_code = response.status_code
c = error_callback
if hasattr(c, '__func__'):
c = c.__func__
return c(k)
# Now transform the request's Response object into an instance of a
# swagger model
try:
result = unmarshal_response(response, operation)
except jsonschema.exceptions.ValidationError as e:
log.warn("Failed to unmarshal response: %s" % e)
k = ValidationError("Failed to unmarshal response because: %s" % str(e))
c = error_callback
if hasattr(c, '__func__'):
c = c.__func__
return c(k)
log.info("Call to %s %s returned an instance of %s" % (method, url, type(result)))
return result
class ClientCaller():
def __init__(self, requests_method, url, data, params, headers, read_timeout, connect_timeout, operation, method, error_callback, max_attempts, verify_ssl):
assert max_attempts >= 1
self.requests_method = requests_method
self.url = url
self.operation = operation
self.data = data
self.params = params
self.headers = headers
self.read_timeout = read_timeout
self.connect_timeout = connect_timeout
self.method = method.upper()
self.error_callback = error_callback
self.max_attempts = max_attempts
self.verify_ssl = verify_ssl
def _method_is_safe_to_retry(self):
return self.method in ('GET', 'PATCH')
def _call_retry(self, force_retry):
"""Call request and retry up to max_attempts times (or none if self.max_attempts=1)"""
last_exception = None
for i in range(self.max_attempts):
try:
log.info("Calling %s %s" % (self.method, self.url))
response = self.requests_method(
self.url,
data=self.data,
params=self.params,
headers=self.headers,
timeout=(self.connect_timeout, self.read_timeout),
verify=self.verify_ssl,
)
if response is None:
log.warn("Got response None")
if self._method_is_safe_to_retry():
delay = 0.5 + i * 0.5
log.info("Waiting %s sec and Retrying since call is a %s" % (delay, self.method))
time.sleep(delay)
continue
else:
raise PyMacaronCoreException("Call %s %s returned empty response" % (self.method, self.url))
return response
except Exception as e:
last_exception = e
retry = force_retry
if isinstance(e, ReadTimeout):
# Log enough to help debugging...
log.warn("Got a ReadTimeout calling %s %s" % (self.method, self.url))
log.warn("Exception was: %s" % str(e))
resp = e.response
if not resp:
log.info("Requests error has no response.")
# TODO: retry=True? Is it really safe?
else:
b = resp.content
log.info("Requests has a response with content: " + pprint.pformat(b))
if self._method_is_safe_to_retry():
# It is safe to retry
log.info("Retrying since call is a %s" % self.method)
retry = True
elif isinstance(e, ConnectTimeout):
log.warn("Got a ConnectTimeout calling %s %s" % (self.method, self.url))
log.warn("Exception was: %s" % str(e))
# ConnectTimeouts are safe to retry whatever the call...
retry = True
if retry:
continue
else:
raise e
# max_attempts has been reached: propagate the last received Exception
if not last_exception:
last_exception = Exception("Reached max-attempts (%s). Giving up calling %s %s" % (self.max_attempts, self.method, self.url))
raise last_exception
def call(self, force_retry=False):
response = self._call_retry(force_retry)
return response_to_result(response, self.method, self.url, self.operation, self.error_callback)
|
"""
A class that represents a unit symbol.
"""
import copy
import itertools
import math
import numpy as np
from functools import lru_cache
from numbers import Number as numeric_type
from sympy import (
Expr,
Mul,
Add,
Number,
Pow,
Symbol,
Float,
Basic,
Rational,
Mod,
floor,
)
from sympy.core.numbers import One
from sympy import sympify, latex
from unyt.dimensions import (
angle,
base_dimensions,
dimensionless,
temperature,
current_mks,
logarithmic,
)
import unyt.dimensions as dims
from unyt.equivalencies import equivalence_registry
from unyt.exceptions import (
InvalidUnitOperation,
MissingMKSCurrent,
MKSCGSConversionError,
UnitConversionError,
UnitsNotReducible,
UnitParseError,
)
from unyt._parsing import parse_unyt_expr
from unyt._physical_ratios import speed_of_light_cm_per_s
from unyt.unit_registry import default_unit_registry, _lookup_unit_symbol, UnitRegistry
from unyt.unit_systems import _split_prefix
sympy_one = sympify(1)
def _get_latex_representation(expr, registry):
symbol_table = {}
for ex in expr.free_symbols:
try:
symbol_table[ex] = registry.lut[str(ex)][3]
except KeyError:
symbol_table[ex] = r"\rm{" + str(ex).replace("_", r"\ ") + "}"
# invert the symbol table dict to look for keys with identical values
invert_symbols = {}
for key, value in symbol_table.items():
if value not in invert_symbols:
invert_symbols[value] = [key]
else:
invert_symbols[value].append(key)
# if there are any units with identical latex representations, substitute
# units to avoid uncanceled terms in the final latex expression.
for val in invert_symbols:
symbols = invert_symbols[val]
for i in range(1, len(symbols)):
expr = expr.subs(symbols[i], symbols[0])
prefix = None
l_expr = expr
if isinstance(expr, Mul):
coeffs = expr.as_coeff_Mul()
if coeffs[0] == 1 or not isinstance(coeffs[0], Number):
l_expr = coeffs[1]
else:
l_expr = coeffs[1]
prefix = Float(coeffs[0], 2)
latex_repr = latex(
l_expr,
symbol_names=symbol_table,
mul_symbol="dot",
fold_frac_powers=True,
fold_short_frac=True,
)
if prefix is not None:
latex_repr = latex(prefix, mul_symbol="times") + "\\ " + latex_repr
if latex_repr == "1":
return ""
else:
return latex_repr
class _ImportCache(object):
__slots__ = ["_ua", "_uq"]
def __init__(self):
self._ua = None
self._uq = None
@property
def ua(self):
if self._ua is None:
from unyt.array import unyt_array
self._ua = unyt_array
return self._ua
@property
def uq(self):
if self._uq is None:
from unyt.array import unyt_quantity
self._uq = unyt_quantity
return self._uq
_import_cache_singleton = _ImportCache()
class Unit(object):
"""
A symbolic unit, using sympy functionality. We only add "dimensions" so
that sympy understands relations between different units.
"""
__slots__ = [
"expr",
"is_atomic",
"base_value",
"base_offset",
"dimensions",
"_latex_repr",
"registry",
"is_Unit",
]
# Set some assumptions for sympy.
is_positive = True # make sqrt(m**2) --> m
is_commutative = True
is_number = False
__array_priority__ = 3.0
def __new__(
cls,
unit_expr=sympy_one,
base_value=None,
base_offset=0.0,
dimensions=None,
registry=None,
latex_repr=None,
):
"""
Create a new unit. May be an atomic unit (like a gram) or combinations
of atomic units (like g / cm**3).
Parameters
----------
unit_expr : Unit object, sympy.core.expr.Expr object, or str
The symbolic unit expression.
base_value : float
The unit's value in yt's base units.
base_offset : float
The offset necessary to normalize temperature units to a common
zero point.
dimensions : sympy.core.expr.Expr
A sympy expression representing the dimensionality of this unit.
It must contain only mass, length, time, temperature and angle
symbols.
registry : UnitRegistry object
The unit registry we use to interpret unit symbols.
latex_repr : string
A string to render the unit as LaTeX
"""
unit_cache_key = None
# Simplest case. If user passes a Unit object, just use the expr.
if hasattr(unit_expr, "is_Unit"):
# grab the unit object's sympy expression.
unit_expr = unit_expr.expr
elif hasattr(unit_expr, "units") and hasattr(unit_expr, "value"):
# something that looks like a unyt_array, grab the unit and value
if unit_expr.shape != ():
raise UnitParseError(
"Cannot create a unit from a non-scalar unyt_array, "
"received: %s" % (unit_expr,)
)
value = unit_expr.value
if value == 1:
unit_expr = unit_expr.units.expr
else:
unit_expr = unit_expr.value * unit_expr.units.expr
# Parse a text unit representation using sympy's parser
elif isinstance(unit_expr, (str, bytes)):
if isinstance(unit_expr, bytes):
unit_expr = unit_expr.decode("utf-8")
# this cache substantially speeds up unit conversions
if registry and unit_expr in registry._unit_object_cache:
return registry._unit_object_cache[unit_expr]
unit_cache_key = unit_expr
unit_expr = parse_unyt_expr(unit_expr)
# Make sure we have an Expr at this point.
if not isinstance(unit_expr, Expr):
raise UnitParseError(
"Unit representation must be a string or "
"sympy Expr. '%s' has type '%s'." % (unit_expr, type(unit_expr))
)
if dimensions is None and unit_expr is sympy_one:
dimensions = dimensionless
if registry is None:
# Caller did not set the registry, so use the default.
registry = default_unit_registry
# done with argument checking...
# see if the unit is atomic.
is_atomic = False
if isinstance(unit_expr, Symbol):
is_atomic = True
#
# check base_value and dimensions
#
if base_value is not None:
# check that base_value is a float or can be converted to one
try:
base_value = float(base_value)
except ValueError:
raise UnitParseError(
"Could not use base_value as a float. "
"base_value is '%s' (type '%s')." % (base_value, type(base_value))
)
# check that dimensions is valid
if dimensions is not None:
_validate_dimensions(dimensions)
else:
# lookup the unit symbols
unit_data = _get_unit_data_from_expr(unit_expr, registry.lut)
base_value = unit_data[0]
dimensions = unit_data[1]
if len(unit_data) > 2:
base_offset = unit_data[2]
latex_repr = unit_data[3]
else:
base_offset = 0.0
# Create obj with superclass construct.
obj = super(Unit, cls).__new__(cls)
# Attach attributes to obj.
obj.expr = unit_expr
obj.is_atomic = is_atomic
obj.base_value = base_value
obj.base_offset = base_offset
obj.dimensions = dimensions
obj._latex_repr = latex_repr
obj.registry = registry
# lets us avoid isinstance calls
obj.is_Unit = True
# if we parsed a string unit expression, cache the result
# for faster lookup later
if unit_cache_key is not None:
registry._unit_object_cache[unit_cache_key] = obj
# Return `obj` so __init__ can handle it.
return obj
@property
def latex_repr(self):
"""A LaTeX representation for the unit
Examples
--------
>>> from unyt import g, cm
>>> (g/cm**3).units.latex_repr
'\\\\frac{\\\\rm{g}}{\\\\rm{cm}^{3}}'
"""
if self._latex_repr is not None:
return self._latex_repr
if self.expr.is_Atom:
expr = self.expr
else:
expr = self.expr.copy()
self._latex_repr = _get_latex_representation(expr, self.registry)
return self._latex_repr
@property
def units(self):
return self
def __hash__(self):
return int(self.registry.unit_system_id, 16) ^ hash(self.expr)
# end sympy conventions
def __repr__(self):
if self.expr == sympy_one:
return "(dimensionless)"
# @todo: don't use dunder method?
return self.expr.__repr__()
def __str__(self):
if self.expr == sympy_one:
return "dimensionless"
unit_str = self.expr.__str__()
if unit_str == "degC":
return "°C"
if unit_str == "delta_degC":
return "Δ°C"
if unit_str == "degF":
return "°F"
if unit_str == "delta_degF":
return "Δ°F"
# @todo: don't use dunder method?
return unit_str
#
# Start unit operations
#
def __add__(self, u):
raise InvalidUnitOperation("addition with unit objects is not allowed")
def __radd__(self, u):
raise InvalidUnitOperation("addition with unit objects is not allowed")
def __sub__(self, u):
raise InvalidUnitOperation("subtraction with unit objects is not allowed")
def __rsub__(self, u):
raise InvalidUnitOperation("subtraction with unit objects is not allowed")
def __iadd__(self, u):
raise InvalidUnitOperation(
"in-place operations with unit objects are not allowed"
)
def __isub__(self, u):
raise InvalidUnitOperation(
"in-place operations with unit objects are not allowed"
)
def __imul__(self, u):
raise InvalidUnitOperation(
"in-place operations with unit objects are not allowed"
)
def __itruediv__(self, u):
raise InvalidUnitOperation(
"in-place operations with unit objects are not allowed"
)
def __rmul__(self, u):
return self.__mul__(u)
def __mul__(self, u):
"""Multiply Unit with u (Unit object)."""
if not getattr(u, "is_Unit", False):
data = np.array(u, subok=True)
unit = getattr(u, "units", None)
if unit is not None:
if self.dimensions is logarithmic:
raise InvalidUnitOperation(
"Tried to multiply '%s' and '%s'." % (self, unit)
)
units = unit * self
else:
units = self
if data.dtype.kind not in ("f", "u", "i", "c"):
raise InvalidUnitOperation(
"Tried to multiply a Unit object with '%s' (type %s). "
"This behavior is undefined." % (u, type(u))
)
if data.shape == ():
return _import_cache_singleton.uq(data, units, bypass_validation=True)
return _import_cache_singleton.ua(data, units, bypass_validation=True)
elif self.dimensions is logarithmic and not u.is_dimensionless:
raise InvalidUnitOperation("Tried to multiply '%s' and '%s'." % (self, u))
elif u.dimensions is logarithmic and not self.is_dimensionless:
raise InvalidUnitOperation("Tried to multiply '%s' and '%s'." % (self, u))
base_offset = 0.0
if self.base_offset or u.base_offset:
if u.dimensions in (temperature, angle) and self.is_dimensionless:
base_offset = u.base_offset
elif self.dimensions in (temperature, angle) and u.is_dimensionless:
base_offset = self.base_offset
else:
raise InvalidUnitOperation(
"Quantities with dimensions of angle or units of "
"Fahrenheit or Celsius cannot be multiplied."
)
return Unit(
self.expr * u.expr,
base_value=(self.base_value * u.base_value),
base_offset=base_offset,
dimensions=(self.dimensions * u.dimensions),
registry=self.registry,
)
def __truediv__(self, u):
"""Divide Unit by u (Unit object)."""
if not isinstance(u, Unit):
if isinstance(u, (numeric_type, list, tuple, np.ndarray)):
from unyt.array import unyt_quantity
return unyt_quantity(1.0, self) / u
else:
raise InvalidUnitOperation(
"Tried to divide a Unit object by '%s' (type %s). This "
"behavior is undefined." % (u, type(u))
)
elif self.dimensions is logarithmic and not u.is_dimensionless:
raise InvalidUnitOperation("Tried to divide '%s' and '%s'." % (self, u))
elif u.dimensions is logarithmic and not self.is_dimensionless:
raise InvalidUnitOperation("Tried to divide '%s' and '%s'." % (self, u))
base_offset = 0.0
if self.base_offset or u.base_offset:
if self.dimensions in (temperature, angle) and u.is_dimensionless:
base_offset = self.base_offset
else:
raise InvalidUnitOperation(
"Quantities with units of Farhenheit "
"and Celsius cannot be divided."
)
return Unit(
self.expr / u.expr,
base_value=(self.base_value / u.base_value),
base_offset=base_offset,
dimensions=(self.dimensions / u.dimensions),
registry=self.registry,
)
def __rtruediv__(self, u):
return u * self ** -1
def __pow__(self, p):
"""Take Unit to power p (float)."""
try:
p = Rational(str(p)).limit_denominator()
except (ValueError, TypeError):
raise InvalidUnitOperation(
"Tried to take a Unit object to the "
"power '%s' (type %s). Failed to cast "
"it to a float." % (p, type(p))
)
if self.dimensions is logarithmic and p != 1.0:
raise InvalidUnitOperation("Tried to raise '%s' to power '%s'" % (self, p))
return Unit(
self.expr ** p,
base_value=(self.base_value ** p),
dimensions=(self.dimensions ** p),
registry=self.registry,
)
def __eq__(self, u):
"""Test unit equality."""
if not isinstance(u, Unit):
return False
return (
math.isclose(self.base_value, u.base_value)
and self.dimensions == u.dimensions
)
def __ne__(self, u):
"""Test unit inequality."""
if not isinstance(u, Unit):
return True
if not math.isclose(self.base_value, u.base_value):
return True
        # use 'is' comparison on dimensions to avoid an expensive sympy operation
if self.dimensions is u.dimensions:
return False
# fall back to expensive sympy comparison
return self.dimensions != u.dimensions
def copy(self):
return copy.deepcopy(self)
def __deepcopy__(self, memodict=None):
expr = str(self.expr)
base_value = copy.deepcopy(self.base_value)
base_offset = copy.deepcopy(self.base_offset)
dimensions = copy.deepcopy(self.dimensions)
lut = copy.deepcopy(self.registry.lut)
registry = UnitRegistry(lut=lut)
return Unit(expr, base_value, base_offset, dimensions, registry)
#
# End unit operations
#
def same_dimensions_as(self, other_unit):
"""Test if the dimensions of *other_unit* are the same as this unit
Examples
--------
>>> from unyt import Msun, kg, mile
>>> Msun.units.same_dimensions_as(kg.units)
True
>>> Msun.units.same_dimensions_as(mile.units)
False
"""
# test first for 'is' equality to avoid expensive sympy operation
if self.dimensions is other_unit.dimensions:
return True
return (self.dimensions / other_unit.dimensions) == sympy_one
@property
def is_dimensionless(self):
"""Is this a dimensionless unit?
Returns
-------
True for a dimensionless unit, False otherwise
Examples
--------
>>> from unyt import count, kg
>>> count.units.is_dimensionless
True
>>> kg.units.is_dimensionless
False
"""
return self.dimensions is sympy_one
@property
def is_code_unit(self):
"""Is this a "code" unit?
Returns
-------
        True if the unit consists of atomic units that begin with "code".
False otherwise
"""
for atom in self.expr.atoms():
if not (str(atom).startswith("code") or atom.is_Number):
return False
return True
def list_equivalencies(self):
"""Lists the possible equivalencies associated with this unit object
Examples
--------
>>> from unyt import km
>>> km.units.list_equivalencies()
spectral: length <-> spatial_frequency <-> frequency <-> energy
schwarzschild: mass <-> length
compton: mass <-> length
"""
from unyt.equivalencies import equivalence_registry
for k, v in equivalence_registry.items():
if self.has_equivalent(k):
print(v())
def has_equivalent(self, equiv):
"""
        Check to see if this unit object has an equivalent unit in *equiv*.
Example
-------
>>> from unyt import km
>>> km.has_equivalent('spectral')
True
>>> km.has_equivalent('mass_energy')
False
"""
try:
this_equiv = equivalence_registry[equiv]()
except KeyError:
raise KeyError('No such equivalence "%s".' % equiv)
old_dims = self.dimensions
return old_dims in this_equiv._dims
def get_base_equivalent(self, unit_system=None):
"""Create and return dimensionally-equivalent units in a specified base.
>>> from unyt import g, cm
>>> (g/cm**3).get_base_equivalent('mks')
kg/m**3
>>> (g/cm**3).get_base_equivalent('solar')
Mearth/AU**3
"""
from unyt.unit_registry import _sanitize_unit_system
unit_system = _sanitize_unit_system(unit_system, self)
try:
conv_data = _check_em_conversion(
self.units, registry=self.registry, unit_system=unit_system
)
um = unit_system.units_map
if self.dimensions in um and self.expr == um[self.dimensions]:
return self.copy()
except MKSCGSConversionError:
raise UnitsNotReducible(self.units, unit_system)
if any(conv_data):
new_units, _ = _em_conversion(self, conv_data, unit_system=unit_system)
else:
try:
new_units = unit_system[self.dimensions]
except MissingMKSCurrent:
raise UnitsNotReducible(self.units, unit_system)
return Unit(new_units, registry=self.registry)
def get_cgs_equivalent(self):
"""Create and return dimensionally-equivalent cgs units.
Example
-------
>>> from unyt import kg, m
>>> (kg/m**3).get_cgs_equivalent()
g/cm**3
"""
return self.get_base_equivalent(unit_system="cgs")
def get_mks_equivalent(self):
"""Create and return dimensionally-equivalent mks units.
Example
-------
>>> from unyt import g, cm
>>> (g/cm**3).get_mks_equivalent()
kg/m**3
"""
return self.get_base_equivalent(unit_system="mks")
def get_conversion_factor(self, other_units, dtype=None):
"""Get the conversion factor and offset (if any) from one unit
to another
Parameters
----------
other_units: unit object
The units we want the conversion factor for
dtype: numpy dtype
The dtype to return the conversion factor as
Returns
-------
conversion_factor : float
old_units / new_units
offset : float or None
Offset between this unit and the other unit. None if there is
no offset.
Examples
--------
>>> from unyt import km, cm, degree_fahrenheit, degree_celsius
>>> km.get_conversion_factor(cm)
(100000.0, None)
>>> degree_celsius.get_conversion_factor(degree_fahrenheit)
(1.7999999999999998, -31.999999999999886)
"""
return _get_conversion_factor(self, other_units, dtype)
def latex_representation(self):
"""A LaTeX representation for the unit
Examples
--------
>>> from unyt import g, cm
>>> (g/cm**3).latex_representation()
'\\\\frac{\\\\rm{g}}{\\\\rm{cm}^{3}}'
"""
return self.latex_repr
def as_coeff_unit(self):
"""Factor the coefficient multiplying a unit
For units that are multiplied by a constant dimensionless
coefficient, returns a tuple containing the coefficient and
a new unit object for the unmultiplied unit.
Example
-------
>>> import unyt as u
>>> unit = (u.m**2/u.cm).simplify()
>>> unit
100*m
>>> unit.as_coeff_unit()
(100.0, m)
"""
coeff, mul = self.expr.as_coeff_Mul()
coeff = float(coeff)
ret = Unit(
mul,
self.base_value / coeff,
self.base_offset,
self.dimensions,
self.registry,
)
return coeff, ret
def simplify(self):
"""Return a new equivalent unit object with a simplified unit expression
>>> import unyt as u
>>> unit = (u.m**2/u.cm).simplify()
>>> unit
100*m
"""
expr = self.expr
self.expr = _cancel_mul(expr, self.registry)
return self
def _factor_pairs(expr):
factors = expr.as_ordered_factors()
expanded_factors = []
for f in factors:
if f.is_Number:
continue
base, exp = f.as_base_exp()
if exp.q != 1:
expanded_factors.append(base ** Mod(exp, 1))
exp = floor(exp)
if exp >= 0:
f = (base,) * exp
else:
f = (1 / base,) * abs(exp)
expanded_factors.extend(f)
return list(itertools.combinations(expanded_factors, 2))
def _create_unit_from_factor(factor, registry):
base, exp = factor.as_base_exp()
f = registry[str(base)]
return Unit(base, f[0], f[2], f[1], registry, f[3]) ** exp
def _cancel_mul(expr, registry):
pairs_to_consider = _factor_pairs(expr)
uncancelable_pairs = set()
while len(pairs_to_consider):
pair = pairs_to_consider.pop()
if pair in uncancelable_pairs:
continue
u1 = _create_unit_from_factor(pair[0], registry)
u2 = _create_unit_from_factor(pair[1], registry)
prod = u1 * u2
if prod.dimensions == 1:
expr = expr / pair[0]
expr = expr / pair[1]
value = prod.base_value
if value != 1:
if value.is_integer():
value = int(value)
expr *= value
else:
uncancelable_pairs.add(pair)
pairs_to_consider = _factor_pairs(expr)
return expr
em_conversions = {
("C", dims.charge_mks): (dims.charge_cgs, "statC", 0.1 * speed_of_light_cm_per_s),
("statC", dims.charge_cgs): (dims.charge_mks, "C", 10.0 / speed_of_light_cm_per_s),
("T", dims.magnetic_field_mks): (dims.magnetic_field_cgs, "G", 1.0e4),
("G", dims.magnetic_field_cgs): (dims.magnetic_field_mks, "T", 1.0e-4),
("A", dims.current_mks): (dims.current_cgs, "statA", 0.1 * speed_of_light_cm_per_s),
("statA", dims.current_cgs): (
dims.current_mks,
"A",
10.0 / speed_of_light_cm_per_s,
),
("V", dims.electric_potential_mks): (
dims.electric_potential_cgs,
"statV",
1.0e-8 * speed_of_light_cm_per_s,
),
("statV", dims.electric_potential_cgs): (
dims.electric_potential_mks,
"V",
1.0e8 / speed_of_light_cm_per_s,
),
("Ω", dims.resistance_mks): (
dims.resistance_cgs,
"statohm",
1.0e9 / (speed_of_light_cm_per_s ** 2),
),
("statohm", dims.resistance_cgs): (
dims.resistance_mks,
"Ω",
1.0e-9 * speed_of_light_cm_per_s ** 2,
),
}
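# Worked example, reading the first entry above: converting a charge from
# coulombs to statcoulombs multiplies by 0.1 * c[cm/s] ~= 2.998e9, so
# 1 C ~= 2.998e9 statC.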
em_conversion_dims = [k[1] for k in em_conversions.keys()]
def _em_conversion(orig_units, conv_data, to_units=None, unit_system=None):
"""Convert between E&M & MKS base units.
If orig_units is a CGS (or MKS) E&M unit, conv_data contains the
corresponding MKS (or CGS) unit and scale factor converting between them.
This must be done by replacing the expression of the original unit
with the new one in the unit expression and multiplying by the scale
factor.
"""
conv_unit, canonical_unit, scale = conv_data
if conv_unit is None:
conv_unit = canonical_unit
new_expr = scale * canonical_unit.expr
if unit_system is not None:
# we don't know the to_units, so we get it directly from the
# conv_data
to_units = Unit(conv_unit.expr, registry=orig_units.registry)
new_units = Unit(new_expr, registry=orig_units.registry)
conv = new_units.get_conversion_factor(to_units)
return to_units, conv
@lru_cache(maxsize=128, typed=False)
def _check_em_conversion(unit, to_unit=None, unit_system=None, registry=None):
"""Check to see if the units contain E&M units
This function supports unyt's ability to convert data to and from E&M
electromagnetic units. However, this support is limited and only very
simple unit expressions can be readily converted. This function tries
to see if the unit is an atomic base unit that is present in the
em_conversions dict. If it does not contain E&M units, the function
returns an empty tuple. If it does contain an atomic E&M unit in
the em_conversions dict, it returns a tuple containing the unit to convert
to and scale factor. If it contains a more complicated E&M unit and we are
trying to convert between CGS & MKS E&M units, it raises an error.
"""
em_map = ()
if unit == to_unit or unit.dimensions not in em_conversion_dims:
return em_map
if unit.is_atomic:
prefix, unit_wo_prefix = _split_prefix(str(unit), unit.registry.lut)
else:
prefix, unit_wo_prefix = "", str(unit)
if (unit_wo_prefix, unit.dimensions) in em_conversions:
em_info = em_conversions[unit_wo_prefix, unit.dimensions]
em_unit = Unit(prefix + em_info[1], registry=registry)
if to_unit is None:
cmks_in_unit = current_mks in unit.dimensions.atoms()
cmks_in_unit_system = unit_system.units_map[current_mks]
cmks_in_unit_system = cmks_in_unit_system is not None
if cmks_in_unit and cmks_in_unit_system:
em_map = (unit_system[unit.dimensions], unit, 1.0)
else:
em_map = (None, em_unit, em_info[2])
elif to_unit.dimensions == em_unit.dimensions:
em_map = (to_unit, em_unit, em_info[2])
if em_map:
return em_map
if unit_system is None:
from unyt.unit_systems import unit_system_registry
unit_system = unit_system_registry["mks"]
for unit_atom in unit.expr.atoms():
if unit_atom.is_Number:
continue
bu = str(unit_atom)
budims = Unit(bu, registry=registry).dimensions
try:
if str(unit_system[budims]) == bu:
continue
except MissingMKSCurrent:
raise MKSCGSConversionError(unit)
return em_map
def _get_conversion_factor(old_units, new_units, dtype):
"""
Get the conversion factor between two units of equivalent dimensions. This
is the number you multiply data by to convert from values in `old_units` to
values in `new_units`.
Parameters
----------
old_units: str or Unit object
The current units.
new_units : str or Unit object
The units we want.
dtype: NumPy dtype
The dtype of the conversion factor
Returns
-------
conversion_factor : float
`old_units / new_units`
offset : float or None
Offset between the old unit and new unit.
"""
if old_units.dimensions != new_units.dimensions:
raise UnitConversionError(
old_units, old_units.dimensions, new_units, new_units.dimensions
)
old_basevalue = old_units.base_value
old_baseoffset = old_units.base_offset
new_basevalue = new_units.base_value
new_baseoffset = new_units.base_offset
ratio = old_basevalue / new_basevalue
if old_baseoffset == 0 and new_baseoffset == 0:
return (ratio, None)
else:
# the dimensions are either temperature or angle (lat, lon)
if old_units.dimensions == temperature:
# for degree Celsius, back out the SI prefix scaling from
# offset scaling for degree Fahrenheit
old_prefix, _ = _split_prefix(str(old_units), old_units.registry.lut)
if old_prefix != "":
old_baseoffset /= old_basevalue
new_prefix, _ = _split_prefix(str(new_units), new_units.registry.lut)
if new_prefix != "":
new_baseoffset /= new_basevalue
return ratio, ratio * old_baseoffset - new_baseoffset
def _get_unit_data_from_expr(unit_expr, unit_symbol_lut):
"""
Grabs the total base_value and dimensions from a valid unit expression.
Parameters
----------
unit_expr: Unit object, or sympy Expr object
The expression containing unit symbols.
unit_symbol_lut: dict
Provides the unit data for each valid unit symbol.
"""
# Now for the sympy possibilities
if isinstance(unit_expr, Number):
if unit_expr is sympy_one:
return (1.0, sympy_one)
return (float(unit_expr), sympy_one)
if isinstance(unit_expr, Symbol):
return _lookup_unit_symbol(unit_expr.name, unit_symbol_lut)
if isinstance(unit_expr, Pow):
unit_data = _get_unit_data_from_expr(unit_expr.args[0], unit_symbol_lut)
power = unit_expr.args[1]
if isinstance(power, Symbol):
raise UnitParseError("Invalid unit expression '%s'." % unit_expr)
conv = float(unit_data[0] ** power)
unit = unit_data[1] ** power
return (conv, unit)
if isinstance(unit_expr, Mul):
base_value = 1.0
dimensions = 1
for expr in unit_expr.args:
unit_data = _get_unit_data_from_expr(expr, unit_symbol_lut)
base_value *= unit_data[0]
dimensions *= unit_data[1]
return (float(base_value), dimensions)
raise UnitParseError(
"Cannot parse for unit data from '%s'. Please supply"
" an expression of only Unit, Symbol, Pow, and Mul"
"objects." % str(unit_expr)
)
def _validate_dimensions(dimensions):
if isinstance(dimensions, Mul):
for dim in dimensions.args:
_validate_dimensions(dim)
elif isinstance(dimensions, Symbol):
if dimensions not in base_dimensions:
raise UnitParseError(
"Dimensionality expression contains an "
"unknown symbol '%s'." % dimensions
)
elif isinstance(dimensions, Pow):
if not isinstance(dimensions.args[1], Number):
raise UnitParseError(
"Dimensionality expression '%s' contains a "
"unit symbol as a power." % dimensions
)
elif isinstance(dimensions, (Add, Number)):
if not isinstance(dimensions, One):
raise UnitParseError(
"Only dimensions that are instances of Pow, "
"Mul, or symbols in the base dimensions are "
"allowed. Got dimensions '%s'" % dimensions
)
elif not isinstance(dimensions, Basic):
raise UnitParseError("Bad dimensionality expression '%s'." % dimensions)
def define_unit(
symbol, value, tex_repr=None, offset=None, prefixable=False, registry=None
):
"""
Define a new unit and add it to the specified unit registry.
Parameters
----------
symbol : string
The symbol for the new unit.
value : tuple or :class:`unyt.array.unyt_quantity`
The definition of the new unit in terms of some other units. For
example, one would define a new "mph" unit with ``(1.0, "mile/hr")``
or with ``1.0*unyt.mile/unyt.hr``
tex_repr : string, optional
The LaTeX representation of the new unit. If one is not supplied, it
will be generated automatically based on the symbol string.
offset : float, optional
The default offset for the unit. If not set, an offset of 0 is assumed.
prefixable : boolean, optional
Whether or not the new unit can use SI prefixes. Default: False
registry : :class:`unyt.unit_registry.UnitRegistry` or None
The unit registry to add the unit to. If None, then defaults to the
global default unit registry. If registry is set to None then the
unit object will be added as an attribute to the top-level :mod:`unyt`
namespace to ease working with the newly defined unit. See the example
below.
Examples
--------
>>> from unyt import day
>>> two_weeks = 14.0*day
>>> one_day = 1.0*day
>>> define_unit("two_weeks", two_weeks)
>>> from unyt import two_weeks
>>> print((3*two_weeks)/one_day)
42.0 dimensionless
"""
from unyt.array import unyt_quantity, _iterable
import unyt
if registry is None:
registry = default_unit_registry
if symbol in registry:
raise RuntimeError(
"Unit symbol '%s' already exists in the provided " "registry" % symbol
)
if not isinstance(value, unyt_quantity):
if _iterable(value) and len(value) == 2:
value = unyt_quantity(value[0], value[1], registry=registry)
else:
raise RuntimeError(
'"value" needs to be a quantity or ' "(value, unit) tuple!"
)
base_value = float(value.in_base(unit_system="mks"))
dimensions = value.units.dimensions
registry.add(
symbol,
base_value,
dimensions,
prefixable=prefixable,
tex_repr=tex_repr,
offset=offset,
)
if registry is default_unit_registry:
u = Unit(symbol, registry=registry)
setattr(unyt, symbol, u)
NULL_UNIT = Unit()
|
import subprocess
import re
import sys
import os.path
class Crawler:
@staticmethod
def parse_simout(filename):
        # Re-run each recorded gem5 command line with the opt binary and the
        # FI debug flag substituted in.
        with open(filename, 'r') as simout:
            for line in simout:
                if 'command line:' in line:
                    tmpcmd = line.replace('command line:', '')
                    tmpcmd = tmpcmd.replace('gem5.debug', 'gem5.opt')
                    cmd = re.sub(r"(--debug-flags=)(\S*)", r"\1FI", tmpcmd)
                    subprocess.call(cmd, shell=True)
@staticmethod
def parse_debug(filename):
pass
if __name__ == '__main__':
for idx in range(int(sys.argv[2]), int(sys.argv[3]) + 1):
filename = sys.argv[1] + '/PipeReg_f2ToD/simout_' + str(idx)
if os.path.isfile(filename):
Crawler.parse_simout(filename)
|
from copy import deepcopy
import anyjson as json
import logging
import time
from django.db import connection, transaction
from django.db.models.query import QuerySet
from mypage.pages.models import Page
from mypage.pages.layout import WidgetInLayout
log = logging.getLogger('mypage.pages.migrations')
get_page_wips_query = '''SELECT
`pages_widgetinpage`.`widget_id`,
`pages_widgetinpage`.`config_json`,
`pages_widgetinpage`.`state`
FROM `pages_widgetinpage` WHERE `pages_widgetinpage`.`page_id` = %s'''
class MockedWidgetInPage(object):
"""
Emulates old WidgetInPage model
"""
def __init__(self, page, widget_id, config_json, state):
self.page = page
self.widget_id = widget_id
self.state = state
self.config_json = config_json
self.config = self.get_config(self.config_json)
def get_config(self, config_json):
if not config_json:
return {}
try:
return json.deserialize(config_json)
except Exception, e:
log.warn('Broken config! %s, %s', config_json, e)
return {}
def get_wips(page):
"""
Return mocked wips for given page
{14L: <MockedWidgetInPage: Widget: 14 in Page: 1>,
18L: <MockedWidgetInPage: Widget: 18 in Page: 1>,
...
"""
cursor = connection.cursor()
cursor.execute(get_page_wips_query, (page.pk,))
return dict((wip.widget_id, wip) for wip in [MockedWidgetInPage(page, *row) for row in cursor.fetchall()])
def create_wil(pair, wips):
"""
    Migrates a widget ids pair (already ints!) to a WidgetInLayout dict-like object with config and state.
    If the WIP does not exist, a default config and state will be used.
It makes something like this:
["<str ct_id>", "<str id>"] + wip.config, wip.state
=>
{
'widget_ct_id': <int ct_id>,
'widget_id': <int id>,
'config': <dict config>,
'state': <state>,
}
"""
    w_ct_id, w_pk = map(int, pair)
try:
wip = wips[w_pk]
config = wip.config
state = wip.state
except KeyError, e:
config = {}
state = WidgetInLayout.STATE_NORMAL
return dict(
widget_ct_id = w_ct_id,
widget_id = w_pk,
config = config,
state = state)
def migrate_container(container, wips):
"""
Migrates each pair in given container
"""
return [create_wil(pair, wips) for pair in container]
def migrate_layout(layout, wips):
"""
Migrates layout containers to a new structure with wils (widget in layout :)
"""
migrated_layout = deepcopy(layout)
migrated_layout['static_containers'] = [migrate_container(c, wips) for c in layout['static_containers']]
migrated_layout['containers'] = [migrate_container(c, wips) for c in layout['containers']]
return migrated_layout
def migrate_page(page):
old_layout = json.deserialize(page.layout_json)
layout = migrate_layout(old_layout, get_wips(page))
    qs = Page.objects.filter(pk=page.pk, layout_migrated=False)  # do not save an already migrated page
qs.update(layout_json=json.serialize(layout), layout_migrated=True)
log.info('Page %d layout migrated.', page.pk)
page.layout = layout
page.layout_migrated = True
return page
def migrate(limit=None, loop_sleep=0):
#qs = Page.objects.filter(layout_migrated=False).only('id', 'layout_json')
qs = QuerySet(Page).filter(layout_migrated=False).only('id', 'layout_json')
if limit:
qs = qs[:limit]
for page in qs.iterator():
migrate_page(page)
time.sleep(loop_sleep)
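# Typical invocation sketch (the limit and sleep values are made up):
#
#     migrate(limit=1000, loop_sleep=0.01)  # migrate up to 1000 pages,
#                                           # pausing 10 ms between pages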
|
from mkt.fireplace.serializers import (FireplaceESAppSerializer,
FireplaceESWebsiteSerializer)
class GamesESAppSerializer(FireplaceESAppSerializer):
"""Include tags."""
class Meta(FireplaceESAppSerializer.Meta):
fields = FireplaceESAppSerializer.Meta.fields + ['tags']
exclude = FireplaceESAppSerializer.Meta.exclude
class GamesESWebsiteSerializer(FireplaceESWebsiteSerializer):
"""Include keywords."""
class Meta(FireplaceESWebsiteSerializer.Meta):
fields = FireplaceESWebsiteSerializer.Meta.fields + ['keywords']
|
from django.test import TestCase
from django.contrib.sites.models import Site
from faq.settings import DRAFTED
from faq.models import Topic, Question, OnSiteManager
class BaseTestCase(TestCase):
""""""
fixtures = ['test_data']
def setUp(self):
# Setup some new objects. We want these instead of ones from the test
# data because we will be testing for the state of something newly
# created, which the test data does not contain, obviously.
self.topics = {
'new': Site.objects.get_current().faq_topics.create(title=u'Test Topic',
slug=u'test-topic'),
'drafted': Topic.objects.get(slug='website'),
'published': Topic.objects.get(slug='shipping'),
'removed': Topic.objects.get(slug='black-market-items'),
'off_site': Topic.objects.get(slug='about-us'),
}
        self.questions = {
'new1': self.topics['new'].questions.create(question=u'Where am I?',
answer=u'That is classified.'),
'new2': self.topics['new'].questions.create(question=u'Who are you?',
answer=u'I cannot say.'),
'drafted': Question.objects.get(slug='in-what-color-box-do-you-ship'),
'published': Question.objects.get(slug='how-much-does-shipping-cost'),
'removed': Question.objects.get(slug='what-carrier-do-you-use'),
'off_site': Question.objects.get(slug='are-you-hiring'),
'pub_topic_draft': Question.objects.get(slug='do-you-have-an-sla'),
'pub_topic_removed': Question.objects.get(slug='how-do-you-acquire-black-market-items'),
}
class ManagerTestCase(BaseTestCase):
""""""
pass
class ModelsTestCase(BaseTestCase):
""""""
def testManager(self):
        # Because of our subclassing with the models, be certain that the
        # manager is wired up correctly.
self.assertTrue(isinstance(Topic.objects, OnSiteManager))
self.assertTrue(isinstance(Question.objects, OnSiteManager))
def testUnicode(self):
# Ensure that we don't absent-mindedly change what the `__unicode__()`
# method returns.
self.assertEqual(self.topics['new'].__unicode__(),
self.topics['new'].title)
self.assertEqual(self.questions['new1'].__unicode__(),
self.questions['new1'].question)
def testDefaultStatus(self):
# Items created without choosing a status should be drafted by default.
self.assertEqual(self.topics['new'].status, DRAFTED)
self.assertEqual(self.questions['new1'].status, DRAFTED)
def testSlugOnSave(self):
# Be sure we are properly creating slugs for questions that are created
# without them (those created as an inline to a topic).
self.assertEqual(self.questions['new1'].slug, u'where-am-i')
self.assertEqual(self.questions['new2'].slug, u'who-are-you')
def testOrderingOnSave(self):
# Be sure we are properly calculating and filling the ordering field
# when a user leaves it blank.
self.assertEqual(self.questions['new1'].ordering, 1)
self.assertEqual(self.questions['new2'].ordering, 2)
class ViewsBaseTestCase(BaseTestCase):
""""""
def setUp(self):
        # Call `super` first because we use some of the objects it sets up.
super(ViewsBaseTestCase, self).setUp()
# Set up some responses. We do this here because we are going to be
# testing on these responses with various methods here and in subclasses.
self.responses = {
'topic_list': self.client.get('/'),
'topic_detail': self.client.get(
self.topics['published'].get_absolute_url(), follow=True),
'question_detail': self.client.get(
self.questions['published'].get_absolute_url(), follow=True),
}
class ViewsShallowTestCase(ViewsBaseTestCase):
urls = 'faq.urls.shallow'
def testTopicDetail(self):
# Redirects to a fragment identifier on the topic list.
self.assertRedirects(self.responses['topic_detail'],
'/#shipping', status_code=301)
def testQuestionDetail(self):
# Redirects to a fragment identifier on the topic list.
self.assertRedirects(self.responses['question_detail'],
'/#how-much-does-shipping-cost', status_code=301)
class ViewsNormalTestCase(ViewsShallowTestCase):
""""""
urls = 'faq.urls.normal'
def testTopicDetail(self):
# Does not redirect.
self.assertEqual(self.responses['topic_detail'].status_code, 200)
# Check for our extra_context.
        # Must `list()` the QuerySets because two QuerySets never compare
        # equal; compare their contents instead.
self.assertEqual(list(self.responses['topic_detail'].context['question_list']),
list(self.topics['published'].questions.published()))
def testQuestionDetail(self):
# Redirects to a fragment identifier on the topic detail.
self.assertRedirects(self.responses['question_detail'],
'/shipping/#how-much-does-shipping-cost', status_code=301)
class ViewsDeepTestCase(ViewsNormalTestCase):
""""""
urls = 'faq.urls.deep'
def testQuestionDetail(self):
# Does not redirect.
self.assertEqual(self.responses['question_detail'].status_code, 200)
# Check for our extra_context.
self.assertEqual(self.responses['question_detail'].context['topic'],
self.questions['published'].topic)
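
# Note: the `urls` attribute on these TestCases uses Django's (pre-1.8)
# per-test URLconf override; that is how the shallow/normal/deep classes
# exercise three different URL schemes against the same fixtures and
# responses.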
|
from __future__ import print_function
from nose import SkipTest
from pythonic_testcase import *
from soapfish import xsd
from soapfish import wsdl2py
from soapfish.testutil import generated_symbols
class XSDCodeGenerationTest(PythonicTestCase):
def test_can_generate_code_for_two_schemas(self):
        raise SkipTest('cannot generate code for wsdl with multiple schemas')
xml = '<wsdl:definitions xmlns:wsdl="http://schemas.xmlsoap.org/wsdl/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:b="http://example.org/B">'\
' <wsdl:types>'\
' <xsd:schema elementFormDefault="qualified" targetNamespace="http://example.org/A">' \
' <xsd:import namespace="http://example.org/B"/>' \
' <xsd:element name="A" type="b:B"/>' \
' </xsd:schema>' \
' <xsd:schema elementFormDefault="qualified" targetNamespace="http://example.org/B">'\
' <xsd:element name="B" type="xsd:string"/>'\
' </xsd:schema>'\
' </wsdl:types>'\
'</wsdl:definitions>'
code_string = wsdl2py.generate_code_from_wsdl(xml, 'client')
schema, new_symbols = generated_symbols(code_string)
assert_not_none(schema)
assert_length(4, new_symbols)
assert_equals(['B', 'A'], list(schema.elements))
def test_can_generate_code_for_inheritance(self):
        raise SkipTest('cannot generate code for wsdl with type inheritance')
xml = '<wsdl:definitions xmlns:wsdl="http://schemas.xmlsoap.org/wsdl/" xmlns:xsd="http://www.w3.org/2001/XMLSchema">' \
' <wsdl:types>' \
' <xsd:schema elementFormDefault="qualified" targetNamespace="http://example.org/A">' \
' <xsd:element name="A" type="B"/>' \
' <xsd:element name="B" type="xsd:string"/>' \
' </xsd:schema>' \
' </wsdl:types>' \
'</wsdl:definitions>'
code_string = wsdl2py.generate_code_from_wsdl(xml, 'client')
schema, new_symbols = generated_symbols(code_string)
assert_not_none(schema)
assert_length(4, new_symbols)
assert_equals(['B', 'A'], list(schema.elements))
assert_isinstance(schema.elements['B']._type, xsd.String)
assert_isinstance(schema.elements['A']._type, schema.elements['B']._type.__class__)
|
import unittest
import sys
import os
import tempfile
import shutil
import textwrap
from alphatwirl.concurrently import HTCondorJobSubmitter
class MockWorkingArea(object):
    """Stand-in for a working area backed by a throwaway temp directory."""
    def open(self):
        self.path = tempfile.mkdtemp()
    def close(self):
        shutil.rmtree(self.path)
        self.path = None
    def package_path(self, package_index):
        # Packages are irrelevant for these tests; an empty path suffices.
        return ''
class MockPopen(object):
    """Fake Popen whose communicate() mimics a successful condor_submit."""
    def communicate(self, *args, **kwargs):
        self.returncode = 0
        return 'submitted to cluster 1012.', ''
class MockPIPE(object):
    pass
class MockSubprocess(object):
    """Minimal subprocess-module replacement exposing PIPE and Popen."""
    def __init__(self):
        self.PIPE = MockPIPE
    def Popen(self, *args, **kwargs):
        return MockPopen()
default_job_desc_template = """
Executable = {job_script}
output = {out}
error = {error}
log = {log}
{args}
should_transfer_files = YES
when_to_transfer_output = ON_EXIT
transfer_input_files = {input_files}
transfer_output_files = {output_files}
Universe = vanilla
notification = Error
getenv = True
queue 1
"""
default_job_desc_template = textwrap.dedent(default_job_desc_template).strip()
job_desc_template_with_extra = """
Executable = {job_script}
output = {out}
error = {error}
log = {log}
{args}
should_transfer_files = YES
when_to_transfer_output = ON_EXIT
transfer_input_files = {input_files}
transfer_output_files = {output_files}
Universe = vanilla
notification = Error
getenv = True
request_memory = 900
queue 1
"""
job_desc_template_with_extra = textwrap.dedent(job_desc_template_with_extra).strip()
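
# The two templates above differ only in the extra line just before "queue 1".
# A minimal sketch of how job_desc_extra lines are assumed to be spliced in
# (illustrative; not necessarily HTCondorJobSubmitter's actual implementation):
#
#     lines = default_job_desc_template.split('\n')
#     lines[-1:-1] = ['request_memory = 900']  # insert before "queue 1"
#     assert '\n'.join(lines) == job_desc_template_with_extra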
class TestHTCondorJobSubmitter(unittest.TestCase):
    def setUp(self):
        # Replace the module-level subprocess with the mock so that no real
        # condor_submit is ever invoked.
        self.module = sys.modules['alphatwirl.concurrently.HTCondorJobSubmitter']
        self.org_subprocess = self.module.subprocess
        self.module.subprocess = MockSubprocess()
self.cwd = os.getcwd()
self.workingArea = MockWorkingArea()
self.workingArea.open()
def tearDown(self):
self.module.subprocess = self.org_subprocess
os.chdir(self.cwd)
self.workingArea.close()
def test_init_job_desc_extra(self):
job_desc_extra = ['request_memory = 900']
obj = HTCondorJobSubmitter(job_desc_extra = job_desc_extra)
self.assertEqual(job_desc_template_with_extra, obj.job_desc_template)
def test_run(self):
obj = HTCondorJobSubmitter()
obj.run(workingArea = self.workingArea, package_index = 0)
|
"""Task results tests."""
from mock import patch
import requests
import requests.exceptions
import tests.helper
import snowfloat.task
class ResultsTests(tests.helper.Tests):
"""Task results tests."""
task = snowfloat.task.Task(
operation='test_operation_1',
uuid='test_task_1',
uri='/geo/1/tasks/test_task_1',
task_filter={'filter': 'test_task_filter_1'},
spatial={'spatial': 'test_task_spatial_1'},
extras={},
state='started',
reason='',
date_created=1,
date_modified=2)
@patch.object(requests, 'get')
def test_get_results(self, get_mock):
"""Get task results."""
get_mock.__name__ = 'get'
self.get_results_helper(get_mock, self.task.get_results)
|
from flask import Blueprint, render_template
touch_panels = Blueprint('touch_panels', __name__, url_prefix='/products/touch_panels')
@touch_panels.route('/')
def touch_panels_landing():
return render_template('/touch_panels/touch_panels_landing_en.html', title='Touch Panels | OPTO Logic TECHNOLOGY')
@touch_panels.route('/pcap')
def touch_panels_pcap():
return render_template('/touch_panels/touch_panels_pcap_en.html', title='Touch Panels PCAP | OPTO Logic TECHNOLOGY')
@touch_panels.route('/rtp')
def touch_panels_rtp():
return render_template('/touch_panels/touch_panels_rtp_en.html', title='Touch Panels RTP | OPTO Logic TECHNOLOGY')
'''@touch_panels.route('/<int:touch_panels_id>/touch_panels_details')
def touch_panels_details(touch_panels_id):
return render_template('/touch_panels/touch_panels_details.html',
title='Touch Panels Details | OPTO Logic TECHNOLOGY', lcd_id=touch_panels_id)'''
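
# Minimal sketch of mounting this blueprint on an application (the Flask app
# object below is assumed, not defined in this module):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.register_blueprint(touch_panels)  # serves /products/touch_panels/...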
|
"""
Vumi scalable text messaging engine.
"""
__version__ = "0.5.20a0"
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # Flag to indicate whether this migration is too risky to run online
    # and instead needs to be coordinated as an offline run.
    is_dangerous = False
def forwards(self, orm):
# Adding model 'GroupLink'
db.create_table('sentry_grouplink', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('group_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')()),
('project_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')(db_index=True)),
('linked_type', self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(default=1)),
('linked_id', self.gf('sentry.db.models.fields.bounded.BoundedBigIntegerField')()),
('relationship', self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(default=2)),
('data', self.gf('jsonfield.fields.JSONField')(default={})),
('datetime', self.gf('django.db.models.fields.DateTimeField')(
default=datetime.datetime.now, db_index=True)),
))
db.send_create_signal('sentry', ['GroupLink'])
# Adding unique constraint on 'GroupLink', fields ['group_id', 'linked_type', 'linked_id']
db.create_unique('sentry_grouplink', ['group_id', 'linked_type', 'linked_id'])
def backwards(self, orm):
# Removing unique constraint on 'GroupLink', fields ['group_id', 'linked_type', 'linked_id']
db.delete_unique('sentry_grouplink', ['group_id', 'linked_type', 'linked_id'])
# Deleting model 'GroupLink'
db.delete_table('sentry_grouplink')
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apiapplication': {
'Meta': {'object_name': 'ApiApplication'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'client_id': ('django.db.models.fields.CharField', [], {'default': "'66393db7f22742c8904d4520b880c8659051f876549a403ea057073954162e0d'", 'unique': 'True', 'max_length': '64'}),
'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'30f38707d777450eaba30e613a3b7d270b44be90005347cdb41ae851f7a71b9c'"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Nearby Chimp'", 'max_length': '64', 'blank': 'True'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.apiauthorization': {
'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apigrant': {
'Meta': {'object_name': 'ApiGrant'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}),
'code': ('django.db.models.fields.CharField', [], {'default': "'6f0c01c81bea4c1f8f1d3ab380740c57'", 'max_length': '64', 'db_index': 'True'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 11, 1, 0, 0)', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.apitoken': {
'Meta': {'object_name': 'ApiToken'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 12, 1, 0, 0)', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'refresh_token': ('django.db.models.fields.CharField', [], {'default': "'7aefd6ee130543a2bab71166859e8dae5a076ab65db14275a4a1d6b02ce9eb29'", 'max_length': '64', 'unique': 'True', 'null': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'token': ('django.db.models.fields.CharField', [], {'default': "'8fcc1174491e4d20b11bc8a777e8aff92770c66c2c7e427d898bbf04dd1427d1'", 'unique': 'True', 'max_length': '64'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authenticator': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'"},
'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'last_used_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 11, 8, 0, 0)', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.commit': {
'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)"},
'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.commitauthor': {
'Meta': {'unique_together': "(('organization_id', 'email'), ('organization_id', 'external_id'))", 'object_name': 'CommitAuthor'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '164', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.commitfilechange': {
'Meta': {'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
},
'sentry.counter': {
'Meta': {'object_name': 'Counter', 'db_table': "'sentry_projectcounter'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.deploy': {
'Meta': {'object_name': 'Deploy'},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'notified': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'sentry.distribution': {
'Meta': {'unique_together': "(('release', 'name'),)", 'object_name': 'Distribution'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.dsymapp': {
'Meta': {'unique_together': "(('project', 'platform', 'app_id'),)", 'object_name': 'DSymApp'},
'app_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'sync_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'})
},
'sentry.email': {
'Meta': {'object_name': 'Email'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('sentry.db.models.fields.citext.CIEmailField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.environment': {
'Meta': {'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))", 'object_name': 'Environment'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False'})
},
'sentry.environmentproject': {
'Meta': {'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject'},
'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventprocessingissue': {
'Meta': {'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'processing_issue': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProcessingIssue']"}),
'raw_event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.RawEvent']"})
},
'sentry.eventtag': {
'Meta': {'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project_id', 'ident'), ('project_id', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project_id', 'email'), ('project_id', 'username'), ('project_id', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.featureadoption': {
'Meta': {'unique_together': "(('organization', 'feature_id'),)", 'object_name': 'FeatureAdoption'},
'applicable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'feature_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}),
'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.fileblob': {
'Meta': {'object_name': 'FileBlob'},
'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
},
'sentry.fileblobindex': {
'Meta': {'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex'},
'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupcommitresolution': {
'Meta': {'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution'},
'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'group_tombstone_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'state': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.grouplink': {
'Meta': {'unique_together': "(('group_id', 'linked_type', 'linked_id'),)", 'object_name': 'GroupLink'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'linked_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
'linked_type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'relationship': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '2'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupredirect': {
'Meta': {'object_name': 'GroupRedirect'},
'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'unique': 'True'})
},
'sentry.grouprelease': {
'Meta': {'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease'},
'environment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.groupresolution': {
'Meta': {'object_name': 'GroupResolution'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.groupshare': {
'Meta': {'object_name': 'GroupShare'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "'c005bf6f8a0943a7a74c38e355ed6c94'", 'unique': 'True', 'max_length': '32'})
},
'sentry.groupsnooze': {
'Meta': {'object_name': 'GroupSnooze'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'state': ('jsonfield.fields.JSONField', [], {'null': 'True'}),
'until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'user_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.groupsubscription': {
'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Project']"}),
'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project_id', 'group_id', 'key'),)", 'object_name': 'GroupTagKey'},
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('group_id', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.grouptombstone': {
'Meta': {'object_name': 'GroupTombstone'},
'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'previous_group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'unique': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.identity': {
'Meta': {'unique_together': "(('idp', 'external_id'),)", 'object_name': 'Identity'},
'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'idp': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.IdentityProvider']"}),
'scopes': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.identityprovider': {
'Meta': {'unique_together': "(('type', 'instance'),)", 'object_name': 'IdentityProvider'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.integration': {
'Meta': {'unique_together': "(('provider', 'external_id'),)", 'object_name': 'Integration'},
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'metadata': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationIntegration']", 'to': "orm['sentry.Organization']"}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectIntegration']", 'to': "orm['sentry.Project']"}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.minidumpfile': {
'Meta': {'object_name': 'MinidumpFile'},
'event_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationavatar': {
'Meta': {'object_name': 'OrganizationAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']"})
},
'sentry.organizationintegration': {
'Meta': {'unique_together': "(('organization', 'integration'),)", 'object_name': 'OrganizationIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'default_auth_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationonboardingtask': {
'Meta': {'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.processingissue': {
'Meta': {'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}),
'forced_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectTeam']", 'to': "orm['sentry.Team']"})
},
'sentry.projectbookmark': {
'Meta': {'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.projectdsymfile': {
'Meta': {'unique_together': "(('project', 'uuid'),)", 'object_name': 'ProjectDSymFile'},
'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'sentry.projectintegration': {
'Meta': {'unique_together': "(('project', 'integration'),)", 'object_name': 'ProjectIntegration'},
'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'rate_limit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'rate_limit_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.projectplatform': {
'Meta': {'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.projectsymcachefile': {
'Meta': {'unique_together': "(('project', 'dsym_file'),)", 'object_name': 'ProjectSymCacheFile'},
'cache_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.projectteam': {
'Meta': {'unique_together': "(('project', 'team'),)", 'object_name': 'ProjectTeam'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.rawevent': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent'},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.release': {
'Meta': {'unique_together': "(('organization', 'version'),)", 'object_name': 'Release'},
'authors': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'commit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'last_deploy_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'total_deploys': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasecommit': {
'Meta': {'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseenvironment': {
'Meta': {'unique_together': "(('organization_id', 'release_id', 'environment_id'),)", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'"},
'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'dist': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Distribution']", 'null': 'True'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.releaseheadcommit': {
'Meta': {'unique_together': "(('repository_id', 'release'),)", 'object_name': 'ReleaseHeadCommit'},
'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
},
'sentry.releaseproject': {
'Meta': {'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.repository': {
'Meta': {'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.reprocessingreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport'},
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.savedsearchuserdefault': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.scheduleddeletion': {
'Meta': {'unique_together': "(('app_label', 'model_name', 'object_id'),)", 'object_name': 'ScheduledDeletion'},
'aborted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2017, 12, 1, 0, 0)'}),
'guid': ('django.db.models.fields.CharField', [], {'default': "'2ef08d0c28f14ea38696bececf7ae2da'", 'unique': 'True', 'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'model_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
},
'sentry.scheduledjob': {
'Meta': {'object_name': 'ScheduledJob'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_scheduled': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'payload': ('jsonfield.fields.JSONField', [], {'default': '{}'})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project_id', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project_id', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'", 'index_together': "(('project_id', 'key', 'last_seen'),)"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_password_expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_password_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'session_nonce': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useravatar': {
'Meta': {'object_name': 'UserAvatar'},
'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']"})
},
'sentry.useremail': {
'Meta': {'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail'},
'date_hash_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'emails'", 'to': "orm['sentry.User']"}),
'validation_hash': ('django.db.models.fields.CharField', [], {'default': "u'iu265vtroF4OwiKNLreMPtpCj6jI6Kka'", 'max_length': '32'})
},
'sentry.useridentity': {
'Meta': {'unique_together': "(('user', 'identity'),)", 'object_name': 'UserIdentity'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'identity': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Identity']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.userip': {
'Meta': {'unique_together': "(('user', 'ip_address'),)", 'object_name': 'UserIP'},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'event_user_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.versiondsymfile': {
'Meta': {'unique_together': "(('dsym_file', 'version', 'build'),)", 'object_name': 'VersionDSymFile'},
'build': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dsym_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymApp']"}),
'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '32'})
}
}
complete_apps = ['sentry']
|
import os
import io
import unittest
import json
from . import bodystructure
from .fhirdate import FHIRDate
class BodyStructureTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("BodyStructure", js["resourceType"])
return bodystructure.BodyStructure(js)
def testBodyStructure1(self):
inst = self.instantiate_from("bodystructure-example-fetus.json")
self.assertIsNotNone(inst, "Must have instantiated a BodyStructure instance")
self.implBodyStructure1(inst)
js = inst.as_json()
self.assertEqual("BodyStructure", js["resourceType"])
inst2 = bodystructure.BodyStructure(js)
self.implBodyStructure1(inst2)
def implBodyStructure1(self, inst):
self.assertEqual(inst.description, "EDD 1/1/2017 confirmation by LMP")
self.assertEqual(inst.id, "fetus")
self.assertEqual(inst.identifier[0].system, "http://goodhealth.org/bodystructure/identifiers")
self.assertEqual(inst.identifier[0].value, "12345")
self.assertEqual(inst.location.coding[0].code, "83418008")
self.assertEqual(inst.location.coding[0].display, "Entire fetus (body structure)")
self.assertEqual(inst.location.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.location.text, "Fetus")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.text.status, "generated")
def testBodyStructure2(self):
inst = self.instantiate_from("bodystructure-example-tumor.json")
self.assertIsNotNone(inst, "Must have instantiated a BodyStructure instance")
self.implBodyStructure2(inst)
js = inst.as_json()
self.assertEqual("BodyStructure", js["resourceType"])
inst2 = bodystructure.BodyStructure(js)
self.implBodyStructure2(inst2)
def implBodyStructure2(self, inst):
self.assertEqual(inst.description, "7 cm maximum diameter")
self.assertEqual(inst.id, "tumor")
self.assertEqual(inst.identifier[0].system, "http://goodhealth.org/bodystructure/identifiers")
self.assertEqual(inst.identifier[0].value, "12345")
self.assertEqual(inst.image[0].contentType, "application/dicom")
self.assertEqual(inst.image[0].url, "http://imaging.acme.com/wado/server?requestType=WADO&wado_details")
self.assertEqual(inst.location.coding[0].code, "78961009")
self.assertEqual(inst.location.coding[0].display, "Splenic structure (body structure)")
self.assertEqual(inst.location.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.location.text, "Spleen")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.morphology.coding[0].code, "4147007")
self.assertEqual(inst.morphology.coding[0].display, "Mass (morphologic abnormality)")
self.assertEqual(inst.morphology.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.morphology.text, "Splenic mass")
self.assertEqual(inst.text.status, "generated")
def testBodyStructure3(self):
inst = self.instantiate_from("bodystructure-example-skin-patch.json")
self.assertIsNotNone(inst, "Must have instantiated a BodyStructure instance")
self.implBodyStructure3(inst)
js = inst.as_json()
self.assertEqual("BodyStructure", js["resourceType"])
inst2 = bodystructure.BodyStructure(js)
self.implBodyStructure3(inst2)
def implBodyStructure3(self, inst):
self.assertFalse(inst.active)
self.assertEqual(inst.description, "inner surface (volar) of the left forearm")
self.assertEqual(inst.id, "skin-patch")
self.assertEqual(inst.identifier[0].system, "http://goodhealth.org/bodystructure/identifiers")
self.assertEqual(inst.identifier[0].value, "12345")
self.assertEqual(inst.location.coding[0].code, "14975008")
self.assertEqual(inst.location.coding[0].display, "Forearm")
self.assertEqual(inst.location.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.location.text, "Forearm")
self.assertEqual(inst.locationQualifier[0].coding[0].code, "419161000")
self.assertEqual(inst.locationQualifier[0].coding[0].display, "Unilateral left")
self.assertEqual(inst.locationQualifier[0].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.locationQualifier[0].text, "Left")
self.assertEqual(inst.locationQualifier[1].coding[0].code, "263929005")
self.assertEqual(inst.locationQualifier[1].coding[0].display, "Volar")
self.assertEqual(inst.locationQualifier[1].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.locationQualifier[1].text, "Volar")
self.assertEqual(inst.meta.tag[0].code, "HTEST")
self.assertEqual(inst.meta.tag[0].display, "test health data")
self.assertEqual(inst.meta.tag[0].system, "http://terminology.hl7.org/CodeSystem/v3-ActReason")
self.assertEqual(inst.morphology.text, "Skin patch")
self.assertEqual(inst.text.status, "generated")
|
import math
from .optimizer import Optimizer
class Adam(Optimizer):
"""Implements Adam algorithm.
It has been proposed in `Adam: A Method for Stochastic Optimization`_.
Arguments:
params (iterable): iterable of parameters to optimize or dicts defining
parameter groups
lr (float, optional): learning rate (default: 1e-3)
betas (Tuple[float, float], optional): coefficients used for computing
running averages of gradient and its square (default: (0.9, 0.999))
eps (float, optional): term added to the denominator to improve
numerical stability (default: 1e-8)
weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
.. _Adam\: A Method for Stochastic Optimization:
https://arxiv.org/abs/1412.6980
"""
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8,
weight_decay=0):
defaults = dict(lr=lr, betas=betas, eps=eps,
weight_decay=weight_decay)
super(Adam, self).__init__(params, defaults)
def step(self, closure=None):
"""Performs a single optimization step.
Arguments:
closure (callable, optional): A closure that reevaluates the model
and returns the loss.
"""
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
for p in group['params']:
if p.grad is None:
continue
grad = p.grad.data
state = self.state[p]
# State initialization
if len(state) == 0:
state['step'] = 0
# Exponential moving average of gradient values
state['exp_avg'] = grad.new().resize_as_(grad).zero_()
# Exponential moving average of squared gradient values
state['exp_avg_sq'] = grad.new().resize_as_(grad).zero_()
exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
beta1, beta2 = group['betas']
state['step'] += 1
if group['weight_decay'] != 0:
grad = grad.add(group['weight_decay'], p.data)
# Decay the first and second moment running average coefficient
exp_avg.mul_(beta1).add_(1 - beta1, grad)
exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
denom = exp_avg_sq.sqrt().add_(group['eps'])
bias_correction1 = 1 - beta1 ** state['step']
bias_correction2 = 1 - beta2 ** state['step']
step_size = group['lr'] * math.sqrt(bias_correction2) / bias_correction1
p.data.addcdiv_(-step_size, exp_avg, denom)
return loss
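# --- Usage sketch (illustrative, not part of the optimizer itself) ---
# Assumes torch is installed and this module lives under torch.optim:
#     import torch
#     from torch.optim import Adam
#     model = torch.nn.Linear(10, 1)
#     optimizer = Adam(model.parameters(), lr=1e-3)
#     loss = model(torch.randn(4, 10)).sum()
#     optimizer.zero_grad()
#     loss.backward()
#     optimizer.step()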
|
from django import forms
from ietf.iesg.models import TelechatAgendaItem
from ietf.doc.models import State
from ietf.name.models import BallotPositionName, DocTagName
TELECHAT_TAGS = ('point','ad-f-up','extpty','need-rev')
class BallotForm(forms.Form):
name = forms.CharField(max_length=50,widget=forms.HiddenInput)
id = forms.IntegerField(widget=forms.HiddenInput)
position = forms.ModelChoiceField(queryset=BallotPositionName.objects.exclude(slug='block').order_by('order'), widget=forms.RadioSelect, initial="norecord", required=True)
class ChangeStateForm(forms.Form):
    '''
    This form needs to handle documents of different types (draft and conflrev for now).
    Start with all document states in the state ModelChoiceField queryset; on init,
    restrict the queryset to states of the same type as the initial doc state.
    '''
state = forms.ModelChoiceField(State.objects.all(), empty_label=None, required=True)
    substate = forms.ModelChoiceField(DocTagName.objects.filter(slug__in=TELECHAT_TAGS), required=False)
def __init__(self,*args,**kwargs):
super(ChangeStateForm, self).__init__(*args,**kwargs)
state = State.objects.get(id=self.initial['state'])
self.fields['state'].queryset = State.objects.filter(type=state.type)
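# Usage sketch (illustrative; the pk is hypothetical and assumes a matching
# State row exists):
#     form = ChangeStateForm(initial={'state': 1})
#     # form.fields['state'].queryset now contains only states of the same
#     # type as State.objects.get(id=1).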
class DateSelectForm(forms.Form):
date = forms.ChoiceField()
def __init__(self,*args,**kwargs):
choices = kwargs.pop('choices')
super(DateSelectForm, self).__init__(*args,**kwargs)
self.fields['date'].widget.choices = choices
class IssueModelForm(forms.ModelForm):
class Meta:
model = TelechatAgendaItem
fields = '__all__'
|
from django.db import models
from django.test import TestCase
from principals.fields import PrincipalField
class SinglePrinc(models.Model):
name = models.CharField(max_length=255)
principal = PrincipalField(default=False)
class MultiplePrinc(models.Model):
name = models.CharField(max_length=255)
principal_1 = PrincipalField()
principal_2 = PrincipalField()
principal_3 = PrincipalField()
class Director(models.Model):
name = models.CharField(max_length=255)
class Movie(models.Model):
title = models.CharField(max_length=255)
director = models.ForeignKey(Director, related_name='movies')
genre = models.CharField(max_length=255)
principal = PrincipalField(collection=('director',))
principal_in_genre = PrincipalField(collection=('director', 'genre'))
class PrincipalFieldTest(TestCase):
def test_principal_item(self):
a = SinglePrinc.objects.create(name="a", principal=False)
b = SinglePrinc.objects.create(name="b", principal=False)
c = SinglePrinc.objects.create(name="c", principal=False)
self.assertEqual( SinglePrinc.objects.filter(principal=True).count(), 0, "No item is principal by default" )
a.principal = True
a.save()
self.assertEqual( SinglePrinc.objects.get(principal=True).name, 'a', "Can set principal=True" )
b.principal = True
b.save()
self.assertEqual( SinglePrinc.objects.filter(principal=True).count(), 1, "Only one principal item at a time" )
self.assertEqual( SinglePrinc.objects.get(principal=True).name, 'b', "Principal item is now 'b'" )
def test_multiple_principal_fields(self):
# Create 3 new objects
a = MultiplePrinc.objects.create(name='a')
b = MultiplePrinc.objects.create(name='b')
c = MultiplePrinc.objects.create(name='c')
# define them as principal and save them
a.principal_1 = True
a.save()
b.principal_2 = True
b.save()
c.principal_3 = True
c.save()
self.assertEqual( MultiplePrinc.objects.get(principal_1=True).name, 'a', "Can set principal_1 correctly" )
self.assertEqual( MultiplePrinc.objects.get(principal_2=True).name, 'b', "Can set principal_2 correctly" )
self.assertEqual( MultiplePrinc.objects.get(principal_3=True).name, 'c', "Can set principal_3 correctly" )
# Get new instances, and update them
a = MultiplePrinc.objects.get(name='a')
a.principal_3 = True
a.save()
c = MultiplePrinc.objects.get(name='c')
c.principal_2 = True
c.save()
# Things just changed back there, so get new instances again
a = MultiplePrinc.objects.get(name='a')
c = MultiplePrinc.objects.get(name='c')
self.assertTrue(a.principal_1, "'a' is still principal 1")
self.assertTrue(a.principal_3, "'a' is now principal 3")
self.assertTrue(c.principal_2, "'c' is now principal 2")
self.assertTrue(not c.principal_3, "'c' is no longer principal 3")
def test_collection(self):
spielberg = Director.objects.create(name='Steven Spielberg')
gilliam = Director.objects.create(name='Terry Gilliam')
s1 = spielberg.movies.create(title="Jaws", genre="Thriller")
s2 = spielberg.movies.create(title="Lincoln", genre="Drama")
s3 = spielberg.movies.create(title="Minority Report", genre="Sci-Fi")
s4 = spielberg.movies.create(title="War of The Worlds", genre="Sci-Fi")
g1 = gilliam.movies.create(title="Time Bandits", genre="Fantasy")
g2 = gilliam.movies.create(title="12 Monkeys", genre="Sci-Fi")
g3 = gilliam.movies.create(title="Fear & Loathing in Las Vegas", genre="Drama")
g4 = gilliam.movies.create(title="Life of Brian", genre="Comedy")
        g5 = gilliam.movies.create(title="Monty Python's The Meaning of Life", genre="Comedy")
s1.principal = True
s1.principal_in_genre = True
s1.save()
s4.principal_in_genre = True
s4.save()
g1.principal_in_genre = True
g1.save()
g5.principal = True
g5.principal_in_genre = True
g5.save()
self.assertEqual( Movie.objects.filter(principal_in_genre=True).count(), 4, "Four movies are principal in their respective director/genre set" )
self.assertEqual( Movie.objects.filter(principal=True).count(), 2, "Two movies are principal in their respective director set")
s4.principal = True
s4.save()
self.assertEqual( Movie.objects.filter(principal_in_genre=True).count(), 4, "Four movies are principal in their respective director/genre set" )
self.assertEqual( Movie.objects.filter(principal=True).count(), 2, "Two movies are principal in their respective director set" )
spielbergs_best_movie = Movie.objects.get( director=spielberg, principal=True )
self.assertEqual(spielbergs_best_movie, s4, "Can update principal item with collection" )
gilliams_best_movie = Movie.objects.get( director=gilliam, principal=True )
self.assertEqual(gilliams_best_movie, g5, "Can update principal item with collection without changing others" )
gilliams_best_comedy = Movie.objects.get( director=gilliam, genre="Comedy", principal_in_genre=True )
        self.assertEqual(gilliams_best_comedy, g5, "Other principal fields are not affected" )
g4.principal_in_genre = True
g4.save()
gilliams_best_comedy = Movie.objects.get( director=gilliam, genre="Comedy", principal_in_genre=True )
self.assertEqual(gilliams_best_comedy, g4, "Can change complex collection properly" )
|
import os
from setuptools import find_packages, setup
def read(fname):
    with open(os.path.join(os.path.dirname(__file__), fname), encoding="utf-8") as handle:
        return handle.read()
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith("__version__"):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
raise RuntimeError("Unable to find version string.")
setup(
name="permuta",
version=get_version("permuta/__init__.py"),
author="Permuta Triangle",
author_email="permutatriangle@gmail.com",
description="A comprehensive high performance permutation library.",
license="BSD-3",
    keywords=(
        "permutation perm mesh pattern patt avoid contain occurrence statistic"
    ),
url="https://github.com/PermutaTriangle/Permuta",
project_urls={
"Source": "https://github.com/PermutaTriangle/Permuta",
"Tracker": "https://github.com/PermutaTriangle/Permuta/issues",
},
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
package_data={"permuta": ["py.typed"]},
long_description=read("README.rst"),
python_requires=">=3.7",
include_package_data=True,
zip_safe=False,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Education",
"Topic :: Scientific/Engineering :: Mathematics",
],
install_requires=["automata-lib==5.0.0"],
entry_points={"console_scripts": ["permtools=permuta.cli:main"]},
)
|
"""The app module, containing the app factory function."""
from flask import Flask, render_template
from lego_me import public, user
from lego_me.assets import assets
from lego_me.extensions import bcrypt, cache, db, debug_toolbar, login_manager, migrate
from lego_me.settings import ProdConfig
def create_app(config_object=ProdConfig):
"""An application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
:param config_object: The configuration object to use.
"""
app = Flask(__name__)
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
assets.init_app(app)
bcrypt.init_app(app)
cache.init_app(app)
db.init_app(app)
login_manager.init_app(app)
debug_toolbar.init_app(app)
migrate.init_app(app, db)
return None
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(user.views.blueprint)
return None
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
# If a HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
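# Usage sketch (assumes this module is importable as lego_me.app and that
# lego_me.settings also defines a DevConfig):
#     from lego_me.app import create_app
#     from lego_me.settings import DevConfig
#     app = create_app(DevConfig)
#     app.run(port=5000)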
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 12, transform = "Logit", sigma = 0.0, exog_count = 20, ar_order = 0);
|
from graphserver.core import Graph, TripBoard, HeadwayBoard, HeadwayAlight, Crossing, TripAlight, Timezone, Street, Link, ElapseTime
from optparse import OptionParser
from graphserver.graphdb import GraphDatabase
from graphserver.ext.gtfs.gtfsdb import GTFSDatabase, parse_gtfs_date
import sys
import pytz
from tools import service_calendar_from_timezone
import datetime
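# Yield consecutive pairs of a sequence: cons([1, 2, 3]) -> (1, 2), (2, 3).
# Used below to pair adjacent stop_time bundles/stops along a trip.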
def cons(ary):
for i in range(len(ary)-1):
yield (ary[i], ary[i+1])
class GTFSGraphCompiler:
def __init__(self, gtfsdb, agency_namespace, agency_id=None, reporter=None):
self.gtfsdb = gtfsdb
self.agency_namespace = agency_namespace
self.reporter = reporter
# get graphserver.core.Timezone and graphserver.core.ServiceCalendars from gtfsdb for agency with given agency_id
timezone_name = gtfsdb.agency_timezone_name(agency_id)
self.tz = Timezone.generate( timezone_name )
if reporter: reporter.write( "constructing service calendar for timezone '%s'\n"%timezone_name )
self.sc = service_calendar_from_timezone(gtfsdb, timezone_name )
def bundle_to_boardalight_edges(self, bundle, service_id):
"""takes a bundle and yields a bunch of edges"""
stop_time_bundles = bundle.stop_time_bundles(service_id)
n_trips = len(bundle.trip_ids)
        # If there are fewer than two stations in this trip bundle, it doesn't actually span two places
if len(stop_time_bundles)<2:
return
# If there are no stop_times in a bundle on this service day, there is nothing to load
if n_trips==0:
return
if self.reporter: self.reporter.write( "inserting %d trips with %d stop_time bundles on service_id '%s'\n"%(len(stop_time_bundles[0]),len(stop_time_bundles),service_id) )
#add board edges
for i, stop_time_bundle in enumerate(stop_time_bundles[:-1]):
trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled = stop_time_bundle[0]
if arrival_time != departure_time:
patternstop_vx_name = "psv-%s-%03d-%03d-%s-depart"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
# construct the board/alight/dwell triangle for this patternstop
patternstop_arrival_vx_name = "psv-%s-%03d-%03d-%s-arrive"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
dwell_crossing = Crossing()
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stop_time_bundle:
dwell_crossing.add_crossing_time( trip_id, departure_time-arrival_time )
yield (patternstop_arrival_vx_name,
patternstop_vx_name,
dwell_crossing)
else:
patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
b = TripBoard(service_id, self.sc, self.tz, 0)
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stop_time_bundle:
b.add_boarding( trip_id, departure_time, stop_sequence )
yield ( "sta-%s"%stop_id, patternstop_vx_name, b )
#add alight edges
for i, stop_time_bundle in enumerate(stop_time_bundles[1:]):
trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled = stop_time_bundle[0]
if arrival_time != departure_time:
patternstop_vx_name = "psv-%s-%03d-%03d-%s-arrive"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
else:
patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
al = TripAlight(service_id, self.sc, self.tz, 0)
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stop_time_bundle:
al.add_alighting( trip_id.encode('ascii'), arrival_time, stop_sequence )
yield ( patternstop_vx_name, "sta-%s"%stop_id, al )
# add crossing edges
for i, (from_stop_time_bundle, to_stop_time_bundle) in enumerate(cons(stop_time_bundles)):
trip_id, from_arrival_time, from_departure_time, stop_id, stop_sequence, stop_dist_traveled = from_stop_time_bundle[0]
trip_id, to_arrival_time, to_departure_time, stop_id, stop_sequence, stop_dist_traveled = to_stop_time_bundle[0]
if from_arrival_time!=from_departure_time:
from_patternstop_vx_name = "psv-%s-%03d-%03d-%s-depart"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
else:
from_patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i,service_id)
if to_arrival_time!=to_departure_time:
to_patternstop_vx_name = "psv-%s-%03d-%03d-%s-arrive"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
else:
to_patternstop_vx_name = "psv-%s-%03d-%03d-%s"%(self.agency_namespace,bundle.pattern.pattern_id,i+1,service_id)
crossing = Crossing()
for i in range( len( from_stop_time_bundle ) ):
trip_id, from_arrival_time, from_departure_time, stop_id, stop_sequence, stop_dist_traveled = from_stop_time_bundle[i]
trip_id, to_arrival_time, to_departure_time, stop_id, stop_sequence, stop_dist_traveled = to_stop_time_bundle[i]
crossing.add_crossing_time( trip_id, (to_arrival_time-from_departure_time) )
yield ( from_patternstop_vx_name,
to_patternstop_vx_name,
crossing )
def gtfsdb_to_scheduled_edges(self, maxtrips=None, service_ids=None):
# compile trip bundles from gtfsdb
if self.reporter: self.reporter.write( "Compiling trip bundles...\n" )
bundles = self.gtfsdb.compile_trip_bundles(maxtrips=maxtrips, reporter=self.reporter)
# load bundles to graph
if self.reporter: self.reporter.write( "Loading trip bundles into graph...\n" )
n_bundles = len(bundles)
for i, bundle in enumerate(bundles):
if self.reporter: self.reporter.write( "%d/%d loading %s\n"%(i+1, n_bundles, bundle) )
for service_id in [x.encode("ascii") for x in self.gtfsdb.service_ids()]:
if service_ids is not None and service_id not in service_ids:
continue
for fromv_label, tov_label, edge in self.bundle_to_boardalight_edges(bundle, service_id):
yield fromv_label, tov_label, edge
def gtfsdb_to_headway_edges( self, maxtrips=None ):
# load headways
if self.reporter: self.reporter.write( "Loading headways trips to graph...\n" )
for trip_id, start_time, end_time, headway_secs in self.gtfsdb.execute( "SELECT * FROM frequencies" ):
service_id = list(self.gtfsdb.execute( "SELECT service_id FROM trips WHERE trip_id=?", (trip_id,) ))[0][0]
service_id = service_id.encode('utf-8')
hb = HeadwayBoard( service_id, self.sc, self.tz, 0, trip_id.encode('utf-8'), start_time, end_time, headway_secs )
ha = HeadwayAlight( service_id, self.sc, self.tz, 0, trip_id.encode('utf-8'), start_time, end_time, headway_secs )
stoptimes = list(self.gtfsdb.execute( "SELECT * FROM stop_times WHERE trip_id=? ORDER BY stop_sequence", (trip_id,)) )
#add board edges
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stoptimes[:-1]:
yield ( "sta-%s"%stop_id, "hwv-%s-%s-%s"%(self.agency_namespace,stop_id, trip_id), hb )
#add alight edges
for trip_id, arrival_time, departure_time, stop_id, stop_sequence, stop_dist_traveled in stoptimes[1:]:
yield ( "hwv-%s-%s-%s"%(self.agency_namespace,stop_id, trip_id), "sta-%s"%stop_id, ha )
#add crossing edges
for (trip_id1, arrival_time1, departure_time1, stop_id1, stop_sequence1, stop_dist_traveled1), (trip_id2, arrival_time2, departure_time2, stop_id2, stop_sequence2,stop_dist_traveled2) in cons(stoptimes):
cr = Crossing()
cr.add_crossing_time( trip_id1, (arrival_time2-departure_time1) )
yield ( "hwv-%s-%s-%s"%(self.agency_namespace,stop_id1, trip_id1), "hwv-%s-%s-%s"%(self.agency_namespace,stop_id2, trip_id2), cr )
def gtfsdb_to_transfer_edges( self ):
# load transfers
if self.reporter: self.reporter.write( "Loading transfers to graph...\n" )
# keep track to avoid redundancies
# this assumes that transfer relationships are bi-directional.
# TODO this implementation is also incomplete - it's theoretically possible that
# a transfers.txt table could contain "A,A,3,", which would mean you can't transfer
# at A.
seen = set([])
for stop_id1, stop_id2, conn_type, min_transfer_time in self.gtfsdb.execute( "SELECT * FROM transfers" ):
s1 = "sta-%s"%stop_id1
s2 = "sta-%s"%stop_id2
# TODO - what is the semantics of this? see note above
if s1 == s2:
continue
key = ".".join(sorted([s1,s2]))
if key not in seen:
seen.add(key)
else:
continue
            assert conn_type is None or isinstance(conn_type, int)
if conn_type in (0, None): # This is a recommended transfer point between two routes
if min_transfer_time in ("", None):
yield (s1, s2, Link())
yield (s2, s1, Link())
else:
yield (s1, s2, ElapseTime(int(min_transfer_time)))
yield (s2, s1, ElapseTime(int(min_transfer_time)))
elif conn_type == 1: # This is a timed transfer point between two routes
yield (s1, s2, Link())
yield (s2, s1, Link())
elif conn_type == 2: # This transfer requires a minimum amount of time
yield (s1, s2, ElapseTime(int(min_transfer_time)))
yield (s2, s1, ElapseTime(int(min_transfer_time)))
elif conn_type == 3: # Transfers are not possible between routes at this location.
print "WARNING: Support for no-transfer (transfers.txt transfer_type=3) not implemented."
def gtfsdb_to_edges( self, maxtrips=None, service_ids=None ):
for edge_tuple in self.gtfsdb_to_scheduled_edges(maxtrips, service_ids=service_ids):
yield edge_tuple
for edge_tuple in self.gtfsdb_to_headway_edges(maxtrips):
yield edge_tuple
for edge_tuple in self.gtfsdb_to_transfer_edges():
yield edge_tuple
def gdb_load_gtfsdb(gdb, agency_namespace, gtfsdb, cursor, agency_id=None, maxtrips=None, sample_date=None, reporter=sys.stdout):
# determine which service periods run on the given day, if a day is given
if sample_date is not None:
sample_date = datetime.date( *parse_gtfs_date( sample_date ) )
acceptable_service_ids = gtfsdb.service_periods( sample_date )
print "Importing only service periods operating on %s: %s"%(sample_date, acceptable_service_ids)
else:
acceptable_service_ids = None
compiler = GTFSGraphCompiler( gtfsdb, agency_namespace, agency_id, reporter )
v_added = set([])
for fromv_label, tov_label, edge in compiler.gtfsdb_to_edges( maxtrips, service_ids=acceptable_service_ids ):
if fromv_label not in v_added:
gdb.add_vertex( fromv_label, cursor )
v_added.add(fromv_label)
if tov_label not in v_added:
gdb.add_vertex( tov_label, cursor )
v_added.add(tov_label)
gdb.add_edge( fromv_label, tov_label, edge, cursor )
def graph_load_gtfsdb( agency_namespace, gtfsdb, agency_id=None, maxtrips=None, reporter=sys.stdout ):
compiler = GTFSGraphCompiler( gtfsdb, agency_namespace, agency_id, reporter )
gg = Graph()
for fromv_label, tov_label, edge in compiler.gtfsdb_to_edges( maxtrips ):
gg.add_vertex( fromv_label )
gg.add_vertex( tov_label )
gg.add_edge( fromv_label, tov_label, edge )
return gg
def main():
usage = """usage: python gdb_import_gtfs.py [options] <graphdb_filename> <gtfsdb_filename> [<agency_id>]"""
parser = OptionParser(usage=usage)
parser.add_option("-n", "--namespace", dest="namespace", default="0",
help="agency namespace")
parser.add_option("-m", "--maxtrips", dest="maxtrips", default=None, help="maximum number of trips to load")
parser.add_option("-d", "--date", dest="sample_date", default=None, help="only load transit running on a given day. YYYYMMDD" )
(options, args) = parser.parse_args()
    if len(args) not in (2, 3):
        parser.print_help()
        exit(-1)
graphdb_filename = args[0]
gtfsdb_filename = args[1]
agency_id = args[2] if len(args)==3 else None
print "importing from gtfsdb '%s' into graphdb '%s'"%(gtfsdb_filename, graphdb_filename)
gtfsdb = GTFSDatabase( gtfsdb_filename )
gdb = GraphDatabase( graphdb_filename, overwrite=False )
maxtrips = int(options.maxtrips) if options.maxtrips else None
gdb_load_gtfsdb( gdb, options.namespace, gtfsdb, gdb.get_cursor(), agency_id, maxtrips=maxtrips, sample_date=options.sample_date)
gdb.commit()
print "done"
if __name__ == '__main__':
main()
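# --- Illustrative sketch (assumption, not part of the original module) ---
# gtfsdb_to_transfer_edges above implements the GTFS transfer_type semantics:
# 0/empty marks a recommended transfer point, 1 a timed transfer, 2 a transfer
# requiring min_transfer_time seconds, and 3 a forbidden transfer. A minimal
# standalone mapping of one transfers.txt row to an edge payload, assuming the
# Link/ElapseTime classes used above:
def transfer_row_to_edge_payload(conn_type, min_transfer_time):
    """Return the edge payload for a single transfers.txt row, or None."""
    if conn_type in (0, None, 1):
        if conn_type != 1 and min_transfer_time not in ("", None):
            return ElapseTime(int(min_transfer_time))
        return Link()  # timed transfer, or no minimum transfer time given
    if conn_type == 2:
        return ElapseTime(int(min_transfer_time))
    return None  # conn_type == 3: no transfer possible at this location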
|
import datetime
from django.test import TestCase
from dojo.models import Test, Finding
from dojo.tools.cyclonedx.parser import CycloneDXParser
class TestParser(TestCase):
def test_grype_report(self):
with open("dojo/unittests/scans/cyclonedx/grype_dd_1_14_1.xml") as file:
parser = CycloneDXParser()
findings = list(parser.get_findings(file, Test()))
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertEqual(619, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertEqual("Info", finding.severity)
self.assertEqual("Deprecated", finding.component_name)
self.assertEqual("1.2.12", finding.component_version)
self.assertEqual(datetime.date(2021, 4, 13), datetime.datetime.date(finding.date))
with self.subTest(i=200):
finding = findings[200]
self.assertEqual("High", finding.severity)
self.assertEqual("jira", finding.component_name)
self.assertEqual("2.0.0", finding.component_version)
self.assertEqual("CVE-2019-8443", finding.cve)
self.assertEqual(datetime.date(2021, 4, 13), datetime.datetime.date(finding.date))
def test_spec1_report(self):
"""Test a report from the spec itself"""
with open("dojo/unittests/scans/cyclonedx/spec1.xml") as file:
parser = CycloneDXParser()
findings = list(parser.get_findings(file, Test()))
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertEqual(2, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertIsNone(finding.cve)
self.assertEqual("Info", finding.severity)
with self.subTest(i=1):
finding = findings[1]
self.assertEqual("CVE-2018-7489", finding.cve)
self.assertEqual("Critical", finding.severity)
            self.assertIn(finding.cwe, [184, 502])  # there are two CWEs in the report
self.assertEqual("CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", finding.cvssv3)
self.assertEqual("jackson-databind", finding.component_name)
self.assertEqual("2.9.9", finding.component_version)
self.assertEqual("CVE-2018-7489", finding.vuln_id_from_tool)
def test_cyclonedx_bom_report(self):
with open("dojo/unittests/scans/cyclonedx/cyclonedx_bom.xml") as file:
parser = CycloneDXParser()
findings = parser.get_findings(file, Test())
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertEqual(73, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertEqual("Info", finding.severity)
self.assertEqual("asteval", finding.component_name)
self.assertEqual("0.9.23", finding.component_version)
def test_cyclonedx_jake_report(self):
"""Test a report generated by Jake"""
with open("dojo/unittests/scans/cyclonedx/jake.xml") as file:
parser = CycloneDXParser()
findings = parser.get_findings(file, Test())
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertEqual(204, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertEqual("Info", finding.severity)
self.assertEqual("yaspin", finding.component_name)
self.assertEqual("0.16.0", finding.component_version)
def test_cyclonedx_retirejs_report(self):
"""Test a report generated by RetireJS"""
with open("dojo/unittests/scans/cyclonedx/retirejs.latest.xml") as file:
parser = CycloneDXParser()
findings = parser.get_findings(file, Test())
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertEqual(6, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertEqual("Info", finding.severity)
self.assertEqual("handlebars", finding.component_name)
self.assertEqual("3.0.0", finding.component_version)
with self.subTest(i=5):
finding = findings[5]
self.assertEqual("Info", finding.severity)
self.assertEqual("jquery", finding.component_name)
self.assertEqual("1.8.0", finding.component_version)
def test_cyclonedx_grype_11_report(self):
"""Test a report generated by Grype 0.11"""
with open("dojo/unittests/scans/cyclonedx/dd_1_15_0.xml") as file:
parser = CycloneDXParser()
findings = parser.get_findings(file, Test())
for finding in findings:
self.assertIn(finding.severity, Finding.SEVERITIES)
self.assertEqual(689, len(findings))
with self.subTest(i=0):
finding = findings[0]
self.assertEqual("Info", finding.severity)
self.assertEqual("Deprecated", finding.component_name)
self.assertEqual("1.2.12", finding.component_version)
with self.subTest(i=5):
finding = findings[5]
self.assertEqual("Info", finding.severity)
self.assertEqual("Jinja2", finding.component_name)
self.assertEqual("2.11.3", finding.component_version)
with self.subTest(i=640):
finding = findings[640]
self.assertEqual("High", finding.severity)
self.assertEqual("redis", finding.component_name)
self.assertEqual("3.5.3", finding.component_version)
self.assertEqual("CVE-2018-12326", finding.cve)
self.assertEqual("CVSS:3.0/AV:L/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", finding.cvssv3)
self.assertIn("Buffer overflow in redis-cli of Redis before 4.0.10 and 5.x before 5.0 RC3", finding.description)
self.assertEqual("CVE-2018-12326", finding.vuln_id_from_tool)
with self.subTest(i=641):
finding = findings[641]
self.assertEqual("High", finding.severity)
self.assertEqual("redis", finding.component_name)
self.assertEqual("3.5.3", finding.component_version)
self.assertEqual("CVE-2018-12453", finding.cve)
self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", finding.cvssv3)
self.assertEqual(
"Type confusion in the xgroupCommand function in t_stream.c in redis-server in Redis before 5.0 allows"
" remote attackers to cause denial-of-service via an XGROUP command in which the key is not a stream.",
finding.description)
self.assertEqual("CVE-2018-12453", finding.vuln_id_from_tool)
|
"""
===================
NEXRAD Level 2 File
===================
Use MetPy to read information from a NEXRAD Level 2 (volume) file and plot it.
"""
import matplotlib.pyplot as plt
import numpy as np
from metpy.cbook import get_test_data
from metpy.io import Level2File
from metpy.plots import add_metpy_logo, add_timestamp
name = get_test_data('KTLX20130520_201643_V06.gz', as_file_obj=False)
f = Level2File(name)
print(f.sweeps[0][0])
sweep = 0
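# Compute azimuth bin edges from the ray centers: unwrap the differences
# across the 0/360 degree seam, take midpoints of adjacent rays, and pad one
# average spacing on each end so pcolormesh gets one more edge than data rows.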
az = np.array([ray[0].az_angle for ray in f.sweeps[sweep]])
diff = np.diff(az)
diff[diff > 180] -= 360.
diff[diff < -180] += 360.
avg_spacing = diff.mean()
az = (az[:-1] + az[1:]) / 2
az = np.concatenate(([az[0] - avg_spacing], az, [az[-1] + avg_spacing]))
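# Pull the reflectivity (REF) and correlation coefficient (RHO) moments for
# this sweep, with gate-edge ranges built from each header's gate width.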
ref_hdr = f.sweeps[sweep][0][4][b'REF'][0]
ref_range = (np.arange(ref_hdr.num_gates + 1) - 0.5) * ref_hdr.gate_width + ref_hdr.first_gate
ref = np.array([ray[4][b'REF'][1] for ray in f.sweeps[sweep]])
rho_hdr = f.sweeps[sweep][0][4][b'RHO'][0]
rho_range = (np.arange(rho_hdr.num_gates + 1) - 0.5) * rho_hdr.gate_width + rho_hdr.first_gate
rho = np.array([ray[4][b'RHO'][1] for ray in f.sweeps[sweep]])
fig, axes = plt.subplots(1, 2, figsize=(15, 8))
add_metpy_logo(fig, 190, 85, size='large')
for var_data, var_range, ax in zip((ref, rho), (ref_range, rho_range), axes):
# Turn into an array, then mask
data = np.ma.array(var_data)
data[np.isnan(data)] = np.ma.masked
# Convert az,range to x,y
xlocs = var_range * np.sin(np.deg2rad(az[:, np.newaxis]))
ylocs = var_range * np.cos(np.deg2rad(az[:, np.newaxis]))
# Plot the data
ax.pcolormesh(xlocs, ylocs, data, cmap='viridis')
ax.set_aspect('equal', 'datalim')
ax.set_xlim(-40, 20)
ax.set_ylim(-30, 30)
add_timestamp(ax, f.dt, y=0.02, high_contrast=True)
plt.show()
|
from ....testing import assert_equal
from ..utils import FWHMx
def test_FWHMx_inputs():
input_map = dict(acf=dict(argstr='-acf',
usedefault=True,
),
args=dict(argstr='%s',
),
arith=dict(argstr='-arith',
xor=[u'geom'],
),
automask=dict(argstr='-automask',
usedefault=True,
),
combine=dict(argstr='-combine',
),
compat=dict(argstr='-compat',
),
demed=dict(argstr='-demed',
xor=[u'detrend'],
),
detrend=dict(argstr='-detrend',
usedefault=True,
xor=[u'demed'],
),
environ=dict(nohash=True,
usedefault=True,
),
geom=dict(argstr='-geom',
xor=[u'arith'],
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_file=dict(argstr='-input %s',
mandatory=True,
),
mask=dict(argstr='-mask %s',
),
out_detrend=dict(argstr='-detprefix %s',
keep_extension=False,
name_source='in_file',
name_template='%s_detrend',
),
out_file=dict(argstr='> %s',
keep_extension=False,
name_source='in_file',
name_template='%s_fwhmx.out',
position=-1,
),
out_subbricks=dict(argstr='-out %s',
keep_extension=False,
name_source='in_file',
name_template='%s_subbricks.out',
),
terminal_output=dict(nohash=True,
),
unif=dict(argstr='-unif',
),
)
inputs = FWHMx.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_FWHMx_outputs():
output_map = dict(acf_param=dict(),
fwhm=dict(),
out_acf=dict(),
out_detrend=dict(),
out_file=dict(),
out_subbricks=dict(),
)
outputs = FWHMx.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
from __future__ import print_function
import sys
import os.path
from pip.req import parse_requirements
try:
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
except ImportError:
from distutils.core import setup
VERSION = "0.0.1"
def get_local_file(path):
""" Read content of local file """
content = ''
    with open(path, 'r') as input_file:
        content = input_file.read()
return content
def get_requirements(path='requirements.txt'):
""" Read requirements from file """
return [unicode(ir.req) for ir in parse_requirements(path)]
class Tox(TestCommand):
""" Run tox from setup.py """
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
        # import here, because outside of it the eggs aren't loaded
import tox
errno = tox.cmdline(self.test_args)
sys.exit(errno)
setup(
# Project informations
name='ndsbkaggle',
version=VERSION,
author=u'Rémi',
author_email=u'remi.berson@gmail.com',
# License and description
license=get_local_file("LICENSE"),
description=u'Source code for NationalDataScienceBowl Kaggle',
long_description=(get_local_file("README.md")
+ '\n'
+ get_local_file(os.path.join("docs", "HISTORY.txt"))),
# Package, scripts informations
packages=find_packages(exclude=['ez_setup']),
classifiers=[
'Environment :: Console',
'Programming Language :: Python :: 2.7',
'License :: Other/Proprietary License'
],
install_requires=get_requirements(),
entry_points={
'console_scripts': [
'gen_test = ndsbkaggle.gen_test:main',
'gen_train = ndsbkaggle.gen_train:main',
'train = ndsbkaggle.train:main',
'predict = ndsbkaggle.predict:main',
'submission = ndsbkaggle.submission:main'
]
},
# Tests
tests_require=get_requirements('test-requirements.txt'),
cmdclass={'test': Tox},
# Magic !
zip_safe=False
)
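# Usage note (an assumption about the intended workflow): with the Tox command
# registered above, `python setup.py test` delegates the test run to tox,
# provided tox and the packages in test-requirements.txt are installed.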
|
import threading
import zmq
import leveldb
import json
import optparse
from time import sleep
class workerThread(threading.Thread):
"""workerThread"""
def __init__(self, context, db):
threading.Thread.__init__ (self)
self.context = context
self.db = db
self.running = True
self.processing = False
self.socket = self.context.socket(zmq.XREQ)
def run(self):
self.socket.connect('inproc://backend')
while self.running:
try:
msg = self.socket.recv_multipart()
except zmq.ZMQError:
self.running = False
continue
self.processing = True
if len(msg) != 3:
value = 'None'
reply = [msg[0], value]
self.socket.send_multipart(reply)
continue
id = msg[0]
op = msg[1]
data = json.loads(msg[2])
reply = [id]
if op == 'get':
try:
value = self.db.Get(data)
                except Exception:
value = ""
reply.append(value)
elif op == 'put':
try:
self.db.Put(data[0], data[1])
value = "True"
                except Exception:
value = ""
reply.append(value)
elif op == 'delete':
self.db.Delete(data)
value = ""
reply.append(value)
elif op == 'range':
start = data[0]
end = data[1]
if start and end:
try:
arr = []
for value in self.db.RangeIter(start, end):
arr.append({value[0]: value[1]})
reply.append(json.dumps(arr))
                    except Exception:
value = ""
reply.append(value)
else:
try:
arr = []
for value in self.db.RangeIter():
arr.append({value[0]: value[1]})
reply.append(json.dumps(arr))
                    except Exception:
value = ""
reply.append(value)
else:
value = ""
reply.append(value)
self.socket.send_multipart(reply)
self.processing = False
def close(self):
self.running = False
while self.processing:
sleep(1)
self.socket.close()
if __name__ == "__main__":
optparser = optparse.OptionParser(
prog='leveldb-server.py',
version='0.1.1',
description='leveldb-server',
usage='%prog \n\t-p [port and host settings] Default: tcp://127.0.0.1:5147\n' + \
'\t-f [database file name] Default: level.db')
optparser.add_option('--host', '-p', dest='host',
default='tcp://127.0.0.1:5147')
optparser.add_option('--dbfile', '-d', dest='dbfile',
default='level.db')
options, arguments = optparser.parse_args()
    if not (options.host and options.dbfile):
        optparser.print_help()
        raise SystemExit(1)
    print "Starting leveldb-server %s" % options.host
context = zmq.Context()
frontend = context.socket(zmq.XREP)
frontend.bind(options.host)
backend = context.socket(zmq.XREQ)
backend.bind('inproc://backend')
poll = zmq.Poller()
poll.register(frontend, zmq.POLLIN)
poll.register(backend, zmq.POLLIN)
db = leveldb.LevelDB(options.dbfile)
workers = []
for i in xrange(3):
worker = workerThread(context, db)
worker.start()
workers.append(worker)
try:
while True:
sockets = dict(poll.poll())
if frontend in sockets:
if sockets[frontend] == zmq.POLLIN:
msg = frontend.recv_multipart()
backend.send_multipart(msg)
if backend in sockets:
if sockets[backend] == zmq.POLLIN:
msg = backend.recv_multipart()
frontend.send_multipart(msg)
except KeyboardInterrupt:
for worker in workers:
worker.close()
frontend.close()
backend.close()
context.term()
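# --- Illustrative client sketch (assumption, not part of the original server) ---
# The wire protocol handled by workerThread is a multipart message
# [identity, op, json_payload]; the XREP frontend adds the identity frame, so
# a client talks XREQ and sends just [op, json_payload]. 'get' and 'delete'
# take a key, 'put' takes [key, value], and 'range' takes [start, end] (or
# nulls for a full scan). A minimal blocking client, assuming the default
# endpoint above:
def leveldb_request(op, payload, endpoint='tcp://127.0.0.1:5147'):
    """Send one request to leveldb-server and return the reply frames."""
    ctx = zmq.Context()
    sock = ctx.socket(zmq.XREQ)
    sock.connect(endpoint)
    sock.send_multipart([op, json.dumps(payload)])
    reply = sock.recv_multipart()
    sock.close()
    ctx.term()
    return reply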
|
import pageadmin
import useradmin
import permissionadmin
|
""" General descriptor testing code
"""
from rdkit import RDConfig
import unittest,os.path
from rdkit import Chem
from rdkit.Chem import Descriptors
from rdkit.Chem import AllChem
from rdkit.Chem import rdMolDescriptors
import numpy as np
def feq(n1,n2,tol=1e-4):
return abs(n1-n2)<=tol
class TestCase(unittest.TestCase):
def testBadAtomHandling(self):
smis = ('CC[Pu]','CC[*]')
for smi in smis:
m = Chem.MolFromSmiles(smi)
self.failUnless(m)
for nm,fn in Descriptors._descList:
try:
v = fn(m)
except:
import traceback
traceback.print_exc()
self.failUnless(0,'SMILES: %s'%smi)
def testMolFormula(self):
for (smiles, expected) in ( ("[NH4+]", "H4N+"),
("c1ccccc1", "C6H6"),
("C1CCCCC1", "C6H12"),
("c1ccccc1O", "C6H6O"),
("C1CCCCC1O", "C6H12O"),
("C1CCCCC1=O", "C6H10O"),
("N[Na]", "H2NNa"),
("[C-][C-]", "C2-2"),
("[H]", "H"),
("[H-1]", "H-"),
("[H-1]", "H-"),
("[CH2]", "CH2"),
("[He-2]", "He-2"),
("[U+3]", "U+3"),
):
mol = Chem.MolFromSmiles(smiles)
actual = AllChem.CalcMolFormula(mol)
self.failUnlessEqual(actual,expected)
def testMQN(self):
tgt = np.array([42917, 274, 870, 621, 135, 1582, 29, 3147, 5463,
6999, 470, 81, 19055, 4424, 309, 24059, 17822, 1,
9303, 24146, 16076, 5560, 4262, 646, 746, 13725, 5430,
2629, 362, 24211, 15939, 292, 41, 20, 1852, 5642,
31, 9, 1, 2, 3060, 1750])
fn = os.path.join(RDConfig.RDCodeDir,'Chem','test_data','aromat_regress.txt')
ms = [x for x in Chem.SmilesMolSupplier(fn,delimiter='\t')]
vs = np.zeros((42,),np.int32)
for m in ms:
vs += rdMolDescriptors.MQNs_(m)
self.failIf(False in (vs==tgt))
if __name__ == '__main__':
import sys,getopt,re
doLong=0
if len(sys.argv) >1:
args,extras=getopt.getopt(sys.argv[1:],'l')
for arg,val in args:
if arg=='-l':
doLong=1
sys.argv.remove('-l')
if doLong:
for methName in dir(TestCase):
if re.match('_test',methName):
newName = re.sub('_test','test',methName)
exec('TestCase.%s = TestCase.%s'%(newName,methName))
unittest.main()
|
import django.utils.timezone
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("stories", "0012_story_written_by")]
operations = [
migrations.AlterField(
model_name="story",
name="created_on",
field=models.DateTimeField(
blank=True,
default=django.utils.timezone.now,
editable=False,
help_text="When this item was originally created",
),
),
migrations.AlterField(
model_name="story",
name="modified_on",
field=models.DateTimeField(
blank=True,
default=django.utils.timezone.now,
editable=False,
help_text="When this item was last modified",
),
),
migrations.AlterField(
model_name="storyimage",
name="created_on",
field=models.DateTimeField(
blank=True,
default=django.utils.timezone.now,
editable=False,
help_text="When this item was originally created",
),
),
migrations.AlterField(
model_name="storyimage",
name="modified_on",
field=models.DateTimeField(
blank=True,
default=django.utils.timezone.now,
editable=False,
help_text="When this item was last modified",
),
),
]
|
import tests.periodicities.period_test as per
per.buildModel((5, 'B', 1600))
|
"""Generate template values for a callback function.
Design doc: http://www.chromium.org/developers/design-documents/idl-compiler
"""
from v8_globals import includes # pylint: disable=W0403
import v8_utilities # pylint: disable=W0403
CALLBACK_FUNCTION_H_INCLUDES = frozenset([
'bindings/core/v8/NativeValueTraits.h',
'bindings/core/v8/ScriptWrappable.h',
'bindings/core/v8/TraceWrapperV8Reference.h',
'platform/heap/Handle.h',
'wtf/text/WTFString.h',
])
CALLBACK_FUNCTION_CPP_INCLUDES = frozenset([
'bindings/core/v8/ExceptionState.h',
'bindings/core/v8/ScriptState.h',
'bindings/core/v8/ToV8.h',
'bindings/core/v8/V8Binding.h',
'core/dom/ExecutionContext.h',
'wtf/Assertions.h',
])
def callback_function_context(callback_function):
includes.clear()
includes.update(CALLBACK_FUNCTION_CPP_INCLUDES)
idl_type = callback_function.idl_type
idl_type_str = str(idl_type)
forward_declarations = []
for argument in callback_function.arguments:
if argument.idl_type.is_interface_type:
forward_declarations.append(argument.idl_type)
argument.idl_type.add_includes_for_type(callback_function.extended_attributes)
context = {
'cpp_class': callback_function.name,
'cpp_includes': sorted(includes),
'forward_declarations': sorted(forward_declarations),
'header_includes': sorted(CALLBACK_FUNCTION_H_INCLUDES),
'idl_type': idl_type_str,
}
if idl_type_str != 'void':
context.update({
'return_cpp_type': idl_type.cpp_type + '&',
'return_value': idl_type.v8_value_to_local_cpp_value(
callback_function.extended_attributes,
'v8ReturnValue', 'cppValue',
isolate='m_scriptState->isolate()',
bailout_return_value='false'),
})
context.update(arguments_context(callback_function.arguments, context.get('return_cpp_type')))
return context
def arguments_context(arguments, return_cpp_type):
def argument_context(argument):
return {
'argument_name': '%sArgument' % argument.name,
'cpp_value_to_v8_value': argument.idl_type.cpp_value_to_v8_value(
argument.name, isolate='m_scriptState->isolate()',
creation_context='m_scriptState->context()->Global()'),
}
argument_declarations = [
'ScriptWrappable* scriptWrappable',
]
argument_declarations.extend(
'%s %s' % (argument.idl_type.callback_cpp_type, argument.name)
for argument in arguments)
if return_cpp_type:
argument_declarations.append('%s returnValue' % return_cpp_type)
return {
'argument_declarations': argument_declarations,
'arguments': [argument_context(argument) for argument in arguments],
}
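# --- Illustrative note (assumption, not derived from the Blink sources) ---
# For a hypothetical IDL callback `callback Checker = boolean (DOMString arg)`,
# callback_function_context above would yield a context containing
# 'cpp_class' == 'Checker', 'idl_type' == 'boolean', a 'return_cpp_type'
# (since the return type is not 'void'), and one argument context whose
# 'argument_name' is 'argArgument' (from the '%sArgument' pattern).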
|
from __future__ import unicode_literals
import logging
from django.views.generic import CreateView, UpdateView, DeleteView
from dj_diabetes.models import InitMixin, SuccessMixin, PaginateMixin
from dj_diabetes.views import LoginRequiredMixin
from dj_diabetes.models.meals import Meals
from dj_diabetes.forms.base import UserInstanceMixin
from dj_diabetes.forms.meals import MealsForm
logger = logging.getLogger(__name__)
class MealsMixin(SuccessMixin):
form_class = MealsForm
model = Meals
class MealsCreateView(InitMixin, MealsMixin, LoginRequiredMixin,
UserInstanceMixin, PaginateMixin, CreateView):
"""
    Create a Meals entry.
"""
template_name = "dj_diabetes/meals_form.html"
class MealsUpdateView(MealsMixin, LoginRequiredMixin, PaginateMixin,
UpdateView):
"""
    Edit a Meals entry.
"""
template_name = "dj_diabetes/meals_form.html"
class MealsDeleteView(MealsMixin, DeleteView):
"""
    Delete a Meals entry.
"""
template_name = 'dj_diabetes/confirm_delete.html'
|
import numpy as np
from GPy.inference.latent_function_inference.var_dtc import VarDTC
from GPy.util.linalg import jitchol, tdot, dtrtri, dtrtrs, backsub_both_sides,\
dpotrs, dpotri, symmetrify, mdot
from GPy.core.parameterization.variational import VariationalPosterior
from GPy.util import diag
from GPy.inference.latent_function_inference.posterior import Posterior
log_2_pi = np.log(2*np.pi)
import logging, itertools
logger = logging.getLogger('vardtc')
class VarDTCFixedCov(VarDTC):
"""
An object for inference when the likelihood is Gaussian, but we want to do sparse inference.
The function self.inference returns a Posterior object, which summarizes
the posterior.
For efficiency, we sometimes work with the cholesky of Y*Y.T. To save repeatedly recomputing this, we cache it.
    save_per_dim:
        save the log likelihood per output dimension; used for testing the
        differential gene expression analysis with BGPLVM and MRD.
"""
const_jitter = 1e-6
def __init__(self, limit=1, save_per_dim=False):
#self._YYTfactor_cache = caching.cache()
from paramz.caching import Cacher
self.limit = limit
self.get_trYYT = Cacher(self._get_trYYT, limit)
self.get_YYTfactor = Cacher(self._get_YYTfactor, limit)
self.save_per_dim = save_per_dim
def set_limit(self, limit):
self.get_trYYT.limit = limit
self.get_YYTfactor.limit = limit
def _get_trYYT(self, Y):
return np.einsum("ij,ij->", Y, Y)
# faster than, but same as:
# return np.sum(np.square(Y))
def __getstate__(self):
# has to be overridden, as Cacher objects cannot be pickled.
return self.limit
def __setstate__(self, state):
# has to be overridden, as Cacher objects cannot be pickled.
self.limit = state
from paramz.caching import Cacher
self.get_trYYT = Cacher(self._get_trYYT, self.limit)
self.get_YYTfactor = Cacher(self._get_YYTfactor, self.limit)
def _get_YYTfactor(self, Y):
"""
find a matrix L which satisfies LLT = YYT.
Note that L may have fewer columns than Y.
"""
N, D = Y.shape
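        # If N >= D, Y itself is a valid factor (L = Y gives L L^T = Y Y^T);
        # otherwise take a jittered Cholesky of the smaller N x N matrix Y Y^T.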
if (N>=D):
return Y.view(np.ndarray)
else:
return jitchol(tdot(Y))
def compute_lik_per_dim(self, psi0, A, LB, _LBi_Lmi_psi1, beta, Y):
lik_1 = (-0.5 * Y.shape[0] * (np.log(2. * np.pi) - np.log(beta)) - 0.5 * beta * np.einsum('ij,ij->j',Y,Y))
lik_2 = -0.5 * (np.sum(beta * psi0) - np.trace(A)) * np.ones(Y.shape[1])
lik_3 = -(np.sum(np.log(np.diag(LB))))
lik_4 = .5* beta**2 * ((_LBi_Lmi_psi1.dot(Y).T)**2).sum(1)
return lik_1 + lik_2 + lik_3 + lik_4
def get_VVTfactor(self, Y, prec):
        return Y * prec  # TODO: cache this, and make it efficient
def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None, Lm=None, dL_dKmm=None, fixed_covs_kerns=None, **kw):
_, output_dim = Y.shape
uncertain_inputs = isinstance(X, VariationalPosterior)
#see whether we've got a different noise variance for each datum
beta = 1./np.fmax(likelihood.gaussian_variance(Y_metadata), 1e-6)
# VVT_factor is a matrix such that tdot(VVT_factor) = VVT...this is for efficiency!
#self.YYTfactor = self.get_YYTfactor(Y)
#VVT_factor = self.get_VVTfactor(self.YYTfactor, beta)
het_noise = beta.size > 1
if het_noise:
raise(NotImplementedError("Heteroscedastic noise not implemented, should be possible though, feel free to try implementing it :)"))
if beta.ndim == 1:
beta = beta[:, None]
# do the inference:
num_inducing = Z.shape[0]
num_data = Y.shape[0]
# kernel computations, using BGPLVM notation
Kmm = kern.K(Z).copy()
diag.add(Kmm, self.const_jitter)
if Lm is None:
Lm = jitchol(Kmm)
# The rather complex computations of A, and the psi stats
if uncertain_inputs:
psi0 = kern.psi0(Z, X)
psi1 = kern.psi1(Z, X)
if het_noise:
psi2_beta = np.sum([kern.psi2(Z,X[i:i+1,:]) * beta_i for i,beta_i in enumerate(beta)],0)
else:
psi2_beta = kern.psi2(Z,X) * beta
LmInv = dtrtri(Lm)
A = LmInv.dot(psi2_beta.dot(LmInv.T))
else:
psi0 = kern.Kdiag(X)
psi1 = kern.K(X, Z)
            # het_noise is rejected above, so the two identical branches collapse
            tmp = psi1 * np.sqrt(beta)
tmp, _ = dtrtrs(Lm, tmp.T, lower=1)
A = tdot(tmp)
# factor B
B = np.eye(num_inducing) + A
LB = jitchol(B)
        # back substitute C into psi1Vf
#tmp, _ = dtrtrs(Lm, psi1.T.dot(VVT_factor), lower=1, trans=0)
#_LBi_Lmi_psi1Vf, _ = dtrtrs(LB, tmp, lower=1, trans=0)
#tmp, _ = dtrtrs(LB, _LBi_Lmi_psi1Vf, lower=1, trans=1)
#Cpsi1Vf, _ = dtrtrs(Lm, tmp, lower=1, trans=1)
# data fit and derivative of L w.r.t. Kmm
#delit = tdot(_LBi_Lmi_psi1Vf)
# Expose YYT to get additional covariates in (YYT + Kgg):
tmp, _ = dtrtrs(Lm, psi1.T, lower=1, trans=0)
_LBi_Lmi_psi1, _ = dtrtrs(LB, tmp, lower=1, trans=0)
tmp, _ = dtrtrs(LB, _LBi_Lmi_psi1, lower=1, trans=1)
Cpsi1, _ = dtrtrs(Lm, tmp, lower=1, trans=1)
# TODO: cache this:
# Compute fixed covariates covariance:
if fixed_covs_kerns is not None:
K_fixed = 0
for name, [cov, k] in fixed_covs_kerns.iteritems():
K_fixed += k.K(cov)
#trYYT = self.get_trYYT(Y)
YYT_covs = (tdot(Y) + K_fixed)
data_term = beta**2 * YYT_covs
trYYT_covs = np.trace(YYT_covs)
else:
data_term = beta**2 * tdot(Y)
trYYT_covs = self.get_trYYT(Y)
#trYYT = self.get_trYYT(Y)
delit = mdot(_LBi_Lmi_psi1, data_term, _LBi_Lmi_psi1.T)
data_fit = np.trace(delit)
DBi_plus_BiPBi = backsub_both_sides(LB, output_dim * np.eye(num_inducing) + delit)
if dL_dKmm is None:
delit = -0.5 * DBi_plus_BiPBi
delit += -0.5 * B * output_dim
delit += output_dim * np.eye(num_inducing)
# Compute dL_dKmm
dL_dKmm = backsub_both_sides(Lm, delit)
# derivatives of L w.r.t. psi
dL_dpsi0, dL_dpsi1, dL_dpsi2 = _compute_dL_dpsi(num_inducing, num_data, output_dim, beta, Lm,
data_term, Cpsi1, DBi_plus_BiPBi,
psi1, het_noise, uncertain_inputs)
# log marginal likelihood
log_marginal = _compute_log_marginal_likelihood(likelihood, num_data, output_dim, beta, het_noise,
psi0, A, LB, trYYT_covs, data_fit, Y)
if self.save_per_dim:
self.saved_vals = [psi0, A, LB, _LBi_Lmi_psi1, beta]
# No heteroscedastics, so no _LBi_Lmi_psi1Vf:
# For the interested reader, try implementing the heteroscedastic version, it should be possible
        _LBi_Lmi_psi1Vf = None  # kept only for documentation, so you can see what it was
#noise derivatives
dL_dR = _compute_dL_dR(likelihood,
het_noise, uncertain_inputs, LB,
_LBi_Lmi_psi1Vf, DBi_plus_BiPBi, Lm, A,
psi0, psi1, beta,
data_fit, num_data, output_dim, trYYT_covs, Y, None)
dL_dthetaL = likelihood.exact_inference_gradients(dL_dR,Y_metadata)
#put the gradients in the right places
if uncertain_inputs:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dpsi0':dL_dpsi0,
'dL_dpsi1':dL_dpsi1,
'dL_dpsi2':dL_dpsi2,
'dL_dthetaL':dL_dthetaL}
else:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dKdiag':dL_dpsi0,
'dL_dKnm':dL_dpsi1,
'dL_dthetaL':dL_dthetaL}
if fixed_covs_kerns is not None:
# For now, we do not take the gradients, we can compute them,
# but the maximum likelihood solution is to switch off the additional covariates....
dL_dcovs = beta * np.eye(K_fixed.shape[0]) - beta**2*tdot(_LBi_Lmi_psi1.T)
grad_dict['dL_dcovs'] = -.5 * dL_dcovs
#get sufficient things for posterior prediction
#TODO: do we really want to do this in the loop?
        woodbury_vector = (beta*Cpsi1).dot(Y)
        # Dead alternative path removed (it began with an ipdb breakpoint);
        # kept here for reference:
        # psi1V = np.dot(Y.T*beta, psi1).T
        # tmp, _ = dtrtrs(Lm, psi1V, lower=1, trans=0)
        # tmp, _ = dpotrs(LB, tmp, lower=1)
        # woodbury_vector, _ = dtrtrs(Lm, tmp, lower=1, trans=1)
Bi, _ = dpotri(LB, lower=1)
symmetrify(Bi)
Bi = -dpotri(LB, lower=1)[0]
diag.add(Bi, 1)
woodbury_inv = backsub_both_sides(Lm, Bi)
#construct a posterior object
post = Posterior(woodbury_inv=woodbury_inv, woodbury_vector=woodbury_vector, K=Kmm, mean=None, cov=None, K_chol=Lm)
return post, log_marginal, grad_dict
def _compute_dL_dpsi(num_inducing, num_data, output_dim, beta, Lm, data_term, Cpsi1, DBi_plus_BiPBi, psi1, het_noise, uncertain_inputs):
dL_dpsi0 = -0.5 * output_dim * (beta* np.ones([num_data, 1])).flatten()
dL_dpsi1 = np.dot(data_term, Cpsi1.T)
dL_dpsi2_beta = 0.5 * backsub_both_sides(Lm, output_dim * np.eye(num_inducing) - DBi_plus_BiPBi)
if het_noise:
if uncertain_inputs:
dL_dpsi2 = beta[:, None] * dL_dpsi2_beta[None, :, :]
else:
dL_dpsi1 += 2.*np.dot(dL_dpsi2_beta, (psi1 * beta).T).T
dL_dpsi2 = None
else:
dL_dpsi2 = beta * dL_dpsi2_beta
if not uncertain_inputs:
# subsume back into psi1 (==Kmn)
dL_dpsi1 += 2.*np.dot(psi1, dL_dpsi2)
dL_dpsi2 = None
return dL_dpsi0, dL_dpsi1, dL_dpsi2
def _compute_dL_dR(likelihood, het_noise, uncertain_inputs, LB, _LBi_Lmi_psi1Vf, DBi_plus_BiPBi, Lm, A, psi0, psi1, beta, data_fit, num_data, output_dim, trYYT, Y, VVT_factr=None):
# the partial derivative vector for the likelihood
if likelihood.size == 0:
# save computation here.
dL_dR = None
elif het_noise:
if uncertain_inputs:
raise(NotImplementedError, "heteroscedatic derivates with uncertain inputs not implemented")
else:
#from ...util.linalg import chol_inv
#LBi = chol_inv(LB)
LBi, _ = dtrtrs(LB,np.eye(LB.shape[0]))
Lmi_psi1, nil = dtrtrs(Lm, psi1.T, lower=1, trans=0)
_LBi_Lmi_psi1, _ = dtrtrs(LB, Lmi_psi1, lower=1, trans=0)
dL_dR = -0.5 * beta + 0.5 * VVT_factr**2
dL_dR += 0.5 * output_dim * (psi0 - np.sum(Lmi_psi1**2,0))[:,None] * beta**2
dL_dR += 0.5*np.sum(mdot(LBi.T,LBi,Lmi_psi1)*Lmi_psi1,0)[:,None]*beta**2
dL_dR += -np.dot(_LBi_Lmi_psi1Vf.T,_LBi_Lmi_psi1).T * Y * beta**2
dL_dR += 0.5*np.dot(_LBi_Lmi_psi1Vf.T,_LBi_Lmi_psi1).T**2 * beta**2
else:
        # likelihood is not heteroscedastic
dL_dR = -0.5 * num_data * output_dim * beta + 0.5 * trYYT * beta ** 2
dL_dR += 0.5 * output_dim * (psi0.sum() * beta ** 2 - np.trace(A) * beta)
dL_dR += beta * (0.5 * np.sum(A * DBi_plus_BiPBi) - data_fit)
return dL_dR
def _compute_log_marginal_likelihood(likelihood, num_data, output_dim, beta, het_noise, psi0, A, LB, trYYT_covs, data_fit, Y):
#compute log marginal likelihood
if het_noise:
lik_1 = -0.5 * num_data * output_dim * np.log(2. * np.pi) + 0.5 * output_dim * np.sum(np.log(beta)) - 0.5 * np.sum(beta.ravel() * np.square(Y).sum(axis=-1))
lik_2 = -0.5 * output_dim * (np.sum(beta.flatten() * psi0) - np.trace(A))
else:
lik_1 = -0.5 * num_data * output_dim * (np.log(2. * np.pi) - np.log(beta)) - 0.5 * beta * trYYT_covs
lik_2 = -0.5 * output_dim * (np.sum(beta * psi0) - np.trace(A))
lik_3 = -output_dim * (np.sum(np.log(np.diag(LB))))
lik_4 = 0.5 * data_fit
log_marginal = lik_1 + lik_2 + lik_3 + lik_4
return log_marginal
|
import collections
import datetime
import operator
import os
import time
import subprocess
from PyQt4 import QtCore, QtGui, Qt
import emdash.config
import emdash.log
def starstring(stars):
return emdash.config.get('starclosed') * stars + emdash.config.get('staropen') * (5-stars)
class ControlMenu(QtGui.QMenu):
signal_enqueue = QtCore.pyqtSignal(unicode, object)
signal_dequeue = QtCore.pyqtSignal(unicode, object)
def __init__(self, parent=None, name=None, data=None):
QtGui.QMenu.__init__(self, parent=parent)
# ian: is there a better way than passing this dict around..
self.data = data
self.name = name
self.addAction(QtGui.QAction("Add to Queue", self, triggered=self.enqueue))
def enqueue(self):
self.signal_enqueue.emit(self.name, self.data)
def dequeue(self):
self.signal_dequeue.emit(self.name, self.data)
class ProgressBarDelegate(QtGui.QItemDelegate):
def sizeHint(self, option, index):
return Qt.QSize(130, 30)
def paint(self, painter, option, index):
progress = index.model().data(index, QtCore.Qt.DisplayRole)
        if isinstance(progress, basestring) or progress is None:
return QtGui.QItemDelegate.paint(self, painter, option, index)
opts = QtGui.QStyleOptionProgressBarV2()
opts.rect = option.rect
opts.minimum = 1
opts.maximum = 100
opts.progress = int(progress*100)
return QtGui.QApplication.style().drawControl(QtGui.QStyle.CE_ProgressBar, opts, painter)
class Delegate(QtGui.QStyledItemDelegate):
def _gettext(self, data):
return data
def paint(self, painter, option, index):
painter.save()
# self.drawBackground(painter, option, index)
rect = option.rect
rect = QtCore.QRect(rect.left()+3, rect.top()+6, rect.width()-5, rect.height())
data = index.model().data(index, QtCore.Qt.DisplayRole)
        if data is None:
t = ""
elif data == -1:
t = "Saving.."
else:
t = self._gettext(data)
QtGui.QApplication.style().drawItemText(painter, rect, 0, option.palette, True, QtCore.QString(t))
painter.restore()
class StarDelegate(Delegate):
def _gettext(self, data):
starmap = {
0: "Trash",
1: starstring(1),
2: starstring(1),
3: starstring(2),
4: starstring(2),
5: starstring(3),
6: starstring(3),
7: starstring(4),
8: starstring(4),
9: starstring(5),
10: starstring(5)
}
return starmap.get(data)
class Menu(QtGui.QMenu):
signal_set = QtCore.pyqtSignal(unicode, unicode, unicode)
def __init__(self, parent=None, name=None, param=None, value=None, choices=None):
QtGui.QMenu.__init__(self, parent=parent)
self.choices = choices
self.value = value
self.param = param
self.name = name
self.init()
def set(self, value):
self.signal_set.emit(self.name, self.param, str(value))
def init(self):
for value in self.choices:
lx = lambda value:self.signal_set.emit(self.name, self.param, str(value))
self.addAction(QtGui.QAction(str(value), self, triggered=lx))
class QualityMenu(Menu):
def init(self):
so = emdash.config.get('staropen')
sc = emdash.config.get('starclosed')
self.addAction(QtGui.QAction("Trash", self, triggered=self.set_0))
self.addAction(QtGui.QAction(sc*1+so*4, self, triggered=self.set_1))
self.addAction(QtGui.QAction(sc*2+so*3, self, triggered=self.set_2))
self.addAction(QtGui.QAction(sc*3+so*2, self, triggered=self.set_3))
self.addAction(QtGui.QAction(sc*4+so*1, self, triggered=self.set_4))
self.addAction(QtGui.QAction(sc*5+so*0, self, triggered=self.set_5))
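        # The five star choices map onto the 0-10 quality values used by
        # set_0 ... set_5 below: Trash -> 0, then 1, 3, 5, 7, 9.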
def set_0(self):
self.set(0)
def set_1(self):
self.set(1)
def set_2(self):
self.set(3)
def set_3(self):
self.set(5)
def set_4(self):
self.set(7)
def set_5(self):
self.set(9)
class FilesView(QtGui.QTreeView):
signal_dequeue = QtCore.pyqtSignal(unicode, object)
signal_enqueue = QtCore.pyqtSignal(unicode, object)
signal_set = QtCore.pyqtSignal(unicode, unicode, unicode)
def star_column(self, col):
self.setItemDelegateForColumn(col, StarDelegate(parent=self))
def prog_column(self, col):
self.setItemDelegateForColumn(col, ProgressBarDelegate(parent=self))
@QtCore.pyqtSlot(unicode, object)
def newfile(self, filename, data=None):
"""When a new file is discovered, scroll to bottom and expand all"""
self.expandAll()
sb = self.verticalScrollBar()
size = sb.maximum()
sb.setValue(size)
def load_viewer(self, filename):
filename = unicode(filename)
emdash.log.msg("loading viewer:", filename)
dname = os.path.dirname(filename)
subprocess.Popen(['python', '-m', 'e2display', filename], cwd=dname)
def mousePressEvent(self, event):
index = self.indexAt(event.pos())
if index.row() < 0:
return super(FilesView, self).mousePressEvent(event)
data = self.model().getmdata(index)
name = data.get('name', -1)
headers = self.model().headers
param = headers[index.column()]
###################
# View columns
if param == "_status":
# Launch web browser...
if data.get("name") != None:
QtGui.QDesktopServices.openUrl(QtCore.QUrl("%s/record/%s/?ctxid=%s"%(emdash.config.get('host'), data.get("name"), emdash.config.get('ctxid'))))
else:
editor = ControlMenu(parent=self, name=data.get("_name"), data=data)
editor.signal_enqueue.connect(self.signal_enqueue)
editor.popup(self.mapToGlobal(event.pos()))
elif param == '_recname':
# Or file viewer
self.load_viewer(data.get('_filename'))
###################
# Editable columns
        if data.get('name') is None:
param = None
if param == 'tem_magnification_set':
d, ok = QtGui.QInputDialog.getDouble(self, "Set Magnification", "Magnification (x1000):", data.get('tem_magnification_set', 0), 0, 1000, 2)
if ok:
self.signal_set.emit(name, 'tem_magnification_set', str(d))
elif param == 'ctf_defocus_set':
d, ok = QtGui.QInputDialog.getDouble(self, "Set Defocus", "Defocus (underfocus is positive)", data.get('ctf_defocus_set', 0), -100, 100, 2)
if ok:
self.signal_set.emit(name, 'ctf_defocus_set', str(d))
elif param == 'tem_dose_rate':
d, ok = QtGui.QInputDialog.getDouble(self, "Set Dose Rate", "Dose Rate (e/A2/sec)", data.get('tem_dose_rate', 0), 0, 100, 2)
if ok:
self.signal_set.emit(name, 'tem_dose_rate', str(d))
elif param == 'time_exposure_tem':
d, ok = QtGui.QInputDialog.getDouble(self, "Set Exposure Time", "Exposure Time (s)", data.get('time_exposure_tem', 0), 0, 10, 2)
if ok:
self.signal_set.emit(name, 'time_exposure_tem', str(d))
elif param == 'assess_image_quality':
editor = QualityMenu(parent=self, name=data.get('name'), param='assess_image_quality')
editor.signal_set.connect(self.signal_set)
editor.popup(self.mapToGlobal(event.pos()))
return super(FilesView, self).mousePressEvent(event)
|
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.views.generic import ListView, DetailView
from django.db.models import get_model
from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_class
from oscar.apps.catalogue.signals import product_viewed, product_search
Product = get_model('catalogue', 'product')
ProductReview = get_model('reviews', 'ProductReview')
Category = get_model('catalogue', 'category')
ProductAlert = get_model('customer', 'ProductAlert')
ProductAlertForm = get_class('customer.forms', 'ProductAlertForm')
class ProductDetailView(DetailView):
context_object_name = 'product'
model = Product
view_signal = product_viewed
template_folder = "catalogue"
def get(self, request, **kwargs):
"""
Ensures that the correct URL is used before rendering a response
"""
self.object = product = self.get_object()
if product.is_variant:
return HttpResponsePermanentRedirect(
product.parent.get_absolute_url())
correct_path = product.get_absolute_url()
if correct_path != request.path:
return HttpResponsePermanentRedirect(correct_path)
response = super(ProductDetailView, self).get(request, **kwargs)
self.send_signal(request, response, product)
return response
def get_object(self, queryset=None):
# Check if self.object is already set to prevent unnecessary DB calls
return getattr(
self, 'object', super(ProductDetailView, self).get_object(queryset))
def get_context_data(self, **kwargs):
ctx = super(ProductDetailView, self).get_context_data(**kwargs)
ctx['reviews'] = self.get_reviews()
ctx['alert_form'] = self.get_alert_form()
ctx['has_active_alert'] = self.get_alert_status()
return ctx
def get_alert_status(self):
        # Check whether this user already has an alert for this product
has_alert = False
if self.request.user.is_authenticated():
alerts = ProductAlert.objects.filter(
product=self.object, user=self.request.user,
status=ProductAlert.ACTIVE)
has_alert = alerts.count() > 0
return has_alert
def get_alert_form(self):
return ProductAlertForm(
user=self.request.user, product=self.object)
def get_reviews(self):
return self.object.reviews.filter(status=ProductReview.APPROVED)
def send_signal(self, request, response, product):
self.view_signal.send(
sender=self, product=product, user=request.user, request=request,
response=response)
def get_template_names(self):
"""
Return a list of possible templates.
We try 2 options before defaulting to catalogue/detail.html:
1). detail-for-upc-<upc>.html
2). detail-for-class-<classname>.html
        This allows alternative templates to be provided on a per-product
        and a per-product-class basis.
"""
return [
'%s/detail-for-upc-%s.html' % (
self.template_folder, self.object.upc),
'%s/detail-for-class-%s.html' % (
self.template_folder, self.object.get_product_class().slug),
'%s/detail.html' % (self.template_folder)]
def get_product_base_queryset():
"""
Return ``QuerySet`` for product model with related
content pre-loaded. The ``QuerySet`` returns unfiltered
results for further filtering.
"""
return Product.browsable.select_related(
'product_class',
).prefetch_related(
'variants',
'product_options',
'product_class__options',
'stockrecord',
'images',
).all()
class ProductCategoryView(ListView):
"""
Browse products in a given category
    Category URLs used to be based solely on the slug. Renaming the category
    or any of the parent categories would break the URL. Hence, the new URLs
    consist of both the slug and the category PK (compare product URLs).
    The legacy slug-only URLs still work so as not to break existing systems.
"""
context_object_name = "products"
template_name = 'catalogue/browse.html'
paginate_by = settings.OSCAR_PRODUCTS_PER_PAGE
def get_object(self):
if 'pk' in self.kwargs:
self.category = get_object_or_404(Category, pk=self.kwargs['pk'])
else:
self.category = get_object_or_404(Category,
slug=self.kwargs['category_slug'])
def get(self, request, *args, **kwargs):
self.get_object()
correct_path = self.category.get_absolute_url()
if correct_path != request.path:
return HttpResponsePermanentRedirect(correct_path)
self.categories = self.get_categories()
return super(ProductCategoryView, self).get(request, *args, **kwargs)
def get_categories(self):
"""
        Return a list of the current category and its descendants
"""
categories = list(self.category.get_descendants())
categories.append(self.category)
return categories
def get_context_data(self, **kwargs):
context = super(ProductCategoryView, self).get_context_data(**kwargs)
context['categories'] = self.categories
context['category'] = self.category
context['summary'] = self.category.name
return context
def get_queryset(self):
return get_product_base_queryset().filter(
categories__in=self.categories
).distinct()
class ProductListView(ListView):
"""
A list of products
"""
context_object_name = "products"
template_name = 'catalogue/browse.html'
paginate_by = settings.OSCAR_PRODUCTS_PER_PAGE
search_signal = product_search
model = Product
def get_search_query(self):
q = self.request.GET.get('q', None)
return q.strip() if q else q
def get_queryset(self):
q = self.get_search_query()
if q:
            # Send signal to record this search query
self.search_signal.send(sender=self, query=q, user=self.request.user)
return get_product_base_queryset().filter(title__icontains=q)
else:
return get_product_base_queryset()
def get_context_data(self, **kwargs):
context = super(ProductListView, self).get_context_data(**kwargs)
q = self.get_search_query()
if not q:
context['summary'] = _('All products')
else:
context['summary'] = _("Products matching '%(query)s'") % {'query': q}
context['search_term'] = q
return context
|
from django.conf import settings
def enabled_social_auth(request):
return {'ENABLED_SOCIAL_AUTH': settings.ENABLED_SOCIAL_AUTH}
|
""" Test the adaptation manager. """
import sys
from traits.adaptation.api import AdaptationManager, adapt
import traits.adaptation.tests.abc_examples
import traits.adaptation.tests.interface_examples
from traits.testing.unittest_tools import unittest
class TestAdaptationManagerWithABC(unittest.TestCase):
""" Test the adaptation manager. """
#: Class attribute pointing at the module containing the example data
examples = traits.adaptation.tests.abc_examples
#### 'TestCase' protocol ##################################################
def setUp(self):
""" Prepares the test fixture before each test method is called. """
self.adaptation_manager = AdaptationManager()
return
def tearDown(self):
""" Called immediately after each test method has been called. """
return
#### Tests ################################################################
def test_no_adapter_required(self):
ex = self.examples
plug = ex.UKPlug()
# Try to adapt it to its own concrete type.
uk_plug = self.adaptation_manager.adapt(plug, ex.UKPlug)
# The adaptation manager should simply return the same object.
self.assertIs(uk_plug, plug)
# Try to adapt it to an ABC that is registered for its type.
uk_plug = self.adaptation_manager.adapt(plug, ex.UKStandard)
# The adaptation manager should simply return the same object.
self.assertIs(uk_plug, plug)
return
def test_no_adapter_available(self):
ex = self.examples
plug = ex.UKPlug()
# Try to adapt it to a concrete type.
eu_plug = self.adaptation_manager.adapt(plug, ex.EUPlug, None)
# There should be no way to adapt a UKPlug to a EUPlug.
self.assertEqual(eu_plug, None)
# Try to adapt it to an ABC.
eu_plug = self.adaptation_manager.adapt(plug, ex.EUStandard, None)
# There should be no way to adapt a UKPlug to a EUPlug.
self.assertEqual(eu_plug, None)
return
def test_one_step_adaptation(self):
ex = self.examples
# UKStandard->EUStandard.
self.adaptation_manager.register_factory(
factory = ex.UKStandardToEUStandard,
from_protocol = ex.UKStandard,
to_protocol = ex.EUStandard
)
plug = ex.UKPlug()
# Adapt it to an ABC.
eu_plug = self.adaptation_manager.adapt(plug, ex.EUStandard)
self.assertIsNotNone(eu_plug)
self.assertIsInstance(eu_plug, ex.UKStandardToEUStandard)
# We shouldn't be able to adapt it to a *concrete* 'EUPlug' though.
eu_plug = self.adaptation_manager.adapt(plug, ex.EUPlug, None)
self.assertIsNone(eu_plug)
return
def test_adapter_chaining(self):
ex = self.examples
# UKStandard->EUStandard.
self.adaptation_manager.register_factory(
factory = ex.UKStandardToEUStandard,
from_protocol = ex.UKStandard,
to_protocol = ex.EUStandard
)
# EUStandard->JapanStandard.
self.adaptation_manager.register_factory(
factory = ex.EUStandardToJapanStandard,
from_protocol = ex.EUStandard,
to_protocol = ex.JapanStandard
)
# Create a UKPlug.
uk_plug = ex.UKPlug()
# Adapt it to a JapanStandard via the chain.
japan_plug = self.adaptation_manager.adapt(uk_plug, ex.JapanStandard)
self.assertIsNotNone(japan_plug)
self.assertIsInstance(japan_plug, ex.EUStandardToJapanStandard)
self.assertIs(japan_plug.adaptee.adaptee, uk_plug)
return
def test_multiple_paths_unambiguous(self):
ex = self.examples
# UKStandard->EUStandard.
self.adaptation_manager.register_factory(
factory = ex.UKStandardToEUStandard,
from_protocol = ex.UKStandard,
to_protocol = ex.EUStandard
)
# EUStandard->JapanStandard.
self.adaptation_manager.register_factory(
factory = ex.EUStandardToJapanStandard,
from_protocol = ex.EUStandard,
to_protocol = ex.JapanStandard
)
# JapanStandard->IraqStandard.
self.adaptation_manager.register_factory(
factory = ex.JapanStandardToIraqStandard,
from_protocol = ex.JapanStandard,
to_protocol = ex.IraqStandard
)
# EUStandard->IraqStandard.
self.adaptation_manager.register_factory(
factory = ex.EUStandardToIraqStandard,
from_protocol = ex.EUStandard,
to_protocol = ex.IraqStandard
)
# Create a UKPlug.
uk_plug = ex.UKPlug()
# Adapt it to a IraqStandard via the chain.
iraq_plug = self.adaptation_manager.adapt(uk_plug, ex.IraqStandard)
self.assertIsNotNone(iraq_plug)
self.assertIsInstance(iraq_plug, ex.EUStandardToIraqStandard)
self.assertIs(iraq_plug.adaptee.adaptee, uk_plug)
return
def test_multiple_paths_ambiguous(self):
ex = self.examples
# UKStandard->EUStandard.
self.adaptation_manager.register_factory(
factory = ex.UKStandardToEUStandard,
from_protocol = ex.UKStandard,
to_protocol = ex.EUStandard
)
# UKStandard->JapanStandard.
self.adaptation_manager.register_factory(
factory = ex.UKStandardToJapanStandard,
from_protocol = ex.UKStandard,
to_protocol = ex.JapanStandard
)
# JapanStandard->IraqStandard.
self.adaptation_manager.register_factory(
factory = ex.JapanStandardToIraqStandard,
from_protocol = ex.JapanStandard,
to_protocol = ex.IraqStandard
)
# EUStandard->IraqStandard.
self.adaptation_manager.register_factory(
factory = ex.EUStandardToIraqStandard,
from_protocol = ex.EUStandard,
to_protocol = ex.IraqStandard
)
# Create a UKPlug.
uk_plug = ex.UKPlug()
# Adapt it to a IraqStandard via the chain.
iraq_plug = self.adaptation_manager.adapt(uk_plug, ex.IraqStandard)
self.assertIsNotNone(iraq_plug)
self.assertIn(
type(iraq_plug),
[ex.EUStandardToIraqStandard, ex.JapanStandardToIraqStandard]
)
self.assertIs(iraq_plug.adaptee.adaptee, uk_plug)
return
def test_conditional_adaptation(self):
ex = self.examples
# TravelPlug->EUStandard.
def travel_plug_to_eu_standard(adaptee):
if adaptee.mode == 'Europe':
return ex.TravelPlugToEUStandard(adaptee=adaptee)
else:
return None
self.adaptation_manager.register_factory(
factory = travel_plug_to_eu_standard,
from_protocol = ex.TravelPlug,
to_protocol = ex.EUStandard
)
# Create a TravelPlug.
travel_plug = ex.TravelPlug(mode='Europe')
# Adapt it to a EUStandard.
eu_plug = self.adaptation_manager.adapt(travel_plug, ex.EUStandard)
self.assertIsNotNone(eu_plug)
self.assertIsInstance(eu_plug, ex.TravelPlugToEUStandard)
# Create a TravelPlug.
travel_plug = ex.TravelPlug(mode='Asia')
# Adapt it to a EUStandard.
eu_plug = self.adaptation_manager.adapt(travel_plug, ex.EUStandard, None)
self.assertIsNone(eu_plug)
return
def test_spillover_adaptation_behavior(self):
ex = self.examples
# FileType->IEditor.
self.adaptation_manager.register_factory(
factory = ex.FileTypeToIEditor,
from_protocol = ex.FileType,
to_protocol = ex.IEditor
)
# Meanwhile, in a plugin far, far away ...
# IScriptable->IPrintable.
self.adaptation_manager.register_factory(
factory = ex.IScriptableToIUndoable,
from_protocol = ex.IScriptable,
to_protocol = ex.IUndoable
)
# Create a file type.
file_type = ex.FileType()
        # Try to adapt to IUndoable: since we did not define an adapter
        # chain that goes from FileType to IUndoable, this should fail.
        undoable = self.adaptation_manager.adapt(file_type, ex.IUndoable, None)
        self.assertIsNone(undoable)
return
def test_adaptation_prefers_subclasses(self):
ex = self.examples
# TextEditor->IPrintable.
self.adaptation_manager.register_factory(
factory = ex.TextEditorToIPrintable,
from_protocol = ex.TextEditor,
to_protocol = ex.IPrintable
)
# Editor->IPrintable.
self.adaptation_manager.register_factory(
factory = ex.EditorToIPrintable,
from_protocol = ex.Editor,
to_protocol = ex.IPrintable
)
# Create a text editor.
text_editor = ex.TextEditor()
# Adapt to IPrintable: we should get the TextEditorToIPrintable
# adapter, not the EditorToIPrintable one.
printable = self.adaptation_manager.adapt(text_editor, ex.IPrintable)
self.assertIsNotNone(printable)
self.assertIs(type(printable), ex.TextEditorToIPrintable)
return
def test_adaptation_prefers_subclasses_other_registration_order(self):
# This test is identical to `test_adaptation_prefers_subclasses`
# with adapters registered in the opposite order. Both of them
# should pass
ex = self.examples
# Editor->IPrintable.
self.adaptation_manager.register_factory(
factory = ex.EditorToIPrintable,
from_protocol = ex.Editor,
to_protocol = ex.IPrintable
)
# TextEditor->IPrintable.
self.adaptation_manager.register_factory(
factory = ex.TextEditorToIPrintable,
from_protocol = ex.TextEditor,
to_protocol = ex.IPrintable
)
# Create a text editor.
text_editor = ex.TextEditor()
# Adapt to IPrintable: we should get the TextEditorToIPrintable
# adapter, not the EditorToIPrintable one.
printable = self.adaptation_manager.adapt(text_editor, ex.IPrintable)
self.assertIsNotNone(printable)
self.assertIs(type(printable), ex.TextEditorToIPrintable)
return
def test_circular_adaptation(self):
# Circles in the adaptation graph should not lead to infinite loops
# when it is impossible to reach the target.
class Foo(object):
pass
class Bar(object):
pass
# object->Foo
self.adaptation_manager.register_factory(
factory = lambda adaptee: Foo(),
from_protocol = object,
to_protocol = Foo
)
# Foo->object
self.adaptation_manager.register_factory(
factory = lambda adaptee: [],
from_protocol = Foo,
to_protocol = object
)
# Create an object.
obj = []
# Try to adapt to an unreachable target.
bar = self.adaptation_manager.adapt(obj, Bar, None)
self.assertIsNone(bar)
return
def test_default_argument_in_adapt(self):
from traits.adaptation.adaptation_manager import AdaptationError
# Without a default argument, a failed adaptation raises an error.
with self.assertRaises(AdaptationError):
self.adaptation_manager.adapt('string', int)
# With a default argument, a failed adaptation returns the default.
default = 'default'
result = self.adaptation_manager.adapt('string', int, default=default)
self.assertIs(result, default)
return
def test_prefer_specific_interfaces(self):
ex = self.examples
# IIntermediate -> ITarget.
self.adaptation_manager.register_factory(
factory = ex.IIntermediateToITarget,
from_protocol = ex.IIntermediate,
to_protocol = ex.ITarget
)
# IHuman -> IIntermediate.
self.adaptation_manager.register_factory(
factory = ex.IHumanToIIntermediate,
from_protocol = ex.IHuman,
to_protocol = ex.IIntermediate
)
# IChild -> IIntermediate.
self.adaptation_manager.register_factory(
factory = ex.IChildToIIntermediate,
from_protocol = ex.IChild,
to_protocol = ex.IIntermediate
)
# IPrimate -> IIntermediate.
self.adaptation_manager.register_factory(
factory = ex.IPrimateToIIntermediate,
from_protocol = ex.IPrimate,
to_protocol = ex.IIntermediate
)
# Create a source.
source = ex.Source()
        # Adapt to ITarget: the chain should go via the adapter for the most
        # specific interface, i.e. IChildToIIntermediate.
target = self.adaptation_manager.adapt(source, ex.ITarget)
self.assertIsNotNone(target)
self.assertIs(type(target.adaptee), ex.IChildToIIntermediate)
return
def test_chaining_with_intermediate_mro_climbing(self):
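        # No direct IStart -> IEnd adapter exists; the manager must compose
        # the two factories below, climbing the MRO at the intermediate step.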
ex = self.examples
# IStart -> ISpecific.
self.adaptation_manager.register_factory(
factory = ex.IStartToISpecific,
from_protocol = ex.IStart,
to_protocol = ex.ISpecific
)
# IGeneric -> IEnd.
self.adaptation_manager.register_factory(
factory = ex.IGenericToIEnd,
from_protocol = ex.IGeneric,
to_protocol = ex.IEnd
)
# Create a start.
start = ex.Start()
# Adapt to IEnd; this should succeed going from IStart to ISpecific,
# climbing up the MRO to IGeneric, then crossing to IEnd.
end = self.adaptation_manager.adapt(start, ex.IEnd)
self.assertIsNotNone(end)
self.assertIs(type(end), ex.IGenericToIEnd)
return
    def test_conditional_recycling(self):
        # Test that an offer that was considered and failed earlier is
        # considered again at a later time, when it might succeed because
        # of conditional adaptation.
        #
        # Adaptation paths:
        #   C -- A -fails- B
        #   C -- D -- A -succeeds- B
class A(object):
def __init__(self, allow_adaptation):
self.allow_adaptation = allow_adaptation
class B(object):
pass
class C(object):
pass
class D(object):
pass
self.adaptation_manager.register_factory(
factory=lambda adaptee: A(False), from_protocol=C, to_protocol=A
)
self.adaptation_manager.register_factory(
factory=lambda adaptee: A(True), from_protocol=D, to_protocol=A
)
self.adaptation_manager.register_factory(
factory=lambda adaptee: D(), from_protocol=C, to_protocol=D
)
# Conditional adapter
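        # (a factory that returns None signals that this particular adaptee
        # cannot be adapted, so the manager keeps looking for another path)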
def a_to_b_adapter(adaptee):
if adaptee.allow_adaptation:
b = B()
b.marker = True
else:
b = None
return b
self.adaptation_manager.register_factory(
factory=a_to_b_adapter, from_protocol=A, to_protocol=B
)
        # Create a C.
        c = C()
        # Adaptation to B should succeed by going through D.
b = self.adaptation_manager.adapt(c, B)
self.assertIsNotNone(b)
self.assertTrue(hasattr(b, 'marker'))
return
def test_provides_protocol_for_interface_subclass(self):
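        # An Interface subclass implicitly provides its parent interface,
        # so no adapter registration is needed here.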
from traits.api import Interface
class IA(Interface):
pass
class IB(IA):
pass
self.assertTrue(self.adaptation_manager.provides_protocol(IB, IA))
return
def test_register_provides(self):
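        # `register_provides` declares that instances of a type already
        # provide a protocol; adaptation then returns the object itself,
        # with no adapter wrapped around it.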
from traits.api import Interface
class IFoo(Interface):
pass
obj = {}
        self.assertIsNone(self.adaptation_manager.adapt(obj, IFoo, None))
self.adaptation_manager.register_provides(dict, IFoo)
self.assertEqual(obj, self.adaptation_manager.adapt(obj, IFoo))
return
class TestAdaptationManagerWithInterfaces(TestAdaptationManagerWithABC):
""" Test the adaptation manager with Interfaces. """
examples = traits.adaptation.tests.interface_examples
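    # All tests are inherited from TestAdaptationManagerWithABC; only the
    # example classes (interface-based instead of ABC-based) change.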
def test_adapts_should_register_class_as_providing_the_to_protocol(self):
if sys.version_info[0] >= 3:
self.skipTest("""
Currently, under Python 3, class advisors do not work anymore.
Therefore, this test will fail due to the use of "adapts".
""")
        from traits.api import Adapter, adapt, adapts, HasTraits, \
            Instance, Int, Interface
class IFoo(Interface):
x = Int
class Bar(HasTraits):
foo = Instance(IFoo)
class Baz(HasTraits):
pass
        # Warning: because we are trying to test the 'adapts' class advisor,
        # this will affect the global adaptation manager and hence may
        # interfere with any other tests that rely on it (all of the tests
        # in this package use a separate adaptation manager, so there should
        # be no clashes here ;^).
        #
        # 'adapts' is also deprecated, so expect a warning message when you
        # run the tests.
class BazToIFooAdapter(Adapter):
adapts(Baz, IFoo)
baz = Baz()
bar = Bar()
bar.foo = adapt(baz, IFoo)
self.assertEqual(bar.foo.adaptee, baz)
return