code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
#!/usr/bin/python
#
# \file 1_export.py
# \brief Export displace
# \date 2009-03-10-21-45-GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Export displace
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010 Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")

# Start each run with a fresh log file.
if os.path.isfile("log.log"):
    os.remove("log.log")
log = open("log.log", "w")

# Site/tool configuration; the wildcard imports provide printLog, mkPath,
# copyFilesExtNoTreeIfNeeded and the *Directory path settings used below.
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *

printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Export displace")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")

# Copy displacement maps (.tga and .png) from each source directory into
# the flat export build directory, only when the destination is outdated.
exportPath = ExportBuildDirectory + "/" + DisplaceExportDirectory
mkPath(log, exportPath)
for sourceDir in DisplaceSourceDirectories:
    sourcePath = DatabaseDirectory + "/" + sourceDir
    mkPath(log, sourcePath)
    for mapExt in (".tga", ".png"):
        copyFilesExtNoTreeIfNeeded(log, sourcePath, exportPath, mapExt)

log.close()
# end of file
# end of file
|
osgcc/ryzom
|
nel/tools/build_gamedata/processes/displace/1_export.py
|
Python
|
agpl-3.0
| 1,885
|
#-*- coding: utf-8 -*-
#
# Blah
'''opencv wrapper'''
__all__ = ('ALoaderOpenCV', 'ImageOpenCV', 'AsyncImageOpenCV')
from kivy.logger import Logger
from kivy.core.image import ImageLoader
from kivy.compat import PY2
from os import write, close, O_RDWR, O_CREAT
from os import open as openfile
import mimetypes
from kivy.uix.image import Image as UixImage
from kivy.core.image import Image as CoreImage
from kivy.cache import Cache
from kivy.loader import LoaderThreadPool
from kivy.graphics.texture import Texture
from kivy.resources import resource_find
from kivy.properties import BooleanProperty, NumericProperty
from modules.core.crc64 import CRC64digest
from modules.core.tempstorage import get_temp_folder_prefix
from kivy.compat import queue
from threading import Thread
class _WorkerLow(Thread):
'''Thread executing tasks from a given tasks queue
'''
def __init__(self, pool, tasks):
Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.pool = pool
self.start()
def run(self):
from kivy import platform
if platform == 'android':
from jnius import autoclass
AndroidSystem = autoclass('android.os.Process')
AndroidSystem.setThreadPriority(AndroidSystem.THREAD_PRIORITY_LOWEST)
Logger.info('_WorkerLow: set low thread property')
while self.pool.running:
func, args, kargs = self.tasks.get()
try:
func(*args, **kargs)
except Exception as e:
print(e)
self.tasks.task_done()
class _ThreadPoolLow(object):
    """Fixed-size pool of low-priority worker threads fed by one queue."""

    def __init__(self, num_threads):
        super(_ThreadPoolLow, self).__init__()
        self.running = True
        self.tasks = queue.Queue()
        # Spawn the workers; each registers itself on the shared queue.
        for _idx in range(num_threads):
            _WorkerLow(self, self.tasks)

    def add_task(self, func, *args, **kargs):
        """Queue ``func(*args, **kargs)`` for execution by a worker."""
        self.tasks.put((func, args, kargs))

    def stop(self):
        """Stop the workers' loops and block until queued tasks finish."""
        self.running = False
        self.tasks.join()
class LoaderOpenCV(LoaderThreadPool):
    """Asynchronous image loader that runs its workers in a low-priority
    thread pool (_ThreadPoolLow) and keeps a local on-disk cache of
    downloaded remote files, keyed by a CRC64 hash of the URL."""
    def __init__(self, **kwargs):
        super(LoaderOpenCV, self).__init__(**kwargs)
    def __del__(self):
        super(LoaderOpenCV, self).__del__()
    def _set_num_workers(self, num):
        # A pool with no workers would never service load requests.
        if num < 1:
            raise Exception('Must have at least 1 workers')
        self._num_workers = num
    def _get_num_workers(self):
        return self._num_workers
    num_workers = property(_get_num_workers, _set_num_workers)
    def start(self):
        # Deliberately calls the *grandparent* start() — super(LoaderThreadPool,
        # self) skips LoaderThreadPool.start() so its default pool is never
        # created; the low-priority pool below is used instead.
        super(LoaderThreadPool, self).start()
        self.pool = _ThreadPoolLow(self._num_workers)
        from kivy.clock import Clock
        Clock.schedule_interval(self.run, 0)
    def drop_request(self, filename, client, load_callback=None, post_callback=None, **kwargs):
        """Cancel a pending load for *filename*: remove its entry from the
        client list and the load queue, and (unless kwargs['nocache'] is
        truthy) evict it from the 'kv.loader' cache.

        Matching is by filename only; the client/callback checks are
        intentionally commented out below.
        """
        #Logger.info('_drop_request: from cache queue %s %d %d' % (filename,len(self._client),len(self._q_load)))
        item = None
        for i in self._client:
            if (i[0] == filename):
                #and (i[1] == client):
                item = i
                break
        if item:
            Logger.info('_drop_request: found client %s ' % filename)
            self._client.remove(item)
        item = None
        for i in self._q_load:
            if (i['filename'] == filename):
                #and (i['load_callback'] == load_callback) and (i['post_callback'] == post_callback):
                item = i
                break
        if item:
            Logger.info('_drop_request: found _q_load %s ' % filename)
            self._q_load.remove(item)
        if not kwargs.get('nocache', False):
            #Logger.info('_drop_request: dropped %s ' % filename)
            Cache.remove('kv.loader', filename)
    def _load_local(self, filename, kwargs):
        '''(internal) Loading a local file'''
        # With recent changes to CoreImage, we must keep data otherwise,
        # we might be unable to recreate the texture afterwise.
        # -- Well we don't need it - we use opencv image
        return ImageLoader.load(filename, keep_data=True, **kwargs)
    def _load_urllib(self, filename, kwargs):
        '''(internal) Loading a network file. First download it, save it to a
        temporary file, and pass it to _load_local().

        Downloads are cached on disk under a CRC64-hashed name; a cache
        hit skips the download entirely. Returns the loaded image data,
        or self.error_image on any failure.'''
        if PY2:
            import urllib2 as urllib_request
        else:
            import urllib.request as urllib_request
        proto = filename.split(':', 1)[0]
        if proto == 'smb':
            try:
                # note: it's important to load SMBHandler every time
                # otherwise the data is occasionaly not loaded
                from smb.SMBHandler import SMBHandler
            except ImportError:
                Logger.warning(
                    'Loader: can not load PySMB: make sure it is installed')
                return
        _BINARY = 0
        import glob
        import tempfile
        from kivy import platform
        if platform in ['win', 'windows']:
            # O_BINARY exists only on Windows; elsewhere the flag is a no-op 0.
            from os import O_BINARY
            _BINARY = O_BINARY
        data = fd = _out_osfd = None
        try:
            # check if we already have the file locally
            filename_hash = CRC64digest(filename)
            filename_hashed_no_ext = get_temp_folder_prefix()+filename_hash
            filename_hash_cached = glob.glob(filename_hashed_no_ext+'.*')
            #Logger.info('url_load: <%s> %s <%s> %s' % (filename,filename_hash,filename_hashed_no_ext,filename_hash_cached))
            data = None
            if len(filename_hash_cached) > 0:
                # load data
                Logger.info('AsyncImageOpenCV: Loading local cached <%s>' % filename_hash_cached[0])
                try:
                    data = self._load_local(filename_hash_cached[0], kwargs)
                except Exception:
                    # corrupt/unreadable cache entry: fall through to download
                    data = None
            # try to download
            if data is None:
                Logger.info('AsyncImageOpenCV: download <%s>' % filename)
                if proto == 'smb':
                    # read from samba shares
                    fd = urllib_request.build_opener(SMBHandler).open(filename)
                else:
                    # read from internet
                    fd = urllib_request.urlopen(filename)
                idata = fd.read()
                info = fd.info()
                fd.close()
                fd = None
                # pick a cache-file extension from the MIME type, falling
                # back to the URL's own extension
                if 'content-type' in info.keys():
                    suffix = mimetypes.guess_extension(info['content-type'])
                else:
                    suffix = '.%s' % (filename.split('.')[-1])
                _out_filename = filename_hashed_no_ext+suffix
                # write to local filename
                _out_osfd = openfile(_out_filename, O_RDWR | O_CREAT | _BINARY)
                write(_out_osfd, idata)
                close(_out_osfd)
                _out_osfd = None
                # load data
                data = self._load_local(_out_filename, kwargs)
                # FIXME create a clean API for that
                for imdata in data._data:
                    imdata.source = filename
        except Exception:
            Logger.exception('Failed to load image <%s>' % filename)
            # close file when remote file not found or download error
            # NOTE(review): if _out_osfd is None here, close(None) raises
            # TypeError, which `except OSError` does not catch — the error
            # would propagate instead of returning error_image. Confirm.
            try:
                close(_out_osfd)
                _out_osfd = None
            except OSError:
                pass
            return self.error_image
        finally:
            if fd:
                fd.close()
            if _out_osfd:
                close(_out_osfd)
            #if _out_filename != '':
            # unlink(_out_filename)
        return data
# Module-level singleton loader shared by AsyncImageOpenCV below.
ALoaderOpenCV = LoaderOpenCV()
#Loader = ALoaderOpenCV
class CoreImageOpenCV(CoreImage):
    """CoreImage variant that threads a target decode resolution
    (res_width/res_height) and EXIF flag through to the image loader,
    and keys its caches on that resolution."""
    # Target decode size; -1 means "use the image's native size".
    res_width = NumericProperty(-1)
    res_height = NumericProperty(-1)
    res_autosize = BooleanProperty(False)
    load_exif = BooleanProperty(False)
    def __init__(self, arg, **kwargs):
        #Logger.info('CoreImageOpenCV: kwargs %s' % (kwargs))
        # Pull our extra options out of kwargs before CoreImage sees them,
        # then write the resolved values back so loaders receive them.
        self.res_width = kwargs.get('res_width', -1)
        self.res_height = kwargs.get('res_height', -1)
        self.load_exif = kwargs.get('load_exif', False)
        if self.res_autosize and self.res_width <= 0:
            self.res_width = self.width
        if self.res_autosize and self.res_height <= 0:
            self.res_height = self.height
        kwargs['res_width'] = self.res_width
        kwargs['res_height'] = self.res_height
        super(CoreImageOpenCV, self).__init__(arg, **kwargs)
    def _set_filename(self, value):
        """Load *value*, consulting the kv.image/kv.texture caches first.

        The cache uid includes the decode resolution when one is set, so
        the same file decoded at different sizes caches separately.
        """
        #Logger.info('CoreImageOpenCV: value %s' % (value))
        if value is None or value == self._filename:
            return
        self._filename = value
        # construct uid as a key for Cache
        if (self.res_width > 0) or (self.res_height > 0):
            uid = '%s|%d|%d|%s|%s' % (self.filename, self.res_width, self.res_height, self._mipmap, 0)
        else:
            uid = '%s|%s|%s' % (self.filename, self._mipmap, 0)
        # in case of Image have been asked with keep_data
        # check the kv.image cache instead of texture.
        image = Cache.get('kv.image', uid)
        if image:
            # we found an image, yeah ! but reset the texture now.
            self.image = image
            # if image.__class__ is core image then it's a texture
            # from atlas or other sources and has no data so skip
            if (image.__class__ != self.__class__ and
                    not image.keep_data and self._keep_data):
                # cached copy lacks the pixel data we need: evict and
                # recurse once to reload from disk
                self.remove_from_cache()
                self._filename = ''
                self._set_filename(value)
            else:
                self._texture = None
                self._img_iterate()
            return
        else:
            # if we already got a texture, it will be automatically reloaded.
            _texture = Cache.get('kv.texture', uid)
            if _texture:
                self._texture = _texture
                return
            # if image not already in cache then load
            tmpfilename = self._filename
            #Logger.info('CoreImageOpenCV: set_filename %s' % (tmpfilename))
            #Logger.info('CoreImageOpenCV: %d %d' % (self.res_width, self.res_height))
            image = ImageLoader.load(
                self._filename, keep_data=self._keep_data,
                mipmap=self._mipmap, nocache=self._nocache, res_width=self.res_width,
                res_height=self.res_height, load_exif=self.load_exif)
            self._filename = tmpfilename
            # put the image into the cache if needed
            if isinstance(image, Texture):
                self._texture = image
                self._size = image.size
            else:
                self.image = image
                if not self._nocache:
                    Cache.append('kv.image', uid, self.image)
    def _get_filename(self):
        return self._filename
    filename = property(_get_filename, _set_filename,
                        doc='Get/set the filename of image')
class ImageOpenCV(UixImage):
    """Image widget that decodes through CoreImageOpenCV, forwarding the
    requested decode resolution (res_width/res_height) and EXIF flag.

    With auto_update_size=True the widget resizes itself to the decoded
    texture's dimensions.
    """
    # Target decode size; -1 means native size.
    res_width = NumericProperty(-1)
    res_height = NumericProperty(-1)
    res_autosize = BooleanProperty(False)
    load_exif = BooleanProperty(False)
    auto_update_size = BooleanProperty(False)

    def __init__(self, **kwargs):
        # Resolve our extra options first, then write them back into kwargs
        # so texture_update() and the loader see the final values.
        self.res_width = kwargs.get('res_width', -1)
        self.res_height = kwargs.get('res_height', -1)
        self.load_exif = kwargs.get('load_exif', False)
        self.auto_update_size = kwargs.get('auto_update_size', False)
        if self.res_autosize and self.res_width <= 0:
            self.res_width = self.width
        if self.res_autosize and self.res_height <= 0:
            self.res_height = self.height
        kwargs['res_width'] = self.res_width
        kwargs['res_height'] = self.res_height
        super(ImageOpenCV, self).__init__(**kwargs)

    def texture_update(self, *largs, **kwargs):
        """Reload self.texture from self.source via CoreImageOpenCV."""
        if not self.source:
            self.texture = None
        elif 'atlas://' in self.source:
            # Atlas sources are handled by the stock implementation.
            # BUGFIX: the bound super method already receives self; passing
            # it again explicitly shifted it into *largs.
            return super(ImageOpenCV, self).texture_update(*largs, **kwargs)
        else:
            filename = resource_find(self.source)
            if filename is None:
                # BUGFIX: the message had no {filename} placeholder, so the
                # offending path was never included in the log line.
                return Logger.error('ImageOpenCV: Error reading file {filename}'.
                                    format(filename=self.source))
            mipmap = self.mipmap
            if self._coreimage is not None:
                self._coreimage.unbind(on_texture=self._on_tex_change)
            try:
                kwargs['res_width'] = self.res_width
                kwargs['res_height'] = self.res_height
                self._coreimage = ci = CoreImageOpenCV(filename, mipmap=mipmap,
                                                       anim_delay=self.anim_delay,
                                                       keep_data=self.keep_data,
                                                       nocache=self.nocache,
                                                       load_exif=self.load_exif,
                                                       **kwargs)
            except Exception:
                # Was a bare `except:`; keep the best-effort behaviour but
                # no longer swallow KeyboardInterrupt/SystemExit.
                self._coreimage = ci = None
            if ci:
                if self.auto_update_size and ci._texture:
                    # Adopt the decoded texture's size as the widget size.
                    self.size = [ci._texture.width, ci._texture.height]
                    self.width = ci._texture.width
                    self.height = ci._texture.height
                ci.bind(on_texture=self._on_tex_change)
                self.texture = ci.texture
class AsyncImageOpenCV(ImageOpenCV):
    """Asynchronous ImageOpenCV: sources load through the shared
    ALoaderOpenCV pool; stretch mode and opacity are stashed while
    loading and restored when the image arrives. An optional
    LoadedCallBack(self, args) fires once loading completes."""
    def __init__(self, **kwargs):
        # State stashed during a load, restored on completion.
        self.prev_source = None
        self.prev_opacity = None
        self.prev_allow_stretch = None
        self.Loaded = False
        self._LoadedCallBack = kwargs.get('LoadedCallBack', None);
        self._LoadedCallBackArgs = kwargs.get('LoadedCallBackArgs',[]);
        self._coreimage = None
        super(AsyncImageOpenCV, self).__init__(**kwargs)
        self.bind(source=self._load_source)
        if self.source:
            self._load_source()
        #Logger.info('AsyncImageOpenCV: resource size [%d %d] ' % (self.res_width,self.res_height))
    def __del__(self):
        self.unbind(source=self._load_source)
        self._coreimage.unbind(on_load=self._on_source_load)
        self._coreimage.unbind(on_texture=self._on_tex_change)
        super(AsyncImageOpenCV, self).__del__()
    def on_parent(self, instance, value):
        # Removed from the widget tree: cancel any pending load request.
        if value is None:
            ALoaderOpenCV.drop_request(self.source, self._coreimage)
    def on_load(self):
        # Restore the stretch mode stashed by _load_source.
        self.allow_stretch = self.prev_allow_stretch
        self.prev_allow_stretch = None
        #Logger.info('_AsyncImage_load_source: on_load: ImageLoad %s ' % self.allow_stretch)
        pass
    def _load_source(self, *args, **kwargs):
        """Kick off (or cancel) loading whenever self.source changes."""
        #Logger.info('_AsyncImage_load_source: %s', (self.source))
        self.Loaded = False
        # Disable stretching while the placeholder is shown.
        if not self.prev_allow_stretch:
            self.prev_allow_stretch = self.allow_stretch
            self.allow_stretch = False
        #Logger.info('_AsyncImage_load_source: _load_source: ImageProxy %s' % self.prev_allow_stretch)
        source = self.source
        if self.prev_source:
            # A previous request may still be queued: drop it first.
            ALoaderOpenCV.drop_request(self.prev_source, self._coreimage)
        self.prev_source = source
        if not source:
            # Source cleared: detach, blank the texture and hide the widget.
            if self._coreimage is not None:
                self._coreimage.unbind(on_texture=self._on_tex_change)
            self.texture = None
            self._coreimage = None
            self.prev_opacity = self.opacity
            self.opacity = 0.0
        else:
            if self._coreimage:
                self._coreimage.unbind(on_load=self._on_source_load)
                self._coreimage.unbind(on_texture=self._on_tex_change)
            if not self.is_uri(source):
                source = resource_find(source)
            kwargs['res_width'] = self.res_width
            kwargs['res_height'] = self.res_height
            kwargs['load_exif'] = self.load_exif
            #Logger.info('AsyncImageOpenCV: ----- resource size [%d %d] ' % (self.res_width,self.res_height))
            self._coreimage = image = ALoaderOpenCV.image(source,
                                                          nocache=self.nocache,
                                                          mipmap=self.mipmap, **kwargs)
            image.bind(on_load=self._on_source_load)
            image.bind(on_texture=self._on_tex_change)
            self.texture = image.texture
            if self.prev_opacity:
                self.opacity = self.prev_opacity
            else:
                self.prev_opacity = self.opacity
            # if the image was loaded from cache we already have it -
            # so on load won't be called - call it manually -
            # but make sure it's not the placeholder image
            if (image.loaded is True) and (image._texture is not None):
                self._coreimage.dispatch('on_load')
    def _on_source_load(self, value):
        """Apply the loaded texture and fire the user callback, if any."""
        if self.prev_allow_stretch:
            #Logger.info('_AsyncImage_load_source: _load_source: Allow %s' % self.prev_allow_stretch)
            self.allow_stretch = self.prev_allow_stretch
            self.prev_allow_stretch = None
        #Logger.info('_AsyncImage_load_source: _load_source: %s' % self.source)
        self.Loaded = True
        image = self._coreimage.image
        if not image:
            #return
            pass
        else:
            self.texture = image.texture
        if self._LoadedCallBack:
            self._LoadedCallBack(self, self._LoadedCallBackArgs)
    def SetLoadCallback(self, cb, cbargs):
        """Register cb(self, cbargs); invoked immediately if already loaded."""
        self._LoadedCallBack = cb
        self._LoadedCallBackArgs = cbargs
        if self.Loaded and self._LoadedCallBack:
            self._LoadedCallBack(self, self._LoadedCallBackArgs)
    def is_uri(self, filename):
        # True for remote schemes handled by the async loader.
        proto = filename.split('://', 1)[0]
        return proto in ('http', 'https', 'ftp', 'smb')
    def _on_tex_change(self, *largs):
        if self._coreimage:
            self.texture = self._coreimage.texture
    def texture_update(self, *largs):
        # Loading is driven by _load_source; the synchronous path is a no-op.
        pass
# One-shot registration of the native OpenCV loader at highest priority
# in Kivy's loader list (Android/Linux only).
g_bInitialized = False
# register
if not g_bInitialized:
    from kivy import platform
    if platform in ['android', 'linux']:
        # NOTE(review): native_opencv is a platform-specific extension
        # module; this import fails where it is not built — confirm.
        from native_opencv import ImageLoaderOpenCV
        #ImageLoader.register(ImageLoaderOpenCV)
        ImageLoader.loaders.insert(0, ImageLoaderOpenCV)
    g_bInitialized = True
#####################################################################################
|
insiderr/insiderr-app
|
app/modules/image/img_opencv.py
|
Python
|
gpl-3.0
| 18,899
|
import json
import os
import requests
import webbrowser
from get_twitter_api_params import get_rate_limit_rules, save_api_dict
from lxml import html
from twython import Twython
class TweetCatcher():
    """Interactive Twython client for the Twitter REST API.

    Prompts for endpoint parameters on the console and caches OAuth
    tokens in ~/.tweetcatcher_oath between runs.

    NOTE: Python 2 only (uses raw_input and dict.iterkeys). APP_KEY and
    APP_SECRET are not defined in this file; they must exist at module
    scope before instantiation.
    """

    def __init__(self):
        self.app_creds = Twython(APP_KEY, APP_SECRET).get_authentication_tokens()
        self.client = self.gen_client()
        self.get_api_dict()

    def gen_client(self):
        """Return an authorized Twython client.

        Reuses tokens cached in ~/.tweetcatcher_oath when present;
        otherwise runs the browser/PIN OAuth dance and caches the new
        tokens for next time.
        """
        filepath = "{}/.tweetcatcher_oath".format(os.path.expanduser('~'))
        if os.path.isfile(filepath):
            with open(filepath, "r") as oauth_file:
                oauth_tokens = oauth_file.read().splitlines()
            return Twython(app_key=APP_KEY,
                           app_secret=APP_SECRET,
                           oauth_token=oauth_tokens[0],
                           oauth_token_secret=oauth_tokens[1])
        else:
            # BUGFIX: these were the bare name `app_creds` (NameError);
            # the credentials are stored on self in __init__.
            webbrowser.open(self.app_creds['auth_url'])
            pin = raw_input('Enter PIN: ')
            oauth = Twython(app_key=APP_KEY,
                            app_secret=APP_SECRET,
                            oauth_token=self.app_creds['oauth_token'],
                            oauth_token_secret=self.app_creds['oauth_token_secret']).get_authorized_tokens(pin)
            # BUGFIX: save_oauth_tokens reads these attributes but nothing
            # ever set them, so tokens were never cached between runs.
            self.OAUTH_TOKEN = oauth['oauth_token']
            self.OAUTH_TOKEN_SECRET = oauth['oauth_token_secret']
            self.save_oauth_tokens()
            return Twython(app_key=APP_KEY,
                           app_secret=APP_SECRET,
                           oauth_token=oauth['oauth_token'],
                           oauth_token_secret=oauth['oauth_token_secret'])

    def catch(self, endpoint):
        """Prompt for the endpoint's parameters and issue the request.

        Dispatches to GET or POST depending on which table the endpoint
        appears in; prints a usage hint for unknown endpoints.
        """
        if endpoint in self.twitter_endpoints['GET']:
            new_params = self.interact('GET', endpoint)
            return self.client.get(endpoint, new_params)
        elif endpoint in self.twitter_endpoints['POST']:
            new_params = self.interact('POST', endpoint)
            return self.client.post(endpoint, new_params)
        else:
            print("\n*** Please use an actual endpoint. See TweetCatcher.twitter_endpoints for details ***\n")

    def save_oauth_tokens(self):
        """Persist the current OAuth token pair to ~/.tweetcatcher_oath."""
        filepath = "{}/.tweetcatcher_oath".format(os.path.expanduser('~'))
        with open(filepath, "w") as oauth_file:
            oauth_file.write("{}\n{}\n".format(self.OAUTH_TOKEN, self.OAUTH_TOKEN_SECRET))

    def get_api_dict(self):
        """Load the endpoint/parameter table from ~/.twitter_api, or
        rebuild and save it when the cache file is missing."""
        try:
            filepath = "{}/.twitter_api".format(os.path.expanduser('~'))
            with open(filepath, "r") as api_file:
                api_dict = json.load(api_file)
            self.twitter_endpoints = api_dict
        except IOError:
            self.twitter_endpoints = save_api_dict()

    def interact(self, method, endpoint):
        """Prompt for a value for each of the endpoint's parameters.

        Empty input leaves a parameter as None. Returns the (mutated)
        parameter dict.
        """
        params = self.twitter_endpoints[method][endpoint]
        print('-'*20)
        print('Please enter values for the following params: ')
        print('-'*20)
        for param in params.iterkeys():
            params[param] = raw_input('{} = '.format(param)) or None
        return params
|
withtwoemms/tweetcatcher
|
tweetcatcher.py
|
Python
|
mit
| 2,926
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from nova.api.openstack.compute.plugins.v3 import migrations
from nova import context
from nova import exception
from nova import objects
from nova.objects import base
from nova.openstack.common.fixture import moxstubout
from nova import test
# Two representative migration records (distinct ids, nodes, computes and
# flavor ids) used to build the fake MigrationList the controller tests
# run against.
fake_migrations = [
    {
        'id': 1234,
        'source_node': 'node1',
        'dest_node': 'node2',
        'source_compute': 'compute1',
        'dest_compute': 'compute2',
        'dest_host': '1.2.3.4',
        'status': 'Done',
        'instance_uuid': 'instance_id_123',
        'old_instance_type_id': 1,
        'new_instance_type_id': 2,
        'created_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
        'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
        'deleted_at': None,
        'deleted': False
    },
    {
        'id': 5678,
        'source_node': 'node10',
        'dest_node': 'node20',
        'source_compute': 'compute10',
        'dest_compute': 'compute20',
        'dest_host': '5.6.7.8',
        'status': 'Done',
        'instance_uuid': 'instance_id_456',
        'old_instance_type_id': 5,
        'new_instance_type_id': 6,
        'created_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
        'updated_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
        'deleted_at': None,
        'deleted': False
    }
]
# The dicts above wrapped into a MigrationList object, as the compute API
# would return it.
migrations_obj = base.obj_make_list(
    'fake-context',
    objects.MigrationList(),
    objects.Migration,
    fake_migrations)
class FakeRequest(object):
    """Minimal stand-in for an API request: admin context plus an empty
    query-string dict (tests overwrite GET with their filters)."""
    environ = {"nova.context": context.get_admin_context()}
    GET = {}
class MigrationsTestCase(test.NoDBTestCase):
    """Tests for the v3 migrations API controller (index action)."""
    def setUp(self):
        """Run before each test."""
        super(MigrationsTestCase, self).setUp()
        self.controller = migrations.MigrationsController()
        self.context = context.get_admin_context()
        self.req = FakeRequest()
        self.req.environ['nova.context'] = self.context
        mox_fixture = self.useFixture(moxstubout.MoxStubout())
        self.mox = mox_fixture.mox
    def test_index(self):
        """index() forwards the query filters to compute_api.get_migrations
        and returns the formatted migration list."""
        migrations_in_progress = {
            'migrations': migrations.output(migrations_obj)}
        # output() must expose 'id' but strip the soft-delete columns.
        for mig in migrations_in_progress['migrations']:
            self.assertIn('id', mig)
            self.assertNotIn('deleted', mig)
            self.assertNotIn('deleted_at', mig)
        filters = {'host': 'host1', 'status': 'migrating',
                   'cell_name': 'ChildCell'}
        self.req.GET = filters
        # Expect the filters to be passed through verbatim.
        self.mox.StubOutWithMock(self.controller.compute_api,
                                 "get_migrations")
        self.controller.compute_api.get_migrations(
            self.context, filters).AndReturn(migrations_obj)
        self.mox.ReplayAll()
        response = self.controller.index(self.req)
        self.assertEqual(migrations_in_progress, response)
    def test_index_needs_authorization(self):
        """index() rejects a non-admin context with PolicyNotAuthorized."""
        user_context = context.RequestContext(user_id=None,
                                              project_id=None,
                                              is_admin=False,
                                              read_deleted="no",
                                              overwrite=False)
        self.req.environ['nova.context'] = user_context
        self.assertRaises(exception.PolicyNotAuthorized, self.controller.index,
                          self.req)
|
luzheqi1987/nova-annotation
|
nova/tests/unit/api/openstack/compute/plugins/v3/test_migrations.py
|
Python
|
apache-2.0
| 3,967
|
# Copyright 2019 Zadara Storage, Inc.
# Originally authored by Jeremy Brown - https://github.com/jwbrown77
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from typing import List
from future.standard_library import install_aliases
from urllib.parse import quote
install_aliases()
from zadarapy.validators import varify_notification_severity, verify_field, verify_start_limit, \
verify_email, varify_otp_token
def get_all_vpsa_users(session, start=None, limit=None, return_type=None,
                       **kwargs):
    """
    List every VPSA user configured on the array.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object.  Required.

    :type start: int
    :param start: Offset of the first VPSA user to display.  Optional.

    :type: limit: int
    :param limit: Maximum number of VPSA users to return.  Optional.

    :type return_type: str
    :param return_type: Pass the string 'json' for a JSON string;
        anything else yields a Python dictionary (the default).

    :rtype: dict, str
    :returns: A dictionary or JSON string, per return_type.
    """
    paging = verify_start_limit(start, limit)
    return session.get_api(path='/api/users.json', parameters=paging,
                           return_type=return_type, **kwargs)
def create_user(session, username, email, notify_on_events, notification_severity, return_type=None, **kwargs):
    """
    Creates a VPSA user. User will receive a temporary password at the
    provided email address and will be forced to change it on first login.
    Only a VPSA admin may perform this action.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object. Required.

    :type username: str
    :param username: The VPSA user's username. Required.

    :type email: str
    :param email: The VPSA user's email address. Required.

    :param notify_on_events: Whether the user should be notified on
        events. Passed through to the API unvalidated. Required.

    :param notification_severity: Minimum severity of notifications for
        the user; validated by varify_notification_severity. Required.

    :type return_type: str
    :param return_type: If this is set to the string 'json', this function
        will return a JSON string. Otherwise, it will return a Python
        dictionary. Optional (will return a Python dictionary by default).

    :rtype: dict, str
    :returns: A dictionary or JSON data set as a string depending on
        return_type parameter.
    """
    username = verify_field(username, "username")
    verify_email(email)
    varify_notification_severity(notification_severity)
    body_values = {'username': username, 'email': email,
                   'notify_on_events': notify_on_events, 'notification_severity': notification_severity}
    path = '/api/users.json'
    return session.post_api(path=path, body=body_values,
                            return_type=return_type, **kwargs)
def delete_vpsa_user(session, username, return_type=None, **kwargs):
    """
    Delete a VPSA user.  Only a VPSA admin may perform this action.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object.  Required.

    :type username: str
    :param username: The VPSA user's username.  Required.

    :type return_type: str
    :param return_type: Pass the string 'json' for a JSON string;
        anything else yields a Python dictionary (the default).

    :rtype: dict, str
    :returns: A dictionary or JSON string, per return_type.
    """
    # URL-encode the validated username before embedding it in the path.
    safe_username = quote(verify_field(username, "username"))
    return session.delete_api(path='/api/users/{0}.json'.format(safe_username),
                              return_type=return_type, **kwargs)
def get_vpsa_user_api_key(session, username, password, return_type=None,
                          **kwargs):
    """
    Retrieve a VPSA user's API key from their username and password.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object.  Required.

    :type username: str
    :param username: The VPSA user's username.  Required.

    :type password: str
    :param password: The VPSA user's password.  Required.

    :type return_type: str
    :param return_type: Pass the string 'json' for a JSON string;
        anything else yields a Python dictionary (the default).

    :rtype: dict, str
    :returns: A dictionary or JSON string, per return_type.
    """
    login_body = {'user': verify_field(username, "username"),
                  'password': verify_field(password, "password",
                                           allow_quote=True)}
    return session.post_api(path='/api/users/login.json', body=login_body,
                            return_type=return_type, **kwargs)
def reset_vpsa_user_api_key(session, username, password, otp_attempt,
                            return_type=None, **kwargs):
    """
    Resets the VPSA user's API/access key to a new value. Only a VPSA admin
    may perform this action. This action is irreversible.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object. Required. Note
        that API key MUST be specified for the Session, even though the point
        of this function is to reset the API key. So just provide any string
        as the API key for this session.

    :type username: str
    :param username: The VPSA user's username. Required.

    :type password: str
    :param password: The VPSA user's password. Required.

    :type otp_attempt: str
    :param otp_attempt: A dual factor authentication token. May be None
        when two-factor authentication is not enabled.

    :type return_type: str
    :param return_type: If this is set to the string 'json', this function
        will return a JSON string. Otherwise, it will return a Python
        dictionary. Optional (will return a Python dictionary by default).

    :rtype: dict, str
    :returns: A dictionary or JSON data set as a string depending on
        return_type parameter.
    """
    username = verify_field(username, "username")
    username_for_path = quote(username)
    password = verify_field(password, 'password', allow_quote=True)
    if otp_attempt:
        otp_attempt = varify_otp_token(otp_attempt)
    path = '/api/users/{0}/access_key.json'.format(username_for_path)
    body = {'username': username, 'password': password,
            'otp_attempt': otp_attempt}
    # This endpoint must be called without the X-Access-Key header, so the
    # session key is blanked for the duration of the call.
    zadara_key = session.zadara_key
    session.zadara_key = ''
    try:
        return session.post_api(path=path, body=body,
                                return_type=return_type, **kwargs)
    finally:
        # BUGFIX: restore the key even when post_api raises; previously an
        # exception left the session with an empty key.
        session.zadara_key = zadara_key
def change_vpsa_user_password_by_password(session, username,
                                          existing_password, new_password,
                                          return_type=None, **kwargs):
    """
    Change a VPSA user's password using their current password.  If the
    password was forgotten, use change_vpsa_user_password_by_code instead.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object.  Required.

    :type username: str
    :param username: The VPSA user's username.  Required.

    :type existing_password: str
    :param existing_password: The VPSA user's current password.  Required.

    :type new_password: str
    :param new_password: The new password for the VPSA user.  Required.

    :type return_type: str
    :param return_type: Pass the string 'json' for a JSON string;
        anything else yields a Python dictionary (the default).

    :rtype: dict, str
    :returns: A dictionary or JSON string, per return_type.
    """
    user = verify_field(username, "username")
    old_pwd = verify_field(existing_password, "existing_password",
                           allow_quote=True)
    new_pwd = verify_field(new_password, "new_password", allow_quote=True)
    change_body = {'user': user, 'password': old_pwd,
                   'new_password': new_pwd}
    return session.post_api(path='/api/users/password.json',
                            body=change_body,
                            return_type=return_type, **kwargs)
def change_vpsa_user_password_by_code(session, username, code, new_password,
                                      return_type=None, **kwargs):
    """
    Change a VPSA user's password using a password reset code.  Use
    generate_vpsa_user_password_reset_code to e-mail a code to the user;
    when the existing password is known, use
    change_vpsa_user_password_by_password instead.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object.  Required.

    :type username: str
    :param username: The VPSA user's username.  Required.

    :type code: str
    :param code: The password reset code e-mailed to the user.  Required.

    :type new_password: str
    :param new_password: The new password for the VPSA user.  Required.

    :type return_type: str
    :param return_type: Pass the string 'json' for a JSON string;
        anything else yields a Python dictionary (the default).

    :rtype: dict, str
    :returns: A dictionary or JSON string, per return_type.
    """
    # Validate, then URL-encode: the encoded form goes into both the path
    # and the request body (matching the API's expectations).
    user = quote(verify_field(username, "username"))
    new_pwd = verify_field(new_password, "new_password", allow_quote=True)
    reset_body = {'user': user, 'code': code, 'new_password': new_pwd}
    return session.post_api(
        path='/api/users/{0}/password_code.json'.format(user),
        body=reset_body, return_type=return_type, **kwargs)
def generate_vpsa_user_password_reset_code(session, username,
                                           return_type=None, **kwargs):
    """
    Asks the VPSA to e-mail the given user a password reset code. The
    code can then be redeemed with change_vpsa_user_password_by_code.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object. Required.

    :type username: str
    :param username: The VPSA user's username. Required.

    :type return_type: str
    :param return_type: If this is set to the string 'json', this function
        will return a JSON string. Otherwise, it will return a Python
        dictionary. Optional (will return a Python dictionary by default).

    :rtype: dict, str
    :returns: A dictionary or JSON data set as a string depending on
        return_type parameter.
    """
    checked_username = verify_field(username, "username")
    return session.post_api(path='/api/users/reset_password.json',
                            body={'username': checked_username},
                            return_type=return_type, **kwargs)
def enable_cloud_admin_access(session, confirm, return_type=None, **kwargs):
    """
    Grants the storage cloud administrator access to this VPSA's GUI for
    troubleshooting purposes. Volume data is not exposed by this setting.
    Enabled by default.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object. Required.

    :type confirm: bool
    :param confirm: Safeguard flag; must be True for the call to proceed
        since this function takes no other arguments.

    :type return_type: str
    :param return_type: If this is set to the string 'json', this function
        will return a JSON string. Otherwise, it will return a Python
        dictionary. Optional (will return a Python dictionary by default).

    :rtype: dict, str
    :returns: A dictionary or JSON data set as a string depending on
        return_type parameter.

    :raises ValueError: If confirm is not truthy.
    """
    if not confirm:
        raise ValueError('The confirm parameter is not set to True - '
                         'cloud admin access will not be enabled.')
    return session.post_api(path='/api/users/admin_access/enable.json',
                            return_type=return_type, **kwargs)
def disable_cloud_admin_access(session, confirm, return_type=None, **kwargs):
    """
    Revokes the storage cloud administrator's access to this VPSA's GUI.
    Volume data is not affected by this setting. Access is enabled by
    default.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object. Required.

    :type confirm: bool
    :param confirm: Safeguard flag; must be True for the call to proceed
        since this function takes no other arguments.

    :type return_type: str
    :param return_type: If this is set to the string 'json', this function
        will return a JSON string. Otherwise, it will return a Python
        dictionary. Optional (will return a Python dictionary by default).

    :rtype: dict, str
    :returns: A dictionary or JSON data set as a string depending on
        return_type parameter.

    :raises ValueError: If confirm is not truthy.
    """
    if not confirm:
        raise ValueError('The confirm parameter is not set to True - '
                         'cloud admin access will not be disabled.')
    return session.post_api(path='/api/users/admin_access/disable.json',
                            return_type=return_type, **kwargs)
def update_user_roles(session, username:str, roles:List[str], return_type=None, **kwargs):
    """
    Replaces the set of roles assigned to a VPSA user.

    :param session: A valid zadarapy.session.Session object. Required.
    :param username: The VPSA user's username. Required.
    :param roles: Array of role ids
    :param return_type: 'json' for a JSON string, otherwise a dict.
    """
    roles_path = '/api/users/{0}/roles.json'.format(username)
    return session.post_api(path=roles_path, body={'roles': roles},
                            return_type=return_type, **kwargs)
def get_user_details(session, username:str,password:str, return_type=None, **kwargs):
    """
    POSTs the given credentials to the VPSA login endpoint and returns
    the API response for that user.

    :param session: A valid zadarapy.session.Session object. Required.
    :param username: The VPSA user's username. Required.
    :param password: The VPSA user's password. Required.
    :param return_type: 'json' for a JSON string, otherwise a dict.
    """
    credentials = {'username': username, 'password': password}
    return session.post_api(path='/api/users/login.json', body=credentials,
                            return_type=return_type, **kwargs)
|
zadarastorage/zadarapy
|
zadarapy/vpsa/vpsa_users.py
|
Python
|
apache-2.0
| 15,007
|
print(bytearray(2**65 - (2**65 - 1)))
|
martinribelotta/micropython
|
tests/basics/bytearray_longint.py
|
Python
|
mit
| 38
|
# This file is part of beets.
# Copyright 2011, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Tests for the command-line interface.
"""
import os
import shutil
import textwrap
import logging
import re
from StringIO import StringIO
import ConfigParser
import _common
from _common import unittest
from beets import library
from beets import ui
from beets.ui import commands
from beets import autotag
from beets import importer
from beets.mediafile import MediaFile
class ListTest(unittest.TestCase):
    """Tests for the output of the ``beet list`` command."""
    def setUp(self):
        # Capture stdin/stdout, and build an in-memory library holding a
        # single item that is also wrapped in an album.
        self.io = _common.DummyIO()
        self.io.install()
        self.lib = library.Library(':memory:')
        i = _common.item()
        i.path = 'xxx/yyy'
        self.lib.add(i)
        self.lib.add_album([i])
        self.item = i
    def tearDown(self):
        self.io.restore()
    def _run_list(self, query='', album=False, path=False, fmt=None):
        # Helper: invoke the list command against the fixture library.
        # NOTE(review): the ``path`` parameter is accepted but never
        # forwarded to list_items -- confirm whether it is still needed.
        commands.list_items(self.lib, query, album, fmt, None)
    def test_list_outputs_item(self):
        self._run_list()
        out = self.io.getoutput()
        self.assertTrue(u'the title' in out)
    def test_list_unicode_query(self):
        self.item.title = u'na\xefve'
        self.lib.store(self.item)
        self.lib._connection().commit()
        self._run_list([u'na\xefve'])
        out = self.io.getoutput()
        self.assertTrue(u'na\xefve' in out.decode(self.io.stdout.encoding))
    def test_list_item_path(self):
        self._run_list(fmt='$path')
        out = self.io.getoutput()
        self.assertEqual(out.strip(), u'xxx/yyy')
    def test_list_album_outputs_something(self):
        self._run_list(album=True)
        out = self.io.getoutput()
        self.assertGreater(len(out), 0)
    def test_list_album_path(self):
        self._run_list(album=True, fmt='$path')
        out = self.io.getoutput()
        self.assertEqual(out.strip(), u'xxx')
    def test_list_album_omits_title(self):
        self._run_list(album=True)
        out = self.io.getoutput()
        self.assertTrue(u'the title' not in out)
    def test_list_uses_track_artist(self):
        self._run_list()
        out = self.io.getoutput()
        self.assertTrue(u'the artist' in out)
        self.assertTrue(u'the album artist' not in out)
    def test_list_album_uses_album_artist(self):
        self._run_list(album=True)
        out = self.io.getoutput()
        self.assertTrue(u'the artist' not in out)
        self.assertTrue(u'the album artist' in out)
    def test_list_item_format_artist(self):
        self._run_list(fmt='$artist')
        out = self.io.getoutput()
        self.assertTrue(u'the artist' in out)
    def test_list_item_format_multiple(self):
        self._run_list(fmt='$artist - $album - $year')
        out = self.io.getoutput()
        self.assertTrue(u'1' in out)
        self.assertTrue(u'the album' in out)
        self.assertTrue(u'the artist' in out)
        self.assertEqual(u'the artist - the album - 1', out.strip())
    def test_list_album_format(self):
        self._run_list(album=True, fmt='$genre')
        out = self.io.getoutput()
        self.assertTrue(u'the genre' in out)
        self.assertTrue(u'the album' not in out)
class RemoveTest(unittest.TestCase):
    """Tests for the ``beet remove`` command, with and without deleting
    the underlying files."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
        self.libdir = os.path.join(_common.RSRC, 'testlibdir')
        os.mkdir(self.libdir)
        # Copy a file into the library.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(os.path.join(_common.RSRC, 'full.mp3'))
        self.lib.add(self.i, True)
    def tearDown(self):
        self.io.restore()
        shutil.rmtree(self.libdir)
    def test_remove_items_no_delete(self):
        # 'y' answers the interactive confirmation prompt.
        self.io.addinput('y')
        commands.remove_items(self.lib, '', False, False, None)
        items = self.lib.items()
        self.assertEqual(len(list(items)), 0)
        # Without delete=True the file must survive on disk.
        self.assertTrue(os.path.exists(self.i.path))
    def test_remove_items_with_delete(self):
        self.io.addinput('y')
        commands.remove_items(self.lib, '', False, True, None)
        items = self.lib.items()
        self.assertEqual(len(list(items)), 0)
        # With delete=True the file must be gone as well.
        self.assertFalse(os.path.exists(self.i.path))
class ModifyTest(unittest.TestCase):
    """Tests for ``beet modify``: database updates, optional tag writes,
    and optional file moves."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
        self.libdir = os.path.join(_common.RSRC, 'testlibdir')
        os.mkdir(self.libdir)
        # Copy a file into the library.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(os.path.join(_common.RSRC, 'full.mp3'))
        self.lib.add(self.i, True)
        self.album = self.lib.add_album([self.i])
    def tearDown(self):
        self.io.restore()
        shutil.rmtree(self.libdir)
    def _modify(self, mods, query=(), write=False, move=False, album=False):
        # Helper: run the modify command, auto-confirming the prompt.
        self.io.addinput('y')
        commands.modify_items(self.lib, mods, query,
                              write, move, album, True, True, None)
    def test_modify_item_dbdata(self):
        self._modify(["title=newTitle"])
        item = self.lib.items().next()
        self.assertEqual(item.title, 'newTitle')
    def test_modify_album_dbdata(self):
        self._modify(["album=newAlbum"], album=True)
        album = self.lib.albums()[0]
        self.assertEqual(album.album, 'newAlbum')
    def test_modify_item_tag_unmodified(self):
        # write=False: the on-disk tag keeps its original value.
        self._modify(["title=newTitle"], write=False)
        item = self.lib.items().next()
        item.read()
        self.assertEqual(item.title, 'full')
    def test_modify_album_tag_unmodified(self):
        self._modify(["album=newAlbum"], write=False, album=True)
        item = self.lib.items().next()
        item.read()
        self.assertEqual(item.album, 'the album')
    def test_modify_item_tag(self):
        # write=True: the change is flushed to the file's tags.
        self._modify(["title=newTitle"], write=True)
        item = self.lib.items().next()
        item.read()
        self.assertEqual(item.title, 'newTitle')
    def test_modify_album_tag(self):
        self._modify(["album=newAlbum"], write=True, album=True)
        item = self.lib.items().next()
        item.read()
        self.assertEqual(item.album, 'newAlbum')
    def test_item_move(self):
        self._modify(["title=newTitle"], move=True)
        item = self.lib.items().next()
        self.assertTrue('newTitle' in item.path)
    def test_album_move(self):
        self._modify(["album=newAlbum"], move=True, album=True)
        item = self.lib.items().next()
        item.read()
        self.assertTrue('newAlbum' in item.path)
    def test_item_not_move(self):
        self._modify(["title=newTitle"], move=False)
        item = self.lib.items().next()
        self.assertFalse('newTitle' in item.path)
    def test_album_not_move(self):
        self._modify(["album=newAlbum"], move=False, album=True)
        item = self.lib.items().next()
        item.read()
        self.assertFalse('newAlbum' in item.path)
class MoveTest(unittest.TestCase, _common.ExtraAsserts):
    """Tests for ``beet move``: moving vs. copying items and albums,
    into the library directory or a custom destination."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
        self.libdir = os.path.join(_common.RSRC, 'testlibdir')
        os.mkdir(self.libdir)
        self.itempath = os.path.join(self.libdir, 'srcfile')
        shutil.copy(os.path.join(_common.RSRC, 'full.mp3'), self.itempath)
        # Add a file to the library but don't copy it in yet.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(self.itempath)
        self.lib.add(self.i, False)
        self.album = self.lib.add_album([self.i])
        # Alternate destination directory.
        self.otherdir = os.path.join(_common.RSRC, 'testotherdir')
    def tearDown(self):
        self.io.restore()
        shutil.rmtree(self.libdir)
        if os.path.exists(self.otherdir):
            shutil.rmtree(self.otherdir)
    def _move(self, query=(), dest=None, copy=False, album=False):
        # Helper: run the move command against the fixture library.
        commands.move_items(self.lib, dest, query, copy, album)
    def test_move_item(self):
        self._move()
        self.lib.load(self.i)
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        # A move (not a copy) removes the source file.
        self.assertNotExists(self.itempath)
    def test_copy_item(self):
        self._move(copy=True)
        self.lib.load(self.i)
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        # A copy leaves the source file in place.
        self.assertExists(self.itempath)
    def test_move_album(self):
        self._move(album=True)
        self.lib.load(self.i)
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertNotExists(self.itempath)
    def test_copy_album(self):
        self._move(copy=True, album=True)
        self.lib.load(self.i)
        self.assertTrue('testlibdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertExists(self.itempath)
    def test_move_item_custom_dir(self):
        self._move(dest=self.otherdir)
        self.lib.load(self.i)
        self.assertTrue('testotherdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertNotExists(self.itempath)
    def test_move_album_custom_dir(self):
        self._move(dest=self.otherdir, album=True)
        self.lib.load(self.i)
        self.assertTrue('testotherdir' in self.i.path)
        self.assertExists(self.i.path)
        self.assertNotExists(self.itempath)
class UpdateTest(unittest.TestCase, _common.ExtraAsserts):
    """Tests for ``beet update``: re-syncing the database with on-disk
    file changes and deletions."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
        self.libdir = os.path.join(_common.RSRC, 'testlibdir')
        os.mkdir(self.libdir)
        # Copy a file into the library.
        self.lib = library.Library(':memory:', self.libdir)
        self.i = library.Item.from_path(os.path.join(_common.RSRC, 'full.mp3'))
        self.lib.add(self.i, True)
        self.album = self.lib.add_album([self.i])
        # Album art.
        artfile = os.path.join(_common.RSRC, 'testart.jpg')
        _common.touch(artfile)
        self.album.set_art(artfile)
        os.remove(artfile)
    def tearDown(self):
        self.io.restore()
        shutil.rmtree(self.libdir)
    def _update(self, query=(), album=False, move=False, reset_mtime=True):
        self.io.addinput('y')
        # Zeroing the stored mtime forces the update command to treat
        # the file as changed; tests that exercise the mtime fast-path
        # pass reset_mtime=False.
        if reset_mtime:
            self.i.mtime = 0
            self.lib.store(self.i)
        commands.update_items(self.lib, query, album, move, True, False, None)
    def test_delete_removes_item(self):
        self.assertTrue(list(self.lib.items()))
        os.remove(self.i.path)
        self._update()
        self.assertFalse(list(self.lib.items()))
    def test_delete_removes_album(self):
        self.assertTrue(self.lib.albums())
        os.remove(self.i.path)
        self._update()
        self.assertFalse(self.lib.albums())
    def test_delete_removes_album_art(self):
        artpath = self.album.artpath
        self.assertExists(artpath)
        os.remove(self.i.path)
        self._update()
        self.assertNotExists(artpath)
    def test_modified_metadata_detected(self):
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        self._update()
        item = self.lib.items().next()
        self.assertEqual(item.title, 'differentTitle')
    def test_modified_metadata_moved(self):
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        self._update(move=True)
        item = self.lib.items().next()
        self.assertTrue('differentTitle' in item.path)
    def test_modified_metadata_not_moved(self):
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        self._update(move=False)
        item = self.lib.items().next()
        self.assertTrue('differentTitle' not in item.path)
    def test_modified_album_metadata_moved(self):
        mf = MediaFile(self.i.path)
        mf.album = 'differentAlbum'
        mf.save()
        self._update(move=True)
        item = self.lib.items().next()
        self.assertTrue('differentAlbum' in item.path)
    def test_modified_album_metadata_art_moved(self):
        artpath = self.album.artpath
        mf = MediaFile(self.i.path)
        mf.album = 'differentAlbum'
        mf.save()
        self._update(move=True)
        album = self.lib.albums()[0]
        self.assertNotEqual(artpath, album.artpath)
    def test_mtime_match_skips_update(self):
        mf = MediaFile(self.i.path)
        mf.title = 'differentTitle'
        mf.save()
        # Make in-memory mtime match on-disk mtime.
        self.i.mtime = os.path.getmtime(self.i.path)
        self.lib.store(self.i)
        self._update(reset_mtime=False)
        item = self.lib.items().next()
        # The matching mtime means the on-disk change goes unnoticed.
        self.assertEqual(item.title, 'full')
class PrintTest(unittest.TestCase):
    """Tests that ui.print_ survives missing or bogus locale settings
    (which affect how Unicode output is encoded)."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
    def tearDown(self):
        self.io.restore()
    def test_print_without_locale(self):
        # Temporarily unset LANG entirely, then restore it.
        lang = os.environ.get('LANG')
        if lang:
            del os.environ['LANG']
        try:
            ui.print_(u'something')
        except TypeError:
            self.fail('TypeError during print')
        finally:
            if lang:
                os.environ['LANG'] = lang
    def test_print_with_invalid_locale(self):
        # An empty LANG plus a bare codeset in LC_CTYPE is an invalid
        # locale configuration; print_ must not crash on it.
        old_lang = os.environ.get('LANG')
        os.environ['LANG'] = ''
        old_ctype = os.environ.get('LC_CTYPE')
        os.environ['LC_CTYPE'] = 'UTF-8'
        try:
            ui.print_(u'something')
        except ValueError:
            self.fail('ValueError during print')
        finally:
            if old_lang:
                os.environ['LANG'] = old_lang
            else:
                del os.environ['LANG']
            if old_ctype:
                os.environ['LC_CTYPE'] = old_ctype
            else:
                del os.environ['LC_CTYPE']
class AutotagTest(unittest.TestCase):
    """Tests for the interactive match chooser when autotagging finds no
    candidates."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
    def tearDown(self):
        self.io.restore()
    def _no_candidates_test(self, result):
        # Build an import task with an empty candidate list and verify
        # that choose_match returns *result* for the queued user input.
        task = importer.ImportTask(
            'toppath',
            'path',
            [_common.item()],
        )
        task.set_candidates('artist', 'album', [], autotag.RECOMMEND_NONE)
        res = commands.choose_match(task, _common.iconfig(None, quiet=False))
        self.assertEqual(res, result)
        self.assertTrue('No match' in self.io.getoutput())
    def test_choose_match_with_no_candidates_skip(self):
        # 's' at the prompt means skip.
        self.io.addinput('s')
        self._no_candidates_test(importer.action.SKIP)
    def test_choose_match_with_no_candidates_asis(self):
        # 'u' at the prompt means use as-is.
        self.io.addinput('u')
        self._no_candidates_test(importer.action.ASIS)
class ImportTest(unittest.TestCase):
    """Sanity checks for the import command's argument validation."""
    def test_quiet_timid_disallowed(self):
        # Quiet mode and timid mode are mutually exclusive, so
        # import_files must reject this combination with a UserError.
        args = (None, [], False, False, False, False, None,
                False, False, False, True, False, None, False, True,
                None, False, [], False)
        self.assertRaises(ui.UserError, commands.import_files, *args)
class InputTest(unittest.TestCase):
    """Tests that interactive prompts decode user input to Unicode."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
    def tearDown(self):
        self.io.restore()
    def test_manual_search_gets_unicode(self):
        # Queue UTF-8 byte input ('\xc3\x82' is the encoding of u'\xc2')
        # for both the artist and the album prompt.
        self.io.addinput('\xc3\x82me')
        self.io.addinput('\xc3\x82me')
        artist, album = commands.manual_search(False)
        self.assertEqual(artist, u'\xc2me')
        self.assertEqual(album, u'\xc2me')
class ConfigTest(unittest.TestCase):
    """Tests for configuration-file parsing through the CLI entry
    point (path formats, replacements, missing files)."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
        # Register a throwaway subcommand for _raw_main to dispatch to;
        # it is popped again in tearDown.
        self.test_cmd = ui.Subcommand('test', help='test')
        commands.default_commands.append(self.test_cmd)
    def tearDown(self):
        self.io.restore()
        commands.default_commands.pop()
    def _run_main(self, args, config, func):
        # Helper: run the CLI with *config* supplied as an in-memory
        # file and *func* installed as the subcommand body.
        self.test_cmd.func = func
        ui._raw_main(args + ['test'], StringIO(config))
    def test_paths_section_respected(self):
        def func(lib, config, opts, args):
            key, template = lib.path_formats[0]
            self.assertEqual(key, 'x')
            self.assertEqual(template.original, 'y')
        self._run_main([], textwrap.dedent("""
            [paths]
            x=y"""), func)
    def test_default_paths_preserved(self):
        def func(lib, config, opts, args):
            self.assertEqual(lib.path_formats[1:],
                             ui.DEFAULT_PATH_FORMATS)
        self._run_main([], textwrap.dedent("""
            [paths]
            x=y"""), func)
    def test_nonexistant_config_file(self):
        # A missing config file should be tolerated silently.
        os.environ['BEETSCONFIG'] = '/xxxxx'
        ui.main(['version'])
    def test_nonexistant_db(self):
        def func(lib, config, opts, args):
            pass
        with self.assertRaises(ui.UserError):
            self._run_main([], textwrap.dedent("""
                [beets]
                library: /xxx/yyy/not/a/real/path
            """), func)
    def test_replacements_parsed(self):
        def func(lib, config, opts, args):
            replacements = lib.replacements
            self.assertEqual(replacements, [(re.compile(ur'[xy]'), u'z')])
        self._run_main([], textwrap.dedent("""
            [beets]
            replace=[xy] z"""), func)
    def test_replacements_parsed_unicode(self):
        def func(lib, config, opts, args):
            replacements = lib.replacements
            self.assertEqual(replacements, [(re.compile(ur'\u2019'), u'z')])
        self._run_main([], textwrap.dedent(u"""
            [beets]
            replace=\u2019 z"""), func)
    def test_empty_replacements_produce_none(self):
        def func(lib, config, opts, args):
            replacements = lib.replacements
            self.assertFalse(replacements)
        self._run_main([], textwrap.dedent("""
            [beets]
            """), func)
    def test_multiple_replacements_parsed(self):
        def func(lib, config, opts, args):
            replacements = lib.replacements
            self.assertEqual(replacements, [
                (re.compile(ur'[xy]'), u'z'),
                (re.compile(ur'foo'), u'bar'),
            ])
        self._run_main([], textwrap.dedent("""
            [beets]
            replace=[xy] z
                    foo bar"""), func)
class ShowdiffTest(unittest.TestCase):
    """Tests for commands._showdiff, which prints a field's old and new
    values when they differ."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
    def tearDown(self):
        self.io.restore()
    def test_showdiff_strings(self):
        commands._showdiff('field', 'old', 'new', True)
        out = self.io.getoutput()
        self.assertTrue('field' in out)
    def test_showdiff_identical(self):
        # Identical values produce no output for the field.
        commands._showdiff('field', 'old', 'old', True)
        out = self.io.getoutput()
        self.assertFalse('field' in out)
    def test_showdiff_ints(self):
        commands._showdiff('field', 2, 3, True)
        out = self.io.getoutput()
        self.assertTrue('field' in out)
    def test_showdiff_ints_no_color(self):
        commands._showdiff('field', 2, 3, False)
        out = self.io.getoutput()
        self.assertTrue('field' in out)
    def test_showdiff_shows_both(self):
        commands._showdiff('field', 'old', 'new', True)
        out = self.io.getoutput()
        self.assertTrue('old' in out)
        self.assertTrue('new' in out)
    def test_showdiff_floats_close_to_identical(self):
        # Near-equal floats are treated as identical (no diff shown).
        commands._showdiff('field', 1.999, 2.001, True)
        out = self.io.getoutput()
        self.assertFalse('field' in out)
    def test_showdiff_floats_differenct(self):
        commands._showdiff('field', 1.999, 4.001, True)
        out = self.io.getoutput()
        self.assertTrue('field' in out)
    def test_showdiff_ints_colorizing_is_not_stringwise(self):
        # Whether digits overlap ('222' vs '232') must not change the
        # colorized output: ints are diffed as whole values.
        commands._showdiff('field', 222, 333, True)
        complete_diff = self.io.getoutput().split()[1]
        commands._showdiff('field', 222, 232, True)
        partial_diff = self.io.getoutput().split()[1]
        self.assertEqual(complete_diff, partial_diff)
AN_ID = "28e32c71-1450-463e-92bf-e0a46446fc11"
class ManualIDTest(unittest.TestCase):
    """Tests for commands.manual_id, which extracts an ID from typed
    input or a pasted MusicBrainz URL."""
    def setUp(self):
        # Silence log output during the prompt.
        _common.log.setLevel(logging.CRITICAL)
        self.io = _common.DummyIO()
        self.io.install()
    def tearDown(self):
        self.io.restore()
    def test_id_accepted(self):
        self.io.addinput(AN_ID)
        out = commands.manual_id(False)
        self.assertEqual(out, AN_ID)
    def test_non_id_returns_none(self):
        self.io.addinput("blah blah")
        out = commands.manual_id(False)
        self.assertEqual(out, None)
    def test_url_finds_id(self):
        # The ID embedded in a MusicBrainz URL should be extracted.
        self.io.addinput("http://musicbrainz.org/entity/%s?something" % AN_ID)
        out = commands.manual_id(False)
        self.assertEqual(out, AN_ID)
class ShowChangeTest(unittest.TestCase):
    """Tests for commands.show_change, which prints the proposed tag
    changes for an album match."""
    def setUp(self):
        self.io = _common.DummyIO()
        self.io.install()
        self.items = [_common.item()]
        self.items[0].track = 1
        self.items[0].path = '/path/to/file.mp3'
        self.info = autotag.AlbumInfo(
            'the album', 'album id', 'the artist', 'artist id', [
                autotag.TrackInfo('the title', 'track id', index=1)
            ])
    def tearDown(self):
        self.io.restore()
    def _show_change(self, items=None, info=None,
                     cur_artist='the artist', cur_album='the album',
                     dist=0.1):
        # Helper: render the change display and return it lowercased.
        # NOTE(review): the ``dist`` parameter is accepted but 0.1 is
        # hard-coded in the AlbumMatch below -- confirm intent.
        items = items or self.items
        info = info or self.info
        mapping = dict(zip(items, info.tracks))
        commands.show_change(
            cur_artist,
            cur_album,
            autotag.AlbumMatch(0.1, info, mapping, set(), set()),
            color=False,
        )
        return self.io.getoutput().lower()
    def test_null_change(self):
        msg = self._show_change()
        self.assertTrue('similarity: 90' in msg)
        self.assertTrue('tagging:' in msg)
    def test_album_data_change(self):
        msg = self._show_change(cur_artist='another artist',
                                cur_album='another album')
        self.assertTrue('correcting tags from:' in msg)
    def test_item_data_change(self):
        self.items[0].title = 'different'
        msg = self._show_change()
        self.assertTrue('different -> the title' in msg)
    def test_item_data_change_with_unicode(self):
        self.items[0].title = u'caf\xe9'
        msg = self._show_change()
        self.assertTrue(u'caf\xe9 -> the title' in msg.decode('utf8'))
    def test_album_data_change_with_unicode(self):
        msg = self._show_change(cur_artist=u'caf\xe9',
                                cur_album=u'another album')
        self.assertTrue('correcting tags from:' in msg)
    def test_item_data_change_title_missing(self):
        # With no title, the filename stands in for the old value.
        self.items[0].title = ''
        msg = self._show_change()
        self.assertTrue('file.mp3 -> the title' in msg)
    def test_item_data_change_title_missing_with_unicode_filename(self):
        self.items[0].title = ''
        self.items[0].path = u'/path/to/caf\xe9.mp3'.encode('utf8')
        msg = self._show_change().decode('utf8')
        self.assertTrue(u'caf\xe9.mp3 -> the title' in msg
                        or u'caf.mp3 ->' in msg)
class DefaultPathTest(unittest.TestCase):
    """Tests for ui.default_paths on Unix-style and Windows-style path
    modules, using fake HOME/APPDATA environment values."""
    def setUp(self):
        self.old_home = os.environ.get('HOME')
        self.old_appdata = os.environ.get('APPDATA')
        os.environ['HOME'] = 'xhome'
        os.environ['APPDATA'] = 'xappdata'
    def tearDown(self):
        # Restore (or remove) the environment variables we overrode.
        if self.old_home is None:
            del os.environ['HOME']
        else:
            os.environ['HOME'] = self.old_home
        if self.old_appdata is None:
            del os.environ['APPDATA']
        else:
            os.environ['APPDATA'] = self.old_appdata
    def test_unix_paths_in_home(self):
        import posixpath
        config, lib, libdir = ui.default_paths(posixpath)
        self.assertEqual(config, 'xhome/.beetsconfig')
        self.assertEqual(lib, 'xhome/.beetsmusic.blb')
        self.assertEqual(libdir, 'xhome/Music')
    def test_windows_paths_in_home_and_appdata(self):
        import ntpath
        config, lib, libdir = ui.default_paths(ntpath)
        self.assertEqual(config, 'xappdata\\beetsconfig.ini')
        self.assertEqual(lib, 'xappdata\\beetsmusic.blb')
        self.assertEqual(libdir, 'xhome\\Music')
class PathFormatTest(unittest.TestCase):
    """Tests for ui._get_path_formats parsing of the [paths] config
    section."""
    def _config(self, text):
        # Build a ConfigParser from a literal config string.
        cp = ConfigParser.SafeConfigParser()
        cp.readfp(StringIO(text))
        return cp
    def _paths_for(self, text):
        return ui._get_path_formats(self._config("[paths]\n%s" %
                                                 textwrap.dedent(text)))
    def test_default_paths(self):
        pf = self._paths_for("")
        self.assertEqual(pf, ui.DEFAULT_PATH_FORMATS)
    def test_custom_paths_prepend(self):
        # Custom formats are prepended ahead of the defaults.
        pf = self._paths_for("""
            foo: bar
        """)
        key, tmpl = pf[0]
        self.assertEqual(key, 'foo')
        self.assertEqual(tmpl.original, 'bar')
        self.assertEqual(pf[1:], ui.DEFAULT_PATH_FORMATS)
def suite():
    """Collect every test in this module into a single suite."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
MusikPolice/beets
|
test/test_ui.py
|
Python
|
mit
| 25,895
|
import sys
import warnings
from django.db.models.fields import FieldDoesNotExist
from django.utils.text import capfirst
from django.utils.encoding import smart_text
try:
from django.db.models.options import get_verbose_name
except ImportError:
from django.utils.text import camel_case_to_spaces as get_verbose_name
from mongoengine.fields import ReferenceField
class PkWrapper(object):
    """Adapter that makes a mongoengine primary-key field look like the
    field object Django's admin machinery expects.
    """
    editable = False
    remote_field = None
    def __init__(self, wrapped):
        self.obj = wrapped
    def __getattr__(self, attr):
        # Delegate unknown attribute reads to the wrapped field.
        if attr not in dir(self.obj):
            raise AttributeError("{} has no {}".format(self, attr))
        return getattr(self.obj, attr)
    def __setattr__(self, attr, value):
        # Mirror writes onto the wrapped field when it already has the
        # attribute, then store them on the wrapper as well. 'obj' is
        # excluded so __init__ can bind the wrapped object itself.
        if attr != 'obj' and hasattr(self.obj, attr):
            setattr(self.obj, attr, value)
        super(PkWrapper, self).__setattr__(attr, value)
    def value_to_string(self, obj):
        """Serialize *obj*'s primary key as text.
        This is used by the serialization framework.
        """
        return smart_text(obj.pk)
class DocumentMetaWrapper(object):
    """
    Used to store mongoengine's _meta dict to make the document admin
    as compatible as possible to django's meta class on models.
    """
    # Defaults that mirror attributes of Django's model Options class.
    # NOTE(review): the class-level lists/dicts (proxy, parents, ...)
    # are shared by all instances that never reassign them -- confirm
    # that nothing mutates them in place.
    _pk = None
    pk_name = None
    app_label = None
    model_name = None
    verbose_name = None
    has_auto_field = False
    object_name = None
    proxy = []
    virtual_fields = []
    concrete_fields = []
    proxied_children = []
    parents = {}
    many_to_many = []
    swapped = False
    _field_cache = None
    document = None
    _meta = None
    def __init__(self, document):
        # Unwrap an already-wrapped meta so self._meta always holds the
        # raw mongoengine meta dict.
        if isinstance(document._meta, DocumentMetaWrapper):
            meta = document._meta._meta
        else:
            meta = document._meta
        self.document = document
        self._meta = meta or {}
        self.model = document
        self.concrete_model = document
        self.concrete_fields = document._fields.values()
        self.fields = self.concrete_fields
        try:
            self.object_name = self.document.__name__
        except AttributeError:
            # Document instances (as opposed to classes) have no
            # __name__; fall back to the class name.
            self.object_name = self.document.__class__.__name__
        self.model_name = self.object_name.lower()
        self.app_label = self.get_app_label()
        self.verbose_name = self.get_verbose_name()
        # EmbeddedDocuments don't have an id field.
        try:
            self.pk_name = self._meta['id_field']
            self._init_pk()
        except KeyError:
            pass
    @property
    def module_name(self):
        """
        This property has been deprecated in favor of `model_name`.
        """
        warnings.warn(
            "Options.module_name has been deprecated in favor of model_name",
            PendingDeprecationWarning, stacklevel=2)
        return self.model_name
    def get_app_label(self):
        """Derive the app label from the second-to-last component of the
        document's module path (e.g. 'myapp.models' -> 'myapp')."""
        model_module = sys.modules[self.document.__module__]
        return model_module.__name__.split('.')[-2]
    def get_verbose_name(self):
        """
        Returns the verbose name of the document.
        Checks the original meta dict first. If it is not found
        then generates a verbose name from the object name.
        """
        try:
            return capfirst(get_verbose_name(self._meta['verbose_name']))
        except KeyError:
            return capfirst(get_verbose_name(self.object_name))
    @property
    def verbose_name_raw(self):
        return self.verbose_name
    @property
    def verbose_name_plural(self):
        # Naive pluralization by appending 's'; there is no
        # verbose_name_plural override here.
        return "%ss" % self.verbose_name
    @property
    def pk(self):
        # Lazily (re)build the pk wrapper if it is missing the
        # attributes Django expects.
        if not hasattr(self._pk, 'attname'):
            self._init_pk()
        return self._pk
    def get_fields(self, include_parents=True, include_hidden=False):
        # XXX: simple placeholder; TODO: handle options;
        return self.concrete_fields
    def _init_pk(self):
        """
        Adds a wrapper around the document's pk field so it exposes the
        attributes django expects on a pk field (name and attname).
        """
        # id_field is resolved from self._meta via __getattr__ below.
        if self.id_field is None:
            return
        try:
            pk_field = getattr(self.document, self.id_field)
            self._pk = PkWrapper(pk_field)
            self._pk.name = self.id_field
            self._pk.attname = self.id_field
        except AttributeError:
            return
    def get_add_permission(self):
        """Return the codename of the 'add' permission."""
        return 'add_%s' % self.object_name.lower()
    def get_change_permission(self):
        """Return the codename of the 'change' permission."""
        return 'change_%s' % self.object_name.lower()
    def get_delete_permission(self):
        """Return the codename of the 'delete' permission."""
        return 'delete_%s' % self.object_name.lower()
    def get_ordered_objects(self):
        # No ordering support for mongoengine documents here.
        return []
    def get_field_by_name(self, name):
        """
        Returns the (field_object, model, direct, m2m), where field_object is
        the Field instance for the given name, model is the model containing
        this field (None for local fields), direct is True if the field exists
        on this model, and m2m is True for many-to-many relations. When
        'direct' is False, 'field_object' is the corresponding RelatedObject
        for this field (since the field doesn't have an instance associated
        with it).
        Uses a cache internally, so after the first access, this is very fast.
        """
        try:
            try:
                return self._field_cache[name]
            except TypeError:
                # _field_cache is still None; build it, then retry.
                self._init_field_cache()
                return self._field_cache[name]
        except KeyError:
            raise FieldDoesNotExist('%s has no field named %r'
                                    % (self.object_name, name))
    def _init_field_cache(self):
        """Populate the name -> (field, model, direct, m2m) cache from
        the document's fields."""
        if self._field_cache is None:
            self._field_cache = {}
        for f in self.document._fields.values():
            if isinstance(f, ReferenceField):
                # Reference fields are keyed by the referenced
                # document's module_name instead of the field name.
                document = f.document_type
                self._field_cache[document._meta.module_name] = (f, document, False, False)
            else:
                self._field_cache[f.name] = (f, None, True, False)
        return self._field_cache
    def get_field(self, name, many_to_many=True):
        """
        Returns the requested field by name. Raises FieldDoesNotExist on error.
        """
        return self.get_field_by_name(name)[0]
    def __getattr__(self, name):
        # Unknown attributes are looked up in the wrapped meta dict.
        try:
            return self._meta[name]
        except KeyError as e:
            raise AttributeError(*e.args)
    def __setattr__(self, name, value):
        # Attributes not predeclared on the class go into the meta dict;
        # known ones are set normally on the instance.
        if not hasattr(self, name):
            self._meta[name] = value
        else:
            super(DocumentMetaWrapper, self).__setattr__(name, value)
    def __getitem__(self, key):
        return self._meta[key]
    def __setitem__(self, key, value):
        self._meta[key] = value
    def __contains__(self, key):
        return key in self._meta
    def get(self, key, default=None):
        """Dict-style get with a default, backed by the meta dict."""
        try:
            return self.__getitem__(key)
        except KeyError:
            return default
    def get_parent_list(self):
        # Documents have no model inheritance chain to report.
        return []
    def get_all_related_objects(self, *args, **kwargs):
        return []
    def iteritems(self):
        # Python 2 style iteration over the wrapped meta dict.
        return self._meta.iteritems()
    def items(self):
        return self._meta.items()
|
iandd0824/ri-app
|
web/django_mongoengine/forms/document_options.py
|
Python
|
apache-2.0
| 7,439
|
# init for the project: pulls in the geospatial / analysis stack used
# throughout the package.
import shapely
import geopandas as gpd
import pandas as pd
import numpy as np
import matplotlib
import pysal
from scipy import ndimage
# urllib2 only exists on Python 2; fall back to the Python 3 locations
# so the package imports on either interpreter.
try:
    from urllib2 import Request, urlopen, URLError
except ImportError:
    from urllib.request import Request, urlopen
    from urllib.error import URLError
from datetime import time
from datetime import date
import datetime
# NOTE: this rebinds the name 'time' from datetime.time (imported
# above) to the time module, matching the original import order.
import time
import sys
import os
import matplotlib.pyplot as plt
# The original file ended with "%matplotlib inline", which is IPython
# magic, not Python syntax: it makes this module raise a SyntaxError on
# import. Run that magic inside the notebook itself instead.
|
gengho/Car2know
|
Car2know/__init__.py
|
Python
|
mit
| 358
|
""" Default class inherited by all test classes that provides basic behaviour
to write a test class easily """
import sublime
import inspect
import re
import sys
if sys.version_info < (3, 0):
from tableofcomments import TableOfComments
else:
from ..tableofcomments import TableOfComments
class TestCase():
    """Base class for plugin test cases.

    Subclasses define ``test_*`` methods; ``run()`` discovers and executes
    them, accumulating a dot/F progress string in ``output`` and failure
    messages in ``errors``.
    """

    output = ''   # progress string; rebuilt per instance in __init__
    errors = []   # failure messages; rebuilt per instance in __init__

    def __init__(self, view, edit):
        self.output = ''
        self.errors = []
        self.view = view
        self.edit = edit

    # Called before class is run
    def setup(self):
        self.view.set_syntax_file('Packages/JavaScript/JavaScript.tmLanguage')

    # Runs all test methods
    def run(self):
        """Run every ``test_*`` method under known plugin settings and
        return the accumulated progress string."""
        self.backup_plugin_settings()
        self.set_settings({
            'level_char': '>',
            'toc_char': '-'
        })
        methods = self.get_test_methods()
        for testname in methods:
            self.output += testname + '() '
            # BUGFIX: dispatch via getattr instead of eval'ing a built
            # string — same behaviour for valid identifiers, no string
            # evaluation.
            getattr(self, testname)()
            self.output += '\n'
        self.restore_plugin_settings()
        return self.output

    # Called after class is run
    def teardown(self):
        pass

    #
    # Helper functions
    #
    def get_test_methods(self):
        """Return the names of all ``test_*`` methods on this instance."""
        test_methods = []
        members = inspect.getmembers(self, predicate=inspect.ismethod)
        for name, func in members:
            if name.find("test_") == 0:
                test_methods.append(name)
        return test_methods

    def set_text(self, text):
        """Replace the entire buffer content with *text*."""
        self.view.replace(self.edit, sublime.Region(0, self.view.size()), '')
        self.view.insert(self.edit, 0, text)

    def get_text(self):
        """Return the entire buffer content."""
        return self.view.substr(sublime.Region(0, self.view.size()))

    def get_plugin(self):
        """Return a plugin instance bound to this view/edit."""
        return TableOfComments(self.view, self.edit)

    def run_plugin(self):
        """Invoke the plugin command on the view."""
        self.view.run_command('table_of_comments')

    def set_syntax(self, syntax):
        """Switch the view syntax; accepts a shortcut name or a full path."""
        shortcuts = {
            'javascript': 'Packages/JavaScript/JavaScript.tmLanguage',
            'python': 'Packages/Python/Python.tmLanguage',
            'css': 'Packages/CSS/CSS.tmLanguage'
        }
        if syntax in shortcuts.keys():
            syntax = shortcuts[syntax]
        self.view.set_syntax_file(syntax)

    #
    # Settings functions
    # (allows us to have different settings for different tests - and restore
    # to normal afterwards)
    #
    def backup_plugin_settings(self):
        """Remember the user's plugin settings so tests can restore them."""
        self.settings = sublime.load_settings(
            'tableofcomments.sublime-settings')
        self._original_settings = {}
        for name in ['toc_char', 'level_char', 'toc_level']:
            if self.settings.has(name):
                self._original_settings[name] = self.settings.get(name)

    def restore_plugin_settings(self):
        """Write the remembered user settings back."""
        if self._original_settings:
            values = self._original_settings
            for name in values:
                self.settings.set(name, values[name])
            sublime.save_settings('tableofcomments.sublime-settings')

    def set_settings(self, settings):
        """Apply *settings* (a name -> value dict) for the current run."""
        for name in settings:
            self.settings.set(name, settings[name])
        sublime.save_settings('tableofcomments.sublime-settings')

    #
    # Result functions
    #
    def error(self, text):
        """Record a failure message and emit an 'F' progress marker."""
        self.errors.append(text)
        self.output += 'F'

    def ok(self):
        """Emit a pass ('.') progress marker."""
        self.output += '.'

    #
    # Assert functions for unit tests
    #
    def assert_true(self, value, msg='Was not true'):
        if value is True:
            self.ok()
            return True
        else:
            self.error(msg)
            return False

    def assert_false(self, value, msg='Was not false'):
        if value is False:
            self.ok()
            return True
        else:
            self.error(msg)
            return False

    # Assert function to see if entire result text equals the sent text
    def text_equals(self, sent):
        text = self.get_text()
        if text.strip() == sent.strip():
            self.ok()
        else:
            # BUGFIX: typo "betwen" -> "between" in the failure message.
            self.error(
                "Text not equal between \n# : From..." + "\n---" + text +
                "\n---\nto...\n---"+sent+"\n---"
            )

    # Assert function to check for sent text within result text
    def find(self, text):
        result = self.get_text()
        match = result.find(text)
        if match >= 0:
            self.ok()
            return True
        else:
            self.error("Couldn't find \"" + text + "\"")
            return False
|
kizza/Table-of-comments
|
tests/testcase.py
|
Python
|
mit
| 4,447
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Product serial module for OpenERP
# Copyright (C) 2010-2011 Anevia. All Rights Reserved
# Copyright (C) 2013 Akretion
# @author: Sebastien Beau <sebastien.beau@akretion.com>
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
class company(orm.Model):
    """Extend ``res.company`` with configuration flags for automatic
    picking-line splitting and invoice-line grouping."""
    _inherit = 'res.company'

    _columns = {
        'autosplit_is_active': fields.boolean(
            'Active auto split',
            help="Active the automatic split of move lines on the pickings."),
        'is_group_invoice_line': fields.boolean(
            'Group invoice lines',
            help="If active, OpenERP will group the identical invoice lines when generating an invoice from a picking. If inactive, each move line will generate one invoice line."),
    }

    # Both behaviours are enabled by default for new companies.
    _defaults = {
        'autosplit_is_active': True,
        'is_group_invoice_line': True,
    }
|
numerigraphe/stock-logistics-workflow
|
__unported__/product_serial/company.py
|
Python
|
agpl-3.0
| 1,719
|
#!/usr/bin/env python
# encoding: utf-8
import os
import subprocess
import stat
import tempfile
from UltiSnips.compatibility import as_unicode
from UltiSnips.text_objects._base import NoneditableTextObject
class ShellCode(NoneditableTextObject):
    """Snippet text object that runs its code in a shell and inserts stdout."""

    def __init__(self, parent, token):
        NoneditableTextObject.__init__(self, parent, token)
        # Un-escape backticks that were escaped in the snippet definition.
        self._code = token.code.replace("\\`", "`")

    def _update(self, done, not_done):
        """Execute the stored code and overwrite this object with its output.

        Returns True to signal the update is complete.
        """
        # Write the code to a temporary, user-executable file.
        handle, path = tempfile.mkstemp(text=True)
        os.write(handle, self._code.encode("utf-8"))
        os.close(handle)
        os.chmod(path, stat.S_IRWXU)

        # Execute the file and read stdout.
        # BUGFIX: wait() before reading a PIPE can deadlock when the child
        # fills the pipe buffer before exiting; communicate() reads and
        # waits safely.
        proc = subprocess.Popen(path, shell=True, stdout=subprocess.PIPE)
        stdout_data, _ = proc.communicate()
        output = as_unicode(stdout_data)
        # Strip a single trailing newline (and a CR from CRLF output).
        if len(output) and output[-1] == '\n':
            output = output[:-1]
        if len(output) and output[-1] == '\r':
            output = output[:-1]
        os.unlink(path)

        self.overwrite(output)
        self._parent._del_child(self)

        return True
|
zenshade/vim_config
|
bundle/UltiSnips-2.1/plugin/UltiSnips/text_objects/_shell_code.py
|
Python
|
mit
| 1,117
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test txindex generation and fetching
#
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import binascii
class TxIndexTest(BitcoinTestFramework):
    """Exercise -txindex: verbose getrawtransaction must work on a
    txindex node for a transaction that is not in its wallet."""

    def setup_chain(self):
        print("Initializing test directory "+self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 4)

    def setup_network(self):
        """Start four nodes (1-3 with -txindex) all connected via node 0."""
        self.nodes = []
        # Nodes 0/1 are "wallet" nodes
        self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"]))
        self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-txindex"]))
        # Nodes 2/3 are used for testing
        self.nodes.append(start_node(2, self.options.tmpdir, ["-debug", "-txindex"]))
        self.nodes.append(start_node(3, self.options.tmpdir, ["-debug", "-txindex"]))
        connect_nodes(self.nodes[0], 1)
        connect_nodes(self.nodes[0], 2)
        connect_nodes(self.nodes[0], 3)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        """Mine past coinbase maturity, spend a UTXO, and verify the
        verbose raw-transaction lookup on a txindex node."""
        print("Mining blocks...")
        self.nodes[0].generate(105)
        self.sync_all()
        chain_height = self.nodes[1].getblockcount()
        assert_equal(chain_height, 105)

        print("Testing transaction index...")

        # Build a raw P2PKH transaction spending the first wallet UTXO.
        privkey = "cU4zhap7nPJAWeMFu4j6jLrfPmqakDAzy8zn8Fhb3oEevdm4e5Lc"
        address = "yeMpGzMj3rhtnz48XsfpB8itPHhHtgxLc3"
        addressHash = binascii.unhexlify("C5E4FB9171C22409809A3E8047A29C83886E325D")
        scriptPubKey = CScript([OP_DUP, OP_HASH160, addressHash, OP_EQUALVERIFY, OP_CHECKSIG])
        unspent = self.nodes[0].listunspent()
        tx = CTransaction()
        # Convert the wallet amount (coins) to satoshis.
        amount = unspent[0]["amount"] * 100000000
        tx.vin = [CTxIn(COutPoint(int(unspent[0]["txid"], 16), unspent[0]["vout"]))]
        tx.vout = [CTxOut(amount, scriptPubKey)]
        tx.rehash()

        signed_tx = self.nodes[0].signrawtransaction(binascii.hexlify(tx.serialize()).decode("utf-8"))
        txid = self.nodes[0].sendrawtransaction(signed_tx["hex"], True)
        self.nodes[0].generate(1)
        self.sync_all()

        # Check verbose raw transaction results
        # IDIOM: dropped the stray trailing semicolons on the two asserts.
        verbose = self.nodes[3].getrawtransaction(unspent[0]["txid"], 1)
        assert_equal(verbose["vout"][0]["valueSat"], 5000000000)
        assert_equal(verbose["vout"][0]["value"], 50)

        print("Passed\n")
# Allow running this test directly from the command line.
if __name__ == '__main__':
    TxIndexTest().main()
|
terracoin/terracoin
|
qa/rpc-tests/txindex.py
|
Python
|
mit
| 2,703
|
# Task 6. Variant 10.
# Game: the computer picks the name of one of the three countries of the
# "Triple Alliance" military-political bloc and the player must guess it.
# Donkor A.H.
# 14.04.2016
import random
# The computer's secret pick (user-facing strings are in Russian).
x=random.choice(['Германия','Австро-Венгрия','Италия'])
y=input('Сыграем в игру. Я загадываю вам одну из трёх стран входящих в военно-политический блок "Тройственный союз", а вы должны угадать, какая именно это страна ? ')
z=1  # attempt counter
while y!=x:
    print("Вы не угадали!")
    z+=1
    y=input('Попробуйте снова ')
else:
    # while/else: the else branch runs once the guess matches.
    print("Всё верно!Вы угадали!!!")
    print("Число ваших попыток - "+ str(z))
input("\nВведите Enter, чтобы завершить")
|
Mariaanisimova/pythonintask
|
PMIa/2015/Donkor_A_H/task_6_10.py
|
Python
|
apache-2.0
| 1,052
|
import sys
from io import StringIO
from unittest.mock import patch
import tables.scripts.ptrepack as ptrepack
import tables.scripts.ptdump as ptdump
import tables.scripts.pttree as pttree
from tables.tests import common
class ptrepackTestCase(common.PyTablesTestCase):
    """Test ptrepack"""
    # Decorators apply bottom-up: mock_open_file patches tb.open_file,
    # mock_copy_leaf patches copy_leaf.
    @patch.object(ptrepack, 'copy_leaf')
    @patch.object(ptrepack.tb, 'open_file')
    def test_paths_windows(self, mock_open_file, mock_copy_leaf):
        """Checking handling of windows filenames: test gh-616"""
        # this filename has a colon (drive letter) to check for
        # regression of gh-616
        src_fn = 'D:\\window~1\\path\\000\\infile'
        src_path = '/'
        dst_fn = 'another\\path\\'
        dst_path = '/path/in/outfile'
        # The CLI splits "file:path" arguments on ':'; a drive letter also
        # contains ':' which is what gh-616 was about.
        argv = ['ptrepack', src_fn + ':' + src_path, dst_fn + ':' + dst_path]
        with patch.object(sys, 'argv', argv):
            ptrepack.main()
        # The source file must be opened read-only with the full drive path.
        args, kwargs = mock_open_file.call_args_list[0]
        self.assertEqual(args, (src_fn, 'r'))
        args, kwargs = mock_copy_leaf.call_args_list[0]
        self.assertEqual(args, (src_fn, dst_fn, src_path, dst_path))
class ptdumpTestCase(common.PyTablesTestCase):
    """Test ptdump"""
    # stdout is patched to swallow the dump output during the test.
    @patch.object(ptdump.tb, 'open_file')
    @patch('sys.stdout', new_callable=StringIO)
    def test_paths_windows(self, _, mock_open_file):
        """Checking handling of windows filenames: test gh-616"""
        # this filename has a colon (drive letter) to check for
        # regression of gh-616 (in ptdump)
        src_fn = 'D:\\window~1\\path\\000\\ptdump'
        src_path = '/'
        argv = ['ptdump', src_fn + ':' + src_path]
        with patch.object(sys, 'argv', argv):
            ptdump.main()
        # The file must be opened read-only with the full drive path intact.
        args, kwargs = mock_open_file.call_args_list[0]
        self.assertEqual(args, (src_fn, 'r'))
class pttreeTestCase(common.PyTablesTestCase):
    """Test ptdump"""
    # get_tree_str is mocked so no real tree rendering happens; stdout is
    # patched to swallow output.
    @patch.object(pttree.tb, 'open_file')
    @patch.object(pttree, 'get_tree_str')
    @patch('sys.stdout', new_callable=StringIO)
    def test_paths_windows(self, _, mock_get_tree_str, mock_open_file):
        """Checking handling of windows filenames: test gh-616"""
        # this filename has a colon (drive letter) to check for
        # regression of gh-616 (in pttree)
        src_fn = 'D:\\window~1\\path\\000\\pttree'
        src_path = '/'
        argv = ['pttree', src_fn + ':' + src_path]
        with patch.object(sys, 'argv', argv):
            pttree.main()
        # The file must be opened read-only with the full drive path intact.
        args, kwargs = mock_open_file.call_args_list[0]
        self.assertEqual(args, (src_fn, 'r'))
def suite():
    """Build the unittest suite covering the ptrepack/ptdump/pttree CLIs."""
    cases = (ptrepackTestCase, ptdumpTestCase, pttreeTestCase)
    theSuite = common.unittest.TestSuite()
    for case in cases:
        theSuite.addTest(common.unittest.makeSuite(case))
    return theSuite
# Script entry point: parse CLI args, print version info, run the suite.
if __name__ == '__main__':
    common.parse_argv(sys.argv)
    common.print_versions()
    common.unittest.main(defaultTest='suite')
|
PyTables/PyTables
|
tables/tests/test_utils.py
|
Python
|
bsd-3-clause
| 2,960
|
from .. import BaseForm
from wtforms import StringField, TextAreaField
from wtforms.validators import DataRequired
class CategoryForm(BaseForm):
    """Admin form for a product category: a required name plus a
    required free-text description."""
    name = StringField('name', validators=[DataRequired()])
    description = TextAreaField('description', validators=[DataRequired()])
|
friendly-of-python/flask-online-store
|
flask_online_store/forms/admin/category.py
|
Python
|
mit
| 461
|
# -*- coding: utf-8 -*-
"""Utility stuff for tests."""
__revision__ = "$Rev: 492 $"
__date__ = "$Date: 2007-07-06 21:38:45 -0400 (Fri, 06 Jul 2007) $"
__author__ = "Ryan Tomayko (rtomayko@gmail.com)"
__copyright__ = "Copyright 2004-2005, Ryan Tomayko"
__license__ = "MIT <http://www.opensource.org/licenses/mit-license.php>"
import sys
import os
import traceback
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import kid.test
class stdold:
    """Original sys.stderr and sys.stdout."""
    # Captured at import time so progress markers can still reach the real
    # console while tests run with sys.stdout/sys.stderr swapped to StringIO.
    out = sys.stdout
    err = sys.stderr
def raises(ExpectedException, *args, **kwargs):
    """Raise AssertionError if code does not raise expected exception.

    Two call forms (Python 2 only -- uses the ``exec`` statement):
      raises(Exc, "expression")        -- exec the source string with the
                                          caller's globals and locals
      raises(Exc, func, *args, **kw)   -- call func(*args, **kw)
    Returns the raised exception instance on success.
    """
    assert args
    if isinstance(args[0], str):
        (expr,) = args
        assert isinstance(expr, str)
        # Evaluate the expression in the caller's frame; kwargs are merged
        # into a copy of the caller's locals.
        frame = sys._getframe(1)
        loc = frame.f_locals.copy()
        loc.update(kwargs)
        try:
            exec expr in frame.f_globals, loc
        except ExpectedException, e:
            return e
        except Exception, e:
            pass
        else:
            e = None
    else:
        func, args = args[0], args[1:]
        assert callable(func)
        try:
            func(*args, **kwargs)
        except ExpectedException, e:
            return e
        except Exception, e:
            pass
        else:
            e = None
        # Build a printable call expression for the failure message.
        expr = ["%r" % x for x in args]
        expr.extend(["%s=%r" % x for x in kwargs.items()])
        expr = '%s(%s)' % (func.__name__, ', '.join(expr))
    # e is the wrong exception that was raised, or None if nothing raised.
    if e:
        e = 'raised %s instead of' % e.__class__
    else:
        e = 'did not raise'
    raise AssertionError('%s %s %s' % (expr, e, ExpectedException))
def dot():
    """Write a '.' progress marker for a passing test."""
    stdold.err.write('.')
def skip():
    """Write an 's' progress marker for a skipped test."""
    stdold.err.write('s')
def come_on_guido_this_is_just_wrong(name):
    """Import the dotted module path *name* and return the leaf module.

    ``__import__('a.b')`` returns package ``a``; walk the remaining path
    components with getattr to reach ``a.b``.
    """
    module = __import__(name)
    for component in name.split('.')[1:]:
        module = getattr(module, component)
    return module
def get_funcs(mod):
    """Return a list of test functions for the given module object.

    A test function is any callable attribute whose name starts with
    'test', in ``dir()`` (sorted) order.
    """
    return [getattr(mod, name) for name in dir(mod)
            if name[:4] == 'test' and callable(getattr(mod, name))]
def run_suite(tests, stop_first=True):
    """Run tests given a list of modules that export __test__ variables.

    Imports each module name in *tests*, runs its ``test*`` functions with
    stdout/stderr captured, prints dot/F/E progress markers, and finally
    prints a summary plus tracebacks for every failed test.
    """
    try:
        os.mkdir(kid.test.output_dir)
    except OSError:
        e = sys.exc_info()[1]
        # errno 17 == EEXIST: output directory already present is fine.
        if int(e.errno) != 17:
            raise
    bad = []
    kid.test.basic_tests = 1
    test_cnt = skip_cnt = bad_cnt = 0
    from time import time
    start = time()
    # run over modules...
    for module_name in tests:
        try:
            mod = come_on_guido_this_is_just_wrong(module_name)
        except ImportError, e:
            if 'No module named py' not in str(e):
                raise
            skip_cnt += 1
            skip()
            continue # you don't have pylib - so i won't run these tests
        #if not hasattr(mod, '__tests__'):
        #    raise '%r does not export a __tests__ variable.' % module_name
        if hasattr(mod, 'setup_module'):
            mod.setup_module(mod)
        try:
            # run each test...
            for test in get_funcs(mod):
                test_cnt += 1
                # Capture any output the test itself produces.
                sys.stdout, sys.stderr = StringIO(), StringIO()
                try:
                    test()
                except:
                    bad_cnt += 1
                    # 'F' for assertion failures, 'E' for other errors.
                    asserr = isinstance(sys.exc_info()[0], AssertionError)
                    ftype = asserr and 'F' or 'E'
                    buf = StringIO()
                    traceback.print_exc(file=buf)
                    stdold.err.write(ftype)
                    bad.append((test, ftype, sys.exc_info(), \
                        (sys.stdout.getvalue(), sys.stderr.getvalue())))
                    if stop_first:
                        # Restore the real streams before reporting and
                        # re-raising the first failure.
                        sys.stdout, sys.stderr = stdold.out, stdold.err
                        sys.stderr.write(
                            '*\n\bBailing after %d tests\n\n' % test_cnt)
                        out, err = bad[-1][3]
                        if out:
                            sys.stderr.write(
                                '-- sys.stdout:\n%s\n' % out.strip())
                        if err:
                            sys.stderr.write(
                                '-- sys.stderr:\n%s\n' % err.strip())
                        raise
                else:
                    dot()
                # Restore the real streams after each test.
                sys.stdout, sys.stderr = stdold.out, stdold.err
        finally:
            if hasattr(mod, 'teardown_module'):
                mod.teardown_module(mod)
    done = time()
    sys.stderr.write('\n')
    # Full report for each failed test: label, captured output, traceback.
    for test, ftype, exc_info, (out, err) in bad:
        sys.stderr.write('\n%s: %s\n' %
            ({'F': 'Failure', 'E': 'Error'}.get(ftype, 'Bad'),
            test.__doc__ or test.__name__))
        if out:
            sys.stderr.write(
                '-- sys.stdout:\n%s\n' % out.strip())
        if err:
            sys.stderr.write(
                '-- sys.stderr:\n%s\n' % err.strip())
        traceback.print_exception(
            exc_info[0], exc_info[1], exc_info[2], 15, sys.stderr)
    sys.stderr.write('\nTests: %d (+%d extended) OK (%g seconds)\n'
        % (test_cnt, kid.test.additional_tests, done - start))
    if skip_cnt:
        sys.stderr.write('Skipped tests (need py lib): %d\n' % skip_cnt)
    if bad_cnt:
        sys.stderr.write('Bad tests: %d\n' % bad_cnt)
|
gandrewstone/yadog
|
kid/test/util.py
|
Python
|
gpl-3.0
| 5,500
|
import pickle
from functools import partial
from typing import Tuple
import numpy as np
try:
import torch
import torch.nn as nn
except ImportError:
raise ImportError("pytorch is not installed. Please installed version it by running pip install torch torchvision")
try:
from pybnn.util.layers import AppendLayer
except ImportError:
raise ImportError("pybnn is not installed. Please install it by running pip install pybnn")
from emukit.core import ContinuousParameter, ParameterSpace
from emukit.core.loop.user_function import UserFunctionWrapper
def get_architecture_forrester(input_dimensionality: int) -> torch.nn.Module:
    """Build the small feed-forward net used by the Forrester meta-surrogate.

    Two linear layers with a tanh nonlinearity; the AppendLayer attaches a
    (fixed, small) noise estimate to the predicted mean.

    :param input_dimensionality: number of input features
    :return: an ``nn.Module`` whose forward returns the mean with noise appended
    """
    class Architecture(nn.Module):
        def __init__(self, n_inputs, n_hidden=100):
            super(Architecture, self).__init__()
            # Attribute names fc1/fc2/sigma_layer must stay stable: they are
            # the keys of the pre-trained state_dict loaded by callers.
            self.fc1 = nn.Linear(n_inputs, n_hidden)
            self.fc2 = nn.Linear(n_hidden, 2)
            self.sigma_layer = AppendLayer(noise=1e-3)

        def forward(self, x):
            hidden = torch.tanh(self.fc1(x))
            projected = self.fc2(hidden)
            mean = projected[:, None, 0]
            return self.sigma_layer(mean)

    return Architecture(n_inputs=input_dimensionality)
def meta_forrester(fname_objective: str) -> Tuple[UserFunctionWrapper, ParameterSpace]:
    """
    Interface to the Meta-Forrester benchmark.
    Offline generated function samples can be download here:
    http://www.ml4aad.org/wp-content/uploads/2019/05/profet_data.tar.gz
    For further information about Profet and the generated meta-surrogate benchmarks see:
    Meta-Surrogate Benchmarking for Hyperparameter Optimization
    A. Klein and Z. Dai and F. Hutter and N. Lawrence and J. Gonzalez
    arXiv:1905.12982 [cs.LG] (2019)
    :param fname_objective: filename for the objective function
    :return: Tuple of user function object and parameter space
    """
    parameter_space = ParameterSpace([ContinuousParameter("x", 0, 1)])
    # NOTE(review): pickle.load executes arbitrary code if the file is
    # untrusted; only load the officially distributed data files.
    data = pickle.load(open(fname_objective, "rb"))

    # Normalisation constants stored alongside the trained meta-surrogate.
    x_mean_objective = data["x_mean"]
    x_std_objective = data["x_std"]
    y_mean_objective = data["y_mean"]
    y_std_objective = data["y_std"]
    task_feature_objective = data["task_feature"]

    # Rebuild the network and load the pre-trained weights.
    objective = get_architecture_forrester(x_mean_objective.shape[0]).float()
    objective.load_state_dict(data["state_dict"])

    def objective_function(config):
        # Append the task feature to every configuration row, normalise,
        # run the network, then de-normalise the predicted mean.
        Ht = np.repeat(task_feature_objective[None, :], config.shape[0], axis=0)
        x = np.concatenate((config, Ht), axis=1)
        x_norm = torch.from_numpy((x - x_mean_objective) / x_std_objective).float()
        output = objective.forward(x_norm).data.numpy()
        mean = output[:, 0]
        feval = mean * y_std_objective + y_mean_objective
        # Return shape (n, 1) as expected by emukit user functions.
        return feval[:, None]

    f = partial(objective_function)

    return f, parameter_space
|
EmuKit/emukit
|
emukit/examples/profet/meta_benchmarks/meta_forrester.py
|
Python
|
apache-2.0
| 2,792
|
##
# Copyright (c) 2005-2014 Apple Inc. All rights reserved.
# Copyright (c) 2007 Twisted Matrix Laboratories.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
##
"""
WebDAV XML base classes.
This module provides XML utilities for use with WebDAV.
See RFC 2518: http://www.ietf.org/rfc/rfc2518.txt (WebDAV)
"""
# Public API of this module.
__all__ = [
    "dav_namespace",
    "twisted_dav_namespace",
    "twisted_private_namespace",
    "encodeXMLName",
    "decodeXMLName",
    "WebDAVElement",
    "PCDATAElement",
    "WebDAVOneShotElement",
    "WebDAVUnknownElement",
    "WebDAVEmptyElement",
    "WebDAVTextElement",
    "WebDAVDateTimeElement",
    "DateTimeHeaderElement",
]
import datetime
import string
import cStringIO as StringIO
import re
from twext.python.log import Logger
from txweb2.http_headers import parseDateTime
log = Logger()  # module-level logger

##
# Base XML elements
##
# Presumably a registry of element classes keyed by qname, populated
# elsewhere (no registrations are visible in this module) — confirm.
_elements_by_qname = {}

dav_namespace = "DAV:"
twisted_dav_namespace = "http://twistedmatrix.com/xml_namespace/dav/"
twisted_private_namespace = twisted_dav_namespace + "private/"
def encodeXMLName(namespace, name):
    """
    Encodes an XML namespace and name into a UTF-8 string.
    If namespace is None, returns "name", otherwise, returns
    "{namespace}name".
    """
    if namespace:
        qualified = u"{%s}%s" % (namespace, name)
    else:
        qualified = name
    return qualified.encode("utf-8")
def decodeXMLName(name):
    """
    Decodes an XML (namespace, name) pair from an ASCII string as
    encoded by encodeXMLName().

    Returns (namespace, localname) where namespace is None for an
    unqualified name. Raises ValueError for malformed input.
    """
    def invalid():
        raise ValueError("Invalid encoded name: %r" % (name,))

    if not name:
        invalid()

    # BUGFIX: use value comparison (==) instead of identity ('is') for the
    # "{" and -1 checks — identity on strings/small ints only works by
    # CPython interning accident.
    if name[0] == "{":
        index = name.find("}")
        # "}" must exist and must not be the final character.
        if (index == -1 or not len(name) > index):
            invalid()

        namespace = name[1:index].decode("utf-8")
        localname = name[index + 1:].decode("utf-8")

        if not namespace:
            namespace = None
        if not localname:
            invalid()
    else:
        namespace = None
        localname = name.decode("utf-8")
        # Braces are only legal as the namespace delimiter.
        if "{" in localname or "}" in localname:
            invalid()

    return (namespace, localname)
class WebDAVElement (object):
    """
    WebDAV XML element. (RFC 2518, section 12)

    Subclasses declare their name/namespace plus the allowed children and
    attributes; instances hold a tuple of child elements and an attribute
    dict, and can serialize themselves to XML or a DOM.
    """
    namespace = dav_namespace # Element namespace (class variable)
    name = None # Element name (class variable)
    allowed_children = None # Types & count limits on child elements
    allowed_attributes = None # Allowed attribute names
    hidden = False # Don't list in PROPFIND with <allprop>
    protected = False # See RFC 3253 section 1.4.1
    unregistered = False # Subclass of factory; doesn't register

    def __init__(self, *children, **attributes):
        """Collect child elements (promoting strings to PCDATA) and
        keyword attributes."""
        super(WebDAVElement, self).__init__()

        if self.allowed_children is None:
            raise NotImplementedError(
                "WebDAVElement subclass %s is not implemented."
                % (self.__class__.__name__,)
            )

        my_children = []
        allowPCDATA = PCDATAElement in self.allowed_children
        for child in children:
            if child is None:
                continue
            # Bare strings become text nodes.
            if isinstance(child, (str, unicode)):
                child = PCDATAElement(child)
            # Silently drop text nodes where text is not allowed.
            if isinstance(child, PCDATAElement) and not allowPCDATA:
                continue
            my_children.append(child)

        self.children = tuple(my_children)
        self.attributes = attributes

    @classmethod
    def qname(cls):
        """Return the (namespace, name) pair for this element class."""
        return (cls.namespace, cls.name)

    @classmethod
    def sname(cls):
        """Return the encoded "{namespace}name" string for this class."""
        return encodeXMLName(cls.namespace, cls.name)

    def validate(self):
        """Recursively check children against allowed_children limits and
        attributes against allowed_attributes, pruning unexpected ones."""
        children = self.children
        attributes = self.attributes

        if self.allowed_children is None:
            raise NotImplementedError(
                "WebDAVElement subclass %s is not implemented."
                % (self.__class__.__name__,)
            )

        #
        # Validate that children are of acceptable types
        #
        # Working copy of the limits: entries are mutated as children match.
        allowed_children = dict([
            (child_type, list(limits))
            for child_type, limits
            in self.allowed_children.items()
        ])

        my_children = []
        for child in children:
            assert isinstance(child, (WebDAVElement, PCDATAElement)), "Not an element: %r" % (child,)

            child.validate()

            for allowed, (min, max) in allowed_children.items():
                # A spec key may be a class or a qname tuple.
                if type(allowed) == type and isinstance(child, allowed):
                    qname = allowed
                elif child.qname() == allowed:
                    qname = allowed
                else:
                    continue
                if min is not None and min > 0:
                    min -= 1
                if max is not None:
                    assert max > 0, "Too many children of type %s for %s" % (child.sname(), self.sname())
                    max -= 1
                allowed_children[qname] = (min, max)
                my_children.append(child)
                break
            else:
                # Unmatched, non-whitespace children are logged and dropped.
                if not (isinstance(child, PCDATAElement) and child.isWhitespace()):
                    log.debug(
                        "Child of type %s is unexpected and therefore ignored in %s element"
                        % (child.sname(), self.sname())
                    )

        for qname, (min, max) in allowed_children.items():
            if min != 0:
                raise ValueError("Not enough children of type %s for %s"
                    % (encodeXMLName(*qname), self.sname()))

        self.children = tuple(my_children)

        #
        # Validate that attributes have known names
        #
        my_attributes = {}

        if self.allowed_attributes:
            for name in attributes:
                if name not in self.allowed_attributes:
                    log.debug("Attribute %s is unexpected in %s element" % (name, self.sname()))
                my_attributes[name] = attributes[name]

            for name, required in self.allowed_attributes.items():
                if required and name not in my_attributes:
                    raise ValueError("Attribute %s is required in %s element"
                        % (name, self.sname()))

        else:
            # Unknown elements accept arbitrary attributes without warning.
            if not isinstance(self, WebDAVUnknownElement) and attributes:
                log.debug("Attributes %s are unexpected in %s element"
                    % (attributes.keys(), self.sname()))
            my_attributes.update(attributes)

        self.attributes = my_attributes

    def __str__(self):
        return self.sname()

    def __repr__(self):
        if hasattr(self, "attributes") and hasattr(self, "children"):
            return "<%s %r: %r>" % (self.sname(), self.attributes, self.children)
        else:
            return "<%s>" % (self.sname())

    def __eq__(self, other):
        if isinstance(other, WebDAVElement):
            return (
                self.name == other.name and
                self.namespace == other.namespace and
                self.attributes == other.attributes and
                self.children == other.children
            )
        else:
            return NotImplemented

    def __ne__(self, other):
        return not self.__eq__(other)

    def __contains__(self, child):
        return child in self.children

    def writeXML(self, output, pretty=True):
        """Write this element as a complete XML document to *output*."""
        output.write("<?xml version='1.0' encoding='UTF-8'?>" + ("\n" if pretty else ""))
        self._writeToStream(output, "", 0, pretty)

    def _writeToStream(self, output, ns, level, pretty):
        """
        Fast XML output.

        @param output: C{stream} to write to.
        @param ns: C{str} containing the namespace of the enclosing element.
        @param level: C{int} containing the element nesting level (starts at 0).
        @param pretty: C{bool} whether to use 'pretty' formatted output or not.
        """
        # Do pretty indent
        if pretty and level:
            output.write("  " * level)

        # Check for empty element (one with either no children or a single PCDATA that is itself empty)
        if (len(self.children) == 0 or
            (len(self.children) == 1 and isinstance(self.children[0], PCDATAElement) and len(str(self.children[0])) == 0)):

            # Write out any attributes or the namespace if difference from enclosing element.
            if self.attributes or (ns != self.namespace):
                output.write("<%s" % (self.name,))
                for name, value in self.attributes.iteritems():
                    self._writeAttributeToStream(output, name, value)
                if ns != self.namespace:
                    output.write(" xmlns='%s'" % (self.namespace,))
                output.write("/>")
            else:
                output.write("<%s/>" % (self.name,))
        else:
            # Write out any attributes or the namespace if difference from enclosing element.
            if self.attributes or (ns != self.namespace):
                output.write("<%s" % (self.name,))
                for name, value in self.attributes.iteritems():
                    self._writeAttributeToStream(output, name, value)
                if ns != self.namespace:
                    output.write(" xmlns='%s'" % (self.namespace,))
                    ns = self.namespace
                output.write(">")
            else:
                output.write("<%s>" % (self.name,))

            # Determine nature of children when doing pretty print: we do
            # not want to insert CRLFs or any other whitespace in PCDATA.
            hasPCDATA = False
            for child in self.children:
                if isinstance(child, PCDATAElement):
                    hasPCDATA = True
                    break

            # Write out the children.
            if pretty and not hasPCDATA:
                output.write("\r\n")
            for child in self.children:
                child._writeToStream(output, ns, level + 1, pretty)

            # Close the element.
            if pretty and not hasPCDATA and level:
                output.write("  " * level)
            output.write("</%s>" % (self.name,))

        if pretty and level:
            output.write("\r\n")

    def _writeAttributeToStream(self, output, name, value):
        # Quote any single quotes. We do not need to be any smarter than this.
        # BUGFIX: the replacement text had been entity-decoded to a plain
        # apostrophe (a no-op); restore the XML entity so the generated
        # attribute is well-formed.
        value = value.replace("'", "&apos;")

        output.write(" %s='%s'" % (name, value,))

    def toxml(self, pretty=True):
        """Return this element serialized as an XML string."""
        output = StringIO.StringIO()
        self.writeXML(output, pretty)
        return str(output.getvalue())

    def element(self, document):
        """Create (but do not attach) the DOM element for this node."""
        element = document.createElementNS(self.namespace, self.name)
        if hasattr(self, "attributes"):
            for name, value in self.attributes.items():
                namespace, name = decodeXMLName(name)
                attribute = document.createAttributeNS(namespace, name)
                attribute.nodeValue = value
                element.setAttributeNodeNS(attribute)
        return element

    def addToDOM(self, document, parent):
        """Attach this element (and recursively its children) to *parent*,
        or to the document itself when parent is None."""
        element = self.element(document)

        if parent is None:
            document.appendChild(element)
        else:
            parent.appendChild(element)

        for child in self.children:
            if child:
                try:
                    child.addToDOM(document, element)
                except:
                    log.error("Unable to add child %r of element %s to DOM" % (child, self))
                    raise

    def childrenOfType(self, child_type):
        """
        Returns a list of children with the same qname as the given type.
        """
        if type(child_type) is tuple:
            qname = child_type
        else:
            qname = child_type.qname()

        return [c for c in self.children if c.qname() == qname]

    def childOfType(self, child_type):
        """
        Returns a child of the given type, if any, or None.
        Raises ValueError if more than one is found.
        """
        found = None
        for child in self.childrenOfType(child_type):
            if found:
                raise ValueError("Multiple %s elements found in %s" % (child_type.sname(), self.toxml()))
            found = child
        return found

    def removeWhitespaceNodes(self):
        """ Removes all of the whitespace-only text decendants of a DOM node. """
        # prepare the list of text nodes to remove (and recurse when needed)
        remove_list = []
        for child in self.children:
            if isinstance(child, PCDATAElement) and not child.data.strip():
                # add this text node to the to-be-removed list
                remove_list.append(child)
            elif isinstance(child, WebDAVElement):
                # recurse, it's the simplest way to deal with the subtree
                child.removeWhitespaceNodes()

        # perform the removals
        newchildren = []
        for child in self.children:
            if child not in remove_list:
                newchildren.append(child)
        self.children = tuple(newchildren)
class PCDATAElement (object):
    """Text (PCDATA) node: wraps a UTF-8 byte string of character data."""

    def __init__(self, data):
        super(PCDATAElement, self).__init__()

        if data is None:
            data = ""
        elif type(data) is unicode:
            data = data.encode("utf-8")
        else:
            assert type(data) is str, ("PCDATA must be a string: %r" % (data,))

        self.data = data

    @classmethod
    def qname(cls):
        return (None, "#PCDATA")

    @classmethod
    def sname(cls):
        return "#PCDATA"

    def validate(self):
        # Text nodes carry no structure to validate.
        pass

    def __str__(self):
        return str(self.data)

    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.data)

    def __add__(self, other):
        if isinstance(other, PCDATAElement):
            return self.__class__(self.data + other.data)
        else:
            return self.__class__(self.data + other)

    def __eq__(self, other):
        if isinstance(other, PCDATAElement):
            return self.data == other.data
        elif type(other) in (str, unicode):
            return self.data == other
        else:
            return NotImplemented

    def __ne__(self, other):
        return not self.__eq__(other)

    def isWhitespace(self):
        """Return True if this node contains only whitespace characters."""
        for char in str(self):
            if char not in string.whitespace:
                return False
        return True

    def element(self, document):
        return document.createTextNode(self.data)

    def addToDOM(self, document, parent):
        try:
            parent.appendChild(self.element(document))
        except TypeError:
            log.error("Invalid PCDATA: %r" % (self.data,))
            raise

    def _writeToStream(self, output, ns, level, pretty):
        # Do escaping/CDATA behavior
        if "\r" in self.data or "\n" in self.data:
            # Do CDATA
            # BUGFIX: the "]]>" replacement was a no-op (the entities had
            # been decoded away); a literal "]]>" inside the data must be
            # split across two CDATA sections.
            cdata = "<![CDATA[%s]]>" % (self.data.replace("]]>", "]]]]><![CDATA[>"),)
        else:
            cdata = self.data
            # BUGFIX: these replacements were all no-ops ("&"->"&" etc.)
            # because the entity text had been decoded; restore proper XML
            # entity escaping. '&' must be escaped first.
            if "&" in cdata:
                cdata = cdata.replace("&", "&amp;")
            if "<" in cdata:
                cdata = cdata.replace("<", "&lt;")
            if ">" in cdata:
                cdata = cdata.replace(">", "&gt;")

        output.write(cdata)
class WebDAVOneShotElement (WebDAVElement):
    """
    Element with exactly one WebDAVEmptyElement child and no attributes.
    """
    # Interning cache: one instance per (subclass, child) pair.
    __singletons = {}
    def __new__(cls, *children):
        child = None
        for next in children:
            if isinstance(next, WebDAVEmptyElement):
                # Exactly one empty-element child is allowed.
                if child is not None:
                    raise ValueError("%s must have exactly one child, not %r"
                        % (cls.__name__, children))
                child = next
            elif isinstance(next, PCDATAElement):
                # Text children are ignored.
                pass
            else:
                raise ValueError("%s child is not a WebDAVEmptyElement instance: %s"
                    % (cls.__name__, next))
        # Return the interned instance for this (class, child) pair,
        # creating it on first use.
        if cls not in WebDAVOneShotElement.__singletons:
            WebDAVOneShotElement.__singletons[cls] = {
                child: WebDAVElement.__new__(cls)
            }
        elif child not in WebDAVOneShotElement.__singletons[cls]:
            WebDAVOneShotElement.__singletons[cls][child] = (
                WebDAVElement.__new__(cls)
            )
        return WebDAVOneShotElement.__singletons[cls][child]
class WebDAVUnknownElement (WebDAVElement):
    """
    Placeholder element used for tag names this module does not know.

    Accepts any mix of element and PCDATA children.
    """
    allowed_children = {
        WebDAVElement: (0, None),
        PCDATAElement: (0, None),
    }

    @classmethod
    def withName(cls, namespace, name):
        """Build an instance and tag it with the given namespace and name."""
        element = cls()
        element.namespace = namespace
        element.name = name
        return element

    def qname(self):
        """Return the (namespace, name) tuple for this element."""
        return (self.namespace, self.name)

    def sname(self):
        """Return the encoded string form of the qualified name."""
        return encodeXMLName(self.namespace, self.name)
class WebDAVEmptyElement (WebDAVElement):
    """
    WebDAV element with no contents.

    Argument-free instantiations are interned per class, so empty
    elements can be compared by identity and used as dictionary keys.
    """
    __singletons = {}

    def __new__(cls, *args, **kwargs):
        assert not args

        if not kwargs:
            # No attributes: hand out one shared instance per class.
            cached = WebDAVEmptyElement.__singletons
            if cls not in cached:
                cached[cls] = (WebDAVElement.__new__(cls))
            return cached[cls]

        # Attribute-bearing instances are not interned.
        return WebDAVElement.__new__(cls)

    allowed_children = {}

    children = ()

    def __hash__(self):
        """
        Define a hash method, so that an empty element can serve as dictionary
        keys. It's mainly useful to define singletons with
        L{WebDAVOneShotElement}.
        """
        return hash((self.name, self.namespace))
class WebDAVTextElement (WebDAVElement):
    """
    WebDAV element containing PCDATA.
    """
    @classmethod
    def fromString(clazz, string):
        # Accept None (yields an empty element) or any string/object;
        # non-string values are coerced with str().
        if string is None:
            return clazz()
        elif isinstance(string, (unicode, str)):
            return clazz(PCDATAElement(string))
        else:
            return clazz(PCDATAElement(str(string)))

    allowed_children = {PCDATAElement: (0, None)}

    def toString(self):
        """
        @return: a unicode string containing the text in this element.
        """
        # NOTE(review): relies on __str__ returning UTF-8 bytes, which is
        # Python 2 behavior; under Python 3 this decode would fail on the
        # str that __str__ must return -- confirm before porting.
        return self.__str__().decode("utf-8")

    def __str__(self):
        """
        @return: a byte string containing the text in this element.
        """
        return b"".join([c.data for c in self.children])

    def __repr__(self):
        # Show the text content when present, the bare tag name otherwise.
        content = str(self)
        if content:
            return "<%s: %r>" % (self.sname(), content)
        else:
            return "<%s>" % (self.sname(),)

    def __eq__(self, other):
        # Equal to another element of the same class with the same text,
        # or directly to a matching string.
        if isinstance(other, self.__class__):
            return str(self) == str(other)
        elif type(other) in (str, unicode):
            return str(self) == other
        else:
            return NotImplemented
class WebDAVDateTimeElement (WebDAVTextElement):
    """
    WebDAV date-time element. (RFC 2518, section 23.2)
    """
    @classmethod
    def fromDate(clazz, date):
        """
        date may be a datetime.datetime instance, a POSIX timestamp
        (integer value, such as returned by time.time()), or an ISO
        8601-formatted (eg. "2005-06-13T16:14:11Z") date/time string.
        """
        def isoformat(date):
            # Datetimes without an offset are labeled as UTC ("Z").
            if date.utcoffset() is None:
                return date.isoformat() + "Z"
            else:
                return date.isoformat()

        if type(date) is int:
            date = isoformat(datetime.datetime.fromtimestamp(date))
        elif type(date) is str:
            pass
        elif type(date) is unicode:
            # Python 2: normalize unicode to a UTF-8 byte string.
            date = date.encode("utf-8")
        elif isinstance(date, datetime.datetime):
            date = isoformat(date)
        else:
            raise ValueError("Unknown date type: %r" % (date,))

        return clazz(PCDATAElement(date))

    def __init__(self, *children, **attributes):
        super(WebDAVDateTimeElement, self).__init__(*children, **attributes)
        self.datetime()  # Raise ValueError if the format is wrong

    def __eq__(self, other):
        # Compare parsed datetime values, not raw text.
        if isinstance(other, self.__class__):
            return self.datetime() == other.datetime()
        else:
            return NotImplemented

    def datetime(self):
        """Parse the element text as ISO 8601; returns None when empty."""
        s = str(self)
        if not s:
            return None
        else:
            return parse_date(s)
class DateTimeHeaderElement (WebDAVDateTimeElement):
    """
    WebDAV date-time element for elements that substitute for HTTP
    headers. (RFC 2068, section 3.3.1)
    """
    @classmethod
    def fromDate(clazz, date):
        """
        date may be a datetime.datetime instance, a POSIX timestamp
        (integer value, such as returned by time.time()), or an RFC
        2068 Full Date (eg. "Mon, 23 May 2005 04:52:22 GMT") string.
        """
        def format(date):
            #
            # FIXME: strftime() is subject to localization nonsense; we need to
            # ensure that we're using the correct localization, or don't use
            # strftime().
            #
            return date.strftime("%a, %d %b %Y %H:%M:%S GMT")

        if type(date) is int:
            # Timestamps are formatted in UTC.
            date = format(datetime.datetime.utcfromtimestamp(date))
        elif type(date) is str:
            pass
        elif type(date) is unicode:
            # Python 2: normalize unicode to a UTF-8 byte string.
            date = date.encode("utf-8")
        elif isinstance(date, datetime.datetime):
            if date.tzinfo:
                raise NotImplementedError("I need to normalize to UTC")
            date = format(date)
        else:
            raise ValueError("Unknown date type: %r" % (date,))

        return clazz(PCDATAElement(date))

    def datetime(self):
        """Parse the element text as an HTTP Full Date; None when empty."""
        s = str(self)
        if not s:
            return None
        else:
            return parseDateTime(s)
##
# Utilities
##
class FixedOffset (datetime.tzinfo):
    """
    A tzinfo with a constant offset, given in minutes east of UTC.
    """
    def __init__(self, offset, name=None):
        super(FixedOffset, self).__init__()

        self._offset = datetime.timedelta(minutes=offset)
        self._name = name

    def utcoffset(self, dt):
        """Constant offset, independent of dt."""
        return self._offset

    def tzname(self, dt):
        """Optional human-readable zone name (may be None)."""
        return self._name

    def dst(self, dt):
        """Fixed offsets carry no daylight-saving component."""
        return datetime.timedelta(0)
_regex_ISO8601Date = re.compile(
"^" +
"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T" +
"(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(?:.(?P<subsecond>\d+))*" +
"(?:Z|(?P<offset_sign>\+|-)(?P<offset_hour>\d{2}):(?P<offset_minute>\d{2}))" +
"$"
)
def parse_date(date):
    """
    Parse an ISO 8601 date and return a corresponding timezone-aware
    datetime.datetime object.

    Raises ValueError for strings not matching _regex_ISO8601Date.
    """
    # See http://www.iso.org/iso/en/prods-services/popstds/datesandtime.html
    match = _regex_ISO8601Date.match(date)
    if match is None:
        raise ValueError("Invalid ISO 8601 date format: %r" % (date,))

    # Fix: fractional seconds are a decimal fraction, not a literal
    # microsecond count -- ".5" means half a second (500000us), whereas
    # the old code passed 5 straight through as the microsecond value.
    subsecond = match.group("subsecond")
    if subsecond is None:
        microsecond = 0
    else:
        microsecond = int(round(float("0." + subsecond) * 1000000))

    offset_sign = match.group("offset_sign")
    if offset_sign is None:
        offset = FixedOffset(0)
    else:
        offset_hour = int(match.group("offset_hour"))
        offset_minute = int(match.group("offset_minute"))

        delta = (offset_hour * 60) + offset_minute

        # NOTE(review): "+HH:MM" yields a NEGATIVE FixedOffset here
        # (and vice versa), which looks inverted relative to FixedOffset's
        # "minutes east of UTC" contract -- preserved as-is; confirm
        # against the callers before changing.
        if offset_sign == "+":
            offset = FixedOffset(0 - delta)
        elif offset_sign == "-":
            offset = FixedOffset(0 + delta)

    return datetime.datetime(
        int(match.group("year")),
        int(match.group("month")),
        int(match.group("day")),
        int(match.group("hour")),
        int(match.group("minute")),
        int(match.group("second")),
        microsecond,
        offset
    )
|
trevor/calendarserver
|
txdav/xml/base.py
|
Python
|
apache-2.0
| 25,071
|
import _plotly_utils.basevalidators
class ReversescaleValidator(_plotly_utils.basevalidators.BooleanValidator):
    """Boolean validator for the 'treemap.marker.reversescale' plotly property."""

    def __init__(
        self, plotly_name="reversescale", parent_name="treemap.marker", **kwargs
    ):
        super(ReversescaleValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Default edit_type is "plot" unless the caller overrides it.
            edit_type=kwargs.pop("edit_type", "plot"),
            **kwargs
        )
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/treemap/marker/_reversescale.py
|
Python
|
mit
| 433
|
# Prefer the optimized implementation ("_speedups" is presumably a
# compiled extension); fall back to the "_backup" implementation when it
# is not available on this platform.
try:
    from ._speedups import pack_command
except ImportError:
    from ._backup import pack_command
|
simonz05/pack-command
|
pack_command/__init__.py
|
Python
|
mit
| 103
|
# -*- coding: utf-8 -*-
"""
fudcon.ui.backend
-----------------

The FUDCon UI backend application package.
"""
|
echevemaster/fudcon
|
fudcon/ui/backend/__init__.py
|
Python
|
mit
| 107
|
#!/usr/bin/env python
import rospy
from task_executor.testing import TestEntry
def check_task_descriptions(task_descriptions):
    # Test callback: echo the task descriptions still outstanding.
    # (Python 2 print statement, consistent with the rest of this script.)
    print 'remaining task descriptions: ', task_descriptions
def check_time_diffs(time_diffs):
    # Test callback: echo the recorded time differences.
    print 'time diffs: ', time_diffs
if __name__ == '__main__':
    # Drive the 'use_all_test' scenario through the TestEntry harness,
    # then hand control to the ROS event loop.
    executor = TestEntry('use_all_test')

    # testing time critical task
    # executor.run_test(check_task_descriptions, test_tasks = 5, time_critical_tasks = 3, time_diffs_fn = check_time_diffs)

    # testing pause / restart
    executor.run_test(check_task_descriptions, test_tasks = 10, demanded_tasks = 3, time_critical_tasks = 1)
    rospy.spin()
|
bfalacerda/strands_executive
|
task_executor/scripts/use_all_test_script.py
|
Python
|
mit
| 667
|
#!/usr/bin/env python
from cloudmon_dash import app
if __name__ == '__main__':
app.run(host='0.0.0.0')
|
dgivens/cloudmon-dash
|
dash.py
|
Python
|
apache-2.0
| 109
|
# Copyright (c) 2008,2010,2015,2016,2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Setup script for installing MetPy."""
import sys
from setuptools import setup
# Fail fast with an actionable message on Python 2: MetPy >= 0.12
# supports Python 3 only.
if sys.version_info[0] < 3:
    error = """
MetPy v0.12 and greater requires the Python 3.6 or above.
If you're using Python 2.7, please install MetPy v0.11.1,
which is the last release of MetPy that supports Python 2.7,
but it is no longer maintained.
Python {py} detected.
""".format(py='.'.join([str(v) for v in sys.version_info[:3]]))
    print(error)  # noqa: T001
    sys.exit(1)

# All metadata lives in setup.cfg; the version is derived from SCM tags.
setup(use_scm_version={'version_scheme': 'post-release'})
|
ahaberlie/MetPy
|
setup.py
|
Python
|
bsd-3-clause
| 713
|
#Author Yukun Chen
#email: cykustc@gmail.com
#Date: Sept 04 2015
import getopt
import argparse
import sys
import re
from collections import defaultdict
def displaymatch(match):
    """Return a printable description of a regex match object (None-safe)."""
    if match is None:
        return None
    return '<Match: %r, groups=%r>' % (match.group(), match.groups())
def bib2rest(input_bibfile,output_txtfile):
    """Convert a BibTeX file into reStructuredText citation entries.

    Scans the input line by line; for each bib entry it collects the
    citation key, title, author and other recognized fields, then writes
    one ".. [key] author, title, other-fields" block to the output.
    (Python 2 script: uses print statements and byte-mode file handles.)
    """
    print input_bibfile
    print output_txtfile
    # Patterns for the entry header, the individual fields, and the
    # closing brace of an entry.
    start_pattern = re.compile(r"^(?: |\t)*\@(?:book|article|incollection|inproceedings)\{([a-z0-9]+), *$")
    title_pattern = re.compile(r"^(?: |\t)*title=\{([a-zA-Z0-9 ]+)\}(?: |\t)*,(?: |\t)*$")
    author_pattern = re.compile(r"^(?: |\t)*author=\{([a-zA-Z0-9 ,;\.\-]+)\}(?: |\t)*,(?: |\t)*$")
    other_info_pattern = re.compile(r"^(?: |\t)*(?:journal|volume|number|year|publisher|pages|organization|booktitle)=\{([a-zA-Z0-9 ,;\.-]+)\}(?: |\t)*,(?: |\t)*$")
    end_pattern = re.compile("^(?: |\t)*}(?: |\t)*$")
    with open(input_bibfile,'rb') as input_handle:
        with open(output_txtfile,'wb') as output_handle:
            in_a_bib_block = False;
            rest_ref_block = "";
            title = "";
            author = "";
            ref="";
            output_handle.write(".. _references:\n\n==========\nReferences\n==========\n\n")
            for line in input_handle:
                if not in_a_bib_block:
                    # not in a bib block
                    if start_pattern.match(line):
                        matches = start_pattern.match(line)
                        in_a_bib_block = True
                        ref = matches.group(1)
                    else:
                        pass
                else:
                    # in a bib block
                    if end_pattern.match(line):
                        matches = end_pattern.match(line)
                        in_a_bib_block = False
                        # NOTE(review): 'other_info' holds only the last
                        # matched extra field and is never reset between
                        # entries; an entry with no extra fields reuses the
                        # previous entry's value (NameError on the very
                        # first entry). Confirm intended behavior.
                        rest_ref_block = ".. [" + ref +"]" + " " + author +", " + title +", " + other_info
                        output_handle.write(rest_ref_block+"\n\n")
                    elif title_pattern.match(line):
                        matches = title_pattern.match(line)
                        title = matches.group(1)
                    elif author_pattern.match(line):
                        matches = author_pattern.match(line)
                        author = matches.group(1)
                    elif other_info_pattern.match(line):
                        matches = other_info_pattern.match(line)
                        other_info = matches.group(1)
                        rest_ref_block = rest_ref_block + ", "+ other_info
                    else:
                        pass
if __name__ == '__main__':
    # Command-line entry point: parse -i/-o options and fall back to
    # default file names when either is omitted.
    # Fixes: 'is None' instead of '== None' comparisons; removed the
    # unused 'throot' variable.
    parser = argparse.ArgumentParser(description='bib2reSTcitation is a tool to convert bib tex file to reStructuredText Markup citation format.')
    parser.add_argument('-o', '--output', help='output file path')
    parser.add_argument('-i', '--input', help='input file path')
    args = parser.parse_args()

    input_file = args.input
    if input_file is None:
        input_file = 'tex.bib'

    output_file = args.output
    if output_file is None:
        output_file = "references.txt"

    bib2rest(input_file, output_file)
|
cykustcc/bib2reSTcitation
|
bib2reSTcitation.py
|
Python
|
mit
| 2,664
|
#/usr/bin/env python
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Common objects shared by all _ps* modules."""
from __future__ import division
import errno
import os
import socket
import stat
import sys
import warnings
from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM
from psutil._compat import namedtuple, wraps
# --- constants

# getattr() because these address families are not available on every
# platform (e.g. no AF_UNIX on Windows).
AF_INET6 = getattr(socket, 'AF_INET6', None)
AF_UNIX = getattr(socket, 'AF_UNIX', None)

# Canonical process-status strings shared by the platform modules.
STATUS_RUNNING = "running"
STATUS_SLEEPING = "sleeping"
STATUS_DISK_SLEEP = "disk-sleep"
STATUS_STOPPED = "stopped"
STATUS_TRACING_STOP = "tracing-stop"
STATUS_ZOMBIE = "zombie"
STATUS_DEAD = "dead"
STATUS_WAKE_KILL = "wake-kill"
STATUS_WAKING = "waking"
STATUS_IDLE = "idle"  # BSD
STATUS_LOCKED = "locked"  # BSD
STATUS_WAITING = "waiting"  # BSD

# TCP connection-status strings, normalized across platforms.
CONN_ESTABLISHED = "ESTABLISHED"
CONN_SYN_SENT = "SYN_SENT"
CONN_SYN_RECV = "SYN_RECV"
CONN_FIN_WAIT1 = "FIN_WAIT1"
CONN_FIN_WAIT2 = "FIN_WAIT2"
CONN_TIME_WAIT = "TIME_WAIT"
CONN_CLOSE = "CLOSE"
CONN_CLOSE_WAIT = "CLOSE_WAIT"
CONN_LAST_ACK = "LAST_ACK"
CONN_LISTEN = "LISTEN"
CONN_CLOSING = "CLOSING"
CONN_NONE = "NONE"
# --- functions
def usage_percent(used, total, _round=None):
    """Calculate percentage usage of 'used' against 'total'.

    A zero total yields 0 instead of raising; _round optionally rounds
    the result to that many decimal places.
    """
    try:
        percent = 100 * (used / total)
    except ZeroDivisionError:
        percent = 0
    return round(percent, _round) if _round is not None else percent
def memoize(f):
    """A simple memoize decorator for functions.

    Results are cached per positional-argument tuple; keyword arguments
    are not supported.
    """
    results = {}

    def wrapper(*args):
        try:
            return results[args]
        except KeyError:
            results[args] = f(*args)
            return results[args]
    return wrapper
class cached_property(object):
    """A memoize decorator for class properties.

    On first access the value is computed and stored in the instance
    __dict__ under the function's name, so the instance attribute
    shadows this descriptor on subsequent lookups.
    """
    enabled = True

    def __init__(self, func):
        self.func = func

    def __get__(self, instance, type):
        value = self.func(instance)
        if self.enabled:
            # Cache by shadowing: later reads hit the instance dict.
            instance.__dict__[self.func.__name__] = value
        return value
# http://goo.gl/jYLvf
def deprecated(replacement=None):
    """A decorator which can be used to mark functions as deprecated.

    Each call to the wrapped function emits a DeprecationWarning; when
    'replacement' is given it is named in the message. Functions without
    a docstring get the message as their docstring.
    """
    def outer(fun):
        parts = ["psutil.%s is deprecated" % fun.__name__]
        if replacement is not None:
            parts.append("; use %s instead" % replacement)
        msg = "".join(parts)
        if fun.__doc__ is None:
            fun.__doc__ = msg

        @wraps(fun)
        def inner(*args, **kwargs):
            warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
            return fun(*args, **kwargs)
        return inner
    return outer
def isfile_strict(path):
    """Same as os.path.isfile() but does not swallow EACCES / EPERM
    exceptions, see:
    http://mail.python.org/pipermail/python-dev/2012-June/120787.html
    """
    try:
        st = os.stat(path)
    except OSError as err:
        # Permission errors propagate; anything else means "not a file".
        if err.errno in (errno.EPERM, errno.EACCES):
            raise
        return False
    return stat.S_ISREG(st.st_mode)
# --- Process.get_connections() 'kind' parameter mapping

# Maps each 'kind' string to the ([address families], [socket types])
# pair a connection must match to be included in the result.
conn_tmap = {
    "all": ([AF_INET, AF_INET6, AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
    "tcp": ([AF_INET, AF_INET6], [SOCK_STREAM]),
    "tcp4": ([AF_INET], [SOCK_STREAM]),
    "udp": ([AF_INET, AF_INET6], [SOCK_DGRAM]),
    "udp4": ([AF_INET], [SOCK_DGRAM]),
    "inet": ([AF_INET, AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
    "inet4": ([AF_INET], [SOCK_STREAM, SOCK_DGRAM]),
    "inet6": ([AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
}

# IPv6 / UNIX-socket kinds only exist where the platform supports them.
if AF_INET6 is not None:
    conn_tmap.update({
        "tcp6": ([AF_INET6], [SOCK_STREAM]),
        "udp6": ([AF_INET6], [SOCK_DGRAM]),
    })

if AF_UNIX is not None:
    conn_tmap.update({
        "unix": ([AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
    })

# Drop names that were only needed to build the mapping above.
del AF_INET, AF_INET6, AF_UNIX, SOCK_STREAM, SOCK_DGRAM, socket
# --- namedtuples

# system

# Return types of the system-wide APIs (memory, swap, disk, network, users).
nt_sysmeminfo = namedtuple('usage', 'total used free percent')
# XXX - would 'available' be better than 'free' as for virtual_memory() nt?
nt_swapmeminfo = namedtuple('swap', 'total used free percent sin sout')
nt_diskinfo = namedtuple('usage', 'total used free percent')
nt_partition = namedtuple('partition', 'device mountpoint fstype opts')
nt_net_iostat = namedtuple(
    'iostat',
    'bytes_sent bytes_recv packets_sent packets_recv errin errout dropin dropout')
nt_disk_iostat = namedtuple(
    'iostat',
    'read_count write_count read_bytes write_bytes read_time write_time')
nt_user = namedtuple('user', 'name terminal host started')

# processes

# Return types of the per-process APIs.
nt_meminfo = namedtuple('meminfo', 'rss vms')
nt_cputimes = namedtuple('cputimes', 'user system')
nt_openfile = namedtuple('openfile', 'path fd')
nt_thread = namedtuple('thread', 'id user_time system_time')
nt_uids = namedtuple('user', 'real effective saved')
nt_gids = namedtuple('group', 'real effective saved')
nt_io = namedtuple('io', 'read_count write_count read_bytes write_bytes')
nt_ionice = namedtuple('ionice', 'ioclass value')
nt_ctxsw = namedtuple('amount', 'voluntary involuntary')
# --- misc

# backward compatibility layer for Process.get_connections() ntuple
class nt_connection(namedtuple('connection',
                               'fd family type laddr raddr status')):
    """Connection tuple that also exposes the deprecated
    local_address/remote_address aliases for laddr/raddr."""
    __slots__ = ()

    @property
    def local_address(self):
        """Deprecated alias for 'laddr' (warns on every access)."""
        # Fix: the implicit string concatenation was missing a space,
        # producing "use 'laddr'instead" in the warning text.
        warnings.warn("'local_address' field is deprecated; use 'laddr' "
                      "instead", category=DeprecationWarning, stacklevel=2)
        return self.laddr

    @property
    def remote_address(self):
        """Deprecated alias for 'raddr' (warns on every access)."""
        warnings.warn("'remote_address' field is deprecated; use 'raddr' "
                      "instead", category=DeprecationWarning, stacklevel=2)
        return self.raddr
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/psutil/_common.py
|
Python
|
agpl-3.0
| 5,770
|
from test.integration.base import DBTIntegrationTest, FakeArgs, use_profile
import json
class TestChangingPartitions(DBTIntegrationTest):
    """Integration tests: changing the BigQuery partition/cluster
    configuration between `dbt run`s must succeed in both directions."""

    @property
    def schema(self):
        # Schema name used for this test suite.
        return "bigquery_test_022"

    @property
    def models(self):
        # Directory containing the models under test.
        return "partition-models"

    def run_changes(self, before, after):
        """Run dbt twice with different config vars; each run must
        produce exactly one model result."""
        results = self.run_dbt(['run', '--vars', json.dumps(before)])
        self.assertEqual(len(results), 1)

        results = self.run_dbt(['run', '--vars', json.dumps(after)])
        self.assertEqual(len(results), 1)

    @use_profile('bigquery')
    def test_bigquery_add_partition(self):
        # None -> timestamp partitioning
        before = {"partition_by": None, "cluster_by": None}
        after = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp'}, "cluster_by": None}
        self.run_changes(before, after)

    @use_profile('bigquery')
    def test_bigquery_remove_partition(self):
        # timestamp partitioning -> None
        before = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp'}, "cluster_by": None}
        after = {"partition_by": None, "cluster_by": None}
        self.run_changes(before, after)

    @use_profile('bigquery')
    def test_bigquery_change_partitions(self):
        # timestamp field -> date field, and back again
        before = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp'}, "cluster_by": None}
        after = {"partition_by": {'field': "cur_date"}, "cluster_by": None}
        self.run_changes(before, after)
        self.run_changes(after, before)

    @use_profile('bigquery')
    def test_bigquery_change_partitions_from_int(self):
        # integer-range partitioning -> date partitioning, and back
        before = {"partition_by": {"field": "id", "data_type": "int64", "range": {"start": 0, "end": 10, "interval": 1}}, "cluster_by": None}
        after = {"partition_by": {"field": "cur_date", "data_type": "date"}, "cluster_by": None}
        self.run_changes(before, after)
        self.run_changes(after, before)

    @use_profile('bigquery')
    def test_bigquery_add_clustering(self):
        before = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp'}, "cluster_by": None}
        after = {"partition_by": {'field': "cur_date"}, "cluster_by": "id"}
        self.run_changes(before, after)

    @use_profile('bigquery')
    def test_bigquery_remove_clustering(self):
        before = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp'}, "cluster_by": "id"}
        after = {"partition_by": {'field': "cur_date"}, "cluster_by": None}
        self.run_changes(before, after)

    @use_profile('bigquery')
    def test_bigquery_change_clustering(self):
        before = {"partition_by": {'field': 'cur_time', 'data_type': 'timestamp'}, "cluster_by": "id"}
        after = {"partition_by": {'field': "cur_date"}, "cluster_by": "name"}
        self.run_changes(before, after)

    @use_profile('bigquery')
    def test_bigquery_change_clustering_strict(self):
        before = {'partition_by': {'field': 'cur_time', 'data_type': 'timestamp'}, 'cluster_by': 'id'}
        after = {'partition_by': {'field': 'cur_date', 'data_type': 'date'}, 'cluster_by': 'name'}
        self.run_changes(before, after)
|
fishtown-analytics/dbt
|
test/integration/022_bigquery_test/test_bigquery_changing_partitions.py
|
Python
|
apache-2.0
| 3,058
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import os
import subprocess
import sys
from test_util import TestFailedError, run_command, \
serializeIncrParseMarkupFile
def main():
    """Generate pre/post-edit files from a template, run swift-syntax-test
    in incremental mode, and diff the transferred syntax tree against the
    expected one. Exits non-zero on failure."""
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='Utility for testing incremental syntax tree transfer',
        epilog='''
Based of a single template the utility generates a pre-edit and a post-edit
file. It then verifies that the incrementally transferred syntax tree
matches the syntax tree passed as --expected-incremental-syntax-tree.
To generate the pre-edit and the post-edit file from the template, it
operates on markers of the form:
<<test_case<pre|||post>>>
These placeholders are replaced by:
- 'pre' if a different test case than 'test_case' is run
- 'pre' for the pre-edit version of 'test_case'
- 'post' for the post-edit version of 'test_case'
''')
    parser.add_argument(
        'file', type=argparse.FileType(),
        help='The template file to test')
    parser.add_argument(
        '--test-case', default='',
        help='The test case to execute. If no test case is specified all '
             'unnamed substitutions are applied')
    parser.add_argument(
        '--temp-dir', required=True,
        help='A temporary directory where pre-edit and post-edit files can be '
             'saved')
    parser.add_argument(
        '--swift-syntax-test', required=True,
        help='The path to swift-syntax-test')
    parser.add_argument(
        '--expected-incremental-syntax-tree', required=True,
        help='The path to a file that contains the expected incrementally '
             'transferred syntax tree')

    args = parser.parse_args(sys.argv[1:])

    test_file = args.file.name
    test_file_name = os.path.basename(test_file)
    test_case = args.test_case
    temp_dir = args.temp_dir
    swift_syntax_test = args.swift_syntax_test
    expected_syntax_tree_file = args.expected_incremental_syntax_tree

    if not os.path.exists(temp_dir):
        os.makedirs(temp_dir)

    # Serialized incremental parse goes into a per-test-case JSON file.
    incremental_serialized_file = temp_dir + '/' + test_file_name + '.' \
        + test_case + '.incr.json'

    try:
        serializeIncrParseMarkupFile(test_file=test_file,
                                     test_case=test_case,
                                     mode='incremental',
                                     serialization_mode='incremental',
                                     omit_node_ids=False,
                                     output_file=incremental_serialized_file,
                                     temp_dir=temp_dir + '/temp',
                                     swift_syntax_test=swift_syntax_test,
                                     print_visual_reuse_info=False)
    except TestFailedError as e:
        print('Test case "%s" of %s FAILed' % (test_case, test_file),
              file=sys.stderr)
        print(e.message, file=sys.stderr)
        sys.exit(1)

    # Check if the two syntax trees are the same
    try:
        run_command(
            [
                'diff', '-u',
                incremental_serialized_file,
                expected_syntax_tree_file
            ])
    except subprocess.CalledProcessError as e:
        print('Test case "%s" of %s FAILed' % (test_case, test_file),
              file=sys.stderr)
        print('Syntax tree of incremental parsing does not match expected '
              'incrementally transfer syntax tree:\n\n', file=sys.stderr)
        print(e.output, file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
    main()
|
jopamer/swift
|
utils/incrparse/incr_transfer_tree.py
|
Python
|
apache-2.0
| 3,699
|
# This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
#
# Copyright 2008-2013 Andrew Collette and contributors
#
# License: Standard 3-clause BSD; see "license.txt" for full license terms
# and contributor agreement.
from __future__ import absolute_import
from h5py import _objects as o
from ..common import TestCase
class TestObjects(TestCase):
    """Low-level sanity tests for the h5py ObjectID wrapper type."""

    def test_invalid(self):
        # Check for segfault on close: wrappers built around bogus raw
        # identifiers must be safely garbage-collectable.
        oid = o.ObjectID(0)
        del oid
        oid = o.ObjectID(1)
        del oid

    def test_equality(self):
        # Identifier-based equality
        oid1 = o.ObjectID(42)
        oid2 = o.ObjectID(42)
        oid3 = o.ObjectID(43)

        self.assertEqual(oid1, oid2)
        self.assertNotEqual(oid1, oid3)

    def test_hash(self):
        # Default objects are not hashable
        oid = o.ObjectID(42)
        with self.assertRaises(TypeError):
            hash(oid)
|
ryfeus/lambda-packs
|
HDF4_H5_NETCDF/source2.7/h5py/tests/old/test_objects.py
|
Python
|
mit
| 953
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 29 14:29:21 2016
A script for drawing the Phase Plane plot for the
glucose/oxygen levels (for the 6 selected EFMs)
@author: noore
"""
import os
import pandas as pd
import numpy as np
from matplotlib import rcParams, cm, pyplot, colors, colorbar
import zipfile
import definitions as D
from prepare_data import read_sweep_zipfile, get_df_from_sweep_zipfile, \
get_general_parameters_from_zipfile
from scipy.interpolate import RectBivariateSpline
import itertools
import seaborn as sns
# Sweep archives are named '<PREFIX><iteration>.zip' under D.DATA_DIR.
PREFIX = 'n39-p'
# Iteration indices of the available sweep archives.
ITER_RANGE = range(31, 48)
# Column-name pattern of the swept parameter; renamed to D.GLU_COL below.
REGEX = 'mext-glucoseExt-'
class SweepInterpolator(object):
    """Per-EFM 2D interpolation of growth rate over (glucose, oxygen),
    built from the pre-computed parameter-sweep data."""

    def __init__(self, data_df, efm_list):
        # efm_list may be None/empty: fall back to all EFMs in the data.
        self.efm_list = efm_list or data_df['efm'].unique().tolist()

        # interpolate 2D function for each EFM
        self.f_interp_dict = {}
        max_growth_rate = 0
        for efm in self.efm_list:
            try:
                gr_df = data_df[data_df['efm'] == efm].pivot(index=D.GLU_COL,
                                                             columns=D.OX_COL,
                                                             values=D.GROWTH_RATE_L)
                # Interpolate in log10 concentration space.
                f = RectBivariateSpline(np.log10(gr_df.index),
                                        np.log10(gr_df.columns),
                                        gr_df)
                self.f_interp_dict[efm] = f

                max_gr_efm = f(np.log10(D.MAX_CONC['glucoseExt']),
                               np.log10(D.MAX_CONC['oxygen']))[0, 0]
                max_growth_rate = max(max_growth_rate, max_gr_efm)
            except ValueError:
                # EFMs whose sweep data cannot form a full grid are skipped.
                print("WARNING: cannot interpolate 2D function for EFM #%04d" % efm)

    def calc_gr(self, efm, glucose, oxygen):
        """Interpolated growth rate of one EFM at the given concentrations."""
        return self.f_interp_dict[efm](np.log10(glucose), np.log10(oxygen))[0, 0]

    def calc_all_gr(self, glucose, oxygen):
        """Growth rates of all EFMs at one condition, as a Series by EFM."""
        data = [self.calc_gr(efm, glucose, oxygen) for efm in self.efm_list]
        return pd.Series(index=self.efm_list, data=data)

    @staticmethod
    def get_general_params(iter_num):
        """Read the (rates, params, km) tables from one sweep archive."""
        prefix = '%s%d' % (PREFIX, iter_num)
        zip_fname = os.path.join(D.DATA_DIR, '%s.zip' % prefix)

        # get the oxygen level from the "metfixed.csv" file inside the zipfile
        with zipfile.ZipFile(zip_fname, 'r') as z:
            rates_df, params_df, km_df = \
                get_general_parameters_from_zipfile(z, prefix)

        return rates_df, params_df, km_df

    @staticmethod
    def get_efm_list_for_knockout(ko):
        """
        get the descriptions of the EFMs from one of the sweep
        files (doesn't matter which)
        """
        rates_df, _, _ = SweepInterpolator.get_general_params(min(ITER_RANGE))
        if type(ko) != list:
            ko = [ko]
        # Keep only EFMs that carry no flux through any knocked-out reaction.
        efms_to_keep = rates_df[~rates_df[ko].any(1)].index
        return list(efms_to_keep)

    def calculate_growth_on_grid(self, ko=None, N=200):
        """
        use the interpolations to calculate the growth rate on a NxN grid
        """
        glu_grid = np.logspace(np.log10(D.MIN_CONC['glucoseExt']),
                               np.log10(D.MAX_CONC['glucoseExt']), N)
        ox_grid = np.logspace(np.log10(D.MIN_CONC['oxygen']),
                              np.log10(D.MAX_CONC['oxygen']), N)

        if ko:
            efms = SweepInterpolator.get_efm_list_for_knockout(ko)
        else:
            efms = self.efm_list

        # For every grid point keep only the fastest-growing EFM.
        monod_df = []
        for g, o in itertools.product(glu_grid, ox_grid):
            growth_rates = [(self.calc_gr(efm, g, o), efm) for efm in efms]
            growth_rates.sort(reverse=True)
            best_gr, best_efm = growth_rates[0]
            monod_df.append((g, o, best_efm, best_gr))

        monod_df = pd.DataFrame(monod_df,
                                columns=[D.GLU_COL, D.OX_COL, 'best_efm', D.GROWTH_RATE_L])
        return monod_df

    @staticmethod
    def interpolate_2D_sweep(efm_list=None):
        """
        Combine all glucose and oxygen sweeps into one DataFrame
        """
        data_df = get_complete_sweep_data()
        return SweepInterpolator(data_df, efm_list)
def get_raw_sweep_data(iter_num):
    """Load one sweep archive and return its raw per-EFM table
    (presumably enzyme allocations), annotated with glucose and the
    archive's fixed oxygen concentration."""
    prefix = '%s%d' % (PREFIX, iter_num)
    zip_fname = os.path.join(D.DATA_DIR, '%s.zip' % prefix)

    # get the oxygen level from the "metfixed.csv" file inside the zipfile
    with zipfile.ZipFile(zip_fname, 'r') as z:
        ox_df = pd.read_csv(z.open('%s/metfixed.csv' % prefix, 'r'),
                            header=None, index_col=0)
        ox_conc = ox_df.at['oxygen', 1]  # in mM

    _, df = get_df_from_sweep_zipfile(zip_fname, REGEX)
    df.rename(columns={REGEX: D.GLU_COL}, inplace=True)
    df.insert(2, D.OX_COL, float(ox_conc))
    return df
def get_sweep_data(iter_num):
    """Load one sweep archive and return a tidy (long-format) growth-rate
    table with glucose and the archive's fixed oxygen concentration."""
    prefix = '%s%d' % (PREFIX, iter_num)
    zip_fname = os.path.join(D.DATA_DIR, '%s.zip' % prefix)

    # get the oxygen level from the "metfixed.csv" file inside the zipfile
    with zipfile.ZipFile(zip_fname, 'r') as z:
        ox_df = pd.read_csv(z.open('%s/metfixed.csv' % prefix, 'r'),
                            header=None, index_col=0)
        ox_conc = ox_df.at['oxygen', 1]  # in mM

    df = read_sweep_zipfile(zip_fname, REGEX)
    # Wide -> long: one row per (efm, glucose) with the growth rate.
    df = pd.melt(df.reset_index(), id_vars='efm', value_name=D.GROWTH_RATE_L)
    df.rename(columns={REGEX: D.GLU_COL}, inplace=True)
    df.insert(2, D.OX_COL, float(ox_conc))
    return df
def cache_complete_sweep_data():
    """Concatenate every sweep iteration into one tidy table
    (efm, glucose, oxygen, growth rate) and cache it as CSV."""
    df_list = []
    for iter_num in ITER_RANGE:
        df_list.append(get_sweep_data(iter_num))
    data_df = pd.concat(df_list)
    data_df.sort_values(['efm', D.GLU_COL, D.OX_COL], inplace=True)
    data_df = data_df[['efm', D.GLU_COL, D.OX_COL, D.GROWTH_RATE_L]]
    # Concentration columns arrive as strings; make them numeric.
    data_df[D.GLU_COL] = pd.to_numeric(data_df[D.GLU_COL])
    data_df[D.OX_COL] = pd.to_numeric(data_df[D.OX_COL])
    data_df.to_csv(os.path.join(D.TEMP_DIR, 'sweep2d_gr.csv'))
def get_complete_sweep_data():
    """Return the combined sweep DataFrame, building the cache on demand."""
    sweep_cache_fname = os.path.join(D.TEMP_DIR, 'sweep2d_gr.csv')
    if not os.path.exists(sweep_cache_fname):
        cache_complete_sweep_data()
    return pd.read_csv(sweep_cache_fname)
def get_winning_enzyme_allocations():
    """For every condition, keep the raw data rows belonging to the EFM
    with the highest growth rate, across all sweep iterations."""
    df_list = []
    for iter_num in ITER_RANGE:
        df_std_ox = get_raw_sweep_data(iter_num)
        df_std_ox_gr = get_sweep_data(iter_num)

        # find the winning EFM in each condition (glucose level)
        winning = df_std_ox_gr.sort_values(D.GROWTH_RATE_L, ascending=False).groupby(D.GLU_COL).first().reset_index()

        # merge the winning table with the enzyme data table, so that only the
        # enzyme allocation data for the winning EFM in each condition is kept
        win_enz_df = pd.merge(df_std_ox, winning, on=['efm', D.GLU_COL, D.OX_COL], how='inner')
        df_list.append(win_enz_df)

    df = pd.concat(df_list)
    return df
def write_cache_files():
    """
    write all relevant cache files
    """
    # 1) the growth rates for each triplet: EFM, glucose, oxygen
    cache_complete_sweep_data()

    # 2) for each glucose and oxygen pair, find the EFM with the maximal
    #    growth rate, and keep only its enzyme allocation values
    sweep2d_win_enzymes = get_winning_enzyme_allocations()
    sweep2d_win_enzymes.to_csv(os.path.join(D.TEMP_DIR, 'sweep2d_win_enzymes.csv'))

    # 3) after interpolating the g.r. for each EFM over a 200x200 2D grid
    #    find the EFM with the maximal growth rate (best_efm)
    # NOTE(review): despite its name this is a SweepInterpolator instance,
    # not a dict.
    f_interp_dict = SweepInterpolator.interpolate_2D_sweep()

    # (knockout reactions, file-name tag); None = wild type.
    kos = [(None, None),
           ('R60', 'ed'),
           ('R3', 'emp'),
           (D.R_OXYGEN_DEPENDENT, 'oxphos')]
    for ko, name in kos:
        if name is None:
            fname = os.path.join(D.TEMP_DIR, 'sweep2d_win_200x200.csv')
        else:
            fname = os.path.join(D.TEMP_DIR, 'sweep2d_%sko_win_200x200.csv' % name)
        sweep2d_grid = f_interp_dict.calculate_growth_on_grid(ko)
        sweep2d_grid.to_csv(fname)
def interpolate_single_condition(glucose=None, oxygen=None):
    """Growth rate of every EFM at one condition; missing concentrations
    default to the standard values from definitions."""
    interpolator = SweepInterpolator.interpolate_2D_sweep()

    glucose = glucose or D.STD_CONC['glucoseExt']
    oxygen = oxygen or D.STD_CONC['oxygen']
    data_df = interpolator.calc_all_gr(glucose, oxygen)

    return data_df
def plot_growth_rate_hist(glucose=None, oxygen=None, ax=None):
    """Histogram of per-EFM growth rates at one condition; draws only
    when an axes object is supplied."""
    glucose = glucose or D.STD_CONC['glucoseExt']
    oxygen = oxygen or D.STD_CONC['oxygen']
    data_df = interpolate_single_condition(glucose, oxygen)

    if ax is not None:
        bins = np.linspace(0, 0.8, 20)
        sns.distplot(data_df, ax=ax, bins=bins,
                     color=D.BAR_COLOR, kde=False)
        ax.set_title('[glu] = %g mM, [O$_2$] = %g mM' % (glucose, oxygen))
        ax.set_xlabel(D.GROWTH_RATE_L)
        ax.set_ylabel('no. of EFMs')
        ax.set_xlim(0, None)
def allocation_pie_chart(ax, glucose=100.0, oxygen=3.7e-3):
    """Pie chart of the winning EFM's enzyme allocation at the sweep
    condition closest to the requested glucose/oxygen pair; returns the
    winning EFM id."""
    win_enz_df = pd.read_csv(
        os.path.join(D.TEMP_DIR, 'sweep2d_win_enzymes.csv'))
    # Snap to the nearest sampled concentrations in the cached sweep.
    glu = sorted(win_enz_df[D.GLU_COL].unique(), key=lambda x: (x-glucose)**2)[0]
    ox = sorted(win_enz_df[D.OX_COL].unique(), key=lambda x: (x-oxygen)**2)[0]
    enz = win_enz_df[(win_enz_df[D.GLU_COL] == glu) & (win_enz_df[D.OX_COL] == ox)]
    efm = enz['efm'].unique()[0]
    gr = enz[D.GROWTH_RATE_L].unique()[0]

    # Normalize allocations and lump the tail (beyond 95%) into one slice.
    E_i = enz.set_index('reaction')['E_i'].sort_values(ascending=False)
    E_i = E_i / E_i.sum()
    E_lumped = E_i.drop(E_i[E_i.cumsum() > 0.95].index)
    E_lumped.loc[D.REMAINDER_L] = E_i[E_i.cumsum() > 0.95].sum()
    E_lumped.name = ''
    E_lumped.plot.pie(colors=list(map(D.reaction_to_rgb, E_lumped.index)),
                      labels=list(map(D.GET_REACTION_NAME, E_lumped.index)),
                      ax=ax)
    if efm in D.efm_dict:
        efm_name = D.efm_dict[efm]['label']
    else:
        efm_name = '%d' % efm
    ax.set_title('[glu] = %g mM, [O$_2$] = %g mM\nbest EFM is %s, %s = %.2f' %
                 (glucose, oxygen, efm_name, D.GROWTH_RATE_L, gr))
    return efm
def plot_surface(ax, figdata,
                 z=D.GROWTH_RATE_L, c=D.GROWTH_RATE_L, cmap=None, vmax=None,
                 sweep_cache_fname='sweep2d_win_200x200.csv'):
    """
    plot a 3D surface plot of the 2D-sweep axes, with growth rate (by default)
    as the z-axis. One can either use color to indicate hight, or overlay the
    mesh with another color based on a 4th parameter.
    """
    monod_df, axis_params = get_monod_data(sweep_cache_fname)
    #X = np.log10(monod_df[D.GLU_COL].as_matrix().reshape(200, 200).T)
    #Y = np.log10(monod_df[D.OX_COL].as_matrix().reshape(200, 200).T)
    monod_df = monod_df.join(figdata, on='best_efm', rsuffix='_')

    # Plot in grid-index coordinates; tick labels below restore the
    # concentration values.
    X = np.arange(0, axis_params[D.GLU_COL]['N'])
    Y = np.arange(0, axis_params[D.OX_COL]['N'])
    X, Y = np.meshgrid(X, Y)

    # create matrix-style DataFrames for the growth rate and oxygen uptake rate
    z_mat = monod_df.pivot(index=D.GLU_COL, columns=D.OX_COL, values=z).T.as_matrix()

    cmap = cmap or cm.magma_r

    if z == c:  # make a standard surface plot with gridlines and big strides
        vmax = vmax or z_mat.max().max()
        ax.plot_surface(X, Y, z_mat, rstride=1, cstride=1, cmap=cmap,
                        antialiased=False, rasterized=True,
                        linewidth=0, vmin=0, vmax=vmax, shade=False)
    else:  # use a different matrix for the color coding of the surface
        c_mat = monod_df.pivot(index=D.GLU_COL, columns=D.OX_COL, values=c).T.as_matrix()
        vmax = vmax or c_mat.max().max()
        # Map the color-matrix values through the colormap per grid cell.
        c_colors = np.empty((X.shape[1], X.shape[0], 4), dtype=float)
        for ox in range(X.shape[1]):
            for gl in range(X.shape[0]):
                c_colors[ox, gl, :] = cmap(c_mat[ox, gl] / vmax)
        ax.plot_surface(X, Y, z_mat, facecolors=c_colors,
                        antialiased=False, rasterized=True,
                        rstride=1, cstride=1, linewidth=0, shade=False)
        # Attach a colorbar driven by the same normalization.
        sm = cm.ScalarMappable(cmap=cmap, norm=pyplot.Normalize(vmin=0, vmax=vmax))
        sm._A = []
        pyplot.colorbar(sm, ax=ax, fraction=0.07, shrink=0.5, label=c)
        # Light wireframe overlay to make the surface shape readable.
        ax.plot_wireframe(X, Y, z_mat, rstride=6, cstride=6, linewidth=0.2,
                          edgecolor='k', alpha=0.3)

    ax.set_xticks(axis_params[D.GLU_COL]['xticks'])
    ax.set_xticklabels(axis_params[D.GLU_COL]['xticklabels'])
    ax.set_yticks(axis_params[D.OX_COL]['xticks'])
    ax.set_yticklabels(axis_params[D.OX_COL]['xticklabels'])
    ax.set_xlabel(D.GLU_COL)
    ax.set_ylabel(D.OX_COL)
    ax.set_zlabel(z, rotation=90)
    ax.view_init(20, -120)
def plot_surface_diff(ax, ko_cache_fname, wt_cache_fname='sweep2d_win_200x200.csv'):
    """Overlay knock-out growth-rate surface (filled) on wild-type (wireframe).

    :param ax: a 3D matplotlib axes (projection='3d')
    :param ko_cache_fname: cached 2D sweep CSV of the knock-out condition
    :param wt_cache_fname: cached 2D sweep CSV of the wild type
    """
    monod_df, axis_params = get_monod_data(wt_cache_fname)
    wt_gr_mat = monod_df.pivot(index=D.GLU_COL, columns=D.OX_COL,
                               values=D.GROWTH_RATE_L).T.as_matrix()
    # axis_params from the KO sweep is reused below; assumes both caches
    # share the same grid — TODO confirm
    monod_df, axis_params = get_monod_data(ko_cache_fname)
    ko_gr_mat = monod_df.pivot(index=D.GLU_COL, columns=D.OX_COL,
                               values=D.GROWTH_RATE_L).T.as_matrix()
    X = np.arange(0, axis_params[D.GLU_COL]['N'])
    Y = np.arange(0, axis_params[D.OX_COL]['N'])
    X, Y = np.meshgrid(X, Y)
    ax.plot_surface(X, Y, ko_gr_mat,
                    rstride=6, cstride=6, cmap='Oranges',
                    linewidth=0.25, edgecolors='r',
                    vmin=0, vmax=0.7)
    ax.plot_wireframe(X, Y, wt_gr_mat,
                      rstride=6, cstride=6, linewidth=0.5,
                      colors=(0.1, 0.1, 0.6), alpha=1)
    ax.set_xticks(axis_params[D.GLU_COL]['xticks'])
    ax.set_xticklabels(axis_params[D.GLU_COL]['xticklabels'])
    ax.set_yticks(axis_params[D.OX_COL]['xticks'])
    ax.set_yticklabels(axis_params[D.OX_COL]['xticklabels'])
    ax.set_xlabel(D.GLU_COL)
    ax.set_ylabel(D.OX_COL)
    ax.set_zlabel(D.GROWTH_RATE_L, rotation=90)
    ax.view_init(20, -120)
def plot_heatmap_diff(ax, cache_fname1, cache_fname2, vmax=1):
    """Heatmap of the log2 growth-rate ratio between two cached 2D sweeps.

    :param ax: matplotlib axes to draw on
    :param cache_fname1: numerator sweep cache CSV
    :param cache_fname2: denominator sweep cache CSV
    :param vmax: symmetric color limit in log2 units
    """
    monod1_df, wt_axis_params = get_monod_data(cache_fname1)
    gr1_mat = monod1_df.pivot(index=D.GLU_COL, columns=D.OX_COL, values=D.GROWTH_RATE_L).T
    monod2_df, ko_axis_params = get_monod_data(cache_fname2)
    gr2_mat = monod2_df.pivot(index=D.GLU_COL, columns=D.OX_COL, values=D.GROWTH_RATE_L).T
    # log2 fold change; diverging colormap centered at 0
    pcol = ax.imshow(np.log2(gr1_mat) - np.log2(gr2_mat),
                     interpolation='none', cmap='bwr', vmin=-vmax, vmax=vmax,
                     origin='lower', aspect=1)
    pyplot.colorbar(pcol, ax=ax, label=r'log$_2$ fold change',
                    fraction=0.1)
    ax.set_xlabel(D.GLU_COL)
    ax.set_ylabel(D.OX_COL)
    # ticks come from the first sweep; assumes both caches share the grid
    ax.set_xticks(wt_axis_params[D.GLU_COL]['xticks'])
    ax.set_xticklabels(wt_axis_params[D.GLU_COL]['xticklabels'])
    ax.set_yticks(wt_axis_params[D.OX_COL]['xticks'])
    ax.set_yticklabels(wt_axis_params[D.OX_COL]['xticklabels'])
def plot_heatmap(ax, wt_cache_fname='sweep2d_win_200x200.csv', vmax=None):
    """Heatmap of the wild-type growth rate over the glucose/oxygen sweep.

    :param ax: matplotlib axes to draw on
    :param wt_cache_fname: cached 2D sweep CSV (in D.TEMP_DIR)
    :param vmax: upper limit of the color scale (None = auto)
    """
    wt_monod_df, wt_axis_params = get_monod_data(wt_cache_fname)
    # pivot into an (oxygen x glucose) matrix for imshow
    wt_gr_mat = wt_monod_df.pivot(index=D.GLU_COL,
                                  columns=D.OX_COL, values=D.GROWTH_RATE_L).T
    pcol = ax.imshow(wt_gr_mat,
                     interpolation='none', cmap='magma_r', vmin=0, vmax=vmax,
                     origin='lower', aspect=1)
    # BUGFIX: label used 'h$^-1$', which renders the exponent incorrectly in
    # LaTeX; use 'h$^{-1}$' consistently with the rest of this module
    pyplot.colorbar(pcol, ax=ax, label=r'growth rate [h$^{-1}$]',
                    fraction=0.1)
    ax.set_xlabel(D.GLU_COL)
    ax.set_ylabel(D.OX_COL)
    ax.set_xticks(wt_axis_params[D.GLU_COL]['xticks'])
    ax.set_xticklabels(wt_axis_params[D.GLU_COL]['xticklabels'])
    ax.set_yticks(wt_axis_params[D.OX_COL]['xticks'])
    ax.set_yticklabels(wt_axis_params[D.OX_COL]['xticklabels'])
def get_monod_data(sweep_cache_fname='sweep2d_win_200x200.csv'):
    """Load the cached 2D (glucose x oxygen) sweep and per-axis metadata.

    Returns a tuple ``(monod_df, axis_params)``:
      * monod_df - the raw sweep DataFrame with an extra 'hexcolor' column
        (color of the winning EFM at each grid point)
      * axis_params - dict mapping each axis column (D.GLU_COL / D.OX_COL)
        to its grid size 'N', value 'min'/'max', tick positions 'xticks',
        tick labels 'xticklabels' and 'std_ind', the fractional grid index
        of the standard concentration on the log-scaled axis.
    """
    monod_df = pd.read_csv(os.path.join(D.TEMP_DIR, sweep_cache_fname))
    # construct the bitmap by assigning the color of the winning EFM to each
    # pixel
    monod_df['hexcolor'] = monod_df['best_efm'].apply(D.efm_to_hex)
    standard_conc = {D.GLU_COL: 100.0, D.OX_COL: 3.7}
    # NOTE(review): tick indices assume a 200x200 grid — confirm when using a
    # cache file with a different resolution
    ticks = {D.GLU_COL: [0, 44, 88, 133, 177],
             D.OX_COL: [0, 50, 100, 150, 199]}
    axis_params = {}
    for col in [D.GLU_COL, D.OX_COL]:
        axis_params[col] = {}
        levels = sorted(monod_df[col].unique())
        axis_params[col]['N'] = len(levels)
        axis_params[col]['min'] = monod_df[col].min()
        axis_params[col]['max'] = monod_df[col].max()
        # fractional grid index of the standard concentration (log scale)
        x_std = np.log(standard_conc[col])
        x_min = np.log(monod_df[col].min())
        x_max = np.log(monod_df[col].max())
        axis_params[col]['std_ind'] = len(levels) * (x_std - x_min) / (x_max - x_min)
        axis_params[col]['xticks'] = ticks[col]
        tickvalues = [levels[i] for i in ticks[col]]
        # BUGFIX: this used to store a bare map() object, which in Python 3 is
        # a one-shot iterator — the first set_xticklabels() call consumed it
        # and every later reuse (plot_monod_surface applies the same labels to
        # several axes) silently got no labels. Materialize as a list.
        axis_params[col]['xticklabels'] = [D.as_base10_exp(v) for v in tickvalues]
    return monod_df, axis_params
def plot_monod_surface(figure_data, sweep_cache_fname='sweep2d_win_200x200.csv'):
    """Build the 3x3 supplementary figure (S12) of the 2D Monod sweep.

    Panels: (a) winning-EFM color map, (b) growth-rate heatmap with a shared
    colorbar, a legend panel, and six heatmaps of properties of the winning
    EFM (yield, oxygen/acetate/lactate fluxes, ED and PPP usage).

    :param figure_data: dict of DataFrames; uses figure_data['standard'],
        indexed by EFM, for the per-EFM properties
    :param sweep_cache_fname: cached 2D sweep CSV (in D.TEMP_DIR)
    :return: the matplotlib Figure
    """
    monod_df, axis_params = get_monod_data(sweep_cache_fname)
    max_growth_rate = monod_df[D.GROWTH_RATE_L].max()
    figS12, axS12 = pyplot.subplots(3, 3, figsize=(12, 12))
    cbar_ax = figS12.add_axes([.72, .75, .02, .2])
    # create a bitmap to be used with imshow
    hexcolor_df = monod_df.pivot(index=D.GLU_COL,
                                 columns=D.OX_COL,
                                 values='hexcolor')
    best_efm_color = np.zeros((axis_params[D.OX_COL]['N'],
                               axis_params[D.GLU_COL]['N'], 3))
    for i, g in enumerate(hexcolor_df.index):
        for j, o in enumerate(hexcolor_df.columns):
            best_efm_color[j, i, :] = colors.hex2color(hexcolor_df.at[g, o])
    axS12[0, 0].imshow(best_efm_color, interpolation='none', origin='lower')
    axS12[0, 0].set_xlabel(D.GLU_COL)
    axS12[0, 0].set_ylabel(D.OX_COL)
    axS12[0, 0].set_xticks(axis_params[D.GLU_COL]['xticks'])
    axS12[0, 0].set_xticklabels(axis_params[D.GLU_COL]['xticklabels'])
    axS12[0, 0].set_yticks(axis_params[D.OX_COL]['xticks'])
    axS12[0, 0].set_yticklabels(axis_params[D.OX_COL]['xticklabels'])
    # convert the standard glucose concentration to the imshow coordinates
    # we know that the minimum level is mapped to 0
    # and the maximum level is mapped to N
    # and that it is a logarithmic scale
    axS12[0, 0].plot([axis_params[D.GLU_COL]['std_ind'], axis_params[D.GLU_COL]['std_ind']],
                     [0, axis_params[D.OX_COL]['N']-1],
                     '--', color='grey', linewidth=1)
    axS12[0, 0].plot([0, axis_params[D.GLU_COL]['N']-1],
                     [axis_params[D.OX_COL]['std_ind'], axis_params[D.OX_COL]['std_ind']],
                     '--', color='grey', linewidth=1 )
    # mark the 3 selected EFMs in the Monod surface plot
    # NOTE(review): annotation positions are hand-tuned axes fractions for the
    # 200x200 standard sweep — they do not track the data
    axS12[0, 0].annotate('max-gr', xy=(0.5, 0.8),
                         xycoords='axes fraction', ha='left', va='top',
                         size=14, color='k')
    axS12[0, 0].annotate('pareto', xy=(0.1, 0.4),
                         xycoords='axes fraction', ha='left', va='top',
                         size=14, color='k')
    axS12[0, 0].annotate('ana-lac', xy=(0.73, 0.1),
                         xycoords='axes fraction', ha='left', va='top',
                         size=14, color='k')
    axS12[0, 0].annotate('aero-ace', xy=(0.82, 0.29),
                         xycoords='axes fraction', ha='left', va='top',
                         size=14, color='k')
    # panel (b): growth rate of the winning EFM as a smooth heatmap
    best_efm_gr_df = monod_df.pivot(index=D.GLU_COL,
                                    columns=D.OX_COL,
                                    values=D.GROWTH_RATE_L)
    axS12[0, 1].set_xlabel(best_efm_gr_df.index.name)
    axS12[0, 1].set_xticks(axis_params[D.GLU_COL]['xticks'])
    axS12[0, 1].set_xticklabels(axis_params[D.GLU_COL]['xticklabels'])
    axS12[0, 1].get_yaxis().set_visible(False)
    pcol = axS12[0, 1].imshow(best_efm_gr_df.T, interpolation='spline16',
                              cmap='Oranges', vmin=0,
                              vmax=max_growth_rate, origin='lower')
    norm = colors.Normalize(vmin=0, vmax=max_growth_rate)
    colorbar.ColorbarBase(cbar_ax, cmap='Oranges', norm=norm)
    cbar_ax.set_title(D.GROWTH_RATE_L, loc='center')
    # legend panel: one color swatch per EFM that wins anywhere in the sweep;
    # the lines are drawn outside the visible x/y limits on purpose
    for i, efm in enumerate(sorted(monod_df['best_efm'].unique())):
        if efm in D.efm_dict:
            label = 'EFM %04d (%s)' % (efm, D.efm_dict[efm]['label'])
        else:
            label = 'EFM %04d' % efm
        axS12[0, 2].plot([0, 1], [i, i],
                         label=label, color=D.efm_to_hex(efm), linewidth=3)
    axS12[0, 2].set_xlim(-1, 0)
    axS12[0, 2].set_ylim(-1, 0)
    axS12[0, 2].get_xaxis().set_visible(False)
    axS12[0, 2].get_yaxis().set_visible(False)
    axS12[0, 2].legend(fontsize=10, labelspacing=0.1, loc='center right')
    axS12[0, 2].axis('off')
    # make a Monod surface plot where certain features of the winning EFMs
    # are presented in color coding
    plot_parameters = [
        {'c': D.YIELD_L,   'cmap': 'magma_r', 'vmin': 0, 'vmax': 30 , 'ax': axS12[1, 0]},
        {'c': D.OXYGEN_L,  'cmap': 'magma_r', 'vmin': 0, 'vmax': 0.7, 'ax': axS12[1, 1]},
        {'c': D.ACE_L,     'cmap': 'magma_r', 'vmin': 0, 'vmax': 1.5, 'ax': axS12[1, 2]},
        {'c': D.LACTATE_L, 'cmap': 'magma_r', 'vmin': 0, 'vmax': 1.5, 'ax': axS12[2, 0]},
        {'c': D.ED_L,      'cmap': 'magma_r', 'vmin': 0, 'vmax': 2  , 'ax': axS12[2, 1]},
        {'c': D.PPP_L,     'cmap': 'magma_r', 'vmin': 0, 'vmax': 4.5, 'ax': axS12[2, 2]},
        ]
    pareto_data_df = figure_data['standard']
    for i, d in enumerate(plot_parameters):
        ax = d['ax']
        ax.set_title(d['c'])
        df = monod_df.join(pareto_data_df[d['c']], on='best_efm')
        df = df.pivot(index=D.GLU_COL,
                      columns=D.OX_COL,
                      values=d['c'])
        ax.set_xlabel(df.index.name)
        ax.set_ylabel(df.columns.name)
        pcol = ax.imshow(df.T, interpolation='none', cmap=d['cmap'],
                         origin='lower', vmin=d['vmin'], vmax=d['vmax'])
        pyplot.colorbar(pcol, ax=ax, shrink=0.6)
        # since the plot is made in a linear scale, we need to "manually" convert
        # the ticks to the log-scale using the index and columns of 'df'
        ax.set_xticks(axis_params[D.GLU_COL]['xticks'])
        ax.set_xticklabels(axis_params[D.GLU_COL]['xticklabels'])
        ax.set_yticks(axis_params[D.OX_COL]['xticks'])
        ax.set_yticklabels(axis_params[D.OX_COL]['xticklabels'])
    # panel letters: (a) and (b) are set explicitly, then the loop letters the
    # remaining panels (skipping the legend at index 2)
    axS12[0, 0].annotate('a', xy=(0.02, 0.98),
                         xycoords='axes fraction', ha='left', va='top',
                         size=20, color='white')
    axS12[0, 1].annotate('b', xy=(0.02, 0.98),
                         xycoords='axes fraction', ha='left', va='top',
                         size=20, color='black')
    for i, ax in enumerate(axS12.flat):
        if i == 2:
            continue
        if i in [0, 3, 4]:
            color = 'w'
        else:
            color = 'k'
        letter = chr(ord('a') + (i if i < 2 else i-1))
        ax.annotate(letter, xy=(0.02, 0.98),
                    xycoords='axes fraction', ha='left', va='top',
                    size=20, color=color)
    axS12[1, 1].get_yaxis().set_visible(False)
    axS12[1, 2].get_yaxis().set_visible(False)
    axS12[2, 1].get_yaxis().set_visible(False)
    axS12[2, 2].get_yaxis().set_visible(False)
    return figS12
def plot_conc_versus_uptake_figure(figure_data,
                                   sweep_cache_fname='sweep2d_win_200x200.csv'):
    """
    Compare concentration-based and uptake-rate-based views of the sweep
    (in order to compare to FBA predictions): join the Monod surface data
    with the EFM rates table to get specific uptake rates for each winning
    EFM, then draw a 2x2 figure — (a) winning-EFM map, (b) growth-rate
    surface vs. concentrations, (c) 2D and (d) 3D scatter of growth rate
    vs. uptake rates. Returns the Figure.
    """
    monod_df, axis_params = get_monod_data(sweep_cache_fname)
    best_efm_hex = monod_df.pivot(index=D.GLU_COL,
                                  columns=D.OX_COL,
                                  values='hexcolor')
    # RGB bitmap of the winning EFM at each grid point (rows = oxygen axis)
    best_efm_color = np.zeros((best_efm_hex.shape[1], best_efm_hex.shape[0], 3))
    for i, g in enumerate(best_efm_hex.index):
        for j, o in enumerate(best_efm_hex.columns):
            hexcolor = best_efm_hex.at[g, o]
            best_efm_color[j, i, :] = colors.hex2color(hexcolor)
    fig = pyplot.figure(figsize=(8, 8))
    ax_list = []
    ##################### Monod surface plot of winning EFMs ##################
    ax = fig.add_subplot(2, 2, 1)
    ax_list.append(ax)
    ax.imshow(best_efm_color, interpolation='none', origin='lower')
    ax.set_xticks(axis_params[D.GLU_COL]['xticks'])
    ax.set_xticklabels(axis_params[D.GLU_COL]['xticklabels'])
    ax.set_yticks(axis_params[D.OX_COL]['xticks'])
    ax.set_yticklabels(axis_params[D.OX_COL]['xticklabels'])
    ax.set_xlabel(D.GLU_COL)
    ax.set_ylabel(D.OX_COL)
    ################### growth rate surface plot vs concentrations ############
    ax = fig.add_subplot(2, 2, 2, projection='3d', facecolor='white')
    ax_list.append(ax)
    X = np.arange(0, axis_params[D.GLU_COL]['N'])
    Y = np.arange(0, axis_params[D.OX_COL]['N'])
    X, Y = np.meshgrid(X, Y)
    # NOTE(review): as_matrix() was removed in pandas 1.0 — use .to_numpy()
    # when upgrading pandas
    z_mat = monod_df.pivot(index=D.GLU_COL, columns=D.OX_COL, values=D.GROWTH_RATE_L).T.as_matrix()
    ax.plot_surface(X, Y, z_mat, facecolors=best_efm_color,
                    rstride=1, cstride=1,
                    antialiased=False, rasterized=True,
                    linewidth=0, shade=False)
    ax.plot_wireframe(X, Y, z_mat, rstride=6, cstride=6, linewidth=0.2,
                      edgecolor='k', alpha=0.3)
    ax.view_init(20, -120)
    ax.set_xticks(axis_params[D.GLU_COL]['xticks'])
    ax.set_xticklabels(axis_params[D.GLU_COL]['xticklabels'])
    ax.set_yticks(axis_params[D.OX_COL]['xticks'])
    ax.set_yticklabels(axis_params[D.OX_COL]['xticklabels'])
    ax.set_xlabel(D.GLU_COL)
    ax.set_ylabel(D.OX_COL)
    ax.set_zlabel(D.GROWTH_RATE_L, rotation=90)
    ###########################################################################
    OX_UPTAKE_L = 'O$_2$ uptake rate (a.u.)'
    GLU_UPRATE_L = 'glucose uptake rate (a.u.)'
    rates_df, _, _ = SweepInterpolator.get_general_params(min(ITER_RANGE))
    monod_df = monod_df.join(rates_df, on='best_efm')
    # uptake rate = (per-biomass flux) * growth rate; rounded so that
    # drop_duplicates below collapses near-identical grid points
    monod_df[OX_UPTAKE_L] = monod_df[D.R_OXYGEN_DEPENDENT].sum(1) * monod_df[D.GROWTH_RATE_L]
    monod_df[OX_UPTAKE_L] = monod_df[OX_UPTAKE_L].round(0)
    monod_df[GLU_UPRATE_L] = monod_df[D.R_GLUCOSE_IN] * monod_df[D.GROWTH_RATE_L]
    monod_df[GLU_UPRATE_L] = monod_df[GLU_UPRATE_L].round(0)
    monod_df[D.GROWTH_RATE_L] = monod_df[D.GROWTH_RATE_L].round(3)
    small_monod_df = monod_df[[GLU_UPRATE_L, OX_UPTAKE_L, D.GROWTH_RATE_L, 'hexcolor']].drop_duplicates()
    small_monod_df.sort_values(D.GROWTH_RATE_L, inplace=True)
    ########## 2D scatter plot of uptake rates (winning EFM as color) #########
    ax = fig.add_subplot(2, 2, 3)
    ax_list.append(ax)
    ax.scatter(x=small_monod_df[GLU_UPRATE_L],
               y=small_monod_df[OX_UPTAKE_L],
               s=15, c=small_monod_df['hexcolor'],
               linewidth=0)
    ax.set_xlabel(GLU_UPRATE_L)
    ax.set_ylabel(OX_UPTAKE_L)
    ############## 3D scatter plot of growth rate vs uptake rates #############
    ax = fig.add_subplot(2, 2, 4, projection='3d')
    ax_list.append(ax)
    ax.scatter(xs=small_monod_df[GLU_UPRATE_L],
               ys=small_monod_df[OX_UPTAKE_L],
               zs=small_monod_df[D.GROWTH_RATE_L],
               s=15, c=small_monod_df['hexcolor'],
               cmap='Oranges', vmax=0.7, linewidth=0,
               alpha=1)
    ax.set_xlabel(GLU_UPRATE_L, labelpad=10)
    ax.set_ylabel(OX_UPTAKE_L, labelpad=10)
    ax.set_zlabel(D.GROWTH_RATE_L, labelpad=10)
    ax.view_init(20, -120)
    # panel letters a-d
    for i, ax in enumerate(ax_list):
        ax.annotate(chr(ord('a')+i), xy=(0.98, 0.98), xycoords='axes fraction',
                    fontsize=20, ha='right', va='top')
    return fig
def plot_oxygen_sweep(ax, glucose_conc=None, N=200,
                      legend_loc='lower right', legend_fontsize=10):
    """Make line plots of growth rate vs. oxygen at a fixed glucose level.

    One line per EFM in D.efm_dict, evaluated on an N-point log-spaced
    oxygen grid via the interpolated 2D sweep. A dashed vertical line marks
    the standard oxygen concentration.
    """
    if glucose_conc is None:
        glucose_conc = D.STD_CONC['glucoseExt']
    ox_grid = np.logspace(np.log10(D.MIN_CONC['oxygen']),
                          np.log10(D.MAX_CONC['oxygen']),
                          N)
    interp_data_df = pd.DataFrame(index=ox_grid, columns=D.efm_dict.keys())
    interpolator = SweepInterpolator.interpolate_2D_sweep(D.efm_dict.keys())
    for efm in interp_data_df.columns:
        interp_data_df[efm] = [interpolator.calc_gr(efm, glucose_conc, o)
                               for o in ox_grid]
    # assumes each value of D.efm_dict unpacks to a (color, label) pair —
    # TODO confirm (elsewhere the values are accessed like dicts)
    colors, labels = zip(*D.efm_dict.values())
    interp_data_df.plot(kind='line', ax=ax, linewidth=2, color=colors)
    ax.legend(labels,
              loc=legend_loc, fontsize=legend_fontsize, labelspacing=0.2)
    ax.set_xscale('log')
    ax.set_xlabel(D.OX_COL)
    ax.set_ylabel(r'growth rate [h$^{-1}$]')
    ax.set_ylim([0, None])
    # mark the line where 'standard' oxygen levels are
    std_ox = D.STD_CONC['oxygen']
    ax.plot([std_ox, std_ox], ax.get_ylim(), '--', color='grey', linewidth=1)
    ax.text(std_ox, ax.get_ylim()[1], ' std. $O_2$', ha='center', va='bottom',
            color='grey', fontsize=14)
    ax.text(0.02, 0.6, 'glucose (%d mM)' % glucose_conc, ha='left', va='center',
            rotation=90, fontsize=14, color='grey', transform=ax.transAxes)
def plot_glucose_sweep(ax, oxygen_conc=None, N=200, ylim=None,
                       legend_loc='upper left', legend_fontsize=10,
                       mark_glucose=True):
    """Make line plots of growth rate vs. glucose at a fixed oxygen level.

    One line per EFM in D.efm_dict, evaluated on an N-point log-spaced
    glucose grid via the interpolated 2D sweep. Pass legend_loc=None to
    suppress the legend; mark_glucose draws a dashed line at the standard
    glucose concentration.
    """
    if oxygen_conc is None:
        oxygen_conc = D.STD_CONC['oxygen']
    glu_grid = np.logspace(np.log10(D.MIN_CONC['glucoseExt']),
                           np.log10(D.MAX_CONC['glucoseExt']),
                           N)
    interp_data_df = pd.DataFrame(index=glu_grid, columns=D.efm_dict.keys())
    interpolator = SweepInterpolator.interpolate_2D_sweep(D.efm_dict.keys())
    for efm in interp_data_df.columns:
        interp_data_df[efm] = [interpolator.calc_gr(efm, g, oxygen_conc)
                               for g in glu_grid]
    # assumes each value of D.efm_dict unpacks to a (color, label) pair —
    # TODO confirm (elsewhere the values are accessed like dicts)
    colors, labels = zip(*D.efm_dict.values())
    interp_data_df.plot(kind='line', ax=ax, linewidth=2, color=colors)
    if legend_loc is not None:
        ax.legend(labels,
                  loc=legend_loc, fontsize=legend_fontsize, labelspacing=0.2)
    else:
        ax.legend().remove()
    ax.set_xscale('log')
    ax.set_xlabel(D.GLU_COL)
    ax.set_ylabel(r'growth rate [h$^{-1}$]')
    if ylim is None:
        ax.set_ylim([0, None])
    else:
        ax.set_ylim(ylim)
    if mark_glucose:
        # mark the line where 'standard' glucose levels are
        std_ox = D.STD_CONC['glucoseExt']
        ax.plot([std_ox, std_ox], ax.get_ylim(), '--', color='grey', linewidth=1)
        ax.text(std_ox, ax.get_ylim()[1], ' std. glucose', ha='center', va='bottom',
                color='grey', fontsize=14)
        ax.text(0.02, 0.6, '$O_2$ (%g mM)' % oxygen_conc, ha='left', va='center',
                rotation=90, fontsize=14, color='grey', transform=ax.transAxes)
def get_glucose_sweep_df(oxygen_conc=None, efm_list=None, N=200):
    """Interpolated growth rates on an N-point log-spaced glucose grid.

    Returns a DataFrame indexed by glucose concentration with one column
    per EFM (all EFMs of the interpolator when *efm_list* is None),
    evaluated at a fixed oxygen concentration (default: standard).
    """
    if oxygen_conc is None:
        oxygen_conc = D.STD_CONC['oxygen']
    conc_grid = np.logspace(np.log10(D.MIN_CONC['glucoseExt']),
                            np.log10(D.MAX_CONC['glucoseExt']),
                            N)
    interpolator = SweepInterpolator.interpolate_2D_sweep(efm_list)
    gr_per_efm = {
        efm: [interpolator.calc_gr(efm, conc, oxygen_conc)
              for conc in conc_grid]
        for efm in interpolator.efm_list
    }
    return pd.DataFrame(gr_per_efm, index=conc_grid,
                        columns=interpolator.efm_list)
def get_anaerobic_glucose_sweep_df(figure_data, N=200):
    """Interpolate the anaerobic glucose-sweep growth rates onto a fine grid.

    Takes figure_data['monod_glucose_anae'] (per-EFM growth rates at the
    original sweep concentrations; row 9999 is dropped), masks
    non-monotonic entries, then polynomial-interpolates in log-concentration
    space onto an N-point log-spaced glucose grid. Returns a DataFrame
    indexed by glucose concentration (original + new grid points merged).
    """
    anaerobic_sweep_data_df = figure_data['monod_glucose_anae'].drop(9999)
    # filter all EMFs that have a > 1% drop in the function (it should be
    # completely monotonic, but some numerical errors should be okay).
    # NOTE(review): the mask below flags ANY negative log-difference, not
    # only drops larger than 1% — comment and code disagree; confirm intent
    non_monotinic = (np.log(anaerobic_sweep_data_df).diff(axis=1) < 0)
    anaerobic_sweep_data_df[non_monotinic] = np.nan
    glu_grid = np.logspace(np.log10(D.MIN_CONC['glucoseExt']),
                           np.log10(D.MAX_CONC['glucoseExt']),
                           N)
    # merge the original sample points with the fine grid (NaN rows), then
    # interpolate in log-x space to fill the grid rows
    interp_df = anaerobic_sweep_data_df.transpose()
    interp_df = interp_df.append(
        pd.DataFrame(index=glu_grid, columns=anaerobic_sweep_data_df.index))
    interp_df = interp_df[~interp_df.index.duplicated(keep='first')]
    interp_df.sort_index(inplace=True)
    interp_df.index = np.log(interp_df.index)
    interpolated_df = interp_df.interpolate(method='polynomial', order=3)
    interpolated_df.index = np.exp(interpolated_df.index)
    return interpolated_df
def plot_oxygen_dual_pareto(data_df, ax, s=9,
                            std_ox=None, low_ox=None, std_glu=None,
                            draw_lines=True):
    """Dual growth-rate/yield Pareto plot: standard vs. low oxygen.

    Yields are taken directly from *data_df* (indexed by EFM, must contain
    a D.YIELD_L column); growth rates for both oxygen levels are recomputed
    from the interpolated 2D sweep at the standard glucose concentration.

    :param s: marker size forwarded to D.plot_dual_pareto
    :param std_ox: standard O2 conc. [mM] (default D.STD_CONC['oxygen'])
    :param low_ox: low O2 conc. [mM] (default D.LOW_CONC['oxygen'])
    :param std_glu: glucose conc. [mM] (default D.STD_CONC['glucoseExt'])
    """
    # use explicit None checks so a (legitimate, if unusual) 0.0 argument is
    # not silently replaced by the default, as `x or default` would do
    std_ox = D.STD_CONC['oxygen'] if std_ox is None else std_ox
    low_ox = D.LOW_CONC['oxygen'] if low_ox is None else low_ox
    std_glu = D.STD_CONC['glucoseExt'] if std_glu is None else std_glu
    std_ox_df = pd.DataFrame(index=data_df.index,
                             columns=[D.GROWTH_RATE_L, D.YIELD_L])
    std_ox_df[D.YIELD_L] = data_df[D.YIELD_L]
    low_ox_df = pd.DataFrame(index=data_df.index,
                             columns=[D.GROWTH_RATE_L, D.YIELD_L])
    low_ox_df[D.YIELD_L] = data_df[D.YIELD_L]
    # calculate the growth rates in the lower oxygen level, using the
    # interpolated functions
    interpolator = SweepInterpolator.interpolate_2D_sweep()
    for efm in data_df.index:
        std_ox_df.at[efm, D.GROWTH_RATE_L] = \
            interpolator.calc_gr(efm, std_glu, std_ox)
        low_ox_df.at[efm, D.GROWTH_RATE_L] = \
            interpolator.calc_gr(efm, std_glu, low_ox)
    # BUGFIX: the "standard" legend label used to hard-code '0.21 mM' even
    # when a different std_ox was passed in; format the actual value, as is
    # already done for the low-oxygen label
    D.plot_dual_pareto(std_ox_df, 'std. O$_2$ (%g mM)' % std_ox,
                       low_ox_df, 'low O$_2$ (%g mM)' % low_ox,
                       s=s, ax=ax, x=D.YIELD_L, y=D.GROWTH_RATE_L,
                       draw_lines=draw_lines)
    ax.set_xlim(-1e-3, None)
    ax.set_ylim(-1e-3, None)
def plot_glucose_dual_pareto(data_df, ax,
                             std_glu=None, low_glu=None, std_ox=None,
                             draw_lines=True, s=9):
    """Dual growth-rate/yield Pareto plot: standard vs. low glucose.

    Yields are taken directly from *data_df* (indexed by EFM, must contain
    a D.YIELD_L column); growth rates for both glucose levels are recomputed
    from the interpolated 2D sweep at the standard oxygen concentration.

    :param std_glu: standard glucose conc. [mM] (default D.STD_CONC['glucoseExt'])
    :param low_glu: low glucose conc. [mM] (default D.LOW_CONC['glucoseExt'])
    :param std_ox: O2 conc. [mM] (default D.STD_CONC['oxygen'])
    :param s: marker size forwarded to D.plot_dual_pareto (new parameter,
        default matches the previously hard-coded value; mirrors
        plot_oxygen_dual_pareto)
    """
    # explicit None checks so an explicit 0.0 is not replaced by the default
    std_glu = D.STD_CONC['glucoseExt'] if std_glu is None else std_glu
    low_glu = D.LOW_CONC['glucoseExt'] if low_glu is None else low_glu
    std_ox = D.STD_CONC['oxygen'] if std_ox is None else std_ox
    std_glu_df = pd.DataFrame(index=data_df.index,
                              columns=[D.GROWTH_RATE_L, D.YIELD_L])
    std_glu_df[D.YIELD_L] = data_df[D.YIELD_L]
    low_glu_df = pd.DataFrame(index=data_df.index,
                              columns=[D.GROWTH_RATE_L, D.YIELD_L])
    low_glu_df[D.YIELD_L] = data_df[D.YIELD_L]
    # calculate the growth rates in the lower glucose level, using the
    # interpolated functions
    interpolator = SweepInterpolator.interpolate_2D_sweep()
    for efm in data_df.index:
        std_glu_df.at[efm, D.GROWTH_RATE_L] = \
            interpolator.calc_gr(efm, std_glu, std_ox)
        low_glu_df.at[efm, D.GROWTH_RATE_L] = \
            interpolator.calc_gr(efm, low_glu, std_ox)
    # BUGFIX: the "standard" legend label used to hard-code '100 mM' even when
    # a different std_glu was passed in; format the actual value instead
    D.plot_dual_pareto(std_glu_df, 'std. glucose (%g mM)' % std_glu,
                       low_glu_df, 'low glucose (%g mM)' % low_glu,
                       s=s, ax=ax, x=D.YIELD_L, y=D.GROWTH_RATE_L,
                       draw_lines=draw_lines)
    ax.set_xlim(-1e-3, None)
    ax.set_ylim(-1e-3, None)
if __name__ == '__main__':
    # Script entry point: draw the oxygen sweep line plot next to the dual
    # Pareto comparison at standard vs. low oxygen.
    figure_data = D.get_figure_data()
    # global matplotlib styling for all panels
    rcParams['font.size'] = 12.0
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = 'Arial'
    rcParams['legend.fontsize'] = 'small'
    rcParams['axes.labelsize'] = 12.0
    rcParams['axes.titlesize'] = 12.0
    rcParams['xtick.labelsize'] = 10.0
    rcParams['ytick.labelsize'] = 10.0
    # run this script in order to calculate the extrapolated growth rates for
    # all the 200x200 grid and cache the results in a temp file for quick
    # access for the scripts that plot the data
    fig = pyplot.figure(figsize=(10, 5))
    ax = fig.add_subplot(1, 2, 1)
    plot_oxygen_sweep(ax)
    ax = fig.add_subplot(1, 2, 2)
    plot_oxygen_dual_pareto(figure_data['standard'], ax)
|
liebermeister/flux-enzyme-cost-minimization
|
scripts/monod_surface.py
|
Python
|
gpl-2.0
| 36,182
|
from datetime import datetime
import time
import pytest
from scheduler_service.models import Task, URLDetail
from tests.const import task_url
async def test_task(user):
    """Create a Task row via the ORM, read it back by name, then delete it."""
    await Task.objects.create(name="test",
                              start_time=datetime.now(),
                              user_id=user.id,
                              cookies={"test": "test"})
    # fetch by name to prove the row was persisted before cleaning up
    task = await Task.objects.get(name="test")
    await task.delete()
async def test_http_task(test_cli, headers):
    """Create a task through the HTTP API, then list tasks; both must be 200."""
    resp = await test_cli.post(task_url,
                               headers=headers,
                               json={
                                   "name": "test",
                                   "interval": 1000,
                                   "start_time": time.time()
                               })
    assert resp.status == 200
    # listing endpoint should also succeed after creation
    resp = await test_cli.get(task_url, headers=headers)
    assert resp.status == 200
|
moonlitlaputa/scheduler-service
|
tests/test_task.py
|
Python
|
bsd-2-clause
| 942
|
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
# Create your models here.
from django.contrib import admin
# Role of the person submitting an Observation, stored as Django
# (value, label) choice pairs for Observation.submitter.
SUBMITTER = (('parent', 'parent'),
             ('faculty','faculty'),
             ('expert', 'expert'),
             ('child', 'child')
             )
from users.models import Learner
class Observation(models.Model):
    """A single observation about a child, optionally with an image.

    Submitted by a parent/faculty/expert/child (see SUBMITTER choices).
    """
    instance_id = models.CharField(max_length=100)
    submission_date = models.DateField()
    image = models.ImageField(blank=True, null=True)
    observation = models.TextField(blank=True, null=True)
    # NOTE(review): no on_delete specified — required from Django 2.0 on;
    # confirm intended behavior (CASCADE was the pre-2.0 default)
    child = models.ForeignKey(User, null=True)
    submitter = models.CharField(max_length=10, choices=SUBMITTER, blank=True, null=True)

    def __str__(self):
        # BUGFIX: `observation` is nullable — returning None from __str__
        # raises TypeError (e.g. in the admin). Fall back to empty string.
        return self.observation or ''
class ChildSheet(models.Model):
    """One-to-one mapping from a learner to their sheet/folder codes."""
    sheetcode = models.CharField(max_length=200)
    foldercode = models.CharField(max_length=200)
    learner = models.OneToOneField(Learner)

    def __str__(self):
        full_name = self.learner.user.get_full_name()
        return "%s - Sheet: %s" % (full_name, self.sheetcode)
|
pankajlal/prabandh
|
obspost/models.py
|
Python
|
apache-2.0
| 1,110
|
def print_board(board):
    # Print the 3x3 board (Python 2 print statements).
    # Cell encoding: 1 -> 'X', 0 -> 'O', -1 -> empty; any other value is
    # printed as value-1 (used by print_instruction to show cell numbers 1-9).
    print "The board look like this: \n"
    for i in range(3):
        print " ",
        for j in range(3):
            if board[i*3+j] == 1:
                print 'X',
            elif board[i*3+j] == 0:
                print 'O',
            elif board[i*3+j] != -1:
                print board[i*3+j]-1,
            else:
                print ' ',
            if j != 2:
                print " | ",
        print
        if i != 2:
            print "-----------------"
        else:
            print
def print_instruction():
    # Show the cell-numbering help: board values 2..10 print as 1..9
    # (print_board subtracts 1 from any value other than 1/0/-1).
    print "Please use the following cell numbers to make your move"
    print_board([2,3,4,5,6,7,8,9,10])
def get_input(turn):
    # Prompt until the user enters a number in 1..9; return the 0-based cell
    # index. If int() fails, `user` is still the raw string, so the error
    # message concatenation below is safe.
    valid = False
    while not valid:
        try:
            user = raw_input("Where would you like to place " + turn + " (1-9)? ")
            user = int(user)
            if user >= 1 and user <= 9:
                return user-1
            else:
                print "That is not a valid move! Please try again.\n"
                print_instruction()
        except Exception as e:
            print user + " is not a valid move! Please try again.\n"
def check_win(board):
    """Return the winning mark (1 for X, 0 for O) or -1 if nobody has won.

    Cells are indexed 0-8; the winning lines below are written 1-based and
    shifted when indexing. Out-of-range accesses (short boards) are swallowed,
    matching the original behavior.
    """
    lines = ((1, 2, 3), (4, 5, 6), (7, 8, 9),
             (1, 4, 7), (2, 5, 8), (3, 6, 9),
             (1, 5, 9), (3, 5, 7))
    for a, b, c in lines:
        try:
            if board[a - 1] == board[b - 1] == board[c - 1]:
                return board[a - 1]
        except:
            pass
    return -1
def quit_game(board,msg):
    # Show the final board and message, then terminate the interpreter.
    print_board(board)
    print msg
    quit()
def main():
    # Interactive two-player tic-tac-toe loop:
    # setup game
    # alternate turns
    # check if win or end
    # quit and show the board
    print_instruction()
    # board cells: -1 = empty, 1 = X, 0 = O
    board = []
    for i in range(9):
        board.append(-1)
    win = False
    move = 0
    while not win:
        # print board
        print_board(board)
        print "Turn number " + str(move+1)
        if move % 2 == 0:
            turn = 'X'
        else:
            turn = 'O'
        # get user input
        user = get_input(turn)
        # re-prompt while the chosen cell is already taken
        while board[user] != -1:
            print "Invalid move! Cell already taken. Please try again.\n"
            user = get_input(turn)
        board[user] = 1 if turn == 'X' else 0
        # advance move and check for end game
        move += 1
        # a win is impossible before the 5th move, so skip the check
        if move > 4:
            winner = check_win(board)
            if winner != -1:
                out = "The winner is "
                out += "X" if winner == 1 else "O"
                out += " :)"
                quit_game(board,out)
            elif move == 9:
                quit_game(board,"No winner :(")
# Entry point: run the interactive game only when executed as a script.
if __name__ == "__main__":
    main()
|
motealle/python
|
tictactoe-web.py
|
Python
|
gpl-2.0
| 2,703
|
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from ..models import Message, UserFollowingRelationship
class ServicesTestCase(TestCase):
    """End-to-end view tests: auth, following, message CRUD and static pages.

    setUp creates three users ('test', 'az', 'ti'), three messages (two by
    'test', one by 'ti') and makes 'ti' follow 'test'.
    """
    def setUp(self):
        self.username = 'test'
        self.password = '123'
        self.email = 'tester@abv.bg'
        self.test_user_test = User.objects.create_user(
            self.username,
            self.email,
            self.password)
        self.test_user_az = User.objects.create_user("az", "az@abv.bg", "456")
        self.test_user_ti = User.objects.create_user("ti", "ti@abv.bg", "abv")
        self.message1 = Message.objects.create(
            text="Hello world",
            location="Studentski",
            author=self.test_user_test)
        self.message2 = Message.objects.create(
            text="Ahhh omg!",
            location="Varna",
            author=self.test_user_test)
        self.message3 = Message.objects.create(
            text="Bateeeee",
            location="Varna",
            author=self.test_user_ti)
        UserFollowingRelationship.objects.create(
            follower=self.test_user_ti,
            followed=self.test_user_test)

    def test_home(self):
        response = self.client.get('/')
        self.assertEqual(response.status_code, 200)

    def test_register(self):
        self.client.post(
            '/register/',
            {
                'first_name': 'vasko',
                'last_name': 'todorov',
                'username': 'vasko',
                'email': 'vasko@abv.bg',
                'password': 'abvgd',
                'password_check': 'abvgd'
            })
        self.assertTrue(User.objects.filter(username='vasko').exists())
        self.assertFalse(User.objects.filter(username='vasko12').exists())

    def test_login_valid_user(self):
        # BUGFIX: the password was posted under the key 'password:' (note the
        # stray colon), so the login view never received the credential and
        # the test could not exercise a *valid* login as its name claims.
        response = self.client.post(
            '/login/', {'username': 'test', 'password': '123'}
        )
        self.assertEqual(302, response.status_code)

    def test_logout(self):
        self.client.login(username='test', password='123')
        response = self.client.get('/logout/')
        self.assertEqual(302, response.status_code)

    def test_users(self):
        # 'ti' is not followed by 'test', so it must appear only in the
        # not-followed bucket
        self.client.login(username='test', password='123')
        response = self.client.get('/users/', follow=True)
        self.assertTrue(
            self.test_user_ti.username in
            str(response.context['not_followed_users'])
        )
        self.assertTrue(
            self.test_user_ti.username not in
            str(response.context['followed_users'])
        )

    def test_follow_user(self):
        self.client.login(username='test', password='123')
        self.client.get('/follow/az')
        self.assertTrue(UserFollowingRelationship.objects.filter(
            follower=self.test_user_test,
            followed=self.test_user_az).exists())
        self.assertFalse(UserFollowingRelationship.objects.filter(
            follower=self.test_user_test,
            followed=self.test_user_ti).exists())

    def test_unfollow_user(self):
        self.client.login(username="ti", password="abv")
        self.assertTrue(UserFollowingRelationship.objects.filter(
            follower=self.test_user_ti,
            followed=self.test_user_test).exists())
        self.client.get('/unfollow/test')
        self.assertFalse(UserFollowingRelationship.objects.filter(
            follower=self.test_user_ti,
            followed=self.test_user_test).exists())

    def test_messages(self):
        # 'ti' follows 'test' (from setUp): sees test's messages, not his own
        self.client.login(username="ti", password="abv")
        response = self.client.get('/messages/')
        self.assertTrue(
            self.message1 in
            response.context['all_messages_from_followed_users']
        )
        self.assertTrue(
            self.message2 in
            response.context['all_messages_from_followed_users']
        )
        self.assertFalse(
            self.message3 in
            response.context['all_messages_from_followed_users']
        )

    def test_createmessage(self):
        text = "Aaaaaa mnogo dobur test"
        location = "Tonight we'll dine in HELL"
        self.client.login(username='test', password='123')
        self.client.post(
            '/createmessage/', {
                'text': text,
                'location': location
            }
        )
        self.assertTrue(Message.objects.filter(
            author=self.test_user_test,
            text=text,
            location=location).exists())
        self.assertFalse(Message.objects.filter(
            author=self.test_user_az,
            text=text,
            location=location).exists())

    def test_about(self):
        response = self.client.get('/about/')
        self.assertEqual(200, response.status_code)

    def test_contact(self):
        response = self.client.get('/contact/')
        self.assertEqual(200, response.status_code)

    def test_unexisting_page(self):
        response = self.client.get('/pesho/')
        self.assertEqual(404, response.status_code)

    def test_mymessages(self):
        self.client.login(username='test', password='123')
        response = self.client.get('/mymessages/')
        self.assertEqual(2, len(response.context['my_messages']))
        self.assertTrue(self.message1 in response.context['my_messages'])
        self.assertTrue(self.message2 in response.context['my_messages'])
        self.assertTrue(self.message3 not in response.context['my_messages'])

    def test_delete_my_message(self):
        self.client.login(username='test', password='123')
        message1_id = self.message1.id
        self.assertTrue(Message.objects.filter(id=message1_id).exists())
        self.client.get('/mymessages/delete/' + str(message1_id))
        self.assertFalse(Message.objects.filter(id=message1_id).exists())
|
vdtodorov93/Hirundo
|
hirundo/website/tests/tests.py
|
Python
|
mit
| 5,836
|
# -*- coding: utf-8 -*-
import os
import subprocess
import sys
import time
from ..base.addon import BaseAddon, expose
try:
import caffeine
except ImportError:
pass
class Kernel32:
    # Flags for the Windows SetThreadExecutionState API, combined with
    # bitwise OR in AntiStandby.win_standby below.
    ES_AWAYMODE_REQUIRED = 0x00000040
    ES_CONTINUOUS = 0x80000000
    ES_DISPLAY_REQUIRED = 0x00000002
    ES_SYSTEM_REQUIRED = 0x00000001
    ES_USER_PRESENT = 0x00000004
class AntiStandby(BaseAddon):
    """Prevent OS, HDD and display standby while downloads are active.

    OS/display standby is suppressed per platform (SetThreadExecutionState
    on Windows, the `caffeine` library on macOS, a `caffeine` subprocess +
    `xset` on Linux); HDD standby is prevented by periodically touching a
    marker file in the working directory.
    """
    __name__ = "AntiStandby"
    __type__ = "addon"
    __version__ = "0.18"
    __status__ = "testing"
    __config__ = [
        ("enabled", "bool", "Activated", False),
        ("hdd", "bool", "Prevent HDD standby", True),
        ("system", "bool", "Prevent OS standby", True),
        ("display", "bool", "Prevent display standby", False),
        ("interval", "int", "HDD touching interval in seconds", 25),
    ]
    __description__ = """Prevent OS, HDD and display standby"""
    __license__ = "GPLv3"
    __authors__ = [("Walter Purcaro", "vuolter@gmail.com")]

    #: marker file touched periodically to keep the HDD spinning
    TMP_FILE = ".antistandby"

    def init(self):
        self.pid = None  #: Popen handle of the spawned `caffeine` (Linux)
        self.mtime = 0  #: mtime of the last touch of TMP_FILE

    def activate(self):
        hdd = self.config.get("hdd")
        # in the *_standby calls True means "allow standby", so invert
        system = not self.config.get("system")
        display = not self.config.get("display")
        if hdd:
            # BUGFIX: removed a leftover debug print of the interval type
            self.periodical.start(self.config.get("interval"), threaded=True)
        if os.name == "nt":
            self.win_standby(system, display)
        elif sys.platform == "darwin":
            self.osx_standby(system, display)
        else:
            self.linux_standby(system, display)

    def deactivate(self):
        self.remove(self.TMP_FILE, try_trash=False)
        # re-allow standby on shutdown
        if os.name == "nt":
            self.win_standby(True)
        elif sys.platform == "darwin":
            self.osx_standby(True)
        else:
            self.linux_standby(True)

    @expose
    def win_standby(self, system=True, display=True):
        """Set the Windows execution state (True = allow that standby)."""
        import ctypes

        # avoid shadowing the builtin `set`
        set_exec_state = ctypes.windll.kernel32.SetThreadExecutionState
        if system:
            if display:
                set_exec_state(Kernel32.ES_CONTINUOUS)
            else:
                set_exec_state(Kernel32.ES_CONTINUOUS | Kernel32.ES_DISPLAY_REQUIRED)
        else:
            if display:
                set_exec_state(Kernel32.ES_CONTINUOUS | Kernel32.ES_SYSTEM_REQUIRED)
            else:
                set_exec_state(
                    Kernel32.ES_CONTINUOUS
                    | Kernel32.ES_SYSTEM_REQUIRED
                    | Kernel32.ES_DISPLAY_REQUIRED
                )

    @expose
    def osx_standby(self, system=True, display=True):
        """Toggle standby via the optional `caffeine` library (macOS)."""
        try:
            if system:
                caffeine.off()
            else:
                caffeine.on(display)
        except NameError:
            # the module-level `import caffeine` failed silently
            self.log_warning(
                self._("Unable to change power state"), self._("caffeine lib not found")
            )
        except Exception as exc:
            self.log_warning(self._("Unable to change power state"), exc)

    @expose
    def linux_standby(self, system=True, display=True):
        """Toggle standby via a `caffeine` subprocess and `xset` (Linux)."""
        try:
            if system:
                if self.pid:
                    self.pid.kill()
                    # BUGFIX: reset the handle — a killed Popen object is
                    # still truthy, so without this the `elif` below could
                    # never respawn caffeine on a later re-activation
                    self.pid = None
            elif not self.pid:
                self.pid = subprocess.Popen(["caffeine"])
        except Exception as exc:
            self.log_warning(self._("Unable to change system power state"), exc)
        try:
            if display:
                subprocess.call(["xset", "+dpms", "s", "default"])
            else:
                subprocess.call(["xset", "-dpms", "s", "off"])
        except Exception as exc:
            self.log_warning(self._("Unable to change display power state"), exc)

    @expose
    def touch(self, path):
        """Create/refresh *path* and remember the touch time."""
        with open(path, mode="w"):
            os.utime(path, None)
        self.mtime = time.time()

    @expose
    def max_mtime(self, path):
        """Return the newest file mtime under *path* (0 if there are none)."""
        # the two leading zeros make max() well-defined for an empty tree
        return max(
            0,
            0,
            *(
                os.path.getmtime(os.path.join(root, file))
                for root, dirs, files in os.walk(os.fsdecode(path), topdown=False)
                for file in files
            ),
        )

    def periodical_task(self):
        if not self.config.get("hdd"):
            return
        # only keep the disk awake while downloads are actually running
        if (
            self.pyload.thread_manager.pause
            or not self.pyload.api.is_time_download()
            or not self.pyload.thread_manager.get_active_files()
        ):
            return
        dl_folder = self.pyload.config.get("general", "storage_folder")
        # skip the touch if the download folder saw a write recently anyway
        if (self.max_mtime(dl_folder) - self.mtime) < self.periodical.interval:
            return
        self.touch(self.TMP_FILE)
|
vuolter/pyload
|
src/pyload/plugins/addons/AntiStandby.py
|
Python
|
agpl-3.0
| 4,619
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# Gestión de parámetros de configuración - xbmc
#------------------------------------------------------------
# tvalacarta
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#------------------------------------------------------------
# Creado por: Jesús (tvalacarta@gmail.com)
# Licencia: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
#------------------------------------------------------------
import sys
import os
import xbmcplugin
import xbmc
# Identifier reported by get_platform() for this platform implementation
PLATFORM_NAME = "xbmc-plugin"
# Plugin name used below to build the userdata / plugin_data paths
PLUGIN_NAME = "pelisalacarta"
def get_platform():
    """Return the identifier of the platform implementation in use."""
    platform_id = PLATFORM_NAME
    return platform_id
def is_xbmc():
    """Tell whether the plugin is running inside XBMC (always True here)."""
    return True
def get_library_support():
    """Tell whether this platform supports the local library (always True)."""
    return True
def get_system_platform():
    """Return the OS name XBMC runs on: linux, xbox, windows, osx or unknown."""
    import xbmc
    # Probe the platforms in the same order as the original elif chain;
    # the first matching condition wins.
    for name in ("linux", "xbox", "windows", "osx"):
        if xbmc.getCondVisibility("system.platform." + name):
            return name
    return "unknown"
def open_settings():
    """Open the plugin settings dialog."""
    plugin_url = sys.argv[0]
    xbmcplugin.openSettings(plugin_url)
def get_setting(name):
    """Return the stored value of the setting *name*."""
    value = xbmcplugin.getSetting(name)
    return value
def set_setting(name, value):
    """Store *value* under the setting *name*; failures are silently ignored."""
    try:
        xbmcplugin.setSetting(name, value)
    except:
        # Best effort by design: a failing settings backend must not crash us.
        pass
def get_localized_string(code):
    """Return the localized string for *code*, UTF-8 encoded when possible."""
    text = xbmc.getLocalizedString(code)
    try:
        # Only unicode strings need encoding; the rest stay as they are.
        text = text.encode("utf-8")
    except:
        pass
    return text
def get_library_path():
    """Return the configured library folder, defaulting to <data_path>/library."""
    default = os.path.join(get_data_path(), 'library')
    configured = get_setting("librarypath")
    return default if configured == "" else configured
def get_temp_file(filename):
    """Return the absolute path of *filename* inside XBMC's temp folder."""
    special_path = os.path.join("special://temp/", filename)
    return xbmc.translatePath(special_path)
def get_runtime_path():
    """Return the directory the plugin is currently running from."""
    runtime_dir = os.getcwd()
    return runtime_dir
def get_data_path():
    """Return the plugin's userdata folder, handling portable XBMC and Plex."""
    path = xbmc.translatePath(os.path.join("special://home/", "userdata", "plugin_data", "video", PLUGIN_NAME))
    # Portable-mode XBMC: special://home/ could not be translated
    if path.startswith("special:"):
        path = xbmc.translatePath(os.path.join("special://xbmc/", "userdata", "plugin_data", "video", PLUGIN_NAME))
    # Plex 8: special:// paths are not translated at all
    if path.startswith("special:"):
        path = os.getcwd()
    return path
def get_cookie_data():
    """Return the raw contents of the plugin's cookies.dat file.

    The file handle is now released even if the read fails (the original
    leaked it on a read error); the redundant local ``import os`` was dropped
    since the module already imports os at the top.
    """
    ficherocookies = os.path.join(get_data_path(), 'cookies.dat')
    cookiedatafile = open(ficherocookies, 'r')
    # try/finally instead of `with` to stay compatible with the ancient
    # Python bundled with XBMC 9.
    try:
        cookiedata = cookiedatafile.read()
    finally:
        cookiedatafile.close()
    return cookiedata
def _default_setting_path(setting_name, *subfolders):
    """Return the path setting *setting_name*, defaulting it (and persisting
    the default) to a folder under the data path when it is empty."""
    value = get_setting(setting_name)
    if value == "":
        value = os.path.join(get_data_path(), *subfolders)
        set_setting(setting_name, value)
    return value

def _create_directory(path, label):
    """Create *path* unless it is an smb:// share or already exists.
    Errors are deliberately ignored so startup never fails on permissions."""
    import logger
    if path.lower().startswith("smb") or os.path.exists(path):
        return
    logger.debug("Creating " + label + " " + path)
    try:
        os.mkdir(path)
    except:
        pass

# Test if all the required directories are created
def verify_directories_created():
    """Ensure every working folder (data, downloads, bookmarks, library)
    exists, defaulting empty path settings first, and clean up the obsolete
    platformcode/xbmc directory."""
    import logger
    logger.info("pelisalacarta.core.config.verify_directories_created")
    # Default empty path settings to folders under the data path
    download_path = _default_setting_path("downloadpath", "downloads")
    download_list_path = _default_setting_path("downloadlistpath", "downloads", "list")
    bookmark_path = _default_setting_path("bookmarkpath", "bookmarks")
    # Create each working directory if missing
    _create_directory(get_data_path(), "data_path")
    _create_directory(download_path, "download_path")
    _create_directory(download_list_path, "download_list_path")
    _create_directory(bookmark_path, "bookmark_path")
    _create_directory(get_library_path(), "library_path")
    # Checks that a directory "xbmc" is not present on platformcode
    old_xbmc_directory = os.path.join(get_runtime_path(), "platformcode", "xbmc")
    if os.path.exists(old_xbmc_directory):
        logger.debug("Removing old platformcode.xbmc directory")
        try:
            import shutil
            shutil.rmtree(old_xbmc_directory)
        except:
            pass
|
conejoninja/pelisalacarta
|
python/version-xbmc-09-plugin/core/config.py
|
Python
|
gpl-3.0
| 5,454
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
import uuid
from .. import models
class ActivityLogAlertsOperations(object):
    """ActivityLogAlertsOperations operations.
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    :ivar api_version: Client Api Version. Constant value: "2017-04-01".
    """
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self.api_version = "2017-04-01"
        self.config = config
    def create_or_update(
            self, resource_group_name, activity_log_alert_name, activity_log_alert, custom_headers=None, raw=False, **operation_config):
        """Create a new activity log alert or update an existing one.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param activity_log_alert_name: The name of the activity log alert.
        :type activity_log_alert_name: str
        :param activity_log_alert: The activity log alert to create or use for
         the update.
        :type activity_log_alert: :class:`ActivityLogAlertResource
         <azure.mgmt.monitor.models.ActivityLogAlertResource>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ActivityLogAlertResource
         <azure.mgmt.monitor.models.ActivityLogAlertResource>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises:
         :class:`ErrorResponseException<azure.mgmt.monitor.models.ErrorResponseException>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/activityLogAlerts/{activityLogAlertName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'activityLogAlertName': self._serialize.url("activity_log_alert_name", activity_log_alert_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(activity_log_alert, 'ActivityLogAlertResource')
        # Construct and send request
        request = self._client.put(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200, 201]:
            raise models.ErrorResponseException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ActivityLogAlertResource', response)
        if response.status_code == 201:
            deserialized = self._deserialize('ActivityLogAlertResource', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def get(
            self, resource_group_name, activity_log_alert_name, custom_headers=None, raw=False, **operation_config):
        """Get an activity log alert.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param activity_log_alert_name: The name of the activity log alert.
        :type activity_log_alert_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ActivityLogAlertResource
         <azure.mgmt.monitor.models.ActivityLogAlertResource>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises:
         :class:`ErrorResponseException<azure.mgmt.monitor.models.ErrorResponseException>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/activityLogAlerts/{activityLogAlertName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'activityLogAlertName': self._serialize.url("activity_log_alert_name", activity_log_alert_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ActivityLogAlertResource', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def delete(
            self, resource_group_name, activity_log_alert_name, custom_headers=None, raw=False, **operation_config):
        """Delete an activity log alert.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param activity_log_alert_name: The name of the activity log alert.
        :type activity_log_alert_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises:
         :class:`ErrorResponseException<azure.mgmt.monitor.models.ErrorResponseException>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/activityLogAlerts/{activityLogAlertName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'activityLogAlertName': self._serialize.url("activity_log_alert_name", activity_log_alert_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.delete(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        if response.status_code not in [200, 204]:
            raise models.ErrorResponseException(self._deserialize, response)
        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    def update(
            self, resource_group_name, activity_log_alert_name, tags=None, enabled=True, custom_headers=None, raw=False, **operation_config):
        """Updates an existing ActivityLogAlertResource's tags. To update other
        fields use the CreateOrUpdate method.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param activity_log_alert_name: The name of the activity log alert.
        :type activity_log_alert_name: str
        :param tags: Resource tags
        :type tags: dict
        :param enabled: Indicates whether this activity log alert is enabled.
         If an activity log alert is not enabled, then none of its actions will
         be activated.
        :type enabled: bool
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ActivityLogAlertResource
         <azure.mgmt.monitor.models.ActivityLogAlertResource>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises:
         :class:`ErrorResponseException<azure.mgmt.monitor.models.ErrorResponseException>`
        """
        activity_log_alert_patch = models.ActivityLogAlertPatchBody(tags=tags, enabled=enabled)
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/activityLogAlerts/{activityLogAlertName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'activityLogAlertName': self._serialize.url("activity_log_alert_name", activity_log_alert_name, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(activity_log_alert_patch, 'ActivityLogAlertPatchBody')
        # Construct and send request
        request = self._client.patch(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)
        if response.status_code not in [200]:
            raise models.ErrorResponseException(self._deserialize, response)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('ActivityLogAlertResource', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def list_by_subscription_id(
            self, custom_headers=None, raw=False, **operation_config):
        """Get a list of all activity log alerts in a subscription.
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ActivityLogAlertResourcePaged
         <azure.mgmt.monitor.models.ActivityLogAlertResourcePaged>`
        :raises:
         :class:`ErrorResponseException<azure.mgmt.monitor.models.ErrorResponseException>`
        """
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/providers/microsoft.insights/activityLogAlerts'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)
            return response
        # Deserialize response
        deserialized = models.ActivityLogAlertResourcePaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.ActivityLogAlertResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
    def list_by_resource_group(
            self, resource_group_name, custom_headers=None, raw=False, **operation_config):
        """Get a list of all activity log alerts in a resource group.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`ActivityLogAlertResourcePaged
         <azure.mgmt.monitor.models.ActivityLogAlertResourcePaged>`
        :raises:
         :class:`ErrorResponseException<azure.mgmt.monitor.models.ErrorResponseException>`
        """
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.insights/activityLogAlerts'
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                raise models.ErrorResponseException(self._deserialize, response)
            return response
        # Deserialize response
        deserialized = models.ActivityLogAlertResourcePaged(internal_paging, self._deserialize.dependencies)
        if raw:
            header_dict = {}
            client_raw_response = models.ActivityLogAlertResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
|
SUSE/azure-sdk-for-python
|
azure-mgmt-monitor/azure/mgmt/monitor/operations/activity_log_alerts_operations.py
|
Python
|
mit
| 19,888
|
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals, print_function, absolute_import
import math
import copy
from pint import UnitRegistry
from pint.unit import UnitsContainer
from pint.util import ParserHelper
from pint.compat import np, long_type
from pint.errors import UndefinedUnitError, DimensionalityError
from pint.testsuite import QuantityTestCase, helpers
from pint.testsuite.compat import unittest
class TestIssues(QuantityTestCase):
    """Regression tests collected from reported pint issues (non-NumPy cases)."""
    FORCE_NDARRAY = False
    def setup(self):
        self.ureg.autoconvert_offset_to_baseunit = False
    @unittest.expectedFailure
    def test_issue25(self):
        x = ParserHelper.from_string('10 %')
        self.assertEqual(x, ParserHelper(10, {'%': 1}))
        x = ParserHelper.from_string('10 ‰')
        self.assertEqual(x, ParserHelper(10, {'‰': 1}))
        ureg = UnitRegistry()
        ureg.define('percent = [fraction]; offset: 0 = %')
        ureg.define('permille = percent / 10 = ‰')
        x = ureg.parse_expression('10 %')
        self.assertEqual(x, ureg.Quantity(10, {'%': 1}))
        y = ureg.parse_expression('10 ‰')
        self.assertEqual(y, ureg.Quantity(10, {'‰': 1}))
        self.assertEqual(x.to('‰'), ureg.Quantity(1, {'‰': 1}))
    def test_issue29(self):
        ureg = UnitRegistry()
        ureg.define('molar = mole / liter = M')
        t = 4 * ureg('mM')
        self.assertEqual(t.magnitude, 4)
        self.assertEqual(t._units, UnitsContainer(millimolar=1))
        self.assertEqual(t.to('mole / liter'), 4e-3 * ureg('M'))
    def test_issue52(self):
        u1 = UnitRegistry()
        u2 = UnitRegistry()
        q1 = 1*u1.meter
        q2 = 1*u2.meter
        import operator as op
        # mixing quantities from different registries must raise
        for fun in (op.add, op.iadd,
                    op.sub, op.isub,
                    op.mul, op.imul,
                    op.floordiv, op.ifloordiv,
                    op.truediv, op.itruediv):
            self.assertRaises(ValueError, fun, q1, q2)
    def test_issue54(self):
        ureg = UnitRegistry()
        self.assertEqual((1*ureg.km/ureg.m + 1).magnitude, 1001)
    def test_issue54_related(self):
        ureg = UnitRegistry()
        self.assertEqual(ureg.km/ureg.m, 1000)
        self.assertEqual(1000, ureg.km/ureg.m)
        self.assertLess(900, ureg.km/ureg.m)
        self.assertGreater(1100, ureg.km/ureg.m)
    def test_issue61(self):
        ureg = UnitRegistry()
        Q_ = ureg.Quantity
        for value in ({}, {'a': 3}, None):
            self.assertRaises(TypeError, Q_, value)
            self.assertRaises(TypeError, Q_, value, 'meter')
        self.assertRaises(ValueError, Q_, '', 'meter')
        self.assertRaises(ValueError, Q_, '')
    @helpers.requires_not_numpy()
    def test_issue61_notNP(self):
        ureg = UnitRegistry()
        Q_ = ureg.Quantity
        for value in ([1, 2, 3], (1, 2, 3)):
            self.assertRaises(TypeError, Q_, value)
            self.assertRaises(TypeError, Q_, value, 'meter')
    def test_issue66(self):
        ureg = UnitRegistry()
        self.assertEqual(ureg.get_dimensionality(UnitsContainer({'[temperature]': 1})),
                         UnitsContainer({'[temperature]': 1}))
        self.assertEqual(ureg.get_dimensionality(ureg.kelvin),
                         UnitsContainer({'[temperature]': 1}))
        self.assertEqual(ureg.get_dimensionality(ureg.degC),
                         UnitsContainer({'[temperature]': 1}))
    def test_issue66b(self):
        ureg = UnitRegistry()
        self.assertEqual(ureg.get_base_units(ureg.kelvin),
                         (1.0, ureg.Unit(UnitsContainer({'kelvin': 1}))))
        self.assertEqual(ureg.get_base_units(ureg.degC),
                         (1.0, ureg.Unit(UnitsContainer({'kelvin': 1}))))
    def test_issue69(self):
        ureg = UnitRegistry()
        q = ureg('m').to(ureg('in'))
        self.assertEqual(q, ureg('m').to('in'))
    @helpers.requires_uncertainties()
    def test_issue77(self):
        ureg = UnitRegistry()
        acc = (5.0 * ureg('m/s/s')).plus_minus(0.25)
        tim = (37.0 * ureg('s')).plus_minus(0.16)
        dis = acc * tim ** 2 / 2
        self.assertEqual(dis.value, acc.value * tim.value ** 2 / 2)
    def test_issue85(self):
        ureg = UnitRegistry()
        T = 4. * ureg.kelvin
        m = 1. * ureg.amu
        va = 2. * ureg.k * T / m
        try:
            va.to_base_units()
        except:
            self.assertTrue(False, 'Error while trying to get base units for {}'.format(va))
        boltmk = 1.3806488e-23*ureg.J/ureg.K
        vb = 2. * boltmk * T / m
        self.assertQuantityAlmostEqual(va.to_base_units(), vb.to_base_units())
    def test_issue86(self):
        ureg = self.ureg
        ureg.autoconvert_offset_to_baseunit = True
        def parts(q):
            return q.magnitude, q.units
        q1 = 10. * ureg.degC
        q2 = 10. * ureg.kelvin
        k1 = q1.to_base_units()
        q3 = 3. * ureg.meter
        q1m, q1u = parts(q1)
        q2m, q2u = parts(q2)
        q3m, q3u = parts(q3)
        k1m, k1u = parts(k1)
        self.assertEqual(parts(q2 * q3), (q2m * q3m, q2u * q3u))
        self.assertEqual(parts(q2 / q3), (q2m / q3m, q2u / q3u))
        self.assertEqual(parts(q3 * q2), (q3m * q2m, q3u * q2u))
        self.assertEqual(parts(q3 / q2), (q3m / q2m, q3u / q2u))
        self.assertEqual(parts(q2 ** 1), (q2m ** 1, q2u ** 1))
        self.assertEqual(parts(q2 ** -1), (q2m ** -1, q2u ** -1))
        self.assertEqual(parts(q2 ** 2), (q2m ** 2, q2u ** 2))
        self.assertEqual(parts(q2 ** -2), (q2m ** -2, q2u ** -2))
        self.assertEqual(parts(q1 * q3), (k1m * q3m, k1u * q3u))
        self.assertEqual(parts(q1 / q3), (k1m / q3m, k1u / q3u))
        self.assertEqual(parts(q3 * q1), (q3m * k1m, q3u * k1u))
        self.assertEqual(parts(q3 / q1), (q3m / k1m, q3u / k1u))
        self.assertEqual(parts(q1 ** -1), (k1m ** -1, k1u ** -1))
        self.assertEqual(parts(q1 ** 2), (k1m ** 2, k1u ** 2))
        self.assertEqual(parts(q1 ** -2), (k1m ** -2, k1u ** -2))
    def test_issues86b(self):
        ureg = self.ureg
        T1 = 200. * ureg.degC
        T2 = T1.to(ureg.kelvin)
        m = 132.9054519 * ureg.amu
        v1 = 2 * ureg.k * T1 / m
        v2 = 2 * ureg.k * T2 / m
        self.assertQuantityAlmostEqual(v1, v2)
        self.assertQuantityAlmostEqual(v1, v2.to_base_units())
        self.assertQuantityAlmostEqual(v1.to_base_units(), v2)
        self.assertQuantityAlmostEqual(v1.to_base_units(), v2.to_base_units())
    @unittest.expectedFailure
    def test_issue86c(self):
        ureg = self.ureg
        ureg.autoconvert_offset_to_baseunit = True
        T = ureg.degC
        T = 100. * T
        self.assertQuantityAlmostEqual(ureg.k*2*T, ureg.k*(2*T))
    def test_issue93(self):
        ureg = UnitRegistry()
        x = 5 * ureg.meter
        self.assertIsInstance(x.magnitude, int)
        y = 0.1 * ureg.meter
        self.assertIsInstance(y.magnitude, float)
        z = 5 * ureg.meter
        self.assertIsInstance(z.magnitude, int)
        z += y
        self.assertIsInstance(z.magnitude, float)
        self.assertQuantityAlmostEqual(x + y, 5.1 * ureg.meter)
        self.assertQuantityAlmostEqual(z, 5.1 * ureg.meter)
    def test_issue523(self):
        ureg = UnitRegistry()
        src, dst = UnitsContainer({'meter': 1}), UnitsContainer({'degF': 1})
        value = 10.
        convert = self.ureg.convert
        self.assertRaises(DimensionalityError, convert, value, src, dst)
        self.assertRaises(DimensionalityError, convert, value, dst, src)
    def _test_issueXX(self):
        ureg = UnitRegistry()
        try:
            ureg.convert(1, ureg.degC, ureg.kelvin * ureg.meter / ureg.nanometer)
        except:
            self.assertTrue(False,
                            'Error while trying to convert {} to {}'.format(ureg.degC, ureg.kelvin * ureg.meter / ureg.nanometer))
    def test_issue121(self):
        sh = (2, 1)
        ureg = UnitRegistry()
        z, v = 0, 2.
        self.assertEqual(z + v * ureg.meter, v * ureg.meter)
        self.assertEqual(z - v * ureg.meter, -v * ureg.meter)
        self.assertEqual(v * ureg.meter + z, v * ureg.meter)
        self.assertEqual(v * ureg.meter - z, v * ureg.meter)
        self.assertEqual(sum([v * ureg.meter, v * ureg.meter]), 2 * v * ureg.meter)
    def test_issue105(self):
        ureg = UnitRegistry()
        func = ureg.parse_unit_name
        val = list(func('meter'))
        self.assertEqual(list(func('METER')), [])
        self.assertEqual(val, list(func('METER', False)))
        for func in (ureg.get_name, ureg.parse_expression):
            val = func('meter')
            self.assertRaises(AttributeError, func, 'METER')
            self.assertEqual(val, func('METER', False))
    def test_issue104(self):
        ureg = UnitRegistry()
        x = [ureg('1 meter'), ureg('1 meter'), ureg('1 meter')]
        y = [ureg('1 meter')] * 3
        def summer(values):
            if not values:
                return 0
            total = values[0]
            for v in values[1:]:
                total += v
            return total
        self.assertQuantityAlmostEqual(summer(x), ureg.Quantity(3, 'meter'))
        self.assertQuantityAlmostEqual(x[0], ureg.Quantity(1, 'meter'))
        self.assertQuantityAlmostEqual(summer(y), ureg.Quantity(3, 'meter'))
        self.assertQuantityAlmostEqual(y[0], ureg.Quantity(1, 'meter'))
    def test_issue170(self):
        Q_ = UnitRegistry().Quantity
        q = Q_('1 kHz')/Q_('100 Hz')
        iq = int(q)
        self.assertEqual(iq, 10)
        self.assertIsInstance(iq, int)
    @helpers.requires_python2()
    def test_issue170b(self):
        Q_ = UnitRegistry().Quantity
        q = Q_('1 kHz')/Q_('100 Hz')
        iq = long(q)
        self.assertEqual(iq, long(10))
        self.assertIsInstance(iq, long)
    def test_angstrom_creation(self):
        ureg = UnitRegistry()
        try:
            ureg.Quantity(2, 'Å')
        except SyntaxError:
            self.fail('Quantity with Å could not be created.')
    def test_alternative_angstrom_definition(self):
        ureg = UnitRegistry()
        try:
            ureg.Quantity(2, '\u212B')
        except UndefinedUnitError:
            self.fail('Quantity with Å could not be created.')
    def test_micro_creation(self):
        ureg = UnitRegistry()
        try:
            ureg.Quantity(2, 'µm')
        except SyntaxError:
            self.fail('Quantity with µ prefix could not be created.')
@helpers.requires_numpy()
class TestIssuesNP(QuantityTestCase):
    """Regression tests for reported pint issues that require numpy."""

    FORCE_NDARRAY = False

    @unittest.expectedFailure
    def test_issue37(self):
        x = np.ma.masked_array([1, 2, 3], mask=[True, True, False])
        ureg = UnitRegistry()
        q = ureg.meter * x
        self.assertIsInstance(q, ureg.Quantity)
        np.testing.assert_array_equal(q.magnitude, x)
        self.assertEqual(q.units, ureg.meter.units)
        q = x * ureg.meter
        self.assertIsInstance(q, ureg.Quantity)
        np.testing.assert_array_equal(q.magnitude, x)
        self.assertEqual(q.units, ureg.meter.units)
        # Bug fix: ``np.ones(3, 3)`` passed 3 as the *dtype* argument and
        # raised TypeError; the shape must be a tuple.
        m = np.ma.masked_array(2 * np.ones((3, 3)))
        qq = q * m
        self.assertIsInstance(qq, ureg.Quantity)
        np.testing.assert_array_equal(qq.magnitude, x * m)
        self.assertEqual(qq.units, ureg.meter.units)
        qq = m * q
        self.assertIsInstance(qq, ureg.Quantity)
        np.testing.assert_array_equal(qq.magnitude, x * m)
        self.assertEqual(qq.units, ureg.meter.units)

    @unittest.expectedFailure
    def test_issue39(self):
        x = np.matrix([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
        ureg = UnitRegistry()
        q = ureg.meter * x
        self.assertIsInstance(q, ureg.Quantity)
        np.testing.assert_array_equal(q.magnitude, x)
        self.assertEqual(q.units, ureg.meter.units)
        q = x * ureg.meter
        self.assertIsInstance(q, ureg.Quantity)
        np.testing.assert_array_equal(q.magnitude, x)
        self.assertEqual(q.units, ureg.meter.units)
        # Bug fix: shape must be a tuple (``np.ones(3, 3)`` is a TypeError).
        m = np.matrix(2 * np.ones((3, 3)))
        qq = q * m
        self.assertIsInstance(qq, ureg.Quantity)
        np.testing.assert_array_equal(qq.magnitude, x * m)
        self.assertEqual(qq.units, ureg.meter.units)
        qq = m * q
        self.assertIsInstance(qq, ureg.Quantity)
        np.testing.assert_array_equal(qq.magnitude, x * m)
        self.assertEqual(qq.units, ureg.meter.units)

    def test_issue44(self):
        # np.sqrt must work on dimensionless quantities.
        ureg = UnitRegistry()
        x = 4. * ureg.dimensionless
        np.sqrt(x)
        self.assertQuantityAlmostEqual(np.sqrt([4.] * ureg.dimensionless), [2.] * ureg.dimensionless)
        self.assertQuantityAlmostEqual(np.sqrt(4. * ureg.dimensionless), 2. * ureg.dimensionless)

    def test_issue45(self):
        # math functions must accept quantities that reduce to dimensionless.
        import math
        ureg = UnitRegistry()
        self.assertAlmostEqual(math.sqrt(4 * ureg.m/ureg.cm), math.sqrt(4 * 100))
        self.assertAlmostEqual(float(ureg.V / ureg.mV), 1000.)

    def test_issue45b(self):
        ureg = UnitRegistry()
        self.assertAlmostEqual(np.sin([np.pi/2] * ureg.m / ureg.m ), np.sin([np.pi/2] * ureg.dimensionless))
        self.assertAlmostEqual(np.sin([np.pi/2] * ureg.cm / ureg.m ), np.sin([np.pi/2] * ureg.dimensionless * 0.01))

    def test_issue50(self):
        # Quantities built from bare numbers/strings are dimensionless.
        ureg = UnitRegistry()
        Q_ = ureg.Quantity
        self.assertEqual(Q_(100), 100 * ureg.dimensionless)
        self.assertEqual(Q_('100'), 100 * ureg.dimensionless)

    def test_issue62(self):
        # Fractional exponents must round-trip through str().
        ureg = UnitRegistry()
        m = ureg('m**0.5')
        self.assertEqual(str(m.units), 'meter ** 0.5')

    def test_issue74(self):
        # Ordering comparisons must work element-wise, with unit conversion.
        ureg = UnitRegistry()
        v1 = np.asarray([1., 2., 3.])
        v2 = np.asarray([3., 2., 1.])
        q1 = v1 * ureg.ms
        q2 = v2 * ureg.ms
        np.testing.assert_array_equal(q1 < q2, v1 < v2)
        np.testing.assert_array_equal(q1 > q2, v1 > v2)
        np.testing.assert_array_equal(q1 <= q2, v1 <= v2)
        np.testing.assert_array_equal(q1 >= q2, v1 >= v2)
        q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s
        v2s = q2s.to('ms').magnitude
        np.testing.assert_array_equal(q1 < q2s, v1 < v2s)
        np.testing.assert_array_equal(q1 > q2s, v1 > v2s)
        np.testing.assert_array_equal(q1 <= q2s, v1 <= v2s)
        np.testing.assert_array_equal(q1 >= q2s, v1 >= v2s)

    def test_issue75(self):
        # Equality comparisons must work element-wise, with unit conversion.
        ureg = UnitRegistry()
        v1 = np.asarray([1., 2., 3.])
        v2 = np.asarray([3., 2., 1.])
        q1 = v1 * ureg.ms
        q2 = v2 * ureg.ms
        np.testing.assert_array_equal(q1 == q2, v1 == v2)
        np.testing.assert_array_equal(q1 != q2, v1 != v2)
        q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s
        v2s = q2s.to('ms').magnitude
        np.testing.assert_array_equal(q1 == q2s, v1 == v2s)
        np.testing.assert_array_equal(q1 != q2s, v1 != v2s)

    def test_issue93(self):
        # Magnitude dtype must follow normal Python numeric promotion.
        ureg = UnitRegistry()
        x = 5 * ureg.meter
        self.assertIsInstance(x.magnitude, int)
        y = 0.1 * ureg.meter
        self.assertIsInstance(y.magnitude, float)
        z = 5 * ureg.meter
        self.assertIsInstance(z.magnitude, int)
        z += y
        self.assertIsInstance(z.magnitude, float)
        self.assertQuantityAlmostEqual(x + y, 5.1 * ureg.meter)
        self.assertQuantityAlmostEqual(z, 5.1 * ureg.meter)

    @helpers.requires_numpy_previous_than('1.10')
    def test_issue94(self):
        # Old-numpy behaviour: in-place += on an int array truncates floats.
        ureg = UnitRegistry()
        v1 = np.array([5, 5]) * ureg.meter
        v2 = 0.1 * ureg.meter
        v3 = np.array([5, 5]) * ureg.meter
        v3 += v2
        np.testing.assert_array_equal((v1 + v2).magnitude, np.array([5.1, 5.1]))
        np.testing.assert_array_equal(v3.magnitude, np.array([5, 5]))

    @helpers.requires_numpy18()
    def test_issue121(self):
        sh = (2, 1)
        ureg = UnitRegistry()
        z, v = 0, 2.
        self.assertEqual(z + v * ureg.meter, v * ureg.meter)
        self.assertEqual(z - v * ureg.meter, -v * ureg.meter)
        self.assertEqual(v * ureg.meter + z, v * ureg.meter)
        self.assertEqual(v * ureg.meter - z, v * ureg.meter)
        self.assertEqual(sum([v * ureg.meter, v * ureg.meter]), 2 * v * ureg.meter)
        z, v = np.zeros(sh), 2. * np.ones(sh)
        self.assertQuantityEqual(z + v * ureg.meter, v * ureg.meter)
        self.assertQuantityEqual(z - v * ureg.meter, -v * ureg.meter)
        self.assertQuantityEqual(v * ureg.meter + z, v * ureg.meter)
        self.assertQuantityEqual(v * ureg.meter - z, v * ureg.meter)
        z, v = np.zeros((3, 1)), 2. * np.ones(sh)
        # Incompatible shapes must raise ValueError; assertRaises replaces
        # the original manual try/except + assertTrue(False) pattern.
        for x, y in ((z, v),
                     (z, v * ureg.meter),
                     (v * ureg.meter, z)
                     ):
            with self.assertRaises(ValueError):
                x + y
            with self.assertRaises(ValueError):
                x - y

    def test_issue127(self):
        # NaN assignment into a quantity array must stick.
        q = [1., 2., 3., 4.] * self.ureg.meter
        q[0] = np.nan
        self.assertNotEqual(q[0], 1.)
        self.assertTrue(math.isnan(q[0].magnitude))
        q[1] = float('NaN')
        self.assertNotEqual(q[1], 2.)
        self.assertTrue(math.isnan(q[1].magnitude))

    def test_issue171_real_imag(self):
        # .real/.imag must preserve units.
        qr = [1., 2., 3., 4.] * self.ureg.meter
        qi = [4., 3., 2., 1.] * self.ureg.meter
        q = qr + 1j * qi
        self.assertQuantityEqual(q.real, qr)
        self.assertQuantityEqual(q.imag, qi)

    def test_issue171_T(self):
        # .T must preserve units.
        a = np.asarray([[1., 2., 3., 4.],[4., 3., 2., 1.]])
        q1 = a * self.ureg.meter
        q2 = a.T * self.ureg.meter
        self.assertQuantityEqual(q1.T, q2)

    def test_issue250(self):
        # Conversion to numpy float scalar types must work.
        a = self.ureg.V
        b = self.ureg.mV
        self.assertEqual(np.float16(a/b), 1000.)
        self.assertEqual(np.float32(a/b), 1000.)
        self.assertEqual(np.float64(a/b), 1000.)
        if "float128" in dir(np):
            self.assertEqual(np.float128(a/b), 1000.)

    def test_issue252(self):
        # deepcopy of a quantity must remain convertible.
        ur = UnitRegistry()
        q = ur("3 F")
        t = copy.deepcopy(q)
        u = t.to(ur.mF)
        self.assertQuantityEqual(q.to(ur.mF), u)

    def test_issue323(self):
        # Fraction magnitudes must convert exactly.
        from fractions import Fraction as F
        self.assertEqual((self.Q_(F(2,3), 's')).to('ms'), self.Q_(F(2000,3), 'ms'))
        self.assertEqual((self.Q_(F(2,3), 'm')).to('km'), self.Q_(F(1,1500), 'km'))

    def test_issue339(self):
        # Parsing an empty string yields 1 dimensionless.
        q1 = self.ureg('')
        self.assertEqual(q1.magnitude, 1)
        self.assertEqual(q1.units, self.ureg.dimensionless)
        q2 = self.ureg('1 dimensionless')
        self.assertEqual(q1, q2)

    def test_issue354_356_370(self):
        # Short (~) formatting of compound, count, and binary-prefix units.
        # (removed an unused local that duplicated the first formatted value)
        self.assertEqual('{0:~}'.format(1 * self.ureg.second / self.ureg.millisecond),
                         '1.0 s / ms')
        self.assertEqual("{0:~}".format(1 * self.ureg.count),
                         '1 count')
        self.assertEqual('{0:~}'.format(1 * self.ureg('MiB')),
                         '1 MiB')

    def test_issue482(self):
        # np.exp of a dimensionless quantity stays a Quantity.
        q = self.ureg.Quantity(1, self.ureg.dimensionless)
        qe = np.exp(q)
        self.assertIsInstance(qe, self.ureg.Quantity)

    def test_issue468(self):
        ureg = UnitRegistry()

        @ureg.wraps(('kg'), 'meter')
        def f(x):
            return x

        x = ureg.Quantity(1., 'meter')
        y = f(x)
        z = x * y
        # Bug fix: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(z, ureg.Quantity(1., 'meter * kilogram'))

    def test_issue483(self):
        # Elementwise power of a dimensionless array quantity.
        ureg = self.ureg
        a = np.asarray([1, 2, 3])
        q = [1, 2, 3] * ureg.dimensionless
        p = (q ** q).m
        np.testing.assert_array_equal(p, a ** a)
|
fernandog/Medusa
|
ext/pint/testsuite/test_issues.py
|
Python
|
gpl-3.0
| 19,906
|
# -*- coding: utf-8 -*-
# wasp_backup/file_archiver.py
#
# Copyright (C) 2017 the wasp-backup authors and contributors
# <see AUTHORS file>
#
# This file is part of wasp-backup.
#
# wasp-backup is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wasp-backup is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with wasp-backup. If not, see <http://www.gnu.org/licenses/>.
# TODO: document the code
# TODO: write tests for the code
# noinspection PyUnresolvedReferences
from wasp_backup.version import __author__, __version__, __credits__, __license__, __copyright__, __email__
# noinspection PyUnresolvedReferences
from wasp_backup.version import __status__
import io
from wasp_general.verify import verify_type, verify_value
from wasp_backup.cipher import WBackupCipher
from wasp_backup.core import WBackupMeta
from wasp_backup.archiver import WBasicArchiveCreator
class WFileArchiveCreator(WBasicArchiveCreator):
	""" Archive creator that streams a single readable backup source into an
	archive as a raw byte stream (compression/encryption, rate limiting and
	stop-event handling are provided by :class:`.WBasicArchiveCreator`).
	"""

	@verify_type('paranoid', archive_path=str, io_write_rate=(float, int, None))
	@verify_value('paranoid', archive_path=lambda x: len(x) > 0, io_write_rate=lambda x: x is None or x > 0)
	@verify_type(cipher=(WBackupCipher, None), compression_mode=(WBackupMeta.Archive.CompressionMode, None))
	def __init__(
		self, backup_source, archive_path, logger, stop_event=None, io_write_rate=None, compression_mode=None,
		cipher=None, buffer_size=io.DEFAULT_BUFFER_SIZE
	):
		""" Create a new file archiver.

		:param backup_source: object the backup data is read from
			(assumed file-like: must expose ``read(size)`` — TODO confirm)
		:param archive_path: target archive path (consumed by the base class)
		:param logger: logger to use
		:param stop_event: optional event that aborts the archiving
		:param io_write_rate: optional write-rate limit (units per base class)
		:param compression_mode: optional compression mode
		:param cipher: optional cipher for encrypting the archive
		:param buffer_size: chunk size used when copying the source
		"""
		WBasicArchiveCreator.__init__(
			self, archive_path, logger, stop_event=stop_event, io_write_rate=io_write_rate,
			compression_mode=compression_mode, cipher=cipher
		)
		self.__backup_source = backup_source
		self.__buffer_size = buffer_size

	def backup_source(self):
		""" Return the source object the backup data is read from. """
		return self.__backup_source

	def buffer_size(self):
		""" Return the chunk size used when copying the source. """
		return self.__buffer_size

	def write_archive(self, fo, archive):
		""" Copy the whole backup source into ``fo`` chunk by chunk.

		:param fo: writable target object
		:param archive: unused here (part of the base-class signature)
		"""
		backup_source = self.backup_source()
		buffer_size = self.buffer_size()
		read_buffer = backup_source.read(buffer_size)
		# an empty read marks end of source
		while len(read_buffer) > 0:
			fo.write(read_buffer)
			read_buffer = backup_source.read(buffer_size)

	def meta(self):
		""" Extend base-class metadata with the archived source.

		NOTE(review): the source *object itself* is stored under
		``archived_files`` — presumably serialized later via its string
		representation; confirm against the meta consumer.
		"""
		result = WBasicArchiveCreator.meta(self)
		result[WBackupMeta.Archive.MetaOptions.archived_files] = self.backup_source()
		return result
|
a1ezzz/wasp-backup
|
wasp_backup/file_archiver.py
|
Python
|
lgpl-3.0
| 2,627
|
# -*- coding: utf-8 -*-
try:
from urllib import urlencode
from urlparse import urlparse, urljoin
from urllib import unquote
except ImportError:
from urllib.parse import urlencode # nopyflakes
from urllib.parse import urlparse # nopyflakes
from urllib.parse import unquote # nopyflakes
from urllib.parse import urljoin # nopyflakes
|
SurfasJones/djcmsrc3
|
venv/lib/python2.7/site-packages/cms/utils/compat/urls.py
|
Python
|
mit
| 363
|
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# Baruwa - Web 2.0 MailScanner front-end.
# Copyright (C) 2010-2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import paste
import logging
from celery.app import app_or_default
from pylons import config as pylonsconfig
from paste.script.command import Command, BadCommand
from celery.bin import camqadm, celerybeat, celeryd, celeryev
__all__ = ['CeleryDaemonCommand', 'CeleryBeatCommand',
'CAMQPAdminCommand', 'CeleryEventCommand']
log = logging.getLogger(__name__)
class BasePasterCommand(Command):
    """
    Abstract Base Class for paster commands.

    The celery commands are somewhat aggressive about loading
    celery.conf, and since our module sets the `CELERY_LOADER`
    environment variable to our loader, we have to bootstrap a bit and
    make sure we've had a chance to load the pylons config off of the
    command line, otherwise everything fails.
    """
    group_name = 'baruwa'
    min_args = 1
    min_args_error = "Please provide a paster config file as an argument."
    takes_config_file = 1
    requires_config_file = True

    def notify_msg(self, msg, log=False):
        """Make a notification to user, additionally if logger is passed
        it logs this action using given logger

        :param msg: message that will be printed to user
        :param log: logging instance, to use to additionally log this message
        """
        # Bug fix: the original tested ``isinstance(log, logging)`` —
        # ``logging`` is a module, so isinstance() always raised TypeError —
        # and then *called* the logger object (Logger instances are not
        # callable). Check against logging.Logger and log at info level.
        if log and isinstance(log, logging.Logger):
            log.info(msg)

    def run(self, args):
        """
        Overrides Command.run

        Checks for a config file argument and loads it.
        """
        if len(args) < self.min_args:
            raise BadCommand(self.min_args_error % {'min_args': self.min_args,
                                                    'actual_args': len(args)})
        # Decrement because we're going to lob off the first argument.
        # @@ This is hacky
        self.min_args -= 1
        self.bootstrap_config(args[0])
        self.update_parser()
        return Command.run(self, args[1:])
        # return super(BasePasterCommand, self).run(args[1:])

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        raise NotImplementedError("Abstract Method.")

    def bootstrap_config(self, conf):
        """
        Loads the pylons configuration.

        :param conf: path to the paste .ini config file
        """
        # Bug fix: ``import paste`` alone does not import the
        # ``paste.deploy`` sub-package, so ``paste.deploy.appconfig`` could
        # raise AttributeError at runtime; import the loader explicitly.
        from paste.deploy import appconfig
        path_to_ini_file = os.path.realpath(conf)
        conf = appconfig('config:' + path_to_ini_file)
        pylonsconfig.init_app(conf.global_conf, conf.local_conf)
class CeleryCommand(BasePasterCommand):
    """Abstract class implements run methods needed for celery

    Starts the celery worker that uses a paste.deploy configuration
    file.
    """
    # Subclasses must set ``celery_command`` to the wrapped celery
    # command class (e.g. celeryd.WorkerCommand).

    def update_parser(self):
        """
        Abstract method. Allows for the class's parser to be updated
        before the superclass's `run` method is called. Necessary to
        allow options/arguments to be passed through to the underlying
        celery command.
        """
        # Forward every option of the wrapped celery command so paster's
        # option parser accepts and relays them.
        cmd = self.celery_command(app_or_default())
        for x in cmd.get_options():
            self.parser.add_option(x)

    def command(self):
        # Instantiate and run the wrapped celery command with the options
        # parsed by paster.
        cmd = self.celery_command(app_or_default())
        return cmd.run(**vars(self.options))
class CeleryDaemonCommand(CeleryCommand):
    """Start the celery worker
    Starts the celery worker that uses a paste.deploy configuration
    file.
    """
    # The docstring wording is runtime data: ``summary`` and ``description``
    # are sliced from __doc__ below and shown in ``paster --help``.
    usage = 'CONFIG_FILE [celeryd options...]'
    summary = __doc__.splitlines()[0]
    description = "".join(__doc__.splitlines()[2:])
    parser = Command.standard_parser(quiet=True)
    # Wrapped celery command (see CeleryCommand.update_parser/command).
    celery_command = celeryd.WorkerCommand
class CeleryBeatCommand(CeleryCommand):
    """Start the celery beat server
    Starts the celery beat server using a paste.deploy configuration
    file.
    """
    # The docstring wording is runtime data: ``summary`` and ``description``
    # are sliced from __doc__ below and shown in ``paster --help``.
    usage = 'CONFIG_FILE [celerybeat options...]'
    summary = __doc__.splitlines()[0]
    description = "".join(__doc__.splitlines()[2:])
    parser = Command.standard_parser(quiet=True)
    # Wrapped celery command (see CeleryCommand.update_parser/command).
    celery_command = celerybeat.BeatCommand
class CAMQPAdminCommand(CeleryCommand):
    """CAMQP Admin
    CAMQP celery admin tool.
    """
    # The docstring wording is runtime data: ``summary`` and ``description``
    # are sliced from __doc__ below and shown in ``paster --help``.
    usage = 'CONFIG_FILE [camqadm options...]'
    summary = __doc__.splitlines()[0]
    description = "".join(__doc__.splitlines()[2:])
    parser = Command.standard_parser(quiet=True)
    # Wrapped celery command (see CeleryCommand.update_parser/command).
    celery_command = camqadm.AMQPAdminCommand
class CeleryEventCommand(CeleryCommand):
    """Celery event command.
    Capture celery events.
    """
    # The docstring wording is runtime data: ``summary`` and ``description``
    # are sliced from __doc__ below and shown in ``paster --help``.
    usage = 'CONFIG_FILE [celeryev options...]'
    summary = __doc__.splitlines()[0]
    description = "".join(__doc__.splitlines()[2:])
    parser = Command.standard_parser(quiet=True)
    # Wrapped celery command (see CeleryCommand.update_parser/command).
    celery_command = celeryev.EvCommand
|
akissa/baruwa2
|
baruwa/lib/mq/commands.py
|
Python
|
gpl-3.0
| 5,582
|
"""Support for the EZcontrol XS1 gateway."""
import asyncio
import logging
import voluptuous as vol
import xs1_api_client
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
)
from homeassistant.helpers import discovery
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)

# Integration domain and the hass.data sub-keys populated by setup().
DOMAIN = "xs1"
ACTUATORS = "actuators"
SENSORS = "sensors"

# define configuration parameters
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_HOST): cv.string,
                vol.Optional(CONF_PASSWORD): cv.string,
                # NOTE(review): the port is validated as a string (the int
                # default 80 is coerced by cv.string) — presumably the XS1
                # client accepts a string port; confirm.
                vol.Optional(CONF_PORT, default=80): cv.string,
                vol.Optional(CONF_SSL, default=False): cv.boolean,
                vol.Optional(CONF_USERNAME): cv.string,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)

# Platforms loaded via discovery by setup().
XS1_COMPONENTS = ["climate", "sensor", "switch"]

# Lock used to limit the amount of concurrent update requests
# as the XS1 Gateway can only handle a very
# small amount of concurrent requests
UPDATE_LOCK = asyncio.Lock()
def setup(hass, config):
    """Set up XS1 Component.

    Creates the XS1 API client, stores the gateway's actuators and sensors
    in ``hass.data[DOMAIN]``, and loads the supported platforms.
    Returns False when the gateway cannot be reached, True otherwise.
    """
    _LOGGER.debug("Initializing XS1")
    host = config[DOMAIN][CONF_HOST]
    port = config[DOMAIN][CONF_PORT]
    ssl = config[DOMAIN][CONF_SSL]
    user = config[DOMAIN].get(CONF_USERNAME)
    password = config[DOMAIN].get(CONF_PASSWORD)
    # initialize XS1 API
    try:
        xs1 = xs1_api_client.XS1(
            host=host, port=port, ssl=ssl, user=user, password=password
        )
    except ConnectionError as error:
        _LOGGER.error(
            "Failed to create XS1 API client because of a connection error: %s",
            error,
        )
        return False
    _LOGGER.debug("Establishing connection to XS1 gateway and retrieving data...")
    hass.data[DOMAIN] = {}
    # Only enabled devices are exposed to Home Assistant.
    actuators = xs1.get_all_actuators(enabled=True)
    sensors = xs1.get_all_sensors(enabled=True)
    hass.data[DOMAIN][ACTUATORS] = actuators
    hass.data[DOMAIN][SENSORS] = sensors
    _LOGGER.debug("Loading components for XS1 platform...")
    # Load components for supported devices
    for component in XS1_COMPONENTS:
        discovery.load_platform(hass, component, DOMAIN, {}, config)
    return True
class XS1DeviceEntity(Entity):
    """Representation of a base XS1 device."""

    def __init__(self, device):
        """Initialize the XS1 device.

        :param device: xs1_api_client device object wrapped by this entity
        """
        self.device = device

    async def async_update(self):
        """Retrieve latest device state."""
        # Serialize updates through the module-level lock: the XS1 gateway
        # cannot handle many concurrent requests.
        async with UPDATE_LOCK:
            await self.hass.async_add_executor_job(self.device.update)
|
tchellomello/home-assistant
|
homeassistant/components/xs1/__init__.py
|
Python
|
apache-2.0
| 2,712
|
"""
A collection of functions for evaluating TimeSeries.
"""
# pylint: disable=E1101
import numpy as np
def f_score(test, truth, threshold=np.float32(0.0)):
    """
    Calculate the F1 score of the test timeseries against the ground truth,
    with indicators calculated according to the given threshold.
    We assume both timeseries have the same length, and are aligned in time.

    Returns 0.0 when there are no true positives: the original expression
    evaluated 0/0 in that case and produced nan (with a runtime warning)
    instead of a usable score.
    """
    test_ind = test.indicators(threshold)
    test_ind_not = np.logical_not(test_ind)
    truth_ind = truth.indicators(threshold)
    truth_ind_not = np.logical_not(truth_ind)
    true_positives = np.sum(np.logical_and(test_ind, truth_ind),
                            dtype=np.float32)
    false_positives = np.sum(np.logical_and(test_ind, truth_ind_not),
                             dtype=np.float32)
    false_negatives = np.sum(np.logical_and(test_ind_not, truth_ind),
                             dtype=np.float32)
    if true_positives == 0:
        # Precision and recall are both 0/0 here; define F1 as 0 rather
        # than propagating nan.
        return 0.0
    # true_positives > 0 guarantees both denominators are positive.
    precision = true_positives / (true_positives + false_positives)
    recall = true_positives / (true_positives + false_negatives)
    return 2 * precision * recall / (precision + recall)
def mean_squared_error(test, truth):
    """Return the mean of the squared element-wise differences."""
    residual = test - truth
    return np.mean(residual ** 2)
def root_mean_squared_error(test, truth):
    """Return the square root of the mean squared error of the two arrays."""
    return mean_squared_error(test, truth) ** 0.5
|
CMPUT-466-551-ML-Project/NILM-Project
|
nilm/evaluation.py
|
Python
|
gpl-2.0
| 1,433
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from .commands import COMMANDS
from .excludes import add_exclude_patters, get_exclude_patterns, set_exclude_patters
from .finding import find_packages
from .parsing import get_long_description
from .resource_building import build_resources
from .versions import get_version, write_version_to_file
# Explicit public API of the setup utilities.
# NOTE(review): "patters" (in add/set_exclude_patters) looks like a typo,
# but the names must match the definitions imported from .excludes — do not
# rename here without renaming the source and keeping a compatibility alias.
__all__ = [
    "COMMANDS",
    "build_resources",
    "find_packages",
    "add_exclude_patters",
    "get_exclude_patterns",
    "get_long_description",
    "get_version",
    "set_exclude_patters",
    "write_version_to_file",
]
|
shoopio/shoop
|
shuup_setup_utils/__init__.py
|
Python
|
agpl-3.0
| 760
|
"""Config flow for local_ip."""
from __future__ import annotations
from typing import Any
from homeassistant.config_entries import ConfigFlow
from homeassistant.data_entry_flow import FlowResult
from .const import DOMAIN
class SimpleConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for local_ip."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the initial step."""
        # Only a single configured instance of this integration makes sense.
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")
        if user_input is not None:
            return self.async_create_entry(title=DOMAIN, data=user_input)
        return self.async_show_form(step_id="user")
|
jawilson/home-assistant
|
homeassistant/components/local_ip/config_flow.py
|
Python
|
apache-2.0
| 757
|
import re
from django.core.exceptions import ValidationError
def colorValidator(value):
    """
    Validates if the given value is a string representing an hexadecimal color
    value (starts with a #, then RGB composants in hex).
    >>> colorValidator('#0a1b2C')
    >>> colorValidator('#AAAAAA')
    >>> colorValidator('ff00ff')
    ValidationError: ['ff00ff is not a valid hex color.']
    """
    pattern = re.compile('^#[a-fA-F0-9]{6}$')
    if pattern.match(value) is None:
        raise ValidationError('{} is not a valid hex color.'.format(value))
|
gordon-/yolo
|
learning/validators.py
|
Python
|
agpl-3.0
| 555
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import re
from jinja2 import Environment
from jinja2.loaders import FileSystemLoader
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
from jinja2.utils import concat as j2_concat
from jinja2.runtime import StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
from ansible.plugins import _basedirs, filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.debug import debug
from numbers import Number
__all__ = ['Templar']
# NOTE(review): the single-variable regex this module historically described
# here lives on the Templar instance (self.SINGLE_VAR, compiled in __init__
# from the jinja2 environment's variable delimiters).
# Primitive Types which we don't want Jinja to convert to strings.
NON_TEMPLATED_TYPES = ( bool, Number )
# Prefix marking an in-template override of jinja2 environment options,
# e.g. ``#jinja2: trim_blocks:False`` on the template's first line.
JINJA2_OVERRIDE = '#jinja2:'
def _preserve_backslashes(data, jinja_env):
    """Double backslashes within jinja2 expressions

    A user may enter something like this in a playbook::

      debug:
        msg: "Test Case 1\\3; {{ test1_name | regex_replace('^(.*)_name$', '\\1')}}"

    The string inside of the {{ gets interpreted multiple times First by yaml.
    Then by python.  And finally by jinja2 as part of it's variable.  Because
    it is processed by both python and jinja2, the backslash escaped
    characters get unescaped twice.  This means that we'd normally have to use
    four backslashes to escape that.  This is painful for playbook authors as
    they have to remember different rules for inside vs outside of a jinja2
    expression (The backslashes outside of the "{{ }}" only get processed by
    yaml and python.  So they only need to be escaped once).  The following
    code fixes this by automatically performing the extra quoting of
    backslashes inside of a jinja2 expression.
    """
    # Cheap pre-filter: only strings that contain both a backslash and a
    # jinja2 expression opener need the (relatively expensive) lexer pass.
    if '\\' in data and '{{' in data:
        new_data = []
        d2 = jinja_env.preprocess(data)
        in_var = False
        # Walk the jinja2 token stream; each token is (lineno, type, value).
        for token in jinja_env.lex(d2):
            if token[1] == 'variable_begin':
                in_var = True
                new_data.append(token[2])
            elif token[1] == 'variable_end':
                in_var = False
                new_data.append(token[2])
            elif in_var and token[1] == 'string':
                # Double backslashes only if we're inside of a jinja2 variable
                new_data.append(token[2].replace('\\','\\\\'))
            else:
                new_data.append(token[2])
        data = ''.join(new_data)
    return data
class Templar:
    '''
    The main class for templating, with the main entry-point of template().
    '''

    def __init__(self, loader, shared_loader_obj=None, variables=dict()):
        '''
        :param loader: DataLoader used to resolve the template search path
        :param shared_loader_obj: optional object providing filter/lookup
            loaders; falls back to the module-level plugin loaders
        :param variables: initial template variables

        NOTE(review): ``variables=dict()`` is a mutable default shared
        across calls; it is stored without copying below — confirm callers
        never mutate it in place.
        '''
        self._loader = loader
        self._filters = None
        self._tests = None
        self._available_variables = variables
        if loader:
            self._basedir = loader.get_basedir()
        else:
            self._basedir = './'
        if shared_loader_obj:
            self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
            self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
        else:
            self._filter_loader = filter_loader
            self._lookup_loader = lookup_loader
        # flags to determine whether certain failures during templating
        # should result in fatal errors being raised
        self._fail_on_lookup_errors = True
        self._fail_on_filter_errors = True
        self._fail_on_undefined_errors = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
        self.environment = Environment(
            trim_blocks=True,
            undefined=StrictUndefined,
            extensions=self._get_extensions(),
            finalize=self._finalize,
            loader=FileSystemLoader(self._basedir),
        )
        self.environment.template_class = AnsibleJ2Template
        # Matches a string that is nothing but a single bare variable
        # reference (e.g. "{{ foo }}"), built from the environment's
        # configured variable delimiters.
        self.SINGLE_VAR = re.compile(r"^%s\s*(\w*)\s*%s$" % (self.environment.variable_start_string, self.environment.variable_end_string))

    def _count_newlines_from_end(self, in_str):
        '''
        Counts the number of newlines at the end of a string. This is used during
        the jinja2 templating to ensure the count matches the input, since some newlines
        may be thrown away during the templating.
        '''
        i = len(in_str)
        while i > 0:
            if in_str[i-1] != '\n':
                break
            i -= 1
        return len(in_str) - i

    def _get_filters(self):
        '''
        Returns filter plugins, after loading and caching them if need be
        '''
        if self._filters is not None:
            return self._filters.copy()
        plugins = [x for x in self._filter_loader.all()]
        self._filters = dict()
        for fp in plugins:
            self._filters.update(fp.filters())
        # tests are exposed alongside filters so templates can use either
        self._filters.update(self._get_tests())
        return self._filters.copy()

    def _get_tests(self):
        '''
        Returns tests plugins, after loading and caching them if need be
        '''
        if self._tests is not None:
            return self._tests.copy()
        plugins = [x for x in test_loader.all()]
        self._tests = dict()
        for fp in plugins:
            self._tests.update(fp.tests())
        return self._tests.copy()

    def _get_extensions(self):
        '''
        Return jinja2 extensions to load.
        If some extensions are set via jinja_extensions in ansible.cfg, we try
        to load them with the jinja environment.
        '''
        jinja_exts = []
        if C.DEFAULT_JINJA2_EXTENSIONS:
            # make sure the configuration directive doesn't contain spaces
            # and split extensions in an array
            jinja_exts = C.DEFAULT_JINJA2_EXTENSIONS.replace(" ", "").split(',')
        return jinja_exts

    def set_available_variables(self, variables):
        '''
        Sets the list of template variables this Templar instance will use
        to template things, so we don't have to pass them around between
        internal methods.
        '''
        assert isinstance(variables, dict)
        self._available_variables = variables.copy()

    def template(self, variable, convert_bare=False, preserve_trailing_newlines=False, fail_on_undefined=None, overrides=None, convert_data=True):
        '''
        Templates (possibly recursively) any given data as input. If convert_bare is
        set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
        before being sent through the template engine.

        Strings are rendered via jinja2; lists/tuples and dicts are walked
        recursively; every other type is returned unchanged.
        '''
        try:
            if convert_bare:
                variable = self._convert_bare_variable(variable)
            # NOTE(review): ``basestring`` is Python-2-only; this module
            # predates the py3 port.
            if isinstance(variable, basestring):
                result = variable
                if self._contains_vars(variable):
                    # Check to see if the string we are trying to render is just referencing a single
                    # var.  In this case we don't want to accidentally change the type of the variable
                    # to a string by using the jinja template renderer. We just want to pass it.
                    only_one = self.SINGLE_VAR.match(variable)
                    if only_one:
                        var_name = only_one.group(1)
                        if var_name in self._available_variables:
                            resolved_val = self._available_variables[var_name]
                            if isinstance(resolved_val, NON_TEMPLATED_TYPES):
                                return resolved_val
                            elif resolved_val is None:
                                return C.DEFAULT_NULL_REPRESENTATION
                    result = self._do_template(variable, preserve_trailing_newlines=preserve_trailing_newlines, fail_on_undefined=fail_on_undefined, overrides=overrides)
                    if convert_data:
                        # if this looks like a dictionary or list, convert it to such using the safe_eval method
                        if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
                           result.startswith("[") or result in ("True", "False"):
                            eval_results = safe_eval(result, locals=self._available_variables, include_exceptions=True)
                            if eval_results[1] is None:
                                result = eval_results[0]
                            else:
                                # FIXME: if the safe_eval raised an error, should we do something with it?
                                pass
                return result
            elif isinstance(variable, (list, tuple)):
                return [self.template(v, convert_bare=convert_bare, preserve_trailing_newlines=preserve_trailing_newlines, fail_on_undefined=fail_on_undefined, overrides=overrides) for v in variable]
            elif isinstance(variable, dict):
                d = {}
                # we don't use iteritems() here to avoid problems if the underlying dict
                # changes sizes due to the templating, which can happen with hostvars
                for k in variable.keys():
                    d[k] = self.template(variable[k], convert_bare=convert_bare, preserve_trailing_newlines=preserve_trailing_newlines, fail_on_undefined=fail_on_undefined, overrides=overrides)
                return d
            else:
                return variable
        except AnsibleFilterError:
            if self._fail_on_filter_errors:
                raise
            else:
                return variable

    def _contains_vars(self, data):
        '''
        returns True if the data contains a variable pattern
        '''
        return self.environment.block_start_string in data or self.environment.variable_start_string in data

    def _convert_bare_variable(self, variable):
        '''
        Wraps a bare string, which may have an attribute portion (ie. foo.bar)
        in jinja2 variable braces so that it is evaluated properly.
        '''
        if isinstance(variable, basestring):
            contains_filters = "|" in variable
            first_part = variable.split("|")[0].split(".")[0].split("[")[0]
            if (contains_filters or first_part in self._available_variables) and self.environment.variable_start_string not in variable:
                return "%s%s%s" % (self.environment.variable_start_string, variable, self.environment.variable_end_string)
        # the variable didn't meet the conditions to be converted,
        # so just return it as-is
        return variable

    def _finalize(self, thing):
        '''
        A custom finalize method for jinja2, which prevents None from being returned
        '''
        return thing if thing is not None else ''

    def _lookup(self, name, *args, **kwargs):
        '''
        Implements the ``lookup()`` function exposed to templates: loads the
        named lookup plugin, runs it with the flattened terms, and joins a
        truthy list result into a comma-separated string.
        '''
        instance = self._lookup_loader.get(name.lower(), loader=self._loader, templar=self)
        if instance is not None:
            # imported here to avoid a circular import at module load time
            from ansible.utils.listify import listify_lookup_plugin_terms
            loop_terms = listify_lookup_plugin_terms(terms=args, templar=self, loader=self._loader, fail_on_undefined=True, convert_bare=False)
            # safely catch run failures per #5059
            try:
                ran = instance.run(loop_terms, variables=self._available_variables, **kwargs)
            except (AnsibleUndefinedVariable, UndefinedError) as e:
                raise AnsibleUndefinedVariable(e)
            except Exception as e:
                if self._fail_on_lookup_errors:
                    raise
                ran = None
            if ran:
                ran = ",".join(ran)
            return ran
        else:
            raise AnsibleError("lookup plugin (%s) not found" % name)

    def _do_template(self, data, preserve_trailing_newlines=False, fail_on_undefined=None, overrides=None):
        '''
        Render a single template string through jinja2, honoring in-template
        ``#jinja2:`` option overrides, restoring trailing newlines when
        requested, and mapping jinja2 errors onto Ansible error types.
        '''
        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors
        try:
            # allows template header overrides to change jinja2 options.
            if overrides is None:
                myenv = self.environment.overlay()
            else:
                myenv = self.environment.overlay(overrides)
            # Get jinja env overrides from template
            if data.startswith(JINJA2_OVERRIDE):
                eol = data.find('\n')
                line = data[len(JINJA2_OVERRIDE):eol]
                data = data[eol+1:]
                for pair in line.split(','):
                    (key,val) = pair.split(':')
                    key = key.strip()
                    setattr(myenv, key, ast.literal_eval(val.strip()))
        #FIXME: add tests
            myenv.filters.update(self._get_filters())
            myenv.tests.update(self._get_tests())
            data = _preserve_backslashes(data, myenv)
            try:
                t = myenv.from_string(data)
            except TemplateSyntaxError as e:
                raise AnsibleError("template error while templating string: %s" % str(e))
            except Exception as e:
                if 'recursion' in str(e):
                    raise AnsibleError("recursive loop detected in template string: %s" % data)
                else:
                    return data
            t.globals['lookup']   = self._lookup
            t.globals['finalize'] = self._finalize
            jvars = AnsibleJ2Vars(self, t.globals)
            new_context = t.new_context(jvars, shared=True)
            rf = t.root_render_func(new_context)
            try:
                res = j2_concat(rf)
            except TypeError as te:
                if 'StrictUndefined' in str(te):
                    raise AnsibleUndefinedVariable(
                        "Unable to look up a name or access an attribute in template string. " + \
                        "Make sure your variable name does not contain invalid characters like '-'."
                    )
                else:
                    debug("failing because of a type error, template data is: %s" % data)
                    raise AnsibleError("an unexpected type error occurred. Error was %s" % te)
            if preserve_trailing_newlines:
                # The low level calls above do not preserve the newline
                # characters at the end of the input data, so we use the
                # calculate the difference in newlines and append them
                # to the resulting output for parity
                res_newlines  = self._count_newlines_from_end(res)
                data_newlines = self._count_newlines_from_end(data)
                if data_newlines > res_newlines:
                    res += '\n' * (data_newlines - res_newlines)
            return res
        except (UndefinedError, AnsibleUndefinedVariable) as e:
            if fail_on_undefined:
                raise AnsibleUndefinedVariable(e)
            else:
                #TODO: return warning about undefined var
                return data
|
mcltn/ansible
|
lib/ansible/template/__init__.py
|
Python
|
gpl-3.0
| 15,952
|
# coding=utf-8
# Copyright 2022 RigL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for weight_symmetry.fixed_param."""
import glob
from os import path
import tempfile
from absl.testing import absltest
from absl.testing import flagsaver
from rigl.experimental.jax import fixed_param
class FixedParamTest(absltest.TestCase):

  def test_run(self):
    """Checks the fixed-parameter training driver completes end-to-end."""
    workdir = tempfile.mkdtemp()
    overrides = {'epochs': 1, 'experiment_dir': workdir}
    with flagsaver.flagsaver(**overrides):
      fixed_param.main([])
    with self.subTest(name='tf_summary_file_exists'):
      pattern = path.join(workdir, '*', 'events.out.tfevents.*')
      matches = glob.glob(pattern)
      self.assertTrue(len(matches) == 1 and path.exists(matches[0]))
# Standard absltest entry point: discover and run the cases in this module.
if __name__ == '__main__':
  absltest.main()
|
google-research/rigl
|
rigl/experimental/jax/fixed_param_test.py
|
Python
|
apache-2.0
| 1,424
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Hybrid LFP scheme example script, applying the methodology with the model of:
Potjans, T. and Diesmann, M. "The Cell-Type Specific Cortical Microcircuit:
Relating Structure and Activity in a Full-Scale Spiking Network Model".
Cereb. Cortex (2014) 24 (3): 785-806.
doi: 10.1093/cercor/bhs358
Synopsis of the main simulation procedure:
1. Loading of parameterset
a. network parameters
b. parameters for hybrid scheme
2. Set up file destinations for different simulation output
3. network simulation
a. execute network simulation using NEST (www.nest-initiative.org)
b. merge network output (spikes, currents, voltages)
4. Create a object-representation that uses sqlite3 of all the spiking output
5. Iterate over post-synaptic populations:
a. Create Population object with appropriate parameters for
each specific population
b. Run all computations for populations
c. Postprocess simulation output of all cells in population
6. Postprocess all cell- and population-specific output data
7. Create a tarball for all non-redundant simulation output
The full simulation can be evoked by issuing a mpirun call, such as
mpirun -np 64 python cellsim16pops.py
Given the size of the network and demands for the multi-compartment LFP-
predictions using the present scheme, running the model on nothing but a large-
scale compute facility is strongly discouraged.
'''
from cellsim16popsParams_modified_ac_exc import multicompartment_params, \
point_neuron_network_params
import os
import numpy as np
from time import time
import neuron # NEURON compiled with MPI must be imported before NEST and mpi4py
# to avoid NEURON being aware of MPI.
import nest # Import not used, but done in order to ensure correct execution
import nest_simulation
from hybridLFPy import PostProcess, Population, CachedNetwork, setup_file_dest
import nest_output_processing
import lfpykit
# set some seed values; SEED drives numpy's global RNG below, while
# SIMULATIONSEED is used to derive per-population seeds further down
SEED = 12345678
SIMULATIONSEED = 12345678
np.random.seed(SEED)
##########################################################################
# PARAMETERS
##########################################################################
# Full set of parameters including network parameters
params = multicompartment_params()
# set up the file destination; NOTE(review): clearDestination=True appears
# to wipe any previous output at the destination -- confirm in
# hybridLFPy.setup_file_dest before rerunning over valuable results
setup_file_dest(params, clearDestination=True)
###############################################################################
# MAIN simulation procedure
###############################################################################
# tic toc -- wall-clock reference for the timing printouts below
tic = time()
######## Perform network simulation ######################################
# initiate nest simulation with only the point neuron network parameter class
networkParams = point_neuron_network_params()
nest_simulation.sli_run(parameters=networkParams,
                        fname='microcircuit.sli',
                        verbosity='M_INFO')
# preprocess the gdf files containing spiking output, voltages, weighted and
# spatial input spikes and currents:
nest_output_processing.merge_gdf(networkParams,
                                 raw_label=networkParams.spike_recorder_label,
                                 file_type='dat',
                                 fileprefix=params.networkSimParams['label'],
                                 skiprows=3)
nest_output_processing.merge_gdf(networkParams,
                                 raw_label=networkParams.voltmeter_label,
                                 file_type='dat',
                                 fileprefix='voltages',
                                 skiprows=3)
nest_output_processing.merge_gdf(
    networkParams,
    raw_label=networkParams.weighted_input_spikes_label,
    file_type='dat',
    fileprefix='population_input_spikes',
    skiprows=3)
# spatial input currents (disabled)
# nest_output_processing.create_spatial_input_spikes_hdf5(networkParams,
#    fileprefix='depth_res_input_spikes-')
# create tar file archive of <raw_nest_output_path> folder as .dat files are
# no longer needed. Also removes .dat files
nest_output_processing.tar_raw_nest_output(params.raw_nest_output_path,
                                           delete_files=True)
# Create an object representation of the simulation output that uses sqlite3
networkSim = CachedNetwork(**params.networkSimParams)
toc = time() - tic
print('NEST simulation and gdf file processing done in %.3f seconds' % toc)
# Set up LFPykit measurement probes for LFPs and CSDs; the same probe list is
# shared by every Population and by the PostProcess step below
probes = []
probes.append(lfpykit.RecExtElectrode(cell=None, **params.electrodeParams))
probes.append(
    lfpykit.LaminarCurrentSourceDensity(
        cell=None,
        **params.CSDParams))
####### Set up populations ###############################################
# iterate over each cell type, and create a population object
for i, y in enumerate(params.y):
    # create population:
    pop = Population(
        # parent class
        cellParams=params.yCellParams[y],
        rand_rot_axis=params.rand_rot_axis[y],
        simulationParams=params.simulationParams,
        populationParams=params.populationParams[y],
        y=y,
        layerBoundaries=params.layerBoundaries,
        probes=probes,
        savelist=params.savelist,
        savefolder=params.savefolder,
        dt_output=params.dt_output,
        # offset the seed per population so populations draw distinct randomness
        POPULATIONSEED=SIMULATIONSEED + i,
        # daughter class kwargs
        X=params.X,
        networkSim=networkSim,
        k_yXL=params.k_yXL[y],
        synParams=params.synParams[y],
        synDelayLoc=params.synDelayLoc[y],
        synDelayScale=params.synDelayScale[y],
        J_yX=params.J_yX[y],
        tau_yX=params.tau_yX[y],
        recordSingleContribFrac=params.recordSingleContribFrac,
    )
    # run population simulation and collect the data
    pop.run()
    pop.collect_data()
    # object no longer needed; free memory before the next population
    del pop
####### Postprocess the simulation output ################################
# reset seed, but output should be deterministic from now on
np.random.seed(SIMULATIONSEED)
# do some postprocessing on the collected data, i.e., superposition
# of population LFPs, CSDs etc
postproc = PostProcess(y=params.y,
                       dt_output=params.dt_output,
                       probes=probes,
                       savefolder=params.savefolder,
                       mapping_Yy=params.mapping_Yy,
                       savelist=params.savelist
                       )
# run through the procedure
postproc.run()
# create tar-archive with output for plotting
postproc.create_tar_archive()
# tic toc -- total wall-clock time for the whole pipeline
print('Execution time: %.3f seconds' % (time() - tic))
|
espenhgn/hybridLFPy
|
examples/Hagen_et_al_2016_cercor/cellsim16pops_modified_ac_exc.py
|
Python
|
gpl-3.0
| 6,661
|
#!/usr/bin/env python3
import os
import sys
import uuid
import json
import argparse
import redis
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))
from worker_conf import REDIS_CONF
rs = redis.Redis(**REDIS_CONF)
def upload_submission(data, content_dirname, debug=False):
    """Store a submission's files in Redis and enqueue it for processing.

    :param data: dict describing the submission; mutated in place to receive
        a freshly generated 'uuid' key identifying this upload.
    :param content_dirname: directory whose files are uploaded as the
        submission content.
    :param debug: when True, print progress information to stdout.
    :return: the generated submission UUID (str).
    """
    data['uuid'] = str(uuid.uuid4())
    if debug:
        print('Submission UUID: ' + str(data['uuid']))
    for project_file in os.listdir(content_dirname):
        if debug:
            print('Uploading file: ' + project_file)
        # store each file's content in a per-submission Redis hash
        with open(os.path.join(content_dirname, project_file), 'r') as project_fh:
            rs.hset("submission:" + data['uuid'], "file:" + project_file, project_fh.read())
    queue_priority = "medium"
    seq_number = rs.incrby("queue:{}:counter".format(queue_priority), 1)
    # redis-py >= 3.0 removed the legacy positional zadd(name, member, score)
    # signature; members/scores must be passed as a {member: score} mapping
    rs.zadd("queue:{}:order".format(queue_priority), {data['uuid']: seq_number})
    position = rs.rpush("queue:{}".format(queue_priority), json.dumps(data))
    if debug:
        print('Submission sequence number: ' + str(seq_number))
        print('Current position in queue: ' + str(position))
    return data['uuid']
def standalone_main():
    """Command-line entry point: load an example submission and upload it."""
    arg_parser = argparse.ArgumentParser(prog='submit.py')
    arg_parser.add_argument('example_name', help='for instance: cpp')
    parsed = arg_parser.parse_args()
    json_path = 'submission/' + parsed.example_name + '.json'
    with open(json_path, 'r') as submission_file:
        payload = json.load(submission_file)
    upload_submission(payload, 'submission/' + parsed.example_name + '/', debug=True)

if __name__ == "__main__":
    standalone_main()
|
algochecker/algochecker-engine
|
contrib/submit.py
|
Python
|
mit
| 1,582
|
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2018-09-05
git sha : $Format:%H$
copyright : (C) 2018 by João P. Esperidião - Cartographic Engineer @ Brazilian Army
email : esperidiao.joao@eb.mil.br
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from DsgTools.gui.CustomWidgets.DatabaseConversionWidgets.SupportedDrivers.abstractSelectionWidget import AbstractSelectionWidget
from DsgTools.gui.CustomWidgets.ConnectionWidgets.AdvancedConnectionWidgets.databaseFileLineEdit import DatabaseFileLineEdit
from DsgTools.core.dsgEnums import DsgEnums
import os
class GeopackageWidget(AbstractSelectionWidget):
    """
    Selection widget specialized for Geopackage (.gpkg) datasources.
    """
    def __init__(self, parent=None):
        """
        Class constructor.
        :param parent: (QWidget) widget parent to newly instantiated geopackage widget.
        """
        super(GeopackageWidget, self).__init__(parent=parent)
        # this selection widget always represents a Geopackage datasource
        self.source = DsgEnums.Geopackage
        # build the concrete file-selection widget and configure its file dialog
        self.selectionWidget = self.getNewSelectionWidget(parent=parent)
        self.selectionWidget.driver = DsgEnums.DriverGeopackage
        self.selectionWidget.connectionSelectorLineEdit.caption = self.tr('Select a Geopackage Database')
        self.selectionWidget.connectionSelectorLineEdit.filter = self.tr('Geopackage Database (*.gpkg)')
    def getNewSelectionWidget(self, parent=None):
        """
        Instantiates the driver-specific selection widget.
        :param parent: (QWidget) widget parent to newly instantiated geopackage widget.
        :return: (QWidget) driver widget supported by conversion tool.
        """
        return DatabaseFileLineEdit(parent=parent)
    def getDatasourceConnectionName(self):
        """
        Gets the Geopackage connection name (file base name without extension).
        :return: (str) datasource connection name ('' when no path is set).
        """
        fullPath = self.selectionWidget.connectionSelectorLineEdit.lineEdit.text()
        if not fullPath:
            return ''
        # the path may use POSIX ('/') or Windows ('\\') separators
        separator = '/' if '/' in fullPath else '\\'
        fileName = fullPath.split(separator)[-1]
        return fileName.split('.')[0]
    def getDatasourcePath(self):
        """
        Gets the Geopackage database path prefixed with its driver scheme.
        :return: (str) datasource path name ('' when no datasource is loaded).
        """
        # only return a path when a valid datasource was loaded
        if not self.getDatasource():
            return ''
        return "gpkg:{0}".format(self.selectionWidget.connectionSelectorLineEdit.lineEdit.text())
    def setDatasource(self, newDatasource):
        """
        Sets the datasource selected on current widget.
        :param newDatasource: (dict) containing datasource name and its path.
        """
        if newDatasource:
            dsPath = list(newDatasource.values())[0]
            self.selectionWidget.connectionSelectorLineEdit.lineEdit.setText(dsPath)
    def getDatasource(self):
        """
        Gets the datasource selected on current widget.
        :return: (AbstractDb) the object representing the target datasource according to its driver.
        """
        return self.selectionWidget.abstractDb
|
lcoandrade/DsgTools
|
gui/CustomWidgets/DatabaseConversionWidgets/SupportedDrivers/geopackageWidget.py
|
Python
|
gpl-2.0
| 4,233
|
from future import standard_library
standard_library.install_aliases()
from builtins import range
from tkinter import *
class TabFrames(Frame):
    """A simple tabbed-notebook widget built from Tk Buttons and Frames.

    Each tab is a Button along the top edge; page bodies are Frames stacked
    inside a LabelFrame. Selecting a tab raises its button, re-packs its
    page frame and fires the optional leave/select callbacks.
    """
    def __call__(self, N): #return frame widget of screen number N
        return self.pageFramesL[N]
    def setSelectCallback(self, N, callback): #set selecting callback of screen number N
        self.selectCallbackL[N] = callback
    def setLeaveCallback(self, N, callback): #set leaving callback of screen number N
        self.leaveCallbackL[N] = callback
    def __init__(self, master, tabPixelWidthL=None, labelL=None,
        height=250, width=400, tabHeight=24, offx=4, offy=4):
        """Build the tab buttons and one page Frame per label.

        :param master: parent Tk widget.
        :param tabPixelWidthL: optional list of per-tab pixel widths;
            missing entries fall back to 40 pixels.
        :param labelL: list of tab label strings (defines the tab count).
        :param height: pixel height of the page area.
        :param width: pixel width of the page area.
        :param tabHeight: pixel height of the tab-button row.
        :param offx: horizontal outer padding in pixels.
        :param offy: vertical outer padding in pixels.
        """
        Frame.__init__(self, master)
        self.master = master
        self.tabPixelWidthL = tabPixelWidthL
        self.Ntabs = len(labelL)
        self.labelL = labelL
        self.tabHeight = tabHeight
        self.height = height
        self.width = width
        self.activeTab = None
        self.buttonL = []
        self.buttonIndexD = {}  # maps button widget -> integer index into buttonL
        self.pageFramesL = []
        self.selectCallbackL = []  # call when selecting page
        self.leaveCallbackL = []   # call when leaving page
        # frame that holds TabFrames (tab row on top, page area below)
        self.h, self.w = height + 2 * offy + tabHeight, width + 2 * offx
        frame = Frame(self, height=self.h, width=self.w)
        self.dhlf, self.dwlf = self.h - height, self.w - width
        lframe = LabelFrame(frame, text="", height=height, relief="groove", width=width)
        self.frame = frame
        self.lframe = lframe
        self.xlf = offx
        self.ylf = tabHeight + offy - 2
        lframe.place(x=self.xlf, y=self.ylf, width=width, height=height)
        lframe.configure(borderwidth=4)
        # make label buttons
        xb = offx * 2
        for N in range(self.Ntabs):
            try:
                pw = tabPixelWidthL[N]
            except (TypeError, IndexError):
                # no width list given (None) or list too short: use default
                pw = 40
            b = Button(frame, width="12", text=labelL[N])
            b.bind("<ButtonRelease-1>", self.tabClicked)
            self.buttonL.append(b)
            self.buttonIndexD[b] = N
            b.place(x=xb, y=offy, width=pw, height=tabHeight)
            b.configure(relief=SUNKEN)
            b.configure(borderwidth=2)
            b.lower(belowThis=self.lframe)
            xb += pw - 2
        # make frames for each page; only page 0 starts out packed
        for N in range(self.Ntabs):
            f = Frame(lframe)
            self.pageFramesL.append(f)
            self.selectCallbackL.append(None)  # start out assuming no callback
            self.leaveCallbackL.append(None)
            if N == 0:
                f.pack(side=TOP, anchor=W, expand=YES, fill=BOTH)
        self.frame.pack(side=TOP, anchor=W, expand=YES, fill=BOTH)
        self.selectTab(self.buttonL[0])
        self.frame.bind("<Configure>", self.ReConfigure)
    def ReConfigure(self, event):
        # keep the page area sized to the (possibly resized) outer frame
        w = int(self.frame.winfo_width())
        h = int(self.frame.winfo_height())
        self.lframe.place_forget()
        self.lframe.place(x=self.xlf, y=self.ylf, width=w - self.dwlf, height=h - self.dhlf)
    def tabClicked(self, event):
        # <ButtonRelease-1> handler bound to every tab button
        self.selectTab(event.widget)
    def selectTab(self, tab):
        """Make *tab* active: restyle buttons, swap page frames and fire the
        leave/select callbacks of the previous/new page."""
        if self.activeTab:
            # sink the previously active tab and remember its leave callback
            b = self.activeTab
            b.configure(relief=SUNKEN)
            b.configure(borderwidth=2)
            b.lower(belowThis=self.lframe)
            Nleave = self.buttonIndexD[self.activeTab]
            leaveCallback = self.leaveCallbackL[Nleave]
        else:
            leaveCallback = None
        for fr in self.pageFramesL:
            fr.forget()
        self.activeTab = tab
        tab.configure(relief=RAISED)
        tab.configure(borderwidth=4)
        tab.lift()
        N = self.buttonIndexD[tab]
        fr = self.pageFramesL[N]
        # leave callback of the old page fires before the new page's select callback
        if leaveCallback:
            leaveCallback()
        callback = self.selectCallbackL[N]
        if callback:
            callback()
        fr.pack(fill=BOTH, expand=1)
if __name__ == '__main__':
    # interactive demo: three tabs, two placed buttons per page
    root = Tk()
    root.title('TabFrames Test')
    mainFrame = Frame(root, width=600, height=300)
    LF = LabelFrame(mainFrame, text="Frame to hold TabFrames")
    tf = TabFrames(LF, tabPixelWidthL=[90, 120, 60],
                   labelL=['1234567890', 'xyzabcd', 'tab3'],
                   width=600, height=200)
    # each page gets a pair of demo buttons, shifted right per page
    for pageNum, xpos in ((0, 30), (1, 80), (2, 130)):
        page = tf(pageNum)
        Button(page, text='screen %d' % pageNum).place(x=xpos, y=20)
        Button(page, text='screen %dB' % pageNum).place(x=xpos, y=80)
    tf.pack(anchor=NW, fill=BOTH, side=TOP, expand=True)
    LF.pack(anchor=NW, fill=BOTH, side=TOP, expand=True)
    mainFrame.pack(anchor=NW, fill=BOTH, side=TOP, expand=True)
    root.mainloop()
|
sonofeft/XYmath
|
xymath/gui/tabframes.py
|
Python
|
gpl-3.0
| 6,103
|
__all__ = [
'null', 'Invalid', 'FieldsetErrors',
'Field', 'FieldFactory', 'Fieldset',
'field', 'fieldpreview', 'get_field_factory', 'get_field_preview',
'Term', 'Vocabulary',
'All','Function','Regex','Email','Range', 'Length','OneOf',
'CompositeField', 'CompositeError',
'InputField', 'OptionsField',
'VocabularyField', 'BaseChoiceField','BaseMultiChoiceField',
'TextField','IntegerField','FloatField',
'DecimalField','TextAreaField','FileField','LinesField','PasswordField',
'DateField','DateTimeField','RadioField','BoolField','ChoiceField',
'MultiChoiceField','MultiSelectField','TimezoneField',
'Form','FormWidgets',
'button','button2','Button','Buttons',
'AC_DEFAULT','AC_PRIMARY','AC_DANGER','AC_SUCCESS','AC_INFO','AC_WARNING',
'parse_date','includeme', 'reify',
]
from pyramid.decorator import reify
# validation
from .interfaces import null
from .interfaces import Invalid
# field
from .field import Field
from .field import FieldFactory
from .fieldset import Fieldset
from .fieldset import FieldsetErrors
# field registration
from .directives import field
from .directives import fieldpreview
from .directives import get_field_factory
from .directives import get_field_preview
# vocabulary
from .vocabulary import Term
from .vocabulary import Vocabulary
# validators
from .validator import All
from .validator import Function
from .validator import Regex
from .validator import Email
from .validator import Range
from .validator import Length
from .validator import OneOf
# helper class
from .field import InputField
# helper field classes
from .fields import VocabularyField
from .fields import BaseChoiceField
from .fields import BaseMultiChoiceField
# fields
from .fields import TextField
from .fields import IntegerField
from .fields import FloatField
from .fields import DecimalField
from .fields import TextAreaField
from .fields import FileField
from .fields import LinesField
from .fields import PasswordField
from .fields import DateField
from .fields import DateTimeField
from .fields import RadioField
from .fields import BoolField
from .fields import ChoiceField
from .fields import MultiChoiceField
from .fields import MultiSelectField
from .fields import TimezoneField
from .fields import OptionsField
# composite fields
from .composite import CompositeField
from .composite import CompositeError
# forms
from .form import Form
from .form import FormWidgets
# button
from .button import button
from .button import button2
from .button import Button
from .button import Buttons
from .button import AC_DEFAULT
from .button import AC_PRIMARY
from .button import AC_DANGER
from .button import AC_SUCCESS
from .button import AC_INFO
from .button import AC_WARNING
# iso date
from .iso8601 import parse_date
def includeme(config):
    """Pyramid ``includeme`` hook: pulls in djed.form's dependencies, registers
    the field directive and template layer, then scans this package."""
    # packages djed.form builds on
    for dependency in ('pyramid_chameleon', 'djed.renderer', 'djed.message'):
        config.include(dependency)
    # directive for registering new form fields
    from .directives import add_field
    config.add_directive('provide_form_field', add_field)
    # template layer
    config.add_layer('form', path='djed.form:templates/')
    # pick up package-level registrations
    config.scan('djed.form')
|
djedproject/djed.form
|
djed/form/__init__.py
|
Python
|
isc
| 3,170
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Configuration variables."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Directory (relative to the run location) holding the dataset files below.
DATA_DIR = 'data'
# Pickled binarized MNIST; the filename suggests the Salakhutdinov version,
# 2017-07-19 snapshot -- NOTE(review): confirm against the actual data release.
MNIST_BINARIZED = 'mnist_salakhutdinov_07-19-2017.pkl'
# MNIST training inputs stored as a NumPy .npy array file.
MNIST_FLOAT = 'mnist_train_xs_07-19-2017.npy'
# Omniglot dataset in MATLAB .mat format.
OMNIGLOT = 'omniglot_07-19-2017.mat'
|
cshallue/models
|
research/rebar/config.py
|
Python
|
apache-2.0
| 976
|
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fuel_agent.objects.bootloader import Grub
from fuel_agent.objects.configdrive import ConfigDriveCommon
from fuel_agent.objects.configdrive import ConfigDriveMcollective
from fuel_agent.objects.configdrive import ConfigDrivePuppet
from fuel_agent.objects.configdrive import ConfigDriveScheme
from fuel_agent.objects.device import Loop
from fuel_agent.objects.image import Image
from fuel_agent.objects.image import ImageScheme
from fuel_agent.objects.operating_system import OperatingSystem
from fuel_agent.objects.operating_system import Ubuntu
from fuel_agent.objects.partition import Fs
from fuel_agent.objects.partition import Lv
from fuel_agent.objects.partition import Md
from fuel_agent.objects.partition import Partition
from fuel_agent.objects.partition import PartitionScheme
from fuel_agent.objects.partition import Pv
from fuel_agent.objects.partition import Vg
from fuel_agent.objects.repo import DEBRepo
from fuel_agent.objects.repo import Repo
# Public re-export list: names importable via "from fuel_agent.objects
# import ...", grouped roughly by their module of origin above.
__all__ = [
    'Partition', 'Pv', 'Vg', 'Lv', 'Md', 'Fs', 'PartitionScheme',
    'ConfigDriveCommon', 'ConfigDrivePuppet', 'ConfigDriveMcollective',
    'ConfigDriveScheme', 'Image', 'ImageScheme', 'Grub',
    'OperatingSystem', 'Ubuntu',
    'Repo', 'DEBRepo',
    'Loop',
]
|
nebril/fuel-web
|
fuel_agent/fuel_agent/objects/__init__.py
|
Python
|
apache-2.0
| 1,819
|
from crankshaft.regression.gwr import *
from crankshaft.regression.glm import *
from crankshaft.regression.gwr_cs import *
|
CartoDB/crankshaft
|
src/py/crankshaft/crankshaft/regression/__init__.py
|
Python
|
bsd-3-clause
| 123
|
# $HeadURL$
__RCSID__ = "$Id$"
import types
from DIRAC import S_OK, S_ERROR
from DIRAC.ConfigurationSystem.Client.Config import gConfig
from DIRAC.ConfigurationSystem.Client.Helpers.CSGlobals import getVO
gBaseSecuritySection = "/Registry"
def getUsernameForDN( dn, usersList = False ):
  # Map a certificate DN to the registered DIRAC username owning it.
  # usersList may be supplied to avoid re-reading /Registry/Users; when
  # False the user list is fetched from the configuration service.
  if not usersList:
    retVal = gConfig.getSections( "%s/Users" % gBaseSecuritySection )
    if not retVal[ 'OK' ]:
      return retVal
    usersList = retVal[ 'Value' ]
  for username in usersList:
    if dn in gConfig.getValue( "%s/Users/%s/DN" % ( gBaseSecuritySection, username ), [] ):
      return S_OK( username )
  return S_ERROR( "No username found for dn %s" % dn )
def getDNForUsername( username ):
  # Inverse lookup: the list of DNs registered for a username
  # (S_ERROR when the user has none).
  dnList = gConfig.getValue( "%s/Users/%s/DN" % ( gBaseSecuritySection, username ), [] )
  if dnList:
    return S_OK( dnList )
  return S_ERROR( "No DN found for user %s" % username )
def getGroupsForDN( dn ):
  # Groups of the user owning the given DN (resolves DN -> username first).
  retVal = getUsernameForDN( dn )
  if not retVal[ 'OK' ]:
    return retVal
  return getGroupsForUser( retVal[ 'Value' ] )
def __getGroupsWithAttr( attrName, value ):
  # Return the sorted list of groups whose option <attrName> contains <value>
  # (S_ERROR when no group matches).
  retVal = gConfig.getSections( "%s/Groups" % gBaseSecuritySection )
  if not retVal[ 'OK' ]:
    return retVal
  groupsList = retVal[ 'Value' ]
  groups = []
  for group in groupsList:
    if value in gConfig.getValue( "%s/Groups/%s/%s" % ( gBaseSecuritySection, group, attrName ), [] ):
      groups.append( group )
  if not groups:
    return S_ERROR( "No groups found for %s=%s" % ( attrName,value ) )
  groups.sort()
  return S_OK( groups )
def getGroupsForUser( username ):
  # Groups listing the username in their Users option.
  return __getGroupsWithAttr( 'Users', username )
def getGroupsForVO( vo ):
  # NOTE(review): when a site-wide VO is configured (getVO() truthy) this
  # returns ALL groups regardless of the requested vo -- confirm intended.
  if getVO():
    return gConfig.getSections( "%s/Groups" % gBaseSecuritySection )
  return __getGroupsWithAttr( 'VO', vo )
def getGroupsWithProperty( propName ):
  # Groups whose Properties option contains propName.
  return __getGroupsWithAttr( "Properties", propName )
def getHostnameForDN( dn ):
  # Map a certificate DN to the registered host name owning it.
  retVal = gConfig.getSections( "%s/Hosts" % gBaseSecuritySection )
  if not retVal[ 'OK' ]:
    return retVal
  hostList = retVal[ 'Value' ]
  for hostname in hostList:
    if dn in gConfig.getValue( "%s/Hosts/%s/DN" % ( gBaseSecuritySection, hostname ), [] ):
      return S_OK( hostname )
  return S_ERROR( "No hostname found for dn %s" % dn )
def getDefaultUserGroup():
  # Registry-wide default group, falling back to "user".
  # NOTE(review): the leading "/" plus gBaseSecuritySection (itself
  # "/Registry") yields "//Registry/DefaultGroup" -- confirm gConfig
  # tolerates the double slash.
  return gConfig.getValue( "/%s/DefaultGroup" % gBaseSecuritySection, "user" )
def findDefaultGroupForDN( dn ):
  # Default group of the user owning the given DN.
  result = getUsernameForDN( dn )
  if not result[ 'OK' ]:
    return result
  return findDefaultGroupForUser( result[ 'Value' ] )
def findDefaultGroupForUser( userName ):
  # Choose the user's default group: the first candidate (user's own
  # DefaultGroup option followed by the registry-wide DefaultGroup list)
  # that the user actually belongs to; otherwise the user's first group.
  userDefGroups = getUserOption( userName, "DefaultGroup", [] )
  defGroups = userDefGroups + gConfig.getValue( "%s/DefaultGroup" % gBaseSecuritySection, [ "user" ] )
  result = getGroupsForUser( userName )
  if not result[ 'OK' ]:
    return result
  userGroups = result[ 'Value' ]
  for group in defGroups:
    if group in userGroups:
      return S_OK( group )
  if userGroups:
    return S_OK( userGroups[0] )
  return S_ERROR( "User %s has no groups" % userName )
def getAllUsers():
  # All usernames under /Registry/Users ([] when the section is unreadable).
  retVal = gConfig.getSections( "%s/Users" % gBaseSecuritySection )
  if not retVal[ 'OK' ]:
    return []
  return retVal[ 'Value' ]
def getAllGroups():
  # All group names under /Registry/Groups ([] when the section is unreadable).
  retVal = gConfig.getSections( "%s/Groups" % gBaseSecuritySection )
  if not retVal[ 'OK' ]:
    return []
  return retVal[ 'Value' ]
def getUsersInGroup( groupName, defaultValue = None ):
  # Usernames in a group's Users option; defaultValue (an empty list when
  # not supplied) is returned when the option is missing.
  # PEP 8: compare to None with "is", not "==" (identity, not equality).
  if defaultValue is None:
    defaultValue = []
  option = "%s/Groups/%s/Users" % ( gBaseSecuritySection, groupName )
  return gConfig.getValue( option, defaultValue )
def getDNsInGroup( groupName ):
  # Flattened list of the DNs of every user in the given group.
  DNs = []
  for user in getUsersInGroup( groupName ):
    result = getDNForUsername( user )
    if result[ 'OK' ]:
      DNs.extend( result[ 'Value' ] )
  return DNs
def getPropertiesForGroup( groupName, defaultValue = None ):
  # Properties option of a group; defaultValue (an empty list when not
  # supplied) is returned when the option is missing.
  # PEP 8: compare to None with "is", not "==" (identity, not equality).
  if defaultValue is None:
    defaultValue = []
  option = "%s/Groups/%s/Properties" % ( gBaseSecuritySection, groupName )
  return gConfig.getValue( option, defaultValue )
def getPropertiesForHost( hostName, defaultValue = None ):
  # Properties option of a host; same default handling as above.
  if defaultValue is None:
    defaultValue = []
  option = "%s/Hosts/%s/Properties" % ( gBaseSecuritySection, hostName )
  return gConfig.getValue( option, defaultValue )
def getPropertiesForEntity( group, name = "", dn = "", defaultValue = None ):
  # Properties for either a host (group == 'hosts', resolved by name or DN)
  # or a regular group.
  if defaultValue is None:
    defaultValue = []
  if group == 'hosts':
    if not name:
      result = getHostnameForDN( dn )
      if not result[ 'OK' ]:
        return defaultValue
      name = result[ 'Value' ]
    return getPropertiesForHost( name, defaultValue )
  else:
    return getPropertiesForGroup( group, defaultValue )
def __matchProps( sProps, rProps ):
  # Intersection helper: items of sProps also present in rProps,
  # preserving the order of sProps.
  foundProps = []
  for prop in sProps:
    if prop in rProps:
      foundProps.append( prop )
  return foundProps
def groupHasProperties( groupName, propList ):
  # Subset of propList that the group actually has (possibly empty list).
  # NOTE(review): types.StringTypes is Python 2 only -- this module will
  # not import under Python 3 without replacing it (e.g. with str).
  if type( propList ) in types.StringTypes:
    propList = [ propList ]
  return __matchProps( propList, getPropertiesForGroup( groupName ) )
def hostHasProperties( hostName, propList ):
  # Same as groupHasProperties, but against a host's Properties option.
  if type( propList ) in types.StringTypes:
    propList = [ propList ]
  return __matchProps( propList, getPropertiesForHost( hostName ) )
def getUserOption( userName, optName, defaultValue = "" ):
  # Read an arbitrary option from a user's registry section.
  return gConfig.getValue( "%s/Users/%s/%s" % ( gBaseSecuritySection, userName, optName ), defaultValue )
def getGroupOption( groupName, optName, defaultValue = "" ):
  # Read an arbitrary option from a group's registry section.
  return gConfig.getValue( "%s/Groups/%s/%s" % ( gBaseSecuritySection, groupName, optName ), defaultValue )
def getHostOption( hostName, optName, defaultValue = "" ):
  # Read an arbitrary option from a host's registry section.
  return gConfig.getValue( "%s/Hosts/%s/%s" % ( gBaseSecuritySection, hostName, optName ), defaultValue )
def getHosts():
  # All host sections under /Registry/Hosts (S_OK/S_ERROR structure).
  return gConfig.getSections( '%s/Hosts' % gBaseSecuritySection )
def getVOOption( voName, optName, defaultValue = "" ):
  # Read an arbitrary option from a VO's registry section.
  return gConfig.getValue( "%s/VO/%s/%s" % ( gBaseSecuritySection, voName, optName ), defaultValue )
def getBannedIPs():
  # List of banned IP addresses from the registry ([] by default).
  return gConfig.getValue( "%s/BannedIPs" % gBaseSecuritySection, [] )
def getVOForGroup( group ):
  # Site-wide VO takes precedence; otherwise the group's own VO option.
  voName = getVO()
  if voName:
    return voName
  return gConfig.getValue( "%s/Groups/%s/VO" % ( gBaseSecuritySection, group ), "" )
def getDefaultVOMSAttribute():
return gConfig.getValue( "%s/DefaultVOMSAttribute" % gBaseSecuritySection, "" )
def getVOMSAttributeForGroup( group ):
return gConfig.getValue( "%s/Groups/%s/VOMSRole" % ( gBaseSecuritySection, group ), getDefaultVOMSAttribute() )
def getDefaultVOMSVO():
vomsVO = gConfig.getValue( "%s/DefaultVOMSVO" % gBaseSecuritySection, "" )
if vomsVO:
return vomsVO
return getVO()
def getVOMSVOForGroup( group ):
vomsVO = gConfig.getValue( "%s/Groups/%s/VOMSVO" % ( gBaseSecuritySection, group ), getDefaultVOMSVO() )
if not vomsVO:
vo = getVOForGroup( group )
vomsVO = getVOOption( vo, 'VOMSName', '' )
return vomsVO
def getGroupsWithVOMSAttribute( vomsAttr ):
  """Return the names of all groups whose 'VOMSRole' option equals *vomsAttr*."""
  sectionsResult = gConfig.getSections( "%s/Groups" % ( gBaseSecuritySection ) )
  if not sectionsResult[ 'OK' ]:
    return []
  matching = []
  for groupName in sectionsResult[ 'Value' ]:
    groupRole = gConfig.getValue( "%s/Groups/%s/VOMSRole" % ( gBaseSecuritySection, groupName ), "" )
    if groupRole == vomsAttr:
      matching.append( groupName )
  return matching
|
avedaee/DIRAC
|
ConfigurationSystem/Client/Helpers/Registry.py
|
Python
|
gpl-3.0
| 7,183
|
# Copyright (c) 2014 INFN - "Istituto Nazionale di Fisica Nucleare" - Italy
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# following PEP 386
__version__ = "1.0.0"
# Explicit public API: the submodules re-exported by this package.
__all__ = [ "views", "backend", "urls", "forms" ]
|
opencityplatform/os-federation-addons
|
src/openstack_auth_fedext/__init__.py
|
Python
|
apache-2.0
| 746
|
from django.db import models
import datetime
class Profile(models.Model):
    # Customer contact details.
    first_name=models.CharField(max_length=30)
    last_name=models.CharField(max_length=30)
    address=models.CharField(max_length=30)
    location=models.CharField(max_length=30)
    phone_home=models.CharField(max_length=30)
    phone_mobile=models.CharField(max_length=30)
    email=models.EmailField()
    # favorites=models.ManyToMany(Favorites)
    # order=models.ForeignField(Order)
    def __unicode__(self):
        # Displayed as "First Last" in the admin / shell (Python 2 API).
        return "%s %s" % (self.first_name,self.last_name)
    class Meta:
        verbose_name_plural='Profiles'
class Favorites(models.Model):
    # Placeholder model - fields not implemented yet.
    #profile=mode
    pass
class Category(models.Model):
    """Top-level menu category (Crepe, Burger, Drinks, ...)."""
    # BUG FIX: this was a set literal ({...}), so the choices had no defined
    # order and appeared in arbitrary order in forms and the admin.  A tuple
    # of (value, label) pairs keeps a stable, intended ordering.
    CATEG_LIST=(
        ('Crepe','Crepe'),
        ('Club Sandwich','Club Sandwich'),
        ('Spaghetti','Spaghetti'),
        ('Sandwich','Sandwich'),
        ('Burger','Burger'),
        ('Salad','Salad'),
        ('Toast','Toast'),
        ('IceCream','IceCream'),
        ('Coffee','Coffee'),
        ('Dessert','Dessert'),
        ('Drinks','Drinks'),
        ('Soft Drinks','Soft Drinks'),
        ('Beer','Beer'),
        ('Wine','Wine'),
    )
    category_title=models.CharField(max_length=35,choices=CATEG_LIST,default='Crepe',unique=True)
    def __unicode__(self):
        return self.category_title
    class Meta:
        verbose_name_plural='Categories'
        ordering=['category_title']
class SubCategory(models.Model):
    # A named subdivision of a Category.
    category=models.ForeignKey(Category)
    sub_title=models.CharField(max_length=100)
    def __unicode__(self):
        return self.sub_title
    class Meta:
        ordering=['sub_title']
        verbose_name_plural='Sub Categories'
class Food(models.Model):
    """A menu item belonging to a Category, with a decimal price."""
    title=models.CharField(max_length=200)
    price=models.DecimalField(max_digits=4,decimal_places=2)
    category=models.ForeignKey(Category)
    description=models.TextField(blank=True)
    def __unicode__(self):
        # BUG FIX: '%d' truncated the Decimal price to whole units (dropping
        # cents) and is not portable for Decimal values across Python
        # versions; '%s' renders the full price.
        return "%s - %s " % (self.price,self.title)
    class Meta:
        ordering=['category']
class Topping(models.Model):
    # An extra that can be added to a food item, with its own price.
    topping_name=models.CharField(max_length=30)
    topping_description=models.TextField()
    topping_price=models.DecimalField(max_digits=4,decimal_places=2)
    def __unicode__(self):
        return "%s" % self.topping_name
class Offer(models.Model):
    """A promotional offer with a price and an optional description."""
    foodspot_offer=models.CharField(max_length=200)
    foodspot_offer_price=models.DecimalField(max_digits=4,decimal_places=2)
    foodspot_offer_description=models.CharField(max_length=100, null=True, blank=True)
    def get_offer(self):
        # BUG FIX: the attributes were referenced without 'self.', which
        # raised NameError whenever this method was called.
        return (self.foodspot_offer, self.foodspot_offer_price)
    def __unicode__(self):
        return self.foodspot_offer
    class Meta:
        verbose_name_plural='Offers'
class Photo(models.Model):
    # Placeholder model - image fields not enabled yet.
    #caption=models.CharField(max_length=200)
    #image=models.ImageField(upload_to='images')
    pass
class Menu(models.Model):
    # Placeholder model - not yet implemented.
    pass
class Choise(models.Model):
    # A single selection pointing at a Menu (sic: 'Choise' kept for compatibility).
    choise=models.ForeignKey(Menu)
class Order(models.Model):
    """An order placed by a Profile; order_status stays False until fulfilled."""
    order_profile=models.ForeignKey(Profile)
    order_choises=models.ForeignKey(Choise)
    order_date=models.DateTimeField(auto_now_add=True)
    order_status=models.BooleanField(default=False)
    class Meta:
        ordering=['order_date']
    def __unicode__(self):
        # BUG FIX: this returned None ('pass'), which crashes unicode()/str()
        # conversion (e.g. in the admin).  Return the order timestamp instead.
        return "%s" % self.order_date
|
b10n1k/foodspot69
|
foodspot/models.py
|
Python
|
gpl-2.0
| 3,321
|
from serial import Serial
# Dump raw bytes from the serial port forever (stop with Ctrl-C).
# NOTE(review): assumes the device is at /dev/ttyACM0 and 9600 baud -- adjust
# for other setups.  Serial.read() with no argument returns one byte at a time.
ser = Serial("/dev/ttyACM0", 9600)
while True:
    print(ser.read())
|
dkkline/CanSat14-15
|
tests/camera_test/serial_dump.py
|
Python
|
mit
| 98
|
from protorpc.messages import *
from protorpc.protojson import *
from protopigeon import *
from .component import *
|
yowmamasita/social-listener-exam
|
ferris/core/messages/__init__.py
|
Python
|
mit
| 116
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from datetime import datetime
import glob
import optparse
import os
import re
from telemetry import test
from telemetry.core import bitmap
from telemetry.page import page_test
# Root of the GPU test data checkout, resolved relative to this file.
test_data_dir = os.path.abspath(os.path.join(
    os.path.dirname(__file__), '..', '..', 'data', 'gpu'))
# Default output dir for images generated on mismatch, and the dir holding
# the golden reference images.
default_generated_data_dir = os.path.join(test_data_dir, 'generated')
default_reference_image_dir = os.path.join(test_data_dir, 'gpu_reference')
# JavaScript injected into each test page.  Pages report their outcome by
# calling domAutomationController.send("SUCCESS"/other); the harness records
# it in _succeeded/_finished, which the Python side polls.
test_harness_script = r"""
var domAutomationController = {};
domAutomationController._succeeded = false;
domAutomationController._finished = false;
domAutomationController.setAutomationId = function(id) {}
domAutomationController.send = function(msg) {
domAutomationController._finished = true;
if(msg.toLowerCase() == "success") {
domAutomationController._succeeded = true;
} else {
domAutomationController._succeeded = false;
}
}
window.domAutomationController = domAutomationController;
"""
class PixelTestFailure(Exception):
  # Pixel-test-specific failure type (not raised in this file).
  pass
def _DidTestSucceed(tab):
return tab.EvaluateJavaScript('domAutomationController._succeeded')
class PixelValidator(page_test.PageTest):
  """Captures a screenshot of each page and compares it to a reference image.

  On first run (no reference found) the captured screenshot becomes the new
  reference.  On mismatch, the reference, the failing capture and a diff
  image are written to the generated-images directory.
  """
  def __init__(self):
    super(PixelValidator, self).__init__('ValidatePage')
  def CustomizeBrowserOptions(self, options):
    # Needed for tab.Screenshot support.
    options.AppendExtraBrowserArgs('--enable-gpu-benchmarking')
  def ValidatePage(self, page, tab, results):
    """Validate one page: check harness success, then compare screenshots."""
    if not _DidTestSucceed(tab):
      raise page_test.Failure('Page indicated a failure')
    if not tab.screenshot_supported:
      raise page_test.Failure('Browser does not support screenshot capture')
    # 5-second timeout for the capture.
    screenshot = tab.Screenshot(5)
    if not screenshot:
      raise page_test.Failure('Could not capture screenshot')
    # Optionally restrict the comparison to a page-specified rectangle
    # (x, y, width, height).
    if hasattr(page, 'test_rect'):
      screenshot = screenshot.Crop(
          page.test_rect[0], page.test_rect[1],
          page.test_rect[2], page.test_rect[3])
    image_name = PixelValidator.UrlToImageName(page.display_name)
    ref_png = PixelValidator.GetReferenceImage(self.options.reference_dir,
        image_name, page.revision, screenshot)
    # Test new snapshot against existing reference image
    if not ref_png.IsEqual(screenshot, tolerance=2):
      PixelValidator.WriteErrorImages(self.options.generated_dir, image_name,
          self.options.build_revision, screenshot, ref_png)
      raise page_test.Failure('Reference image did not match captured screen')
  @staticmethod
  def UrlToImageName(url):
    """Turn a page URL into a filesystem-safe image base name."""
    image_name = re.sub(r'^(http|https|file)://(/*)', '', url)
    image_name = re.sub(r'\.\./', '', image_name)
    image_name = re.sub(r'(\.|/|-)', '_', image_name)
    return image_name
  @staticmethod
  def DeleteOldReferenceImages(ref_image_path, cur_revision):
    """Remove reference images whose revision suffix is older than cur_revision."""
    if not cur_revision:
      return
    old_revisions = glob.glob(ref_image_path + "_*.png")
    for rev_path in old_revisions:
      m = re.match(r'^.*_(\d+)\.png$', rev_path)
      if m and int(m.group(1)) < cur_revision:
        print 'Found deprecated reference image. Deleting rev ' + m.group(1)
        os.remove(rev_path)
  @staticmethod
  def GetReferenceImage(img_dir, img_name, cur_revision, screenshot):
    """Load the reference image for (img_name, cur_revision).

    If none exists, the current screenshot is saved as the new reference and
    returned (so the first run always passes).
    """
    if not cur_revision:
      cur_revision = 0
    image_path = os.path.join(img_dir, img_name)
    PixelValidator.DeleteOldReferenceImages(image_path, cur_revision)
    image_path = image_path + '_' + str(cur_revision) + '.png'
    try:
      ref_png = bitmap.Bitmap.FromPngFile(image_path)
    except IOError:
      ref_png = None
    if ref_png:
      return ref_png
    print 'Reference image not found. Writing tab contents as reference.'
    PixelValidator.WriteImage(image_path, screenshot)
    return screenshot
  @staticmethod
  def WriteErrorImages(img_dir, img_name, build_revision, screenshot, ref_png):
    """Write reference, failing capture and diff images for a mismatch."""
    full_image_name = img_name + '_' + str(build_revision)
    full_image_name = full_image_name + '.png'
    # Save the reference image
    # This ensures that we get the right revision number
    PixelValidator.WriteImage(
        os.path.join(img_dir, full_image_name), ref_png)
    PixelValidator.WriteImage(
        os.path.join(img_dir, 'FAIL_' + full_image_name), screenshot)
    diff_png = screenshot.Diff(ref_png)
    PixelValidator.WriteImage(
        os.path.join(img_dir, 'DIFF_' + full_image_name), diff_png)
  @staticmethod
  def WriteImage(image_path, png_image):
    """Write png_image to image_path, creating the directory if needed."""
    output_dir = os.path.dirname(image_path)
    if not os.path.exists(output_dir):
      os.makedirs(output_dir)
    png_image.WritePngFile(image_path)
class Pixel(test.Test):
  """Telemetry test entry point wiring PixelValidator to the pixel page set."""
  test = PixelValidator
  page_set = 'page_sets/pixel_tests.json'
  @staticmethod
  def AddTestCommandLineOptions(parser):
    """Register the pixel-test command line options."""
    group = optparse.OptionGroup(parser, 'Pixel test options')
    group.add_option('--generated-dir',
        help='Overrides the default location for generated test images that do '
        'not match reference images',
        default=default_generated_data_dir)
    group.add_option('--reference-dir',
        help='Overrides the default location for reference images',
        default=default_reference_image_dir)
    group.add_option('--build-revision',
        help='Chrome revision being tested.',
        default="unknownrev")
    parser.add_option_group(group)
  def CreatePageSet(self, options):
    # Inject the JS harness into every page in the set.
    page_set = super(Pixel, self).CreatePageSet(options)
    for page in page_set.pages:
      page.script_to_evaluate_on_commit = test_harness_script
    return page_set
|
qtekfun/htcDesire820Kernel
|
external/chromium_org/content/test/gpu/gpu_tests/pixel.py
|
Python
|
gpl-2.0
| 5,592
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os import system
import sys
from PyQt5 import QtGui, QtCore, QtWidgets
# Persistent application settings stored under organization 'qrdp',
# application 'qrdp1'.
settings = QtCore.QSettings('qrdp', 'qrdp1')
# NOTE(review): on a fresh profile these values are None until a host has
# been added; the code below treats 'servers'/'credentials' as dicts --
# verify first-run behaviour.
servers = settings.value('servers')
credentials = settings.value('credentials')
resolution = settings.value('resolution')
def main():
    """Create the Qt application, main window and tray icon, then run the event loop."""
    app = QtWidgets.QApplication(sys.argv)
    app.setApplicationName("qrdp")
    # The main window is a module-level global so the tray icon and menu
    # actions can reach it.
    global mw
    mw = MainWindow()
    mw.show()
    trayWidget = SystemTrayIcon()
    trayWidget.show()
    sys.exit(app.exec_())
class MainWindow(QtWidgets.QMainWindow):
    """Main window: a scrollable list of saved RDP hosts plus controls.

    Clicking a host button either launches rdesktop or, when the
    'Remove host' checkbox is ticked, deletes the host entry.
    """
    def __init__(self, parent = None):
        super(MainWindow, self).__init__(parent)
        self.setWindowTitle('qrdp')
        self.addButton = QtWidgets.QPushButton('New connection')
        self.addButton.clicked.connect(self.addWidget)
        # When checked, clicking a host button removes it instead of connecting.
        self.cb = QtWidgets.QCheckBox('Remove host', self)
        self.resolution = QtWidgets.QLineEdit(self)
        self.resolution.setObjectName("resolution")
        # NOTE(review): 'unicode' exists only on Python 2; this decides whether
        # a stored resolution string exists -- confirm interpreter version.
        if type(resolution) != unicode:
            self.resolution.setText("Enter resolution")
        else:
            self.resolution.setText(resolution)
        self.resolution.editingFinished.connect(self.handleEditingFinished)
        self.scrollLayout = QtWidgets.QFormLayout()
        self.scrollWidget = QtWidgets.QWidget()
        self.scrollWidget.setLayout(self.scrollLayout)
        self.scrollArea = QtWidgets.QScrollArea()
        self.scrollArea.setWidgetResizable(True)
        self.scrollArea.setWidget(self.scrollWidget)
        self.mainLayout = QtWidgets.QVBoxLayout()
        self.mainLayout.addWidget(self.addButton)
        self.mainLayout.addWidget(self.resolution)
        self.mainLayout.addWidget(self.cb)
        self.check = False
        # One button per stored host.
        for item in servers:
            srv = servers.get(item)
            btn = QtWidgets.QPushButton(srv, self)
            btn.clicked.connect(self.buttonClicked)
            btn.resize(btn.sizeHint())
            btn.setToolTip(srv)
            self.scrollLayout.addRow(btn)
        self.mainLayout.addWidget(self.scrollArea)
        self.centralWidget = QtWidgets.QWidget()
        self.centralWidget.setLayout(self.mainLayout)
        self.setCentralWidget(self.centralWidget)
        self.cb.stateChanged.connect(self.cb_changed)
    def handleEditingFinished(self):
        """Persist the resolution string when the user finishes editing it."""
        if self.resolution.isModified():
            settings = QtCore.QSettings('qrdp', 'qrdp1')
            text = mw.resolution.text()
            settings.setValue('resolution', text)
            del settings
            self.resolution.setModified(False)
    def addWidget(self):
        """Open the add-host dialog and append the resulting button."""
        if mw.isHidden() == True:
            mw.show()
        self.scrollLayout.addRow(newHostButton())
    def cb_changed(self, state):
        # Track whether 'Remove host' mode is active.
        self.check = (state == QtCore.Qt.Checked)
    def closeEvent(self, event):
        # Minimise to the tray instead of quitting.
        event.ignore()
        self.hide()
    def buttonClicked(self):
        """Connect to the clicked host, or delete it in remove mode."""
        # BUG FIX: 'servers' and 'credentials' were assigned in the else
        # branch, which made them function-locals throughout buttonClicked
        # and raised UnboundLocalError in the removal branch.  Declare them
        # global so both branches use the module-level dictionaries.
        global servers, credentials
        sender = self.sender()
        host = sender.text()
        settings = QtCore.QSettings('qrdp', 'qrdp1')
        if self.check is True:
            sender.deleteLater()
            # Iterate over a copy so deletion during iteration is safe.
            for k, v in list(servers.items()):
                if v == host:
                    del servers[k]
                    del credentials[host]
            settings.setValue('servers', servers)
            settings.setValue('credentials', credentials)
            del settings
        else:
            settings = QtCore.QSettings('qrdp', 'qrdp1')
            servers = settings.value('servers')
            credentials = settings.value('credentials')
            resolution = settings.value('resolution')
            authSettings = credentials.get(host)
            user = str(authSettings[0])
            password = str(authSettings[1])
            isadm = authSettings[2]
            # isadm is either '' or ' -0' (console session flag).
            system("rdesktop -5 -K -r clipboard:CLIPBOARD -z -a 16 " + host + '' + isadm + " -g " + str(resolution) + " -u " + user + " -p " + password)
class dialogWindow(QtWidgets.QDialog):
    """Modal dialog for adding a new RDP host with its credentials."""
    def __init__(self):
        super(dialogWindow, self).__init__()
        self.setFixedSize(220,330)
        self.buttonBox = QtWidgets.QDialogButtonBox(self)
        self.textName = QtWidgets.QLineEdit(self)
        self.textPass = QtWidgets.QLineEdit(self)
        self.textHost = QtWidgets.QLineEdit(self)
        self.textName.setObjectName("Username")
        self.textName.setText("Username")
        self.textPass.setObjectName("Password")
        self.textPass.setText("Password")
        self.textHost.setObjectName("Hostname")
        self.textHost.setText("Hostname")
        self.textPass.setEchoMode(QtWidgets.QLineEdit.Password)
        self.cb = QtWidgets.QCheckBox('/admin', self)
        self.cb.move(140, 120)
        self.cb.stateChanged.connect(self.adminSession)
        self.cb.resize(self.cb.sizeHint())
        self.cb1 = QtWidgets.QCheckBox("Administrator", self)
        self.cb1.move(10, 120)
        self.cb1.resize(self.cb1.sizeHint())
        self.cb1.stateChanged.connect(self.admin)
        # rdesktop console-session flag; '' or ' -0'.
        self.isadm = ''
        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(self.textHost)
        layout.addWidget(self.textName)
        layout.addWidget(self.textPass)
        self.addButton = QtWidgets.QPushButton('Add server', self)
        self.addButton.resize(self.addButton.sizeHint())
        self.addButton.move(10, 10)
        self.addButton.clicked.connect(self.addHost)
        self.closeButton = QtWidgets.QPushButton('Cancel', self)
        self.closeButton.clicked.connect(self.deleteLater)
        self.closeButton.resize(self.closeButton.sizeHint())
        self.closeButton.move(120, 10)
        self.setGeometry(300, 300, 225, 350)
        self.exec_()
    def adminSession(self, state):
        """Toggle the rdesktop console-session flag."""
        # BUG FIX: the flag was never cleared when the box was unchecked,
        # so once checked it stuck for every later host.
        if state == QtCore.Qt.Checked:
            self.isadm = ' -0'
        else:
            self.isadm = ''
    def admin(self, st):
        """Preset the username to Administrator and tick /admin."""
        if st == QtCore.Qt.Checked:
            self.textName.setText("Administrator")
            self.cb.toggle()
    def addHost(self):
        """Persist the new host and its credentials to QSettings."""
        settings = QtCore.QSettings('qrdp', 'qrdp1')
        host = self.textHost.text()
        name = self.textName.text()
        password = self.textPass.text()
        # Smallest unused index in 1..999 becomes the host's key.
        servlen = range(1, 1000)
        newItem = [x for x in servlen if x not in servers.keys()]
        servers.update({newItem[0] : host})
        # BUG FIX: the complete dictionaries are saved now.  Previously only
        # the new entry was written to QSettings, wiping out every other
        # stored host and credential on each add.
        settings.setValue('servers', servers)
        credentials[host] = [name, password, self.isadm]
        settings.setValue('credentials', credentials)
        del settings
        self.accept()
class newHostButton(QtWidgets.QPushButton):
    # A host button created through the add-host dialog; the dialog blocks
    # (exec_ in its __init__) until the user confirms or cancels.
    def __init__(self, parent=None):
        super(newHostButton, self).__init__(parent)
        a = dialogWindow()
        self.setText(a.textHost.text())
        self.clicked.connect(mw.buttonClicked)
class SystemTrayIcon(QtWidgets.QSystemTrayIcon):
    # Tray icon: left click re-shows the main window, right click opens the menu.
    def __init__(self, parent=None):
        QtWidgets.QSystemTrayIcon.__init__(self, parent)
        self.setIcon(QtGui.QIcon.fromTheme("applications-system"))
        self.activated.connect(self.LeftClick)
        self.right_menu = RightClickMenu()
        self.setContextMenu(self.right_menu)
    def LeftClick(self, value):
        # 'Trigger' is the plain left-click activation reason.
        if value == QtWidgets.QSystemTrayIcon.Trigger:
            mw.show()
class RightClickMenu(QtWidgets.QMenu):
    # Tray context menu with 'AddServer' and 'Exit' actions.
    def __init__(self, parent=None):
        super(RightClickMenu, self).__init__(parent)
        icon = QtGui.QIcon.fromTheme("application-exit")
        icon2 = QtGui.QIcon.fromTheme("network-transmit")
        addServ = QtWidgets.QAction(icon2, "&AddServer", self)
        exitAction = QtWidgets.QAction(icon, "&Exit", self)
        exitAction.triggered.connect(self.exitApp)
        addServ.triggered.connect(mw.addWidget)
        self.addAction(addServ)
        self.addAction(exitAction)
    def exitApp(self):
        # Persist the resolution before quitting the application.
        settings = QtCore.QSettings('qrdp', 'qrdp1')
        text = mw.resolution.text()
        settings.setValue('resolution', text)
        del settings
        QtWidgets.QApplication.exit()
# Script entry point.
if __name__ == '__main__':
    main()
|
qtheya/qrdp
|
qrdp.py
|
Python
|
unlicense
| 8,057
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from blinkpy.common.checkout.baseline_optimizer import BaselineOptimizer, ResultDigest
from blinkpy.common.host_mock import MockHost
from blinkpy.common.system.filesystem_mock import MockFileSystem
from blinkpy.common.path_finder import PathFinder
from blinkpy.common.path_finder import RELATIVE_WEB_TESTS
from blinkpy.web_tests.builder_list import BuilderList
# Canonical all-PASS testharness.js outputs: baselines with this content are
# redundant and may be removed by the optimizer.
ALL_PASS_TESTHARNESS_RESULT = """This is a testharness.js-based test.
PASS woohoo
Harness: the test ran to completion.
"""
ALL_PASS_TESTHARNESS_RESULT2 = """This is a testharness.js-based test.
PASS woohoo
PASS yahoo
Harness: the test ran to completion.
"""
# Absolute path prefix of the mocked web_tests checkout.
MOCK_WEB_TESTS = '/mock-checkout/' + RELATIVE_WEB_TESTS
class BaselineOptimizerTest(unittest.TestCase):
    """Tests for BaselineOptimizer against a mocked port/builder configuration.

    Each test describes baselines as a mapping of directory -> file content,
    runs the optimizer, and asserts the resulting set of baselines.
    """
    def setUp(self):
        self.host = MockHost()
        self.fs = MockFileSystem()
        self.host.filesystem = self.fs
        # TODO(robertma): Even though we have mocked the builder list (and hence
        # all_port_names), we are still relying on the knowledge of currently
        # configured ports and their fallback order. Ideally, we should improve
        # MockPortFactory and use it.
        self.host.builders = BuilderList({
            'Fake Test Win10.20h2': {
                'port_name': 'win-win10.20h2',
                'specifiers': ['Win10.20h2', 'Release']
            },
            'Fake Test Win11': {
                'port_name': 'win-win11',
                'specifiers': ['Win11', 'Release']
            },
            'Fake Test Linux': {
                'port_name': 'linux-trusty',
                'specifiers': ['Trusty', 'Release']
            },
            'Fake Test Mac11.0': {
                'port_name': 'mac-mac11',
                'specifiers': ['Mac11', 'Release']
            },
            'Fake Test Mac10.15': {
                'port_name': 'mac-mac10.15',
                'specifiers': ['Mac10.15', 'Release']
            },
            'Fake Test Mac10.14': {
                'port_name': 'mac-mac10.14',
                'specifiers': ['Mac10.14', 'Release']
            },
            'Fake Test Mac10.13': {
                'port_name': 'mac-mac10.13',
                'specifiers': ['Mac10.13', 'Release']
            },
            'Fake Test Mac10.12': {
                'port_name': 'mac-mac10.12',
                'specifiers': ['Mac10.12', 'Release']
            },
        })
        # Note: this is a pre-assumption of the tests in this file. If this
        # assertion fails, port configurations are likely changed, and the
        # tests need to be adjusted accordingly.
        self.assertEqual(sorted(self.host.port_factory.all_port_names()), [
            'linux-trusty', 'mac-mac10.12', 'mac-mac10.13', 'mac-mac10.14',
            'mac-mac10.15', 'mac-mac11', 'win-win10.20h2', 'win-win11'
        ])
    def _assert_optimization(self,
                             results_by_directory,
                             directory_to_new_results,
                             baseline_dirname='',
                             suffix='txt'):
        """Write the given baselines, run the optimizer, and check the outcome.

        results_by_directory: dir -> content of the baseline before optimizing.
        directory_to_new_results: dir -> expected content afterwards; a value
            of None asserts the baseline was removed.  Directories present in
            results_by_directory but absent here must also end up removed.
        """
        web_tests_dir = PathFinder(self.fs).web_tests_dir()
        test_name = 'mock-test.html'
        baseline_name = 'mock-test-expected.' + suffix
        self.fs.write_text_file(
            self.fs.join(web_tests_dir, 'VirtualTestSuites'),
            '[{"prefix": "gpu", "bases": ["fast/canvas"], "args": ["--foo"]}]')
        for dirname, contents in results_by_directory.items():
            self.fs.write_text_file(
                self.fs.join(web_tests_dir, dirname, baseline_name), contents)
        baseline_optimizer = BaselineOptimizer(
            self.host, self.host.port_factory.get(),
            self.host.port_factory.all_port_names())
        self.assertTrue(
            baseline_optimizer.optimize(
                self.fs.join(baseline_dirname, test_name), suffix))
        for dirname, contents in directory_to_new_results.items():
            path = self.fs.join(web_tests_dir, dirname, baseline_name)
            if contents is None:
                # Check files that are explicitly marked as absent.
                self.assertFalse(
                    self.fs.exists(path),
                    '%s should not exist after optimization' % path)
            else:
                self.assertEqual(self.fs.read_text_file(path), contents,
                                 'Content of %s != "%s"' % (path, contents))
        for dirname in results_by_directory:
            path = self.fs.join(web_tests_dir, dirname, baseline_name)
            if (dirname not in directory_to_new_results
                    or directory_to_new_results[dirname] is None):
                self.assertFalse(
                    self.fs.exists(path),
                    '%s should not exist after optimization' % path)
    def _assert_reftest_optimization(self,
                                     results_by_directory,
                                     directory_to_new_results,
                                     test_path='',
                                     baseline_dirname=''):
        """Like _assert_optimization, but for a reftest (writes the -expected.html
        reference first and compares .png baselines)."""
        web_tests_dir = PathFinder(self.fs).web_tests_dir()
        self.fs.write_text_file(
            self.fs.join(web_tests_dir, test_path, 'mock-test-expected.html'),
            'ref')
        self._assert_optimization(
            results_by_directory,
            directory_to_new_results,
            baseline_dirname,
            suffix='png')
    def test_linux_redundant_with_win(self):
        self._assert_optimization({
            'platform/win': '1',
            'platform/linux': '1',
        }, {
            'platform/win': '1',
        })
    def test_covers_mac_win_linux(self):
        self._assert_optimization({
            'platform/mac': '1',
            'platform/win': '1',
            'platform/linux': '1',
        }, {
            '': '1',
        })
    def test_overwrites_root(self):
        self._assert_optimization({
            'platform/mac': '1',
            'platform/win': '1',
            'platform/linux': '1',
            '': '2',
        }, {
            '': '1',
        })
    def test_no_new_common_directory(self):
        self._assert_optimization({
            'platform/mac': '1',
            'platform/linux': '1',
            '': '2',
        }, {
            'platform/mac': '1',
            'platform/linux': '1',
            '': '2',
        })
    def test_local_optimization(self):
        self._assert_optimization({
            'platform/mac': '1',
            'platform/linux': '1',
            'platform/mac-mac10.14': '1',
        }, {
            'platform/mac': '1',
            'platform/linux': '1',
        })
    def test_local_optimization_skipping_a_port_in_the_middle(self):
        # mac-mac10.13 -> mac-mac10.14 -> mac
        self._assert_optimization({
            'platform/mac': '1',
            'platform/linux': '1',
            'platform/mac-mac10.13': '1',
        }, {
            'platform/mac': '1',
            'platform/linux': '1',
        })
    def test_baseline_redundant_with_root(self):
        self._assert_optimization({
            'platform/mac': '1',
            'platform/win': '2',
            '': '2',
        }, {
            'platform/mac': '1',
            '': '2',
        })
    def test_root_baseline_unused(self):
        self._assert_optimization({
            'platform/mac': '1',
            'platform/win': '2',
            '': '3',
        }, {
            'platform/mac': '1',
            'platform/win': '2',
        })
    def test_root_baseline_unused_and_non_existant(self):
        self._assert_optimization({
            'platform/mac': '1',
            'platform/win': '2',
        }, {
            'platform/mac': '1',
            'platform/win': '2',
        })
    def test_virtual_baseline_redundant_with_non_virtual(self):
        self._assert_optimization({
            'platform/win/virtual/gpu/fast/canvas': '2',
            'platform/win/fast/canvas': '2',
        }, {
            'platform/win/fast/canvas': '2',
        },
                                  baseline_dirname='virtual/gpu/fast/canvas')
    def test_virtual_baseline_redundant_with_non_virtual_fallback(self):
        # virtual linux -> virtual win -> virtual root -> linux -> win
        self._assert_optimization(
            {
                'platform/linux/virtual/gpu/fast/canvas': '2',
                'platform/win/fast/canvas': '2',
            }, {
                'platform/win/virtual/gpu/fast/canvas': None,
                'platform/win/fast/canvas': '2',
            },
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_virtual_baseline_redundant_with_actual_root(self):
        self._assert_optimization({
            'platform/win/virtual/gpu/fast/canvas': '2',
            'fast/canvas': '2',
        }, {
            'fast/canvas': '2',
        },
                                  baseline_dirname='virtual/gpu/fast/canvas')
    def test_virtual_root_redundant_with_actual_root(self):
        self._assert_optimization({
            'virtual/gpu/fast/canvas': '2',
            'fast/canvas': '2',
        }, {
            'fast/canvas': '2',
        },
                                  baseline_dirname='virtual/gpu/fast/canvas')
    def test_virtual_root_redundant_with_ancestors(self):
        self._assert_optimization({
            'virtual/gpu/fast/canvas': '2',
            'platform/mac/fast/canvas': '2',
            'platform/win/fast/canvas': '2',
        }, {
            'fast/canvas': '2',
        },
                                  baseline_dirname='virtual/gpu/fast/canvas')
    def test_virtual_root_not_redundant_with_ancestors(self):
        self._assert_optimization({
            'virtual/gpu/fast/canvas': '2',
            'platform/mac/fast/canvas': '1',
        }, {
            'virtual/gpu/fast/canvas': '2',
            'platform/mac/fast/canvas': '1',
        },
                                  baseline_dirname='virtual/gpu/fast/canvas')
    def test_virtual_covers_mac_win_linux(self):
        self._assert_optimization(
            {
                'platform/mac/virtual/gpu/fast/canvas': '1',
                'platform/win/virtual/gpu/fast/canvas': '1',
                'platform/linux/virtual/gpu/fast/canvas': '1',
            }, {
                'virtual/gpu/fast/canvas': '1',
            },
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_all_pass_testharness_at_root(self):
        self._assert_optimization({
            '': ALL_PASS_TESTHARNESS_RESULT
        }, {'': None})
    def test_all_pass_testharness_at_linux(self):
        self._assert_optimization({
            'platform/linux': ALL_PASS_TESTHARNESS_RESULT
        }, {'platform/linux': None})
    def test_all_pass_testharness_at_linux_and_win(self):
        # https://crbug.com/805008
        self._assert_optimization(
            {
                'platform/linux': ALL_PASS_TESTHARNESS_RESULT,
                'platform/win': ALL_PASS_TESTHARNESS_RESULT
            }, {
                'platform/linux': None,
                'platform/win': None
            })
    def test_all_pass_testharness_at_virtual_root(self):
        self._assert_optimization(
            {
                'virtual/gpu/fast/canvas': ALL_PASS_TESTHARNESS_RESULT
            }, {'virtual/gpu/fast/canvas': None},
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_all_pass_testharness_at_virtual_linux(self):
        self._assert_optimization(
            {
                'platform/linux/virtual/gpu/fast/canvas':
                ALL_PASS_TESTHARNESS_RESULT
            }, {'platform/linux/virtual/gpu/fast/canvas': None},
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_all_pass_testharness_can_be_updated(self):
        # https://crbug.com/866802
        self._assert_optimization(
            {
                'fast/canvas':
                'failure',
                'virtual/gpu/fast/canvas':
                ALL_PASS_TESTHARNESS_RESULT,
                'platform/win/virtual/gpu/fast/canvas':
                ALL_PASS_TESTHARNESS_RESULT2,
                'platform/mac/virtual/gpu/fast/canvas':
                ALL_PASS_TESTHARNESS_RESULT2,
            }, {
                'fast/canvas': 'failure',
                'virtual/gpu/fast/canvas': ALL_PASS_TESTHARNESS_RESULT2,
                'platform/win/virtual/gpu/fast/canvas': None,
                'platform/mac/virtual/gpu/fast/canvas': None,
            },
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_all_pass_testharness_falls_back_to_non_pass(self):
        # The all-PASS baseline needs to be preserved in this case.
        self._assert_optimization(
            {
                'platform/linux': ALL_PASS_TESTHARNESS_RESULT,
                '': '1'
            }, {
                'platform/linux': ALL_PASS_TESTHARNESS_RESULT,
                '': '1'
            })
    def test_virtual_all_pass_testharness_falls_back_to_base(self):
        # The all-PASS baseline needs to be preserved in this case.
        self._assert_optimization(
            {
                'virtual/gpu/fast/canvas': ALL_PASS_TESTHARNESS_RESULT,
                'platform/linux/fast/canvas': '1',
            }, {
                'virtual/gpu/fast/canvas': ALL_PASS_TESTHARNESS_RESULT,
                'platform/linux/fast/canvas': '1',
            },
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_empty_at_root(self):
        self._assert_optimization({'': ''}, {'': None})
    def test_empty_at_linux(self):
        self._assert_optimization({
            'platform/linux': ''
        }, {'platform/linux': None})
    def test_empty_at_linux_and_win(self):
        # https://crbug.com/805008
        self._assert_optimization({
            'platform/linux': '',
            'platform/win': '',
        }, {
            'platform/linux': None,
            'platform/win': None,
        })
    def test_empty_at_virtual_root(self):
        self._assert_optimization({
            'virtual/gpu/fast/canvas': ''
        }, {'virtual/gpu/fast/canvas': None},
                                  baseline_dirname='virtual/gpu/fast/canvas')
    def test_empty_at_virtual_linux(self):
        self._assert_optimization(
            {
                'platform/linux/virtual/gpu/fast/canvas': ''
            }, {'platform/linux/virtual/gpu/fast/canvas': None},
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_empty_falls_back_to_non_empty(self):
        # The empty baseline needs to be preserved in this case.
        self._assert_optimization({
            'platform/linux': '',
            '': '1',
        }, {
            'platform/linux': '',
            '': '1',
        })
    def test_virtual_empty_falls_back_to_non_empty(self):
        # The empty baseline needs to be preserved in this case.
        self._assert_optimization({
            'virtual/gpu/fast/canvas': '',
            'platform/linux/fast/canvas': '1',
        }, {
            'virtual/gpu/fast/canvas': '',
            'platform/linux/fast/canvas': '1',
        },
                                  baseline_dirname='virtual/gpu/fast/canvas')
    def test_extra_png_for_reftest_at_root(self):
        self._assert_reftest_optimization({'': 'extra'}, {'': None})
    def test_extra_png_for_reftest_at_linux(self):
        self._assert_reftest_optimization({
            'platform/linux': 'extra'
        }, {'platform/linux': None})
    def test_extra_png_for_reftest_at_linux_and_win(self):
        # https://crbug.com/805008
        self._assert_reftest_optimization({
            'platform/linux': 'extra1',
            'platform/win': 'extra2',
        }, {
            'platform/linux': None,
            'platform/win': None,
        })
    def test_extra_png_for_reftest_at_virtual_root(self):
        self._assert_reftest_optimization(
            {
                'virtual/gpu/fast/canvas': 'extra'
            }, {'virtual/gpu/fast/canvas': None},
            test_path='fast/canvas',
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_extra_png_for_reftest_at_virtual_linux(self):
        self._assert_reftest_optimization(
            {
                'platform/linux/virtual/gpu/fast/canvas': 'extra'
            }, {'platform/linux/virtual/gpu/fast/canvas': None},
            test_path='fast/canvas',
            baseline_dirname='virtual/gpu/fast/canvas')
    def test_extra_png_for_reftest_falls_back_to_base(self):
        # The extra png for reftest should be removed even if it's different
        # from the fallback.
        self._assert_reftest_optimization({
            'platform/linux': 'extra1',
            '': 'extra2',
        }, {
            'platform/linux': None,
            '': None,
        })
    def test_virtual_extra_png_for_reftest_falls_back_to_base(self):
        # The extra png for reftest should be removed even if it's different
        # from the fallback.
        self._assert_reftest_optimization(
            {
                'virtual/gpu/fast/canvas': 'extra',
                'platform/linux/fast/canvas': 'extra2',
            }, {
                'virtual/gpu/fast/canvas': None,
                'platform/linux/fast/canvas': None,
            },
            test_path='fast/canvas',
            baseline_dirname='virtual/gpu/fast/canvas')
    # Tests for protected methods - pylint: disable=protected-access
    def test_move_baselines(self):
        # Identical win/mac baselines should be promoted to the root.
        self.fs.write_text_file(MOCK_WEB_TESTS + 'VirtualTestSuites', '[]')
        self.fs.write_binary_file(
            MOCK_WEB_TESTS + 'platform/win/another/test-expected.txt',
            'result A')
        self.fs.write_binary_file(
            MOCK_WEB_TESTS + 'platform/mac/another/test-expected.txt',
            'result A')
        self.fs.write_binary_file(MOCK_WEB_TESTS + 'another/test-expected.txt',
                                  'result B')
        baseline_optimizer = BaselineOptimizer(
            self.host, self.host.port_factory.get(),
            self.host.port_factory.all_port_names())
        baseline_optimizer._move_baselines(
            'another/test-expected.txt', {
                MOCK_WEB_TESTS + 'platform/win': 'aaa',
                MOCK_WEB_TESTS + 'platform/mac': 'aaa',
                MOCK_WEB_TESTS[:-1]: 'bbb',
            }, {
                MOCK_WEB_TESTS[:-1]: 'aaa',
            })
        self.assertEqual(
            self.fs.read_binary_file(MOCK_WEB_TESTS +
                                     'another/test-expected.txt'), 'result A')
    def test_move_baselines_skip_git_commands(self):
        # Same as above, but with an extra entry in the new results mapping.
        self.fs.write_text_file(MOCK_WEB_TESTS + 'VirtualTestSuites', '[]')
        self.fs.write_binary_file(
            MOCK_WEB_TESTS + 'platform/win/another/test-expected.txt',
            'result A')
        self.fs.write_binary_file(
            MOCK_WEB_TESTS + 'platform/mac/another/test-expected.txt',
            'result A')
        self.fs.write_binary_file(MOCK_WEB_TESTS + 'another/test-expected.txt',
                                  'result B')
        baseline_optimizer = BaselineOptimizer(
            self.host, self.host.port_factory.get(),
            self.host.port_factory.all_port_names())
        baseline_optimizer._move_baselines(
            'another/test-expected.txt', {
                MOCK_WEB_TESTS + 'platform/win': 'aaa',
                MOCK_WEB_TESTS + 'platform/mac': 'aaa',
                MOCK_WEB_TESTS[:-1]: 'bbb',
            }, {
                MOCK_WEB_TESTS + 'platform/linux': 'bbb',
                MOCK_WEB_TESTS[:-1]: 'aaa',
            })
        self.assertEqual(
            self.fs.read_binary_file(MOCK_WEB_TESTS +
                                     'another/test-expected.txt'), 'result A')
class ResultDigestTest(unittest.TestCase):
    """Tests for ResultDigest's classification of "extra" results.

    "Extra" results (all-pass testharness output, empty baselines, and any
    PNG baseline of a reftest) are candidates for removal during baseline
    optimization.
    """

    def setUp(self):
        self.host = MockHost()
        self.fs = MockFileSystem()
        self.host.filesystem = self.fs
        # Two textually different all-pass testharness baselines.
        self.fs.write_text_file('/all-pass/foo-expected.txt',
                                ALL_PASS_TESTHARNESS_RESULT)
        self.fs.write_text_file('/all-pass/bar-expected.txt',
                                ALL_PASS_TESTHARNESS_RESULT2)
        self.fs.write_text_file('/failures/baz-expected.txt', 'failure')
        # PNG baselines used for the reftest cases.
        self.fs.write_binary_file('/others/reftest-expected.png', b'extra')
        self.fs.write_binary_file('/others/reftest2-expected.png', b'extra2')
        self.fs.write_text_file('/others/empty-expected.txt', '')
        self.fs.write_binary_file('/others/something-expected.png',
                                  b'Something')
        self.fs.write_binary_file('/others/empty-expected.png', b'')

    def test_all_pass_testharness_result(self):
        """All-pass testharness baselines are extra; failures are not."""
        self.assertTrue(
            ResultDigest(self.fs,
                         '/all-pass/foo-expected.txt').is_extra_result)
        self.assertTrue(
            ResultDigest(self.fs,
                         '/all-pass/bar-expected.txt').is_extra_result)
        self.assertFalse(
            ResultDigest(self.fs,
                         '/failures/baz-expected.txt').is_extra_result)

    def test_empty_result(self):
        """Empty text and PNG baselines count as extra."""
        self.assertFalse(
            ResultDigest(self.fs,
                         '/others/something-expected.png').is_extra_result)
        self.assertTrue(
            ResultDigest(self.fs,
                         '/others/empty-expected.txt').is_extra_result)
        self.assertTrue(
            ResultDigest(self.fs,
                         '/others/empty-expected.png').is_extra_result)

    def test_extra_png_for_reftest_result(self):
        """Any PNG baseline is extra when the test is a reftest."""
        self.assertFalse(
            ResultDigest(self.fs,
                         '/others/something-expected.png').is_extra_result)
        self.assertTrue(
            ResultDigest(
                self.fs, '/others/reftest-expected.png',
                is_reftest=True).is_extra_result)

    # NOTE(review): this repeats the first assertion of test_empty_result
    # and test_extra_png_for_reftest_result; it could be folded into them.
    def test_non_extra_result(self):
        """A non-empty PNG for a non-reftest is not extra."""
        self.assertFalse(
            ResultDigest(self.fs,
                         '/others/something-expected.png').is_extra_result)

    def test_implicit_extra_result(self):
        # Implicit empty equal to any extra result but not failures.
        implicit = ResultDigest(None, None)
        self.assertTrue(
            implicit == ResultDigest(self.fs, '/all-pass/foo-expected.txt'))
        self.assertTrue(
            implicit == ResultDigest(self.fs, '/all-pass/bar-expected.txt'))
        self.assertFalse(
            implicit == ResultDigest(self.fs, '/failures/baz-expected.txt'))
        self.assertTrue(implicit == ResultDigest(
            self.fs, '/others/reftest-expected.png', is_reftest=True))

    def test_different_all_pass_results(self):
        """Two distinct all-pass baselines are both extra but not equal."""
        x = ResultDigest(self.fs, '/all-pass/foo-expected.txt')
        y = ResultDigest(self.fs, '/all-pass/bar-expected.txt')
        self.assertTrue(x != y)
        self.assertFalse(x == y)

    def test_same_extra_png_for_reftest(self):
        """Extra reftest PNGs compare equal even with different contents."""
        x = ResultDigest(
            self.fs, '/others/reftest-expected.png', is_reftest=True)
        y = ResultDigest(
            self.fs, '/others/reftest2-expected.png', is_reftest=True)
        self.assertTrue(x == y)
        self.assertFalse(x != y)
|
chromium/chromium
|
third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer_unittest.py
|
Python
|
bsd-3-clause
| 24,752
|
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2003-2007 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import unittest
import wc.dns.exception
import wc.dns.ipv6
class TestNtoAAtoN(unittest.TestCase):
    """Round-trip tests for wc.dns.ipv6 inet_aton / inet_ntoa.

    NOTE(review): this module is Python 2 only -- it relies on
    ``str.decode('hex_codec')`` and byte-valued ``str`` literals, neither of
    which exists in Python 3.
    """

    # --- inet_aton: text form -> 16-byte packed form ---

    def test_aton1(self):
        a = wc.dns.ipv6.inet_aton('::')
        self.assertEqual(a, '\x00' * 16)

    def test_aton2(self):
        a = wc.dns.ipv6.inet_aton('::1')
        self.assertEqual(a, '\x00' * 15 + '\x01')

    def test_aton3(self):
        # Embedded IPv4 dotted-quad suffix.
        a = wc.dns.ipv6.inet_aton('::10.0.0.1')
        self.assertEqual(a, '\x00' * 12 + '\x0a\x00\x00\x01')

    def test_aton4(self):
        a = wc.dns.ipv6.inet_aton('abcd::dcba')
        self.assertEqual(a, '\xab\xcd' + '\x00' * 12 + '\xdc\xba')

    def test_aton5(self):
        # Fully specified address, no zero compression.
        a = wc.dns.ipv6.inet_aton('1:2:3:4:5:6:7:8')
        self.assertEqual(a,
                        '00010002000300040005000600070008'.decode('hex_codec'))

    # --- inet_aton: malformed input raises DNSSyntaxError ---

    def test_bad_aton1(self):
        # Too few groups and no '::'.
        def bad():
            a = wc.dns.ipv6.inet_aton('abcd:dcba')
        self.assertRaises(wc.dns.exception.DNSSyntaxError, bad)

    def test_bad_aton2(self):
        # '::' may appear at most once.
        def bad():
            a = wc.dns.ipv6.inet_aton('abcd::dcba::1')
        self.assertRaises(wc.dns.exception.DNSSyntaxError, bad)

    def test_bad_aton3(self):
        # Nine groups is one too many.
        def bad():
            a = wc.dns.ipv6.inet_aton('1:2:3:4:5:6:7:8:9')
        self.assertRaises(wc.dns.exception.DNSSyntaxError, bad)

    # --- inet_ntoa: 16-byte packed form -> canonical text form ---

    def test_ntoa1(self):
        b = '00010002000300040005000600070008'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '1:2:3:4:5:6:7:8')

    def test_ntoa2(self):
        b = '\x00' * 16
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::')

    def test_ntoa3(self):
        b = '\x00' * 15 + '\x01'
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::1')

    def test_ntoa4(self):
        b = '\x80' + '\x00' * 15
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '8000::')

    def test_ntoa5(self):
        # Leading zeros within a group are suppressed.
        b = '\x01\xcd' + '\x00' * 12 + '\x03\xef'
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '1cd::3ef')

    def test_ntoa6(self):
        # Only the longest zero run is compressed with '::'.
        b = 'ffff00000000ffff000000000000ffff'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, 'ffff:0:0:ffff::ffff')

    def test_ntoa7(self):
        b = '00000000ffff000000000000ffffffff'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '0:0:ffff::ffff:ffff')

    def test_ntoa8(self):
        b = 'ffff0000ffff00000000ffff00000000'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, 'ffff:0:ffff::ffff:0:0')

    def test_ntoa9(self):
        # '::'-prefixed addresses render an embedded IPv4 suffix.
        b = '0000000000000000000000000a000001'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::10.0.0.1')

    def test_ntoa10(self):
        b = '0000000000000000000000010a000001'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::1:a00:1')

    def test_ntoa11(self):
        # IPv4-mapped (::ffff:a.b.c.d) form.
        b = '00000000000000000000ffff0a000001'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::ffff:10.0.0.1')

    def test_ntoa12(self):
        b = '000000000000000000000000ffffffff'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::255.255.255.255')

    def test_ntoa13(self):
        b = '00000000000000000000ffffffffffff'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::ffff:255.255.255.255')

    def test_ntoa14(self):
        b = '0000000000000000000000000001ffff'.decode('hex_codec')
        t = wc.dns.ipv6.inet_ntoa(b)
        self.assertEqual(t, '::0.1.255.255')

    # --- inet_ntoa: wrong input length raises ValueError ---

    def test_bad_ntoa1(self):
        def bad():
            a = wc.dns.ipv6.inet_ntoa('')
        self.assertRaises(ValueError, bad)

    def test_bad_ntoa2(self):
        def bad():
            a = wc.dns.ipv6.inet_ntoa('\x00' * 17)
        self.assertRaises(ValueError, bad)
|
HomeRad/TorCleaner
|
tests/dns/test_ntoaaton.py
|
Python
|
gpl-2.0
| 4,720
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Spotlight Volume configuration plist plugin."""
import unittest
from plaso.parsers.plist_plugins import spotlight_volume
from tests.parsers.plist_plugins import test_lib
class SpotlightVolumePluginTest(test_lib.PlistPluginTestCase):
  """Tests for the Spotlight Volume configuration plist plugin."""

  def testProcess(self):
    """Tests the Process function."""
    plist_name = 'VolumeConfiguration.plist'
    storage_writer = self._ParsePlistFileWithPlugin(
        spotlight_volume.SpotlightVolumePlugin(), [plist_name], plist_name)

    self.assertEqual(storage_writer.number_of_events, 2)
    self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
    self.assertEqual(storage_writer.number_of_recovery_warnings, 0)

    # The order in which PlistParser generates events is nondeterministic
    # hence we sort the events.
    events = list(storage_writer.GetSortedEvents())

    self.assertEqual(
        sorted(event.timestamp for event in events),
        [1369657656000000, 1372139683000000])

    expected_event_values = {
        'data_type': 'plist:key',
        'desc': (
            'Spotlight Volume 4D4BFEB5-7FE6-4033-AAAA-AAAABBBBCCCCDDDD '
            '(/.MobileBackups) activated.'),
        'key': '',
        'root': '/Stores'}

    self.CheckEventValues(storage_writer, events[1], expected_event_values)
if __name__ == '__main__':
unittest.main()
|
kiddinn/plaso
|
tests/parsers/plist_plugins/spotlight_volume.py
|
Python
|
apache-2.0
| 1,536
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Script to install the Chrome OS fonts on Linux.
# This script can be run manually (as root), but is also run as part
# install-build-deps.sh.
from __future__ import print_function
import os
import shutil
import subprocess
import sys
# Download URL for each font tarball on the Chrome OS local mirror; the
# 'name' and 'version' keys of each SOURCES entry are substituted in.
URL_TEMPLATE = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
                'distfiles/%(name)s-%(version)s.tar.bz2')

# Taken from the media-fonts/<name> ebuilds in chromiumos-overlay.
# noto-cjk used to be here, but is removed because fc-cache takes too long
# regenerating the fontconfig cache (See crbug.com/697954.)
# TODO(jshin): Add it back when the above issue can be avoided.
SOURCES = [
    {
        'name': 'notofonts',
        'version': '20161129'
    }, {
        'name': 'robotofonts',
        'version': '2.132'
    }
]

# Sorted so that the '\n'.join(URLS) written to the stamp file compares
# stably against the stamp from a previous run (see main()).
URLS = sorted([URL_TEMPLATE % d for d in SOURCES])
# The fonts are installed under FONTS_DIR/chromeos.
FONTS_DIR = '/usr/local/share/fonts'
def main(args):
  """Download and install the Chrome OS fonts under FONTS_DIR/chromeos.

  Must be run as root on Linux. A stamp file records the URL list that was
  installed, so repeated runs are no-ops until SOURCES/URLS change.

  Returns:
    0 on success or when already up to date, 1 on any precondition failure.
  """
  if not sys.platform.startswith('linux'):
    print("Error: %s must be run on Linux." % __file__)
    return 1
  if os.getuid() != 0:
    print("Error: %s must be run as root." % __file__)
    return 1
  if not os.path.isdir(FONTS_DIR):
    print("Error: Destination directory does not exist: %s" % FONTS_DIR)
    return 1
  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
  stamp = os.path.join(dest_dir, ".stamp02")
  # Skip the download entirely when the stamp matches the current URL list.
  if os.path.exists(stamp):
    with open(stamp) as s:
      if s.read() == '\n'.join(URLS):
        print("Chrome OS fonts already up to date in %s." % dest_dir)
        return 0
  if os.path.isdir(dest_dir):
    shutil.rmtree(dest_dir)
  os.mkdir(dest_dir)
  # Bug fix: 0755/0644 are Python 2-only octal literals (a SyntaxError on
  # Python 3). 0o755/0o644 have the same value and are valid on 2.6+ and 3.
  os.chmod(dest_dir, 0o755)
  print("Installing Chrome OS fonts to %s." % dest_dir)
  for url in URLS:
    tarball = os.path.join(dest_dir, os.path.basename(url))
    subprocess.check_call(['curl', '-L', url, '-o', tarball])
    subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
                           '-xf', tarball, '-C', dest_dir])
    os.remove(tarball)
  readme = os.path.join(dest_dir, "README")
  with open(readme, 'w') as s:
    s.write("This directory and its contents are auto-generated.\n")
    s.write("It may be deleted and recreated. Do not modify.\n")
    s.write("Script: %s\n" % __file__)
  # Write the stamp last so a failed run is retried from scratch.
  with open(stamp, 'w') as s:
    s.write('\n'.join(URLS))
  # Normalize permissions of the extracted tree (renamed loop variables so
  # the 'dir' and 'file' builtins are not shadowed).
  for base, dirnames, filenames in os.walk(dest_dir):
    for dirname in dirnames:
      os.chmod(os.path.join(base, dirname), 0o755)
    for filename in filenames:
      os.chmod(os.path.join(base, filename), 0o644)
  print("""\
Chrome OS font rendering settings are specified using Fontconfig. If your
system's configuration doesn't match Chrome OS's (which vary for different
devices), fonts may be rendered with different subpixel rendering, subpixel
positioning, or hinting settings. This may affect font metrics.
Chrome OS's settings are stored in the media-libs/fontconfig package, which is
at src/third_party/chromiumos-overlay/media-libs/fontconfig in a Chrome OS
checkout. You can configure your system to match Chrome OS's defaults by
creating or editing a ~/.fonts.conf file:
<?xml version="1.0"?>
<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
<fontconfig>
<match target="font">
<edit name="antialias" mode="assign"><bool>true</bool></edit>
<edit name="autohint" mode="assign"><bool>true</bool></edit>
<edit name="hinting" mode="assign"><bool>true</bool></edit>
<edit name="hintstyle" mode="assign"><const>hintslight</const></edit>
<edit name="rgba" mode="assign"><const>rgb</const></edit>
</match>
</fontconfig>
To load additional per-font configs (and assuming you have Chrome OS checked
out), add the following immediately before the "</fontconfig>" line:
<include ignore_missing="yes">/path/to/src/third_party/chromiumos-overlay/media-libs/fontconfig/files/local.conf</include>
""")
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
|
endlessm/chromium-browser
|
build/linux/install-chromeos-fonts.py
|
Python
|
bsd-3-clause
| 4,006
|
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_utils import versionutils
import nova.conf
from nova.db import api as db
from nova import exception
from nova.i18n import _
from nova import objects
from nova.objects import base as obj_base
from nova.objects import fields
CONF = nova.conf.CONF
# TODO(berrange): Remove NovaObjectDictCompat
@obj_base.NovaObjectRegistry.register
class Network(obj_base.NovaPersistentObject, obj_base.NovaObject,
              obj_base.NovaObjectDictCompat):
    """A nova-network network, backed by the networks database table."""
    # Version 1.0: Initial version
    # Version 1.1: Added in_use_on_host()
    # Version 1.2: Added mtu, dhcp_server, enable_dhcp, share_address
    VERSION = '1.2'

    fields = {
        'id': fields.IntegerField(),
        'label': fields.StringField(),
        'injected': fields.BooleanField(),
        'cidr': fields.IPV4NetworkField(nullable=True),
        'cidr_v6': fields.IPV6NetworkField(nullable=True),
        'multi_host': fields.BooleanField(),
        'netmask': fields.IPV4AddressField(nullable=True),
        'gateway': fields.IPV4AddressField(nullable=True),
        'broadcast': fields.IPV4AddressField(nullable=True),
        'netmask_v6': fields.IPV6AddressField(nullable=True),
        'gateway_v6': fields.IPV6AddressField(nullable=True),
        'bridge': fields.StringField(nullable=True),
        'bridge_interface': fields.StringField(nullable=True),
        'dns1': fields.IPAddressField(nullable=True),
        'dns2': fields.IPAddressField(nullable=True),
        'vlan': fields.IntegerField(nullable=True),
        'vpn_public_address': fields.IPAddressField(nullable=True),
        'vpn_public_port': fields.IntegerField(nullable=True),
        'vpn_private_address': fields.IPAddressField(nullable=True),
        'dhcp_start': fields.IPV4AddressField(nullable=True),
        'rxtx_base': fields.IntegerField(nullable=True),
        'project_id': fields.UUIDField(nullable=True),
        'priority': fields.IntegerField(nullable=True),
        'host': fields.StringField(nullable=True),
        'uuid': fields.UUIDField(),
        'mtu': fields.IntegerField(nullable=True),
        'dhcp_server': fields.IPAddressField(nullable=True),
        'enable_dhcp': fields.BooleanField(),
        'share_address': fields.BooleanField(),
    }

    @staticmethod
    def _convert_legacy_ipv6_netmask(netmask):
        """Handle netmask_v6 possibilities from the database.

        Historically, this was stored as just an integral CIDR prefix,
        but in the future it should be stored as an actual netmask.
        Be tolerant of either here.
        """
        try:
            prefix = int(netmask)
            return netaddr.IPNetwork('1::/%i' % prefix).netmask
        except ValueError:
            pass
        try:
            return netaddr.IPNetwork(netmask).netmask
        except netaddr.AddrFormatError:
            raise ValueError(_('IPv6 netmask "%s" must be a netmask '
                               'or integral prefix') % netmask)

    def obj_make_compatible(self, primitive, target_version):
        """Strip the fields added in version 1.2 when serializing for
        consumers older than 1.2."""
        target_version = versionutils.convert_version_to_tuple(target_version)
        if target_version < (1, 2):
            if 'mtu' in primitive:
                del primitive['mtu']
            if 'enable_dhcp' in primitive:
                del primitive['enable_dhcp']
            if 'dhcp_server' in primitive:
                del primitive['dhcp_server']
            if 'share_address' in primitive:
                del primitive['share_address']

    @staticmethod
    def _from_db_object(context, network, db_network):
        """Populate a Network object from a DB row, normalizing legacy
        values as it goes. Returns the populated object with changes reset.
        """
        for field in network.fields:
            db_value = db_network[field]
            if field == 'netmask_v6' and db_value is not None:
                # Legacy rows may store a bare prefix length; normalize.
                db_value = network._convert_legacy_ipv6_netmask(db_value)
            elif field == 'dhcp_server' and db_value is None:
                # Fall back to the gateway when no DHCP server is recorded.
                db_value = db_network['gateway']
            network[field] = db_value
        network._context = context
        network.obj_reset_changes()
        return network

    @obj_base.remotable_classmethod
    def get_by_id(cls, context, network_id, project_only='allow_none'):
        """Look up a network by its integer id."""
        db_network = db.network_get(context, network_id,
                                    project_only=project_only)
        return cls._from_db_object(context, cls(), db_network)

    @obj_base.remotable_classmethod
    def get_by_uuid(cls, context, network_uuid):
        """Look up a network by its uuid."""
        db_network = db.network_get_by_uuid(context, network_uuid)
        return cls._from_db_object(context, cls(), db_network)

    @obj_base.remotable_classmethod
    def get_by_cidr(cls, context, cidr):
        """Look up a network by its IPv4 CIDR."""
        db_network = db.network_get_by_cidr(context, cidr)
        return cls._from_db_object(context, cls(), db_network)

    # TODO(stephenfin): This is no longer used and can be removed
    @obj_base.remotable_classmethod
    def associate(cls, context, project_id, network_id=None, force=False):
        db.network_associate(context, project_id, network_id=network_id,
                             force=force)

    # TODO(stephenfin): This is no longer used and can be removed
    @obj_base.remotable_classmethod
    def disassociate(cls, context, network_id, host=False, project=False):
        db.network_disassociate(context, network_id, host, project)

    @obj_base.remotable_classmethod
    def in_use_on_host(cls, context, network_id, host):
        """Return whether the network has fixed IPs in use on the host."""
        return db.network_in_use_on_host(context, network_id, host)

    def _get_primitive_changes(self):
        """Return obj_get_changes() with IPAddress values converted to
        strings so they can be handed to the DB layer."""
        changes = {}
        for key, value in self.obj_get_changes().items():
            if isinstance(value, netaddr.IPAddress):
                changes[key] = str(value)
            else:
                changes[key] = value
        return changes

    @obj_base.remotable
    def create(self):
        """Persist a new network row; raises if the object already has an id."""
        updates = self._get_primitive_changes()
        if 'id' in updates:
            raise exception.ObjectActionError(action='create',
                                              reason='already created')
        db_network = db.network_create_safe(self._context, updates)
        self._from_db_object(self._context, self, db_network)

    @obj_base.remotable
    def destroy(self):
        """Delete the backing row and mark this object deleted."""
        db.network_delete_safe(self._context, self.id)
        self.deleted = True
        self.obj_reset_changes(['deleted'])

    @obj_base.remotable
    def save(self):
        """Write pending changes to the database.

        'host' changes go through db.network_set_host(); any remaining
        changes go through db.network_update(). The object is refreshed
        from the resulting row when one is available.
        """
        context = self._context
        updates = self._get_primitive_changes()
        if 'netmask_v6' in updates:
            # NOTE(danms): For some reason, historical code stores the
            # IPv6 netmask as just the CIDR mask length, so convert that
            # back here before saving for now.
            updates['netmask_v6'] = netaddr.IPNetwork(
                updates['netmask_v6']).netmask
        set_host = 'host' in updates
        if set_host:
            db.network_set_host(context, self.id, updates.pop('host'))
        if updates:
            db_network = db.network_update(context, self.id, updates)
        elif set_host:
            # Only the host changed; re-read the row to refresh the object.
            db_network = db.network_get(context, self.id)
        else:
            db_network = None
        if db_network is not None:
            self._from_db_object(context, self, db_network)
@obj_base.NovaObjectRegistry.register
class NetworkList(obj_base.ObjectListBase, obj_base.NovaObject):
    """A list of Network objects."""
    # Version 1.0: Initial version
    # Version 1.1: Added get_by_project()
    # Version 1.2: Network <= version 1.2
    VERSION = '1.2'

    fields = {
        'objects': fields.ListOfObjectsField('Network'),
    }

    @obj_base.remotable_classmethod
    def get_all(cls, context, project_only='allow_none'):
        """Return all networks."""
        db_networks = db.network_get_all(context, project_only)
        return obj_base.obj_make_list(context, cls(context), objects.Network,
                                      db_networks)

    @obj_base.remotable_classmethod
    def get_by_uuids(cls, context, network_uuids, project_only='allow_none'):
        """Return the networks matching the given uuids."""
        db_networks = db.network_get_all_by_uuids(context, network_uuids,
                                                  project_only)
        return obj_base.obj_make_list(context, cls(context), objects.Network,
                                      db_networks)

    @obj_base.remotable_classmethod
    def get_by_host(cls, context, host):
        """Return the networks assigned to the given host."""
        db_networks = db.network_get_all_by_host(context, host)
        return obj_base.obj_make_list(context, cls(context), objects.Network,
                                      db_networks)

    @obj_base.remotable_classmethod
    def get_by_project(cls, context, project_id, associate=True):
        """Return the networks of a project, optionally associating one."""
        db_networks = db.project_get_networks(context, project_id,
                                              associate=associate)
        return obj_base.obj_make_list(context, cls(context), objects.Network,
                                      db_networks)
|
rahulunair/nova
|
nova/objects/network.py
|
Python
|
apache-2.0
| 9,406
|
# -*- coding: utf-8 -*-
import argparse
from getpass import getpass
from listener_service.tables import User
import config
import tabulate
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from passlib.hash import pbkdf2_sha512
def check_args(a):
    """Validate the username/password/nick command line arguments.

    Args:
        a: argparse.Namespace with ``username``, ``password`` and ``nick``
            attributes (any of which may be empty or None). An empty
            password or nick is skipped, not rejected.

    Returns:
        int: 0 when every supplied value is valid; 1 otherwise (a message
        for the first failing check is printed).
    """
    if not a.username:
        print("Username parameter is required for this operation.")
        return 1
    # The nested "if a.username:" of the original is redundant after the
    # guard above, so the check is flattened.
    if len(a.username) > 32 or len(a.username) < 4:
        print("Username must be between 4 and 32 characters long")
        return 1
    if a.password and len(a.password) < 8:
        # Bug fix: the old message read "must at least 8 characters".
        print("Password must be at least 8 characters long")
        return 1
    if a.nick and (len(a.nick) > 32 or len(a.nick) < 2):
        # Bug fix: the old message claimed "between 4 and 32" although the
        # check accepts nicknames of 2 characters and longer.
        print("Nickname must be between 2 and 32 characters long")
        return 1
    return 0
def add_user(a):
    """Create a new user from the command line arguments.

    Prompts interactively for a password when none was supplied.
    Returns 0 on success, 1 on validation failure or commit error.
    """
    # Require password for new users. If one is not given via commandline, get it here.
    if not a.password or a.password == '':
        a.password = getpass("Password: ")
    # Check inputs
    ret_val = check_args(a)
    if ret_val != 0:
        return ret_val
    s = db_session()
    user = User()
    user.username = a.username
    user.nickname = a.nick
    # The level is stored as an index into userlevels_choices.
    user.level = userlevels_choices.index(a.level)
    # Never store the plaintext password; hash it with PBKDF2-SHA512.
    user.password = pbkdf2_sha512.encrypt(a.password)
    s.add(user)
    try:
        s.commit()
    except User.IntegrityError as e:
        # NOTE(review): IntegrityError is normally raised from
        # sqlalchemy.exc; presumably the User model re-exports it --
        # confirm, otherwise this except clause would itself fail.
        print("Error: {}".format(e.message))
        return 1
    finally:
        s.close()
    print("User {} succesfully added!".format(a.username))
    return 0
def del_user(a):
    """Soft-delete the user named in ``a.username`` by setting its
    deleted flag. Returns 0 on success, 1 on validation failure."""
    status = check_args(a)
    if status:
        return status
    session = db_session()
    target = User.get_one(session, username=a.username)
    target.deleted = True
    session.add(target)
    session.commit()
    session.close()
    print("User {} deleted".format(a.username))
    return 0
def edit_user(a):
    """Update nickname, privilege level and/or password of an existing user.

    Only the attributes actually supplied on the command line are changed.
    Returns 0 on success, 1 on validation failure or unknown username.
    """
    # Check if user wants to give password but not via commandline
    #if a.password == '':
    #    a.password = getpass("Password: ")
    ret_val = check_args(a)
    if ret_val != 0:
        return ret_val
    s = db_session()
    try:
        user = User.get_one(s, username=a.username)
        if a.nick:
            user.nickname = a.nick
        if a.level:
            user.level = userlevels_choices.index(a.level)
        if a.password:
            # Hash before storing; plaintext is never written to the DB.
            user.password = pbkdf2_sha512.encrypt(a.password)
        s.add(user)
        s.commit()
    except User.NoResultFound:
        # NOTE(review): NoResultFound normally lives in sqlalchemy.orm.exc;
        # presumably the User model re-exports it -- confirm.
        print("User {} not found.".format(a.username))
        return 1
    finally:
        s.close()
    print("User {} edited".format(a.username))
    return 0
def list_users(a):
    """Print a grid-formatted table of every user. Always returns 0."""
    session = db_session()
    rows = []
    for account in User.get_many(session):
        row = account.serialize()
        row['username'] = account.username
        # Show the symbolic level name rather than the stored index.
        row['level'] = userlevels_choices[row['level']]
        rows.append(row)
    session.close()
    headers = {
        'id': 'ID',
        'username': 'Username',
        'nickname': 'Nickname',
        'level': 'Level',
        'created_at': 'Created At',
        'last_contact': 'Last Contact At'
    }
    print(tabulate.tabulate(rows, headers, tablefmt="grid"))
    return 0
if __name__ == '__main__':
    # NOTE: the handler functions above read these module globals
    # (userlevels_choices, db_session), which are only defined when the
    # script is run directly.
    userlevels_choices = ['guest', 'user', 'admin']
    ops_choices = ['add', 'delete', 'edit', 'list']

    # Form the argument parser (first argument is positional and required)
    parser = argparse.ArgumentParser(description='Manage users for the website')
    parser.add_argument('operation', nargs='+', choices=ops_choices, help='Operation')
    parser.add_argument('--username', type=str, help='Username')
    parser.add_argument('--password', type=str, nargs='?', help='Password', default='')
    parser.add_argument('--nick', type=str, help='User nickname')
    parser.add_argument('--level', type=str, choices=userlevels_choices, help='User privilege level', default='user')
    args = parser.parse_args()

    # Initialize a database session
    db_session = sessionmaker()
    engine = create_engine(config.DATABASE_CONFIG, pool_recycle=3600)
    db_session.configure(bind=engine)

    # Find the correct operation function and call it with arguments as a parameter
    op = {
        'add': add_user,
        'delete': del_user,
        'edit': edit_user,
        'list': list_users
    }[args.operation[0]]
    # Exit with the handler's return code (0 = success, 1 = failure).
    exit(op(args))
|
katajakasa/aetherguild2
|
aetherguild/manage_users.py
|
Python
|
mit
| 4,375
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# aiohttp documentation build configuration file, created by
# sphinx-quickstart on Wed Mar 5 12:35:35 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import codecs
import re
# Extract the version components from aiohttp/__init__.py so the docs can
# report the exact package version.
_docs_path = os.path.dirname(__file__)
_version_path = os.path.abspath(os.path.join(_docs_path,
                                             '..', 'aiohttp', '__init__.py'))
with codecs.open(_version_path, 'r', 'latin1') as fp:
    _match = re.search(r"^__version__ = '"
                       r"(?P<major>\d+)"
                       r"\.(?P<minor>\d+)"
                       r"\.(?P<patch>\d+)"
                       r"(?P<tag>.*)?'$",
                       fp.read(), re.M)
    # Bug fix: re.search() returns None when nothing matches, so the old
    # ``.groupdict()`` call raised AttributeError and the ``except
    # IndexError`` fallback could never fire. Check explicitly instead.
    if _match is None:
        raise RuntimeError('Unable to determine version.')
    _version_info = _match.groupdict()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('.'))
# import alabaster
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode',
    'sphinx.ext.intersphinx',
    'alabaster',
    'sphinxcontrib.asyncio',
]

# Spell checking is optional: enable the extension only when it is
# installed locally.
try:
    import sphinxcontrib.spelling  # noqa
    extensions.append('sphinxcontrib.spelling')
except ImportError:
    pass

# Targets for cross-project references (:mod:, :class:, ...) resolved by
# sphinx.ext.intersphinx.
intersphinx_mapping = {
    'python': ('http://docs.python.org/3', None),
    'multidict':
        ('https://multidict.readthedocs.io/en/stable/', None),
    'yarl':
        ('https://yarl.readthedocs.io/en/stable/', None),
    'aiohttpjinja2':
        ('https://aiohttp-jinja2.readthedocs.io/en/stable/', None),
    'aiohttpsession':
        ('https://aiohttp-session.readthedocs.io/en/stable/', None)}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'aiohttp'
copyright = '2013-2017, Aiohttp contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '{major}.{minor}'.format(**_version_info)
# The full version, including alpha/beta/rc tags.
release = '{major}.{minor}.{patch}-{tag}'.format(**_version_info)
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# The default language to highlight source code in.
highlight_language = 'python3'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'logo': 'aiohttp-icon-128x128.png',
'description': 'http client/server for asyncio',
'github_user': 'aio-libs',
'github_repo': 'aiohttp',
'github_button': True,
'github_type': 'star',
'github_banner': True,
'travis_button': True,
'codecov_button': True,
'pre_bg': '#FFF6E5',
'note_bg': '#E5ECD1',
'note_border': '#BFCF8C',
'body_text': '#482C0A',
'sidebar_text': '#49443E',
'sidebar_header': '#4B4032',
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = [alabaster.get_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = 'aiohttp-icon.svg'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'aiohttp-icon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': [
'about.html', 'navigation.html', 'searchbox.html',
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'aiohttpdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'aiohttp.tex', 'aiohttp Documentation',
'aiohttp contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'aiohttp', 'aiohttp Documentation',
['aiohttp'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'aiohttp', 'aiohttp Documentation',
'Aiohttp contributors', 'aiohttp', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
|
playpauseandstop/aiohttp
|
docs/conf.py
|
Python
|
apache-2.0
| 10,410
|
import logging
from scrapy.downloadermiddlewares.retry import RetryMiddleware
from scrapy.utils.response import response_status_message
from scrapy.utils.misc import load_object
logger = logging.getLogger(__name__)
# Dotted paths to the User-Agent provider classes bundled with this package;
# FAKE_USERAGENT_PROVIDER_PATH is the compatibility default, FIXED_PROVIDER_PATH
# the last-resort fallback (single UA from the USER_AGENT setting).
FIXED_PROVIDER_PATH = 'scrapy_fake_useragent.providers.FixedUserAgentProvider'
FAKE_USERAGENT_PROVIDER_PATH = 'scrapy_fake_useragent.providers.FakeUserAgentProvider'
class RandomUserAgentBase:
    """Shared plumbing for the User-Agent middlewares.

    Resolves a User-Agent provider from the ``FAKEUSERAGENT_PROVIDERS``
    setting (first loadable path wins) and keeps a per-proxy UA cache when
    ``RANDOM_UA_PER_PROXY`` is enabled.
    """

    def __init__(self, crawler):
        self._ua_provider = self._get_provider(crawler)
        self._per_proxy = crawler.settings.get('RANDOM_UA_PER_PROXY', False)
        self._proxy2ua = {}

    def _get_provider(self, crawler):
        """Instantiate the first working provider, falling back to the fixed one."""
        self.providers_paths = crawler.settings.get('FAKEUSERAGENT_PROVIDERS', None)
        if not self.providers_paths:
            # Compatibility default: if the user set nothing, use fake-useragent.
            self.providers_paths = [FAKE_USERAGENT_PROVIDER_PATH]

        provider = None
        # Try each configured provider path in priority order.
        for path in self.providers_paths:
            try:
                provider = load_object(path)(crawler.settings)
                logger.debug("Loaded User-Agent provider: %s", path)
                break
            except Exception:  # Provider can throw anything
                logger.info('Error loading User-Agent provider: %s', path)

        if not provider:
            # Last resort: the fixed provider mirrors Scrapy's own behaviour
            # by serving the single UA from the USER_AGENT setting.
            logger.info('Unable to load any of the User-Agent providers')
            provider = load_object(FIXED_PROVIDER_PATH)(crawler.settings)

        logger.info("Using '%s' as the User-Agent provider", type(provider))
        return provider
class RandomUserAgentMiddleware(RandomUserAgentBase):
    """Downloader middleware that sets a (random) User-Agent on each request.

    With ``RANDOM_UA_PER_PROXY`` enabled, one UA is picked per proxy and
    reused for all of that proxy's requests; otherwise every request gets a
    fresh random UA. ``setdefault`` means an explicitly-set header wins.
    """

    def __init__(self, crawler):
        RandomUserAgentBase.__init__(self, crawler)

    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler)

    def process_request(self, request, spider):
        if self._per_proxy:
            proxy = request.meta.get('proxy')
            if proxy not in self._proxy2ua:
                self._proxy2ua[proxy] = self._ua_provider.get_random_ua()
                # Fix: pass lazy %-style arguments instead of eagerly
                # interpolating with `%` — the message is now only formatted
                # when DEBUG logging is actually enabled.
                logger.debug('Assign User-Agent %s to Proxy %s',
                             self._proxy2ua[proxy], proxy)
            request.headers.setdefault('User-Agent', self._proxy2ua[proxy])
        else:
            request.headers.setdefault('User-Agent',
                                       self._ua_provider.get_random_ua())
class RetryUserAgentMiddleware(RetryMiddleware, RandomUserAgentBase):
    """
    Get random User-Agent set on request retry.
    Use this middleware in place of the built-in RetryMiddleware.
    """
    def __init__(self, crawler):
        # RetryMiddleware is initialised from the settings object,
        # RandomUserAgentBase needs the whole crawler.
        RetryMiddleware.__init__(self, crawler.settings)
        RandomUserAgentBase.__init__(self, crawler)
    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler)
    def process_response(self, request, response, spider):
        # Honour Scrapy's per-request retry opt-out.
        if request.meta.get('dont_retry', False):
            return response
        if response.status in self.retry_http_codes:
            reason = response_status_message(response.status)
            # Rotate the UA before re-queuing so the retried request does not
            # reuse the header that just failed.
            request.headers['User-Agent'] = self._ua_provider.get_random_ua()
            # _retry returns None when retries are exhausted; fall back to
            # returning the response unchanged in that case.
            return self._retry(request, reason, spider) or response
        return response
    def process_exception(self, request, exception, spider):
        # Same rotation on network-level failures that RetryMiddleware retries.
        if isinstance(exception, self.EXCEPTIONS_TO_RETRY) \
                and not request.meta.get('dont_retry', False):
            request.headers['User-Agent'] = self._ua_provider.get_random_ua()
            return self._retry(request, exception, spider)
|
alecxe/scrapy-fake-useragent
|
scrapy_fake_useragent/middleware.py
|
Python
|
bsd-3-clause
| 3,861
|
#!/usr/bin/env python
# coding: utf-8
#
# Urwid tour. It slices, it dices..
# Copyright (C) 2004-2011 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
"""
Urwid tour. Shows many of the standard widget types and features.
"""
import urwid
import urwid.raw_display
import urwid.web_display
def main():
    """Build the tour's widget tree, palette and screen, then run the loop.

    Every standard flow widget type is demonstrated once inside a ListBox;
    F8 (handled in `unhandled`) exits the main loop.
    """
    # --- demo text fragments (markup lists use ('attr', text) tuples) ---
    text_header = (u"Welcome to the urwid tour! "
        u"UP / DOWN / PAGE UP / PAGE DOWN scroll. F8 exits.")
    text_intro = [('important', u"Text"),
        u" widgets are the most common in "
        u"any urwid program. This Text widget was created "
        u"without setting the wrap or align mode, so it "
        u"defaults to left alignment with wrapping on space "
        u"characters. ",
        ('important', u"Change the window width"),
        u" to see how the widgets on this page react. "
        u"This Text widget is wrapped with a ",
        ('important', u"Padding"),
        u" widget to keep it indented on the left and right."]
    text_right = (u"This Text widget is right aligned. Wrapped "
        u"words stay to the right as well. ")
    text_center = u"This one is center aligned."
    text_clip = (u"Text widgets may be clipped instead of wrapped.\n"
        u"Extra text is discarded instead of wrapped to the next line. "
        u"65-> 70-> 75-> 80-> 85-> 90-> 95-> 100>\n"
        u"Newlines embedded in the string are still respected.")
    text_right_clip = (u"This is a right aligned and clipped Text widget.\n"
        u"<100 <-95 <-90 <-85 <-80 <-75 <-70 <-65 "
        u"Text will be cut off at the left of this widget.")
    text_center_clip = (u"Center aligned and clipped widgets will have "
        u"text cut off both sides.")
    text_ellipsis = (u"Text can be clipped using the ellipsis character (…)\n"
        u"Extra text is discarded and a … mark is shown."
        u"50-> 55-> 60-> 65-> 70-> 75-> 80-> 85-> 90-> 95-> 100>\n"
        )
    text_any = (u"The 'any' wrap mode will wrap on any character. This "
        u"mode will not collapse space characters at the end of the "
        u"line but it still honors embedded newline characters.\n"
        u"Like this one.")
    text_padding = (u"Padding widgets have many options. This "
        u"is a standard Text widget wrapped with a Padding widget "
        u"with the alignment set to relative 20% and with its width "
        u"fixed at 40.")
    text_divider = [u"The ", ('important', u"Divider"),
        u" widget repeats the same character across the whole line. "
        u"It can also add blank lines above and below."]
    text_edit = [u"The ", ('important', u"Edit"),
        u" widget is a simple text editing widget. It supports cursor "
        u"movement and tries to maintain the current column when focus "
        u"moves to another edit widget. It wraps and aligns the same "
        u"way as Text widgets." ]
    text_edit_cap1 = ('editcp', u"This is a caption. Edit here: ")
    text_edit_text1 = u"editable stuff"
    text_edit_cap2 = ('editcp', u"This one supports newlines: ")
    text_edit_text2 = (u"line one starts them all\n"
        u"== line 2 == with some more text to edit.. words.. whee..\n"
        u"LINE III, the line to end lines one and two, unless you "
        u"change something.")
    text_edit_cap3 = ('editcp', u"This one is clipped, try "
        u"editing past the edge: ")
    text_edit_text3 = u"add some text here -> -> -> ...."
    text_edit_alignments = u"Different Alignments:"
    text_edit_left = u"left aligned (default)"
    text_edit_center = u"center aligned"
    text_edit_right = u"right aligned"
    text_intedit = ('editcp', [('important', u"IntEdit"),
        u" allows only numbers: "])
    text_edit_padding = ('editcp', u"Edit widget within a Padding widget ")
    text_columns1 = [('important', u"Columns"),
        u" are used to share horizontal screen space. "
        u"This one splits the space into two parts with "
        u"three characters between each column. The "
        u"contents of each column is a single widget."]
    text_columns2 = [u"When you need to put more than one "
        u"widget into a column you can use a ",('important',
        u"Pile"), u" to combine two or more widgets."]
    text_col_columns = u"Columns may be placed inside other columns."
    text_col_21 = u"Col 2.1"
    text_col_22 = u"Col 2.2"
    text_col_23 = u"Col 2.3"
    text_column_widths = (u"Columns may also have uneven relative "
        u"weights or fixed widths. Use a minimum width so that "
        u"columns don't become too small.")
    text_weight = u"Weight %d"
    text_fixed_9 = u"<Fixed 9>" # should be 9 columns wide
    text_fixed_14 = u"<--Fixed 14-->" # should be 14 columns wide
    text_edit_col_cap1 = ('editcp', u"Edit widget within Columns")
    text_edit_col_text1 = u"here's\nsome\ninfo"
    text_edit_col_cap2 = ('editcp', u"and within Pile ")
    text_edit_col_text2 = u"more"
    text_edit_col_cap3 = ('editcp', u"another ")
    text_edit_col_text3 = u"still more"
    text_gridflow = [u"A ",('important', u"GridFlow"), u" widget "
        u"may be used to display a list of flow widgets with equal "
        u"widths. Widgets that don't fit on the first line will "
        u"flow to the next. This is useful for small widgets that "
        u"you want to keep together such as ", ('important', u"Button"),
        u", ",('important', u"CheckBox"), u" and ",
        ('important', u"RadioButton"), u" widgets." ]
    text_button_list = [u"Yes", u"No", u"Perhaps", u"Certainly", u"Partially",
        u"Tuesdays Only", u"Help"]
    text_cb_list = [u"Wax", u"Wash", u"Buff", u"Clear Coat", u"Dry",
        u"Racing Stripe"]
    text_rb_list = [u"Morning", u"Afternoon", u"Evening", u"Weekend"]
    text_listbox = [u"All these widgets have been displayed "
        u"with the help of a ", ('important', u"ListBox"), u" widget. "
        u"ListBox widgets handle scrolling and changing focus. A ",
        ('important', u"Frame"), u" widget is used to keep the "
        u"instructions at the top of the screen."]
    def button_press(button):
        # Show which button was pressed in the frame's footer.
        frame.footer = urwid.AttrWrap(urwid.Text(
            [u"Pressed: ", button.get_label()]), 'header')
    # Shared group list so the RadioButtons below are mutually exclusive.
    radio_button_group = []
    blank = urwid.Divider()
    # Every demo widget, in display order; `blank` dividers separate sections.
    listbox_content = [
        blank,
        urwid.Padding(urwid.Text(text_intro), left=2, right=2, min_width=20),
        blank,
        urwid.Text(text_right, align='right'),
        blank,
        urwid.Text(text_center, align='center'),
        blank,
        urwid.Text(text_clip, wrap='clip'),
        blank,
        urwid.Text(text_right_clip, align='right', wrap='clip'),
        blank,
        urwid.Text(text_center_clip, align='center', wrap='clip'),
        blank,
        urwid.Text(text_ellipsis, wrap='ellipsis'),
        blank,
        urwid.Text(text_any, wrap='any'),
        blank,
        urwid.Padding(urwid.Text(text_padding), ('relative', 20), 40),
        blank,
        urwid.AttrWrap(urwid.Divider("=", 1), 'bright'),
        urwid.Padding(urwid.Text(text_divider), left=2, right=2, min_width=20),
        urwid.AttrWrap(urwid.Divider("-", 0, 1), 'bright'),
        blank,
        urwid.Padding(urwid.Text(text_edit), left=2, right=2, min_width=20),
        blank,
        urwid.AttrWrap(urwid.Edit(text_edit_cap1, text_edit_text1),
            'editbx', 'editfc'),
        blank,
        urwid.AttrWrap(urwid.Edit(text_edit_cap2, text_edit_text2,
            multiline=True ), 'editbx', 'editfc'),
        blank,
        urwid.AttrWrap(urwid.Edit(text_edit_cap3, text_edit_text3,
            wrap='clip' ), 'editbx', 'editfc'),
        blank,
        urwid.Text(text_edit_alignments),
        urwid.AttrWrap(urwid.Edit("", text_edit_left, align='left'),
            'editbx', 'editfc' ),
        urwid.AttrWrap(urwid.Edit("", text_edit_center,
            align='center'), 'editbx', 'editfc' ),
        urwid.AttrWrap(urwid.Edit("", text_edit_right, align='right'),
            'editbx', 'editfc' ),
        blank,
        urwid.AttrWrap(urwid.IntEdit(text_intedit, 123),
            'editbx', 'editfc' ),
        blank,
        urwid.Padding(urwid.AttrWrap(urwid.Edit(text_edit_padding, ""),
            'editbx','editfc' ), left=10, width=50),
        blank,
        blank,
        urwid.AttrWrap(urwid.Columns([
            urwid.Divider("."),
            urwid.Divider(","),
            urwid.Divider("."),
            ]), 'bright'),
        blank,
        urwid.Columns([
            urwid.Padding(urwid.Text(text_columns1), left=2, right=0,
                min_width=20),
            urwid.Pile([
                urwid.Divider("~"),
                urwid.Text(text_columns2),
                urwid.Divider("_")])
            ], 3),
        blank,
        blank,
        urwid.Columns([
            urwid.Text(text_col_columns),
            urwid.Columns([
                urwid.Text(text_col_21),
                urwid.Text(text_col_22),
                urwid.Text(text_col_23),
                ], 1),
            ], 2),
        blank,
        urwid.Padding(urwid.Text(text_column_widths), left=2, right=2,
            min_width=20),
        blank,
        urwid.Columns( [
            urwid.AttrWrap(urwid.Text(text_weight % 1),'reverse'),
            ('weight', 2, urwid.Text(text_weight % 2)),
            ('weight', 3, urwid.AttrWrap(urwid.Text(
                text_weight % 3), 'reverse')),
            ('weight', 4, urwid.Text(text_weight % 4)),
            ('weight', 5, urwid.AttrWrap(urwid.Text(
                text_weight % 5), 'reverse')),
            ('weight', 6, urwid.Text(text_weight % 6)),
            ], 0, min_width=8),
        blank,
        urwid.Columns([
            ('weight', 2, urwid.AttrWrap(urwid.Text(
                text_weight % 2), 'reverse')),
            ('fixed', 9, urwid.Text(text_fixed_9)),
            ('weight', 3, urwid.AttrWrap(urwid.Text(
                text_weight % 2), 'reverse')),
            ('fixed', 14, urwid.Text(text_fixed_14)),
            ], 0, min_width=8),
        blank,
        urwid.Columns([
            urwid.AttrWrap(urwid.Edit(text_edit_col_cap1,
                text_edit_col_text1, multiline=True),
                'editbx','editfc'),
            urwid.Pile([
                urwid.AttrWrap(urwid.Edit(
                    text_edit_col_cap2,
                    text_edit_col_text2),
                    'editbx','editfc'),
                blank,
                urwid.AttrWrap(urwid.Edit(
                    text_edit_col_cap3,
                    text_edit_col_text3),
                    'editbx','editfc'),
                ]),
            ], 1),
        blank,
        urwid.AttrWrap(urwid.Columns([
            urwid.Divider("'"),
            urwid.Divider('"'),
            urwid.Divider("~"),
            urwid.Divider('"'),
            urwid.Divider("'"),
            ]), 'bright'),
        blank,
        blank,
        urwid.Padding(urwid.Text(text_gridflow), left=2, right=2,
            min_width=20),
        blank,
        urwid.Padding(urwid.GridFlow(
            [urwid.AttrWrap(urwid.Button(txt, button_press),
                'buttn','buttnf') for txt in text_button_list],
            13, 3, 1, 'left'),
            left=4, right=3, min_width=13),
        blank,
        urwid.Padding(urwid.GridFlow(
            [urwid.AttrWrap(urwid.CheckBox(txt),'buttn','buttnf')
                for txt in text_cb_list],
            10, 3, 1, 'left') ,
            left=4, right=3, min_width=10),
        blank,
        urwid.Padding(urwid.GridFlow(
            [urwid.AttrWrap(urwid.RadioButton(radio_button_group,
                txt), 'buttn','buttnf')
                for txt in text_rb_list],
            13, 3, 1, 'left') ,
            left=4, right=3, min_width=13),
        blank,
        blank,
        urwid.Padding(urwid.Text(text_listbox), left=2, right=2,
            min_width=20),
        blank,
        blank,
        ]
    header = urwid.AttrWrap(urwid.Text(text_header), 'header')
    listbox = urwid.ListBox(urwid.SimpleListWalker(listbox_content))
    frame = urwid.Frame(urwid.AttrWrap(listbox, 'body'), header=header)
    # Display attributes used above, keyed by name.
    palette = [
        ('body','black','light gray', 'standout'),
        ('reverse','light gray','black'),
        ('header','white','dark red', 'bold'),
        ('important','dark blue','light gray',('standout','underline')),
        ('editfc','white', 'dark blue', 'bold'),
        ('editbx','light gray', 'dark blue'),
        ('editcp','black','light gray', 'standout'),
        ('bright','dark gray','light gray', ('bold','standout')),
        ('buttn','black','dark cyan'),
        ('buttnf','white','dark blue','bold'),
        ]
    # use appropriate Screen class
    if urwid.web_display.is_web_request():
        screen = urwid.web_display.Screen()
    else:
        screen = urwid.raw_display.Screen()
    def unhandled(key):
        # F8 quits the tour.
        if key == 'f8':
            raise urwid.ExitMainLoop()
    urwid.MainLoop(frame, palette, screen,
        unhandled_input=unhandled).run()
def setup():
    """Configure the web-display mode, then launch the tour."""
    urwid.web_display.set_preferences("Urwid Tour")
    # try to handle short web requests quickly
    if urwid.web_display.handle_short_request():
        return
    main()
# Run when executed directly or when served as a web request.
if '__main__'==__name__ or urwid.web_display.is_web_request():
    setup()
|
inducer/urwid
|
examples/tour.py
|
Python
|
lgpl-2.1
| 14,031
|
import fiona
import numpy as np
from numpy.linalg import norm
def compute_normals_2d(datadict):
    """Compute unit vertex normals for the closed polyline in
    ``datadict['coords']`` and store them in ``datadict['normals']``.

    Each vertex normal is the normalised sum of the unit normals of its two
    incident edges; neighbour lookup wraps around, so the polyline is
    treated as closed.
    """
    def _edge_normal(p, q):
        # Unit vector orthogonal to the edge p->q (90-degree CCW rotation).
        d = q - p
        v = np.array([-d[1], d[0]])
        return v / norm(v)

    pts = datadict['coords']
    count, dim = pts.shape
    datadict['normals'] = np.zeros((count, dim))
    for idx in range(count):
        cur = pts[idx]
        prev_pt = pts[idx - 1]            # negative index wraps to the last vertex
        next_pt = pts[(idx + 1) % count]  # wraps past the end to the first vertex
        summed = _edge_normal(prev_pt, cur) + _edge_normal(cur, next_pt)
        datadict['normals'][idx] = summed / norm(summed)
def add_noise(datadict, sigma, recompute_normals=True):
    """Add gaussian noise in normal direction"""
    from random import gauss
    for idx, direction in enumerate(datadict['normals']):
        # Displace each vertex along its normal by N(0, sigma).
        datadict['coords'][idx] += direction * gauss(0, sigma)
    if recompute_normals:
        compute_normals_2d(datadict)
def densify(datadict):
    """Insert the midpoint of every edge, doubling the vertex count.

    ``datadict['coords']`` (an (m, n) array of a closed polyline's vertices)
    is replaced in place by a (2m, n) array alternating the original
    vertices with edge midpoints; the last edge wraps back to the first
    vertex.

    Fixes vs. the original: the unconditional import of the unused project
    module ``algebra`` (an ImportError waiting to happen) and the unused
    ``math`` names were removed together with the dead, commented-out
    ball-fitting code, and ``xrange`` was replaced by ``range`` (works on
    both Python 2 and 3). Subdivision is plain linear midpoints, as the
    live code always was.
    """
    m, n = datadict['coords'].shape
    result = np.zeros((m * 2, n))
    for i in range(m):
        p = datadict['coords'][i]
        q = datadict['coords'][(i + 1) % m]  # wrap on the closing edge
        result[2 * i] = p
        result[2 * i + 1] = p + (q - p) / 2  # linear midpoint
    datadict['coords'] = result
def read_shp(infile, densify_n_times=0, roll=0):
    """Read a polygon ring from a shapefile into a datadict.

    @param infile           path to a shapefile readable by fiona
    @param densify_n_times  number of midpoint-subdivision passes to apply
    @param roll             how far to rotate the coordinate array so a
                            chosen vertex comes first; None disables rolling
    @return datadict with 'coords' (m, n) and 'normals' (m, n) arrays
    """
    datadict = {}
    # retrieve vertices from polygon in shp file
    shp_meta = None
    with fiona.open(infile, 'r') as shp:
        shp_meta = shp.meta
        # NOTE(review): if the file contains several features, only the last
        # one's exterior ring is kept -- confirm single-feature input is assumed.
        for f in shp:
            datadict['coords'] = f['geometry']['coordinates'][0]
    # drop the closing vertex; convert to numpy array
    datadict['coords'] = np.array(datadict['coords'][:-1])
    # roll the coords array so that we can control what point is first
    if roll is not None:  # bug fix: was `roll != None`
        datadict['coords'] = np.roll(datadict['coords'], shift=roll, axis=0)
    for i in range(densify_n_times):  # `range` works on Python 2 and 3
        densify(datadict)
    compute_normals_2d(datadict)
    # return shp_meta, datadict
    return datadict
|
Ylannl/masb2d
|
shp_reader.py
|
Python
|
gpl-3.0
| 2,999
|
import time
import unittest
from nive.definitions import *
from nive.tools import *
from nive.security import User
from nive.components.tools.example import configuration
import db_app
# -----------------------------------------------------------------
class ToolTest1(unittest.TestCase):
    """Standalone Tool configuration and parameter handling."""

    def test_tool(self):
        t = Tool(ToolConf(id="test",
                          data=[FieldConf(id="test", datatype="string")],
                          values={"test": "aaaaa"}), None)
        t.Run()
        # assertEqual/assertTrue replace the deprecated `assert_` alias and
        # produce clearer failure messages.
        self.assertEqual(t.ExtractValues()["test"], "aaaaa")
        t.AppliesTo("type1")
        self.assertTrue(t.GetAllParameters())
        self.assertTrue(t.GetParameter("test"))
class ToolTest(unittest.TestCase):
    """Tool lookup and execution through an application / object instance."""

    def setUp(self):
        self.app = db_app.app_db()

    def tearDown(self):
        self.app.Close()

    def test_toolapp(self):
        # Lookup by dotted path.
        t = self.app.GetTool("nive.components.tools.example")
        self.assertTrue(t)  # replaces the deprecated `assert_` alias
        r, v = t()
        self.assertTrue(r)

    def test_toolapp2(self):
        # Lookup by registered id after explicit registration.
        self.app.Register("nive.components.tools.example")
        t = self.app.GetTool("exampletool")
        self.assertTrue(t)
        r, v = t()
        self.assertTrue(r)

    def test_toolapp3(self):
        # Register a modified copy of the example configuration.
        c2 = ToolConf(configuration)
        c2.id = "exampletool2"
        c2.apply = (IApplication,)
        self.app.Register(c2)
        t = self.app.GetTool("exampletool2", self.app)
        self.assertTrue(t)
        r, v = t()
        self.assertTrue(r)

    def test_toolobj(self):
        # Tool lookup through a content object; clean up the object afterwards.
        user = User(u"test")
        r = self.app.root()
        o = db_app.createObj1(r)
        t = o.GetTool("nive.components.tools.example")
        r1, v = t()
        self.assertTrue(r1)
        r.Delete(o.GetID(), user=user)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
nive-cms/nive
|
nive/tests/test_tool.py
|
Python
|
gpl-3.0
| 1,714
|
from ._cardutil import unmungFlip
from .card import Card
from .cardset import CardSet, CardSetDB
from .color import Color
from .jsonify import EnVecEncoder, iloadJSON
from .multipart import MultipartDB, CardClass
from .rarity import Rarity
from .tutor import ChecklistPage, Tutor
from ._util import split_mana
|
jwodder/envec
|
envec/__init__.py
|
Python
|
mit
| 334
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2016, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Module providing a factory for instantiating a temporal memory instance."""
import inspect
from htmresearch.support.apical_tm_pair_monitor_mixin import (
ApicalTMPairMonitorMixin)
from nupic.algorithms.temporal_memory import TemporalMemory as TemporalMemoryPY
from nupic.bindings.algorithms import TemporalMemory as TemporalMemoryCPP
from nupic.algorithms.monitor_mixin.temporal_memory_monitor_mixin import (
TemporalMemoryMonitorMixin)
from htmresearch_core.experimental import ApicalTiebreakPairMemory
# Thin composition classes: each applies the matching monitor mixin to a
# temporal-memory implementation so its activity can be inspected.
class MonitoredTemporalMemoryPY(TemporalMemoryMonitorMixin, TemporalMemoryPY):
  pass
class MonitoredTemporalMemoryCPP(TemporalMemoryMonitorMixin, TemporalMemoryCPP):
  pass
class MonitoredApicalTiebreakPairMemoryCPP(ApicalTMPairMonitorMixin,
                                           ApicalTiebreakPairMemory):
  pass
class TemporalMemoryTypes(object):
  """ Enumeration of supported classification model types, mapping userland
  identifier to constructor. See createModel() for actual factory method
  implementation.
  """
  # Userland name -> implementation class.
  etm = ApicalTiebreakPairMemory
  monitored_etm = MonitoredApicalTiebreakPairMemoryCPP
  tm_py = TemporalMemoryPY
  tm_cpp = TemporalMemoryCPP
  monitored_tm_py = MonitoredTemporalMemoryPY
  monitored_tm_cpp = MonitoredTemporalMemoryCPP
  @classmethod
  def getTypes(cls):
    """ Get sequence of acceptable model types. Iterates through class
    attributes and separates the user-defined enumerations from the default
    attributes implicit to Python classes. i.e. this function returns the names
    of the attributes explicitly defined above.
    """
    # NOTE(review): any class-valued attribute passes this test, so dunder
    # names such as '__class__' are also yielded -- confirm callers only
    # ever pass the names listed above.
    for attrName in dir(cls):
      attrValue = getattr(cls, attrName)
      if (isinstance(attrValue, type)):
        yield attrName
def createModel(modelName, **kwargs):
  """
  Instantiate the temporal memory implementation registered under
  *modelName* on TemporalMemoryTypes.

  @param modelName (str)  A supported temporal memory type
  @param kwargs    (dict) Constructor arguments forwarded verbatim to the
                          selected class.
  """
  if modelName not in TemporalMemoryTypes.getTypes():
    raise RuntimeError("Unknown model type: " + modelName)
  modelClass = getattr(TemporalMemoryTypes, modelName)
  return modelClass(**kwargs)
def getConstructorArguments(modelName):
  """
  Return constructor arguments and associated default values for the
  given model type.
  @param modelName (str) A supported temporal memory type
  @return argNames (list of str) a list of strings corresponding to constructor
                                 arguments for the given model type, excluding
                                 'self'.
  @return defaults (list) a list of default values for each argument
  """
  if modelName not in TemporalMemoryTypes.getTypes():
    raise RuntimeError("Unknown model type: " + modelName)
  # NOTE: inspect.getargspec is deprecated (removed in Python 3.11); it is
  # kept here for the Python 2 era this code base targets.
  argspec = inspect.getargspec(
    getattr(TemporalMemoryTypes, modelName).__init__)
  # args[0] is 'self', hence the slice.
  return (argspec.args[1:], argspec.defaults)
|
ywcui1990/nupic.research
|
htmresearch/algorithms/temporal_memory_factory.py
|
Python
|
agpl-3.0
| 4,118
|
#!/usr/bin/python2.4
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Adaptation of the extern.tclib classes for our needs.
'''
import re
import types
from grit import exception
import grit.extern.tclib
def Identity(i):
  '''Default (no-op) escaping function for GetRealContent: returns i unchanged.'''
  return i
class BaseMessage(object):
  '''Base class with methods shared by Message and Translation.
  '''
  # NOTE: Python 2 code (dict.has_key, types.StringTypes, str/unicode mix).
  def __init__(self, text='', placeholders=[], description='', meaning=''):
    # parts holds the message as an ordered mix of plain strings and
    # Placeholder objects; placeholders holds just the Placeholder objects.
    self.parts = []
    self.placeholders = []
    self.description = description
    self.meaning = meaning
    self.dirty = True # True if self.id is (or might be) wrong
    self.id = 0
    if text != '':
      if not placeholders or placeholders == []:
        self.AppendText(text)
      else:
        # Map each placeholder's presentation name to [placeholder, use count].
        tag_map = {}
        for placeholder in placeholders:
          tag_map[placeholder.GetPresentation()] = [placeholder, 0]
        tag_re = '(' + '|'.join(tag_map.keys()) + ')'
        # This creates a regexp like '(TAG1|TAG2|TAG3)'
        chunked_text = re.split(tag_re, text)
        for chunk in chunked_text:
          if chunk: # ignore empty chunk
            if tag_map.has_key(chunk):
              self.AppendPlaceholder(tag_map[chunk][0])
              tag_map[chunk][1] += 1 # increase placeholder use count
            else:
              self.AppendText(chunk)
        # Every supplied placeholder must occur at least once in the text.
        for key in tag_map.keys():
          assert tag_map[key][1] != 0
  def GetRealContent(self, escaping_function=Identity):
    '''Returns the original content, i.e. what your application and users
    will see.
    Specify a function to escape each translateable bit, if you like.
    '''
    bits = []
    for item in self.parts:
      if isinstance(item, types.StringTypes):
        bits.append(escaping_function(item))
      else:
        # A Placeholder: substitute its original (untranslated) text.
        bits.append(item.GetOriginal())
    return ''.join(bits)
  def GetPresentableContent(self):
    '''Returns the message with placeholders shown by their presentation
    names (e.g. USERNAME) instead of their original text.'''
    presentable_content = []
    for part in self.parts:
      if isinstance(part, Placeholder):
        presentable_content.append(part.GetPresentation())
      else:
        presentable_content.append(part)
    return ''.join(presentable_content)
  def AppendPlaceholder(self, placeholder):
    '''Appends a placeholder to the message, deduplicating by presentation.'''
    assert isinstance(placeholder, Placeholder)
    dup = False
    for other in self.GetPlaceholders():
      if other.presentation == placeholder.presentation:
        # A repeated presentation must refer to the same original text.
        assert other.original == placeholder.original
        dup = True
    if not dup:
      self.placeholders.append(placeholder)
    self.parts.append(placeholder)
    self.dirty = True
  def AppendText(self, text):
    '''Appends a non-empty text chunk to the message.'''
    assert isinstance(text, types.StringTypes)
    assert text != ''
    self.parts.append(text)
    self.dirty = True
  def GetContent(self):
    '''Returns the parts of the message. You may modify parts if you wish.
    Note that you must not call GetId() on this object until you have finished
    modifying the contents.
    '''
    self.dirty = True # user might modify content
    return self.parts
  def GetDescription(self):
    return self.description
  def SetDescription(self, description):
    self.description = description
  def GetMeaning(self):
    return self.meaning
  def GetId(self):
    # Lazily recompute the id only when the content might have changed.
    if self.dirty:
      self.id = self.GenerateId()
      self.dirty = False
    return self.id
  def GenerateId(self):
    # Must use a UTF-8 encoded version of the presentable content, along with
    # the meaning attribute, to match the TC.
    return grit.extern.tclib.GenerateMessageId(
      self.GetPresentableContent().encode('utf-8'), self.meaning)
  def GetPlaceholders(self):
    return self.placeholders
  def FillTclibBaseMessage(self, msg):
    '''Copies description, text parts and placeholders into the
    extern.tclib message msg (all strings UTF-8 encoded).'''
    msg.SetDescription(self.description.encode('utf-8'))
    for part in self.parts:
      if isinstance(part, Placeholder):
        ph = grit.extern.tclib.Placeholder(
          part.presentation.encode('utf-8'),
          part.original.encode('utf-8'),
          part.example.encode('utf-8'))
        msg.AppendPlaceholder(ph)
      else:
        msg.AppendText(part.encode('utf-8'))
class Message(BaseMessage):
  '''A translateable message, optionally carrying a pre-assigned id.'''

  def __init__(self, text='', placeholders=[], description='', meaning='',
               assigned_id=None):
    BaseMessage.__init__(self, text, placeholders, description, meaning)
    self.assigned_id = assigned_id

  def ToTclibMessage(self):
    '''Builds and returns the equivalent extern.tclib Message.'''
    tclib_msg = grit.extern.tclib.Message('utf-8', meaning=self.meaning)
    self.FillTclibBaseMessage(tclib_msg)
    return tclib_msg

  def GetId(self):
    '''Use the assigned id if we have one.'''
    return self.assigned_id or BaseMessage.GetId(self)
class Translation(BaseMessage):
  '''A translation.'''

  def __init__(self, text='', id='', placeholders=[], description='', meaning=''):
    BaseMessage.__init__(self, text, placeholders, description, meaning)
    self.id = id

  def GetId(self):
    '''Returns the translation's id, which must have been set.'''
    # Bug fix: the original asserted on the *builtin* `id` function
    # (`assert id != ''`), which is always truthy, so a missing id was
    # never caught. Check the instance attribute instead.
    assert self.id != '', "ID has not been set."
    return self.id

  def SetId(self, id):
    self.id = id

  def ToTclibMessage(self):
    '''Builds and returns the equivalent extern.tclib Message.'''
    msg = grit.extern.tclib.Message(
      'utf-8', id=self.id, meaning=self.meaning)
    self.FillTclibBaseMessage(msg)
    return msg
class Placeholder(grit.extern.tclib.Placeholder):
  '''Modifies constructor to accept a Unicode string
  '''
  # Must match placeholder presentation names
  # NOTE(review): the pattern is unanchored and used with .match(), so a
  # presentation like 'NAME-X' validates via its 'NAME' prefix -- confirm
  # whether a full match ('[A-Za-z0-9_]+$') is intended.
  _NAME_RE = re.compile('[A-Za-z0-9_]+')
  def __init__(self, presentation, original, example):
    '''Creates a new placeholder.
    Args:
      presentation: 'USERNAME'
      original: '%s'
      example: 'Joi'
    '''
    # All three parts are required to be non-empty.
    assert presentation != ''
    assert original != ''
    assert example != ''
    if not self._NAME_RE.match(presentation):
      raise exception.InvalidPlaceholderName(presentation)
    self.presentation = presentation
    self.original = original
    self.example = example
  def GetPresentation(self):
    return self.presentation
  def GetOriginal(self):
    return self.original
  def GetExample(self):
    return self.example
|
rwatson/chromium-capsicum
|
tools/grit/grit/tclib.py
|
Python
|
bsd-3-clause
| 6,009
|
# -*- coding: utf-8 -*-
"""tick module
"""
# License: BSD 3 clause
import tick.base
|
X-DataInitiative/tick
|
tick/__init__.py
|
Python
|
bsd-3-clause
| 85
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import sickbeard
import xbmc
import kodi
import plex
import nmj
import nmjv2
import synoindex
import synologynotifier
import pytivo
import growl
import prowl
from . import libnotify
import pushover
import boxcar2
import nma
import pushalot
import pushbullet
import tweet
from lib import libtrakt
import emailnotify
from sickbeard.common import *
# Module-level singleton instances, one per supported notification target.
# home theater / nas
xbmc_notifier = xbmc.XBMCNotifier()
kodi_notifier = kodi.KODINotifier()
plex_notifier = plex.PLEXNotifier()
nmj_notifier = nmj.NMJNotifier()
nmjv2_notifier = nmjv2.NMJv2Notifier()
synoindex_notifier = synoindex.synoIndexNotifier()
synology_notifier = synologynotifier.synologyNotifier()
pytivo_notifier = pytivo.pyTivoNotifier()
# devices
growl_notifier = growl.GrowlNotifier()
prowl_notifier = prowl.ProwlNotifier()
libnotify_notifier = libnotify.LibnotifyNotifier()
pushover_notifier = pushover.PushoverNotifier()
boxcar2_notifier = boxcar2.Boxcar2Notifier()
nma_notifier = nma.NMA_Notifier()
pushalot_notifier = pushalot.PushalotNotifier()
pushbullet_notifier = pushbullet.PushbulletNotifier()
# social
twitter_notifier = tweet.TwitterNotifier()
#trakt_notifier = trakt.TraktNotifier()
email_notifier = emailnotify.EmailNotifier()
# Active notifier instances, iterated in order by the notify_* helpers.
notifiers = [
    libnotify_notifier, # Libnotify notifier goes first because it doesn't involve blocking on network activity.
    xbmc_notifier,
    kodi_notifier,
    plex_notifier,
    nmj_notifier,
    nmjv2_notifier,
    synoindex_notifier,
    synology_notifier,
    pytivo_notifier,
    growl_notifier,
    prowl_notifier,
    pushover_notifier,
    boxcar2_notifier,
    nma_notifier,
    pushalot_notifier,
    pushbullet_notifier,
    twitter_notifier,
    # trakt_notifier,
    email_notifier,
]
def notify_download(ep_name):
    """Tell every configured notifier that an episode finished downloading."""
    for notifier in notifiers:
        notifier.notify_download(ep_name)
def notify_subtitle_download(ep_name, lang):
    """Tell every configured notifier that a subtitle was downloaded."""
    for notifier in notifiers:
        notifier.notify_subtitle_download(ep_name, lang)
def notify_snatch(ep_name):
    """Tell every configured notifier that an episode was snatched."""
    for notifier in notifiers:
        notifier.notify_snatch(ep_name)
def notify_git_update(new_version=''):
    """Tell every configured notifier about an applied git update."""
    for notifier in notifiers:
        notifier.notify_git_update(new_version)
|
adam111316/SickGear
|
sickbeard/notifiers/__init__.py
|
Python
|
gpl-3.0
| 2,860
|
# Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
import floppyforms as forms
from ..attachments.models import Attachment
from ..core import forms as ccforms
from ..core.auth import admin
from ..core.filters import KeywordFilter
from ..products.models import Product, ProductList
from ..static.filters import TestCaseStatusFilter
from ..static.status import AttachmentType
from ..tags.filters import TagFilter
from ..tags.models import Tag, TagList
from ..testcases import increment
from ..testcases.bulk import BulkParser
from ..testcases.forms import StepFormSet
from ..testcases.models import (
TestSuite, TestSuiteList, TestCaseVersion, TestCaseList, TestCaseStep)
from ..testexecution.models import (
TestCycle, TestCycleList, TestRun, TestRunList)
from ..users.filters import UserFieldFilter
from ..users.models import User, UserList
def product_id_attrs(obj):
    """Choice-attribute callback: tag an option with its product's id."""
    attrs = {"data-product-id": obj.product.id}
    return attrs
class UserForm(ccforms.AddEditForm):
    """Add/edit form for platform users.

    A password is required only when creating a user; on edit the field is
    optional and relabelled "new password".  Email and password changes go
    through dedicated remote-object calls rather than a plain update.
    """

    screenName = forms.CharField(label="screen name")
    firstName = forms.CharField(label="first name")
    lastName = forms.CharField(label="last name")
    email = forms.CharField(label="email")
    roles = ccforms.ModelMultipleChoiceField(required=False)
    password = forms.CharField(label="password", widget=forms.PasswordInput)

    # screen name is immutable once the user exists
    no_edit_fields = ["screenName"]
    # roles can only be assigned after the user object has been created
    assign_later = ["roles"]

    entryclass = User
    listclass = UserList

    def __init__(self, *args, **kwargs):
        # the owning company is injected by the view, not entered on the form
        self.company = kwargs.pop("company")
        super(UserForm, self).__init__(*args, **kwargs)
        if self.instance is not None:
            # editing: an empty password means "leave it unchanged"
            fld = self.fields["password"]
            fld.required = False
            fld.widget.is_required = False
            fld.label = "new password"

    @property
    def extra_creation_data(self):
        # every new user is created under the company the view supplied
        return {"company": self.company}

    def edit_clean(self):
        ret = super(UserForm, self).edit_clean()
        # Password and email require special handling
        if self.cleaned_data["email"] != self.instance.email:
            try:
                self.instance.emailchange(self.cleaned_data["email"])
            except self.instance.Conflict, e:
                self.handle_error(self.instance, e)
        if self.cleaned_data["password"]:
            try:
                self.instance.passwordchange(self.cleaned_data["password"])
            except self.instance.Conflict, e:
                self.handle_error(self.instance, e)
        return ret
class ProductForm(ccforms.AddEditForm):
    """Add/edit form for products."""

    name = forms.CharField()
    description = forms.CharField(widget=ccforms.BareTextarea)
    team = ccforms.ModelMultipleChoiceField(required=False)

    assign_later = ["team", "profile"]

    entryclass = Product
    listclass = ProductList

    def __init__(self, *args, **kwargs):
        # the owning company comes from the view, not from user input
        self.company = kwargs.pop("company")
        super(ProductForm, self).__init__(*args, **kwargs)

    def field_hook(self):
        if self.instance is not None:
            return
        # an environment profile can only be chosen at creation time
        self.fields["profile"] = ccforms.ModelChoiceField()

    @property
    def extra_creation_data(self):
        return dict(company=self.company)
class TestCycleForm(ccforms.AddEditForm):
    """Add/edit form for test cycles."""

    name = forms.CharField()
    description = forms.CharField(widget=ccforms.BareTextarea)
    product = ccforms.ModelChoiceField()
    start_date = forms.DateField()
    end_date = forms.DateField(required=False)
    team = ccforms.ModelMultipleChoiceField(required=False)

    # the owning product cannot change once the cycle exists
    no_edit_fields = ["product"]

    # map snake_case form fields onto the API's camelCase attributes
    field_mapping = {
        "start_date": "startDate",
        "end_date": "endDate"}

    # team membership is assigned after the cycle object is created
    assign_later = ["team"]

    entryclass = TestCycle
    listclass = TestCycleList

    extra_creation_data = {
        "communityAccessAllowed": True,
        "communityAuthoringAllowed": True,
    }
class TestRunForm(ccforms.AddEditForm):
    """Add/edit form for test runs."""

    name = forms.CharField()
    description = forms.CharField(widget=ccforms.BareTextarea)
    test_cycle = ccforms.ModelChoiceField(
        choice_attrs=product_id_attrs)
    start_date = forms.DateField()
    end_date = forms.DateField(required=False)
    team = ccforms.ModelMultipleChoiceField(required=False)
    suites = ccforms.ModelMultipleChoiceField(
        required=False, choice_attrs=product_id_attrs)

    no_edit_fields = ["test_cycle"]
    field_mapping = {
        "test_cycle": "testCycle",
        "start_date": "startDate",
        "end_date": "endDate"}
    assign_later = ["team", "suites"]

    entryclass = TestRun
    listclass = TestRunList

    extra_creation_data = {
        "selfAssignLimit": 0,
        "selfAssignAllowed": True,
    }

    def field_hook(self):
        if not self.instance:
            return
        if self.instance.status.DRAFT:
            # draft runs may opt in to refreshing their included cases
            self.fields["caseupdate"] = forms.BooleanField(
                required=False, initial=False)
        else:
            # no modifying suites on an active test run
            del self.fields["suites"]

    def clean_caseupdate(self):
        if not (self.instance and self.cleaned_data.get("caseupdate")):
            return
        # drop the included-case links so they are rebuilt from the suites
        for included in self.instance.includedtestcases:
            included.delete(invalidate_cache=[
                "IncludedTestSuiteList",
                "TestRunIncludedTestCaseList"])
class TestCaseFilteredSelectMultiple(ccforms.FilteredSelectMultiple):
    """Filtered multi-select that renders each case with a custom template."""

    def get_context_data(self):
        context = super(TestCaseFilteredSelectMultiple, self).get_context_data()
        context["choice_template"] = "forms/_select_cases_item.html"
        return context
class TestSuiteForm(ccforms.AddEditForm):
    """Add/edit form for test suites."""

    name = forms.CharField()
    description = forms.CharField(widget=ccforms.BareTextarea)
    product = ccforms.ModelChoiceField(
        choice_attrs=lambda p: {"data-product-id": p.id})
    cases = ccforms.ModelMultipleChoiceField(
        required=False,
        choice_attrs=product_id_attrs)

    def field_hook(self):
        # swap in the filterable case-picker widget for the cases field
        case_filters = [
            ("status", TestCaseStatusFilter),
            ("name", KeywordFilter),
            ("tag", TagFilter),
            ("author", UserFieldFilter)
        ]
        self.fields["cases"].widget = TestCaseFilteredSelectMultiple(
            auth=self.auth, filters=case_filters)

    # the owning product cannot change once the suite exists
    no_edit_fields = ["product"]
    # cases can only be attached after the suite object is created
    assign_later = ["cases"]

    entryclass = TestSuite
    listclass = TestSuiteList
class BulkTestCaseForm(ccforms.RemoteObjectForm):
    """Create many test cases at once from bulk-formatted text.

    The ``cases`` textarea is parsed by BulkParser; each parsed case is
    posted to the platform from within ``clean()``, so by the time ``save()``
    runs the remote objects already exist.
    """

    product = ccforms.ModelChoiceField(
        choice_attrs=lambda p: {"data-product-id": p.id})
    suite = ccforms.ModelChoiceField(
        required=False, choice_attrs=product_id_attrs)
    cases = forms.CharField(widget=ccforms.BareTextarea)

    def __init__(self, *args, **kwargs):
        # choices and credentials are injected by the view
        product_choices = kwargs.pop("product_choices")
        suite_choices = kwargs.pop("suite_choices")
        self.company = kwargs.pop("company")
        self.auth = kwargs.pop("auth")
        super(BulkTestCaseForm, self).__init__(*args, **kwargs)
        self.fields["product"].obj_list = product_choices
        self.fields["suite"].obj_list = suite_choices

    def clean_cases(self):
        # parse the bulk text into a list of case dicts; surface the first
        # parser error as a field validation error
        parser = BulkParser()
        data = parser.parse(self.cleaned_data["cases"])
        for d in data:
            if "error" in d:
                raise ValidationError(d["error"])
        return data

    def clean(self):
        """Create the remote test cases (plus tags and steps) as a side
        effect of validation; on a conflict, roll back cases created so far."""
        cases = []
        tcl = TestCaseList.get(auth=self.auth)
        # tag ids come straight from POST data rather than a form field
        tag_ids = self.data.getlist("tag")
        new_tags = self.data.getlist("newtag")
        tl = TagList.get(auth=self.auth)
        for name in new_tags:
            t = Tag(name=name, company=self.company)
            tl.post(t)
            tag_ids.append(t.id)
        suite = self.cleaned_data.get("suite")
        for d in self.cleaned_data.get("cases", []):
            tcdata = dict(
                TestCaseForm.extra_creation_data,
                product=self.cleaned_data["product"],
                name=d["name"],
                description="\r\n".join(d["description"]),
                )
            tcv = TestCaseVersion(**tcdata)
            try:
                tcl.post(tcv)
            except TestCaseList.Conflict, e:
                # undo the cases already created in this batch
                for case in cases:
                    TestCaseList.get_by_id(
                        case.testCaseId, auth=self.auth).delete()
                self.handle_error(tcv, e)
                break
            cases.append(tcv)
            for i, stepdata in enumerate(d["steps"]):
                step = TestCaseStep(
                    stepNumber=i+1,
                    instruction="\n".join(stepdata["instruction"]),
                    expectedResult="\n".join(stepdata["expectedResult"]),
                    name="step %s" % str(i + 1),
                    testCaseVersion=tcv,
                    estimatedTimeInMin=0,
                    )
                tcv.steps.post(step)
            tcv.approve(auth=admin)
            tcv.tags = tag_ids
            if suite:
                try:
                    suite.addcase(tcv)
                except TestSuite.Conflict:
                    # case is already in the suite; nothing to do
                    pass
        self.cases = cases
        return self.cleaned_data

    def save(self):
        # the remote objects were created in clean(); just hand them back
        return self.cases
class TestCaseForm(ccforms.AddEditForm):
    """Add/edit form for test case versions, including their steps, tags
    and attachments."""

    name = forms.CharField()
    description = forms.CharField()
    product = ccforms.ModelChoiceField(
        choice_attrs=lambda p: {"data-product-id": p.id})

    # the owning product cannot change once the case exists
    no_edit_fields = ["product"]

    entryclass = TestCaseVersion
    listclass = TestCaseList

    extra_creation_data = {
        # @@@ these attachment-related attributes are non-functional
        "maxAttachmentSizeInMbytes": 0,
        "maxNumberOfAttachments": 0,
        "automationUri": "",
        }

    def field_hook(self):
        self.prior_version = None
        if self.instance is not None:
            # editing: offer version-increment choices; only drafts may be
            # saved in place
            increment_choices = [
                ("major", "save as new version"),
                ]
            if self.instance.status.DRAFT:
                increment_choices += [("inplace", "save in place")]
            self.fields["increment"] = forms.ChoiceField(
                choices=increment_choices
                )
        else:
            # adding: allow placing the new case directly into a suite
            self.fields["suite"] = ccforms.ModelChoiceField(
                required=False, choice_attrs=product_id_attrs)

    def create_formsets(self, *args, **kwargs):
        self.steps_formset = StepFormSet(
            *args, **dict(kwargs, prefix="steps"))
        super(TestCaseForm, self).create_formsets(*args, **kwargs)

    def is_valid(self):
        # the steps formset must validate before the main form is attempted;
        # clean() checks the steps_formset_clear flag set here
        if self.steps_formset.is_valid():
            self.steps_formset_clear = True
            return super(TestCaseForm, self).is_valid()
        return False

    def _save_tags(self):
        # @@@ convert into proper form field with widget?
        tag_ids = self.data.getlist("tag")
        new_tags = self.data.getlist("newtag")
        tl = TagList.get(auth=self.auth)
        for name in new_tags:
            t = Tag(name=name, company=self.instance.company)
            tl.post(t)
            tag_ids.append(t.id)
        self.instance.tags = tag_ids

    def _save_attachments(self):
        # @@@ convert into proper form field with widget?
        delete_ids = set(self.data.getlist("remove-attachment"))
        for attachment in self.instance.attachments:
            if attachment.id in delete_ids:
                attachment.delete()
        # if we're saving as new version, bring forward existing attachments
        # from previous version
        if self.prior_version is not None:
            for attachment in self.prior_version.attachments:
                self.instance.attachments.post(attachment)
        if not self.files:
            return
        for uf in self.files.getlist("attachment"):
            try:
                file_name = uf.name
                file_size = uf.size
            except AttributeError:
                # not an uploaded-file object; skip it
                continue
            if not file_name or not file_size:
                continue
            storage_name = default_storage.get_available_name(
                default_storage.get_valid_name(file_name))
            default_storage.save(storage_name, uf)
            attachment = Attachment(
                name=storage_name,
                description=file_name,
                url=default_storage.url(storage_name),
                size=file_size,
                attachmentType=AttachmentType.UNSPECIFIED
                )
            self.instance.attachments.post(attachment)

    def clean(self):
        # If the formset is not valid, we don't want to actually try saving the
        # testcase.
        if self.errors or not getattr(self, "steps_formset_clear", False):
            return self.cleaned_data
        ret = super(TestCaseForm, self).clean()
        self._save_tags()
        self._save_attachments()
        suite = self.cleaned_data.get("suite")
        if suite:
            try:
                suite.addcase(self.instance)
            except TestSuite.Conflict:
                # case is already a member of the suite
                pass
        return ret

    def edit_clean(self):
        for k, v in self.prep_form_data(
                self.cleaned_data, editing=True).iteritems():
            setattr(self.instance, k, v)
        incr = self.cleaned_data["increment"]
        try:
            # NOTE(review): field_hook only offers "major"/"inplace" choices,
            # so the "minor" branch looks unreachable from this form - confirm.
            if incr == "minor":
                self.prior_version = self.instance.refresh()
                self.instance.versionincrement(increment.MINOR)
                self.instance.approve(auth=admin)
            elif incr == "major":
                self.prior_version = self.instance.refresh()
                self.instance.versionincrement(increment.MAJOR)
                self.instance.approve(auth=admin)
            else:
                self.instance.put()
        except self.instance.Conflict, e:
            self.handle_error(self.instance, e)
        # Name field can't be edited via TestCaseVersion, so we do it via the
        # TestCase proper
        if self.cleaned_data["name"] != self.instance.name:
            tc = self.instance.testCase
            tc.name = self.instance.name = self.cleaned_data["name"]
            try:
                tc.put()
            except self.instance.Conflict, e:
                self.handle_error(self.instance, e)
        return self.cleaned_data

    def save(self):
        self.steps_formset.save(self.instance, hasattr(self, "prior_version"))
        instance = super(TestCaseForm, self).save()
        instance.approve(auth=admin)
        # keep the new version active if the version it replaces was active
        if self.prior_version and self.prior_version.status.ACTIVE:
            instance.activate(auth=admin)
        return instance
|
mozilla/caseconductor-ui
|
ccui/manage/forms.py
|
Python
|
gpl-3.0
| 15,347
|
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from tinymce.widgets import TinyMCE
from django.conf import settings
class PageForm(FlatpageForm):
    """Flatpage form that swaps the plain content textarea for TinyMCE."""

    class Meta:
        model = FlatPage
        exclude = []
        widgets = {
            'content': TinyMCE(attrs={'cols': 100, 'rows': 15}),
        }
class PageAdmin(FlatPageAdmin):
    """
    Page Admin
    """
    form = PageForm

    def formfield_for_dbfield(self, db_field, **kwargs):
        # Everything except the page body falls through to the default.
        if db_field.name not in ('content', ):
            return super(PageAdmin, self).formfield_for_dbfield(db_field, **kwargs)
        rich_widget = TinyMCE(
            attrs={'cols': 80, 'rows': 30},
            mce_attrs=settings.TINYMCE_BODY_CONFIG,
        )
        return db_field.formfield(widget=rich_widget)
|
erral/promesak
|
promesak/admin.py
|
Python
|
gpl-2.0
| 824
|
#!/usr/bin/env python
from webdriver_testing.pages.site_pages.teams import ATeamPage
class MessagesTab(ATeamPage):
    """Page-object actions for the Messages tab of the Team Settings page."""

    _URL = 'teams/%s/settings/messages/' #provide the team slug

    #MESSAGES FIELDS
    _INVITATION_MESSAGE = 'textarea#id_messages_invite'
    _APPLICATION_MESSAGE = 'textarea#id_messages_application'
    _NEW_MANAGER_MESSAGE = 'textarea#id_messages_manager'
    _NEW_ADMIN_MESSAGE = 'textarea#id_messages_admin'
    _NEW_MEMBER_MESSAGE = 'textarea#id_messages_joins'
    _MESSAGES = ['INVITATION', 'APPLICATION', 'NEW_MANAGER', 'NEW_ADMIN', 'NEW_MEMBER']

    #GUIDELINES FIELDS
    _SUBTITLE_GUIDELINES = 'textarea#id_guidelines_subtitle'
    _TRANSLATE_GUIDELINES = 'textarea#id_guidelines_translate'
    _REVIEW_GUIDELINES = 'textarea#id_guidelines_review'
    _GUIDELINES = ['SUBTITLE', 'TRANSLATE', 'REVIEW']

    _SAVE_CHANGES = 'div.submit input.submit'

    def _message_locator(self, name):
        """CSS locator of the message textarea keyed by *name*."""
        return getattr(self, '_%s_MESSAGE' % name)

    def _guideline_locator(self, name):
        """CSS locator of the guideline textarea keyed by *name*."""
        return getattr(self, '_%s_GUIDELINES' % name)

    def open_messages_tab(self, team):
        """Open the messages tab for the given team slug."""
        self.open_page(self._URL % team)
        self.wait_for_element_present(self._INVITATION_MESSAGE)

    def _customize_messages(self, **messages):
        """Type the supplied text into each named message field."""
        for name, text in messages.iteritems():
            locator = self._message_locator(name)
            self.logger.info(locator)
            self.type_by_css(locator, text)

    def _stored_message_text(self):
        """Return the stored text of every message field."""
        return dict(
            (name, self.get_text_by_css(self._message_locator(name)))
            for name in self._MESSAGES)

    def _stored_guideline_text(self):
        """Return the stored text of every guideline field."""
        stored = {}
        for name in self._GUIDELINES:
            locator = self._guideline_locator(name)
            self.logger.info(locator)
            stored[name] = self.get_text_by_css(locator)
        return stored

    def _customize_guidelines(self, **guidelines):
        """Type the supplied text into each named guideline field."""
        for name, text in guidelines.iteritems():
            self.type_by_css(self._guideline_locator(name), text)

    def edit_messages(self, messages):
        """Fill in the message fields from *messages* (field -> text dict)
        and submit the form."""
        self._customize_messages(**messages)
        self.submit_by_css(self._SAVE_CHANGES)

    def edit_guidelines(self, guidelines):
        """Fill in the guideline fields from *guidelines* (field -> text
        dict) and save via a click."""
        self._customize_guidelines(**guidelines)
        self.click_by_css(self._SAVE_CHANGES)

    def stored_messages(self):
        """Return a dict of the currently configured messages."""
        return self._stored_message_text()

    def stored_guidelines(self):
        """Return a dict of the currently configured guidelines."""
        return self._stored_guideline_text()
|
ofer43211/unisubs
|
apps/webdriver_testing/pages/site_pages/teams/messages_tab.py
|
Python
|
agpl-3.0
| 3,683
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
def register_types(module):
    """Register this module's nested C++ namespaces and their types.

    Auto-generated by pybindgen for the ns-3 Python bindings; regenerate
    rather than hand-editing.
    """
    root_module = module.get_root()

    ## Register a nested module for the namespace Config
    nested_module = module.add_cpp_namespace('Config')
    register_types_ns3_Config(nested_module)

    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)

    ## Register a nested module for the namespace addressUtils
    nested_module = module.add_cpp_namespace('addressUtils')
    register_types_ns3_addressUtils(nested_module)

    ## Register a nested module for the namespace aodv
    nested_module = module.add_cpp_namespace('aodv')
    register_types_ns3_aodv(nested_module)

    ## Register a nested module for the namespace dot11s
    nested_module = module.add_cpp_namespace('dot11s')
    register_types_ns3_dot11s(nested_module)

    ## Register a nested module for the namespace flame
    nested_module = module.add_cpp_namespace('flame')
    register_types_ns3_flame(nested_module)

    ## Register a nested module for the namespace internal
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)

    ## Register a nested module for the namespace olsr
    nested_module = module.add_cpp_namespace('olsr')
    register_types_ns3_olsr(nested_module)
# Per-namespace type registration stubs (pybindgen generator boilerplate).
# Only the `flame` namespace contributes types in this module; the rest are
# empty placeholders kept so every namespace has a registrar.

def register_types_ns3_Config(module):
    root_module = module.get_root()

def register_types_ns3_FatalImpl(module):
    root_module = module.get_root()

def register_types_ns3_addressUtils(module):
    root_module = module.get_root()

def register_types_ns3_aodv(module):
    root_module = module.get_root()

def register_types_ns3_dot11s(module):
    root_module = module.get_root()

def register_types_ns3_flame(module):
    """Register the FLAME routing protocol types."""
    root_module = module.get_root()

    ## flame-protocol.h: ns3::flame::FlameProtocol [class]
    module.add_class('FlameProtocol', parent=root_module['ns3::MeshL2RoutingProtocol'])
    ## flame-protocol.h: ns3::flame::FlameTag [class]
    module.add_class('FlameTag', parent=root_module['ns3::Tag'])

def register_types_ns3_internal(module):
    root_module = module.get_root()

def register_types_ns3_olsr(module):
    root_module = module.get_root()
def register_methods(root_module):
    """Attach method bindings to every class this module registered."""
    register_Ns3FlameFlameProtocol_methods(root_module, root_module['ns3::flame::FlameProtocol'])
    register_Ns3FlameFlameTag_methods(root_module, root_module['ns3::flame::FlameTag'])
    return
def register_Ns3FlameFlameProtocol_methods(root_module, cls):
    """Bind ns3::flame::FlameProtocol's constructor and member functions.

    Auto-generated by pybindgen from flame-protocol.h; regenerate rather
    than hand-editing.
    """
    ## flame-protocol.h: ns3::flame::FlameProtocol::FlameProtocol() [constructor]
    cls.add_constructor([])
    ## flame-protocol.h: void ns3::flame::FlameProtocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   is_virtual=True)
    ## flame-protocol.h: ns3::Mac48Address ns3::flame::FlameProtocol::GetAddress() [member function]
    cls.add_method('GetAddress',
                   'ns3::Mac48Address',
                   [])
    ## flame-protocol.h: static ns3::TypeId ns3::flame::FlameProtocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## flame-protocol.h: bool ns3::flame::FlameProtocol::Install(ns3::Ptr<ns3::MeshPointDevice> arg0) [member function]
    cls.add_method('Install',
                   'bool',
                   [param('ns3::Ptr< ns3::MeshPointDevice >', 'arg0')])
    ## flame-protocol.h: bool ns3::flame::FlameProtocol::RemoveRoutingStuff(uint32_t fromIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet> packet, uint16_t & protocolType) [member function]
    cls.add_method('RemoveRoutingStuff',
                   'bool',
                   [param('uint32_t', 'fromIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet >', 'packet'), param('uint16_t &', 'protocolType')],
                   is_virtual=True)
    ## flame-protocol.h: void ns3::flame::FlameProtocol::Report(std::ostream & arg0) const [member function]
    cls.add_method('Report',
                   'void',
                   [param('std::ostream &', 'arg0')],
                   is_const=True)
    ## flame-protocol.h: bool ns3::flame::FlameProtocol::RequestRoute(uint32_t sourceIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet const> packet, uint16_t protocolType, ns3::Callback<void, bool, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty> routeReply) [member function]
    cls.add_method('RequestRoute',
                   'bool',
                   [param('uint32_t', 'sourceIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocolType'), param('ns3::Callback< void, bool, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty >', 'routeReply')],
                   is_virtual=True)
    ## flame-protocol.h: void ns3::flame::FlameProtocol::ResetStats() [member function]
    cls.add_method('ResetStats',
                   'void',
                   [])
    return
def register_Ns3FlameFlameTag_methods(root_module, cls):
    """Bind ns3::flame::FlameTag's constructors, methods and attributes.

    Auto-generated by pybindgen from flame-protocol.h; regenerate rather
    than hand-editing.
    """
    ## flame-protocol.h: ns3::flame::FlameTag::FlameTag(ns3::flame::FlameTag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::flame::FlameTag const &', 'arg0')])
    ## flame-protocol.h: ns3::flame::FlameTag::FlameTag(ns3::Mac48Address a=ns3::Mac48Address()) [constructor]
    cls.add_constructor([param('ns3::Mac48Address', 'a', default_value='ns3::Mac48Address()')])
    ## flame-protocol.h: void ns3::flame::FlameTag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    ## flame-protocol.h: ns3::TypeId ns3::flame::FlameTag::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## flame-protocol.h: uint32_t ns3::flame::FlameTag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## flame-protocol.h: static ns3::TypeId ns3::flame::FlameTag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## flame-protocol.h: void ns3::flame::FlameTag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## flame-protocol.h: void ns3::flame::FlameTag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    ## flame-protocol.h: ns3::flame::FlameTag::receiver [variable]
    cls.add_instance_attribute('receiver', 'ns3::Mac48Address', is_const=False)
    ## flame-protocol.h: ns3::flame::FlameTag::transmitter [variable]
    cls.add_instance_attribute('transmitter', 'ns3::Mac48Address', is_const=False)
    return
def register_functions(root_module):
    """Register free functions for every namespace (generator boilerplate)."""
    module = root_module
    register_functions_ns3_Config(module.get_submodule('Config'), root_module)
    register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
    register_functions_ns3_aodv(module.get_submodule('aodv'), root_module)
    register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
    register_functions_ns3_flame(module.get_submodule('flame'), root_module)
    register_functions_ns3_internal(module.get_submodule('internal'), root_module)
    register_functions_ns3_olsr(module.get_submodule('olsr'), root_module)
    return
# Per-namespace free-function registration stubs: this module binds no free
# functions, but the generator emits one registrar per namespace regardless.

def register_functions_ns3_Config(module, root_module):
    return

def register_functions_ns3_FatalImpl(module, root_module):
    return

def register_functions_ns3_addressUtils(module, root_module):
    return

def register_functions_ns3_aodv(module, root_module):
    return

def register_functions_ns3_dot11s(module, root_module):
    return

def register_functions_ns3_flame(module, root_module):
    return

def register_functions_ns3_internal(module, root_module):
    return

def register_functions_ns3_olsr(module, root_module):
    return
|
csgrad/ns-3-9-ngwmn
|
bindings/python/apidefs/gcc-LP64/ns3_module_flame.py
|
Python
|
gpl-2.0
| 9,222
|
#!/usr/bin/env python
# Launches spectrum_sense.py and transactions.py, each in its own thread.
# Execute this file to start the basestation.
#
# Fixes over the previous revision:
#  * `def start_transactions:` / `def start_spectrum_sense:` were missing
#    their parameter parentheses (SyntaxError).
#  * `Thread` was never imported (only `threading` was), and the worker was
#    passed positionally -- Thread's first positional parameter is `group`,
#    not the callable.  Use threading.Thread(target=...) instead.
#  * Duplicate shebang line removed; "sucessfully" typo fixed.

import os
import threading


def start_transactions():
    """Run the transactions daemon; blocks until the child script exits."""
    os.system('./transactions.py')
    print('Transactions started successfully')


def start_spectrum_sense():
    """Run the spectrum-sensing daemon; blocks until the child script exits."""
    os.system('./spectrum_sense.py')
    print('Spectrum sensing started successfully')


if __name__ == '__main__':
    t1 = threading.Thread(target=start_spectrum_sense)
    t2 = threading.Thread(target=start_transactions)
    t1.start()
    t2.start()
|
levelrf/level_basestation
|
spectrum_sense/main.py
|
Python
|
gpl-3.0
| 526
|
from flask import Flask, render_template, send_from_directory
app = Flask(__name__)

# Static assets live under templates/; each route below simply streams a
# file from the matching subdirectory.

@app.route('/js/<path:path>')
def send_js(path):
    """Serve a JavaScript file from templates/js."""
    return send_from_directory(u'templates/js', path)

@app.route('/css/<path:path>')
def send_css(path):
    """Serve a stylesheet from templates/css."""
    return send_from_directory(u'templates/css', path)

@app.route('/fonts/<path:path>')
def send_fonts(path):
    """Serve a font file from templates/fonts."""
    return send_from_directory(u'templates/fonts', path)

@app.route('/img/<path:path>')
def send_img(path):
    """Serve an image from templates/img."""
    return send_from_directory(u'templates/img', path)

@app.route('/')
@app.route('/index.html')
def root_index_page():
    """Render the landing page."""
    return render_template('index.html')

if __name__ == '__main__':
    # debug server for local development only
    app.run(debug=True)
|
c0state/python-skeleton-for-ci-cd
|
app/main_app.py
|
Python
|
mit
| 680
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
# ERPNext - web based ERP (http://erpnext.com)
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, flt, getdate
from frappe import _
from frappe.model.mapper import get_mapped_doc
from erpnext.stock.stock_balance import update_bin_qty, get_indented_qty
from erpnext.controllers.buying_controller import BuyingController
# Custom grid template used by the desk UI for the child "items" table.
form_grid_templates = {
    "items": "templates/form_grid/material_request_grid.html"
}
class MaterialRequest(BuyingController):
    """Controller for the Material Request doctype (types: Purchase,
    Material Transfer, Material Issue)."""

    def get_feed(self):
        # one-line summary for the activity feed
        return _("{0}: {1}").format(self.status, self.material_request_type)

    def check_if_already_pulled(self):
        pass#if self.[d.sales_order_no for d in self.get('items')]

    def validate_qty_against_so(self):
        """Block requesting more of an item than remains on its Sales Order,
        counting quantities already requested by other submitted MRs.
        (Currently commented out in validate() - see note there.)"""
        so_items = {} # Format --> {'SO/00001': {'Item/001': 120, 'Item/002': 24}}
        for d in self.get('items'):
            if d.sales_order_no:
                # NOTE: dict.has_key is Python 2 only
                if not so_items.has_key(d.sales_order_no):
                    so_items[d.sales_order_no] = {d.item_code: flt(d.qty)}
                else:
                    if not so_items[d.sales_order_no].has_key(d.item_code):
                        so_items[d.sales_order_no][d.item_code] = flt(d.qty)
                    else:
                        so_items[d.sales_order_no][d.item_code] += flt(d.qty)

        for so_no in so_items.keys():
            for item in so_items[so_no].keys():
                # qty of this item already requested by other submitted MRs
                already_indented = frappe.db.sql("""select sum(qty)
                    from `tabMaterial Request Item`
                    where item_code = %s and sales_order_no = %s and
                    docstatus = 1 and parent != %s""", (item, so_no, self.name))
                already_indented = already_indented and flt(already_indented[0][0]) or 0

                actual_so_qty = frappe.db.sql("""select sum(qty) from `tabSales Order Item`
                    where parent = %s and item_code = %s and docstatus = 1""", (so_no, item))
                actual_so_qty = actual_so_qty and flt(actual_so_qty[0][0]) or 0

                if actual_so_qty and (flt(so_items[so_no][item]) + already_indented > actual_so_qty):
                    frappe.throw(_("Material Request of maximum {0} can be made for Item {1} against Sales Order {2}").format(actual_so_qty - already_indented, item, so_no))

    def validate_schedule_date(self):
        # each row's expected date must not precede the request date
        for d in self.get('items'):
            if d.schedule_date and getdate(d.schedule_date) < getdate(self.transaction_date):
                frappe.throw(_("Expected Date cannot be before Material Request Date"))

    # Validate
    # ---------------------
    def validate(self):
        super(MaterialRequest, self).validate()
        self.validate_schedule_date()
        self.validate_uom_is_integer("uom", "qty")

        if not self.status:
            self.status = "Draft"

        from erpnext.controllers.status_updater import validate_status
        validate_status(self.status, ["Draft", "Submitted", "Stopped", "Cancelled"])

        self.validate_value("material_request_type", "in", ["Purchase", "Material Transfer", "Material Issue"])

        pc_obj = frappe.get_doc('Purchase Common')
        pc_obj.validate_for_items(self)

        # self.validate_qty_against_so()
        # NOTE: Since Item BOM and FG quantities are combined, using current data, it cannot be validated
        # Though the creation of Material Request from a Production Plan can be rethought to fix this

    def on_submit(self):
        frappe.db.set(self, 'status', 'Submitted')
        self.update_requested_qty()

    def check_modified_date(self):
        """Raise if the DB copy was modified after this in-memory copy was
        loaded (stale-document guard)."""
        # NOTE(review): the TIMEDIFF query below is built via %-interpolation
        # rather than parameterized values - worth tightening.
        mod_db = frappe.db.sql("""select modified from `tabMaterial Request` where name = %s""",
            self.name)
        date_diff = frappe.db.sql("""select TIMEDIFF('%s', '%s')"""
            % (mod_db[0][0], cstr(self.modified)))

        if date_diff and date_diff[0][0]:
            frappe.throw(_("{0} {1} has been modified. Please refresh.").format(_(self.doctype), self.name))

    def update_status(self, status):
        # refuse to act on a stale in-memory copy
        self.check_modified_date()
        frappe.db.set(self, 'status', cstr(status))
        self.update_requested_qty()

    def on_cancel(self):
        # cannot cancel if a Purchase Order already references this request
        pc_obj = frappe.get_doc('Purchase Common')
        pc_obj.check_for_stopped_or_closed_status(self.doctype, self.name)
        pc_obj.check_docstatus(check = 'Next', doctype = 'Purchase Order', docname = self.name, detail_doctype = 'Purchase Order Item')

        self.update_requested_qty()
        frappe.db.set(self,'status','Cancelled')

    def update_completed_qty(self, mr_items=None):
        """Recompute each row's ordered_qty from submitted Stock Entries and
        refresh the request's overall per_ordered percentage."""
        if self.material_request_type == "Purchase":
            # purchase requests are fulfilled by Purchase Orders, not Stock
            # Entries, so there is nothing to recompute here
            return

        if not mr_items:
            mr_items = [d.name for d in self.get("items")]

        per_ordered = 0.0
        for d in self.get("items"):
            if d.name in mr_items:
                d.ordered_qty = flt(frappe.db.sql("""select sum(transfer_qty)
                    from `tabStock Entry Detail` where material_request = %s
                    and material_request_item = %s and docstatus = 1""",
                    (self.name, d.name))[0][0])
                frappe.db.set_value(d.doctype, d.name, "ordered_qty", d.ordered_qty)

            # note: if qty is 0, its row is still counted in len(self.get("items"))
            # hence adding 1 to per_ordered
            if (d.ordered_qty > d.qty) or not d.qty:
                per_ordered += 1.0
            elif d.qty > 0:
                per_ordered += flt(d.ordered_qty / flt(d.qty))

        self.per_ordered = flt((per_ordered / flt(len(self.get("items")))) * 100.0, 2)
        frappe.db.set_value(self.doctype, self.name, "per_ordered", self.per_ordered)

    def update_requested_qty(self, mr_item_rows=None):
        """update requested qty (before ordered_qty is updated)"""
        item_wh_list = []
        for d in self.get("items"):
            # only distinct (item, warehouse) pairs for stock items
            if (not mr_item_rows or d.name in mr_item_rows) and [d.item_code, d.warehouse] not in item_wh_list \
                    and frappe.db.get_value("Item", d.item_code, "is_stock_item") == 1 and d.warehouse:
                item_wh_list.append([d.item_code, d.warehouse])

        for item_code, warehouse in item_wh_list:
            update_bin_qty(item_code, warehouse, {
                "indented_qty": get_indented_qty(item_code, warehouse)
            })
def update_completed_and_requested_qty(stock_entry, method):
    """Doc-event hook: refresh ordered/requested quantities on every Material
    Request referenced by the given Stock Entry's item rows."""
    if stock_entry.doctype != "Stock Entry":
        return

    # group the affected MR item row names by their parent Material Request
    rows_by_request = {}
    for row in stock_entry.get("items"):
        if row.material_request:
            rows_by_request.setdefault(row.material_request, []).append(
                row.material_request_item)

    for request_name, row_names in rows_by_request.items():
        if not (request_name and row_names):
            continue
        request = frappe.get_doc("Material Request", request_name)
        # a stopped/cancelled request must not receive further stock entries
        if request.status in ["Stopped", "Cancelled"]:
            frappe.throw(
                _("{0} {1} is cancelled or stopped").format(
                    _("Material Request"), request_name),
                frappe.InvalidStatusError)
        request.update_completed_qty(row_names)
        request.update_requested_qty(row_names)
def set_missing_values(source, target_doc):
    """Run the standard post-mapping hooks on a freshly mapped document."""
    for hook in ("set_missing_values", "calculate_taxes_and_totals"):
        target_doc.run_method(hook)
def update_item(obj, target, source_parent):
    """Row postprocess for mapping: carry over only the still-unordered qty."""
    pending_qty = flt(obj.qty) - flt(obj.ordered_qty)
    target.conversion_factor = 1
    target.qty = pending_qty
    target.stock_qty = pending_qty
@frappe.whitelist()
def make_purchase_order(source_name, target_doc=None):
    """Map a submitted Purchase-type Material Request onto a new (or given)
    Purchase Order, copying only rows that still have unordered qty."""
    doclist = get_mapped_doc("Material Request", source_name, {
        "Material Request": {
            "doctype": "Purchase Order",
            # only submitted Purchase-type requests may be converted
            "validation": {
                "docstatus": ["=", 1],
                "material_request_type": ["=", "Purchase"]
            }
        },
        "Material Request Item": {
            "doctype": "Purchase Order Item",
            "field_map": [
                ["name", "prevdoc_detail_docname"],
                ["parent", "prevdoc_docname"],
                ["parenttype", "prevdoc_doctype"],
                ["uom", "stock_uom"],
                ["uom", "uom"]
            ],
            "postprocess": update_item,
            # skip rows that are already fully ordered
            "condition": lambda doc: doc.ordered_qty < doc.qty
        }
    }, target_doc, set_missing_values)

    return doclist
@frappe.whitelist()
def make_purchase_order_based_on_supplier(source_name, target_doc=None):
    """Build one Purchase Order for *source_name* (a supplier) out of every
    open Material Request containing that supplier's default items."""
    if target_doc:
        # NOTE: basestring is Python 2 only
        if isinstance(target_doc, basestring):
            import json
            target_doc = frappe.get_doc(json.loads(target_doc))
        target_doc.set("items", [])

    material_requests, supplier_items = get_material_requests_based_on_supplier(source_name)

    def postprocess(source, target_doc):
        target_doc.supplier = source_name
        set_missing_values(source, target_doc)
        # keep only rows for this supplier's items with remaining qty
        target_doc.set("items", [d for d in target_doc.get("items")
            if d.get("item_code") in supplier_items and d.get("qty") > 0])
        return target_doc

    # each pass appends the request's rows to the same accumulating target_doc
    for mr in material_requests:
        target_doc = get_mapped_doc("Material Request", mr, {
            "Material Request": {
                "doctype": "Purchase Order",
            },
            "Material Request Item": {
                "doctype": "Purchase Order Item",
                "field_map": [
                    ["name", "prevdoc_detail_docname"],
                    ["parent", "prevdoc_docname"],
                    ["parenttype", "prevdoc_doctype"],
                    ["uom", "stock_uom"],
                    ["uom", "uom"]
                ],
                "postprocess": update_item,
                "condition": lambda doc: doc.ordered_qty < doc.qty
            }
        }, target_doc, postprocess)

    return target_doc
def get_material_requests_based_on_supplier(supplier):
	"""Return (material_requests, supplier_items) for *supplier*.

	supplier_items: item codes whose default supplier is *supplier*.
	material_requests: names of submitted, not-fully-ordered, non-stopped
	'Purchase' Material Requests containing any of those items.
	"""
	supplier_items = [d[0] for d in frappe.db.get_values("Item",
		{"default_supplier": supplier})]
	if supplier_items:
		# The in-clause is built from parameter placeholders, so item codes
		# are passed as bound values (no SQL injection via item names).
		material_requests = frappe.db.sql_list("""select distinct mr.name
		from `tabMaterial Request` mr, `tabMaterial Request Item` mr_item
		where mr.name = mr_item.parent
			and mr_item.item_code in (%s)
			and mr.material_request_type = 'Purchase'
			and mr.per_ordered < 99.99
			and mr.docstatus = 1
			and mr.status != 'Stopped'""" % ', '.join(['%s']*len(supplier_items)),
		tuple(supplier_items))
	else:
		material_requests = []
	return material_requests, supplier_items
@frappe.whitelist()
def make_supplier_quotation(source_name, target_doc=None):
	"""Map a submitted 'Purchase'-type Material Request to a Supplier Quotation.

	source_name: name of the Material Request to convert.
	target_doc: optional existing Supplier Quotation (doc or JSON) to append to.
	Returns the mapped Supplier Quotation document.
	"""
	doclist = get_mapped_doc("Material Request", source_name, {
		"Material Request": {
			"doctype": "Supplier Quotation",
			# Only submitted purchase-type requests may be quoted.
			"validation": {
				"docstatus": ["=", 1],
				"material_request_type": ["=", "Purchase"]
			}
		},
		"Material Request Item": {
			"doctype": "Supplier Quotation Item",
			"field_map": {
				"name": "prevdoc_detail_docname",
				"parent": "prevdoc_docname",
				"parenttype": "prevdoc_doctype"
			}
		}
	}, target_doc, set_missing_values)
	return doclist
@frappe.whitelist()
def make_stock_entry(source_name, target_doc=None):
	"""Map a submitted transfer/issue Material Request to a Stock Entry.

	source_name: name of the Material Request to convert.
	target_doc: optional existing Stock Entry (doc or JSON) to append rows to.
	Returns the mapped Stock Entry document.
	"""
	def update_item(obj, target, source_parent):
		"""Copy only the pending qty and route it to the right warehouse side."""
		qty = flt(obj.qty) - flt(obj.ordered_qty) \
			if flt(obj.qty) > flt(obj.ordered_qty) else 0
		target.qty = qty
		target.transfer_qty = qty
		target.conversion_factor = 1
		# Transfers receive INTO the requested warehouse; issues take FROM it.
		if source_parent.material_request_type == "Material Transfer":
			target.t_warehouse = obj.warehouse
		else:
			target.s_warehouse = obj.warehouse
	def set_missing_values(source, target):
		"""Carry the request type over as the Stock Entry purpose and price it."""
		target.purpose = source.material_request_type
		target.run_method("calculate_rate_and_amount")
	doclist = get_mapped_doc("Material Request", source_name, {
		"Material Request": {
			"doctype": "Stock Entry",
			# Only submitted transfer/issue requests can become Stock Entries.
			"validation": {
				"docstatus": ["=", 1],
				"material_request_type": ["in", ["Material Transfer", "Material Issue"]]
			}
		},
		"Material Request Item": {
			"doctype": "Stock Entry Detail",
			"field_map": {
				"name": "material_request_item",
				"parent": "material_request",
				"uom": "stock_uom",
			},
			"postprocess": update_item,
			# Skip rows that have already been fully fulfilled.
			"condition": lambda doc: doc.ordered_qty < doc.qty
		}
	}, target_doc, set_missing_values)
	return doclist
|
MartinEnder/erpnext-de
|
erpnext/stock/doctype/material_request/material_request.py
|
Python
|
agpl-3.0
| 10,868
|
'''
James D. Zoll
4/29/2013
Purpose: Defines the single view for the DK Optimize project.
License: This is a public work.
'''
# Library Imports
from django.template import RequestContext
from django.shortcuts import render_to_response
def index(request):
    """Render the DK Optimize landing page; no view-specific context beyond the request."""
    return render_to_response('dk_optimize/index.html', context_instance = RequestContext(request))
|
Zerack/zoll.me
|
dk_optimize/views.py
|
Python
|
mit
| 363
|
import sys
sys.path[0] = 'build/lib.linux-i686-2.7'
import io
import os.path
import numpy
import random
import itertools
from selfrepairing import HSRC, HSRCMode
numpy.set_printoptions(linewidth=100)
# Erasure-code parameters; presumably these must match the encoder's settings -- TODO confirm.
k = 3
n = 7
w = 32
packetsize = 128
buffsize = w*packetsize
# NOTE(review): 'SCHDULE' is spelled exactly as the selfrepairing library defines it.
mode = HSRCMode.SCHDULE
# dec_points holds the 1-based indices of the surviving shares used for decoding.
dec_points = numpy.ndarray(dtype=numpy.uint32, shape=(k,))
raw_data = [numpy.ndarray(shape=(buffsize,), dtype=numpy.uint8) for i in xrange(k)]
encoded_data = [numpy.zeros(buffsize, dtype=numpy.uint8) for i in xrange(k)]
# The encoded shares live next to the original file as <name>_0 .. <name>_{n-1};
# only the shares that exist on disk are opened.
file_name = sys.argv[1]
file_size = os.path.getsize(file_name)
survived = []
inputfiles = []
for i in xrange(n):
    name = file_name+"_%d"%i
    if os.path.exists(name):
        inputfiles.append(io.open(file_name+"_%d"%i,'rb'))
        survived.append(i)
outputfile = io.open(file_name+"_decoded", 'wb')
# Share indices are converted to 1-based decoder points.
for i,p in enumerate(survived):
    dec_points[i] = p+1
print dec_points
decoder = HSRC(k, dec_points, w=w, packetsize=packetsize, mode=mode, decoder=True)
# Decode block-by-block; each round consumes buffsize bytes from every share.
readblock = k*buffsize
to_read = file_size
while to_read>0:
    for i,inputf in enumerate(inputfiles):
        inputf.readinto(encoded_data[i])
    decoder.decode(encoded_data, raw_data)
    if to_read>=readblock:
        # Full block: write every recovered buffer verbatim.
        for rd in raw_data:
            outputfile.write(rd)
        to_read -= readblock
    else:
        # Final partial block: write only up to the original file size.
        data = to_read  # NOTE(review): assigned but never used
        for rd in raw_data:
            outputfile.write(buffer(rd,0,to_read))
            to_read = max(to_read-buffsize, 0)
for inputf in inputfiles:
    inputf.close()
outputfile.close()
|
llpamies/selfrepairing
|
decode.py
|
Python
|
gpl-3.0
| 1,505
|
from qtpy import QtWidgets, QtCore
class WarningTimerModel(object):
    """Model for Warning Boxes that close after a certain amount of time
    Parameters
    ----------
    parent
        The parent of the QMessageBox
    title : :obj:`str`
        The title of the window
    message : :obj:`str`
        The warning message
    time_to_wait : :obj:`int`
        The amount of time in seconds to wait. 3 seconds by default
    Attributes
    ----------
    parent
        The parent of the QMessageBox
    """
    def __init__(self, parent, title, message, time_to_wait=3):
        self._views = set()
        self.parent = parent
        self._title = title
        self._message = message
        self._time_to_wait = time_to_wait
    @property
    def title(self):
        """The warning window title
        Setting the title will notify the views to change the window title
        """
        return self._title
    @title.setter
    def title(self, new_title):
        self._title = new_title
        for view in self._views:
            view.change_title()
    @property
    def message(self):
        """The warning message to be displayed
        Setting the message will notify the views to change the message
        """
        return self._message
    @message.setter
    def message(self, new_message):
        self._message = new_message
        for view in self._views:
            # Fixed typo: was view.chanage_text(), which raised AttributeError
            # because views define change_text().
            view.change_text()
    @property
    def time_to_wait(self):
        """The amount of time in seconds to wait until the window closes
        Setting the time will notify the views to either close if the time is
        less than 0 or adjust the text to show how much time is left to wait
        """
        return self._time_to_wait
    @time_to_wait.setter
    def time_to_wait(self, time_to_wait):
        self._time_to_wait = time_to_wait
        if self.time_to_wait < 0:
            self.close_views()
        else:
            for view in self._views:
                view.change_text()
    @property
    def text(self):
        """The message with the amount of time until the window closes"""
        time_text = "\n\n Closing in %d seconds" % (self.time_to_wait)
        return self.message + time_text
    def register(self, view):
        """Register a view with the model"""
        self._views.add(view)
    def unregister(self, view):
        """Unregister a view with the model"""
        # Fixed typo: was self._view.remove(view); the attribute is self._views.
        self._views.remove(view)
    def close_views(self):
        """Close all the views (called when the time has run out)"""
        for view in self._views:
            view.close_view()
class WarningTimerController(object):
    """Mediates between a :class:`WarningTimerModel` and its message-box view.

    Parameters
    ----------
    model : :class:`WarningTimerModel`
        The Warning Timer Model
    view : :class:`QtWidgets.QMessageBox <PySide.QtGui.QMessageBox>`
        A message box with a :class:`WarningTimerModel` model
    """

    def __init__(self, model, view):
        self._model, self._view = model, view

    def minus_one_second(self):
        """Decrement the model's remaining time by one second."""
        self._model.time_to_wait = self._model.time_to_wait - 1
class WarningTimer(QtWidgets.QMessageBox):
    """A Warning Message Box that closes after some time
    Parameters
    ----------
    model : :class:`WarningTimerModel`
        The Warning Timer Model
    Attributes
    ----------
    model : :class:`WarningTimerModel`
        The Warning Timer Model
    control : :class:`WarningTimerController`
        The controller
    timer : :class:`QtCore.QTimer <PySide.QtCore.QTimer>`
        The timer
    """
    def __init__(self, model, start_timer=True):
        super(WarningTimer, self).__init__(model.parent)
        self.model = model
        self.control = WarningTimerController(self.model, self)
        # Register so model mutations (title/message/time) call back into this view.
        self.model.register(self)
        self.change_text()
        # Fire once per second; each tick decrements the model's countdown.
        self.timer = QtCore.QTimer(self)
        self.timer.setInterval(1000)
        self.timer.timeout.connect(self.one_second_passed)
        if start_timer:
            self.start_timer()
        self.setWindowTitle(self.model.title)
        self.setStandardButtons(QtWidgets.QMessageBox.Ok)
        self.setIcon(QtWidgets.QMessageBox.Warning)
    def start_timer(self):
        """Start the 1-second countdown timer."""
        self.timer.start()
    def one_second_passed(self):
        """After a second, subtract from the time_to_wait"""
        self.control.minus_one_second()
    def change_text(self):
        """Set the text to the text given by the model"""
        self.setText(self.model.text)
    def change_title(self):
        """Change the title to the model's title"""
        self.setWindowTitle(self.model.title)
    def close_view(self):
        """Close the Warning Box"""
        self.close()
    def closeEvent(self, event):
        # Stop ticking and accept the dialog whether closed by timer or user.
        self.timer.stop()
        self.accept()
        event.accept()
|
planetarypy/pdsspect
|
pdsspect/warningtimer.py
|
Python
|
bsd-3-clause
| 5,012
|
import plyvel
import ast
import hashlib
import os
import sys
from processor import print_log, logger
from utils import bc_address_to_hash_160, hash_160_to_pubkey_address, hex_to_int, int_to_hex, Hash
global GENESIS_HASH
GENESIS_HASH = '00000b7e804f0de87e7752550ff04d7686a4599509897feefd7f03904eb45633'
"""
Patricia tree for hashing unspents
"""
DEBUG = 0
KEYLENGTH = 20 + 32 + 4 #56
class Storage(object):
    def __init__(self, config, shared, test_reorgs):
        """Open (or create) the four LevelDB stores and load/initialise chain state.

        config: ConfigParser-like object with a [leveldb] section (path, pruning_limit).
        shared: shared server state; shared.stop() is called on fatal errors.
        test_reorgs: when True, undo info is written for every block (reorg testing).
        """
        self.dbpath = config.get('leveldb', 'path')
        if not os.path.exists(self.dbpath):
            os.mkdir(self.dbpath)
        self.pruning_limit = config.getint('leveldb', 'pruning_limit')
        self.shared = shared
        # Pending (hash, value) updates and child->parent links of the UTXO patricia tree.
        self.hash_list = {}
        self.parents = {}
        self.test_reorgs = test_reorgs
        try:
            self.db_utxo = plyvel.DB(os.path.join(self.dbpath,'utxo'), create_if_missing=True, compression=None)
            self.db_addr = plyvel.DB(os.path.join(self.dbpath,'addr'), create_if_missing=True, compression=None)
            self.db_hist = plyvel.DB(os.path.join(self.dbpath,'hist'), create_if_missing=True, compression=None)
            self.db_undo = plyvel.DB(os.path.join(self.dbpath,'undo'), create_if_missing=True, compression=None)
        except:
            logger.error('db init', exc_info=True)
            self.shared.stop()
        self.db_version = 3 # increase this when database needs to be updated
        try:
            # The 'height' key stores repr((last_hash, height, db_version)).
            self.last_hash, self.height, db_version = ast.literal_eval(self.db_undo.get('height'))
            print_log("Database version", self.db_version)
            print_log("Blockchain height", self.height)
        except:
            # Fresh (or unreadable) database: start from the genesis block.
            print_log('initializing database')
            self.height = 0
            self.last_hash = GENESIS_HASH
            db_version = self.db_version
            # write root
            self.put_node('', {})
        # check version
        if self.db_version != db_version:
            print_log("Your database '%s' is deprecated. Please create a new database"%self.dbpath)
            self.shared.stop()
            return
        # compute root hash
        d = self.get_node('')
        self.root_hash, v = self.get_node_hash('',d,None)
        print_log("UTXO tree root hash:", self.root_hash.encode('hex'))
        print_log("Coins in database:", v)
    # convert between bitcoin addresses and 20 bytes keys used for storage.
    def address_to_key(self, addr):
        """Return the 20-byte hash160 key under which *addr* is indexed in db_utxo."""
        return bc_address_to_hash_160(addr)
    def key_to_address(self, addr):
        """Inverse of address_to_key: 20-byte hash160 -> base58 pubkey address."""
        return hash_160_to_pubkey_address(addr)
    def get_proof(self, addr):
        """Return the list of (key, node) pairs (hex-encoded) from the tree root
        down to the first leaf at or after *addr*'s key."""
        key = self.address_to_key(addr)
        i = self.db_utxo.iterator(start=key)
        k, _ = i.next()
        p = self.get_path(k)
        p.append(k)
        out = []
        for item in p:
            v = self.db_utxo.get(item)
            out.append((item.encode('hex'), v.encode('hex')))
        return out
    def get_balance(self, addr):
        """Return the confirmed balance of *addr*, read from the aggregated value
        stored in its parent node's child entry (0 when the address has no UTXOs)."""
        key = self.address_to_key(addr)
        i = self.db_utxo.iterator(start=key)
        k, _ = i.next()
        if not k.startswith(key):
            return 0
        p = self.get_parent(k)
        d = self.get_node(p)
        letter = k[len(p)]
        # d[letter] is (hash, value); value aggregates all UTXOs under this prefix.
        return d[letter][1]
    def listunspent(self, addr):
        """Return the UTXOs of *addr* as dicts sorted by height.

        Leaf keys are KEYLENGTH bytes: 20-byte hash160 + 32-byte txid + 4-byte
        output index; leaf values are 8-byte amount + 4-byte height.
        """
        key = self.address_to_key(addr)
        if key is None:
            raise BaseException('Invalid Bitcoin address', addr)
        out = []
        for k, v in self.db_utxo.iterator(start=key):
            if not k.startswith(key):
                break
            # Only full-length keys are leaves; shorter keys are inner nodes.
            if len(k) == KEYLENGTH:
                txid = k[20:52].encode('hex')
                txpos = hex_to_int(k[52:56])
                h = hex_to_int(v[8:12])
                v = hex_to_int(v[0:8])
                out.append({'tx_hash': txid, 'tx_pos':txpos, 'height': h, 'value':v})
        out.sort(key=lambda x:x['height'])
        return out
    def get_history(self, addr):
        """Return the (pruned) tx history of *addr*: its unspent outputs plus the
        spent input/output pairs recorded in db_hist (fixed 80-byte records)."""
        out = []
        o = self.listunspent(addr)
        for item in o:
            out.append((item['tx_hash'], item['height']))
        h = self.db_hist.get(addr)
        while h:
            item = h[0:80]
            h = h[80:]
            txi = item[0:32].encode('hex')
            hi = hex_to_int(item[36:40])
            txo = item[40:72].encode('hex')
            ho = hex_to_int(item[76:80])
            out.append((txi, hi))
            out.append((txo, ho))
        # sort
        out.sort(key=lambda x:x[1])
        # uniqueness
        # NOTE(review): converting to a set AFTER sorting discards the ordering;
        # verify that callers re-sort the result.
        out = set(out)
        return map(lambda x: {'tx_hash':x[0], 'height':x[1]}, out)
    def get_address(self, txi):
        """Return the address owning outpoint *txi* (back-link stored in db_addr)."""
        return self.db_addr.get(txi)
def get_undo_info(self, height):
s = self.db_undo.get("undo_info_%d" % (height % 100))
if s is None: print_log("no undo info for ", height)
return eval(s)
    def write_undo_info(self, height, bitcoind_height, undo_info):
        """Persist undo data for *height*; kept only for recent blocks (within 100
        of the daemon tip) unless reorg testing forces it for every block."""
        if height > bitcoind_height - 100 or self.test_reorgs:
            self.db_undo.put("undo_info_%d" % (height % 100), repr(undo_info))
def common_prefix(self, word1, word2):
max_len = min(len(word1),len(word2))
for i in range(max_len):
if word2[i] != word1[i]:
index = i
break
else:
index = max_len
return word1[0:index]
    def put_node(self, key, d, batch=None):
        """Serialise node dict *d* ({child_byte: (hash, value)}) and store it at *key*.

        On-disk layout: a 32-byte bitmask of present children followed by one
        40-byte record (32-byte hash + 8-byte value) per child, in byte order.
        Writes through *batch* when given, otherwise directly to db_utxo.
        """
        k = 0
        serialized = ''
        for i in range(256):
            if chr(i) in d.keys():
                k += 1<<i
                h, v = d[chr(i)]
                # A not-yet-computed hash is stored as 32 zero bytes.
                if h is None: h = chr(0)*32
                vv = int_to_hex(v, 8).decode('hex')
                item = h + vv
                assert len(item) == 40
                serialized += item
        k = "0x%0.64X" % k # 32 bytes
        k = k[2:].decode('hex')
        assert len(k) == 32
        out = k + serialized
        if batch:
            batch.put(key, out)
        else:
            self.db_utxo.put(key, out)
    def get_node(self, key):
        """Read and deserialise the node stored at *key*; None when absent."""
        s = self.db_utxo.get(key)
        if s is None:
            return
        #print "get node", key.encode('hex'), len(key), s.encode('hex')
        k = int(s[0:32].encode('hex'), 16)
        s = s[32:]
        d = {}
        for i in range(256):
            # Bit i set -> child chr(i) present; consume its 40-byte record.
            if k % 2 == 1:
                _hash = s[0:32]
                value = hex_to_int(s[32:40])
                d[chr(i)] = (_hash, value)
                s = s[40:]
            k = k/2
        #cache
        return d
    def add_address(self, target, value, height):
        """Insert leaf *target* (full KEYLENGTH key) with *value* at *height* into
        the patricia tree, splitting/creating inner nodes as needed, then queue
        the hash updates along the traversed path.
        """
        assert len(target) == KEYLENGTH
        word = target
        key = ''
        path = [ '' ]
        i = self.db_utxo.iterator()
        # Walk down from the root, consuming matched prefixes of *target*.
        while key != target:
            items = self.get_node(key)
            if word[0] in items.keys():
                i.seek(key + word[0])
                new_key, _ = i.next()
                if target.startswith(new_key):
                    # add value to the child node
                    key = new_key
                    word = target[len(key):]
                    if key == target:
                        break
                    else:
                        assert key not in path
                        path.append(key)
                else:
                    # prune current node and add new node
                    prefix = self.common_prefix(new_key, target)
                    index = len(prefix)
                    ## get hash and value of new_key from parent (if it's a leaf)
                    if len(new_key) == KEYLENGTH:
                        parent_key = self.get_parent(new_key)
                        parent = self.get_node(parent_key)
                        z = parent[ new_key[len(parent_key)] ]
                        self.put_node(prefix, { target[index]:(None,0), new_key[index]:z } )
                    else:
                        # if it is not a leaf, update the hash of new_key because skip_string changed
                        h, v = self.get_node_hash(new_key, self.get_node(new_key), prefix)
                        self.put_node(prefix, { target[index]:(None,0), new_key[index]:(h,v) } )
                    path.append(prefix)
                    self.parents[new_key] = prefix
                    break
            else:
                assert key in path
                # No child starts with this byte yet: add an empty slot here.
                items[ word[0] ] = (None,0)
                self.put_node(key,items)
                break
        # write
        s = (int_to_hex(value, 8) + int_to_hex(height,4)).decode('hex')
        self.db_utxo.put(target, s)
        # the hash of a node is the txid
        _hash = target[20:52]
        self.update_node_hash(target, path, _hash, value)
def update_node_hash(self, node, path, _hash, value):
c = node
for x in path[::-1]:
self.parents[c] = x
c = x
self.hash_list[node] = (_hash, value)
    def update_hashes(self):
        """Flush self.hash_list bottom-up: recompute each dirty node's hash,
        propagate (hash, value) into its parent, and batch-write all modified
        nodes. Afterwards hash_list is empty and parents is reset."""
        nodes = {} # nodes to write
        # Deepest keys first, so children are final before their parent is hashed.
        for i in range(KEYLENGTH, -1, -1):
            for node in self.hash_list.keys():
                if len(node) != i: continue
                node_hash, node_value = self.hash_list.pop(node)
                # for each node, compute its hash, send it to the parent
                if node == '':
                    self.root_hash = node_hash
                    self.root_value = node_value
                    break
                parent = self.parents[node]
                # read parent.. do this in add_address
                d = nodes.get(parent)
                if d is None:
                    d = self.get_node(parent)
                    assert d is not None
                letter = node[len(parent)]
                assert letter in d.keys()
                # Inner node marked dirty (hash None): recompute from its children.
                if i != KEYLENGTH and node_hash is None:
                    d2 = self.get_node(node)
                    node_hash, node_value = self.get_node_hash(node, d2, parent)
                assert node_hash is not None
                # write new value
                d[letter] = (node_hash, node_value)
                nodes[parent] = d
                # iterate
                grandparent = self.parents[parent] if parent != '' else None
                parent_hash, parent_value = self.get_node_hash(parent, d, grandparent)
                self.hash_list[parent] = (parent_hash, parent_value)
        # batch write modified nodes
        batch = self.db_utxo.write_batch()
        for k, v in nodes.items():
            self.put_node(k, v, batch)
        batch.write()
        # cleanup
        assert self.hash_list == {}
        self.parents = {}
    def get_node_hash(self, x, d, parent):
        """Return (hash, value) of node *x* given its children dict *d*.

        The hash commits to the skip string (key bytes between *parent* and *x*)
        concatenated with the children's hashes; value sums the child values.
        """
        # final hash
        if x != '':
            skip_string = x[len(parent)+1:]
        else:
            skip_string = ''
        d2 = sorted(d.items())
        values = map(lambda x: x[1][1], d2)
        hashes = map(lambda x: x[1][0], d2)
        value = sum( values )
        _hash = self.hash( skip_string + ''.join(hashes) )
        return _hash, value
    def get_path(self, target):
        """Return the list of inner-node keys from the root down to (excluding)
        leaf *target*, or False when *target* is not in the tree."""
        word = target
        key = ''
        path = [ '' ]
        i = self.db_utxo.iterator(start='')
        while key != target:
            i.seek(key + word[0])
            try:
                new_key, _ = i.next()
                is_child = new_key.startswith(key + word[0])
            except StopIteration:
                is_child = False
            if is_child:
                if target.startswith(new_key):
                    # add value to the child node
                    key = new_key
                    word = target[len(key):]
                    if key == target:
                        break
                    else:
                        assert key not in path
                        path.append(key)
                else:
                    # Found a diverging key: target cannot be in the tree.
                    print_log('not in tree', self.db_utxo.get(key+word[0]), new_key.encode('hex'))
                    return False
            else:
                assert key in path
                break
        return path
def delete_address(self, leaf):
path = self.get_path(leaf)
if path is False:
print_log("addr not in tree", leaf.encode('hex'), self.key_to_address(leaf[0:20]), self.db_utxo.get(leaf))
raise
s = self.db_utxo.get(leaf)
self.db_utxo.delete(leaf)
if leaf in self.hash_list:
self.hash_list.pop(leaf)
parent = path[-1]
letter = leaf[len(parent)]
items = self.get_node(parent)
items.pop(letter)
# remove key if it has a single child
if len(items) == 1:
letter, v = items.items()[0]
self.db_utxo.delete(parent)
if parent in self.hash_list:
self.hash_list.pop(parent)
# we need the exact length for the iteration
i = self.db_utxo.iterator()
i.seek(parent+letter)
k, v = i.next()
# note: k is not necessarily a leaf
if len(k) == KEYLENGTH:
_hash, value = k[20:52], hex_to_int(v[0:8])
else:
_hash, value = None, None
self.update_node_hash(k, path[:-1], _hash, value)
else:
self.put_node(parent, items)
_hash, value = None, None
self.update_node_hash(parent, path[:-1], _hash, value)
return s
    def get_children(self, x):
        """Yield (key, value) for every direct child entry stored under prefix *x*,
        probing one possible child byte at a time via seek."""
        i = self.db_utxo.iterator()
        l = 0
        while l <256:
            i.seek(x+chr(l))
            k, v = i.next()
            if k.startswith(x+chr(l)):
                yield k, v
                l += 1
            elif k.startswith(x):
                # Landed on a later child byte: yield it and skip ahead past it.
                yield k, v
                l = ord(k[len(x)]) + 1
            else:
                break
def get_parent(self, x):
""" return parent and skip string"""
i = self.db_utxo.iterator()
for j in range(len(x)):
p = x[0:-j-1]
i.seek(p)
k, v = i.next()
if x.startswith(k) and x!=k:
break
else: raise
return k
    def hash(self, x):
        """Hash helper for tree nodes; in DEBUG mode returns a readable marker
        instead of the real digest so trees can be inspected by eye."""
        if DEBUG: return "hash("+x+")"
        return Hash(x)
    def get_root_hash(self):
        """Return the current root hash of the UTXO tree."""
        return self.root_hash
    def close(self):
        """Close all four LevelDB handles."""
        self.db_utxo.close()
        self.db_addr.close()
        self.db_hist.close()
        self.db_undo.close()
    def add_to_history(self, addr, tx_hash, tx_pos, value, tx_height):
        """Record a new UTXO (tx_hash:tx_pos, value, height) for *addr* and store
        the outpoint->address back-link used when the output is later spent."""
        key = self.address_to_key(addr)
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        # write the new history
        self.add_address(key + txo, value, tx_height)
        # backlink
        self.db_addr.put(txo, addr)
    def revert_add_to_history(self, addr, tx_hash, tx_pos, value, tx_height):
        """Undo add_to_history: remove the UTXO leaf and its back-link."""
        key = self.address_to_key(addr)
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        # delete
        self.delete_address(key + txo)
        # backlink
        self.db_addr.delete(txo)
    def get_utxo_value(self, addr, txi):
        """Return the amount (in satoshis) stored for outpoint *txi* of *addr*."""
        key = self.address_to_key(addr)
        leaf = key + txi
        s = self.db_utxo.get(leaf)
        value = hex_to_int(s[0:8])
        return value
    def set_spent(self, addr, txi, txid, index, height, undo):
        """Mark outpoint *txi* of *addr* as spent by (txid, index) at *height*.

        Removes the UTXO leaf (recording its value/height in *undo* so the spend
        can be reverted), drops the back-link, and appends an 80-byte record to
        the pruned per-address history in db_hist.
        """
        key = self.address_to_key(addr)
        leaf = key + txi
        s = self.delete_address(leaf)
        value = hex_to_int(s[0:8])
        in_height = hex_to_int(s[8:12])
        undo[leaf] = value, in_height
        # delete backlink txi-> addr
        self.db_addr.delete(txi)
        # add to history
        s = self.db_hist.get(addr)
        if s is None: s = ''
        txo = (txid + int_to_hex(index,4) + int_to_hex(height,4)).decode('hex')
        s += txi + int_to_hex(in_height,4).decode('hex') + txo
        # Keep only the most recent pruning_limit 80-byte records.
        s = s[ -80*self.pruning_limit:]
        self.db_hist.put(addr, s)
    def revert_set_spent(self, addr, txi, undo):
        """Undo set_spent using the value/height saved in *undo*: restore the
        back-link and the UTXO leaf, and trim the last history record."""
        key = self.address_to_key(addr)
        leaf = key + txi
        # restore backlink
        self.db_addr.put(txi, addr)
        v, height = undo.pop(leaf)
        self.add_address(leaf, v, height)
        # revert add to history
        s = self.db_hist.get(addr)
        # s might be empty if pruning limit was reached
        if not s:
            return
        assert s[-80:-44] == txi
        s = s[:-80]
        self.db_hist.put(addr, s)
    def import_transaction(self, txid, tx, block_height, touched_addr):
        """Apply transaction *tx* at *block_height*: spend its inputs and create
        its outputs, collecting affected addresses in *touched_addr*.

        Returns the undo dict consumed by revert_transaction.
        """
        undo = { 'prev_addr':[] } # contains the list of pruned items for each address in the tx; also, 'prev_addr' is a list of prev addresses
        prev_addr = []
        for i, x in enumerate(tx.get('inputs')):
            txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
            addr = self.get_address(txi)
            if addr is not None:
                self.set_spent(addr, txi, txid, i, block_height, undo)
                touched_addr.add(addr)
            prev_addr.append(addr)
        undo['prev_addr'] = prev_addr
        # here I add only the outputs to history; maybe I want to add inputs too (that's in the other loop)
        for x in tx.get('outputs'):
            addr = x.get('address')
            if addr is None: continue
            self.add_to_history(addr, txid, x.get('index'), x.get('value'), block_height)
            touched_addr.add(addr)
        return undo
    def revert_transaction(self, txid, tx, block_height, touched_addr, undo):
        """Undo import_transaction in reverse order (outputs first, then inputs)
        using the *undo* dict it returned; *undo* must be fully consumed."""
        #print_log("revert tx", txid)
        for x in reversed(tx.get('outputs')):
            addr = x.get('address')
            if addr is None: continue
            self.revert_add_to_history(addr, txid, x.get('index'), x.get('value'), block_height)
            touched_addr.add(addr)
        prev_addr = undo.pop('prev_addr')
        for i, x in reversed(list(enumerate(tx.get('inputs')))):
            addr = prev_addr[i]
            if addr is not None:
                txi = (x.get('prevout_hash') + int_to_hex(x.get('prevout_n'), 4)).decode('hex')
                self.revert_set_spent(addr, txi, undo)
                touched_addr.add(addr)
        assert undo == {}
|
TrainMAnB/electrum-vcn-server
|
src/storage.py
|
Python
|
agpl-3.0
| 18,186
|
"""Hello Analytics Reporting API V4."""
import argparse
from apiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials
import httplib2
from oauth2client import client
from oauth2client import file
from oauth2client import tools
SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']
DISCOVERY_URI = ('https://analyticsreporting.googleapis.com/$discovery/rest')
KEY_FILE_LOCATION = 'bicraft-proj2-c9df8b3c22d8.p12'
SERVICE_ACCOUNT_EMAIL = 'bicraft-proj2-107712655864129243091@developer.gserviceaccount.com'
VIEW_ID = '69520391'
def initialize_analyticsreporting():
  """Initializes an analyticsreporting service object.
  Returns:
    analytics an authorized analyticsreporting service object.
  """
  # Service-account (p12) credentials scoped to read-only Analytics access.
  credentials = ServiceAccountCredentials.from_p12_keyfile(
      SERVICE_ACCOUNT_EMAIL, KEY_FILE_LOCATION, scopes=SCOPES)
  http = credentials.authorize(httplib2.Http())
  # Build the service object.
  analytics = build('analytics', 'v4', http=http, discoveryServiceUrl=DISCOVERY_URI)
  return analytics
def get_report(analytics):
  """Query the Reporting API for the last 7 days of sessions on VIEW_ID.

  analytics: authorized analyticsreporting service object.
  Returns the raw batchGet response dict.
  """
  # Use the Analytics Service Object to query the Analytics Reporting API V4.
  return analytics.reports().batchGet(
      body={
        'reportRequests': [
        {
          'viewId': VIEW_ID,
          'dateRanges': [{'startDate': '7daysAgo', 'endDate': 'today'}],
          'metrics': [{'expression': 'ga:sessions'}]
        }]
      }
  ).execute()
def print_response(response):
  """Parses and prints the Analytics Reporting API V4 response"""
  # One report per reportRequest; headers describe the row layout.
  for report in response.get('reports', []):
    columnHeader = report.get('columnHeader', {})
    dimensionHeaders = columnHeader.get('dimensions', [])
    metricHeaders = columnHeader.get('metricHeader', {}).get('metricHeaderEntries', [])
    rows = report.get('data', {}).get('rows', [])
    for row in rows:
      dimensions = row.get('dimensions', [])
      dateRangeValues = row.get('metrics', [])
      # Dimension values line up positionally with their headers.
      for header, dimension in zip(dimensionHeaders, dimensions):
        print header + ': ' + dimension
      # One values entry per requested date range.
      for i, values in enumerate(dateRangeValues):
        print 'Date range (' + str(i) + ')'
        for metricHeader, value in zip(metricHeaders, values.get('values')):
          print metricHeader.get('name') + ': ' + value
def main():
  """Authenticate, run the sessions report, and print the parsed result."""
  analytics = initialize_analyticsreporting()
  response = get_report(analytics)
  print_response(response)

if __name__ == '__main__':
  main()
|
desavera/bicraft
|
var/googleapi/HelloAnalytics.py
|
Python
|
gpl-3.0
| 2,445
|
"""Unit tests for PyGraphviz interface."""
import os
import tempfile
from nose import SkipTest
from nose.tools import assert_true, assert_equal, assert_raises
from networkx.testing import assert_edges_equal, assert_nodes_equal, \
assert_graphs_equal
import networkx as nx
class TestAGraph(object):
    """Round-trip and drawing tests for the networkx <-> pygraphviz bridge."""
    @classmethod
    def setupClass(cls):
        global pygraphviz
        try:
            import pygraphviz
        except ImportError:
            raise SkipTest('PyGraphviz not available.')

    def build_graph(self, G):
        """Populate *G* with a fixed edge set, an isolated node and a graph attr."""
        edges = [('A', 'B'), ('A', 'C'), ('A', 'C'), ('B', 'C'), ('A', 'D')]
        G.add_edges_from(edges)
        G.add_node('E')
        G.graph['metal'] = 'bronze'
        return G

    def assert_equal(self, G1, G2):
        """Assert node set, edge set and the 'metal' graph attribute all match."""
        assert_nodes_equal(G1.nodes(), G2.nodes())
        assert_edges_equal(G1.edges(), G2.edges())
        assert_equal(G1.graph['metal'], G2.graph['metal'])

    def agraph_checks(self, G):
        """Round-trip *G* through AGraph, a dot file path, and a dot file handle."""
        G = self.build_graph(G)
        A = nx.nx_agraph.to_agraph(G)
        H = nx.nx_agraph.from_agraph(A)
        self.assert_equal(G, H)
        fname = tempfile.mktemp()
        nx.drawing.nx_agraph.write_dot(H, fname)
        Hin = nx.nx_agraph.read_dot(fname)
        os.unlink(fname)
        self.assert_equal(H, Hin)
        (fd, fname) = tempfile.mkstemp()
        with open(fname, 'w') as fh:
            nx.drawing.nx_agraph.write_dot(H, fh)
        with open(fname, 'r') as fh:
            Hin = nx.nx_agraph.read_dot(fh)
        os.unlink(fname)
        self.assert_equal(H, Hin)

    def test_from_agraph_name(self):
        G = nx.Graph(name='test')
        A = nx.nx_agraph.to_agraph(G)
        H = nx.nx_agraph.from_agraph(A)
        # Fixed: assert on the round-tripped graph H; asserting G.name was
        # vacuously true since G was constructed with name='test'.
        assert_equal(H.name, 'test')

    def test_undirected(self):
        self.agraph_checks(nx.Graph())

    def test_directed(self):
        self.agraph_checks(nx.DiGraph())

    def test_multi_undirected(self):
        self.agraph_checks(nx.MultiGraph())

    def test_multi_directed(self):
        self.agraph_checks(nx.MultiDiGraph())

    def test_view_pygraphviz(self):
        G = nx.Graph()  # "An empty graph cannot be drawn."
        assert_raises(nx.NetworkXException, nx.nx_agraph.view_pygraphviz, G)
        G = nx.barbell_graph(4, 6)
        nx.nx_agraph.view_pygraphviz(G)

    def test_view_pygraphviz_edgelable(self):
        G = nx.Graph()
        G.add_edge(1, 2, weight=7)
        G.add_edge(2, 3, weight=8)
        nx.nx_agraph.view_pygraphviz(G, edgelabel='weight')

    def test_graph_with_reserved_keywords(self):
        # test attribute/keyword clash case for #1582
        # node: n
        # edges: u,v
        G = nx.Graph()
        G = self.build_graph(G)
        G.node['E']['n'] = 'keyword'
        G.edges[('A', 'B')]['u'] = 'keyword'
        G.edges[('A', 'B')]['v'] = 'keyword'
        A = nx.nx_agraph.to_agraph(G)

    def test_round_trip(self):
        G = nx.Graph()
        A = nx.nx_agraph.to_agraph(G)
        H = nx.nx_agraph.from_agraph(A)
        #assert_graphs_equal(G, H)
        AA = nx.nx_agraph.to_agraph(H)
        HH = nx.nx_agraph.from_agraph(AA)
        assert_graphs_equal(H, HH)
        # from_agraph materialises empty default attr dicts; add them to G
        # so the final comparison is apples-to-apples.
        G.graph['graph'] = {}
        G.graph['node'] = {}
        G.graph['edge'] = {}
        assert_graphs_equal(G, HH)
|
kenshay/ImageScript
|
ProgramData/SystemFiles/Python/Lib/site-packages/networkx/drawing/tests/test_agraph.py
|
Python
|
gpl-3.0
| 3,262
|
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## This is a sample controller
## - index is the default action of any application
## - user is required for authentication and authorization
## - download is for downloading files uploaded in the db (does streaming)
## - call exposes all registered services (none by default)
#########################################################################
@auth.requires_login()
@auth.requires_membership('Super-Administrator')
def academic_log():
    """Admin grid of academic course assignations for the current year period.

    web2py controller (auth, db, T, SQLFORM are framework-injected globals).
    Returns the grid plus the neighbouring periods used for navigation links.
    """
    import cpfecys
    #assignation = request.vars['assignation']
    year_period = cpfecys.current_year_period()
    # How many periods to show on each side of the current one.
    max_display = 1
    currentyear_period = db.period_year(db.period_year.id == year_period)
    query = ((db.academic_course_assignation.semester == currentyear_period.id))
    #db.academic_course_assignation.assignation.default = check.project
    # Lock the assignation and semester columns; semester is forced to the
    # current period so new rows land in the right semester.
    db.academic_course_assignation.assignation.writable = False
    db.academic_course_assignation.assignation.readable = False
    db.academic_course_assignation.semester.default = currentyear_period.id
    db.academic_course_assignation.semester.writable = False
    db.academic_course_assignation.semester.readable = False
    grid = SQLFORM.grid(query)
    current_period_name = T(cpfecys.second_period.name)
    if currentyear_period.period == cpfecys.first_period.id:
        current_period_name = T(cpfecys.first_period.name)
    # NOTE(review): limitby windows assume period ids are contiguous and
    # aligned with row order -- verify against the period_year table.
    start_index = currentyear_period.id - max_display - 1
    if start_index < 1:
        start_index = 0
    end_index = currentyear_period.id + max_display
    periods_before = db(db.period_year).select(limitby=(start_index, \
        currentyear_period.id - 1))
    periods_after = db(db.period_year).select(limitby=(currentyear_period.id, \
        end_index))
    other_periods = db(db.period_year).select()
    return dict(grid = grid,
        currentyear_period = currentyear_period,
        current_period_name = current_period_name,
        periods_before = periods_before,
        periods_after = periods_after,
        other_periods = other_periods,
        )
|
ECYS-USAC/cpfecys
|
src/cpfecys/controllers/admin_academic.py
|
Python
|
lgpl-3.0
| 2,248
|
from pygame import image, Rect
class ScrollButton():
    """Clickable up/down arrow used for scrolling.

    Loads the arrow image matching *direction* ("up" or "down") from the
    resource path and keeps a rect sized to the image for positioning/hit tests.
    """
    # Maps the supported directions to their arrow image files.
    _ARROW_IMAGES = {"up": "img/uparrow.png", "down": "img/downarrow.png"}

    def __init__(self, direction, resourcePath):
        self.direction = direction
        if self.direction in self._ARROW_IMAGES:
            self.image = image.load(resourcePath + self._ARROW_IMAGES[self.direction])
        self.rect = Rect((0, 0), self.image.get_size())

    def update(self, position):
        """Move the button's rect to *position*, keeping the image's size."""
        self.rect = Rect(position, self.image.get_size())
|
ZakDoesGaming/OregonTrail
|
lib/scrollButton.py
|
Python
|
mit
| 452
|
# Copyright: 2011, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
import logging
import sys
try:
from cStringIO import StringIO
except ImportError:
from io import BytesIO as StringIO
import random
try:
from urlparse import urlsplit
except ImportError:
from urllib.parse import urlsplit
from six.moves.http_cookiejar import CookieJar
import six
from weblib.http import (normalize_http_values,
normalize_post_data, normalize_url)
from weblib.encoding import make_str, make_unicode
from user_agent import generate_user_agent
import pycurl
from grab.cookie import create_cookie, CookieManager
from grab import error
from grab.error import GrabMisuseError
from grab.document import Document
from grab.upload import UploadFile, UploadContent
from grab.transport.base import BaseTransport
from grab.util.log import PycurlSigintHandler
# pylint: disable=invalid-name
logger = logging.getLogger('grab.transport.curl')
# pylint: enable=invalid-name
# We should ignore SIGPIPE when using pycurl.NOSIGNAL - see
# the libcurl tutorial for more info.
# http://curl.haxx.se/mail/curlpython-2005-06/0004.html
# http://curl.haxx.se/mail/lib-2010-03/0114.html
# FIXME: is NOSIGNAL option is required?
# CURLOPT_NOSIGNAL
# Pass a long. If it is 1, libcurl will not use any functions that install
# signal handlers or any functions that cause signals to be sent to the
# process. This option is mainly here to allow multi-threaded unix applications
# to still set/use all timeout options etc, without risking getting signals.
# (Added in 7.10)
# If this option is set and libcurl has been built with the standard name
# resolver, timeouts will not occur while the name resolve takes place.
# Consider building libcurl with c-ares support to enable
# asynchronous DNS
# lookups, which enables nice timeouts for name resolves without signals.
YEAR_2030_EPOCH_TIME = 1893456000
try:
import signal
from signal import SIGPIPE, SIG_IGN
try:
signal.signal(SIGPIPE, SIG_IGN)
except ValueError:
# Ignore the exception
# ValueError: signal only works in main thread
pass
except ImportError:
pass
def process_upload_items(items):
    """Convert (key, value) pairs into pycurl HTTPPOST form items.

    ``UploadContent`` values become in-memory FORM_BUFFER entries,
    ``UploadFile`` values become FORM_FILE entries; anything else is
    passed through unchanged.
    """
    converted = []
    for field_name, field_value in items:
        if isinstance(field_value, UploadContent):
            spec = [pycurl.FORM_BUFFER, field_value.filename,
                    pycurl.FORM_BUFFERPTR, field_value.content]
            if field_value.content_type:
                spec += [pycurl.FORM_CONTENTTYPE, field_value.content_type]
            converted.append((field_name, tuple(spec)))
        elif isinstance(field_value, UploadFile):
            spec = [pycurl.FORM_FILE, field_value.path]
            if field_value.filename:
                spec += [pycurl.FORM_FILENAME, field_value.filename]
            if field_value.content_type:
                spec += [pycurl.FORM_CONTENTTYPE, field_value.content_type]
            converted.append((field_name, tuple(spec)))
        else:
            converted.append((field_name, field_value))
    return converted
class CurlTransport(BaseTransport):
    """
    Grab transport layer using pycurl.
    """
    def __init__(self):
        """Create the pycurl handle and initialize per-request state."""
        super(CurlTransport, self).__init__()
        self.curl = pycurl.Curl()
        # this assignments makes pylint happy
        self.config_nobody = None
        self.config_body_maxsize = None
        self.request_head = None
        self.request_body = None
        self.verbose_logging = None
        self.response_header_chunks = None
        self.response_body_chunks = None
        self.reset()
    def reset(self):
        """Reset per-request state so the curl handle can be reused."""
        super(CurlTransport, self).reset()
        self.response_header_chunks = []
        self.response_body_chunks = []
        self.response_body_bytes_read = 0
        self.verbose_logging = False
        self.config_nobody = None
        self.config_body_maxsize = 0
        # FIXME: maybe move to super-class???
        self.request_head = b''
        self.request_body = b''
        #self.request_log = ''
        # Flag checked by build_grab_exception() to distinguish a
        # deliberate callback interruption from a real write error.
        self.curl.grab_callback_interrupted = False
    def header_processor(self, chunk):
        """
        Process head of response.
        """
        self.response_header_chunks.append(chunk)
        # Returning None implies that all bytes were written
        return None
    def body_processor(self, chunk):
        """
        Process body of response.
        """
        # In nobody mode interrupt the transfer as soon as any body
        # data arrives; pycurl then raises error 23, which is
        # suppressed in build_grab_exception().
        if self.config_nobody:
            self.curl.grab_callback_interrupted = True
            return 0
        bytes_read = len(chunk)
        self.response_body_bytes_read += bytes_read
        if self.body_file:
            self.body_file.write(chunk)
        else:
            self.response_body_chunks.append(chunk)
        if self.config_body_maxsize is not None:
            if self.response_body_bytes_read > self.config_body_maxsize:
                logger.debug('Response body max size limit reached: %s',
                             self.config_body_maxsize)
                self.curl.grab_callback_interrupted = True
                return 0
        # Returning None implies that all bytes were written
        return None
    def debug_processor(self, _type, text):
        """
        Process request details.
        0: CURLINFO_TEXT
        1: CURLINFO_HEADER_IN
        2: CURLINFO_HEADER_OUT
        3: CURLINFO_DATA_IN
        4: CURLINFO_DATA_OUT
        5: CURLINFO_unrecognized_type
        """
        if _type == pycurl.INFOTYPE_HEADER_OUT:
            if isinstance(text, six.text_type):
                text = text.encode('utf-8')
            self.request_head += text
        if _type == pycurl.INFOTYPE_DATA_OUT:
            # Untill 7.19.5.2 version
            # pycurl gives unicode in `text` variable
            # WTF??? Probably that codes would fails
            # or does unexpected things if you use
            # pycurl<7.19.5.2
            if isinstance(text, six.text_type):
                text = text.encode('utf-8')
            self.request_body += text
        #if _type == pycurl.INFOTYPE_TEXT:
        #    if self.request_log is None:
        #        self.request_log = ''
        #    self.request_log += text
        if self.verbose_logging:
            if _type in (pycurl.INFOTYPE_TEXT, pycurl.INFOTYPE_HEADER_IN,
                         pycurl.INFOTYPE_HEADER_OUT):
                marker_types = {
                    pycurl.INFOTYPE_TEXT: 'i',
                    pycurl.INFOTYPE_HEADER_IN: '<',
                    pycurl.INFOTYPE_HEADER_OUT: '>',
                }
                marker = marker_types[_type]
                logger.debug('%s: %s', marker, text.rstrip())
    def process_config(self, grab):
        """
        Setup curl instance with values from ``self.config``.
        """
        # Copy some config for future usage
        self.config_nobody = grab.config['nobody']
        self.config_body_maxsize = grab.config['body_maxsize']
        try:
            request_url = normalize_url(grab.config['url'])
        except Exception as ex:
            raise error.GrabInvalidUrl(
                u'%s: %s' % (six.text_type(ex), grab.config['url']))
        # py3 hack
        if not six.PY3:
            request_url = make_str(request_url)
        self.curl.setopt(pycurl.URL, request_url)
        # 30* redirects are handled by Grab
        self.curl.setopt(pycurl.FOLLOWLOCATION, 0)
        self.curl.setopt(pycurl.MAXREDIRS, grab.config['redirect_limit'])
        self.curl.setopt(pycurl.CONNECTTIMEOUT, grab.config['connect_timeout'])
        self.curl.setopt(pycurl.TIMEOUT, grab.config['timeout'])
        #self.curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
        # self.curl.setopt(pycurl.DNS_CACHE_TIMEOUT, 0)
        if not grab.config['connection_reuse']:
            self.curl.setopt(pycurl.FRESH_CONNECT, 1)
            self.curl.setopt(pycurl.FORBID_REUSE, 1)
        # NOSIGNAL requires the SIGPIPE handling done at module import
        # time (see the top of this module).
        self.curl.setopt(pycurl.NOSIGNAL, 1)
        self.curl.setopt(pycurl.HEADERFUNCTION, self.header_processor)
        if grab.config['body_inmemory']:
            self.curl.setopt(pycurl.WRITEFUNCTION, self.body_processor)
        else:
            if not grab.config['body_storage_dir']:
                raise error.GrabMisuseError(
                    'Option body_storage_dir is not defined')
            self.setup_body_file(
                grab.config['body_storage_dir'],
                grab.config['body_storage_filename'],
                create_dir=grab.config['body_storage_create_dir'])
            self.curl.setopt(pycurl.WRITEFUNCTION, self.body_processor)
        if grab.config['verbose_logging']:
            self.verbose_logging = True
        # User-Agent
        # Pick one: explicit option, random line from user_agent_file,
        # or a generated one.
        if grab.config['user_agent'] is None:
            if grab.config['user_agent_file'] is not None:
                with open(grab.config['user_agent_file']) as inf:
                    lines = inf.read().splitlines()
                grab.config['user_agent'] = random.choice(lines)
            else:
                grab.config['user_agent'] = generate_user_agent()
        # If value is None then set empty string
        # None is not acceptable because in such case
        # pycurl will set its default user agent "PycURL/x.xx.x"
        if not grab.config['user_agent']:
            grab.config['user_agent'] = ''
        self.curl.setopt(pycurl.USERAGENT, grab.config['user_agent'])
        if grab.config['debug']:
            self.curl.setopt(pycurl.VERBOSE, 1)
            self.curl.setopt(pycurl.DEBUGFUNCTION, self.debug_processor)
        # Ignore SSL errors
        self.curl.setopt(pycurl.SSL_VERIFYPEER, 0)
        self.curl.setopt(pycurl.SSL_VERIFYHOST, 0)
        # Disabled to avoid SSL3_READ_BYTES:sslv3 alert handshake failure error
        # self.curl.setopt(pycurl.SSLVERSION, pycurl.SSLVERSION_SSLv3)
        if grab.request_method in ('POST', 'PUT'):
            if (grab.config['post'] is None
                    and grab.config['multipart_post'] is None):
                raise GrabMisuseError('Neither `post` or `multipart_post`'
                                      ' options was specified for the %s'
                                      ' request' % grab.request_method)
        if grab.request_method == 'POST':
            self.curl.setopt(pycurl.POST, 1)
            if grab.config['multipart_post']:
                if isinstance(grab.config['multipart_post'], six.string_types):
                    raise error.GrabMisuseError(
                        'multipart_post option could not be a string')
                post_items = normalize_http_values(
                    grab.config['multipart_post'],
                    charset=grab.config['charset'],
                    ignore_classes=(UploadFile, UploadContent),
                )
                # py3 hack
                #if six.PY3:
                #    post_items = decode_pairs(post_items,
                #                              grab.config['charset'])
                self.curl.setopt(pycurl.HTTPPOST,
                                 process_upload_items(post_items))
            elif grab.config['post']:
                post_data = normalize_post_data(grab.config['post'],
                                                grab.config['charset'])
                # py3 hack
                # if six.PY3:
                #     post_data = smart_unicode(post_data,
                #                               grab.config['charset'])
                self.curl.setopt(pycurl.POSTFIELDS, post_data)
            else:
                self.curl.setopt(pycurl.POSTFIELDS, '')
        elif grab.request_method == 'PUT':
            data = grab.config['post']
            if isinstance(data, six.text_type):
                # py3 hack
                # if six.PY3:
                #     data = data.encode('utf-8')
                # else:
                raise error.GrabMisuseError(
                    'Value of post option could be only '
                    'byte string if PUT method is used')
            self.curl.setopt(pycurl.UPLOAD, 1)
            self.curl.setopt(pycurl.CUSTOMREQUEST, 'PUT')
            self.curl.setopt(pycurl.READFUNCTION, StringIO(data).read)
            self.curl.setopt(pycurl.INFILESIZE, len(data))
        elif grab.request_method == 'PATCH':
            data = grab.config['post']
            if isinstance(data, six.text_type):
                raise error.GrabMisuseError(
                    'Value of post option could be only byte '
                    'string if PATCH method is used')
            self.curl.setopt(pycurl.UPLOAD, 1)
            self.curl.setopt(pycurl.CUSTOMREQUEST, 'PATCH')
            self.curl.setopt(pycurl.READFUNCTION, StringIO(data).read)
            self.curl.setopt(pycurl.INFILESIZE, len(data))
        elif grab.request_method == 'DELETE':
            self.curl.setopt(pycurl.CUSTOMREQUEST, 'DELETE')
        elif grab.request_method == 'HEAD':
            self.curl.setopt(pycurl.NOBODY, 1)
        elif grab.request_method == 'UPLOAD':
            self.curl.setopt(pycurl.UPLOAD, 1)
        elif grab.request_method == 'GET':
            self.curl.setopt(pycurl.HTTPGET, 1)
        elif grab.request_method == 'OPTIONS':
            data = grab.config['post']
            if data is not None:
                if isinstance(data, six.text_type):
                    raise error.GrabMisuseError(
                        'Value of post option could be only byte '
                        'string if PATCH method is used')
                self.curl.setopt(pycurl.UPLOAD, 1)
                self.curl.setopt(pycurl.READFUNCTION, StringIO(data).read)
                self.curl.setopt(pycurl.INFILESIZE, len(data))
            self.curl.setopt(pycurl.CUSTOMREQUEST, 'OPTIONS')
        else:
            raise error.GrabMisuseError('Invalid method: %s' %
                                        grab.request_method)
        headers = grab.config['common_headers'].copy()
        if grab.config['headers']:
            headers.update(grab.config['headers'])
        # This is required to avoid some problems
        headers.update({'Expect': ''})
        header_tuples = [str('%s: %s' % x) for x
                         in headers.items()]
        self.curl.setopt(pycurl.HTTPHEADER, header_tuples)
        self.process_cookie_options(grab, request_url)
        if grab.config['referer']:
            self.curl.setopt(pycurl.REFERER, str(grab.config['referer']))
        if grab.config['proxy']:
            self.curl.setopt(pycurl.PROXY, str(grab.config['proxy']))
        else:
            # Explicitly clear any proxy configured on a previous request
            # made with this reused curl handle.
            self.curl.setopt(pycurl.PROXY, '')
        if grab.config['proxy_userpwd']:
            self.curl.setopt(pycurl.PROXYUSERPWD,
                             str(grab.config['proxy_userpwd']))
        if grab.config['proxy_type']:
            key = 'PROXYTYPE_%s' % grab.config['proxy_type'].upper()
            self.curl.setopt(pycurl.PROXYTYPE, getattr(pycurl, key))
        if grab.config['encoding']:
            if ('gzip' in grab.config['encoding'] and
                    'zlib' not in pycurl.version):
                raise error.GrabMisuseError(
                    'You can not use gzip encoding because '
                    'pycurl was built without zlib support')
            self.curl.setopt(pycurl.ENCODING, grab.config['encoding'])
        if grab.config['userpwd']:
            self.curl.setopt(pycurl.USERPWD, str(grab.config['userpwd']))
        if grab.config.get('interface') is not None:
            self.curl.setopt(pycurl.INTERFACE, grab.config['interface'])
        if grab.config.get('reject_file_size') is not None:
            self.curl.setopt(pycurl.MAXFILESIZE,
                             grab.config['reject_file_size'])
    def process_cookie_options(self, grab, request_url):
        """Feed cookies from config and cookie file into the curl handle."""
        request_host = urlsplit(request_url).netloc.split(':')[0]
        if request_host.startswith('www.'):
            request_host_no_www = request_host[4:]
        else:
            request_host_no_www = request_host
        # `cookiefile` option should be processed before `cookies` option
        # because `load_cookies` updates `cookies` option
        if grab.config['cookiefile']:
            # Do not raise exception if cookie file does not exist
            try:
                grab.cookies.load_from_file(grab.config['cookiefile'])
            except IOError as ex:
                logging.error(ex)
        # Process `cookies` option that is simple dict i.e.
        # it provides only `name` and `value` attributes of cookie
        # No domain, no path, no expires, etc
        # I pass these no-domain cookies to *each* requested domain
        # by setting these cookies with corresponding domain attribute
        # Trying to guess better domain name by removing leading "www."
        if grab.config['cookies']:
            if not isinstance(grab.config['cookies'], dict):
                raise error.GrabMisuseError('cookies option should be a dict')
            for name, value in grab.config['cookies'].items():
                grab.cookies.set(
                    name=name,
                    value=value,
                    domain=request_host_no_www
                )
        # Erase known cookies stored in pycurl handler
        self.curl.setopt(pycurl.COOKIELIST, 'ALL')
        # Enable pycurl cookie processing mode
        self.curl.setopt(pycurl.COOKIELIST, '')
        # Put all cookies from `grab.cookies.cookiejar` to
        # the pycurl instance.
        # We put *all* cookies, for all host names
        # Pycurl cookie engine is smart enough to send
        # only cookies belong to the current request's host name
        for cookie in grab.cookies.cookiejar:
            self.curl.setopt(pycurl.COOKIELIST,
                             self.get_netscape_cookie_spec(cookie,
                                                           request_host))
    def get_netscape_cookie_spec(self, cookie, request_host):
        """Serialize a cookie into a Netscape cookie-file line (bytes)."""
        # FIXME: Now cookie.domain could not be None
        # request_host is not needed anymore
        host = make_unicode(cookie.domain) or request_host
        if cookie.get_nonstandard_attr('HttpOnly'):
            # The "#HttpOnly_" prefix is the curl convention for
            # marking HttpOnly cookies in Netscape format.
            host = '#HttpOnly_' + host
        items = [
            host,
            u'TRUE',
            make_unicode(cookie.path),
            u'TRUE' if cookie.secure else u'FALSE',
            make_unicode(str(
                cookie.expires if cookie.expires
                else YEAR_2030_EPOCH_TIME
            )),
            make_unicode(cookie.name),
            make_unicode(cookie.value),
        ]
        return (u'\t'.join(items)).encode('utf-8')
    def request(self):
        """Perform the configured network request.

        Translates pycurl errors into Grab exceptions and wraps any
        unexpected exception into GrabInternalError (preserving the
        original traceback).
        """
        sigint_handler = PycurlSigintHandler()
        try:
            with sigint_handler.handle_sigint():
                self.curl.perform()
        except pycurl.error as ex:
            new_ex = build_grab_exception(ex, self.curl)
            if new_ex:
                raise new_ex # pylint: disable=raising-bad-type
        except Exception as ex: # pylint: disable=broad-except
            six.reraise(error.GrabInternalError, error.GrabInternalError(ex),
                        sys.exc_info()[2])
        finally:
            self.curl.grab_callback_interrupted = False
    def prepare_response(self, grab):
        """Build a Document object from the collected response data."""
        if self.body_file:
            self.body_file.close()
        response = Document()
        response.head = b''.join(self.response_header_chunks)
        if self.body_path:
            response.body_path = self.body_path
        else:
            response.body = b''.join(self.response_body_chunks)
        # Clear memory
        self.response_header_chunks = []
        self.response_body_chunks = []
        response.code = self.curl.getinfo(pycurl.HTTP_CODE)
        response.total_time = self.curl.getinfo(pycurl.TOTAL_TIME)
        response.connect_time = self.curl.getinfo(pycurl.CONNECT_TIME)
        response.name_lookup_time = self.curl.getinfo(pycurl.NAMELOOKUP_TIME)
        response.download_size = self.curl.getinfo(pycurl.SIZE_DOWNLOAD)
        response.upload_size = self.curl.getinfo(pycurl.SIZE_UPLOAD)
        response.download_speed = self.curl.getinfo(pycurl.SPEED_DOWNLOAD)
        response.remote_ip = self.curl.getinfo(pycurl.PRIMARY_IP)
        response.url = self.curl.getinfo(pycurl.EFFECTIVE_URL)
        response.parse(charset=grab.config['document_charset'])
        response.cookies = CookieManager(self.extract_cookiejar())
        # We do not need anymore cookies stored in the
        # curl instance so drop them
        self.curl.setopt(pycurl.COOKIELIST, 'ALL')
        return response
    def extract_cookiejar(self):
        """
        Extract cookies that pycurl instance knows.
        Returns `CookieJar` object.
        """
        # Example of line:
        # www.google.com\tFALSE\t/accounts/\tFALSE\t0'
        # \tGoogleAccountsLocale_session\ten
        # Fields:
        # * domain
        # * whether or not all machines under that domain can
        # read the cookie's information.
        # * path
        # * Secure Flag: whether or not a secure connection (HTTPS)
        # is required to read the cookie.
        # * exp. timestamp
        # * name
        # * value
        cookiejar = CookieJar()
        for line in self.curl.getinfo(pycurl.INFO_COOKIELIST):
            values = line.split('\t')
            domain = values[0].lower()
            if domain.startswith('#httponly_'):
                domain = domain.replace('#httponly_', '')
                httponly = True
            else:
                httponly = False
            # old
            # cookies[values[-2]] = values[-1]
            # new
            cookie = create_cookie(
                name=values[5],
                value=values[6],
                domain=domain,
                path=values[2],
                secure=values[3] == "TRUE",
                expires=int(values[4]) if values[4] else None,
                httponly=httponly,
            )
            cookiejar.set_cookie(cookie)
        return cookiejar
    def __getstate__(self):
        """
        Reset curl attribute which could not be pickled.
        """
        state = self.__dict__.copy()
        state['curl'] = None
        return state
    def __setstate__(self, state):
        """
        Create pycurl instance after Grab instance was restored
        from pickled state.
        """
        state['curl'] = pycurl.Curl()
        self.__dict__ = state # pylint: disable=attribute-defined-outside-init
def build_grab_exception(ex, curl):
    """
    Build Grab exception from the pycurl exception
    Args:
        ex - the original pycurl exception
        curl - the Curl instance raised the exception
    """
    # CURLE_WRITE_ERROR (23)
    # An error occurred when writing received data to a local file, or
    # an error was returned to libcurl from a write callback.
    # This exception should be ignored if grab_callback_interrupted
    # flag # is enabled (this happens when nohead or nobody options
    # enabled)
    #
    # Also this error is raised when curl receives KeyboardInterrupt
    # while it is processing some callback function
    # (WRITEFUNCTION, HEADERFUNCTIO, etc)
    # If you think WTF then see details here:
    # https://github.com/pycurl/pycurl/issues/413
    err_code = ex.args[0]
    if err_code == 23:
        if getattr(curl, 'grab_callback_interrupted', None) is True:
            # The body_process callback was interrupted deliberately
            # (body_maxsize, nobody and other options), so the
            # resulting error 23 is not an error at all.
            return None
        return error.GrabNetworkError(ex.args[1], ex)
    # Map well-known libcurl error codes to dedicated Grab exceptions;
    # everything else becomes a generic GrabNetworkError.
    code_to_exception = {
        28: error.GrabTimeoutError,
        7: error.GrabConnectionError,
        67: error.GrabAuthError,
        47: error.GrabTooManyRedirectsError,
        6: error.GrabCouldNotResolveHostError,
        3: error.GrabInvalidUrl,
    }
    exc_cls = code_to_exception.get(err_code, error.GrabNetworkError)
    return exc_cls(ex.args[1], ex)
|
istinspring/grab
|
grab/transport/curl.py
|
Python
|
mit
| 24,234
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_utils import uuidutils
from neutron.agent.common import config as agent_config
from neutron.agent.l3 import router_info
from neutron.agent.linux import ip_lib
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.tests import base
# Shorthand used by the tests below to generate random UUID strings.
_uuid = uuidutils.generate_uuid
class TestRouterInfo(base.BaseTestCase):
    """Tests for RouterInfo routing-table updates and address scopes."""
    def setUp(self):
        # Patch IPWrapper so no real "ip" commands are executed; the
        # mock is inspected via _check_agent_method_called().
        super(TestRouterInfo, self).setUp()
        conf = agent_config.setup_conf()
        self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
        ip_cls = self.ip_cls_p.start()
        self.mock_ip = mock.MagicMock()
        ip_cls.return_value = self.mock_ip
        self.ri_kwargs = {'agent_conf': conf,
                          'interface_driver': mock.sentinel.interface_driver}
    def _check_agent_method_called(self, calls):
        # Assert that every expected "ip" command was executed inside
        # the router namespace (order-insensitive).
        self.mock_ip.netns.execute.assert_has_calls(
            [mock.call(call, check_exit_code=False) for call in calls],
            any_order=True)
    def test_routing_table_update(self):
        """Replace/delete of single routes issues the matching ip commands."""
        ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
        ri.router = {}
        fake_route1 = {'destination': '135.207.0.0/16',
                       'nexthop': '1.2.3.4'}
        fake_route2 = {'destination': '135.207.111.111/32',
                       'nexthop': '1.2.3.4'}
        ri.update_routing_table('replace', fake_route1)
        expected = [['ip', 'route', 'replace', 'to', '135.207.0.0/16',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(expected)
        ri.update_routing_table('delete', fake_route1)
        expected = [['ip', 'route', 'delete', 'to', '135.207.0.0/16',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(expected)
        ri.update_routing_table('replace', fake_route2)
        expected = [['ip', 'route', 'replace', 'to', '135.207.111.111/32',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(expected)
        ri.update_routing_table('delete', fake_route2)
        expected = [['ip', 'route', 'delete', 'to', '135.207.111.111/32',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(expected)
    def test_update_routing_table(self):
        # Just verify the correct namespace was used in the call
        uuid = _uuid()
        netns = 'qrouter-' + uuid
        fake_route1 = {'destination': '135.207.0.0/16',
                       'nexthop': '1.2.3.4'}
        ri = router_info.RouterInfo(uuid, {'id': uuid}, **self.ri_kwargs)
        ri._update_routing_table = mock.Mock()
        ri.update_routing_table('replace', fake_route1)
        ri._update_routing_table.assert_called_once_with('replace',
                                                         fake_route1,
                                                         netns)
    def test_routes_updated(self):
        """Diffing old vs new routes triggers replace/delete commands."""
        ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
        ri.router = {}
        fake_old_routes = []
        fake_new_routes = [{'destination': "110.100.31.0/24",
                            'nexthop': "10.100.10.30"},
                           {'destination': "110.100.30.0/24",
                            'nexthop': "10.100.10.30"}]
        ri.routes = fake_old_routes
        ri.router['routes'] = fake_new_routes
        # Adding two new routes -> two "replace" commands.
        ri.routes_updated(fake_old_routes, fake_new_routes)
        expected = [['ip', 'route', 'replace', 'to', '110.100.30.0/24',
                     'via', '10.100.10.30'],
                    ['ip', 'route', 'replace', 'to', '110.100.31.0/24',
                     'via', '10.100.10.30']]
        self._check_agent_method_called(expected)
        ri.routes = fake_new_routes
        fake_new_routes = [{'destination': "110.100.30.0/24",
                            'nexthop': "10.100.10.30"}]
        ri.router['routes'] = fake_new_routes
        # Dropping one route -> one "delete" command.
        ri.routes_updated(ri.routes, fake_new_routes)
        expected = [['ip', 'route', 'delete', 'to', '110.100.31.0/24',
                     'via', '10.100.10.30']]
        self._check_agent_method_called(expected)
        fake_new_routes = []
        ri.router['routes'] = fake_new_routes
        ri.routes_updated(ri.routes, fake_new_routes)
        expected = [['ip', 'route', 'delete', 'to', '110.100.30.0/24',
                     'via', '10.100.10.30']]
        self._check_agent_method_called(expected)
    def test_add_ports_address_scope_iptables(self):
        """process_address_scope() adds a mangle rule per internal port."""
        ri = router_info.RouterInfo(_uuid(), {}, **self.ri_kwargs)
        port = {
            'id': _uuid(),
            'fixed_ips': [{'ip_address': '172.9.9.9'}],
            'address_scopes': {l3_constants.IP_VERSION_4: '1234'}
        }
        ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
        ri.get_address_scope_mark_mask = mock.Mock(return_value='fake_mark')
        ri.get_internal_device_name = mock.Mock(return_value='fake_device')
        ri.rt_tables_manager = mock.MagicMock()
        ri.process_external_port_address_scope_routing = mock.Mock()
        ri.process_floating_ip_address_scope_rules = mock.Mock()
        ri.iptables_manager._apply = mock.Mock()
        ri.router[l3_constants.INTERFACE_KEY] = [port]
        ri.process_address_scope()
        ipv4_mangle.add_rule.assert_called_once_with(
            'scope', ri.address_scope_mangle_rule('fake_device', 'fake_mark'))
class BasicRouterTestCaseFramework(base.BaseTestCase):
    """Shared helper base for RouterInfo test cases."""

    def _create_router(self, router=None, **kwargs):
        """Return a RouterInfo built with mocked conf and driver.

        Also stores the generated router id and the mocked agent conf
        on the test case for later assertions.
        """
        router = router or mock.MagicMock()
        self.agent_conf = mock.Mock()
        self.router_id = _uuid()
        return router_info.RouterInfo(
            self.router_id, router, self.agent_conf,
            mock.sentinel.interface_driver, **kwargs)
class TestBasicRouterOperations(BasicRouterTestCaseFramework):
    """Tests for RouterInfo floating IP NAT/mangle rule processing."""

    def test_get_floating_ips(self):
        """get_floating_ips() returns the list stored on the router dict."""
        router = mock.MagicMock()
        router.get.return_value = [mock.sentinel.floating_ip]
        ri = self._create_router(router)
        fips = ri.get_floating_ips()
        self.assertEqual([mock.sentinel.floating_ip], fips)

    def test_process_floating_ip_nat_rules(self):
        ri = self._create_router()
        fips = [{'fixed_ip_address': mock.sentinel.ip,
                 'floating_ip_address': mock.sentinel.fip}]
        ri.get_floating_ips = mock.Mock(return_value=fips)
        ri.iptables_manager = mock.MagicMock()
        ipv4_nat = ri.iptables_manager.ipv4['nat']
        ri.floating_forward_rules = mock.Mock(
            return_value=[(mock.sentinel.chain, mock.sentinel.rule)])
        ri.process_floating_ip_nat_rules()
        # Be sure that the rules are cleared first and apply is called last
        self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
                         ipv4_nat.mock_calls[0])
        self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
        # Be sure that add_rule is called somewhere in the middle
        ipv4_nat.add_rule.assert_called_once_with(mock.sentinel.chain,
                                                  mock.sentinel.rule,
                                                  tag='floating_ip')

    def test_process_floating_ip_nat_rules_removed(self):
        """With no floating IPs, old rules are cleared and none are added."""
        ri = self._create_router()
        ri.get_floating_ips = mock.Mock(return_value=[])
        ri.iptables_manager = mock.MagicMock()
        ipv4_nat = ri.iptables_manager.ipv4['nat']
        ri.process_floating_ip_nat_rules()
        # Be sure that the rules are cleared first and apply is called last
        self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
                         ipv4_nat.mock_calls[0])
        self.assertEqual(mock.call.apply(), ri.iptables_manager.mock_calls[-1])
        # No floating IPs -> no rules should be added
        self.assertFalse(ipv4_nat.add_rule.called)

    def test_process_floating_ip_address_scope_rules_diff_scopes(self):
        """A mangle rule is added when fixed and external scopes differ."""
        ri = self._create_router()
        fips = [{'fixed_ip_address': mock.sentinel.ip,
                 'floating_ip_address': mock.sentinel.fip,
                 'fixed_ip_address_scope': 'scope1'}]
        ri.get_floating_ips = mock.Mock(return_value=fips)
        ri._get_external_address_scope = mock.Mock(return_value='scope2')
        ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
        ri.floating_mangle_rules = mock.Mock(
            return_value=[(mock.sentinel.chain1, mock.sentinel.rule1)])
        ri.get_external_device_name = mock.Mock()
        ri.process_floating_ip_address_scope_rules()
        # Be sure that the rules are cleared first
        self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
                         ipv4_mangle.mock_calls[0])
        # Be sure that add_rule is called somewhere in the middle
        self.assertEqual(1, ipv4_mangle.add_rule.call_count)
        self.assertEqual(mock.call.add_rule(mock.sentinel.chain1,
                                            mock.sentinel.rule1,
                                            tag='floating_ip'),
                         ipv4_mangle.mock_calls[1])

    def test_process_floating_ip_address_scope_rules_same_scopes(self):
        """No mangle rule is added when fixed and external scopes match."""
        ri = self._create_router()
        fips = [{'fixed_ip_address': mock.sentinel.ip,
                 'floating_ip_address': mock.sentinel.fip,
                 'fixed_ip_address_scope': 'scope1'}]
        ri.get_floating_ips = mock.Mock(return_value=fips)
        ri._get_external_address_scope = mock.Mock(return_value='scope1')
        ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
        ri.process_floating_ip_address_scope_rules()
        # Be sure that the rules are cleared first
        self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
                         ipv4_mangle.mock_calls[0])
        # Same scope -> no rule should be added
        self.assertFalse(ipv4_mangle.add_rule.called)

    def test_process_floating_ip_mangle_rules_removed(self):
        """With no floating IPs, mangle rules are cleared and none added."""
        ri = self._create_router()
        ri.get_floating_ips = mock.Mock(return_value=[])
        ipv4_mangle = ri.iptables_manager.ipv4['mangle'] = mock.MagicMock()
        ri.process_floating_ip_address_scope_rules()
        # Be sure that the rules are cleared first
        self.assertEqual(mock.call.clear_rules_by_tag('floating_ip'),
                         ipv4_mangle.mock_calls[0])
        self.assertFalse(ipv4_mangle.add_rule.called)

    def _test_add_fip_addr_to_device_error(self, device):
        """Call _add_fip_addr_to_device with *device* and return its result."""
        ri = self._create_router()
        ip = '15.1.2.3'
        result = ri._add_fip_addr_to_device(
            {'id': mock.sentinel.id, 'floating_ip_address': ip}, device)
        device.addr.add.assert_called_with(ip + '/32')
        return result

    def test__add_fip_addr_to_device(self):
        result = self._test_add_fip_addr_to_device_error(mock.Mock())
        self.assertTrue(result)

    def test__add_fip_addr_to_device_error(self):
        # A RuntimeError from the device should make the helper report
        # failure instead of raising.
        device = mock.Mock()
        device.addr.add.side_effect = RuntimeError
        result = self._test_add_fip_addr_to_device_error(device)
        self.assertFalse(result)

    def test_process_snat_dnat_for_fip(self):
        """NAT rule failures are wrapped into FloatingIpSetupException."""
        ri = self._create_router()
        ri.process_floating_ip_nat_rules = mock.Mock(side_effect=Exception)
        self.assertRaises(n_exc.FloatingIpSetupException,
                          ri.process_snat_dnat_for_fip)
        ri.process_floating_ip_nat_rules.assert_called_once_with()

    def test_put_fips_in_error_state(self):
        """Every configured fip is reported with ERROR status."""
        ri = self._create_router()
        ri.router = mock.Mock()
        ri.router.get.return_value = [{'id': mock.sentinel.id1},
                                      {'id': mock.sentinel.id2}]
        statuses = ri.put_fips_in_error_state()
        # put_fips_in_error_state() returns a dict mapping fip id to
        # status, so compare against that dict directly.  The previous
        # version wrapped the dict in a list and used assertNotEqual,
        # which passed vacuously regardless of the method's result.
        expected = {mock.sentinel.id1: l3_constants.FLOATINGIP_STATUS_ERROR,
                    mock.sentinel.id2: l3_constants.FLOATINGIP_STATUS_ERROR}
        self.assertEqual(expected, statuses)

    def test_configure_fip_addresses(self):
        """Address setup failures are wrapped into FloatingIpSetupException."""
        ri = self._create_router()
        ri.process_floating_ip_addresses = mock.Mock(
            side_effect=Exception)
        self.assertRaises(n_exc.FloatingIpSetupException,
                          ri.configure_fip_addresses,
                          mock.sentinel.interface_name)
        ri.process_floating_ip_addresses.assert_called_once_with(
            mock.sentinel.interface_name)

    def test_get_router_cidrs_returns_cidrs(self):
        """get_router_cidrs() returns the set of CIDRs on the device."""
        ri = self._create_router()
        addresses = ['15.1.2.2/24', '15.1.2.3/32']
        device = mock.MagicMock()
        device.addr.list.return_value = [{'cidr': addresses[0]},
                                         {'cidr': addresses[1]}]
        self.assertEqual(set(addresses), ri.get_router_cidrs(device))
@mock.patch.object(ip_lib, 'IPDevice')
class TestFloatingIpWithMockDevice(BasicRouterTestCaseFramework):
    # The class-level patch injects the IPDevice mock as an extra
    # argument into every test method below.
    def test_process_floating_ip_addresses_remap(self, IPDevice):
        """A fip already present on the device is reported ACTIVE
        without adding or deleting any address."""
        fip_id = _uuid()
        fip = {
            'id': fip_id, 'port_id': _uuid(),
            'floating_ip_address': '15.1.2.3',
            'fixed_ip_address': '192.168.0.2',
            'status': l3_constants.FLOATINGIP_STATUS_DOWN
        }
        IPDevice.return_value = device = mock.Mock()
        device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
        ri = self._create_router()
        ri.get_floating_ips = mock.Mock(return_value=[fip])
        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)
        self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ACTIVE},
                         fip_statuses)
        self.assertFalse(device.addr.add.called)
        self.assertFalse(device.addr.delete.called)
    def test_process_router_with_disabled_floating_ip(self, IPDevice):
        """A fip removed from the router gets no status entry."""
        fip_id = _uuid()
        fip = {
            'id': fip_id, 'port_id': _uuid(),
            'floating_ip_address': '15.1.2.3',
            'fixed_ip_address': '192.168.0.2'
        }
        ri = self._create_router()
        ri.floating_ips = [fip]
        ri.get_floating_ips = mock.Mock(return_value=[])
        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)
        self.assertIsNone(fip_statuses.get(fip_id))
    def test_process_router_floating_ip_with_device_add_error(self, IPDevice):
        """A failure while adding the fip yields ERROR status."""
        IPDevice.return_value = device = mock.Mock(side_effect=RuntimeError)
        device.addr.list.return_value = []
        fip_id = _uuid()
        fip = {
            'id': fip_id, 'port_id': _uuid(),
            'floating_ip_address': '15.1.2.3',
            'fixed_ip_address': '192.168.0.2',
            'status': 'DOWN'
        }
        ri = self._create_router()
        ri.add_floating_ip = mock.Mock(
            return_value=l3_constants.FLOATINGIP_STATUS_ERROR)
        ri.get_floating_ips = mock.Mock(return_value=[fip])
        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)
        self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ERROR},
                         fip_statuses)
    # TODO(mrsmith): refactor for DVR cases
    def test_process_floating_ip_addresses_remove(self, IPDevice):
        """An address on the device with no matching fip is removed."""
        IPDevice.return_value = device = mock.Mock()
        device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
        ri = self._create_router()
        ri.remove_floating_ip = mock.Mock()
        ri.router.get = mock.Mock(return_value=[])
        fip_statuses = ri.process_floating_ip_addresses(
            mock.sentinel.interface_name)
        self.assertEqual({}, fip_statuses)
        ri.remove_floating_ip.assert_called_once_with(device, '15.1.2.3/32')
|
wolverineav/neutron
|
neutron/tests/unit/agent/l3/test_router_info.py
|
Python
|
apache-2.0
| 16,514
|
# coding=utf-8
# MIT License
#
# Copyright 2021 Google LLC
# Copyright (c) 2018 Maluuba Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Jumping task."""
from __future__ import print_function
import argparse
import gym
import pygame
from gym import spaces
from gym.utils import seeding
import numpy as np
import time
################## COLORS #####################
# Colors of the different objects on the screen
RGB_WHITE = (255, 255, 255)
RGB_GREY = (128, 128, 128)
RGB_BLACK = (0, 0, 0)
GREYSCALE_WHITE = 1.0
GREYSCALE_GREY = 0.5
###############################################
############### JUMP PARAMETERS ###############
# The jump shape is a `hat`:
# - diagonal going up until the jump height
# - then diagonal going down
JUMP_HEIGHT = 15
JUMP_VERTICAL_SPEED = 1
JUMP_HORIZONTAL_SPEED = 1
###############################################
############ OBSTACLE POSITIONS ###############
# OBSTACLE_*: fixed x positions of two obstacles on the floor.
# Constrained by the shape of the jump.
# This is used as a form of ultimate generalization test.
# Used when two_obstacles is set to True in the environment
OBSTACLE_1 = 20
OBSTACLE_2 = 55
# These are the 6 random positions used in the paper.
# (3 x values times 2 y values = 6 combinations;
# NOTE(review): the y values appear to be floor heights -- confirm
# against the environment code below.)
ALLOWED_OBSTACLE_X = [20, 30, 40]
ALLOWED_OBSTACLE_Y = [10, 20]
# Max and min positions (pixel bounds used by the environment;
# exact semantics defined by the class below).
LEFT = 14
RIGHT = 48
DOWN = 0
UP = 41
###############################################
class JumpTaskEnv(gym.Env):
  """Gym environment for the jumping task.

  An agent moves right along a floor and must jump over one or two grey
  obstacles to reach the right edge of the screen.  Observations are
  greyscale screen images (see get_state); the reward is the horizontal
  displacement per step, plus a bonus on exit and a penalty on collision.
  """

  def __init__(self,
               seed=42,
               scr_w=60,
               scr_h=60,
               floor_height=10,
               agent_w=5,
               agent_h=10,
               agent_init_pos=0,
               agent_speed=1,
               obstacle_position=30,
               obstacle_size=(9, 10),
               rendering=False,
               zoom=8,
               slow_motion=False,
               with_left_action=False,
               max_number_of_steps=600,
               two_obstacles=False,
               finish_jump=False,
               use_colors=False):
    """Environment for the jumping task.
    Args:
      scr_w: screen width, by default 60 pixels
      scr_h: screen height, by default 60 pixels
      floor_height: the height of the floor in pixels, by default 10 pixels
      agent_w: agent width, by default 5 pixels
      agent_h: agent height, by default 10 pixels
      agent_init_pos: initial x position of the agent (on the floor), defaults
        to the left of the screen
      agent_speed: agent lateral speed, measured in pixels per time step,
        by default 1 pixel
      obstacle_position: initial x position of the obstacle (on the floor),
        by default 0 pixels, which is the leftmost one
      obstacle_size: width and height of the obstacle, by default (9, 10)
      rendering: display the game screen, by default False
      zoom: zoom applied to the screen when rendering, by default 8
      slow_motion: if True, sleeps for 0.1 seconds at each time step.
        Allows to watch the game at "human" speed when played by the agent, by
        default False
      with_left_action: if True, the left action is allowed, by default False
      max_number_of_steps: the maximum number of steps for an episode, by
        default 600.
      two_obstacles: puts two obstacles on the floor at a given location.
        The ultimate generalization test, by default False
      finish_jump: perform a full jump when the jump action is selected.
        Otherwise an action needs to be selected as usual, by default False.
      use_colors: Whether to use RGB image or not.
    """
    # Initialize seed.
    self.seed(seed)
    # Terminal reward constants: 'life' on collision, 'exit' on success.
    self.rewards = {'life': -1, 'exit': 100}
    self.scr_w = scr_w
    self.scr_h = scr_h
    if use_colors:
      # 3-channel observation shape.  NOTE(review): get_state() below only
      # builds a 2-D greyscale array, so the RGB shape is presumably consumed
      # by a subclass or wrapper — confirm before relying on it.
      self.state_shape = [scr_w, scr_h, 3]
    else:
      self.state_shape = [scr_w, scr_h]
    self.rendering = rendering
    self.zoom = zoom
    if rendering:
      self.screen = pygame.display.set_mode((zoom*scr_w, zoom*scr_h))
    if with_left_action:
      self.legal_actions = [0, 1, 2]  # 0: right, 1: jump, 2: left
    else:
      self.legal_actions = [0, 1]  # 0: right, 1: jump
    self.nb_actions = len(self.legal_actions)
    self.agent_speed = agent_speed
    self.agent_current_speed = agent_speed * JUMP_HORIZONTAL_SPEED
    # jumping[0]: whether the agent is airborne; jumping[1]: "up"/"down"/None.
    self.jumping = [False, None]
    self.agent_init_pos = agent_init_pos
    self.agent_size = [agent_w, agent_h]
    self.obstacle_size = obstacle_size
    self.step_id = 0
    self.slow_motion = slow_motion
    self.max_number_of_steps = max_number_of_steps
    self.finish_jump = finish_jump
    # Min and max positions of the obstacle
    self.min_x_position = LEFT
    self.max_x_position = RIGHT
    self.min_y_position = DOWN
    self.max_y_position = UP
    # Define gym env objects
    self.observation_space = spaces.Box(low=0, high=1, shape=(self.state_shape))
    self.action_space = spaces.Discrete(self.nb_actions)
    self.reset()

  def _game_status(self):
    """Returns two booleans stating whether the agent is touching the obstacle(s) (failure)
    and whether the agent has reached the right end of the screen (success).
    """
    def _overlapping_objects(env, sx, sy):
      # Axis-aligned rectangle overlap test between the obstacle at (sx, sy)
      # and the agent rectangle.
      return sx + env.obstacle_size[0] > env.agent_pos_x and sx < env.agent_pos_x + env.agent_size[0] \
          and sy + env.obstacle_size[1] > env.agent_pos_y and sy < env.agent_pos_y + env.agent_size[1]

    if self.two_obstacles:
      failure = _overlapping_objects(self, OBSTACLE_1, self.floor_height) or \
          _overlapping_objects(self, OBSTACLE_2, self.floor_height)
    else:
      failure = _overlapping_objects(
          self, self.obstacle_position, self.floor_height)
    # Success: the agent's right edge has crossed the right screen border.
    success = self.scr_w < self.agent_pos_x + self.agent_size[0]
    self.done = bool(failure or success)
    # Rendering happens here (not in step) so the screen tracks every
    # intermediate jump frame when finish_jump is set.
    if self.rendering:
      self.render()
      if self.slow_motion:
        time.sleep(0.1)
    return failure, success

  def _continue_jump(self):
    """Updates the position of the agent while jumping.
    Needs to be called at each discrete step of the jump
    """
    # Horizontal drift continues during the jump; clamp at the left wall.
    self.agent_pos_x = np.max([self.agent_pos_x + self.agent_current_speed, 0])
    # Switch from the ascending to the descending diagonal at the jump apex.
    if self.agent_pos_y > self.floor_height + JUMP_HEIGHT:
      self.jumping[1] = "down"
    if self.jumping[1] == "up":
      self.agent_pos_y += self.agent_speed * JUMP_VERTICAL_SPEED
    elif self.jumping[1] == "down":
      self.agent_pos_y -= self.agent_speed * JUMP_VERTICAL_SPEED
      # Landing: back on the floor ends the jump.
      if self.agent_pos_y == self.floor_height:
        self.jumping[0] = False

  def reset(self, ):
    """Resets the game.
    To be called at the beginning of each episode for training as in the paper.
    Sets the obstacle at one of six random positions.
    """
    obstacle_position = self.np_random.choice(ALLOWED_OBSTACLE_X)
    floor_height = self.np_random.choice(ALLOWED_OBSTACLE_Y)
    return self._reset(obstacle_position, floor_height)

  def _reset(self, obstacle_position=30, floor_height=10, two_obstacles=False):
    """Resets the game.
    Allows to set different obstacle positions and floor heights
    Args:
      obstacle_position: the x position of the obstacle for the new game
      floor_height: the floor height for the new game
      two_obstacles: whether to switch to a two obstacles environment
    """
    self.agent_pos_x = self.agent_init_pos
    self.agent_pos_y = floor_height
    self.agent_current_speed = self.agent_speed * JUMP_HORIZONTAL_SPEED
    self.jumping = [False, None]
    self.step_id = 0
    self.done = False
    self.floor_height = floor_height
    self.two_obstacles = two_obstacles
    # With two obstacles the fixed OBSTACLE_1/OBSTACLE_2 positions are used,
    # so obstacle_position is ignored and not validated.
    if two_obstacles:
      return self.get_state()

    if obstacle_position < self.min_x_position or obstacle_position >= self.max_x_position:
      raise ValueError('The obstacle x position needs to be in the range [{}, {}]'.format(self.min_x_position, self.max_x_position))
    if floor_height < self.min_y_position or floor_height >= self.max_y_position:
      raise ValueError('The floor height needs to be in the range [{}, {}]'.format(self.min_y_position, self.max_y_position))
    self.obstacle_position = obstacle_position
    return self.get_state()

  def close(self):
    """Exits the game and closes the rendering.
    """
    self.done = True
    if self.rendering:
      pygame.quit()

  def seed(self, seed=None):
    """Seed used in the random selection of the obstacle position
    """
    self.np_random, seed = seeding.np_random(seed)
    return [seed]

  def get_state(self):
    """Returns an np array of the screen in greyscale
    """
    obs = np.zeros((self.scr_h, self.scr_w), dtype=np.float32)

    def _fill_rec(left, up, size, color):
      # Paint a size[0] x size[1] rectangle whose lower-left corner is
      # (left, up); obs is transposed on return, so axes read (x, y) here.
      obs[left: left + size[0], up: up + size[1]] = color

    # Add agent and obstacles
    _fill_rec(self.agent_pos_x, self.agent_pos_y, self.agent_size, 1.0)
    if self.two_obstacles:
      # Multiple obstacles
      _fill_rec(OBSTACLE_1, self.floor_height,
                self.obstacle_size, GREYSCALE_GREY)
      _fill_rec(OBSTACLE_2, self.floor_height,
                self.obstacle_size, GREYSCALE_GREY)
    else:
      _fill_rec(self.obstacle_position, self.floor_height,
                self.obstacle_size, GREYSCALE_GREY)

    # Draw the outline of the screen
    obs[0:self.scr_w, 0] = GREYSCALE_WHITE
    obs[0:self.scr_w, self.scr_h-1] = GREYSCALE_WHITE
    obs[0, 0:self.scr_h] = GREYSCALE_WHITE
    obs[self.scr_w-1, 0:self.scr_h] = GREYSCALE_WHITE
    # Draw the floor
    obs[0:self.scr_w, self.floor_height] = GREYSCALE_WHITE
    return obs.T

  def step(self, action):
    """Updates the game state based on the action selected.
    Returns the state as a greyscale numpy array, the reward obtained by the agent
    and a boolean stating whether the next state is terminal.
    The reward is defined as a +1 for each pixel movement to the right.
    Args
      action: the action to be taken by the agent
    """
    # Record the starting x position; the net reward below is the signed
    # horizontal displacement over this step (reward += agent_pos_x later).
    reward = -self.agent_pos_x
    if self.step_id > self.max_number_of_steps:
      print('You have reached the maximum number of steps.')
      self.done = True
      # NOTE(review): this early return has no 'collision' key in the info
      # dict, unlike the normal return path below.
      return self.get_state(), 0., self.done, {}
    elif action not in self.legal_actions:
      raise ValueError(
          'We did not recognize that action. '
          'It should be an int in {}'.format(self.legal_actions))
    # While airborne the chosen action is ignored and the jump continues.
    if self.jumping[0]:
      self._continue_jump()
    elif action == 0:  # right
      self.agent_pos_x += self.agent_speed
      self.agent_current_speed = self.agent_speed * JUMP_HORIZONTAL_SPEED
    elif action == 1:  # jump
      self.jumping = [True, "up"]
      self._continue_jump()
    elif action == 2:  # left, can only be taken if self.with_left_action is set to True
      if self.agent_pos_x > 0:
        self.agent_pos_x -= self.agent_speed
        self.agent_current_speed = -self.agent_speed * JUMP_HORIZONTAL_SPEED
      else:
        # Pinned against the left wall: no horizontal drift during a jump.
        self.agent_current_speed = 0

    killed, exited = self._game_status()
    if self.finish_jump:
      # Continue jumping until jump is finished
      # Being in the air is marked by self.jumping[0]
      while self.jumping[0] and not killed and not exited:
        self._continue_jump()
        killed, exited = self._game_status()

    reward += self.agent_pos_x
    # Terminal rewards override/augment the displacement reward.
    if killed:
      reward = self.rewards['life']
    elif exited:
      reward += self.rewards['exit']
    self.step_id += 1
    return self.get_state(), reward, self.done, {'collision': killed}

  def render(self):
    """Render the screen game using pygame.
    """
    if not self.rendering:
      return
    pygame.event.pump()
    self.screen.fill(RGB_BLACK)
    # pygame's y axis points down, so world y coordinates are flipped via
    # (scr_h - y) before being scaled by the zoom factor.
    pygame.draw.line(self.screen, RGB_WHITE,
                     [0, self.zoom*(self.scr_h-self.floor_height)],
                     [self.zoom*self.scr_w, self.zoom*(self.scr_h-self.floor_height)], 1)
    agent = pygame.Rect(self.zoom*self.agent_pos_x,
                        self.zoom*(self.scr_h-self.agent_pos_y-self.agent_size[1]),
                        self.zoom*self.agent_size[0],
                        self.zoom*self.agent_size[1])
    pygame.draw.rect(self.screen, RGB_WHITE, agent)
    if self.two_obstacles:
      # First obstacle is drawn here; the second rect falls through to the
      # shared draw call below.
      obstacle = pygame.Rect(self.zoom*OBSTACLE_1,
                             self.zoom*(self.scr_h-self.floor_height-self.obstacle_size[1]),
                             self.zoom*self.obstacle_size[0],
                             self.zoom*self.obstacle_size[1])
      pygame.draw.rect(self.screen, RGB_GREY, obstacle)
      obstacle = pygame.Rect(self.zoom*OBSTACLE_2,
                             self.zoom*(self.scr_h-self.floor_height-self.obstacle_size[1]),
                             self.zoom*self.obstacle_size[0],
                             self.zoom*self.obstacle_size[1])
    else:
      obstacle = pygame.Rect(self.zoom*self.obstacle_position,
                             self.zoom*(self.scr_h-self.obstacle_size[1]-self.floor_height),
                             self.zoom*self.obstacle_size[0],
                             self.zoom*self.obstacle_size[1])
    pygame.draw.rect(self.screen, RGB_GREY, obstacle)
    pygame.display.flip()
def test(args):
  """Play the jumping task interactively with the keyboard.

  Controls: right arrow moves right, up arrow jumps, left arrow moves left
  (only when --with_left_action is set), 'e' exits.

  Args:
    args: parsed argparse namespace with the environment options declared in
      the __main__ block below.
  """
  env = JumpTaskEnv(scr_w=args.scr_w, scr_h=args.scr_h, floor_height=args.floor_height,
                    agent_w=args.agent_w, agent_h=args.agent_h, agent_init_pos=args.agent_init_pos, agent_speed=args.agent_speed,
                    obstacle_position=args.obstacle_position, obstacle_size=args.obstacle_size,
                    rendering=True, zoom=args.zoom, slow_motion=True, with_left_action=args.with_left_action,
                    max_number_of_steps=args.max_number_of_steps, two_obstacles=args.two_obstacles, finish_jump=args.finish_jump)
  env.render()
  score = 0
  while not env.done:
    action = None
    if env.jumping[0] and env.finish_jump:
      # While airborne, step() ignores the chosen action and continues the
      # jump, so any legal action works here.  (The original code passed 3,
      # which is never a legal action and would raise ValueError in step().)
      action = 0
    else:
      events = pygame.event.get()
      for event in events:
        if event.type == pygame.KEYDOWN:
          if event.key == pygame.K_RIGHT:
            action = 0
          elif event.key == pygame.K_UP:
            action = 1
          elif event.key == pygame.K_LEFT and args.with_left_action:
            action = 2
          elif event.key == pygame.K_e:
            # Bug fix: the environment's shutdown method is close(), not
            # exit(); env.exit() raised AttributeError.
            env.close()
          else:
            action = 'unknown'
    if action is None:
      continue
    elif action == 'unknown':
      print('We did not recognize that action. Please use the arrows to move the agent or the \'e\' key to exit.')
      continue
    _, r, term, _ = env.step(action)
    env.render()
    score += r
    # step() can return a float reward (e.g. 0. on timeout), so cast before
    # using the integer format code.
    print('Agent position: {:2d} | Reward: {:2d} | Terminal: {}'.format(
        env.agent_pos_x, int(r), term))
  print('---------------')
  print('Final score: {:2d}'.format(int(score)))
  print('---------------')
if __name__ == '__main__':
  # Command-line entry point: parse the environment options and start an
  # interactive game session.
  parser = argparse.ArgumentParser(description="Options to test the environment")
  parser.add_argument('--scr_w', type=int, default=60,
                      help='screen width, by default 60 pixels')
  parser.add_argument('--scr_h', type=int, default=60,
                      help='screen height, by default 60 pixels')
  parser.add_argument('--floor_height', type=int, default=10,
                      help='the y position of the floor in pixels, by default 10 pixels')
  parser.add_argument('--agent_w', type=int, default=5,
                      help='agent width, by default 5 pixels')
  parser.add_argument('--agent_h', type=int, default=10,
                      help='agent height, by default 10 pixels')
  parser.add_argument('--agent_init_pos', type=int, default=0,
                      help='initial x position of the agent(on the floor), defaults to the left of the screen')
  parser.add_argument('--agent_speed', type=int, default=1,
                      help='agent lateral speed, measured in pixels per time step, by default 1 pixel')
  parser.add_argument('--obstacle_position', type=int, default=0,
                      help='initial x position of the obstacle (on the floor), by default 0 pixels, which is the leftmost one')
  # Bug fix: obstacle_size is a (width, height) pair, but the option was
  # declared as a single int, so passing it on the command line produced a
  # non-indexable value.  nargs=2 parses two ints; the default is unchanged.
  parser.add_argument('--obstacle_size', type=int, nargs=2, default=(9, 10),
                      help='width and height of the obstacle, by default(9, 10)')
  parser.add_argument('--zoom', type=int, default=8,
                      help='zoom applied to the screen when rendering, by default 8')
  parser.add_argument('--with_left_action', action='store_true',
                      help='flag, if present, the left action is allowed, by default False')
  parser.add_argument('--max_number_of_steps', type=int, default=600,
                      help='the maximum number of steps for an episode, by default 600.')
  parser.add_argument('--two_obstacles', action='store_true', help='flag, if present: puts two obstacles on the floor at a given location. ' +
                      'The ultimate generalization test, by default False')
  parser.add_argument('--finish_jump', action='store_true', help='flag, if present: perform a full jump when the jump action is selected. ' +
                      'Otherwise an action needs to be selected as usual, by default False')
  args = parser.parse_args()
  test(args)
|
google-research/jumping-task
|
gym_jumping_task/envs/jumping_task.py
|
Python
|
mit
| 17,965
|
# coding=utf-8
from django.contrib.auth.forms import PasswordResetForm, SetPasswordForm
from django.contrib.auth.tokens import default_token_generator
from django.contrib.auth.views import password_reset_confirm
from django.template.response import TemplateResponse
from django.utils.translation import ugettext as _
from xadmin.sites import site
from xadmin.views.base import BaseAdminPlugin, BaseAdminView, csrf_protect_m
from xadmin.views.website import LoginView
class ResetPasswordSendView(BaseAdminView):
    """Public 'forgotten password' view.

    GET renders the email form; POST validates it and sends the reset email,
    then shows the 'done' page.  Class attributes below are the customization
    points (templates, form class, token generator, sender address).
    """

    need_site_permission = False

    password_reset_form = PasswordResetForm
    password_reset_template = 'xadmin/auth/password_reset/form.html'
    password_reset_done_template = 'xadmin/auth/password_reset/done.html'
    password_reset_token_generator = default_token_generator
    password_reset_from_email = None
    password_reset_email_template = 'xadmin/auth/password_reset/email.html'
    password_reset_subject_template = None

    def get(self, request, *args, **kwargs):
        """Render the reset form; post() passes an invalid bound form back in."""
        ctx = super(ResetPasswordSendView, self).get_context()
        if 'form' in kwargs:
            ctx['form'] = kwargs['form']
        else:
            ctx['form'] = self.password_reset_form()
        return TemplateResponse(request, self.password_reset_template, ctx)

    @csrf_protect_m
    def post(self, request, *args, **kwargs):
        """Validate the submitted email and dispatch the reset message."""
        form = self.password_reset_form(request.POST)
        if not form.is_valid():
            # Re-render the form page with the validation errors.
            return self.get(request, form=form)
        save_kwargs = {
            'use_https': request.is_secure(),
            'token_generator': self.password_reset_token_generator,
            'email_template_name': self.password_reset_email_template,
            'request': request,
            'domain_override': request.get_host(),
        }
        if self.password_reset_from_email:
            save_kwargs['from_email'] = self.password_reset_from_email
        if self.password_reset_subject_template:
            save_kwargs['subject_template_name'] = self.password_reset_subject_template
        form.save(**save_kwargs)
        ctx = super(ResetPasswordSendView, self).get_context()
        return TemplateResponse(request, self.password_reset_done_template, ctx)

site.register_view(r'^xadmin/password_reset/$', ResetPasswordSendView, name='xadmin_password_reset')
class ResetLinkPlugin(BaseAdminPlugin):
    """Login-page plugin that appends a 'forgotten password' link under the form."""

    def block_form_bottom(self, context, nodes):
        # Build the anchor pointing at the password-reset view registered above.
        url = self.get_admin_url('xadmin_password_reset')
        label = _('Forgotten your password or username?')
        return ('<div class="text-info" style="margin-top:15px;">'
                '<a href="%s"><i class="fa fa-question-sign"></i> %s</a></div>'
                % (url, label))

site.register_plugin(ResetLinkPlugin, LoginView)
class ResetPasswordComfirmView(BaseAdminView):
    """View reached from the emailed reset link.

    Delegates to Django's password_reset_confirm view, which checks the
    uidb36/token pair and shows the set-new-password form; both GET and POST
    go through the same delegate.
    """

    need_site_permission = False

    # Form used to set the new password.
    password_reset_set_form = SetPasswordForm
    password_reset_confirm_template = 'xadmin/auth/password_reset/confirm.html'
    # Must match the generator used by ResetPasswordSendView for tokens to validate.
    password_reset_token_generator = default_token_generator

    def do_view(self, request, uidb36, token, *args, **kwargs):
        """Hand the request off to Django's confirm view with xadmin context."""
        context = super(ResetPasswordComfirmView, self).get_context()
        return password_reset_confirm(request, uidb36, token,
                                      template_name=self.password_reset_confirm_template,
                                      token_generator=self.password_reset_token_generator,
                                      set_password_form=self.password_reset_set_form,
                                      post_reset_redirect=self.get_admin_url('xadmin_password_reset_complete'),
                                      current_app=self.admin_site.name, extra_context=context)

    def get(self, request, uidb36, token, *args, **kwargs):
        return self.do_view(request, uidb36, token)

    def post(self, request, uidb36, token, *args, **kwargs):
        return self.do_view(request, uidb36, token)

    def get_media(self):
        # Include the form JS/CSS assets needed by the set-password page.
        return super(ResetPasswordComfirmView, self).get_media() + \
            self.vendor('xadmin.page.form.js', 'xadmin.form.css')

site.register_view(r'^xadmin/password_reset/(?P<uidb36>[0-9A-Za-z]{1,13})-(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
                   ResetPasswordComfirmView, name='xadmin_password_reset_confirm')
class ResetPasswordCompleteView(BaseAdminView):
    """Final 'password reset complete' page with a link back to the login."""

    need_site_permission = False

    password_reset_complete_template = 'xadmin/auth/password_reset/complete.html'

    def get(self, request, *args, **kwargs):
        ctx = super(ResetPasswordCompleteView, self).get_context()
        # Point the user back at the admin index (the login entry point).
        ctx['login_url'] = self.get_admin_url('index')
        return TemplateResponse(
            request, self.password_reset_complete_template, ctx)

site.register_view(r'^xadmin/password_reset/complete/$', ResetPasswordCompleteView, name='xadmin_password_reset_complete')
|
sshwsfc/django-xadmin
|
xadmin/plugins/passwords.py
|
Python
|
bsd-3-clause
| 4,680
|
# -*- encoding: utf-8 -*-
from abjad import *
def test_spannertools_MeasuredComplexBeam_01():
    """A MeasuredComplexBeam attached across three 2/16 measures sets
    stemLeftBeamCount / stemRightBeamCount so beams nest per measure while a
    single outer beam spans the whole staff.
    """

    # Three measures of two sixteenth notes each, built from abj-syntax input.
    staff = Staff("abj: | 2/16 c'16 d'16 || 2/16 e'16 f'16 |"
        "| 2/16 g'16 a'16 |")

    # LilyPond output before the spanner is attached: no beam commands.
    assert systemtools.TestManager.compare(
        staff,
        r'''
        \new Staff {
            {
                \time 2/16
                c'16
                d'16
            }
            {
                e'16
                f'16
            }
            {
                g'16
                a'16
            }
        }
        '''
        )

    # Attach the measured complex beam across all three measures.
    beam = spannertools.MeasuredComplexBeam()
    attach(beam, staff[:])

    # After attachment: outer beam opens on the first note ([) and closes on
    # the last (]); beam counts are 2 within a measure and 1 across barlines.
    assert systemtools.TestManager.compare(
        staff,
        r'''
        \new Staff {
            {
                \time 2/16
                \set stemLeftBeamCount = #0
                \set stemRightBeamCount = #2
                c'16 [
                \set stemLeftBeamCount = #2
                \set stemRightBeamCount = #1
                d'16
            }
            {
                \set stemLeftBeamCount = #1
                \set stemRightBeamCount = #2
                e'16
                \set stemLeftBeamCount = #2
                \set stemRightBeamCount = #1
                f'16
            }
            {
                \set stemLeftBeamCount = #1
                \set stemRightBeamCount = #2
                g'16
                \set stemLeftBeamCount = #2
                \set stemRightBeamCount = #0
                a'16 ]
            }
        }
        '''
        )

    assert inspect_(staff).is_well_formed()
|
mscuthbert/abjad
|
abjad/tools/spannertools/test/test_spannertools_MeasuredComplexBeam.py
|
Python
|
gpl-3.0
| 1,571
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.