code
stringlengths 1
199k
|
|---|
import os
import random
import unittest
import warnings
from math import exp, pi
import gpytorch
import torch
from gpytorch.distributions import MultivariateNormal
from gpytorch.kernels import InducingPointKernel, RBFKernel, ScaleKernel
from gpytorch.likelihoods import GaussianLikelihood
from gpytorch.means import ConstantMean
from gpytorch.priors import SmoothedBoxPrior
from gpytorch.test.utils import least_used_cuda_device
from gpytorch.utils.warnings import NumericalWarning
from torch import optim
def make_data(cuda=False):
    """Build a small 1D sine-wave regression dataset.

    Returns a 4-tuple ``(train_x, train_y, test_x, test_y)`` where the
    training targets are sin(2*pi*x) plus a little Gaussian noise and the
    test inputs are uniformly random in [0, 1].

    :param cuda: when True, move every returned tensor to the GPU.
    """
    xs_train = torch.linspace(0, 1, 100)
    ys_train = torch.sin(xs_train * (2 * pi))
    # In-place noise injection, scaled down by alpha.
    ys_train.add_(torch.randn_like(ys_train), alpha=1e-2)
    xs_test = torch.rand(51)
    ys_test = torch.sin(xs_test * (2 * pi))
    if cuda:
        xs_train, ys_train, xs_test, ys_test = (
            xs_train.cuda(),
            ys_train.cuda(),
            xs_test.cuda(),
            ys_test.cuda(),
        )
    return xs_train, ys_train, xs_test, ys_test
class GPRegressionModel(gpytorch.models.ExactGP):
    """Exact GP regression model using an SGPR-style InducingPointKernel
    (RBF base kernel wrapped in a ScaleKernel, 32 inducing points on [0, 1])."""

    def __init__(self, train_x, train_y, likelihood):
        super(GPRegressionModel, self).__init__(train_x, train_y, likelihood)
        # Constant mean tightly constrained around zero by its prior.
        self.mean_module = ConstantMean(prior=SmoothedBoxPrior(-1e-5, 1e-5))
        self.base_covar_module = ScaleKernel(RBFKernel(lengthscale_prior=SmoothedBoxPrior(exp(-5), exp(6), sigma=0.1)))
        # Sparse approximation of the base kernel via inducing points.
        self.covar_module = InducingPointKernel(
            self.base_covar_module, inducing_points=torch.linspace(0, 1, 32), likelihood=likelihood
        )

    def forward(self, x):
        """Return the prior MultivariateNormal at inputs ``x``."""
        mean_x = self.mean_module(x)
        covar_x = self.covar_module(x)
        return MultivariateNormal(mean_x, covar_x)
class TestSGPRRegression(unittest.TestCase):
    """End-to-end regression tests for the SGPR (InducingPointKernel) model:
    train on noisy sine data, then check predictive accuracy and the
    fast-predictive-variance code path."""

    def setUp(self):
        # Lock all RNG seeds for reproducibility unless the UNLOCK_SEED env
        # var is set to something other than "false"; the previous torch RNG
        # state is saved and restored in tearDown.
        if os.getenv("UNLOCK_SEED") is None or os.getenv("UNLOCK_SEED").lower() == "false":
            self.rng_state = torch.get_rng_state()
            torch.manual_seed(0)
            if torch.cuda.is_available():
                torch.cuda.manual_seed_all(0)
            random.seed(0)

    def tearDown(self):
        # Restore the RNG state captured in setUp (if seeding was locked).
        if hasattr(self, "rng_state"):
            torch.set_rng_state(self.rng_state)

    def test_sgpr_mean_abs_error(self):
        # Suppress numerical warnings
        warnings.simplefilter("ignore", NumericalWarning)
        train_x, train_y, test_x, test_y = make_data()
        likelihood = GaussianLikelihood()
        gp_model = GPRegressionModel(train_x, train_y, likelihood)
        mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model)
        # Optimize the model
        gp_model.train()
        likelihood.train()
        optimizer = optim.Adam(gp_model.parameters(), lr=0.1)
        for _ in range(30):
            optimizer.zero_grad()
            output = gp_model(train_x)
            loss = -mll(output, train_y)
            loss.backward()
            optimizer.step()
            # Every parameter should receive a non-zero gradient each step.
            for param in gp_model.parameters():
                self.assertTrue(param.grad is not None)
                self.assertGreater(param.grad.norm().item(), 0)
        # Test the model
        gp_model.eval()
        likelihood.eval()
        test_preds = likelihood(gp_model(test_x)).mean
        mean_abs_error = torch.mean(torch.abs(test_y - test_preds))
        self.assertLess(mean_abs_error.squeeze().item(), 0.05)

    def test_sgpr_fast_pred_var(self):
        # Suppress numerical warnings
        warnings.simplefilter("ignore", NumericalWarning)
        train_x, train_y, test_x, test_y = make_data()
        likelihood = GaussianLikelihood()
        gp_model = GPRegressionModel(train_x, train_y, likelihood)
        mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model)
        # Optimize the model
        gp_model.train()
        likelihood.train()
        optimizer = optim.Adam(gp_model.parameters(), lr=0.1)
        for _ in range(50):
            optimizer.zero_grad()
            output = gp_model(train_x)
            loss = -mll(output, train_y)
            loss.backward()
            optimizer.step()
            for param in gp_model.parameters():
                self.assertTrue(param.grad is not None)
                self.assertGreater(param.grad.norm().item(), 0)
        # Test the model
        gp_model.eval()
        likelihood.eval()
        # Fast (LOVE) predictive variances should agree with themselves when
        # cached, and with the slow (exact) path, to within tolerance.
        with gpytorch.settings.max_preconditioner_size(5), gpytorch.settings.max_cg_iterations(50):
            with gpytorch.settings.fast_pred_var(True):
                fast_var = gp_model(test_x).variance
                fast_var_cache = gp_model(test_x).variance
                self.assertLess(torch.max((fast_var_cache - fast_var).abs()), 1e-3)
            with gpytorch.settings.fast_pred_var(False):
                slow_var = gp_model(test_x).variance
            self.assertLess(torch.max((fast_var_cache - slow_var).abs()), 1e-3)

    def test_sgpr_mean_abs_error_cuda(self):
        # Suppress numerical warnings
        warnings.simplefilter("ignore", NumericalWarning)
        # Skipped silently when no GPU is available.
        if not torch.cuda.is_available():
            return
        with least_used_cuda_device():
            train_x, train_y, test_x, test_y = make_data(cuda=True)
            likelihood = GaussianLikelihood().cuda()
            gp_model = GPRegressionModel(train_x, train_y, likelihood).cuda()
            mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, gp_model)
            # Optimize the model
            gp_model.train()
            likelihood.train()
            optimizer = optim.Adam(gp_model.parameters(), lr=0.1)
            optimizer.n_iter = 0
            for _ in range(25):
                optimizer.zero_grad()
                output = gp_model(train_x)
                loss = -mll(output, train_y)
                loss.backward()
                optimizer.n_iter += 1
                optimizer.step()
                for param in gp_model.parameters():
                    self.assertTrue(param.grad is not None)
                    self.assertGreater(param.grad.norm().item(), 0)
            # Test the model
            gp_model.eval()
            likelihood.eval()
            test_preds = likelihood(gp_model(test_x)).mean
            mean_abs_error = torch.mean(torch.abs(test_y - test_preds))
            self.assertLess(mean_abs_error.squeeze().item(), 0.02)
# Allow running this test module directly with `python <file>.py`.
if __name__ == "__main__":
    unittest.main()
|
"""
Django settings for apps project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
from local_settings import SECRET_KEY, DATABASES, DEBUG
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = SECRET_KEY
DEBUG = DEBUG
ALLOWED_HOSTS = [
'learningdjango.in',
'localhost',
'127.0.0.1'
]
INSTALLED_APPS = [
'home.apps.HomeConfig',
'polls.apps.PollsConfig',
'blog.apps.BlogConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'apps.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'apps.wsgi.application'
DATABASES = DATABASES
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
|
"""Command for spanner databases create."""
from googlecloudsdk.api_lib.spanner import database_operations
from googlecloudsdk.api_lib.spanner import databases
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.spanner import flags
class Create(base.CreateCommand):
  """Cloud Spanner databases create command."""

  @staticmethod
  def Args(parser):
    """Args is called by calliope to gather arguments for this command.

    Please add arguments in alphabetical order except for no- or a clear-
    pair for that argument which can follow the argument itself.

    Args:
      parser: An argparse parser that you can use to add arguments that go
        on the command line after this command. Positional arguments are
        allowed.
    """
    flags.Instance(positional=False).AddToParser(parser)
    flags.Database().AddToParser(parser)
    flags.Ddl(help_text='Semi-colon separated DDL (data definition language) '
              'statements to run inside the '
              'newly created database. If there is an error in any statement, '
              'the database is not created. Full DDL specification is at '
              'https://cloud.google.com/spanner/docs/data-definition-language'
             ).AddToParser(parser)
    base.ASYNC_FLAG.AddToParser(parser)

  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      The long-running operation when --async is given, otherwise the result
      of waiting for database creation to complete.
    """
    op = databases.Create(
        args.instance, args.database, flags.FixDdl(args.ddl or []))
    # Bug fix: `async` is a reserved keyword in Python 3, so `args.async`
    # is a syntax error there; getattr() reads the same argparse attribute
    # portably on both Python 2 and 3.
    if getattr(args, 'async'):
      return op
    return database_operations.Await(op, 'Creating database')
|
# Sphinx documentation configuration for M2Crypto.
import sys, os

# Make the package importable for sphinx.ext.autodoc (docs live one level
# below the project root).
sys.path.insert(0, os.path.abspath(os.path.join('..')))

# Autodoc pulls docstrings from the code; viewcode links to highlighted source.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

# Project metadata.
project = u'M2Crypto'
copyright = u'2017, Matej Cepl <mcepl@cepl.eu>'
# Left empty intentionally; version strings are not embedded in the docs.
version = ''
release = ''

exclude_patterns = ['_build']
pygments_style = 'sphinx'

# HTML output.
html_static_path = ['_static']
htmlhelp_basename = 'M2Cryptodoc'

# LaTeX / man / Texinfo output targets.
latex_elements = {
}
latex_documents = [
    ('index', 'M2Crypto.tex', u'M2Crypto Documentation',
     u'Matej Cepl \\textless{}mcepl@cepl.eu\\textgreater{}', 'manual'),
]
man_pages = [
    ('index', 'm2crypto', u'M2Crypto Documentation',
     [u'Matej Cepl <mcepl@cepl.eu>'], 1)
]
texinfo_documents = [
    ('index', 'M2Crypto', u'M2Crypto Documentation',
     u'Matej Cepl <mcepl@cepl.eu>', 'M2Crypto', 'One line description of project.',
     'Miscellaneous'),
]

# EPUB output metadata.
epub_title = u'M2Crypto'
epub_author = u'Matej Cepl <mcepl@cepl.eu>'
epub_publisher = u'Matej Cepl <mcepl@cepl.eu>'
epub_copyright = u'2017, Matej Cepl <mcepl@cepl.eu>'
|
from zeroconf import Zeroconf, ServiceInfo
import socket
import configparser
from . import hazc_cmd
class hazc_device:
    """A TCP device server advertised over Zeroconf.

    Accepts fixed-length ('*'-padded) text messages of the form
    ``command?param`` / ``command!param`` and dispatches them to registered
    hazc_cmd handlers. Configuration is read from ``config.ini``.
    """

    # Forward constants from hazc_cmd so callers can use hazc_device.BOOL etc.
    NO_PARAM = hazc_cmd.NO_PARAM
    BOOL = hazc_cmd.BOOL
    FLOAT = hazc_cmd.FLOAT
    STRING = hazc_cmd.STRING
    INT = hazc_cmd.INT

    # NOTE(review): `global` in a class body binds a *module-level* name,
    # not a class attribute — this sets a module global `running`, which
    # nothing in this class reads. Confirm whether it is still needed.
    global running
    running = False

    def __init__(self, ipaddr):
        self.version = "0.1"
        self.config = configparser.ConfigParser()
        self.config.read('config.ini')
        self.MSGLEN = 1024        # fixed frame size for every message
        self.END_OF_MSG = '*'     # padding / terminator character
        self.ip = ipaddr
        self.buffer = 20          # recv() chunk size in bytes
        # Built-in commands present on every device.
        hcvc = hazc_cmd.hazc_cmd('version?', self.version_cmd, self.NO_PARAM)
        hccc = hazc_cmd.hazc_cmd('commands?', self.commands_cmd, self.NO_PARAM)
        hcsc = hazc_cmd.hazc_cmd('status?', self.status_cmd, self.STRING)
        self.commands = {'version': hcvc, 'commands': hccc, 'status': hcsc}
        # probably want to add a debug log status
        self.status = {'exec_status': self.exec_status}

    # Adds a function - not as preferred as addControl
    # Does NOT auto add status
    def addFunction(self, name, handler, paramtype):
        # log("This is not the preferred way to add controls, see addControl")
        # Commands must end in '?' (query) or '!' (action); default to '!'.
        if not('?' in name or '!' in name):
            name += '!'
        self.commands[name] = hazc_cmd.hazc_cmd(name, handler, paramtype)

    # Adds a control vector
    # controlname should just be a name like 'temp' or 'position' - it'll be the same for the status
    def addControl(self, controlname, handler, statushandler, paramtype=NO_PARAM):
        cmd_name = 'set-'+controlname
        self.commands[cmd_name] = hazc_cmd.hazc_cmd(cmd_name+'?', handler, paramtype)
        self.addStatus(controlname, statushandler)

    # adds a unique status not already included in control vector. name is just the name, as in 'temp'
    def addStatus(self, name, handler):
        self.status[name] = handler

    def advertise(self):
        """Register the service over Zeroconf and serve connections until
        interrupted; always unregisters and closes the socket on exit."""
        postfix = self.config['global']['service_prefix']
        self.port = int(self.config['global']['port'])
        # print(self.config['device']['hostname']+postfix)
        info = ServiceInfo(postfix, self.config['device']['hostname']+"."+postfix,
                           socket.inet_aton(self.ip), self.port, 0, 0,
                           {'info': self.config['device']['description']}, "hazc.local.")
        self.bindConnection()
        zeroconf = Zeroconf()
        zeroconf.register_service(info)
        try:
            # Serve one connection at a time, forever, until Ctrl-C.
            while True:
                print("Ready")
                self.conn, self.addr = self.webcontrol.accept()
                self.listen()
                self.conn.close()
        except KeyboardInterrupt:
            pass
        finally:
            print()
            print("Unregistering...")
            zeroconf.unregister_service(info)
            zeroconf.close()
            try:
                print("Shutting down socket")
                self.webcontrol.shutdown(socket.SHUT_RDWR)
            except Exception as e:
                print(e)

    def listen(self):
        """Read up to MSGLEN bytes from the current connection in
        `self.buffer`-sized chunks, then dispatch the message."""
        data = bytes()
        rbytes = 0
        while rbytes < self.MSGLEN:
            d = self.conn.recv(self.buffer)
            if not d: break
            data += d
            rbytes += len(d)
        self.handledata(data)

    def handledata(self, data):
        """Decode one framed message, execute its command, and send a reply."""
        command, param = self.cleanandstringdata(data)
        print('->' + command + ';' + param)
        try:
            replystr = self.commands[command].execute(param)
        except KeyError:
            if(command==''):
                command = "(empty string)"
            print("ERROR! Unknown command: " + command)
            replystr = ""
        # Handlers are expected to return a string; substitute a marker if not.
        if(replystr == None):
            print("WARNING! " + command + " should return a string to send to the master. Sending 'NO_REPLY'")
            replystr = 'NO_REPLY'
        print(replystr)
        self.reply(replystr)

    def reply(self, msg):
        # Pad the message with END_OF_MSG up to the fixed frame length.
        longmsg = msg
        while len(longmsg) < self.MSGLEN:
            longmsg += self.END_OF_MSG
        self.conn.send(longmsg.encode('utf-8'))

    def cleanandstringdata(self, data):
        """Strip frame padding and split raw bytes into (command, param).

        '?' and '!' act as command/parameter separators; a message with
        neither is treated as a bare command with an empty parameter.
        """
        dstr = data.decode('utf-8')
        full = dstr.strip(self.END_OF_MSG)
        if '?' in full:
            li = full.split('?')
            param = li[-1]
            cmd = li[0]
        elif '!' in full:
            li = full.split('!')
            param = li[-1]
            cmd = li[0]
        else:
            param = ''
            cmd = full
        return (cmd, param)

    def bindConnection(self):
        """Bind the listening TCP socket; exits the process on failure."""
        try:
            self.webcontrol = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.webcontrol.bind((self.ip, self.port))
            self.webcontrol.listen(1)
        except OSError as e:
            print(e)
            quit()

    def exec_status(self):
        return "Running"

    def version_cmd(self):
        return self.version

    def paramtype_tostring(self, paramnum):
        """Map a paramtype constant to its display name."""
        if paramnum == self.BOOL:
            return 'BOOL'
        elif paramnum == self.FLOAT:
            return 'FLOAT'
        elif paramnum == self.STRING:
            return 'STRING'
        elif paramnum == self.INT:
            return 'INT'
        else:
            return 'PARAM_ERROR'

    def commands_cmd(self):
        """Return a ';'-separated list of commands, annotated with the
        parameter type where one is expected."""
        rstr = ""
        for key in self.commands:
            rstr += key
            if self.commands[key].paramtype is not self.NO_PARAM:
                rstr += ':' + self.paramtype_tostring(self.commands[key].paramtype)
            rstr += ";"
        return rstr

    def status_cmd(self, specific_status=''):
        """Return one named status, or all statuses as 'name,value;' pairs,
        truncated to fit a single reply frame."""
        # NOTE(review): `str` shadows the builtin here; also the specific-
        # status branch returns the handler itself without calling it —
        # confirm whether that is intended.
        str = ''
        if len(specific_status) > 0:
            str = self.status[specific_status]
        else:
            for st in self.status:
                str += st + ',' + self.status[st]() + ';'
        return str[:self.MSGLEN-1]

    # Some debugging methods
    def debug_cmds(self):
        print("Commands: " + str(self.commands))
        print("Statuses: " + str(self.status))
|
from victor.exceptions import (
FieldValidationException,
FieldTypeConversionError,
FieldRequiredError,
VectorInputTypeError
)
class Field(object):
    """Base descriptor for a single input field of a Vector.

    Subclasses set ``cast_cls`` to coerce incoming values and/or override
    ``_validate`` to implement strict validation.
    """

    required = True
    """Field is required and an exception will be raised if missing"""

    missing_value = None
    """Value to use when field is missing and not required"""

    strict = False
    """Field value must pass validation or an exception will be raised"""

    cast_cls = None   # optional type/callable used to coerce values
    data = None       # last value stored by set_data()

    def __init__(self, required=True, missing_value=None, strict=False):
        self.required = required
        self.missing_value = missing_value
        self.strict = strict

    def _validate(self, value):
        """Return True when *value* is acceptable; base accepts everything."""
        return True

    def _cast_type(self, value):
        """Coerce *value* via cast_cls; subclasses may add error handling."""
        return self.cast_cls(value)

    def set_data(self, value):
        """Validate (strict mode) or cast *value*, then store it in ``data``.

        Raises:
            FieldValidationException: strict mode and _validate() rejects.
        """
        if self.strict:
            if not self._validate(value):
                # Bug fix: error message said "does not except"; the intended
                # word is "accept".
                raise FieldValidationException('%s does not '
                                               'accept this value'
                                               % self.__class__.__name__)
        elif self.cast_cls is not None:
            # NOTE(review): because of the elif, casting is skipped entirely
            # in strict mode — presumably intentional; confirm with callers.
            value = self._cast_type(value)
        self.data = value
class CharField(Field):
    """Unvalidated, uncast field; behaves exactly like the base Field."""
    pass
class StringField(Field):
    """Field cast to ``str``; strict mode accepts only string values.

    NOTE(review): references the Python 2 ``unicode`` builtin, so strict
    validation is Python 2 only.
    """
    cast_cls = str

    def _validate(self, value):
        if not isinstance(value, (str, unicode)):
            return False
        return True
class IntField(Field):
    """Field coerced to int; falls back to ``missing_value`` on bad input."""

    cast_cls = int
    _cast_fallback_value = 0

    def __init__(self, *args, **kwargs):
        super(IntField, self).__init__(*args, **kwargs)
        # Default the fallback so an unconvertible value yields 0 rather
        # than None.
        if self.missing_value is None:
            self.missing_value = self._cast_fallback_value

    def _cast_type(self, value):
        """Cast *value* to cast_cls, returning missing_value on failure.

        Raises:
            FieldTypeConversionError: conversion failed and missing_value
                was explicitly set to False.
        """
        try:
            return self.cast_cls(value)
        # Fix: `except ValueError, exc` is Python 2-only syntax and a
        # SyntaxError on Python 3; the `as` form works on both.
        except ValueError as exc:
            if self.missing_value is False:
                raise FieldTypeConversionError('Could not convert '
                                               'data or use missing_value: %s'
                                               % exc)
            return self.missing_value
class FloatField(IntField):
    """Field coerced to float (fallback 0.0).

    Bug fix: the cast attribute was misspelled ``cast_class``, so FloatField
    silently inherited ``cast_cls = int`` from IntField and truncated floats.
    """
    cast_cls = float
    _cast_fallback_value = 0.0
class ListField(Field):
    """Field holding a list/tuple whose items are processed by *cls*."""

    cls = None
    """Field class to represent list items"""

    def __init__(self, cls, *args, **kwargs):
        assert isinstance(cls, Field), 'cls is not a valid Field instance'
        self.cls = cls
        super(ListField, self).__init__(*args, **kwargs)

    def _validate(self, value):
        """Check *value* is a sequence and run each item through the item
        field; stores the sequence in ``data``.

        Raises:
            FieldValidationException: *value* is not a list or tuple.
        """
        if not isinstance(value, (list, tuple)):
            raise FieldValidationException('ListField requires data '
                                           'to be a sequence type')
        for x in value:
            # Bug fix: each item must be passed through the item field;
            # the original passed the whole sequence (`value`) every
            # iteration instead of the item (`x`).
            self.cls.set_data(x)
        self.data = value
        return True
class Vector(object):
    """Declarative container of Field attributes.

    Subclasses declare Field instances as class attributes; input() then
    validates a dict of input data against them and maps every key onto
    an instance attribute. (Python 2: uses dict.iteritems().)
    """

    def __init__(self):
        self.input_data = {}   # raw dict from the last input() call
        self._fields = {}      # attr name -> Field instance
        self._map = {}         # attr name -> final value after input()
        self._required = []    # names of required fields
        self._setup_fields()

    def get_name(self):
        return self.__class__.__name__

    def __call__(self, data):
        # Calling the vector is shorthand for input().
        return self.input(data)

    def input(self, data):
        """Validate *data* and map it onto attributes.

        Raises VectorInputTypeError when *data* is not a dict.
        """
        self._map = {}
        if not isinstance(data, dict):
            raise VectorInputTypeError('Vector input not a dictionary')
        self._validate(data)
        self._map_attrs(data)

    def _setup_fields(self):
        # Collect every Field declared on the class (or instance) and
        # remember which ones are required.
        self._fields = {}
        for a in dir(self):
            v = getattr(self, a)
            if isinstance(v, Field):
                self._fields[a] = v
                if v.required:
                    self._required.append(a)
        self._reset_fields()

    def _reset_fields(self):
        # Shadow the Field class attributes with None on the instance.
        for f in self.get_fields():
            setattr(self, f, None)

    def _validate(self, input_data):
        """Check required fields are present and push values into fields.

        Raises FieldRequiredError when a required field is missing.
        """
        for f in self._required:
            if f not in input_data:
                raise FieldRequiredError('Missing field %s is a required field'
                                         % f)
        for k, v in input_data.iteritems():
            if k in self.get_fields():
                f = self.get_field(k)
                f.set_data(v)

    def _map_attrs(self, input_data):
        # Field-backed keys take the (possibly cast) field data; unknown
        # keys are passed through unchanged.
        self.input_data = input_data
        for k, v in self.input_data.iteritems():
            if k in self.get_fields():
                # setattr(self, k, self.get_field(k).data)
                self._map[k] = self.get_field(k).data
            else:
                # setattr(self, k, v)
                self._map[k] = v
        for k, v in self._map.iteritems():
            setattr(self, k, v)

    def get_fields(self):
        return self._fields

    def get_field(self, name):
        return self._fields[name]

    @property
    def data(self):
        # Final mapped values from the last input() call.
        return self._map
|
""" PyAbleton
A library for creating and editing Ableton Live instrument/effect presets in Python.
"""
__author__ = 'ham@hamiltonkib.be'
__version__ = '1.0'
import presets
|
"""
Django settings for figexample project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
SECRET_KEY = 'pp&p7ex-&+#n4waijg96v&txz$=y*rh=t$u-!hri@(-s@6^51='
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'figexample.urls'
WSGI_APPLICATION = 'figexample.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': 'db_1',
'PORT': 5432,
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
|
import abc
import platform
from UserList import UserList
class Monitor(object):
    """Abstract interface for a single monitor driver.

    Fix: the class used @abc.abstractmethod without an ABC metaclass, which
    leaves the decorators inert (the class stays instantiable and subclasses
    are never forced to implement these methods). This module is Python 2
    (`from UserList import UserList`), so the metaclass is declared with the
    Python 2 `__metaclass__` attribute.
    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def current(self):
        pass

    @abc.abstractmethod
    def percent(self, range):
        pass

    @abc.abstractmethod
    def reset(self):
        pass

    @abc.abstractmethod
    def max(self):
        pass

    @abc.abstractmethod
    def min(self):
        pass
class Monitors(UserList):
    """Abstract list-like collection of Monitor objects (UserList base).

    NOTE(review): like Monitor above, no ABC metaclass is declared, so on
    Python 2 these @abc.abstractmethod decorators are not enforced.
    """

    @abc.abstractmethod
    def percent(self, range):
        pass

    @abc.abstractmethod
    def reset(self):
        pass

    @abc.abstractmethod
    def max(self):
        pass

    @abc.abstractmethod
    def min(self):
        pass
def get_monitors():
    """Return the Monitors implementation for the current platform.

    Drivers are imported lazily so that only the current platform's
    dependencies are required.

    Raises:
        OSError: the platform is not Windows, macOS, or Linux.
    """
    # Hoist the platform lookup so it is queried once.
    system = platform.system()
    if system == "Windows":
        from .driver_win_wmi import WinWMIMonitors
        return WinWMIMonitors()
    elif system == "Darwin":
        from .driver_mac import MacMonitors
        return MacMonitors()
    elif system == "Linux":
        from .driver_linux import LinuxMonitors
        return LinuxMonitors()
    else:
        # Fix: a bare OSError() gave no hint about the failure; name the
        # unsupported platform in the message.
        raise OSError("Unsupported platform: %s" % system)
|
"""
Forms for day forms
"""
from django.conf import settings
from django import forms
from django.utils.translation import ugettext as _
from arrow import Arrow
from datebook.models import DayEntry
from datebook.forms import CrispyFormMixin
from datebook.utils.imports import safe_import_module
# Shared date/time parsing formats and widget configuration for the
# SplitDateTimeField clones used by the day forms (dd/mm/yyyy, 24h time).
DATETIME_FORMATS = {
    'input_date_formats': ['%d/%m/%Y'],
    'input_time_formats': ['%H:%M'],
    'widget': forms.SplitDateTimeWidget(date_format='%d/%m/%Y', time_format='%H:%M'),
}
class DayBaseFormMixin(object):
    """
    DayBase form mixin

    Shared initial-data plumbing and validation for day entry forms.
    Concrete forms must set ``self.daydate`` (date of the targeted day)
    before field cleaning runs.
    """
    crispy_form_helper_path = 'datebook.forms.crispies.day_helper'
    crispy_form_helper_kwargs = {}

    def fill_initial_data(self, *args, **kwargs):
        """Copy start/stop values into the *_datetime clone fields' initial
        data and return the updated kwargs."""
        # Pass initial data for start and stop to their SplitDateTimeField clones
        if 'start' in kwargs['initial']:
            kwargs['initial']['start_datetime'] = kwargs['initial']['start']
        if 'stop' in kwargs['initial']:
            kwargs['initial']['stop_datetime'] = kwargs['initial']['stop']
        # For existing instance (in edit mode) pass the start and stop values to their
        # clone with SplitDateTimeField via initial datas
        if kwargs.get('instance'):
            kwargs['initial']['start_datetime'] = kwargs['instance'].start
            kwargs['initial']['stop_datetime'] = kwargs['instance'].stop
        return kwargs

    def init_fields(self, *args, **kwargs):
        """Create the start/stop SplitDateTimeFields and the optional
        content field (via the configured helper). Returns None."""
        self.fields['start_datetime'] = forms.SplitDateTimeField(label=_('start'), **DATETIME_FORMATS)
        self.fields['stop_datetime'] = forms.SplitDateTimeField(label=_('stop'), **DATETIME_FORMATS)
        # Set the form field for DayEntry.content
        field_helper = safe_import_module(settings.DATEBOOK_TEXT_FIELD_HELPER_PATH)
        if field_helper is not None:
            self.fields['content'] = field_helper(self, **{'label':_('content'), 'required': False})

    def clean_content(self):
        """
        Text content validation

        Delegates to the project-configured validator helper when present.
        """
        content = self.cleaned_data.get("content")
        validation_helper = safe_import_module(settings.DATEBOOK_TEXT_VALIDATOR_HELPER_PATH)
        if validation_helper is not None:
            return validation_helper(self, content)
        else:
            return content

    def clean_start_datetime(self):
        # Start must fall exactly on the targeted day.
        start = self.cleaned_data['start_datetime']
        # Day entry can't start before the targeted day date
        if start and start.date() < self.daydate:
            raise forms.ValidationError(_("You can't start a day before itself"))
        # Day entry can't start after the targeted day date
        if start and start.date() > self.daydate:
            raise forms.ValidationError(_("You can't start a day after itself"))
        return start

    def clean_stop_datetime(self):
        # Stop must be after start and no later than the following day.
        start = self.cleaned_data.get('start_datetime')
        stop = self.cleaned_data['stop_datetime']
        # Day entry can't stop before the start
        if start and stop and stop <= start:
            raise forms.ValidationError(_("Stop time can't be less or equal to start time"))
        # Day entry can't stop in more than one futur day from the targeted day date
        if stop and stop.date() > Arrow.fromdate(self.daydate).replace(days=1).date():
            raise forms.ValidationError(_("Stop time can't be more than the next day"))
        return stop

    # TODO: overtime must not be more than effective worked time
    #def clean_overtime(self):
        #overtime = self.cleaned_data.get('overtime')
        #return overtime

    # TODO
    #def clean_pause(self):
        #start = self.cleaned_data.get('start_datetime')
        #stop = self.cleaned_data.get('stop_datetime')
        #pause = self.cleaned_data['pause']
        ## Pause time can't be more than elapsed time between start and stop
        #if start and stop and pause and False:
            #raise forms.ValidationError("Pause time is more than the elapsed time")
        #return pause
class DayEntryForm(DayBaseFormMixin, CrispyFormMixin, forms.ModelForm):
    """
    DayEntry form

    ModelForm for a DayEntry within a given datebook; ``day`` selects the
    day-of-month inside the datebook's period.
    """
    def __init__(self, datebook, day, *args, **kwargs):
        self.datebook = datebook
        self.daydate = datebook.period.replace(day=day)
        # Args to give to the form layout method
        # NOTE(review): this mutates the class-level dict shared by all
        # instances (and subclasses) — confirm that is intended.
        self.crispy_form_helper_kwargs.update({
            'next_day': kwargs.pop('next_day', None),
            'day_to_model_url': kwargs.pop('day_to_model_url', None),
            'form_action': kwargs.pop('form_action'),
            'remove_url': kwargs.pop('remove_url', None),
        })
        # Fill initial datas
        kwargs = self.fill_initial_data(*args, **kwargs)
        # NOTE(review): __init__ runs twice here — once through the normal
        # MRO and once starting after ModelForm; looks redundant, confirm.
        super(DayEntryForm, self).__init__(*args, **kwargs)
        super(forms.ModelForm, self).__init__(*args, **kwargs)
        # Init some special fields
        # NOTE(review): init_fields() returns None, so kwargs is discarded.
        kwargs = self.init_fields(*args, **kwargs)

    def clean(self):
        cleaned_data = super(DayBaseFormMixin, self).clean()
        content = cleaned_data.get("content")
        vacation = cleaned_data.get("vacation")
        # Content text is only required when vacation is not checked
        if not vacation and not content:
            raise forms.ValidationError(_("Worked days require a content text"))
        return cleaned_data

    def save(self, *args, **kwargs):
        # Copy the cleaned split-datetime clones back onto the model fields
        # excluded from the form (see Meta.exclude) before saving.
        instance = super(DayEntryForm, self).save(commit=False, *args, **kwargs)
        instance.start = self.cleaned_data['start_datetime']
        instance.stop = self.cleaned_data['stop_datetime']
        instance.datebook = self.datebook
        instance.activity_date = self.daydate
        instance.save()
        return instance

    class Meta:
        model = DayEntry
        exclude = ('datebook', 'activity_date', 'start', 'stop')
        widgets = {
            'pause': forms.TimeInput(format=DATETIME_FORMATS['input_time_formats'][0]),
            'overtime': forms.TimeInput(format=DATETIME_FORMATS['input_time_formats'][0]),
        }
class DayEntryCreateForm(DayEntryForm):
    """Creation variant of DayEntryForm that refuses duplicate entries
    for the same datebook/day."""

    def clean(self):
        cleaned_data = super(DayEntryCreateForm, self).clean()
        # Validate that there is not already a day entry for the same day.
        # (Fixed "allready" typo in the user-facing message; dropped the
        # unused binding of the fetched object.)
        try:
            DayEntry.objects.get(datebook=self.datebook, activity_date=self.daydate)
        except DayEntry.DoesNotExist:
            pass
        else:
            raise forms.ValidationError(_("This day entry has already been created"))
        return cleaned_data
|
from __future__ import absolute_import, division, print_function
import numpy as np
from ...magic.core.frame import Frame
from ..basics.remote import Remote, connected_remotes
from . import time
from . import filesystem as fs
from .logging import log
def remote_convolution(image, kernel, host_id):
    """
    Convolve every non-constant frame of *image* with *kernel* on a remote host.

    Workflow: save frames and kernel to a local temp directory, upload them,
    generate and run a convolution script remotely, download the convolved
    frames back into *image*, and clean up both temp directories.

    :param image: multi-frame image; frames are replaced in place.
    :param kernel: convolution kernel with a .save() method.
    :param host_id: ID of the remote host to run on.
    """
    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:
        # Debugging
        log.debug("Logging in to remote host ...")
        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)
    # Debugging
    log.debug("Creating temporary directory remotely ...")
    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("convolution"))
    remote.create_directory(remote_temp_path)
    # Debugging
    #log.debug("Uploading the kernel to the remote directory ...")
    # Upload the kernel FITS file to the remote directory
    #remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    #remote.upload(kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)
    # Debugging
    log.debug("Creating a local temporary directory ...")
    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("convolution"))
    fs.create_directory(local_temp_path)
    # Debugging
    log.debug("Saving the image frames to the temporary directory ...")
    # Save the frames; constant frames are skipped (convolving a constant
    # frame is a no-op) and remembered so they are not replaced later.
    local_frame_paths = []
    constant_frames = []
    for frame_name in image.frames:
        frame_path = fs.join(local_temp_path, frame_name + ".fits")
        # Only upload and convolve non-constant frames
        if not image.frames[frame_name].is_constant():
            image.frames[frame_name].save(frame_path)
            local_frame_paths.append(frame_path)
        else:
            log.debug("The " + frame_name + " frame is constant, so this won't be uploaded and convolved")
            constant_frames.append(frame_name)
    # Debugging
    log.debug("Saving the kernel to the temporary directory ...")
    local_kernel_path = fs.join(local_temp_path, "kernel.fits")
    kernel.save(local_kernel_path)
    # Debugging
    log.debug("Uploading the image frames to the remote directory ...")
    # Upload the frames
    remote_frame_paths = []
    for local_frame_path in local_frame_paths:
        # Determine the name of the local frame file
        frame_file_name = fs.name(local_frame_path)
        # Debugging
        log.debug("Uploading the " + fs.strip_extension(frame_file_name) + " frame ...")
        # Upload the frame file
        remote_frame_path = fs.join(remote_temp_path, frame_file_name)
        remote.upload(local_frame_path, remote_temp_path, new_name=frame_file_name, compress=True, show_output=True)
        remote_frame_paths.append(remote_frame_path)
    # Debugging
    log.debug("Uploading the kernel to the remote directory ...")
    # Upload the kernel
    remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    remote.upload(local_kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)
    # Debugging
    log.debug("Creating a python script to perform the convolution remotely ...")
    # Create a python script that does the convolution
    #script_file = tempfile.NamedTemporaryFile()
    #local_script_path = script_file.name
    local_script_path = fs.join(local_temp_path, "convolve.py")
    script_file = open(local_script_path, 'w')
    # Generated script: open the kernel once, then convolve each uploaded
    # frame in place (overwriting the remote frame file).
    script_file.write("#!/usr/bin/env python\n")
    script_file.write("# -*- coding: utf8 -*-\n")
    script_file.write("\n")
    script_file.write("# Import astronomical modules\n")
    script_file.write("from astropy.units import Unit\n")
    script_file.write("\n")
    script_file.write("# Import the relevant PTS classes and modules\n")
    script_file.write("from pts.magic.core.frame import Frame\n")
    script_file.write("from pts.magic.core.image import Image\n")
    script_file.write("from pts.magic.core.kernel import ConvolutionKernel\n")
    script_file.write("from pts.core.tools.logging import log\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Opening the kernel frame ...')\n")
    script_file.write("\n")
    script_file.write("# Open the kernel\n")
    script_file.write("kernel = ConvolutionKernel.from_file('" + remote_kernel_path + "')\n")
    script_file.write("\n")
    for remote_frame_path in remote_frame_paths:
        frame_name = fs.strip_extension(fs.name(remote_frame_path))
        script_file.write("# Inform the user\n")
        script_file.write("log.info('Opening the " + frame_name + " frame ...')\n")
        script_file.write("\n")
        script_file.write("# Open the frame\n")
        script_file.write("frame = Frame.from_file('" + remote_frame_path + "')\n")
        script_file.write("\n")
        script_file.write("# Inform the user\n")
        script_file.write("log.info('Convolving the " + frame_name + " frame ...')\n")
        script_file.write("\n")
        script_file.write("# Do the convolution and save the result\n")
        script_file.write("frame.convolve(kernel, allow_huge=True)\n")
        script_file.write("frame.save('" + remote_frame_path + "')\n") # overwrite the frame
        script_file.write("\n")
    #script_file.write("# Save the image\n")
    #script_file.write("image.save(" + remote_image_path + ")\n")
    # Write to disk
    #script_file.flush()
    script_file.close()
    # Debugging
    log.debug("Uploading the python script ...")
    # Upload the script file
    remote_script_path = fs.join(remote_temp_path, "convolve.py")
    remote.upload(local_script_path, remote_temp_path, new_name="convolve.py", show_output=True)
    # Close the local script (it is automatically removed)
    #script_file.close()
    # Debugging
    log.debug("Executing the script remotely ...")
    # Execute the script file remotely
    remote.execute("python " + remote_script_path, output=False, show_output=True)
    # Debugging
    log.debug("Downloading the results ...")
    # Download the resulting FITS file (the convolved image)
    #local_result_path = self.full_output_path("convolved.fits")
    #remote.download(remote_image_path, fs.directory_of(local_result_path), new_name="convolved.fits", compress=True)
    for remote_frame_path in remote_frame_paths:
        # Determine the name of the local frame file
        frame_file_name = fs.name(remote_frame_path)
        # Debugging
        log.debug("Downloading the " + fs.strip_extension(frame_file_name) + " frame ...")
        # Download
        remote.download(remote_frame_path, local_temp_path, new_name=frame_file_name, compress=True, show_output=True)
    # Remove the temporary directory on the remote's filesystem
    remote.remove_directory(remote_temp_path)
    # Load the result
    #self.image = Image.from_file(local_result_path)
    # Replace each convolved frame in the image with the downloaded result.
    for frame_name in image.frames.keys():
        if frame_name in constant_frames: continue # Skip constant frames, these are not convolved
        local_frame_path = fs.join(local_temp_path, frame_name + ".fits")
        image.frames[frame_name] = Frame.from_file(local_frame_path)
    # Remove the local temporary directory
    fs.remove_directory(local_temp_path)
def remote_convolution_frame(frame, kernel_path, host_id):

    """
    Convolve a single frame with a kernel FITS file on a remote host.

    The frame and the kernel are uploaded to a temporary remote directory, a
    small python script that performs the convolution with PTS is generated
    locally, uploaded and executed remotely, and the result is downloaded back.
    Both temporary directories (remote and local) are removed afterwards.

    :param frame: the Frame to convolve (a copy is returned unconvolved when
                  the frame is constant)
    :param kernel_path: local path to the kernel FITS file
    :param host_id: ID of the remote host on which to perform the convolution
    :return: the convolved Frame
    """

    # Check whether the frame is constant. If it is, we don't have to convolve!
    if frame.is_constant(): return frame.copy()

    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:

        # Debugging
        log.debug("Logging in to remote host ...")

        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)

    # Debugging
    log.debug("Creating temporary directory remotely ...")

    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("convolution"))
    remote.create_directory(remote_temp_path)

    # Debugging
    log.debug("Creating local temporary directory ...")

    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("convolution"))
    fs.create_directory(local_temp_path)

    # Debugging
    log.debug("Writing the frame to the temporary directory ...")

    # Write the frame
    local_frame_path = fs.join(local_temp_path, frame.name + ".fits")
    frame.save(local_frame_path)

    # Debugging
    #log.debug("Writing the kernel to the temporary directory ...")

    # Write the kernel
    #local_kernel_path = fs.join(local_temp_path, "kernel.fits")
    #kernel.save(local_kernel_path)

    # Debugging
    log.debug("Uploading the frame to the remote directory ...")

    # Upload the frame file
    # NOTE(review): the remote name carries no ".fits" extension while the
    # local file does -- verify Remote.upload/download handle this consistently
    remote_frame_path = fs.join(remote_temp_path, frame.name)
    remote.upload(local_frame_path, remote_temp_path, new_name=frame.name, compress=True, show_output=True)

    # Debugging
    #log.debug("Uploading the kernel to the remote directory ...")

    # Upload the kernel FITS file to the remote directory
    #remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    #remote.upload(local_kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)

    # Debugging
    log.debug("Uploading the kernel to the remote directory ...")

    # Upload the kernel FITS file to the remote directory
    remote_kernel_path = fs.join(remote_temp_path, "kernel.fits")
    remote.upload(kernel_path, remote_temp_path, new_name="kernel.fits", compress=True, show_output=True)

    # Debugging
    log.debug("Creating a python script to perform the convolution remotely ...")

    # Create the script
    local_script_path = fs.join(local_temp_path, "convolve.py")
    script_file = open(local_script_path, 'w')

    script_file.write("#!/usr/bin/env python\n")
    script_file.write("# -*- coding: utf8 -*-\n")
    script_file.write("\n")
    script_file.write("# Import the relevant PTS classes and modules\n")
    script_file.write("from pts.magic.core.frame import Frame\n")
    script_file.write("from pts.core.tools.logging import log\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Opening the kernel frame ...')\n")
    script_file.write("\n")
    script_file.write("# Open the kernel frame\n")
    script_file.write("kernel = Frame.from_file('" + remote_kernel_path + "')\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Opening the frame ...')\n")
    script_file.write("\n")
    script_file.write("# Open the frame\n")
    script_file.write("frame = Frame.from_file('" + remote_frame_path + "')\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Convolving the frame ...')\n")
    script_file.write("\n")
    script_file.write("# Do the convolution and save the result\n")
    # NOTE(review): this generated script calls 'frame.convolved(...)' while
    # the sibling function above emits in-place 'frame.convolve(...)' --
    # confirm that the Frame class provides both spellings
    script_file.write("convolved = frame.convolved(kernel, allow_huge=True)\n")
    script_file.write("convolved.save('" + remote_frame_path + "')\n") # overwrite the frame

    # Write to disk
    script_file.close()

    # Debugging
    log.debug("Uploading the python script ...")

    # Upload the script file
    remote_script_path = fs.join(remote_temp_path, "convolve.py")
    remote.upload(local_script_path, remote_temp_path, new_name="convolve.py", show_output=True)

    # Debugging
    log.debug("Executing the script remotely ...")

    # Execute the script file remotely
    remote.execute("python " + remote_script_path, output=False, show_output=True)

    # Debugging
    log.debug("Downloading the result ...")

    # Determine the name of the local frame file
    frame_file_name = fs.name(remote_frame_path)

    # Debugging
    log.debug("Downloading the " + fs.strip_extension(frame_file_name) + " frame ...")

    # Download
    remote.download(remote_frame_path, local_temp_path, new_name=frame_file_name, compress=True, show_output=True)

    # Remove the temporary directory on the remote's filesystem
    remote.remove_directory(remote_temp_path)

    # Load the convolved frame
    # NOTE(review): the download above stores the result under
    # 'frame_file_name' (no ".fits"), but this reads 'local_frame_path'
    # (with ".fits"), i.e. the file written *before* convolution -- verify
    # that the convolved data is actually what gets returned here
    convolved = Frame.from_file(local_frame_path)

    # Remove the local temporary directory
    fs.remove_directory(local_temp_path)

    # Return the convolved frame
    return convolved
def remote_filter_convolution_no_pts(host_id, datacube_path, wavelengths, filters):

    """
    Prepare a remote filter convolution that does not require PTS to be
    installed on the remote host.

    NOT FINISHED: this currently only creates the remote and local temporary
    directories and writes the filter transmission curves to the local one;
    the 'datacube_path' and 'wavelengths' arguments are not used yet and
    nothing is uploaded or executed remotely.

    :param host_id: ID of the remote host
    :param datacube_path: local path to the datacube (currently unused)
    :param wavelengths: wavelength grid of the datacube (currently unused)
    :param filters: filter objects whose transmission curves are written out
    :return: None
    """

    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:

        # Debugging
        log.debug("Logging in to remote host ...")

        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)

    # Debugging
    log.debug("Creating temporary directory remotely ...")

    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("filter-convolution"))
    remote.create_directory(remote_temp_path)

    # Debugging
    log.debug("Creating local temporary directory ...")

    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("filter-convolution"))
    fs.create_directory(local_temp_path)

    # Collect the integrated transmission per filter, keyed on filter name
    integrated_transmissions = dict()

    # Loop over the filters
    for fltr in filters:

        # Get the transmission data
        # (reads private attributes of the Filter implementation)
        fltr_wavelengths = fltr._Wavelengths
        fltr_transmission = fltr._Transmission
        fltr_integrated_transmission = fltr._IntegratedTransmission
        integrated_transmissions[fltr.name] = fltr_integrated_transmission

        # Save the transmission data
        path = fs.join(local_temp_path, "transmission__" + str(fltr) + ".dat")
        np.savetxt(path, (fltr_wavelengths, fltr_transmission))

    #print(integrated_transmissions)
    #print(local_temp_path)

    # Write the integrated transmissions as "name: value" lines
    integrated_path = fs.join(local_temp_path, "integrated_transmissions.txt")
    with open(integrated_path, 'w') as integrated_trans_file:
        for fltr_name in integrated_transmissions:
            integrated_trans_file.write(fltr_name + ": " + str(integrated_transmissions[fltr_name]) + "\n")

    # NOT FINISHED ...
def remote_filter_convolution(host_id, datacube_path, wavelengths, filters, keep_output=False):

    """
    Convolve a simulated datacube with a set of broadband filters on a remote host.

    The datacube and its wavelength grid are uploaded to a temporary remote
    directory, a python script performing the filter convolutions with PTS is
    generated locally, uploaded and executed remotely, and the resulting frames
    (one FITS file per filter) are downloaded and returned.

    :param host_id: ID of the remote host on which to run the convolutions
    :param datacube_path: local path to the datacube FITS file
    :param wavelengths: wavelengths (in micron) of the datacube frames
    :param filters: dictionary of filter objects, keyed on filter name
    :param keep_output: keep the locally downloaded temporary directory
    :return: dictionary mapping each filter name to its convolved Frame
    :raises RuntimeError: when a result frame is missing after the remote run
    """

    # Check whether we are already connected to the specified remote host
    if host_id in connected_remotes and connected_remotes[host_id] is not None:
        remote = connected_remotes[host_id]
    else:

        # Debugging
        log.debug("Logging in to remote host ...")

        # Create a remote instance for the specified host ID
        remote = Remote()
        remote.setup(host_id)

    # Debugging
    log.debug("Creating temporary directory remotely ...")

    # Create a temporary directory to do the convolution
    remote_home_directory = remote.home_directory
    remote_temp_path = fs.join(remote_home_directory, time.unique_name("filter-convolution"))
    remote.create_directory(remote_temp_path)

    # Debugging
    log.debug("Creating local temporary directory ...")

    # Create a temporary directory locally to contain the frames
    local_temp_path = fs.join(fs.home(), time.unique_name("filter-convolution"))
    fs.create_directory(local_temp_path)

    # Debugging
    log.debug("Uploading the datacube to the temporary remote directory ...")

    # Upload the datacube
    datacube_name = fs.name(datacube_path)
    remote_datacube_path = fs.join(remote_temp_path, datacube_name)
    remote.upload(datacube_path, remote_temp_path, compress=True, show_output=True)

    # Debugging
    log.debug("Writing the wavelengths to the temporary local directory ...")
    local_wavelengths_path = fs.join(local_temp_path, "wavelengths.txt")
    np.savetxt(local_wavelengths_path, wavelengths)

    # Debugging
    log.debug("Uploading the wavelengths file to the remote directory ...")

    # Upload the wavelengths file to the remote directory
    remote_wavelengths_path = fs.join(remote_temp_path, "wavelengths.txt")
    remote.upload(local_wavelengths_path, remote_temp_path, compress=True, show_output=True)

    # Debugging
    log.debug("Creating a python script to perform the filter convolution remotely ...")

    # Create the script
    local_script_path = fs.join(local_temp_path, "make_images.py")
    script_file = open(local_script_path, 'w')

    script_file.write("#!/usr/bin/env python\n")
    script_file.write("# -*- coding: utf8 -*-\n")
    script_file.write("\n")
    script_file.write("# Import standard modules\n")
    script_file.write("import numpy as np\n")
    script_file.write("\n")
    script_file.write("# Import the relevant PTS classes and modules\n")
    script_file.write("from pts.magic.core.image import Image\n")
    script_file.write("from pts.magic.core.frame import Frame\n")
    script_file.write("from pts.core.basics.filter import Filter\n")
    script_file.write("from pts.core.tools.logging import log\n")
    script_file.write("from pts.core.tools import filesystem as fs\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Loading the datacube ...')\n")
    script_file.write("\n")
    script_file.write("# Open the datacube as an Image\n")
    script_file.write("datacube = Image.from_file('" + remote_datacube_path + "', always_call_first_primary=False)\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Loading the wavelengths ...')\n")
    script_file.write("\n")
    script_file.write("# Load the wavelengths from the text file\n")
    script_file.write("wavelengths = np.loadtxt('" + remote_wavelengths_path + "')\n")
    script_file.write("\n")
    script_file.write("# Convert the frames from neutral surface brightness to wavelength surface brightness\n")
    script_file.write("for l in range(len(wavelengths)):\n")
    script_file.write("\n")
    script_file.write(" # Get the wavelength\n")
    script_file.write(" wavelength = wavelengths[l]\n")
    script_file.write("\n")
    script_file.write(" # Determine the name of the frame in the datacube\n")
    script_file.write(" frame_name = 'frame' + str(l)\n")
    script_file.write("\n")
    script_file.write(" # Divide this frame by the wavelength in micron\n")
    script_file.write(" datacube.frames[frame_name] /= wavelength\n")
    script_file.write("\n")
    script_file.write(" # Set the new unit\n")
    script_file.write(" datacube.frames[frame_name].unit = 'W / (m2 * arcsec2 * micron)'\n")
    script_file.write("\n")
    script_file.write("# Convert the datacube to a numpy array where wavelength is the third dimension\n")
    script_file.write("fluxdensities = datacube.asarray()\n")
    script_file.write("\n")
    script_file.write("# Inform the user\n")
    script_file.write("log.info('Creating the filters ...')\n")
    script_file.write("\n")
    script_file.write("filters = dict()\n")
    script_file.write("\n")

    # Emit the creation of each Filter object in the generated script
    for filter_name in filters:
        fltr = filters[filter_name]
        script_file.write("# Inform the user\n")
        script_file.write("log.info('Creating the " + str(fltr) + " filter')\n")
        script_file.write("\n")
        script_file.write("fltr = Filter.from_string('" + str(fltr) + "')\n")
        script_file.write("filters['" + filter_name + "'] = fltr\n")
        script_file.write("\n")

    script_file.write("# Inform the user\n")
    script_file.write("log.info('Performing the filter convolutions ...')\n")
    script_file.write("\n")
    script_file.write("# Loop over the filters, perform the convolution\n")
    script_file.write("for filter_name in filters:\n")
    script_file.write("\n")
    # FIX: look up the current filter *before* logging; previously the log
    # line referenced 'fltr' left over from the filter-creation section above,
    # so every iteration reported the name of the last created filter
    script_file.write(" fltr = filters[filter_name]\n")
    script_file.write(" log.info('Making the observed image for the ' + str(fltr) + ' filter ...')\n")
    script_file.write(" data = fltr.convolve(wavelengths, fluxdensities)\n")
    script_file.write(" frame = Frame(data)\n")
    script_file.write(" frame.unit = 'W/(m2 * arcsec2 * micron)'\n")
    script_file.write(" path = fs.join('" + remote_temp_path + "', filter_name + '.fits')\n")
    script_file.write(" frame.save(path)\n")

    # Write to disk
    script_file.close()

    # Debugging
    log.debug("Uploading the python script ...")

    # Upload the script file
    remote_script_path = fs.join(remote_temp_path, "make_images.py")
    remote.upload(local_script_path, remote_temp_path, new_name="make_images.py", show_output=True)

    # Debugging
    log.debug("Executing the script remotely ...")

    # Execute the script file remotely
    remote.execute("python " + remote_script_path, output=False, show_output=True)

    # Remove the datacube in the remote directory
    remote.remove_file(remote_datacube_path)

    # Debugging
    log.debug("Downloading the convolved frames ...")

    # Download the remote temporary directory holding the result frames
    # NOTE(review): the result frames are expected directly inside
    # 'local_downloaded_temp_path' below -- confirm Remote.download of a
    # directory does not nest them one level deeper
    local_downloaded_temp_path = fs.join(fs.home(), fs.name(remote_temp_path))
    fs.create_directory(local_downloaded_temp_path)
    remote.download(remote_temp_path, local_downloaded_temp_path, compress=True, show_output=True)

    # Remove the temporary directory on the remote's filesystem
    remote.remove_directory(remote_temp_path)

    # Remove the local temporary directory
    fs.remove_directory(local_temp_path)

    # Create a dictionary to contain the frames
    frames = dict()

    # Loop over the filters, load the frame
    for filter_name in filters:

        # Determine the path to the resulting FITS file
        path = fs.join(local_downloaded_temp_path, filter_name + ".fits")

        # Check whether the frame exists
        if not fs.is_file(path): raise RuntimeError("The image for filter " + str(filters[filter_name]) + " is missing")

        # Load the FITS file
        frame = Frame.from_file(path)

        # Add the frame to the dictionary
        frames[filter_name] = frame

    # Remove the downloaded temporary directory
    if not keep_output: fs.remove_directory(local_downloaded_temp_path)

    # Return the dictionary of frames
    return frames
|
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Provides:
- List; like list but returns None instead of IndexOutOfBounds
- Storage; like dictionary allowing also for `obj.foo` for `obj['foo']`
"""
try:
import cPickle as pickle
except:
import pickle
import copy_reg
import gluon.portalocker as portalocker
# Public API of this module.
__all__ = ['List', 'Storage', 'Settings', 'Messages',
           'StorageList', 'load_storage', 'save_storage']

# Sentinel default for List.__call__: a unique object whose *identity*
# ("default is DEFAULT") marks "no default supplied".
DEFAULT = lambda:0
class Storage(dict):
    """
    A dictionary whose items are also reachable as attributes.

    ``obj.foo`` reads ``obj['foo']`` (yielding None for a missing key instead
    of raising), ``obj.foo = x`` stores ``obj['foo'] = x``, and ``del obj.foo``
    removes the item.

    Example::

        >>> o = Storage(a=1)
        >>> o.a
        1
        >>> o['a']
        1
        >>> o.a = 2
        >>> o['a']
        2
        >>> del o.a
        >>> o.a is None
        True
    """
    __slots__ = ()

    # Route attribute access straight to the dict protocol; dict.get makes
    # missing keys/attributes read as None instead of raising.
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    __getitem__ = dict.get
    __getattr__ = dict.get

    def __getnewargs__(self):
        # NOTE(review): getattr(dict, self) passes a Storage instance where an
        # attribute *name* is expected, so this would raise TypeError if ever
        # invoked. Kept verbatim: pickling goes through pickle_storage (see
        # the copy_reg registration below), so this path appears unused --
        # confirm before changing.
        return getattr(dict, self).__getnewargs__(self)

    def __repr__(self):
        return '<Storage %s>' % dict.__repr__(self)

    def __getstate__(self):
        # http://stackoverflow.com/questions/5247250/why-does-pickle-getstate-accept-as-a-return-value-the-very-instance-it-requi
        return None

    def __copy__(self):
        return Storage(self)

    def getlist(self, key):
        """
        Return the value stored under *key* as a list.

        Lists and tuples come back unchanged, a missing key yields [], an
        explicitly stored None is returned as-is, and any other scalar is
        wrapped in a one-element list.

        Example output for a query string of `?x=abc&y=abc&y=def`::

            >>> request = Storage(vars=Storage(x='abc', y=['abc', 'def']))
            >>> request.vars.getlist('x')
            ['abc']
            >>> request.vars.getlist('y')
            ['abc', 'def']
            >>> request.vars.getlist('z')
            []
        """
        value = self.get(key, [])
        if value is not None and not isinstance(value, (list, tuple)):
            return [value]
        return value

    def getfirst(self, key, default=None):
        """
        Return the first element of ``getlist(key)``, or *default* when the
        list is empty.
        """
        values = self.getlist(key)
        if values:
            return values[0]
        return default

    def getlast(self, key, default=None):
        """
        Return the last element of ``getlist(key)``, or *default* when the
        list is empty.
        """
        values = self.getlist(key)
        if values:
            return values[-1]
        return default
def pickle_storage(s):
    """Reduce helper so a Storage pickles as (Storage, (plain dict,))."""
    return Storage, (dict(s),)

# Register the custom reducer (Python 2 'copy_reg' module, 'copyreg' in 3).
copy_reg.pickle(Storage, pickle_storage)

# Types considered safe to pickle; 'long' exists in Python 2 only.
PICKABLE = (str, int, long, float, bool, list, dict, tuple, set)
class StorageList(Storage):
    """
    Storage variant where a missing element defaults to [] instead of None.
    Reading a missing key creates and stores the empty list, so the caller
    can append to it directly.
    """

    def __getitem__(self, key):
        return self.__getattr__(key)

    def __getattr__(self, key):
        # Create the default list on first access so mutations persist.
        if key not in self:
            self[key] = []
        return self.get(key)
def load_storage(filename):
    """Unpickle a Storage from *filename* while holding a portalocker lock."""
    handle = None
    try:
        handle = portalocker.LockedFile(filename, 'rb')
        data = pickle.load(handle)
    finally:
        # Release the lock even when reading fails; handle may be None when
        # opening itself raised.
        if handle:
            handle.close()
    return Storage(data)
def save_storage(storage, filename):
    """Pickle *storage* (as a plain dict) to *filename* under a portalocker lock."""
    handle = None
    try:
        handle = portalocker.LockedFile(filename, 'wb')
        pickle.dump(dict(storage), handle)
    finally:
        # Release the lock even when writing fails; handle may be None when
        # opening itself raised.
        if handle:
            handle.close()
class Settings(Storage):
    """
    Storage that can lock its key set ('lock_keys') and its values
    ('lock_values'); violating a lock raises SyntaxError.
    """

    def __setattr__(self, key, value):
        # dict.get (Storage.__getitem__) is side-effect free, so reading the
        # lock flags up front is equivalent to the lazy checks.
        keys_locked = self['lock_keys']
        values_locked = self['lock_values']
        if key != 'lock_keys' and keys_locked and key not in self:
            raise SyntaxError("setting key '%s' does not exist" % key)
        if key != 'lock_values' and values_locked:
            raise SyntaxError('setting value cannot be changed: %s' % key)
        self[key] = value
class Messages(Settings):
    """
    Settings whose string values are run through the translator T (stored at
    construction) on attribute access; non-string values pass through.
    """

    def __init__(self, T):
        Storage.__init__(self, T=T)

    def __getattr__(self, key):
        value = self[key]
        return self.T(value) if isinstance(value, str) else value
class FastStorage(dict):
    """
    Attribute-style dict that aliases the instance __dict__ to the mapping
    itself. Intended to replace Storage eventually, but it leaks memory
    because of http://bugs.python.org/issue1469629, so Storage stays.

    >>> s = FastStorage(a=1)
    >>> s.a
    1
    >>> s['a']
    1
    >>> s.b is None
    True
    >>> isinstance(s, dict)
    True
    """

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Alias the attribute namespace to the mapping so attribute and item
        # access see the same data.
        self.__dict__ = self

    def __getattr__(self, key):
        # Only reached when normal attribute lookup (through the aliased
        # __dict__) fails; missing keys read as None.
        if key in self:
            return getattr(self, key)
        return None

    def __getitem__(self, key):
        return dict.get(self, key, None)

    def copy(self):
        # Temporarily detach the aliased __dict__ so the constructor sees a
        # plain mapping, then restore the alias.
        self.__dict__ = {}
        duplicate = FastStorage(self)
        self.__dict__ = self
        return duplicate

    def __repr__(self):
        return '<Storage %s>' % dict.__repr__(self)

    def __getstate__(self):
        return dict(self)

    def __setstate__(self, state):
        dict.__init__(self, state)
        self.__dict__ = self

    def update(self, *args, **kwargs):
        # dict.__init__ on an existing dict merges (does not clear).
        dict.__init__(self, *args, **kwargs)
        self.__dict__ = self
class List(list):
    """
    A list whose call syntax returns None for an out-of-range index instead
    of raising IndexError.
    """

    def __call__(self, i, default=DEFAULT, cast=None, otherwise=None):
        """Fast validity check for ``request.args``-style access.

        Args:
            i: index to fetch
            default: value used when the index is out of range (the default
                is returned verbatim -- casting is skipped for it)
            cast: optional callable used to convert the fetched value
            otherwise: what to do when the cast fails:
                - None: raise HTTP(404)
                - str: redirect to that address
                - callable: call it (nothing is passed) and return the result

        Example:
            You can use::

                request.args(0, default=0, cast=int, otherwise='http://error_url')
                request.args(0, default=0, cast=int, otherwise=lambda: ...)
        """
        size = len(self)
        # Valid (possibly negative) indexes lie in [-size, size).
        if -size <= i < size:
            value = self[i]
        elif default is DEFAULT:
            value = None
        else:
            # An explicit default is used as-is: disable casting.
            value, cast = default, False
        if cast:
            try:
                value = cast(value)
            except (ValueError, TypeError):
                from http import HTTP, redirect
                if otherwise is None:
                    raise HTTP(404)
                elif isinstance(otherwise, str):
                    redirect(otherwise)
                elif callable(otherwise):
                    return otherwise()
                else:
                    raise RuntimeError("invalid otherwise")
        return value
# Run the doctests embedded in the docstrings when executed as a script.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
|
from ..debugging import bacpypes_debugging, ModuleLogger
from ..capability import Capability
from ..basetypes import ErrorType, PropertyIdentifier
from ..primitivedata import Atomic, Null, Unsigned
from ..constructeddata import Any, Array, ArrayOf, List
from ..apdu import \
SimpleAckPDU, ReadPropertyACK, ReadPropertyMultipleACK, \
ReadAccessResult, ReadAccessResultElement, ReadAccessResultElementChoice
from ..errors import ExecutionError
from ..object import PropertyError
# Module-level debugging flag and logger (standard bacpypes convention).
_debug = 0
_log = ModuleLogger(globals())

# Convenience array type for lists of property identifiers.
ArrayOfPropertyIdentifier = ArrayOf(PropertyIdentifier)
@bacpypes_debugging
class ReadWritePropertyServices(Capability):
    """Capability mixin implementing the BACnet ReadProperty and WriteProperty
    services over the objects of the local application.

    Assumes the host class provides get_object_id(), response() and a
    localDevice attribute (standard bacpypes Application members) -- confirm
    against the application this capability is mixed into.
    """

    def __init__(self):
        if _debug: ReadWritePropertyServices._debug("__init__")
        Capability.__init__(self)

    def do_ReadPropertyRequest(self, apdu):
        """Return the value of some property of one of our objects.

        Replies with a ReadPropertyACK via self.response(), or raises
        ExecutionError for an unknown object or property.
        """
        if _debug: ReadWritePropertyServices._debug("do_ReadPropertyRequest %r", apdu)

        # extract the object identifier
        objId = apdu.objectIdentifier

        # check for wildcard: 4194303 = 2**22 - 1 is the BACnet wildcard
        # device instance, answered with the local device itself
        if (objId == ('device', 4194303)) and self.localDevice is not None:
            if _debug: ReadWritePropertyServices._debug(" - wildcard device identifier")
            objId = self.localDevice.objectIdentifier

        # get the object
        obj = self.get_object_id(objId)
        if _debug: ReadWritePropertyServices._debug(" - object: %r", obj)

        if not obj:
            raise ExecutionError(errorClass='object', errorCode='unknownObject')

        try:
            # get the datatype
            datatype = obj.get_datatype(apdu.propertyIdentifier)
            if _debug: ReadWritePropertyServices._debug(" - datatype: %r", datatype)

            # get the value
            value = obj.ReadProperty(apdu.propertyIdentifier, apdu.propertyArrayIndex)
            if _debug: ReadWritePropertyServices._debug(" - value: %r", value)
            if value is None:
                raise PropertyError(apdu.propertyIdentifier)

            # change atomic values into something encodeable
            if issubclass(datatype, Atomic) or (issubclass(datatype, (Array, List)) and isinstance(value, list)):
                value = datatype(value)
            elif issubclass(datatype, Array) and (apdu.propertyArrayIndex is not None):
                # array index 0 reads the array length, encoded as Unsigned
                if apdu.propertyArrayIndex == 0:
                    value = Unsigned(value)
                elif issubclass(datatype.subtype, Atomic):
                    value = datatype.subtype(value)
                elif not isinstance(value, datatype.subtype):
                    raise TypeError("invalid result datatype, expecting {0} and got {1}" \
                        .format(datatype.subtype.__name__, type(value).__name__))
            elif issubclass(datatype, List):
                value = datatype(value)
            elif not isinstance(value, datatype):
                raise TypeError("invalid result datatype, expecting {0} and got {1}" \
                    .format(datatype.__name__, type(value).__name__))
            if _debug: ReadWritePropertyServices._debug(" - encodeable value: %r", value)

            # this is a ReadProperty ack
            resp = ReadPropertyACK(context=apdu)
            resp.objectIdentifier = objId
            resp.propertyIdentifier = apdu.propertyIdentifier
            resp.propertyArrayIndex = apdu.propertyArrayIndex

            # save the result in the property value
            resp.propertyValue = Any()
            resp.propertyValue.cast_in(value)
            if _debug: ReadWritePropertyServices._debug(" - resp: %r", resp)

        except PropertyError:
            # surface a missing/valueless property as the standard BACnet error
            raise ExecutionError(errorClass='property', errorCode='unknownProperty')

        # return the result
        self.response(resp)

    def do_WritePropertyRequest(self, apdu):
        """Change the value of some property of one of our objects.

        Replies with a SimpleAckPDU via self.response(), or raises
        ExecutionError for an unknown object or property.
        """
        if _debug: ReadWritePropertyServices._debug("do_WritePropertyRequest %r", apdu)

        # get the object
        obj = self.get_object_id(apdu.objectIdentifier)
        if _debug: ReadWritePropertyServices._debug(" - object: %r", obj)
        if not obj:
            raise ExecutionError(errorClass='object', errorCode='unknownObject')

        try:
            # check if the property exists (read before write)
            if obj.ReadProperty(apdu.propertyIdentifier, apdu.propertyArrayIndex) is None:
                raise PropertyError(apdu.propertyIdentifier)

            # get the datatype, special case for null
            if apdu.propertyValue.is_application_class_null():
                datatype = Null
            else:
                datatype = obj.get_datatype(apdu.propertyIdentifier)
            if _debug: ReadWritePropertyServices._debug(" - datatype: %r", datatype)

            # special case for array parts, others are managed by cast_out
            if issubclass(datatype, Array) and (apdu.propertyArrayIndex is not None):
                if apdu.propertyArrayIndex == 0:
                    value = apdu.propertyValue.cast_out(Unsigned)
                else:
                    value = apdu.propertyValue.cast_out(datatype.subtype)
            else:
                value = apdu.propertyValue.cast_out(datatype)
            if _debug: ReadWritePropertyServices._debug(" - value: %r", value)

            # change the value
            value = obj.WriteProperty(apdu.propertyIdentifier, value, apdu.propertyArrayIndex, apdu.priority)

            # success
            resp = SimpleAckPDU(context=apdu)
            if _debug: ReadWritePropertyServices._debug(" - resp: %r", resp)

        except PropertyError:
            raise ExecutionError(errorClass='property', errorCode='unknownProperty')

        # return the result
        self.response(resp)
@bacpypes_debugging
def read_property_to_any(obj, propertyIdentifier, propertyArrayIndex=None):
    """Read the specified property of the object, with the optional array index,
    and cast the result into an Any object.

    :param obj: object exposing get_datatype() and ReadProperty()
    :param propertyIdentifier: the property to read
    :param propertyArrayIndex: optional array index (0 reads the array length)
    :return: an Any element with the encoded value
    :raises ExecutionError: when the datatype is unknown or the property has no value
    """
    if _debug: read_property_to_any._debug("read_property_to_any %s %r %r", obj, propertyIdentifier, propertyArrayIndex)

    # get the datatype
    datatype = obj.get_datatype(propertyIdentifier)
    if _debug: read_property_to_any._debug(" - datatype: %r", datatype)
    if datatype is None:
        raise ExecutionError(errorClass='property', errorCode='datatypeNotSupported')

    # get the value
    value = obj.ReadProperty(propertyIdentifier, propertyArrayIndex)
    if _debug: read_property_to_any._debug(" - value: %r", value)
    if value is None:
        raise ExecutionError(errorClass='property', errorCode='unknownProperty')

    # change atomic values into something encodeable
    if issubclass(datatype, Atomic) or (issubclass(datatype, (Array, List)) and isinstance(value, list)):
        value = datatype(value)
    elif issubclass(datatype, Array) and (propertyArrayIndex is not None):
        # array index 0 reads the array length, encoded as Unsigned
        if propertyArrayIndex == 0:
            value = Unsigned(value)
        elif issubclass(datatype.subtype, Atomic):
            value = datatype.subtype(value)
        elif not isinstance(value, datatype.subtype):
            raise TypeError("invalid result datatype, expecting %s and got %s" \
                % (datatype.subtype.__name__, type(value).__name__))
    elif issubclass(datatype, List):
        # wrap List datatypes as well, mirroring do_ReadPropertyRequest;
        # previously this branch was missing here, so a List-typed property
        # whose value was not already a datatype instance raised TypeError
        value = datatype(value)
    elif not isinstance(value, datatype):
        raise TypeError("invalid result datatype, expecting %s and got %s" \
            % (datatype.__name__, type(value).__name__))
    if _debug: read_property_to_any._debug(" - encodeable value: %r", value)

    # encode the value
    result = Any()
    result.cast_in(value)
    if _debug: read_property_to_any._debug(" - result: %r", result)

    # return the object
    return result
@bacpypes_debugging
def read_property_to_result_element(obj, propertyIdentifier, propertyArrayIndex=None):
    """Read the specified property of the object, with the optional array index,
    and wrap the outcome -- the encoded value or the access error -- in a
    ReadAccessResultElement suitable for a ReadPropertyMultiple reply."""
    if _debug: read_property_to_result_element._debug("read_property_to_result_element %s %r %r", obj, propertyIdentifier, propertyArrayIndex)

    # the choice carries either a property value or a property access error
    choice = ReadAccessResultElementChoice()

    try:
        # a missing object becomes an access error rather than an exception
        if not obj:
            raise ExecutionError(errorClass='object', errorCode='unknownObject')
        choice.propertyValue = read_property_to_any(obj, propertyIdentifier, propertyArrayIndex)
        if _debug: read_property_to_result_element._debug(" - success")
    except PropertyError as err:
        if _debug: read_property_to_result_element._debug(" - error: %r", err)
        choice.propertyAccessError = ErrorType(errorClass='property', errorCode='unknownProperty')
    except ExecutionError as err:
        if _debug: read_property_to_result_element._debug(" - error: %r", err)
        choice.propertyAccessError = ErrorType(errorClass=err.errorClass, errorCode=err.errorCode)

    # bundle the outcome with the property reference it answers
    element = ReadAccessResultElement(
        propertyIdentifier=propertyIdentifier,
        propertyArrayIndex=propertyArrayIndex,
        readResult=choice,
    )
    if _debug: read_property_to_result_element._debug(" - read_access_result_element: %r", element)

    # fini
    return element
@bacpypes_debugging
class ReadWritePropertyMultipleServices(Capability):
    """Capability mixin implementing the BACnet ReadPropertyMultiple service."""

    def __init__(self):
        if _debug: ReadWritePropertyMultipleServices._debug("__init__")
        Capability.__init__(self)

    def do_ReadPropertyMultipleRequest(self, apdu):
        """Respond to a ReadPropertyMultiple Request.

        Builds one ReadAccessResult per read-access-spec in the request and
        replies with a ReadPropertyMultipleACK.
        """
        if _debug: ReadWritePropertyMultipleServices._debug("do_ReadPropertyMultipleRequest %r", apdu)
        # response is a list of read access results (or an error)
        resp = None
        read_access_result_list = []
        # loop through the request
        for read_access_spec in apdu.listOfReadAccessSpecs:
            # get the object identifier
            objectIdentifier = read_access_spec.objectIdentifier
            if _debug: ReadWritePropertyMultipleServices._debug(" - objectIdentifier: %r", objectIdentifier)
            # check for wildcard (4194303 is the BACnet unassigned device instance)
            if (objectIdentifier == ('device', 4194303)) and self.localDevice is not None:
                if _debug: ReadWritePropertyMultipleServices._debug(" - wildcard device identifier")
                objectIdentifier = self.localDevice.objectIdentifier
            # get the object; may be None, handled per property below
            obj = self.get_object_id(objectIdentifier)
            if _debug: ReadWritePropertyMultipleServices._debug(" - object: %r", obj)
            # build a list of result elements
            read_access_result_element_list = []
            # loop through the property references
            for prop_reference in read_access_spec.listOfPropertyReferences:
                # get the property identifier
                propertyIdentifier = prop_reference.propertyIdentifier
                if _debug: ReadWritePropertyMultipleServices._debug(" - propertyIdentifier: %r", propertyIdentifier)
                # get the array index (optional)
                propertyArrayIndex = prop_reference.propertyArrayIndex
                if _debug: ReadWritePropertyMultipleServices._debug(" - propertyArrayIndex: %r", propertyArrayIndex)
                # check for special property identifiers that expand to many properties
                if propertyIdentifier in ('all', 'required', 'optional'):
                    if not obj:
                        # build a property access error
                        read_result = ReadAccessResultElementChoice()
                        read_result.propertyAccessError = ErrorType(errorClass='object', errorCode='unknownObject')
                        # make an element for this error
                        read_access_result_element = ReadAccessResultElement(
                            propertyIdentifier=propertyIdentifier,
                            propertyArrayIndex=propertyArrayIndex,
                            readResult=read_result,
                        )
                        # add it to the list
                        read_access_result_element_list.append(read_access_result_element)
                    else:
                        for propId, prop in obj._properties.items():
                            if _debug: ReadWritePropertyMultipleServices._debug(" - checking: %r %r", propId, prop.optional)
                            # skip propertyList for ReadPropertyMultiple
                            if (propId == 'propertyList'):
                                if _debug: ReadWritePropertyMultipleServices._debug(" - ignore propertyList")
                                continue
                            if (propertyIdentifier == 'all'):
                                pass
                            elif (propertyIdentifier == 'required') and (prop.optional):
                                if _debug: ReadWritePropertyMultipleServices._debug(" - not a required property")
                                continue
                            elif (propertyIdentifier == 'optional') and (not prop.optional):
                                if _debug: ReadWritePropertyMultipleServices._debug(" - not an optional property")
                                continue
                            # read the specific property
                            read_access_result_element = read_property_to_result_element(obj, propId, propertyArrayIndex)
                            # check for undefined property: expansions silently skip them
                            if read_access_result_element.readResult.propertyAccessError \
                                    and read_access_result_element.readResult.propertyAccessError.errorCode == 'unknownProperty':
                                continue
                            # add it to the list
                            read_access_result_element_list.append(read_access_result_element)
                else:
                    # read the specific property (errors are embedded in the element)
                    read_access_result_element = read_property_to_result_element(obj, propertyIdentifier, propertyArrayIndex)
                    # add it to the list
                    read_access_result_element_list.append(read_access_result_element)
            # build a read access result
            read_access_result = ReadAccessResult(
                objectIdentifier=objectIdentifier,
                listOfResults=read_access_result_element_list
            )
            if _debug: ReadWritePropertyMultipleServices._debug(" - read_access_result: %r", read_access_result)
            # add it to the list
            read_access_result_list.append(read_access_result)
        # this is a ReadPropertyMultiple ack
        if not resp:
            resp = ReadPropertyMultipleACK(context=apdu)
            resp.listOfReadAccessResults = read_access_result_list
            if _debug: ReadWritePropertyMultipleServices._debug(" - resp: %r", resp)
        # return the result
        self.response(resp)
|
"""
Contains data about certain markup, like HTML tags and external links.
When updating this file, please also update the C tokenizer version:
- mwparserfromhell/parser/ctokenizer/definitions.c
- mwparserfromhell/parser/ctokenizer/definitions.h
"""
# Public API of this definitions module.
__all__ = [
    "get_html_tag",
    "is_parsable",
    "is_visible",
    "is_single",
    "is_single_only",
    "is_scheme",
]

# Maps URI scheme -> whether the scheme is expected to be followed by "//"
# (see is_scheme(): when slashes=False, only schemes mapped to False count).
URI_SCHEMES = {
    # [wikimedia/mediawiki.git]/includes/DefaultSettings.php @ 5c660de5d0
    "bitcoin": False,
    "ftp": True,
    "ftps": True,
    "geo": False,
    "git": True,
    "gopher": True,
    "http": True,
    "https": True,
    "irc": True,
    "ircs": True,
    "magnet": False,
    "mailto": False,
    "mms": True,
    "news": False,
    "nntp": True,
    "redis": True,
    "sftp": True,
    "sip": False,
    "sips": False,
    "sms": False,
    "ssh": True,
    "svn": True,
    "tel": False,
    "telnet": True,
    "urn": False,
    "worldwind": True,
    "xmpp": False,
}

# Tags whose contents must NOT be handed to the wikicode parser.
PARSER_BLACKLIST = [
    # https://www.mediawiki.org/wiki/Parser_extension_tags @ 2020-12-21
    "categorytree",
    "ce",
    "chem",
    "gallery",
    "graph",
    "hiero",
    "imagemap",
    "inputbox",
    "math",
    "nowiki",
    "pre",
    "score",
    "section",
    "source",
    "syntaxhighlight",
    "templatedata",
    "timeline",
]

# Tags whose contents do not render as visible text.
INVISIBLE_TAGS = [
    # https://www.mediawiki.org/wiki/Parser_extension_tags @ 2020-12-21
    "categorytree",
    "gallery",
    "graph",
    "imagemap",
    "inputbox",
    "math",
    "score",
    "section",
    "templatedata",
    "timeline",
]

# Tags that must appear without a closing tag; SINGLE additionally lists
# tags whose closing tag is optional.
SINGLE_ONLY = ["br", "wbr", "hr", "meta", "link", "img"]
SINGLE = SINGLE_ONLY + ["li", "dt", "dd", "th", "td", "tr"]

# Wiki line-start markup characters and their HTML tag equivalents.
MARKUP_TO_HTML = {
    "#": "li",
    "*": "li",
    ";": "dt",
    ":": "dd",
}
def get_html_tag(markup):
    """Return the HTML tag associated with the given wiki-markup.

    Raises KeyError if *markup* is not a key of MARKUP_TO_HTML.
    """
    return MARKUP_TO_HTML[markup]
def is_parsable(tag):
    """Return if the given *tag*'s contents should be passed to the parser.

    The comparison is case-insensitive against PARSER_BLACKLIST.
    """
    return tag.lower() not in PARSER_BLACKLIST
def is_visible(tag):
    """Return whether or not the given *tag* contains visible text.

    The comparison is case-insensitive against INVISIBLE_TAGS.
    """
    return tag.lower() not in INVISIBLE_TAGS
def is_single(tag):
    """Return whether or not the given *tag* can exist without a close tag."""
    return tag.lower() in SINGLE
def is_single_only(tag):
    """Return whether or not the given *tag* must exist without a close tag."""
    return tag.lower() in SINGLE_ONLY
def is_scheme(scheme, slashes=True):
    """Return whether *scheme* is valid for external links.

    With *slashes* (the "scheme://" form) any known scheme is valid; in the
    slash-less form only schemes flagged False in URI_SCHEMES qualify.
    """
    scheme = scheme.lower()
    known = scheme in URI_SCHEMES
    if slashes:
        return known
    return known and not URI_SCHEMES[scheme]
|
from __future__ import print_function
from __future__ import division
from builtins import range
from past.utils import old_div
from coetools import *
sum = add.reduce # Just to make sure
# Input BPZ results file: first command-line argument, forced to .bpz extension.
inbpz = capfile(sys.argv[1], 'bpz')
inroot = inbpz[:-4]
infile = loadfile(inbpz)
# Locate the '##INPUT <catalog>' header line to find the photometry catalog.
for line in infile:
    if line[:7] == '##INPUT':
        incat = line[8:]
        break
# Locate the '##N_PEAKS <n>' header line.
for line in infile:
    if line[:9] == '##N_PEAKS':
        # NOTE(review): line[10] reads a single character, so this only works
        # for single-digit N_PEAKS; also string.atoi/string.split are
        # Python 2 only despite the __future__ imports -- confirm target runtime.
        npeaks = string.atoi(line[10])
        break
outbpz = inroot + '_bpz.cat'
# Column labels depend on how many redshift peaks were recorded.
if npeaks == 1:
    labels = string.split(
        'id zb zbmin zbmax tb odds zml tml chisq')
elif npeaks == 3:
    labels = string.split(
        'id zb zbmin zbmax tb odds zb2 zb2min zb2max tb2 odds2 zb3 zb3min zb3max tb3 odds3 zml tml chisq')
else:
    print('N_PEAKS = %d!?' % npeaks)
    sys.exit(1)
# Nicer names for some BPZ header labels.
labelnicks = {'Z_S': 'zspec', 'M_0': 'M0'}
read = 0
ilabel = 0
# Scan the header block and append every column label past the ninth,
# applying the nickname mapping.
for iline in range(len(infile)):
    line = infile[iline]
    if line[:2] == '##':
        if read:
            break
    else:
        read = 1
    if read == 1:
        ilabel += 1
        label = string.split(line)[-1]
        if ilabel >= 10:
            labels.append(labelnicks.get(label, label))
mybpz = loadvarswithclass(inbpz, labels=labels)
mycat = loadvarswithclass(incat)
# Optional flux-comparison file: derive a reduced chi-square (chisq2) per
# object plus detection/observation counts.
if os.path.exists(inroot + '.flux_comparison'):
    data = loaddata(inroot + '.flux_comparison+')
    #nf = 6
    nf = old_div((len(data) - 5), 3)
    # id M0 zb tb*3
    id = data[0]
    ft = data[5:5 + nf] # FLUX (from spectrum for that TYPE)
    fo = data[5 + nf:5 + 2 * nf] # FLUX (OBSERVED)
    efo = data[5 + 2 * nf:5 + 3 * nf] # FLUX_ERROR (OBSERVED)
    # chisq 2
    # Model-flux error floor of ft/15, combined in quadrature with efo.
    eft = old_div(ft, 15.)
    eft = max(eft) # for each galaxy, take max eft among filters
    ef = sqrt(efo**2 + eft**2) # (6, 18981) + (18981) done correctly
    dfosq = (old_div((ft - fo), ef))**2
    dfosqsum = sum(dfosq)
    detected = greater(fo, 0)
    nfdet = sum(detected)
    observed = less(efo, 1)
    nfobs = sum(observed)
    # DEGREES OF FREEDOM
    dof = clip2(nfobs - 3., 1, None) # 3 params (z, t, a)
    chisq2clip = old_div(dfosqsum, dof)
    sedfrac = divsafe(max(fo - efo), max(ft), -1) # SEDzero
    chisq2 = chisq2clip[:]
    # Sentinel chi-square values for degenerate cases.
    chisq2 = where(less(sedfrac, 1e-10), 900., chisq2)
    chisq2 = where(equal(nfobs, 1), 990., chisq2)
    chisq2 = where(equal(nfobs, 0), 999., chisq2)
    #################################
    #print 'BPZ tb N_PEAKS BUG FIX'
    #mybpz.tb = mybpz.tb + 0.667
    #mybpz.tb2 = where(greater(mybpz.tb2, 0), mybpz.tb2 + 0.667, -1.)
    #mybpz.tb3 = where(greater(mybpz.tb3, 0), mybpz.tb3 + 0.667, -1.)
    # NOTE(review): the adds below must stay inside this if-block, since
    # chisq2/nfdet/nfobs are only defined here -- indentation reconstructed.
    mybpz.add('chisq2', chisq2)
    mybpz.add('nfdet', nfdet)
    mybpz.add('nfobs', nfobs)
# Carry over stellarity / significance / spec-z columns when present.
if 'stel' in mycat.labels:
    mybpz.add('stel', mycat.stel)
elif 'stellarity' in mycat.labels:
    mybpz.add('stel', mycat.stellarity)
if 'maxsigisoaper' in mycat.labels:
    mybpz.add('sig', mycat.maxsigisoaper)
if 'sig' in mycat.labels:
    # NOTE(review): checks mycat for 'sig' but assigns maxsigisoaper --
    # possibly intended mycat.sig; confirm against coetools semantics.
    mybpz.assign('sig', mycat.maxsigisoaper)
if 'zspec' not in mybpz.labels:
    if 'zspec' in mycat.labels:
        mybpz.add('zspec', mycat.zspec)
        print(mycat.zspec)
        if 'zqual' in mycat.labels:
            mybpz.add('zqual', mycat.zqual)
print(mybpz.labels)
mybpz.save(outbpz, maxy=None)
|
import math
import sys
import os
import random
CommonPath = os.path.abspath(os.path.join('..', 'Common'))
sys.path.append(CommonPath)
import tsp
def GenerateInitialPath(tour_length):
    """Return a uniformly random permutation of city indices 0..tour_length-1."""
    initial_tour = list(range(tour_length))
    random.shuffle(initial_tour)
    return initial_tour
MAX_ITERATION = 50000
def reversed_sections(tour):
    '''generator to return all possible variations where the section between two cities are swapped'''
    # assumes tsp.AllEdges yields (i, j) index pairs over the tour -- TODO confirm
    for i,j in tsp.AllEdges(len(tour)):
        if i != j:
            copy=tour[:]
            if i < j:
                # contiguous section: reverse tour[i..j]
                copy[i:j+1]=reversed(tour[i:j+1])
            else:
                # wrap-around section (i > j): reverse tail and head pieces;
                # the two slice assignments change lengths individually but
                # the total list length is preserved after both
                copy[i+1:]=reversed(tour[:j])
                copy[:j]=reversed(tour[i+1:])
            if copy != tour: # no point returning the same tour
                yield copy
def kirkpatrick_cooling(start_temp, alpha):
    """Yield an endless geometric cooling schedule: T0, a*T0, a^2*T0, ..."""
    temperature = start_temp
    while True:
        yield temperature
        temperature *= alpha
def P(prev_score, next_score, temperature):
    """Metropolis acceptance probability for a candidate move.

    Improvements are always accepted (probability 1.0); a worse candidate is
    accepted with probability exp(-|delta| / temperature).
    """
    if next_score > prev_score:
        return 1.0
    delta = abs(next_score - prev_score)
    return math.exp(-delta / temperature)
class ObjectiveFunction:
    """Wrap an objective function and remember the best solution evaluated.

    Attributes:
        best       -- highest-scoring solution seen so far (None before any call)
        best_score -- score of ``best`` (None before any call)
    """
    def __init__(self, objective_function):
        self.objective_function = objective_function
        # nothing evaluated yet
        self.best = None
        self.best_score = None

    def __call__(self, solution):
        """Score *solution*, recording it when it beats the best seen so far."""
        score = self.objective_function(solution)
        improved = self.best is None or score > self.best_score
        if improved:
            self.best, self.best_score = solution, score
        return score
def ApplySimulatedAnnealing(init_function, move_operator, objective_function,
                            max_evaluations, start_temp, alpha):
    """Run simulated annealing until the evaluation budget is exhausted.

    init_function      -- returns an initial solution
    move_operator      -- generator of neighbouring solutions of its argument
    objective_function -- scores a solution (higher is better)
    max_evaluations    -- budget of objective evaluations
    start_temp, alpha  -- geometric cooling parameters

    Returns a (iterationCount, best_score, best) tuple.
    """
    # wrap the objective function (so we record the best solution seen)
    objective_function = ObjectiveFunction(objective_function)
    current = init_function()
    current_score = objective_function(current)
    iterationCount = 1
    cooling_schedule = kirkpatrick_cooling(start_temp, alpha)
    for temperature in cooling_schedule:
        done = False
        # examine moves around our current position
        # (renamed from `next`, which shadowed the builtin)
        for candidate in move_operator(current):
            if iterationCount >= max_evaluations:
                done = True
                break
            candidate_score = objective_function(candidate)
            iterationCount += 1
            # probabilistically accept this solution,
            # always accepting better solutions
            p = P(current_score, candidate_score, temperature)
            # random.random() generates a float uniformly in [0.0, 1.0);
            # P returns a probability in [0.0, 1.0]
            if random.random() < p:
                current = candidate
                current_score = candidate_score
                break
        # see if completely finished
        if done:
            break
    best_score = objective_function.best_score
    best = objective_function.best
    return (iterationCount, best_score, best)
def SolveTSP():
    """Entry point: load city coordinates, anneal a tour, report and plot it."""
    print("Starting to solve travel salesman problem")
    # NOTE(review): "\c" is not a valid escape sequence (Windows-style relative
    # path); a raw string or os.path.join would be cleaner -- left unchanged.
    coordinates = tsp.ReadCoordinatesFromFile(".\cityCoordinates.csv")
    distance_matrix = tsp.ComputeDistanceMatrix(coordinates);
    init_function = lambda: GenerateInitialPath(len(coordinates))
    # Negated tour length so that "higher score" means "shorter tour".
    objective_function = lambda tour: -tsp.ComputeTourLength(distance_matrix, tour)
    start_temp,alpha = 100, 0.995
    iterationCount,best_score,shortestPath = ApplySimulatedAnnealing(init_function, reversed_sections, objective_function, MAX_ITERATION,start_temp,alpha)
    print(iterationCount, best_score, shortestPath);
    tsp.DrawPath(coordinates, shortestPath, "TSP.png");

if __name__ == "__main__":
    SolveTSP();
|
"""django_todo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
    # Django admin interface.
    url(r'^admin/', admin.site.urls),
    # Todo-list application routes (todo/urls.py).
    url(r'^todo/', include('todo.urls')),
    # Authentication/account routes (accounts/urls.py).
    url(r'^accounts/', include('accounts.urls')),
]
|
"""
Example
-------
class SystemSetting(KVModel):
pass
setting = SystemSetting.create(key='foo', value=100)
loaded_setting = SystemSetting.get_by_key('foo')
"""
from django.db import models
from .fields import SerializableField
class KVModel(models.Model):
    """
    An Abstract model that has key and value fields
    key -- Unique CharField of max_length 255
    value -- SerializableField by default could be used to store bool, int,
    float, str, list, dict and date
    """
    # Unique lookup key for the stored value.
    key = models.CharField(max_length=255, unique=True)
    # Serialized payload; may be left empty/NULL.
    value = SerializableField(blank=True, null=True)

    def __unicode__(self):
        # NOTE(review): Python 2 only -- ``unicode`` is undefined on Python 3,
        # where a ``__str__`` implementation would be needed instead.
        return 'KVModel instance: ' + self.key + ' = ' + unicode(self.value)

    @classmethod
    def get_by_key(cls, key):
        """
        A static method that returns a KVModel instance.
        key -- unique key that is used for the search.
        this method will throw a DoesNotExist exception if an object with the
        key provided is not found.
        """
        return cls.objects.get(key=key)

    class Meta:
        # Abstract: no table for KVModel itself; subclasses get their own.
        abstract = True
|
import re
from datetime import datetime
from flask import current_app as app
from flask_jwt import current_identity
from flask_restplus import Namespace, Resource, fields, reqparse
from sqlalchemy.exc import IntegrityError
from packr.models import Message
# Namespace for all /contact endpoints.
api = Namespace('contact',
                description='Operations related to the contact form')

# Swagger model for submitting a contact-form message.
message = api.model('Contact', {
    'email': fields.String(required=True,
                           description='Contact email'),
    'content': fields.String(required=True,
                             description='Message'),
})

# Swagger model for marking a message as handled.
message_id = api.model('ContactCompletion', {
    'id': fields.Integer(required=True,
                         description='id')
})
@api.route('/')
class MessageItem(Resource):
    @api.expect(message)
    @api.response(204, 'Message successfully received.')
    def post(self):
        """Store a new contact-form message.

        Returns 400 for a missing/invalid email or empty content, 409 on an
        integrity error, 500 on any other failure, and 201 with the sender's
        email on success.
        NOTE(review): the decorator advertises 204 but the handler returns
        201 -- confirm which status is intended.
        """
        req_parse = reqparse.RequestParser(bundle_errors=True)
        req_parse.add_argument('email', type=str, required=True,
                               help='No email provided',
                               location='json')
        req_parse.add_argument('content', type=str, required=True,
                               help='No message provided',
                               location='json')
        args = req_parse.parse_args()
        email = args.get('email')
        content = args.get('content')
        # Manual validation on top of reqparse's required-field checks.
        if email == '':
            return {'message': {'email': 'No email provided'}}, 400
        elif not re.match(r"^[A-Za-z0-9.+_-]+@[A-Za-z0-9._-]+\.[a-zA-Z]*$",
                          email):
            return {'message': {'email': 'Invalid email provided'}}, 400
        if content == '':
            return {'message': {'content': 'No content provided'}}, 400
        new_message = Message(email=email,
                              content=content,
                              time=datetime.now())
        try:
            new_message.save()
        except IntegrityError as e:
            print(e)
            return {
                'description': 'Failed to send message.'
            }, 409
        except Exception as e:
            print(e)
            return {'description': 'Server encountered an error.'}, 500
        return {'email': new_message.email}, 201

    def get(self):
        """List all unhandled messages (admin only; open when TESTING)."""
        if not current_identity and not app.config.get('TESTING'):
            return {'message': 'User not authenticated'}, 401
        if app.config.get('TESTING') \
                or current_identity.role.role_name == "ADMIN":
            messages = dict()
            # Only messages not yet marked done are returned.
            for message_row in Message.query.filter_by(done=False).all():
                messages[message_row.id] = {
                    "email": message_row.email,
                    "time": message_row.time.isoformat(),
                    "content": message_row.content
                }
            return messages, 201
        else:
            return {'message': 'Not authorised'}, 401
@api.route('/complete')
class CompleteItem(Resource):
    @api.expect(message_id)
    @api.response(204, 'Message successfully updated.')
    def post(self):
        """Mark the contact message with the given id as done.

        Returns 400 when no id is supplied, 404 when no message with that id
        exists, 409 on an integrity error, 500 on any other failure, and 201
        on success.
        """
        req_parse = reqparse.RequestParser(bundle_errors=True)
        req_parse.add_argument('id', type=int, required=True,
                               help='No id provided',
                               location='json')
        args = req_parse.parse_args()
        id = args.get('id')
        if id == 0:
            return {'message': {'id': 'No id provided'}}, 400
        completed_message = Message.query.filter_by(id=id).first()
        # Bug fix: .first() returns None for an unknown id; the original code
        # then crashed with AttributeError on the next line.
        if completed_message is None:
            return {'message': {'id': 'Unknown id provided'}}, 404
        completed_message.done = True
        try:
            completed_message.save()
        except IntegrityError as e:
            print(e)
            return {
                'description': 'Failed to update message.'
            }, 409
        except Exception as e:
            print(e)
            return {'description': 'Server encountered an error.'}, 500
        return {'message': "Message updated"}, 201
|
from typing import TYPE_CHECKING, Any, Dict, Union
from ansiblelint.rules import AnsibleLintRule
if TYPE_CHECKING:
from typing import Optional
from ansiblelint.file_utils import Lintable
class MercurialHasRevisionRule(AnsibleLintRule):
    """Flag ``hg`` tasks that do not pin an explicit revision."""
    id = 'hg-latest'
    shortdesc = 'Mercurial checkouts must contain explicit revision'
    description = (
        'All version control checkouts must point to '
        'an explicit commit or tag, not just ``latest``'
    )
    severity = 'MEDIUM'
    tags = ['idempotency']
    version_added = 'historic'

    def matchtask(
        self, task: Dict[str, Any], file: 'Optional[Lintable]' = None
    ) -> Union[bool, str]:
        """Match when the hg module is used with revision left at 'default'."""
        action = task['action']
        uses_hg = action['__ansible_module__'] == 'hg'
        revision_is_default = action.get('revision', 'default') == 'default'
        return uses_hg and revision_is_default
|
from setuptools import setup
# Packaging metadata for libipa (IPA = iOS application archive reader).
setup(
    name='libipa',
    version='0.0.6',
    author='Andrew Udvare',
    author_email='audvare@gmail.com',
    packages=['ipa'],
    # Command-line helpers shipped alongside the library.
    scripts=['bin/ipa-unzip-bin', 'bin/ipa-dump-info'],
    url='https://github.com/Tatsh/libipa',
    license='LICENSE.txt',
    description='Library to read IPA files (iOS application archives).',
    test_suite='ipa.test',
    long_description='No description.',
    install_requires=[
        'biplist>=0.7',
        'six>=1.7.3',
    ],
)
|
"""
Support to interface with Sonos players (via SoCo).
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.sonos/
"""
import datetime
import logging
from os import path
import socket
import urllib
import voluptuous as vol
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE, DOMAIN, MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_CLEAR_PLAYLIST,
SUPPORT_SELECT_SOURCE, MediaPlayerDevice)
from homeassistant.const import (
STATE_IDLE, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, STATE_OFF,
ATTR_ENTITY_ID)
from homeassistant.config import load_yaml_config_file
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['SoCo==0.12']

_LOGGER = logging.getLogger(__name__)

# Quiet the chatty third-party loggers.
_SOCO_LOGGER = logging.getLogger('soco')
_SOCO_LOGGER.setLevel(logging.ERROR)
_REQUESTS_LOGGER = logging.getLogger('requests')
_REQUESTS_LOGGER.setLevel(logging.ERROR)

# Feature bitmask advertised to Home Assistant for every Sonos player.
SUPPORT_SONOS = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE |\
    SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA |\
    SUPPORT_SEEK | SUPPORT_CLEAR_PLAYLIST | SUPPORT_SELECT_SOURCE

# Custom service names registered in register_services().
SERVICE_GROUP_PLAYERS = 'sonos_group_players'
SERVICE_UNJOIN = 'sonos_unjoin'
SERVICE_SNAPSHOT = 'sonos_snapshot'
SERVICE_RESTORE = 'sonos_restore'
SERVICE_SET_TIMER = 'sonos_set_sleep_timer'
SERVICE_CLEAR_TIMER = 'sonos_clear_sleep_timer'

# Display names for the selectable input sources.
SUPPORT_SOURCE_LINEIN = 'Line-in'
SUPPORT_SOURCE_TV = 'TV'

ATTR_SLEEP_TIME = 'sleep_time'

# Service-call schemas: all services take entity_id(s); the sleep-timer
# service additionally requires a sleep_time in seconds (< 24h).
SONOS_SCHEMA = vol.Schema({
    ATTR_ENTITY_ID: cv.entity_ids,
})
SONOS_SET_TIMER_SCHEMA = SONOS_SCHEMA.extend({
    vol.Required(ATTR_SLEEP_TIME): vol.All(vol.Coerce(int),
                                           vol.Range(min=0, max=86399))
})

# Module-level registry of SonosDevice instances (mutated by setup_platform).
DEVICES = []
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Sonos platform.

    Two entry paths: a single discovered player (discovery_info), or
    configuration-driven setup via the 'hosts' option / network discovery.
    """
    import soco
    global DEVICES

    if discovery_info:
        player = soco.SoCo(discovery_info)
        # Skip players that already exist from configuration.
        # Bug fix: DEVICES holds SonosDevice objects, so the original
        # ``player.uid in DEVICES`` compared a uid string against device
        # objects and could never match; compare against unique ids instead.
        if player.uid in (device.unique_id for device in DEVICES):
            return True
        if player.is_visible:
            device = SonosDevice(hass, player)
            add_devices([device])
            # Register the custom services once, with the first device.
            if not DEVICES:
                register_services(hass)
            DEVICES.append(device)
            return True
        return False

    players = None
    hosts = config.get('hosts', None)
    if hosts:
        # Support retro compatibility with comma separated list of hosts
        # from config
        hosts = hosts.split(',') if isinstance(hosts, str) else hosts
        players = []
        for host in hosts:
            players.append(soco.SoCo(socket.gethostbyname(host)))

    if not players:
        players = soco.discover(interface_addr=config.get('interface_addr',
                                                          None))
    if not players:
        _LOGGER.warning('No Sonos speakers found.')
        return False

    DEVICES = [SonosDevice(hass, p) for p in players]
    add_devices(DEVICES)
    register_services(hass)
    _LOGGER.info('Added %s Sonos speakers', len(players))
    return True
def register_services(hass):
    """Register all services for sonos devices."""
    descriptions = load_yaml_config_file(
        path.join(path.dirname(__file__), 'services.yaml'))
    # (service name, handler, request schema) triples; registration order
    # matches the original explicit calls.
    service_map = (
        (SERVICE_GROUP_PLAYERS, _group_players_service, SONOS_SCHEMA),
        (SERVICE_UNJOIN, _unjoin_service, SONOS_SCHEMA),
        (SERVICE_SNAPSHOT, _snapshot_service, SONOS_SCHEMA),
        (SERVICE_RESTORE, _restore_service, SONOS_SCHEMA),
        (SERVICE_SET_TIMER, _set_sleep_timer_service, SONOS_SET_TIMER_SCHEMA),
        (SERVICE_CLEAR_TIMER, _clear_sleep_timer_service, SONOS_SCHEMA),
    )
    for service_name, handler, schema in service_map:
        hass.services.register(DOMAIN, service_name, handler,
                               descriptions.get(service_name),
                               schema=schema)
def _apply_service(service, service_func, *service_func_args):
    """Call *service_func* on every targeted device and refresh its state.

    When the service call names entity_ids, only matching devices are
    targeted; otherwise the call applies to every known device.
    """
    entity_ids = service.data.get('entity_id')
    targets = DEVICES
    if entity_ids:
        targets = [device for device in DEVICES
                   if device.entity_id in entity_ids]
    for device in targets:
        service_func(device, *service_func_args)
        device.update_ha_state(True)
def _group_players_service(service):
    """Service handler: group media players, use player as coordinator."""
    _apply_service(service, SonosDevice.group_players)
def _unjoin_service(service):
    """Service handler: unjoin the player from a group."""
    _apply_service(service, SonosDevice.unjoin)
def _snapshot_service(service):
    """Service handler: take a snapshot of the player state."""
    _apply_service(service, SonosDevice.snapshot)
def _restore_service(service):
    """Service handler: restore a previously taken snapshot."""
    _apply_service(service, SonosDevice.restore)
def _set_sleep_timer_service(service):
    """Service handler: set a sleep timer (sleep_time seconds)."""
    _apply_service(service,
                   SonosDevice.set_sleep_timer,
                   service.data[ATTR_SLEEP_TIME])
def _clear_sleep_timer_service(service):
    """Service handler: clear the sleep timer (docstring fixed; it said
    'Set a timer', copy-pasted from the setter)."""
    _apply_service(service,
                   SonosDevice.clear_sleep_timer)
def only_if_coordinator(func):
    """Decorator for coordinator.

    If used as decorator, avoid calling the decorated method if player is not
    a coordinator. If not, a grouped speaker (not in coordinator role) will
    throw soco.exceptions.SoCoSlaveException.
    Also, partially catch exceptions like:
    soco.exceptions.SoCoUPnPException: UPnP Error 701 received:
    Transition not available from <player ip address>
    """
    from functools import wraps

    # functools.wraps preserves the wrapped method's __name__/__doc__,
    # which the original wrapper silently discarded.
    @wraps(func)
    def wrapper(*args, **kwargs):
        """Decorator wrapper."""
        if args[0].is_coordinator:
            from soco.exceptions import SoCoUPnPException
            try:
                func(*args, **kwargs)
            except SoCoUPnPException:
                _LOGGER.error('command "%s" for Sonos device "%s" '
                              'not available in this mode',
                              func.__name__, args[0].name)
        else:
            _LOGGER.debug('Ignore command "%s" for Sonos device "%s" (%s)',
                          func.__name__, args[0].name, 'not coordinator')
    return wrapper
class SonosDevice(MediaPlayerDevice):
"""Representation of a Sonos device."""
# pylint: disable=too-many-arguments
def __init__(self, hass, player):
"""Initialize the Sonos device."""
from soco.snapshot import Snapshot
self.hass = hass
self.volume_increment = 5
self._player = player
self._speaker_info = None
self._name = None
self._coordinator = None
self._media_content_id = None
self._media_duration = None
self._media_image_url = None
self._media_artist = None
self._media_album_name = None
self._media_title = None
self.update()
self.soco_snapshot = Snapshot(self._player)
@property
def should_poll(self):
"""Polling needed."""
return True
def update_sonos(self, now):
"""Update state, called by track_utc_time_change."""
self.update_ha_state(True)
@property
def unique_id(self):
"""Return an unique ID."""
return self._player.uid
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self._status == 'PAUSED_PLAYBACK':
return STATE_PAUSED
if self._status == 'PLAYING':
return STATE_PLAYING
if self._status == 'STOPPED':
return STATE_IDLE
if self._status == 'OFF':
return STATE_OFF
return STATE_UNKNOWN
@property
def is_coordinator(self):
"""Return true if player is a coordinator."""
return self._player.is_coordinator
def update(self):
"""Retrieve latest state."""
self._speaker_info = self._player.get_speaker_info()
self._name = self._speaker_info['zone_name'].replace(
' (R)', '').replace(' (L)', '')
if self.available:
self._status = self._player.get_current_transport_info().get(
'current_transport_state')
trackinfo = self._player.get_current_track_info()
if trackinfo['uri'].startswith('x-rincon:'):
# this speaker is a slave, find the coordinator
# the uri of the track is 'x-rincon:{coordinator-id}'
coordinator_id = trackinfo['uri'][9:]
coordinators = [device for device in DEVICES
if device.unique_id == coordinator_id]
self._coordinator = coordinators[0] if coordinators else None
else:
self._coordinator = None
if not self._coordinator:
mediainfo = self._player.avTransport.GetMediaInfo([
('InstanceID', 0)
])
duration = trackinfo.get('duration', '0:00')
# if the speaker is playing from the "line-in" source, getting
# track metadata can return NOT_IMPLEMENTED, which breaks the
# volume logic below
if duration == 'NOT_IMPLEMENTED':
duration = None
else:
duration = sum(60 ** x[0] * int(x[1]) for x in enumerate(
reversed(duration.split(':'))))
media_image_url = trackinfo.get('album_art', None)
media_artist = trackinfo.get('artist', None)
media_album_name = trackinfo.get('album', None)
media_title = trackinfo.get('title', None)
if media_image_url in ('', 'NOT_IMPLEMENTED', None):
# fallback to asking the speaker directly
media_image_url = \
'http://{host}:{port}/getaa?s=1&u={uri}'.format(
host=self._player.ip_address,
port=1400,
uri=urllib.parse.quote(mediainfo['CurrentURI'])
)
if media_artist in ('', 'NOT_IMPLEMENTED', None):
# if listening to a radio stream the media_artist field
# will be empty and the title field will contain the
# filename that is being streamed
current_uri_metadata = mediainfo["CurrentURIMetaData"]
if current_uri_metadata not in \
('', 'NOT_IMPLEMENTED', None):
# currently soco does not have an API for this
import soco
current_uri_metadata = soco.xml.XML.fromstring(
soco.utils.really_utf8(current_uri_metadata))
md_title = current_uri_metadata.findtext(
'.//{http://purl.org/dc/elements/1.1/}title')
if md_title not in ('', 'NOT_IMPLEMENTED', None):
media_artist = ''
media_title = md_title
self._media_content_id = trackinfo.get('title', None)
self._media_duration = duration
self._media_image_url = media_image_url
self._media_artist = media_artist
self._media_album_name = media_album_name
self._media_title = media_title
else:
self._status = 'OFF'
self._coordinator = None
self._media_content_id = None
self._media_duration = None
self._media_image_url = None
self._media_artist = None
self._media_album_name = None
self._media_title = None
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._player.volume / 100.0
@property
def is_volume_muted(self):
"""Return true if volume is muted."""
return self._player.mute
@property
def media_content_id(self):
"""Content ID of current playing media."""
if self._coordinator:
return self._coordinator.media_content_id
else:
return self._media_content_id
@property
def media_content_type(self):
"""Content type of current playing media."""
return MEDIA_TYPE_MUSIC
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
if self._coordinator:
return self._coordinator.media_duration
else:
return self._media_duration
@property
def media_image_url(self):
"""Image url of current playing media."""
if self._coordinator:
return self._coordinator.media_image_url
else:
return self._media_image_url
@property
def media_artist(self):
"""Artist of current playing media, music track only."""
if self._coordinator:
return self._coordinator.media_artist
else:
return self._media_artist
@property
def media_album_name(self):
"""Album name of current playing media, music track only."""
if self._coordinator:
return self._coordinator.media_album_name
else:
return self._media_album_name
@property
def media_title(self):
"""Title of current playing media."""
if self._player.is_playing_line_in:
return SUPPORT_SOURCE_LINEIN
if self._player.is_playing_tv:
return SUPPORT_SOURCE_TV
if self._coordinator:
return self._coordinator.media_title
else:
return self._media_title
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
if not self.source_list:
# some devices do not allow source selection
return SUPPORT_SONOS ^ SUPPORT_SELECT_SOURCE
return SUPPORT_SONOS
def volume_up(self):
"""Volume up media player."""
self._player.volume += self.volume_increment
def volume_down(self):
"""Volume down media player."""
self._player.volume -= self.volume_increment
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._player.volume = str(int(volume * 100))
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
self._player.mute = mute
def select_source(self, source):
"""Select input source."""
if source == SUPPORT_SOURCE_LINEIN:
self._player.switch_to_line_in()
elif source == SUPPORT_SOURCE_TV:
self._player.switch_to_tv()
@property
def source_list(self):
"""List of available input sources."""
model_name = self._speaker_info['model_name']
if 'PLAY:5' in model_name:
return [SUPPORT_SOURCE_LINEIN]
elif 'PLAYBAR' in model_name:
return [SUPPORT_SOURCE_LINEIN, SUPPORT_SOURCE_TV]
@property
def source(self):
"""Name of the current input source."""
if self._player.is_playing_line_in:
return SUPPORT_SOURCE_LINEIN
if self._player.is_playing_tv:
return SUPPORT_SOURCE_TV
return None
@only_if_coordinator
def turn_off(self):
"""Turn off media player."""
self._player.pause()
def media_play(self):
"""Send play command."""
if self._coordinator:
self._coordinator.media_play()
else:
self._player.play()
def media_pause(self):
"""Send pause command."""
if self._coordinator:
self._coordinator.media_pause()
else:
self._player.pause()
def media_next_track(self):
"""Send next track command."""
if self._coordinator:
self._coordinator.media_next_track()
else:
self._player.next()
def media_previous_track(self):
"""Send next track command."""
if self._coordinator:
self._coordinator.media_previous_track()
else:
self._player.previous()
def media_seek(self, position):
"""Send seek command."""
if self._coordinator:
self._coordinator.media_seek(position)
else:
self._player.seek(str(datetime.timedelta(seconds=int(position))))
def clear_playlist(self):
"""Clear players playlist."""
if self._coordinator:
self._coordinator.clear_playlist()
else:
self._player.clear_queue()
@only_if_coordinator
def turn_on(self):
"""Turn the media player on."""
self._player.play()
def play_media(self, media_type, media_id, **kwargs):
    """
    Send the play_media command to the media player.

    If ATTR_MEDIA_ENQUEUE is True, add `media_id` to the queue instead of
    playing it immediately.
    """
    if self._coordinator:
        # Grouped players delegate playback to their coordinator.
        self._coordinator.play_media(media_type, media_id, **kwargs)
    else:
        if kwargs.get(ATTR_MEDIA_ENQUEUE):
            # Imported lazily so soco is only needed on this code path.
            from soco.exceptions import SoCoUPnPException
            try:
                self._player.add_uri_to_queue(media_id)
            except SoCoUPnPException:
                # Bad URIs are logged and ignored rather than raised.
                _LOGGER.error('Error parsing media uri "%s", '
                              "please check it's a valid media resource "
                              'supported by Sonos', media_id)
        else:
            self._player.play_uri(media_id)
def group_players(self):
    """Group all players under this coordinator."""
    if self._coordinator:
        self._coordinator.group_players()
        return
    # SoCo's party mode joins every zone to this player.
    self._player.partymode()
@only_if_coordinator
def unjoin(self):
    """Unjoin the player from a group."""
    self._player.unjoin()

@only_if_coordinator
def snapshot(self):
    """Snapshot the player."""
    # Captures the current playback state for later restore().
    self.soco_snapshot.snapshot()

@only_if_coordinator
def restore(self):
    """Restore snapshot for the player."""
    # True resumes playback if it was playing at snapshot time.
    self.soco_snapshot.restore(True)

@only_if_coordinator
def set_sleep_timer(self, sleep_time):
    """Set the timer on the player."""
    self._player.set_sleep_timer(sleep_time)

@only_if_coordinator
def clear_sleep_timer(self):
    """Clear the timer on the player."""
    # Passing None cancels any active sleep timer.
    self._player.set_sleep_timer(None)
@property
def available(self):
    """Return True if player is reachable, False otherwise."""
    try:
        # Probe the speaker with a short-lived TCP connection; the
        # context manager guarantees the socket is closed on every path
        # (the old code leaked the socket if close() was never reached).
        with socket.create_connection(
                address=(self._player.ip_address, 1443), timeout=3):
            return True
    except socket.error:
        return False
|
"""
"""
import logging
import time
import hiro
import mock
from flask import Flask, request
from werkzeug.exceptions import BadRequest
from flask_limiter.extension import C, Limiter
from flask_limiter.util import get_remote_address
def test_reset(extension_factory):
    """limiter.reset() clears accumulated hits so requests succeed again."""
    app, limiter = extension_factory({C.DEFAULT_LIMITS: "1 per day"})

    @app.route("/")
    def null():
        return "Hello Reset"

    with app.test_client() as cli:
        cli.get("/")
        # Second request breaches the 1/day default limit.
        assert "1 per 1 day" in cli.get("/").data.decode()
        limiter.reset()
        # After reset the route responds normally once more...
        assert "Hello Reset" == cli.get("/").data.decode()
        # ...and the very next request breaches again.
        assert "1 per 1 day" in cli.get("/").data.decode()
def test_reset_unsupported(extension_factory, memcached_connection):
    """reset() against a storage that cannot reset (memcached) is a no-op."""
    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per day", C.STORAGE_URI: "memcached://localhost:31211"}
    )

    @app.route("/")
    def null():
        return "Hello Reset"

    with app.test_client() as cli:
        cli.get("/")
        assert "1 per 1 day" in cli.get("/").data.decode()
        # no op with memcached but no error raised
        limiter.reset()
        # Limit state is untouched: still breached.
        assert "1 per 1 day" in cli.get("/").data.decode()
def test_combined_rate_limits(extension_factory):
    """A per-route limit overrides the defaults; undecorated routes keep them."""
    app, limiter = extension_factory({C.DEFAULT_LIMITS: "1 per hour; 10 per day"})

    @app.route("/t1")
    @limiter.limit("100 per hour;10/minute")
    def t1():
        return "t1"

    @app.route("/t2")
    def t2():
        return "t2"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            # t1 uses its own generous limits; t2 falls back to 1/hour.
            assert 200 == cli.get("/t1").status_code
            assert 200 == cli.get("/t2").status_code
            assert 429 == cli.get("/t2").status_code
def test_defaults_per_method(extension_factory):
    """DEFAULT_LIMITS_PER_METHOD tracks GET and POST limits independently."""
    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per hour", C.DEFAULT_LIMITS_PER_METHOD: True}
    )

    @app.route("/t1", methods=["GET", "POST"])
    def t1():
        return "t1"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            # Each HTTP method gets its own 1/hour bucket.
            assert 200 == cli.get("/t1").status_code
            assert 429 == cli.get("/t1").status_code
            assert 200 == cli.post("/t1").status_code
            assert 429 == cli.post("/t1").status_code
def test_default_limit_with_exemption(extension_factory):
    """DEFAULT_LIMITS_EXEMPT_WHEN skips limiting for matching requests."""
    def is_backdoor():
        # Requests carrying the backdoor header bypass the limiter.
        return request.headers.get("backdoor") == "true"

    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per hour", C.DEFAULT_LIMITS_EXEMPT_WHEN: is_backdoor}
    )

    @app.route("/t1")
    def t1():
        return "test"

    with hiro.Timeline() as timeline:
        with app.test_client() as cli:
            # Exempt requests never count or breach.
            assert cli.get("/t1", headers={"backdoor": "true"}).status_code == 200
            assert cli.get("/t1", headers={"backdoor": "true"}).status_code == 200
            # Normal requests are limited to 1/hour.
            assert cli.get("/t1").status_code == 200
            assert cli.get("/t1").status_code == 429
            timeline.forward(3600)
            assert cli.get("/t1").status_code == 200
def test_default_limit_with_conditional_deduction(extension_factory):
    """DEFAULT_LIMITS_DEDUCT_WHEN only counts requests matching the predicate."""
    def failed_request(response):
        # Only non-200 responses consume the limit.
        return response.status_code != 200

    app, limiter = extension_factory(
        {C.DEFAULT_LIMITS: "1 per hour", C.DEFAULT_LIMITS_DEDUCT_WHEN: failed_request}
    )

    @app.route("/t1/<path:path>")
    def t1(path):
        if path != "1":
            raise BadRequest()
        return path

    with hiro.Timeline() as timeline:
        with app.test_client() as cli:
            # Successful requests are free.
            assert cli.get("/t1/1").status_code == 200
            assert cli.get("/t1/1").status_code == 200
            # One failure consumes the single allowed hit...
            assert cli.get("/t1/2").status_code == 400
            # ...after which everything is limited.
            assert cli.get("/t1/1").status_code == 429
            assert cli.get("/t1/2").status_code == 429
            timeline.forward(3600)
            assert cli.get("/t1/1").status_code == 200
            assert cli.get("/t1/2").status_code == 400
def test_key_func(extension_factory):
    """A custom key function shares one bucket across different clients."""
    app, limiter = extension_factory()

    @app.route("/t1")
    @limiter.limit("100 per minute", lambda: "test")
    def t1():
        return "test"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            # All requests key to the constant "test", regardless of origin IP.
            for i in range(0, 100):
                assert (
                    200
                    == cli.get(
                        "/t1", headers={"X_FORWARDED_FOR": "127.0.0.2"}
                    ).status_code
                )
            assert 429 == cli.get("/t1").status_code
def test_logging(caplog):
    """A breached limit emits exactly one WARNING log record."""
    app = Flask(__name__)
    limiter = Limiter(app, key_func=get_remote_address)

    @app.route("/t1")
    @limiter.limit("1/minute")
    def t1():
        return "test"

    with app.test_client() as cli:
        assert 200 == cli.get("/t1").status_code
        assert 429 == cli.get("/t1").status_code
    assert len(caplog.records) == 1
    assert caplog.records[0].levelname == "WARNING"
def test_reuse_logging():
    """The limiter's logger can reuse the application's log handlers."""
    app = Flask(__name__)
    app_handler = mock.Mock()
    app_handler.level = logging.INFO
    app.logger.addHandler(app_handler)
    limiter = Limiter(app, key_func=get_remote_address)
    # Attach the app's handlers to the limiter's own logger.
    for handler in app.logger.handlers:
        limiter.logger.addHandler(handler)

    @app.route("/t1")
    @limiter.limit("1/minute")
    def t1():
        return "42"

    with app.test_client() as cli:
        cli.get("/t1")
        cli.get("/t1")
        # Only the single breach (second request) is logged.
        assert app_handler.handle.call_count == 1
def test_disabled_flag(extension_factory):
    """With ENABLED=False neither default nor per-route limits apply."""
    app, limiter = extension_factory(
        config={C.ENABLED: False}, default_limits=["1/minute"]
    )

    @app.route("/t1")
    def t1():
        return "test"

    @app.route("/t2")
    @limiter.limit("10 per minute")
    def t2():
        return "test"

    with app.test_client() as cli:
        # Both routes can be hit past their nominal limits.
        assert cli.get("/t1").status_code == 200
        assert cli.get("/t1").status_code == 200
        for i in range(0, 10):
            assert cli.get("/t2").status_code == 200
        assert cli.get("/t2").status_code == 200
def test_multiple_apps():
    """One Limiter instance can serve two apps with independent state."""
    app1 = Flask(__name__)
    app2 = Flask(__name__)
    limiter = Limiter(default_limits=["1/second"], key_func=get_remote_address)
    limiter.init_app(app1)
    limiter.init_app(app2)

    @app1.route("/ping")
    def ping():
        return "PONG"

    @app1.route("/slowping")
    @limiter.limit("1/minute")
    def slow_ping():
        return "PONG"

    @app2.route("/ping")
    @limiter.limit("2/second")
    def ping_2():
        return "PONG"

    @app2.route("/slowping")
    @limiter.limit("2/minute")
    def slow_ping_2():
        return "PONG"

    with hiro.Timeline().freeze() as timeline:
        # app1: default 1/second on /ping, explicit 1/minute on /slowping.
        with app1.test_client() as cli:
            assert cli.get("/ping").status_code == 200
            assert cli.get("/ping").status_code == 429
            timeline.forward(1)
            assert cli.get("/ping").status_code == 200
            assert cli.get("/slowping").status_code == 200
            timeline.forward(59)
            assert cli.get("/slowping").status_code == 429
            timeline.forward(1)
            assert cli.get("/slowping").status_code == 200
        # app2: explicit 2/second and 2/minute limits.
        with app2.test_client() as cli:
            assert cli.get("/ping").status_code == 200
            assert cli.get("/ping").status_code == 200
            assert cli.get("/ping").status_code == 429
            timeline.forward(1)
            assert cli.get("/ping").status_code == 200
            assert cli.get("/slowping").status_code == 200
            timeline.forward(59)
            assert cli.get("/slowping").status_code == 200
            assert cli.get("/slowping").status_code == 429
            timeline.forward(1)
            assert cli.get("/slowping").status_code == 200
def test_headers_no_breach():
    """Rate-limit headers report the tightest window while unbreached."""
    app = Flask(__name__)
    limiter = Limiter(
        app,
        default_limits=["10/minute"],
        headers_enabled=True,
        key_func=get_remote_address,
    )

    @app.route("/t1")
    def t1():
        return "test"

    @app.route("/t2")
    @limiter.limit("2/second; 5 per minute; 10/hour")
    def t2():
        return "test"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            resp = cli.get("/t1")
            assert resp.headers.get("X-RateLimit-Limit") == "10"
            assert resp.headers.get("X-RateLimit-Remaining") == "9"
            assert resp.headers.get("X-RateLimit-Reset") == str(int(time.time() + 61))
            assert resp.headers.get("Retry-After") == str(60)
            # With multiple limits the headers describe the tightest (2/second).
            resp = cli.get("/t2")
            assert resp.headers.get("X-RateLimit-Limit") == "2"
            assert resp.headers.get("X-RateLimit-Remaining") == "1"
            assert resp.headers.get("X-RateLimit-Reset") == str(int(time.time() + 2))
            assert resp.headers.get("Retry-After") == str(1)
            # current_limit mirrors the header values.
            assert limiter.current_limit.remaining == 1
            assert limiter.current_limit.reset_at == int(time.time() + 2)
            assert not limiter.current_limit.breached
def test_headers_breach():
    """After a breach, headers describe the breached window."""
    app = Flask(__name__)
    limiter = Limiter(
        app,
        default_limits=["10/minute"],
        headers_enabled=True,
        key_func=get_remote_address,
    )

    @app.route("/t1")
    @limiter.limit("2/second; 10 per minute; 20/hour")
    def t():
        return "test"

    with hiro.Timeline().freeze() as timeline:
        with app.test_client() as cli:
            # Consume the 10/minute allowance one request per second.
            for i in range(10):
                resp = cli.get("/t1")
                timeline.forward(1)
                assert len(limiter.current_limits) == 3
                assert all(not limit.breached for limit in limiter.current_limits)
            # Eleventh request breaches the 10/minute window.
            resp = cli.get("/t1")
            timeline.forward(1)
            assert resp.headers.get("X-RateLimit-Limit") == "10"
            assert resp.headers.get("X-RateLimit-Remaining") == "0"
            assert resp.headers.get("X-RateLimit-Reset") == str(int(time.time() + 50))
            assert resp.headers.get("Retry-After") == str(int(50))
            assert limiter.current_limit.remaining == 0
            assert limiter.current_limit.reset_at == int(time.time() + 50)
            assert limiter.current_limit.breached
def test_retry_after():
    """Waiting exactly Retry-After seconds makes the limit pass again."""
    app = Flask(__name__)
    _ = Limiter(
        app,
        default_limits=["1/minute"],
        headers_enabled=True,
        key_func=get_remote_address,
    )

    @app.route("/t1")
    def t():
        return "test"

    with hiro.Timeline().freeze() as timeline:
        with app.test_client() as cli:
            resp = cli.get("/t1")
            retry_after = int(resp.headers.get("Retry-After"))
            assert retry_after > 0
            timeline.forward(retry_after)
            resp = cli.get("/t1")
            assert resp.status_code == 200
def test_retry_after_exists_seconds():
    """A numeric Retry-After set by the view is not clobbered by the limiter."""
    app = Flask(__name__)
    _ = Limiter(
        app,
        default_limits=["1/minute"],
        headers_enabled=True,
        key_func=get_remote_address,
    )

    @app.route("/t1")
    def t():
        # View supplies its own (much larger) Retry-After value.
        return "", 200, {"Retry-After": "1000000"}

    with app.test_client() as cli:
        resp = cli.get("/t1")
        retry_after = int(resp.headers.get("Retry-After"))
        assert retry_after > 1000
def test_retry_after_exists_rfc1123():
    """An RFC 1123 date Retry-After set by the view is preserved (as seconds)."""
    app = Flask(__name__)
    _ = Limiter(
        app,
        default_limits=["1/minute"],
        headers_enabled=True,
        key_func=get_remote_address,
    )

    @app.route("/t1")
    def t():
        # View supplies a far-future HTTP-date Retry-After.
        return "", 200, {"Retry-After": "Sun, 06 Nov 2032 01:01:01 GMT"}

    with app.test_client() as cli:
        resp = cli.get("/t1")
        retry_after = int(resp.headers.get("Retry-After"))
        assert retry_after > 1000
def test_custom_headers_from_config():
    """Header names can be customized via app config keys."""
    app = Flask(__name__)
    app.config.setdefault(C.HEADER_LIMIT, "X-Limit")
    app.config.setdefault(C.HEADER_REMAINING, "X-Remaining")
    app.config.setdefault(C.HEADER_RESET, "X-Reset")
    limiter = Limiter(
        app,
        default_limits=["10/minute"],
        headers_enabled=True,
        key_func=get_remote_address,
    )

    @app.route("/t1")
    @limiter.limit("2/second; 10 per minute; 20/hour")
    def t():
        return "test"

    with hiro.Timeline().freeze() as timeline:
        with app.test_client() as cli:
            # Breach the 10/minute window, then inspect the renamed headers.
            for i in range(11):
                resp = cli.get("/t1")
                timeline.forward(1)
            assert resp.headers.get("X-Limit") == "10"
            assert resp.headers.get("X-Remaining") == "0"
            assert resp.headers.get("X-Reset") == str(int(time.time() + 50))
def test_application_shared_limit(extension_factory):
    """An application-wide limit is shared across all routes."""
    app, limiter = extension_factory(application_limits=["2/minute"])

    @app.route("/t1")
    def t1():
        return "route1"

    @app.route("/t2")
    def t2():
        return "route2"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            # Two hits anywhere exhaust the shared allowance.
            assert cli.get("/t1").status_code == 200
            assert cli.get("/t2").status_code == 200
            assert cli.get("/t1").status_code == 429
def test_callable_default_limit(extension_factory):
    """Default limits may be provided as callables returning a limit string."""
    app, limiter = extension_factory(default_limits=[lambda: "1/minute"])

    @app.route("/t1")
    def t1():
        return "t1"

    @app.route("/t2")
    def t2():
        return "t2"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            # Each route gets its own 1/minute bucket from the callable.
            assert 200 == cli.get("/t1").status_code
            assert 200 == cli.get("/t2").status_code
            assert 429 == cli.get("/t1").status_code
            assert 429 == cli.get("/t2").status_code
def test_callable_application_limit(extension_factory):
    """Application-wide limits may also be provided as callables."""
    app, limiter = extension_factory(application_limits=[lambda: "1/minute"])

    @app.route("/t1")
    def t1():
        return "t1"

    @app.route("/t2")
    def t2():
        return "t2"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            # The single allowance is shared by both routes.
            assert 200 == cli.get("/t1").status_code
            assert 429 == cli.get("/t2").status_code
def test_no_auto_check(extension_factory):
    """With auto_check=False limits only apply once check() is wired up."""
    app, limiter = extension_factory(auto_check=False)

    @app.route("/", methods=["GET", "POST"])
    @limiter.limit("1/second", per_method=True)
    def root():
        return "root"

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            # No enforcement without an explicit check.
            assert 200 == cli.get("/").status_code
            assert 200 == cli.get("/").status_code

    # attach before_request to perform check
    @app.before_request
    def _():
        limiter.check()

    with hiro.Timeline().freeze():
        with app.test_client() as cli:
            assert 200 == cli.get("/").status_code
            assert 429 == cli.get("/").status_code
def test_fail_on_first_breach(extension_factory):
    """fail_on_first_breach=True stops evaluating after the first breach."""
    app, limiter = extension_factory(fail_on_first_breach=True)

    @app.route("/", methods=["GET", "POST"])
    @limiter.limit("1/second", per_method=True)
    @limiter.limit("2/minute", per_method=True)
    def root():
        return "root"

    with hiro.Timeline().freeze() as timeline:
        with app.test_client() as cli:
            assert 200 == cli.get("/").status_code
            assert 429 == cli.get("/").status_code
            # Only the breached 1/second limit was evaluated.
            assert [True] == [k.breached for k in limiter.current_limits]
            timeline.forward(1)
            assert 200 == cli.get("/").status_code
            assert [False, False] == [k.breached for k in limiter.current_limits]
            timeline.forward(1)
            # Now the 2/minute limit breaches after 1/second passes.
            assert 429 == cli.get("/").status_code
            assert [False, True] == [k.breached for k in limiter.current_limits]
def test_no_fail_on_first_breach(extension_factory):
    """fail_on_first_breach=False evaluates every limit even after a breach."""
    app, limiter = extension_factory(fail_on_first_breach=False)

    @app.route("/", methods=["GET", "POST"])
    @limiter.limit("1/second", per_method=True)
    @limiter.limit("2/minute", per_method=True)
    def root():
        return "root"

    with hiro.Timeline().freeze() as timeline:
        with app.test_client() as cli:
            assert 200 == cli.get("/").status_code
            assert 429 == cli.get("/").status_code
            # Both limits were evaluated; only 1/second breached.
            assert [True, False] == [k.breached for k in limiter.current_limits]
            timeline.forward(1)
            assert 429 == cli.get("/").status_code
            assert [False, True] == [k.breached for k in limiter.current_limits]
def test_custom_key_prefix(redis_connection, extension_factory):
    """Distinct key prefixes keep limiters sharing one Redis independent."""
    # Three apps, same Redis backend: prefix via kwarg, via config, and none.
    app1, limiter1 = extension_factory(
        key_prefix="moo", storage_uri="redis://localhost:46379"
    )
    app2, limiter2 = extension_factory(
        {C.KEY_PREFIX: "cow"}, storage_uri="redis://localhost:46379"
    )
    app3, limiter3 = extension_factory(storage_uri="redis://localhost:46379")

    @app1.route("/test")
    @limiter1.limit("1/day")
    def app1_test():
        return "app1 test"

    @app2.route("/test")
    @limiter2.limit("1/day")
    def app2_test():
        return "app1 test"

    @app3.route("/test")
    @limiter3.limit("1/day")
    def app3_test():
        return "app1 test"

    # Each app breaches on its second request despite the shared storage.
    with app1.test_client() as cli:
        resp = cli.get("/test")
        assert 200 == resp.status_code
        resp = cli.get("/test")
        assert 429 == resp.status_code
    with app2.test_client() as cli:
        resp = cli.get("/test")
        assert 200 == resp.status_code
        resp = cli.get("/test")
        assert 429 == resp.status_code
    with app3.test_client() as cli:
        resp = cli.get("/test")
        assert 200 == resp.status_code
        resp = cli.get("/test")
        assert 429 == resp.status_code
def test_second_instance_bypassed_by_shared_g():
    """Two limiters without key prefixes share request state on one app."""
    app = Flask(__name__)
    limiter1 = Limiter(app, key_func=get_remote_address)
    limiter2 = Limiter(app, key_func=get_remote_address)

    @app.route("/test1")
    @limiter2.limit("1/second")
    def app_test1():
        return "app test1"

    @app.route("/test2")
    @limiter1.limit("10/minute")
    @limiter2.limit("1/second")
    def app_test2():
        return "app test2"

    with hiro.Timeline().freeze() as timeline:
        with app.test_client() as cli:
            assert cli.get("/test1").status_code == 200
            assert cli.get("/test2").status_code == 200
            # limiter2's 1/second on /test2 is bypassed by the shared state;
            # only limiter1's 10/minute applies there.
            assert cli.get("/test1").status_code == 429
            assert cli.get("/test2").status_code == 200
            for i in range(8):
                assert cli.get("/test1").status_code == 429
                assert cli.get("/test2").status_code == 200
            assert cli.get("/test2").status_code == 429
            timeline.forward(1)
            assert cli.get("/test1").status_code == 200
            assert cli.get("/test2").status_code == 429
            timeline.forward(59)
            assert cli.get("/test1").status_code == 200
            assert cli.get("/test2").status_code == 200
def test_independent_instances_by_key_prefix():
    """Key prefixes make two limiters on one app enforce independently."""
    app = Flask(__name__)
    limiter1 = Limiter(app, key_prefix="lmt1", key_func=get_remote_address)
    limiter2 = Limiter(app, key_prefix="lmt2", key_func=get_remote_address)

    @app.route("/test1")
    @limiter2.limit("1/second")
    def app_test1():
        return "app test1"

    @app.route("/test2")
    @limiter1.limit("10/minute")
    @limiter2.limit("1/second")
    def app_test2():
        return "app test2"

    with hiro.Timeline().freeze() as timeline:
        with app.test_client() as cli:
            assert cli.get("/test1").status_code == 200
            assert cli.get("/test2").status_code == 200
            # Unlike the shared-state case, limiter2's 1/second limit is
            # enforced on /test2 as well.
            resp = cli.get("/test1")
            assert resp.status_code == 429
            assert "1 per 1 second" in resp.data.decode()
            resp = cli.get("/test2")
            assert resp.status_code == 429
            assert "1 per 1 second" in resp.data.decode()
            for i in range(8):
                assert cli.get("/test1").status_code == 429
                assert cli.get("/test2").status_code == 429
            assert cli.get("/test2").status_code == 429
            timeline.forward(1)
            assert cli.get("/test1").status_code == 200
            assert cli.get("/test2").status_code == 429
            timeline.forward(59)
            assert cli.get("/test1").status_code == 200
            assert cli.get("/test2").status_code == 200
|
class argpasser(object):
    """
    ComEst uses arguments that repeat across many calls; this class bundles
    them into a single reusable argument passer.
    """
    def __init__(self,
                 stamp_size_arcsec=20.0,
                 mag_dict=None,
                 hlr_dict=None,
                 fbulge_dict=None,
                 q_dict=None,
                 pos_ang_dict=None,
                 ngals_arcmin2=15.0,
                 nsimimages=50,
                 ncpu=2,
                 ):
        """
        :param stamp_size_arcsec: Size (in arcsec) of the square stamp on which **GalSim** simulates one single source. By default ``20.0``.
        :param mag_dict: Magnitude range simulated by **GalSim**, as ``{"lo": _value_, "hi": _value_}`` in magnitudes. Default ``{"lo":20.0, "hi":25.0}``.
        :param hlr_dict: Half-light-radius range in arcsec, as ``{"lo": _value_, "hi": _value_}``. Default ``{"lo":0.35, "hi":0.75}``.
        :param fbulge_dict: Bulge-fraction range, as ``{"lo": _value_, "hi": _value_}``; values lie in [0,1] and 1 means no disk light. Default ``{"lo":0.5, "hi":0.9}``.
        :param q_dict: Minor-to-major axis-ratio range, as ``{"lo": _value_, "hi": _value_}``; values lie in [0,1] and ``q = 1`` means spherical. Default ``{"lo":0.4, "hi":1.0}``.
        :param pos_ang_dict: Position-angle range in degrees, as ``{"lo": _value_, "hi": _value_}``; values lie in [0,180], counter-clockwise from +x. Default ``{"lo":0.0, "hi":180.0}``.
        :param ngals_arcmin2: Projected number of simulated sources per square arcmin. Too high causes blending in detection; too low loses statistics. Default ``15.0``.
        :param nsimimages: Number of simulated images saved in the multi-extension file named by ``sims_nameroot``. Default ``50``.
        :param ncpu: Number of CPUs for parallel running; do not exceed the available cores. Default ``2``.
        """
        # BUG FIX: the dict parameters previously used mutable default
        # arguments shared across all instances; ``None`` sentinels give
        # each instance its own dictionary with the same default values.
        if mag_dict is None:
            mag_dict = {"lo": 20.0, "hi": 25.0}
        if hlr_dict is None:
            hlr_dict = {"lo": 0.35, "hi": 0.75}
        if fbulge_dict is None:
            fbulge_dict = {"lo": 0.5, "hi": 0.9}
        if q_dict is None:
            q_dict = {"lo": 0.4, "hi": 1.0}
        if pos_ang_dict is None:
            pos_ang_dict = {"lo": 0.0, "hi": 180.0}
        self.stamp_size_arcsec = float(stamp_size_arcsec)
        self.mag_dict = mag_dict
        self.hlr_dict = hlr_dict
        self.fbulge_dict = fbulge_dict
        self.q_dict = q_dict
        self.pos_ang_dict = pos_ang_dict
        self.ngals_arcmin2 = float(ngals_arcmin2)
        self.nsimimages = int(nsimimages)
        self.ncpu = int(ncpu)
        return

    # i_am function
    def i_am(self):
        """Print the current configuration (Python 2 and 3 compatible)."""
        # Single-argument print(...) works identically under both
        # Python 2 (parenthesized print statement) and Python 3.
        print("# stamp_size_arcsec: {0}".format(self.stamp_size_arcsec))
        print("# mag_dict: {0}".format(self.mag_dict))
        print("# hlr_dict: {0}".format(self.hlr_dict))
        print("# fbulge_dict: {0}".format(self.fbulge_dict))
        print("# q_dict: {0}".format(self.q_dict))
        print("# pos_ang_dict: {0}".format(self.pos_ang_dict))
        print("# ngals_arcmin2: {0}".format(self.ngals_arcmin2))
        print("# nsimimages: {0}".format(self.nsimimages))
        print("# ncpu: {0}".format(self.ncpu))
        return
|
import os
from setuptools import setup, find_packages

# The long description is the README that sits next to this file.
README = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()

# Allow setup.py to be run from any working directory.
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))

setup(
    name='django-email-subscription',
    url='https://github.com/MagicSolutions/django-email-subscription',
    version='0.0.1',
    # BUG FIX: corrected typos "subcription accoutns".
    description='Django app for creating subscription accounts.',
    long_description=README,
    install_requires=[
        'django-simple-captcha>=0.4.2',
    ],
    packages=find_packages(),
    package_data={'': ['LICENSE']},
    include_package_data=True,
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.7',
        'Topic :: Internet :: WWW/HTTP',
    ],
)
|
from pymongo import MongoClient
from pandas import read_csv
from datetime import date

# Connect to the development MongoDB and select the drug collection.
mongodb = MongoClient('192.168.178.82', 9999)
db = mongodb['dev']
drug_collection = db['drug']

# Load the BfArM shortage report and drop the columns we do not import.
drugs = read_csv('~/Dokumente/bfarm_lieferenpass_meldung.csv', delimiter=';', encoding='iso8859_2').to_dict()
drugs.pop('Id', None)
# FIX: 'aktuelle Bescheidart' was popped twice; once suffices.
drugs.pop('aktuelle Bescheidart', None)
drugs.pop('Meldungsart', None)

for x in range(len(drugs['Verkehrsfähig'])):
    # FIX: build a fresh record per row instead of reusing one shared dict.
    data = dict()
    """
    if drugs['Ende Engpass'][x] == '-':
        data['end'] = None
    else:
        day, month, year = drugs['Ende Engpass'][x].split('.')
        data['end'] = date(int(year), int(month), int(day)).__str__()
    if drugs['Beginn Engpass'][x] == '-':
        data['initial_report'] = None
    else:
        day, month, year = drugs['Beginn Engpass'][x].split('.')
        data['initial_report'] = date(int(year), int(month), int(day)).__str__()
    if drugs['Datum der letzten Meldung'][x] == '-':
        data['last_report'] = None
    else:
        day, month, year = drugs['Datum der letzten Meldung'][x].split('.')
        data['last_report'] = date(int(year), int(month), int(day)).__str__()
    """
    data['substance'] = drugs['Wirkstoffe'][x].replace(' ', '').split(';')
    data['enr'] = int(drugs['Enr'][x])
    data['marketability'] = True if drugs['Verkehrsfähig'][x] == 'ja' else False
    data['atc_code'] = drugs['ATC-Code'][x]
    data['pzn'] = int(drugs['PZN'][x].split(' ')[0].replace(';', '')) if drugs['PZN'][x] != '-' else None
    data['drug_title'] = drugs['Arzneimittelbezeichnung'][x]
    data['hospital'] = True if drugs['Krankenhausrelevant'][x] == 'ja' else False
    # Upsert keyed on the registration number (enr).
    drug_collection.update_one({'enr': data['enr']}, {'$set': data}, upsert=True)
|
"""Molt Web API with Interface."""
import re
import redis
import docker
import subprocess
import os
import shlex
import requests
import sys
import argparse
from flask import Flask, Response, render_template, abort, request
from molt import Molt, MoltError
app = Flask(__name__)

# Command line: allow overriding the config file location with -c/--config.
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', help='config file path')
args = parser.parse_args()
if args.config:
    cfg_file = args.config
else:
    cfg_file = 'config/molt_app.cfg'
if not os.path.exists(cfg_file):
    # Abort startup when the configuration file is missing.
    app.logger.error("{} が存在しません".format(cfg_file))
    sys.exit(1)
app.config.from_pyfile(cfg_file, silent=True)
@app.route('/<virtual_host>', strict_slashes=False)
def index(virtual_host):
    """Preview page for a Molt run."""
    try:
        rev, repo, user = virtual_host_parse(virtual_host)
    except Exception:
        # Host names that cannot be split into rev/repo/user are a 404.
        abort(404)
    vhost = dict(rev=rev, repo=repo, user=user, full=virtual_host)
    redirect_url = '//{}.{}/'.format(virtual_host, app.config['BASE_DOMAIN'])
    return render_template('index.html', vhost=vhost,
                           redirect_url=redirect_url)
@app.route('/molt/<virtual_host>', methods=['GET'], strict_slashes=False)
def molt(virtual_host):
    """Stream the execution of Molt (Server-Sent Events API)."""
    try:
        rev, repo, user = virtual_host_parse(virtual_host)
    except Exception:
        abort(404)
    m = Molt(rev, repo, user, app.config['BASE_DOMAIN'],
             app.config['GITHUB_USER'], app.config['GITHUB_TOKEN'])
    r = redis.StrictRedis(host=app.config['REDIS_HOST'],
                          port=app.config['REDIS_PORT'])

    def generate(m, r):
        """Generator that boots the Docker image for streaming.

        Streams the stdout of the whole pipeline from git clone through
        docker-compose up, then records the container's connection info.
        """
        # Run the command pipeline.
        try:
            for row in m.molt():
                row = row.decode()
                # Lines containing only CR are held back, so strip them out.
                data = row.split('\r')[-1]
                yield event_stream_parser(data)
        except MoltError as e:
            yield event_stream_parser(e, event='failure')
        except Exception:
            yield event_stream_parser('Molt内部でエラーが発生しました。終了します...',
                                      event='failure')
        else:
            # Record the virtual-host -> container IP mapping in Redis.
            r.hset('mirror-store', virtual_host, m.get_container_ip())
            yield event_stream_parser('', event='success')
    return Response(generate(m, r), mimetype='text/event-stream')
@app.route('/favicon.ico')
def favicon():
    """favicon.ico."""
    # No favicon is served; answer 404 explicitly.
    abort(404)
@app.template_filter('base_domain')
def base_domain_filter(path):
    """Template filter: prefix *path* with the domain serving static files."""
    prefix = '//{}:{}/'.format(app.config['BASE_DOMAIN'], app.config['PORT'])
    return prefix + path
@app.route("/hook", methods=['POST'])
def hook():
event = request.headers["X-GitHub-Event"]
req = request.json
if event != "pull_request":
return "ok", 200
elif req["action"] not in {"opened", "synchronize"}:
return "ok", 200
pr = req["pull_request"]
pr_url = pr["comments_url"]
pr_sha = pr["head"]["sha"][:7]
pr_reponame = pr["head"]["repo"]["name"]
pr_owner = pr["head"]["repo"]["owner"]["login"]
payload = {
"event": "COMMENT",
"body": "Launched the preview environment!\nhttp://{}.{}.{}.{}\
".format(pr_sha, pr_reponame, pr_owner, app.config["BASE_DOMAIN"]),
}
headers = {
"Accept": "application/vnd.github.v3+json",
"Content-Type": "application/json",
"Authorization": "token {}".format(app.config["GITHUB_TOKEN"]),
}
requests.post(
pr_url,
json=payload,
headers=headers,
)
return "ok", 200
def virtual_host_parse(virtual_host):
    """Split a virtual-host string into its 'rev', 'repo' and 'user' parts.

    e.g.(1) "host.repo.sitory.user" => "host", "repo.sitory", "user"
    e.g.(2) "host.repository.user" => "host", "repository", "user"
    """
    pattern = re.compile(r'(?P<rev>^.+?)\.(?P<repo>.+)\.(?P<user>.+)$')
    matched = pattern.search(virtual_host)
    # A non-matching host raises AttributeError here; callers turn any
    # exception into a 404.
    return matched.group('rev', 'repo', 'user')
def event_stream_parser(data, event=None, id=None, retry=None):
    """Serialize *data* into the Server-Sent Event wire format.

    :param data: payload for the ``data:`` field.
    :param event: optional event name.
    :param id: optional event id.
    :param retry: optional client reconnection time.
    :return: the serialized event, terminated by a blank line.
    """
    event_stream = ''
    if event:
        event_stream += 'event: {}\n'.format(event)
    event_stream += 'data: {}\n'.format(data)
    if id:
        event_stream += 'id: {}\n'.format(id)
    if retry:
        # BUG FIX: the retry field previously serialized ``id`` instead of
        # ``retry``.
        event_stream += 'retry: {}\n'.format(retry)
    event_stream += '\n'
    return event_stream
if __name__ == '__main__':
    # Generate an RSA deploy key if one does not exist yet.
    user = os.getenv('USER')
    ssh_key_path = os.path.expanduser("~")+"/.ssh/molt_deploy_key"
    if not os.path.exists(ssh_key_path):
        command = 'ssh-keygen -t rsa -N "" -f {}'.format(ssh_key_path)
        command = shlex.split(command)
        subprocess.Popen(command)
    # Create the Docker network used by the Molt containers.
    clinet = docker.from_env()  # NOTE(review): name looks like a typo for "client"
    networks = clinet.networks.list()
    if 'molt-network' not in [network.name for network in networks]:
        command = 'docker network create --subnet=172.31.255.0/24 \
--ip-range=172.31.255.0/24 --gateway=172.31.255.254 \
-o "com.docker.network.bridge.host_binding_ipv4"="0.0.0.0" \
molt-network'
        command = shlex.split(command)
        subprocess.Popen(command)
    app.run(host=app.config['HOST'], port=app.config['PORT'])
|
from django.contrib import admin
try:
    from django.contrib.auth import get_permission_codename
except ImportError:  # pragma: no cover
    # Django < 1.6 has no get_permission_codename; replicate its behavior.
    def get_permission_codename(action, opts):
        return '%s_%s' % (action, opts.object_name.lower())
class ObjectPermissionsModelAdminMixin(object):
    """Admin mixin that checks change/delete permissions per object."""

    def has_change_permission(self, request, obj=None):
        opts = self.opts
        perm = '%s.%s' % (opts.app_label, get_permission_codename('change', opts))
        return request.user.has_perm(perm, obj)

    def has_delete_permission(self, request, obj=None):
        opts = self.opts
        perm = '%s.%s' % (opts.app_label, get_permission_codename('delete', opts))
        return request.user.has_perm(perm, obj)
class ObjectPermissionsInlineModelAdminMixin(ObjectPermissionsModelAdminMixin):
    """Inline-admin mixin resolving permissions for auto-created through models."""

    def has_change_permission(self, request, obj=None):  # pragma: no cover
        opts = self.opts
        if opts.auto_created:
            # For an auto-created through model, check permission on the
            # related model that is not the inline's parent.
            related = next(
                (field.rel.to for field in opts.fields
                 if field.rel and field.rel.to != self.parent_model),
                None)
            if related is not None:
                opts = related._meta
        codename = get_permission_codename('change', opts)
        return request.user.has_perm('%s.%s' % (opts.app_label, codename), obj)

    def has_delete_permission(self, request, obj=None):  # pragma: no cover
        # Deleting a row of an auto-created through model amounts to
        # changing the relation on the related object.
        if self.opts.auto_created:
            return self.has_change_permission(request, obj)
        return super(ObjectPermissionsInlineModelAdminMixin, self).has_delete_permission(request, obj)
class ObjectPermissionsModelAdmin(ObjectPermissionsModelAdminMixin, admin.ModelAdmin):
    # ModelAdmin with per-object change/delete permission checks.
    pass

class ObjectPermissionsStackedInline(ObjectPermissionsInlineModelAdminMixin, admin.StackedInline):
    # StackedInline with per-object permission checks.
    pass

class ObjectPermissionsTabularInline(ObjectPermissionsInlineModelAdminMixin, admin.TabularInline):
    # TabularInline with per-object permission checks.
    pass
|
"""
Production Configurations
- Use Redis for cache
"""
from __future__ import absolute_import, unicode_literals
from boto.s3.connection import OrdinaryCallingFormat
from django.utils import six
from .common import * # noqa
SECRET_KEY = env('DJANGO_SECRET_KEY')
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True)
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['example.com'])
INSTALLED_APPS += ('gunicorn', )
INSTALLED_APPS += (
'storages',
)
AWS_EXPIRY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY))
}
from storages.backends.s3boto import S3BotoStorage
StaticRootS3BotoStorage = lambda: S3BotoStorage(location='static')
MediaRootS3BotoStorage = lambda: S3BotoStorage(location='media')
DEFAULT_FILE_STORAGE = 'config.settings.production.MediaRootS3BotoStorage'
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader',
['django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader', ]),
]
DATABASES['default'] = env.db('DATABASE_URL')
REDIS_LOCATION = '{0}/{1}'.format(env('REDIS_URL', default='redis://127.0.0.1:6379'), 0)
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': REDIS_LOCATION,
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'IGNORE_EXCEPTIONS': True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
ADMIN_URL = env('DJANGO_ADMIN_URL')
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.console.EmailBackend')
|
"""
getname
~~~~~~~
Get popular cat/dog/superhero/supervillain names.
:copyright: (c) 2015 by lord63.
:license: MIT, see LICENSE for more details.
"""
from getname.main import random_name
# Package metadata consumed by setup tooling and `getname.__version__` users.
__title__ = "getname"
__version__ = '0.1.1'
__author__ = "lord63"
__license__ = "MIT"
__copyright__ = "Copyright 2015 lord63"
|
from cse.util import Util
from collections import OrderedDict
from cse.pipeline import Handler
class WpApiParser(Handler):
    """Pipeline handler that flattens a nested comment tree (as returned by
    the "wp" comment API — see the KeyError note below) into an OrderedDict
    keyed by comment id, preserving parent links and like counts.
    """

    def __init__(self):
        # BUG FIX: the original called `super()`, which merely creates a
        # proxy object and discards it; the base Handler initializer was
        # never run. Call it properly.
        super().__init__()

    def parse(self, comments, url, assetId, parentId):
        """Build the full result dict for one article's comment tree."""
        data = self.__buildDataSkeleton(url, assetId)
        data["comments"] = self.__iterateComments(comments, parentId)
        return data

    def __buildDataSkeleton(self, url, assetId):
        """Return the top-level dict holding article metadata; the comments
        slot is filled in by parse()."""
        return {
            "article_url" : url,
            "article_id" : assetId,
            "comments" : None
        }

    def __iterateComments(self, comments, parentId=None):
        """Recursively flatten `comments` (and their replies) into a single
        OrderedDict of id -> comment dict, tagging each with its parent id."""
        commentList = OrderedDict()
        for comment in comments:
            # Extract the like count from the action summaries, if present.
            votes = 0
            for action_summary in comment["action_summaries"]:
                if action_summary["__typename"] == "LikeActionSummary":
                    votes = action_summary["count"]
            commentObject = {
                "comment_author": comment["user"]["username"],
                "comment_text" : comment["body"],
                "timestamp" : comment["created_at"],
                "parent_comment_id" : parentId,
                "upvotes" : votes,
                # The source API exposes no dislike counter.
                "downvotes": 0
            }
            commentList[comment["id"]] = commentObject
            try:
                commentReplies = self.__iterateComments(comment["replies"]["nodes"], comment["id"])
            except KeyError: # There may be a limit of the nesting level of comments on wp
                commentReplies = {}
            commentList.update(commentReplies)
        return commentList

    # inherited from cse.pipeline.Handler
    def registeredAt(self, ctx):
        """Handler hook; nothing to set up."""
        pass

    def process(self, ctx, data):
        """Handler hook: parse the incoming payload and forward the result."""
        result = self.parse(
            comments=data["comments"],
            url=data["url"],
            assetId=data["assetId"],
            parentId=data["parentId"]
        )
        ctx.write(result)
|
"""Test framework for syscoin utils.
Runs automatically during `make check`.
Can also be run manually."""
import argparse
import binascii
import configparser
import difflib
import json
import logging
import os
import pprint
import subprocess
import sys
def main():
    """Read config.ini, parse CLI flags, configure logging, and run the
    syscoin-util test suite found in the source tree."""
    config = configparser.ConfigParser()
    # Preserve option case (optionxform lowercases by default).
    config.optionxform = str
    config_path = os.path.join(os.path.dirname(__file__), "../config.ini")
    # BUG FIX: use a context manager so the config file handle is closed
    # (the original passed a bare open() result that was never closed).
    with open(config_path, encoding="utf8") as config_file:
        config.read_file(config_file)
    env_conf = dict(config.items('environment'))

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()

    # Verbose runs log everything; otherwise only errors are reported.
    level = logging.DEBUG if args.verbose else logging.ERROR
    formatter = '%(asctime)s - %(levelname)s - %(message)s'
    # Add the format/level to the logger
    logging.basicConfig(format=formatter, level=level)

    bctester(os.path.join(env_conf["SRCDIR"], "test", "util", "data"), "syscoin-util-test.json", env_conf)
def bctester(testDir, input_basename, buildenv):
    """Load and parse the JSON test-vector file, run all tests, and report
    results; exits 0 on success, 1 if any test case failed."""
    input_filename = os.path.join(testDir, input_basename)
    # BUG FIX: context manager so the input file handle is not leaked.
    with open(input_filename, encoding="utf8") as f:
        input_data = json.load(f)

    failed_testcases = []
    for testObj in input_data:
        try:
            bctest(testDir, testObj, buildenv)
            logging.info("PASSED: " + testObj["description"])
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # abort the run instead of being recorded as test failures.
            logging.info("FAILED: " + testObj["description"])
            failed_testcases.append(testObj["description"])

    if failed_testcases:
        error_message = "FAILED_TESTCASES:\n"
        error_message += pprint.pformat(failed_testcases, width=400)
        logging.error(error_message)
        sys.exit(1)
    else:
        sys.exit(0)
def bctest(testDir, testObj, buildenv):
    """Runs a single test, comparing output and RC to expected output and RC.

    Raises an error if input can't be read, executable fails, or output/RC
    are not as expected. Error is caught by bctester() and reported.
    """
    # Get the exec names and arguments
    execprog = os.path.join(buildenv["BUILDDIR"], "src", testObj["exec"] + buildenv["EXEEXT"])
    execargs = testObj['args']
    execrun = [execprog] + execargs
    # Read the input data (if there is any)
    stdinCfg = None
    inputData = None
    if "input" in testObj:
        filename = os.path.join(testDir, testObj["input"])
        inputData = open(filename, encoding="utf8").read()
        stdinCfg = subprocess.PIPE
    # Read the expected output data (if there is any)
    outputFn = None
    outputData = None
    outputType = None
    if "output_cmp" in testObj:
        outputFn = testObj['output_cmp']
        outputType = os.path.splitext(outputFn)[1][1:] # output type from file extension (determines how to compare)
        # NOTE(review): bare `except:` below re-raises immediately, so the
        # damage is limited, but it should still be `except OSError:`.
        try:
            outputData = open(os.path.join(testDir, outputFn), encoding="utf8").read()
        except:
            logging.error("Output file " + outputFn + " can not be opened")
            raise
        if not outputData:
            logging.error("Output data missing for " + outputFn)
            raise Exception
        if not outputType:
            logging.error("Output file %s does not have a file extension" % outputFn)
            raise Exception
    # Run the test
    proc = subprocess.Popen(execrun, stdin=stdinCfg, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
    try:
        outs = proc.communicate(input=inputData)
    except OSError:
        logging.error("OSError, Failed to execute " + execprog)
        raise
    if outputData:
        data_mismatch, formatting_mismatch = False, False
        # Parse command output and expected output
        try:
            a_parsed = parse_output(outs[0], outputType)
        except Exception as e:
            logging.error('Error parsing command output as %s: %s' % (outputType, e))
            raise
        try:
            b_parsed = parse_output(outputData, outputType)
        except Exception as e:
            logging.error('Error parsing expected output %s as %s: %s' % (outputFn, outputType, e))
            raise
        # Compare data
        if a_parsed != b_parsed:
            logging.error("Output data mismatch for " + outputFn + " (format " + outputType + ")")
            data_mismatch = True
        # Compare formatting
        if outs[0] != outputData:
            error_message = "Output formatting mismatch for " + outputFn + ":\n"
            error_message += "".join(difflib.context_diff(outputData.splitlines(True),
                                                          outs[0].splitlines(True),
                                                          fromfile=outputFn,
                                                          tofile="returned"))
            logging.error(error_message)
            formatting_mismatch = True
        # Both the parsed data and the literal formatting must match.
        assert not data_mismatch and not formatting_mismatch
    # Compare the return code to the expected return code
    wantRC = 0
    if "return_code" in testObj:
        wantRC = testObj['return_code']
    if proc.returncode != wantRC:
        logging.error("Return code mismatch for " + outputFn)
        raise Exception
    if "error_txt" in testObj:
        want_error = testObj["error_txt"]
        # Compare error text
        # TODO: ideally, we'd compare the strings exactly and also assert
        # That stderr is empty if no errors are expected. However, syscoin-tx
        # emits DISPLAY errors when running as a windows application on
        # linux through wine. Just assert that the expected error text appears
        # somewhere in stderr.
        if want_error not in outs[1]:
            logging.error("Error mismatch:\n" + "Expected: " + want_error + "\nReceived: " + outs[1].rstrip())
            raise Exception
def parse_output(a, fmt):
    """Parse the output according to specified format.
    Raise an error if the output can't be parsed."""
    if fmt == 'json':
        # json: compare parsed data
        return json.loads(a)
    if fmt == 'hex':
        # hex: parse and compare binary data
        return binascii.a2b_hex(a.strip())
    raise NotImplementedError("Don't know how to compare %s" % fmt)
# Entry point: runs under `make check` and when invoked manually.
if __name__ == '__main__':
    main()
|
from __future__ import print_function
import sys, os
def print_error(*args, **kwargs):
    """Like print(), but write to stderr instead of stdout."""
    return print(*args, file=sys.stderr, **kwargs)
# Require exactly one argument: the PDB id of the structure to prepare.
if len(sys.argv) != 2:
    print_error("Usage: python pre_NAMD.py $PDBID")
    sys.exit(-1)
# The tcl helper scripts live in a tcl/ directory next to this file.
mypath = os.path.realpath(__file__)
tclpath = os.path.split(mypath)[0] + os.path.sep + 'tcl' + os.path.sep
pdbid = sys.argv[1]
logfile = pdbid+'.log'
# Hard-coded VMD binary inside the macOS app bundle mounted from the DMG.
vmd = "/Volumes/VMD-1.9.2/VMD 1.9.2.app/Contents/vmd/vmd_MACOSXX86"
print("Input: "+pdbid+".pdb")
print("Remove water..")
# Each step runs a tcl script through VMD in text mode, passing the PDB id.
# The first step truncates the log ('>'); subsequent steps append ('>>').
cmdline = '\"'+ vmd + '\"' +' -dispdev text -eofexit < '+ tclpath + 'remove_water.tcl' + ' ' + '-args' + ' '+ pdbid +'> '+ logfile
os.system(cmdline)
print("Create PSF file...")
cmdline = '\"'+ vmd + '\"' +' -dispdev text -eofexit < '+ tclpath + 'create_psf.tcl' + ' ' + '-args' + ' '+ pdbid +'>> '+ logfile
os.system(cmdline)
print("Build water box...")
cmdline = '\"'+ vmd + '\"' +' -dispdev text -eofexit < '+ tclpath + 'build_water_box.tcl' + ' ' + '-args' + ' '+ pdbid +'>> '+ logfile
os.system(cmdline)
print("Add ions...")
cmdline = '\"'+ vmd + '\"' +' -dispdev text -eofexit < '+ tclpath + 'add_ion.tcl' + ' ' + '-args' + ' '+ pdbid +'>> '+ logfile
os.system(cmdline)
print("Calculate center coordinates...")
cmdline = '\"'+ vmd + '\"' +' -dispdev text -eofexit < '+ tclpath + 'get_center.tcl' + ' ' + '-args' + ' '+ pdbid +'>> '+ logfile
os.system(cmdline)
print("Finish!")
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the StopSequence model, link Trip to it via a nullable FK,
    and enforce uniqueness of (sequence_hash, route)."""
    dependencies = [
        ('busshaming', '0013_auto_20170917_0502'),
    ]
    operations = [
        migrations.CreateModel(
            name='StopSequence',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sequence_hash', models.CharField(max_length=64)),
                ('stop_sequence', models.TextField()),
                ('length', models.SmallIntegerField()),
                ('trip_headsign', models.CharField(blank=True, max_length=200, null=True)),
                ('trip_short_name', models.CharField(blank=True, max_length=200, null=True)),
                ('direction', models.SmallIntegerField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('route', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='busshaming.Route')),
            ],
        ),
        migrations.AddField(
            model_name='trip',
            name='stop_sequence',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='busshaming.StopSequence'),
        ),
        migrations.AlterUniqueTogether(
            name='stopsequence',
            unique_together=set([('sequence_hash', 'route')]),
        ),
    ]
|
"""
Poisson time series penalised likelihood regression
via the Berman Turner device
"""
from . import weighted_linear_model
from . import design_nonlattice as design
from math import ceil
import numpy as np
from importlib import reload
design = reload(design)
class NonLatticeOneShot:
    """
    the simplest device.
    Uses a stepwise-constant quadrature rule and non-adaptive
    smoothing.
    """
    def __init__(
            self,
            positive=True,
            normalize=False,
            wlm=None,
            wlm_factory='WeightedLassoLarsCV',
            cum_interp='linear',
            smoothing=1.0, # only for spline smoother
            step_size=0.25, # only for dirac interpolant
            strategy='random', # only for dirac interpolant
            *args, **kwargs):
        """Set up the weighted linear model and interpolation options.

        wlm: a ready-made weighted-linear-model instance; if None, one is
        built from `wlm_factory` (a class, or its name looked up on the
        `weighted_linear_model` module). Extra *args/**kwargs go to the
        factory.
        """
        if wlm is None:
            # Allow reference by class for easy serialization
            if isinstance(wlm_factory, str):
                wlm_factory = getattr(weighted_linear_model, wlm_factory)
            self.wlm = wlm_factory(
                positive=positive,
                normalize=normalize,
                *args, **kwargs
            )
        else:
            self.wlm = wlm
        self.big_n_hat_ = None
        self.cum_interp = cum_interp
        self.smoothing = smoothing
        self.strategy = strategy
        self.step_size = step_size
    def fit(
            self,
            obs_t,
            cum_obs,
            basis_lag=1.0,
            penalty_weight='adaptive',
            sample_weight='bermanturner',
            max_basis_span=float('inf'),
            big_n_hat=None,
            *args, **kwargs):
        """Build a stepwise design from observation times `obs_t` and
        cumulative counts `cum_obs`, then fit the weighted linear model.
        """
        self.obs_t_ = obs_t
        self.cum_obs_ = cum_obs
        if np.isscalar(basis_lag):
            # scalars are a bin width
            basis_span = min(
                (np.amax(obs_t) - np.amin(obs_t))/2.0,
                max_basis_span
            )
            n_bins = ceil(basis_span/basis_lag)
            self.basis_lag_ = np.arange(n_bins+1) * basis_lag
        else:
            self.basis_lag_ = basis_lag
        # NOTE(review): a caller-supplied `big_n_hat` is never stored; only
        # the None branch assigns self.big_n_hat_. Confirm whether an
        # `else: self.big_n_hat_ = big_n_hat` was intended.
        if big_n_hat is None:
            self.big_n_hat_ = self.predict_big_n()
        (
            self.inc_predictors_,
            self.inc_response_,
            self.inc_sample_weight_
        ) = (
            design.design_stepwise(
                obs_t=self.obs_t_,
                cum_obs=self.cum_obs_,
                basis_lag=self.basis_lag_,
                big_n_hat=self.big_n_hat_,
                sample_weight=sample_weight
            )
        )
        self.wlm.fit(
            X=self.inc_predictors_,
            y=self.inc_response_,
            sample_weight=self.inc_sample_weight_,
            penalty_weight=penalty_weight,
            *args, **kwargs
        )
    def predict_intensity(self, obs_t=None):
        """
        This should return forward-predicted intensity
        based on the fitted histogram, up to the last observations
        before the given times.
        """
        # NOTE(review): identical to predict(); presumably one of the two
        # should differ (intensity vs increment) — confirm.
        return design.predict_increment(
            big_n=self.big_n_hat_,
            obs_t=obs_t if obs_t is not None else self.obs_t_,
            mu=self.intercept_,
            basis_lag=self.basis_lag_,
            coef=self.coef_)
    def predict(self, obs_t=None):
        """
        This should return predicted increments
        based on the fitted histogram, up to the last observations
        before the given times.
        """
        return design.predict_increment(
            big_n=self.big_n_hat_,
            obs_t=obs_t if obs_t is not None else self.obs_t_,
            mu=self.intercept_,
            basis_lag=self.basis_lag_,
            coef=self.coef_)
    def predict_big_n(self, obs_t=None):
        """
        This should return predicted increment interpolant
        based on the fitted histogram, up to the last observations
        before the given times.
        """
        # NOTE(review): the `obs_t` parameter is unused here; interpolation
        # always uses the fitted observations — confirm.
        return design.interpolate(
            obs_t=self.obs_t_,
            cum_obs=self.cum_obs_,
            cum_interp=self.cum_interp,
            smoothing=self.smoothing,
            step_size=self.step_size,
            strategy=self.strategy,
        )
    @property
    def coef_(self):
        # Fitted basis coefficients, delegated to the inner linear model.
        return self.wlm.coef_
    @property
    def eta_(self):
        # Total mass of the fitted kernel (sum of coefficients).
        return np.sum(self.coef_)
    @property
    def intercept_(self):
        # Background rate, delegated to the inner linear model.
        return self.wlm.intercept_
    @property
    def alpha_(self):
        # Regularization strength chosen by the inner linear model.
        return self.wlm.alpha_
    @property
    def n_iter_(self):
        # Iteration count reported by the inner linear model.
        return self.wlm.n_iter_
class NonLatticeIterative(NonLatticeOneShot):
    """
    repeatedly forward-smooth to find optimal interpolant.
    TODO: This doesn't do backwards losses
    """
    def __init__(
            self,
            *args, **kwargs):
        # Force a dirac interpolant with random placement; everything else
        # is inherited from NonLatticeOneShot.
        super().__init__(
            cum_interp='dirac',
            strategy='random',
            *args, **kwargs)
    def fit(
            self,
            obs_t,
            cum_obs,
            basis_lag=1.0,
            penalty_weight='adaptive',
            sample_weight='bermanturner',
            max_basis_span=float('inf'),
            big_n_hat=None,
            max_iter=3,
            *args, **kwargs):
        """Alternate between fitting an inner one-shot model (on a linear
        interpolant) and re-weighting the dirac interpolant, `max_iter`
        times. Parameters mirror NonLatticeOneShot.fit plus `max_iter`.
        """
        # The inner model shares our linear model but works on the
        # linearly-interpolated representation.
        inner_model = NonLatticeOneShot(
            wlm=self.wlm,
            cum_interp='linear',
        )
        self.inner_model = inner_model
        self.obs_t_ = obs_t
        self.cum_obs_ = cum_obs
        if np.isscalar(basis_lag):
            # scalars are a bin width
            basis_span = min(
                (np.amax(obs_t) - np.amin(obs_t))/2.0,
                max_basis_span
            )
            n_bins = ceil(basis_span/basis_lag)
            self.basis_lag_ = np.arange(n_bins+1) * basis_lag
        else:
            self.basis_lag_ = basis_lag
        # NOTE(review): as in the parent, a caller-supplied big_n_hat is
        # ignored (only the None branch assigns) — confirm intent.
        if big_n_hat is None:
            self.big_n_hat_ = self.predict_big_n()
        for i in range(max_iter):
            # (removed a leftover debug print of the iteration counter)
            inner_model.fit(
                obs_t=self.big_n_hat_.spike_lattice,
                cum_obs=self.big_n_hat_.spike_cum_weight,
                *args,
                **kwargs)
            n_hat_arr = inner_model.predict(
                obs_t=self.big_n_hat_.spike_lattice,
            )
            self.big_n_hat_ = design.reweight_dirac_interpolant(
                self.big_n_hat_,
                n_hat_arr
            )
|
"""useful context managers"""
from contextlib import suppress
with suppress(ModuleNotFoundError):
from lag import *
import os
import contextlib
def clog(*args, condition=True, log_func=print, **kwargs):
    """Conditional log: forward *args/**kwargs to `log_func` when
    `condition` is truthy; returns log_func's result (else None)."""
    if condition:
        return log_func(*args, **kwargs)


@contextlib.contextmanager
def cd(newdir, verbose=True):
    """Change your working directory, do stuff, and change back to the original"""
    # BUG FIX: the original built _clog with `partial`, which is never
    # imported in this module (NameError the first time cd() runs with
    # verbose logging). A closure needs no import.
    def _clog(*args, **kwargs):
        return clog(*args, condition=verbose, log_func=print, **kwargs)

    prevdir = os.getcwd()
    os.chdir(os.path.expanduser(newdir))
    try:
        _clog(f'cd {newdir}')
        yield
    finally:
        # Always restore the previous directory, even on error.
        _clog(f'cd {prevdir}')
        os.chdir(prevdir)
|
import os
import warnings
from pymatgen.core.structure import Structure
from pymatgen.core.units import Ha_to_eV, bohr_to_ang
from pymatgen.io.abinit.abiobjects import *
from pymatgen.util.testing import PymatgenTest
class LatticeFromAbivarsTest(PymatgenTest):
    """Tests for building lattices/structures from Abinit variables."""
    def test_rprim_acell(self):
        """Lattices built from (acell, rprim) and (acell, angdeg) agree."""
        l1 = lattice_from_abivars(acell=3 * [10], rprim=np.eye(3))
        # Abinit lengths are in Bohr; the volume converts to Angstrom^3.
        self.assertAlmostEqual(l1.volume, bohr_to_ang ** 3 * 1000)
        assert l1.angles == (90, 90, 90)
        l2 = lattice_from_abivars(acell=3 * [10], angdeg=(90, 90, 90))
        assert l1 == l2
        l2 = lattice_from_abivars(acell=3 * [8], angdeg=(60, 60, 60))
        # Reference rprimd matrix (Bohr) for acell=8, angdeg=60.
        abi_rprimd = (
            np.reshape(
                [
                    4.6188022,
                    0.0000000,
                    6.5319726,
                    -2.3094011,
                    4.0000000,
                    6.5319726,
                    -2.3094011,
                    -4.0000000,
                    6.5319726,
                ],
                (3, 3),
            )
            * bohr_to_ang
        )
        self.assertArrayAlmostEqual(l2.matrix, abi_rprimd)
        l3 = lattice_from_abivars(acell=[3, 6, 9], angdeg=(30, 40, 50))
        # Reference rprimd matrix (Bohr) for mixed acell/angdeg values.
        abi_rprimd = (
            np.reshape(
                [
                    3.0000000,
                    0.0000000,
                    0.0000000,
                    3.8567257,
                    4.5962667,
                    0.0000000,
                    6.8944000,
                    4.3895544,
                    3.7681642,
                ],
                (3, 3),
            )
            * bohr_to_ang
        )
        self.assertArrayAlmostEqual(l3.matrix, abi_rprimd)
        # Mixing angdeg with rprim, or passing non-positive angles, must fail.
        with self.assertRaises(ValueError):
            lattice_from_abivars(acell=[1, 1, 1], angdeg=(90, 90, 90), rprim=np.eye(3))
        with self.assertRaises(ValueError):
            lattice_from_abivars(acell=[1, 1, 1], angdeg=(-90, 90, 90))
    def test_znucl_typat(self):
        """Test the order of typat and znucl in the Abinit input and enforce_typat, enforce_znucl."""
        # Ga Ga1 1 0.33333333333333 0.666666666666667 0.500880 1.0
        # Ga Ga2 1 0.66666666666667 0.333333333333333 0.000880 1.0
        # N N3 1 0.333333333333333 0.666666666666667 0.124120 1.0
        # N N4 1 0.666666666666667 0.333333333333333 0.624120 1.0
        gan = Structure.from_file(os.path.join(PymatgenTest.TEST_FILES_DIR, "abinit", "gan.cif"))
        # By default, znucl is filled using the first new type found in sites.
        def_vars = structure_to_abivars(gan)
        def_znucl = def_vars["znucl"]
        self.assertArrayEqual(def_znucl, [31, 7])
        def_typat = def_vars["typat"]
        self.assertArrayEqual(def_typat, [1, 1, 2, 2])
        # But it's possible to enforce a particular value of typat and znucl.
        enforce_znucl = [7, 31]
        enforce_typat = [2, 2, 1, 1]
        enf_vars = structure_to_abivars(gan, enforce_znucl=enforce_znucl, enforce_typat=enforce_typat)
        self.assertArrayEqual(enf_vars["znucl"], enforce_znucl)
        self.assertArrayEqual(enf_vars["typat"], enforce_typat)
        # Fractional coordinates must not depend on the type ordering.
        self.assertArrayEqual(def_vars["xred"], enf_vars["xred"])
        assert [s.symbol for s in species_by_znucl(gan)] == ["Ga", "N"]
        # Both orderings must map each site to the same atomic number.
        for itype1, itype2 in zip(def_typat, enforce_typat):
            assert def_znucl[itype1 - 1] == enforce_znucl[itype2 - 1]
        # Enforcing only one of the two is inconsistent and must raise.
        with self.assertRaises(Exception):
            structure_to_abivars(gan, enforce_znucl=enforce_znucl, enforce_typat=None)
class SpinModeTest(PymatgenTest):
    def test_base(self):
        """SpinMode.as_spinmode returns cached instances with sane equality."""
        polarized = SpinMode.as_spinmode("polarized")
        other_polarized = SpinMode.as_spinmode("polarized")
        unpolarized = SpinMode.as_spinmode("unpolarized")
        polarized.to_abivars()
        # Use the specific assert methods (rather than assertTrue on an
        # expression) so failures report both operands.
        self.assertIs(polarized, other_polarized)
        self.assertEqual(polarized, other_polarized)
        self.assertNotEqual(polarized, unpolarized)
        # Test pickle
        self.serialize_with_pickle(polarized)
        # Test dict methods
        self.assertMSONable(polarized)
        self.assertMSONable(unpolarized)
class SmearingTest(PymatgenTest):
    def test_base(self):
        """Smearing parsing, unit-equivalence, truthiness, serialization."""
        fd1ev = Smearing.as_smearing("fermi_dirac:1 eV")
        fd1ev.to_abivars()
        self.assertTrue(fd1ev)
        # The same smearing expressed in Hartree should compare equal.
        # assertEqual/assertNotEqual give informative messages on failure
        # (the original wrapped comparisons in assertTrue).
        same_fd = Smearing.as_smearing("fermi_dirac:" + str(1.0 / Ha_to_eV))
        self.assertEqual(same_fd, fd1ev)
        nosmear = Smearing.nosmearing()
        self.assertEqual(nosmear, Smearing.as_smearing("nosmearing"))
        # "No smearing" is falsy and unequal to any real smearing.
        self.assertFalse(nosmear)
        self.assertNotEqual(nosmear, fd1ev)
        self.assertMSONable(nosmear)
        new_fd1ev = Smearing.from_dict(fd1ev.as_dict())
        self.assertEqual(new_fd1ev, fd1ev)
        # Test pickle
        self.serialize_with_pickle(fd1ev)
        # Test dict methods
        self.assertMSONable(fd1ev)
class ElectronsAlgorithmTest(PymatgenTest):
    def test_base(self):
        """Smoke-test ElectronsAlgorithm: abivars export, pickling, MSON."""
        algo = ElectronsAlgorithm(nstep=70)
        # Export must not raise; the result is not inspected, so don't bind
        # it to an unused local (the original did).
        algo.to_abivars()
        # Test pickle
        self.serialize_with_pickle(algo)
        # Test dict methods
        self.assertMSONable(algo)
class ElectronsTest(PymatgenTest):
    def test_base(self):
        """Electrons defaults, pickling, and MSON for a customized instance."""
        default_electrons = Electrons()
        # assertEqual reports both operands on failure (the original wrapped
        # equality checks in assertTrue).
        self.assertEqual(default_electrons.nsppol, 2)
        self.assertEqual(default_electrons.nspinor, 1)
        self.assertEqual(default_electrons.nspden, 2)
        # Export must not raise; result intentionally unused.
        default_electrons.to_abivars()
        # Test pickle
        self.serialize_with_pickle(default_electrons, test_eq=False)
        custom_electrons = Electrons(
            spin_mode="unpolarized",
            smearing="marzari4:0.2 eV",
            algorithm=ElectronsAlgorithm(nstep=70),
            nband=10,
            charge=1.0,
            comment="Test comment",
        )
        # Test dict methods
        self.assertMSONable(custom_electrons)
class KSamplingTest(PymatgenTest):
    def test_base(self):
        """Build Monkhorst-Pack and Gamma-centered meshes; check MSON."""
        mp_mesh = KSampling.monkhorst((3, 3, 3), (0.5, 0.5, 0.5), 0, False, False)
        gamma_mesh = KSampling.gamma_centered((3, 3, 3), False, False)
        # Abinit-variable export must not raise.
        mp_mesh.to_abivars()
        # Test dict methods
        self.assertMSONable(mp_mesh)
        self.assertMSONable(gamma_mesh)
class RelaxationTest(PymatgenTest):
    def test_base(self):
        """Exercise both canned relaxation methods and their MSON dicts."""
        relax_both = RelaxationMethod.atoms_and_cell()
        relax_atoms = RelaxationMethod.atoms_only()
        # Abinit-variable export must not raise.
        relax_both.to_abivars()
        # Test dict methods
        self.assertMSONable(relax_both)
        self.assertMSONable(relax_atoms)
class PPModelTest(PymatgenTest):
    def test_base(self):
        """PPModel parsing, unit-equivalence, truthiness, serialization."""
        godby = PPModel.as_ppmodel("godby:12 eV")
        godby.to_abivars()
        self.assertTrue(godby)
        # The same model expressed in Hartree should compare equal.
        # assertEqual/assertNotEqual give informative failure messages
        # (the original wrapped comparisons in assertTrue/assertFalse).
        same_godby = PPModel.as_ppmodel("godby:" + str(12.0 / Ha_to_eV))
        self.assertEqual(same_godby, godby)
        # The "no PP model" sentinel is falsy and unequal to real models.
        noppm = PPModel.get_noppmodel()
        self.assertFalse(noppm)
        self.assertNotEqual(noppm, godby)
        new_godby = PPModel.from_dict(godby.as_dict())
        self.assertEqual(new_godby, godby)
        # Test pickle
        self.serialize_with_pickle(godby)
        # Test dict methods
        self.assertMSONable(godby)
|
import _plotly_utils.basevalidators
class ArrowcolorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Color validator for the `arrowcolor` property of layout.annotation."""
    def __init__(
        self, plotly_name="arrowcolor", parent_name="layout.annotation", **kwargs
    ):
        super(ArrowcolorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # Defaults below can be overridden by the caller via kwargs.
            edit_type=kwargs.pop("edit_type", "arraydraw"),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
|
import sys
if sys.version_info.major >= 3:
from configparser import RawConfigParser
else:
from ConfigParser import RawConfigParser
from .OrderedMultiDict import OrderedMultiDict
class UsefulConfigParser(object):
    """A config parser that sucks less than those in module `ConfigParser`.

    Improvements over a raw RawConfigParser:
    - options may appear multiple times per section (OrderedMultiDict),
      so `get`/`getboolean` return *lists* of values;
    - lookups may target one section, several sections, or all of them;
    - missing sections yield empty results instead of NoSectionError.
    """

    def __init__(self, filenames_to_try=None):
        # FUN FACT: In Python 3.2, they spontaneously changed the behaviour of
        # RawConfigParser so that it no longer considers ';' a comment delimiter
        # for inline comments (compare the Python 2 configparser docs with
        # "Customizing parser behaviour" in the Python 3 docs). Restore the
        # old behaviour explicitly.
        if sys.version_info.major >= 3:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict, inline_comment_prefixes=(';',))
        else:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict)
        # BUG FIX: `None` default instead of a mutable `[]` default —
        # mutable default arguments are shared across calls.
        if filenames_to_try is None:
            filenames_to_try = []
        if isinstance(filenames_to_try, str):
            filenames_to_try = [filenames_to_try]
        # Copy so later caller-side mutations don't alias our list.
        self._filenames_to_try = filenames_to_try[:]

    def read(self, filenames_to_try=None):
        """Queue additional filenames and parse every queued file.

        Returns the list of files successfully read (same contract as
        RawConfigParser.read).
        """
        # BUG FIX: same mutable-default fix as in __init__.
        if filenames_to_try is None:
            filenames_to_try = []
        if isinstance(filenames_to_try, str):
            filenames_to_try = [filenames_to_try]
        self._filenames_to_try.extend(filenames_to_try)
        return self._cp.read(self._filenames_to_try)

    def sections(self):
        """Return the list of section names present in the config."""
        return self._cp.sections()

    def options(self, section_name):
        """Return the option names in `section_name`, or [] if the section
        is absent (RawConfigParser would raise NoSectionError instead)."""
        if not self._cp.has_section(section_name):
            return []
        return self._cp.options(section_name)

    def get(self, section_name, option_name, do_optionxform=True):
        """Return a list of all values of `option_name`.

        `section_name` may be a single section name, an iterable of names,
        or None to search every section. `do_optionxform` applies the
        parser's option-name normalization (lowercasing by default):
        https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.optionxform
        """
        if do_optionxform:
            option_name = self._cp.optionxform(option_name)
        if section_name is None:
            return self._get_optval_in_sections(self.sections(), option_name)
        elif isinstance(section_name, str):
            return self._get_optval_in_sections([section_name], option_name)
        else:
            return self._get_optval_in_sections(section_name, option_name)

    def _get_optval_in_sections(self, section_names, option_name):
        """Collect every value of `option_name` across `section_names`,
        silently skipping sections that don't exist (so no
        ConfigParser.NoSectionError is raised)."""
        optvals = []
        for section_name in section_names:
            if not self._cp.has_section(section_name):
                continue
            optvals.extend([optval
                    for optname, optval in self._cp.items(section_name)
                    if optname == option_name])
        return optvals

    def getboolean(self, section_name, option_name, do_optionxform=True):
        """Like `get`, but coerce each value to bool; see _coerce_to_boolean."""
        return [self._coerce_to_boolean(optval)
                for optval in self.get(section_name, option_name, do_optionxform)]

    # Accepted spellings, mirroring RawConfigParser.getboolean:
    # https://docs.python.org/2/library/configparser.html#ConfigParser.RawConfigParser.getboolean
    _boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
                       '0': False, 'no': False, 'false': False, 'off': False}

    def _coerce_to_boolean(self, optval_str):
        """Map "1/yes/true/on" to True and "0/no/false/off" to False
        (case-insensitively); raise ValueError for anything else."""
        ovs_lower = optval_str.lower()
        if ovs_lower not in self._boolean_states:
            raise ValueError("Not a boolean: %s" % optval_str)
        return self._boolean_states[ovs_lower]
|
__Author__ = "Riyaz Ahmad Bhat"
__Email__ = "riyaz.ah.bhat@gmail.com"
import re
from collections import namedtuple
from sanity_checker import SanityChecker
class DefaultList(list):
    """Equivalent of Default dictionaries for Indexing Errors.

    Out-of-range reads return `default` instead of raising IndexError.
    """

    def __init__(self, default=None):
        # Value handed back for any out-of-range index.
        self.default = default
        super(DefaultList, self).__init__()

    def __getitem__(self, index):
        try:
            return super(DefaultList, self).__getitem__(index)
        except IndexError:
            return self.default
class SSFReader (SanityChecker):
    """Reader for SSF-style chunked dependency annotations.

    Builds `node`/`features` namedtuples for each chunk head and its child
    tokens, plus chunk-level maps (modifierModified: chunk -> parent chunk,
    chunk_word: chunk -> head name).
    NOTE(review): this code calls .decode() on sentence lines, so it expects
    byte strings (Python-2 era) — confirm before running under Python 3.
    """
    def __init__ (self, sentence):
        super(SSFReader, self).__init__()
        self.id_ = int()  # running token id across the sentence
        self.nodeList = list()  # one head node per chunk
        self.chunk_word = dict()  # chunkId -> head word name
        self.sentence = sentence
        self.modifierModified = dict()  # chunkId -> parent chunkId
        self.node = namedtuple('node',
            ('id', 'head', 'children', 'pos', 'poslcat', 'af', 'vpos', 'name','drel','parent',
            'chunkId', 'chunkType', 'mtype', 'troot', 'coref', 'stype','voicetype', 'posn'))
        self.features = namedtuple('features',
            ('lemma','cat','gen','num','per','case','vib','tam'))
    def getAnnotations (self):
        """Walk the SSF lines, filling nodeList with head nodes and
        attaching non-head tokens as their children. Returns self."""
        children_ = list()
        for line in self.sentence.split("\n"):
            nodeInfo = line.decode("utf-8").split("\t")
            if nodeInfo[0].isdigit():
                # Chunk line: its feature structure applies to the chunk head.
                assert len(nodeInfo) == 4 # no need to process trash! FIXME
                attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
                attributes = self.updateFSValues(attributeValue_pairs)
                h = attributes.get #NOTE h -> head node attributes
            elif nodeInfo[0].replace(".",'',1).isdigit():
                # Token line (id like "1.2"): belongs to the current chunk.
                assert (len(nodeInfo) == 4) and (nodeInfo[1] and nodeInfo[2] != '') # FIXME
                self.id_ += 1
                pos_ = nodeInfo[2].encode("utf-8").decode("ascii",'ignore').encode("ascii")
                wordForm_ = nodeInfo[1]
                attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1])
                if attributeValue_pairs['name'] == h('head_'):# NOTE head word of the chunk
                    # Head token: record it with the chunk-level attributes.
                    self.nodeList.append(self.node(str(self.id_),wordForm_,children_,pos_,h('poslcat_'),
                        self.features(h('lemma_') if h('lemma_') else wordForm_ ,h('cat_'),h('gen_'), h('num_'),
                        h('per_'),h('case_'),h('vib_'),h('tam_')),h('vpos_'),h('head_'),h('drel_'),
                        h('parent_'),h('chunkId_'),":".join(('head',h('chunkId_'))),h('mtype_'),h('troot_'),
                        h('coref_'),h('stype_'),h('voicetype_'),h('posn_')))
                    self.modifierModified[h('chunkId_')] = h('parent_')
                    self.chunk_word[h('chunkId_')] = h('head_')
                else:
                    # Non-head token: attach to the current chunk's children.
                    attributes = self.updateFSValues(attributeValue_pairs)
                    c = attributes.get #NOTE c -> child node attributes
                    children_.append(self.node(str(self.id_),wordForm_,[],pos_,c('poslcat_'),self.features(c('lemma_') \
                        if c('lemma_') else wordForm_ ,c('cat_'),c('gen_'),c('num_'),c('per_'),c('case_'),c('vib_'),
                        c('tam_')),c('vpos_'),c('name_'),"_","_",None,":".join(('child',h('chunkId_'))),c('mtype_'),
                        c('troot_'),c('coref_'),None, None, c('posn_')))
            else: children_ = list()
        return self
    def FSPairs (self, FS) :
        """Parse a feature-structure string into an attribute->value dict."""
        feats = dict()
        for feat in FS.split():
            if "=" not in feat:continue
            # Normalize dmrel to drel and collapse repeated quotes after af=.
            feat = re.sub("af='+","af='",feat.replace("dmrel=",'drel='))
            assert len(feat.split("=")) == 2
            attribute,value = feat.split("=")
            feats[attribute] = value
        return feats
    def morphFeatures (self, AF):
        "LEMMA,CAT,GEN,NUM,PER,CASE,VIB,TAM"
        assert len(AF[:-1].split(",")) == 8 # no need to process trash! FIXME
        lemma_,cat_,gen_,num_,per_,case_,vib_,tam_ = AF.split(",")
        if len(lemma_) > 1: lemma_ = lemma_.strip("'")
        return lemma_.strip("'"),cat_,gen_,num_,per_,case_,vib_,tam_.strip("'")
    def updateFSValues (self, attributeValue_pairs):
        """Expand raw attribute/value pairs into the full attribute dict,
        splitting the composite 'af' and 'drel' values into their parts."""
        attributes = dict(zip(['head_','poslcat_','af_','vpos_','name_','drel_','parent_','mtype_','troot_','chunkId_',\
            'coref_','stype_','voicetype_','posn_'], [None] * 14))
        attributes.update(dict(zip(['lemma_','cat_','gen_','num_','per_','case_','vib_','tam_'], [''] * 8)))
        for key,value in attributeValue_pairs.items():
            if key == "af":
                attributes['lemma_'],attributes['cat_'],attributes['gen_'],attributes['num_'],\
                attributes['per_'],attributes['case_'],attributes['vib_'],attributes['tam_'] = \
                    self.morphFeatures (value)
            elif key == "drel":
                assert len(value.split(":")) == 2 # no need to process trash! FIXME
                attributes['drel_'], attributes['parent_'] = re.sub("'|\"",'',value).split(":")
                assert attributes['drel_'] and attributes['parent_'] != "" # no need to process trash! FIXME
            else:
                variable = str(key) + "_"
                if variable == "name_": attributes['chunkId_'] = re.sub("'|\"",'',value)
                attributes[variable] = re.sub("'|\"",'',value)
        return attributes
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
# The long description shown on PyPI is the README plus the changelog.
with open(os.path.join(here, 'README.md')) as f:
    README = f.read()
with open(os.path.join(here, 'CHANGES.md')) as f:
    CHANGES = f.read()
# Runtime dependencies of the Pyramid app.
requires = [
    'pyramid',
    'pyramid_jinja2',
    'pyramid_debugtoolbar',
    'pyramid_tm',
    'SQLAlchemy',
    'transaction',
    'zope.sqlalchemy',
    'waitress',
    ]
setup(name='guestbook',
      version='0.1',
      description='guestbook',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python :: 3",
          "Framework :: Pyramid",
          "Topic :: Internet :: WWW/HTTP",
          "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
      ],
      author='',
      author_email='',
      url='',
      keywords='web wsgi bfg pylons pyramid',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='guestbook',
      install_requires=requires,
      # WSGI app factory plus a DB bootstrap console script.
      entry_points="""\
      [paste.app_factory]
      main = guestbook:main
      [console_scripts]
      initialize_guestbook_db = guestbook.scripts.initializedb:main
      """,
      )
|
"""
atomorder/parse_args.py
Parses command line arguments and overwrites setting defaults
"""
from . import settings
import argparse
import sys
description = ""
epilog = ""

# BUG FIX: the original built a first ArgumentParser from `description`/
# `epilog` with RawDescriptionHelpFormatter and immediately overwrote it
# with the parser below — dead code, removed. The observable CLI behavior
# is unchanged.
# NOTE(review): the surviving description looks copy-pasted from another
# tool (this package orders atoms, it does not fit PDFs) — confirm.
parser = argparse.ArgumentParser(description='Fit probability density functions to data-files')
parser.add_argument('-r', '--reactants', help='Reactant structures in a coordinate file format.', action='store', type=str, nargs='+')
parser.add_argument('-p', '--products', help='Product structures in a coordinate file format.', action='store', type=str, nargs='+')
parser.add_argument('--print-level', help='Print-level - 0: quiet, 1: results and errors, 2: +warnings, 3: +progress, 4: excess, 5: EXTREME',
                    action='store', choices = range(0,6), default=1, type=int)
parser.add_argument('-f', '--format', help='File format', type=str, action='store', default='guess', choices=["guess","xyz","pdb"])
parser.add_argument('-m', '--method', help='Method to use.\n \
        rotate: Ignore bond order, align a single reactant and product molecule and match all atoms\n \
        no-bond: Atom matching by rotation and atomic similarity\n \
        full: Atom matching by rotation and bond similarity\n \
        info: Information about molecule sybyl atom types, bond types and conjugated sub systems',
        choices = ['rotate', 'full', 'info', 'no-bond'], action='store', default='full')
parser.add_argument('-o', '--output', help='Given a filename, output the reordered product in xyz format instead of printing to stdout', action='store', type=str, default=sys.stdout)
parser.add_argument('--atomic-sybyl-weight', action='store', default=1, type=float)
parser.add_argument('--bond-weight', action='store', default=1, type=float)

# Parse at import time and push the result into the package settings.
args = parser.parse_args()
settings.update(args)
|
import glob
import os
import shutil
import subprocess
import sys
import yaml
def create_role(role):
    """Scaffold *role* with ``ansible-galaxy init`` and verify it succeeded.

    Raises:
        Exception: if ansible-galaxy does not report successful creation.
    """
    ret = subprocess.check_output(
        'ansible-galaxy init {}'.format(role).split())
    # check_output returns bytes on Python 3; decode before comparing with a
    # str suffix (bytes.endswith(str) raises TypeError there).
    if not ret.decode('utf-8', 'replace').strip().endswith('created successfully'):
        raise Exception('could not create role "{}"'.format(role))
def get_metadata(role):
    """Load and return the role's ``meta/main.yml``.

    Returns an empty dict when the file does not exist.
    """
    try:
        # safe_load avoids arbitrary object construction (plain yaml.load
        # without a Loader is unsafe and deprecated), and the context
        # manager closes the file deterministically (the original leaked
        # the open file handle).
        with open(os.path.join(role, 'meta/main.yml')) as main:
            return yaml.safe_load(main)
    except IOError:
        return {}
def ensure_meta(role):
    """Ensure the role has a ``meta`` directory."""
    meta_dir = os.path.join(role, 'meta')
    try:
        os.makedirs(meta_dir)
    except OSError:
        # Directory already exists -- nothing to do.
        pass
def set_metadata(role, metadata):
    """Write *metadata* to the role's ``meta/main.yml``.

    The YAML is written to a temporary sibling file first and renamed into
    place, so readers never observe a half-written file.
    """
    ensure_meta(role)
    final_path = os.path.join(role, 'meta/main.yml')
    staging_path = os.path.join(role, 'meta/main.yml.new')
    with open(staging_path, 'w') as fh:
        yaml.dump(metadata, fh, default_flow_style=False, explicit_start=True)
    os.rename(staging_path, final_path)
def add_dependency(src_role, target_role):
    """Add metadata saying that 'target_role' depends on 'src_role'."""
    metadata = get_metadata(target_role)
    dependency_path = os.path.join(target_role, 'roles', src_role)
    metadata.setdefault('dependencies', []).append(dependency_path)
    set_metadata(target_role, metadata)
def sub_roles(role):
    """Return the paths of the roles nested under *role* (``<role>/roles/*``).

    ``glob.glob`` already returns an empty list for a missing directory and
    never raises OSError, so the original try/except was dead code.
    """
    return glob.glob(os.path.join(role, 'roles/*'))
def fix_dependency(role, for_destination):
    """Fix the sub-role dependency.

    Dependency on a sub-role has to be changed once we move the base
    role: any dependency path rooted at *role* is re-rooted under
    ``<for_destination>/roles``.
    """
    metadata = get_metadata(role)
    old_deps = metadata.setdefault('dependencies', [])
    metadata['dependencies'] = [
        os.path.join(for_destination, 'roles', dep) if dep.startswith(role) else dep
        for dep in old_deps
    ]
    set_metadata(role, metadata)
def fix_dependencies(src_role, for_destination):
    """Recursively rewrite sub-role dependencies, deepest roles first."""
    for nested in sub_roles(src_role):
        fix_dependencies(nested, for_destination)
    fix_dependency(src_role, for_destination)
def move(src_role, target_role, copy=False):
    """Relocate *src_role* under ``<target_role>/roles`` and record the
    dependency.

    When *copy* is true the source tree is duplicated instead of moved.
    """
    transfer = shutil.copytree if copy else shutil.move
    try:
        os.makedirs(os.path.join(target_role, 'roles'))
    except OSError:
        # The 'roles' directory is already present.
        pass
    fix_dependencies(src_role, for_destination=target_role)
    transfer(src_role, os.path.join(target_role, 'roles', src_role))
    add_dependency(src_role, target_role)
def concat(roles, into, copy=False):
    """Create role *into* and fold every role in *roles* into it."""
    create_role(into)
    for name in roles:
        move(name, target_role=into, copy=copy)
def test():
    """Smoke test: build three roles, nest them, and verify the recorded
    dependency chains.  Requires ansible-galaxy on PATH."""
    names = ['foo', 'bar', 'spam']
    try:
        for name in names:
            create_role(name)
        move('foo', 'bar')
        assert get_metadata('bar')['dependencies'] == ['bar/roles/foo']
        move('bar', 'spam')
        assert get_metadata('spam')['dependencies'] == ['spam/roles/bar']
        assert get_metadata('spam/roles/bar')['dependencies'] == ['spam/roles/bar/roles/foo']
    finally:
        # Always clean up the scratch roles, even on assertion failure.
        for name in names:
            shutil.rmtree(name, ignore_errors=True)
def main(roles_path=None):
    """Concatenate the roles given as argv[1] and argv[2] into argv[3].

    Args:
        roles_path: optional directory to chdir into before operating.
            (Previously a hard-coded, always-None local variable guarding a
            dead branch; now a real, backward-compatible parameter.)
    """
    if roles_path is not None:
        os.chdir(roles_path)
    concat([sys.argv[1], sys.argv[2]], into=sys.argv[3])

if __name__ == '__main__':
    main()
|
"""Make use of synaptic as backend."""
__author__ = "Sebastian Heinlein <devel@glatzor.de>, " \
"Michael Vogt <mvo@canonical.com"
import tempfile
from gettext import gettext as _
from gi.repository import GObject
from defer import Deferred
import sessioninstaller.errors
class SynapticBackend(object):
    """Make use of Synaptic to install and remove packages."""

    def _run_synaptic(self, xid, opt, tempf, interaction):
        """Spawn synaptic (via gksu) with the given options.

        Returns a Deferred that fires when the child process exits.
        """
        deferred = Deferred()
        if tempf:
            opt.extend(["--set-selections-file", "%s" % tempf.name])
        #FIXME: Take interaction into account
        opt.extend(["-o", "Synaptic::closeZvt=true"])
        if xid:
            opt.extend(["--parent-window-id", "%s" % (xid)])
        cmd = ["/usr/bin/gksu",
               "--desktop", "/usr/share/applications/update-manager.desktop",
               "--", "/usr/sbin/synaptic", "--hide-main-window",
               "--non-interactive"]
        cmd.extend(opt)
        flags = GObject.SPAWN_DO_NOT_REAP_CHILD
        (pid, stdin, stdout, stderr) = GObject.spawn_async(cmd, flags=flags)
        GObject.child_watch_add(pid, self._on_synaptic_exit, (tempf, deferred))
        return deferred

    def _on_synaptic_exit(self, pid, condition, data):
        # Tuple parameters in function signatures are Python 2 only
        # (removed by PEP 3113); unpack the user-data tuple explicitly so
        # this also runs on Python 3.
        (tempf, deferred) = data
        if tempf:
            tempf.close()
        if condition == 0:
            deferred.callback()
        else:
            deferred.errback(sessioninstaller.errors.ModifyFailed())

    def remove_packages(self, xid, package_names, interaction):
        """Queue removal of *package_names* through a selections file."""
        opt = []
        # custom progress strings
        #opt.append("--progress-str")
        #opt.append("%s" % _("Please wait, this can take some time."))
        #opt.append("--finish-str")
        #opt.append("%s" % _("Update is complete"))
        # Open in text mode: the default "w+b" rejects str writes on Python 3.
        tempf = tempfile.NamedTemporaryFile(mode="w+")
        for pkg_name in package_names:
            tempf.write("%s\tuninstall\n" % pkg_name)
        tempf.flush()
        return self._run_synaptic(xid, opt, tempf, interaction)

    def install_packages(self, xid, package_names, interaction):
        """Queue installation of *package_names* through a selections file."""
        opt = []
        # custom progress strings
        #opt.append("--progress-str")
        #opt.append("%s" % _("Please wait, this can take some time."))
        #opt.append("--finish-str")
        #opt.append("%s" % _("Update is complete"))
        # Open in text mode: the default "w+b" rejects str writes on Python 3.
        tempf = tempfile.NamedTemporaryFile(mode="w+")
        for pkg_name in package_names:
            tempf.write("%s\tinstall\n" % pkg_name)
        tempf.flush()
        return self._run_synaptic(xid, opt, tempf, interaction)

    def install_package_files(self, xid, package_names, interaction):
        # NotImplemented is a value, not an exception; raising it is a
        # TypeError on Python 3.  NotImplementedError is the correct way to
        # mark an unsupported operation.
        raise NotImplementedError
|
from django.test import TestCase
from medicine.models import Medicine
from medicine.views import ListAllMedicines
from user.models import HealthProfessional
class TestListAllMedicines(TestCase):
    """Tests for the ListAllMedicines view."""

    def setUp(self):
        # View class under test.
        self.view = ListAllMedicines
        # Persist a single medicine so the view's queryset is non-empty.
        medicine = Medicine()
        medicine.name = "Medicamento Teste"
        medicine.active_ingredient = "Teste Lab"
        medicine.save()
        self.medicine = medicine
        self.listing = Medicine.objects.all()

    def test_medicine_is_show(self):
        # The view's queryset must contain the medicine saved in setUp().
        instance = self.view()
        self.assertEqual(instance.get_queryset()[0], self.listing[0])
|
import _plotly_utils.basevalidators
class BgcolorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Color validator for the ``sankey.node.hoverlabel.bgcolor`` attribute."""

    def __init__(
        self, plotly_name="bgcolor", parent_name="sankey.node.hoverlabel", **kwargs
    ):
        # Pull defaults out of kwargs first so explicit overrides win.
        array_ok = kwargs.pop("array_ok", True)
        edit_type = kwargs.pop("edit_type", "calc")
        role = kwargs.pop("role", "style")
        super(BgcolorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=array_ok,
            edit_type=edit_type,
            role=role,
            **kwargs
        )
|
from pyperator.decorators import inport, outport, component, run_once
from pyperator.nodes import Component
from pyperator.DAG import Multigraph
from pyperator.utils import InputPort, OutputPort
import pyperator.components
|
import itertools
import os.path
import sys
import time
from . import core
from . import file_io
from . import geometry
from . import stringconv
from . import version
def save_output(profileli, opt):
    """Save summaries of the evaluated profiles to the output directory.

    Writes session, profile, point, cluster, interpoint and Monte Carlo
    summary files (via file_io.FileWriter) for every profile in *profileli*
    that evaluated without errors.  Progress and failures are reported on
    stdout and recorded in ``opt.save_result``.
    """
    def m(x, pixelwidth):
        # Convert a length in pixels to metric units.
        return geometry.to_metric_units(x, pixelwidth)

    def m2(x, pixelwidth):
        # For area units...
        return geometry.to_metric_units(x, pixelwidth**2)

    def na(x):
        # Render missing values (None or the -1 sentinel) as "N/A".
        if x in (None, -1):
            return "N/A"
        else:
            return x

    def write_session_summary():
        """Write session-wide settings and the disposition of input files."""
        with file_io.FileWriter("session.summary", opt) as f:
            f.writerow(["%s version:" % version.title,
                        "%s (Last modified %s %s, %s)"
                        % ((version.version,) + version.date)])
            f.writerow(["Number of evaluated profiles:", len(eval_proli)])
            if err_fli:
                f.writerow(["Number of non-evaluated profiles:", len(err_fli)])
            f.writerow(["Metric unit:", eval_proli[0].metric_unit])
            f.writerow(["Spatial resolution:", opt.spatial_resolution, eval_proli[0].metric_unit])
            f.writerow(["Shell width:", opt.shell_width, eval_proli[0].metric_unit])
            f.writerow(["Interpoint distances calculated:",
                        stringconv.yes_or_no(opt.determine_interpoint_dists)])
            if opt.determine_interpoint_dists:
                f.writerow(["Interpoint distance mode:", opt.interpoint_dist_mode])
                f.writerow(["Shortest interpoint distances:",
                            stringconv.yes_or_no(opt.interpoint_shortest_dist)])
                f.writerow(["Lateral interpoint distances:",
                            stringconv.yes_or_no(opt.interpoint_lateral_dist)])
            f.writerow(["Monte Carlo simulations performed:",
                        stringconv.yes_or_no(opt.run_monte_carlo)])
            if opt.run_monte_carlo:
                f.writerow(["Number of Monte Carlo runs:", opt.monte_carlo_runs])
                f.writerow(["Monte Carlo simulation window:", opt.monte_carlo_simulation_window])
                f.writerow(["Strict localization in simulation window:",
                            stringconv.yes_or_no(opt.monte_carlo_strict_location)])
            f.writerow(["Clusters determined:", stringconv.yes_or_no(opt.determine_clusters)])
            if opt.determine_clusters:
                f.writerow(["Within-cluster distance:",
                            opt.within_cluster_dist, eval_proli[0].metric_unit])
            if clean_fli:
                f.writerow(["Input files processed cleanly:"])
                f.writerows([[fn] for fn in clean_fli])
            if nop_fli:
                f.writerow(["Input files processed but which generated no point distances:"])
                f.writerows([[fn] for fn in nop_fli])
            if warn_fli:
                f.writerow(["Input files processed but which generated "
                            "warnings (see log for details):"])
                f.writerows([[fn] for fn in warn_fli])
            if err_fli:
                f.writerow(["Input files not processed or not included in "
                            "summary (see log for details):"])
                f.writerows([[fn] for fn in err_fli])

    def write_profile_summary():
        """Write one row of per-profile statistics per evaluated profile."""
        with file_io.FileWriter("profile.summary", opt) as f:
            f.writerow(["Postsynaptic element length",
                        "Presynaptic element length",
                        "Number of PSDs:",
                        "Total postsynaptic membrane length incl perforations:",
                        "Total postsynaptic membrane length excl perforations:",
                        "Total PSD area:",
                        "Particles (total)",
                        "Particles in PSD",
                        "Particles within %s %s of PSD"
                        % (opt.spatial_resolution, eval_proli[0].metric_unit),
                        "Shell particles strictly synaptic and postsynaptic",
                        "Shell particles strictly synaptic and postsynaptic "
                        "or associated with postsynaptic membrane",
                        "Synaptic particles associated w/ postsynaptic "
                        "membrane",
                        "Synaptic particles associated w/ presynaptic membrane",
                        "Perisynaptic particles associated w/ postsynaptic "
                        "membrane",
                        "Perisynaptic particles associated w/ presynaptic "
                        "membrane",
                        "Within-perforation particles associated w/ "
                        "postsynaptic membrane",
                        "Within-perforation particles associated w/ "
                        "presynaptic membrane",
                        "Presynaptic profile",
                        "Postsynaptic profile",
                        "ID",
                        "Input file",
                        "Comment"])
            f.writerows([[m(pro.posel.length(), pro.pixelwidth),
                          m(pro.prsel.length(), pro.pixelwidth),
                          len(pro.psdli),
                          m(pro.total_posm.length(), pro.pixelwidth),
                          sum([m(psd.posm.length(), pro.pixelwidth)
                               for psd in pro.psdli]),
                          sum([m2(psd.psdposm.area(), pro.pixelwidth)
                               for psd in pro.psdli]),
                          len(pro.pli),
                          len([p for p in pro.pli if p.is_within_psd]),
                          len([p for p in pro.pli if p.is_associated_with_psd]),
                          len([p for p in pro.pli
                               if p.strict_lateral_location == "synaptic" and
                               p.axodendritic_location == "postsynaptic" and
                               p.is_within_postsynaptic_membrane_shell]),
                          len([p for p in pro.pli
                               if p.strict_lateral_location == "synaptic" and
                               (p.axodendritic_location == "postsynaptic" and
                                p.is_within_postsynaptic_membrane_shell) or
                               p.is_postsynaptic_membrane_associated]),
                          len([p for p in pro.pli
                               if p.lateral_location == "synaptic" and
                               p.is_postsynaptic_membrane_associated]),
                          len([p for p in pro.pli
                               if p.lateral_location == "synaptic" and
                               p.is_presynaptic_membrane_associated]),
                          len([p for p in pro.pli
                               if p.lateral_location == "perisynaptic" and
                               p.is_postsynaptic_membrane_associated]),
                          len([p for p in pro.pli
                               if p.lateral_location == "perisynaptic" and
                               p.is_presynaptic_membrane_associated]),
                          len([p for p in pro.pli
                               if p.lateral_location == "within perforation"
                               and p.is_postsynaptic_membrane_associated]),
                          len([p for p in pro.pli
                               if p.lateral_location == "within perforation"
                               and p.is_presynaptic_membrane_associated]),
                          pro.presyn_profile,
                          pro.postsyn_profile,
                          pro.id,
                          # BUGFIX: previously (comment, input file) were
                          # emitted in the wrong order relative to the
                          # header's "ID", "Input file", "Comment" columns.
                          os.path.basename(pro.inputfn),
                          pro.comment] for pro in eval_proli])

    def write_point_summary(ptype):
        """Write per-point rows for 'particle' or (if enabled) 'random' points."""
        if ptype == "particle":
            pli = "pli"
            pstr = "particle"
        elif ptype == "random":
            if not opt.use_random:
                return
            else:
                pli = "randomli"
                pstr = "point"
        else:
            return
        with file_io.FileWriter("%s.summary" % ptype, opt) as f:
            f.writerow(["%s number (as appearing in input file)" % pstr.capitalize(),
                        "Coordinates (in pixels)",
                        "Axodendritic location",
                        "Distance to postsynaptic element membrane",
                        "Distance to presynaptic element membrane",
                        "Lateral location",
                        "Strict lateral location",
                        "Lateral distance to nearest PSD center",
                        "Normalized lateral distance to nearest PSD center",
                        "Within PSD",
                        "Within %s %s of PSD" % (opt.spatial_resolution, eval_proli[0].metric_unit),
                        "Total postsynaptic membrane length incl perforations",
                        "Total postsynaptic membrane length excl perforations",
                        "Length of laterally closest PSD",
                        "Presynaptic profile",
                        "Postsynaptic profile",
                        "Profile ID",
                        "Input file",
                        "Comment"])
            f.writerows([[n+1,
                          p,
                          p.axodendritic_location,
                          m(p.dist_to_posel, pro.pixelwidth),
                          m(p.dist_to_prsel, pro.pixelwidth),
                          p.lateral_location,
                          p.strict_lateral_location,
                          m(p.lateral_dist_psd, pro.pixelwidth),
                          p.norm_lateral_dist_psd,
                          stringconv.yes_or_no(p.is_within_psd),
                          stringconv.yes_or_no(p.is_associated_with_psd),
                          m(pro.total_posm.length(), pro.pixelwidth),
                          m(sum([psd.posm.length() for psd in pro.psdli]),
                            pro.pixelwidth),
                          m(p.nearest_psd.posm.length(), pro.pixelwidth),
                          pro.presyn_profile,
                          pro.postsyn_profile,
                          pro.id,
                          os.path.basename(pro.inputfn),
                          pro.comment] for pro in eval_proli for n, p in
                         enumerate(pro.__dict__[pli])])

    def write_cluster_summary():
        """Write one row per detected cluster, if clustering was enabled."""
        if not opt.determine_clusters:
            return
        with file_io.FileWriter("cluster.summary", opt) as f:
            f.writerow(["Cluster number",
                        "Number of particles in cluster",
                        "Distance to postsynaptic membrane of centroid",
                        "Distance to nearest cluster along postsynaptic element membrane",
                        "Profile ID",
                        "Input file",
                        "Comment"])
            f.writerows([[n + 1,
                          len(c),
                          m(c.dist_to_posel, pro.pixelwidth),
                          m(na(c.dist_to_nearest_cluster), pro.pixelwidth),
                          pro.id,
                          os.path.basename(pro.inputfn),
                          pro.comment] for pro in eval_proli for n, c in
                         enumerate(pro.clusterli)])

    def write_interpoint_summaries():
        """Write a column per requested (non-simulated) interpoint relation."""
        if not opt.determine_interpoint_dists:
            return
        ip_rels = dict([(key, val)
                        for key, val in opt.interpoint_relations.items()
                        if val and 'simulated' not in key])
        # Relations involving random points are meaningless without them.
        if not opt.use_random:
            for key, val in opt.interpoint_relations.items():
                if 'random' in key and val:
                    del ip_rels[key]
        if (len(ip_rels) == 0 or not
                (opt.interpoint_shortest_dist or opt.interpoint_lateral_dist)):
            return
        table = []
        if opt.interpoint_dist_mode == 'all':
            s = "all distances"
        else:
            s = "nearest neighbour distances"
        table.append(["Mode: " + s])
        headerli = list(ip_rels.keys())
        prefixli = []
        for key, val in ip_rels.items():
            # e.g. "particle - particle" -> "pp_"; keys off the attribute
            # names (<prefix>distli) set on each profile.
            prefix = key[0] + key[key.index("- ") + 2] + "_"
            prefixli.append(prefix)
        if opt.interpoint_shortest_dist and opt.interpoint_lateral_dist:
            headerli.extend(headerli)
            prefixli.extend([t + 'lat' for t in prefixli])
        topheaderli = []
        if opt.interpoint_shortest_dist:
            topheaderli.append("Shortest distances")
            if opt.interpoint_lateral_dist:
                topheaderli.extend([""] * (len(ip_rels) - 1))
        if opt.interpoint_lateral_dist:
            topheaderli.append("Lateral distances along postsynaptic element "
                               "membrane")
        table.extend([topheaderli, headerli])
        cols = [[] for _ in prefixli]
        for pro in eval_proli:
            for n, li in enumerate([pro.__dict__[prefix + "distli"]
                                    for prefix in prefixli]):
                cols[n].extend([m(e, pro.pixelwidth) for e in li])
        # transpose cols and append to table
        table.extend(list(itertools.zip_longest(*cols, fillvalue="")))
        with file_io.FileWriter("interpoint.summary", opt) as f:
            f.writerows(table)

    def write_mc_dist_to_psd(dtype):
        """Write simulated lateral distances to the nearest PSD, one column
        per Monte Carlo run."""
        if not opt.run_monte_carlo:
            return
        table = []
        if dtype == 'metric':
            table.append(["Lateral distances in %s to center of the nearest PSD"
                          % eval_proli[0].metric_unit])
        elif dtype == 'normalized':
            table.append(["Normalized lateral distances to the center of the nearest PSD"])
        table.append(["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)])
        for pro in eval_proli:
            if dtype == 'metric':
                table.extend(zip(*[[m(p.lateral_dist_psd, pro.pixelwidth) for p in li["pli"]]
                                   for li in pro.mcli]))
            elif dtype == 'normalized':
                table.extend(zip(*[[p.norm_lateral_dist_psd for p in li["pli"]]
                                   for li in pro.mcli]))
        with file_io.FileWriter("simulated.PSD.%s.lateral.distances" % dtype, opt) as f:
            f.writerows(table)

    def write_mc_dist_to_posel():
        """Write simulated distances to the postsynaptic element membrane."""
        if not opt.run_monte_carlo:
            return
        table = [["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)]]
        for pro in eval_proli:
            table.extend(itertools.zip_longest(*[[m(p.dist_to_posel, pro.pixelwidth)
                                                  for p in li['pli']] for li in pro.mcli]))
        with file_io.FileWriter(
                "simulated.postsynaptic.element.membrane.distances", opt) as f:
            f.writerows(table)

    def write_mc_ip_dists(dist_type):
        """Write simulated interpoint distances ('shortest' or 'lateral'),
        one column per Monte Carlo run."""
        def m_li(*_li):
            return [m(x, pro.pixelwidth) for x in _li]
        if not (opt.run_monte_carlo and opt.determine_interpoint_dists):
            return
        for ip_type in [key for key, val in opt.interpoint_relations.items()
                        if 'simulated' in key and val]:
            if ((dist_type == 'shortest' and not opt.interpoint_shortest_dist) or
                    (dist_type == 'lateral' and not opt.interpoint_lateral_dist)):
                return
            if dist_type == 'lateral':
                short_dist_type = 'lat'
            else:
                short_dist_type = ''
            table = [["Run %d" % (n + 1) for n in range(0, opt.monte_carlo_runs)]]
            for pro in eval_proli:
                # BUGFIX: the original flattened all runs into one list of
                # scalars and star-unpacked them into zip_longest (raising
                # TypeError), leaving m_li() unused.  Build one metric-unit
                # list per run instead, then transpose.
                table.extend(itertools.zip_longest(
                    *[m_li(*li[ip_type]["%sdist" % short_dist_type])
                      for li in pro.mcli]))
            with file_io.FileWriter("%s.interpoint.%s.distances"
                                    % (ip_type.replace(" ", ""), dist_type), opt) as f:
                f.writerows(table)

    def write_mc_cluster_summary():
        """Write one row per simulated cluster over all Monte Carlo runs."""
        if not (opt.determine_clusters and opt.run_monte_carlo):
            return
        table = [["N particles in cluster", "Run",
                  "Distance to postsynaptic element membrane from centroid",
                  "Distance to nearest cluster along postsynaptic element membrane",
                  "Profile ID",
                  "Input file",
                  "Comment"]]
        for pro in eval_proli:
            for n in range(0, opt.monte_carlo_runs):
                for c in pro.mcli[n]["clusterli"]:
                    table.append([len(c), n + 1,
                                  m(c.dist_to_posel, pro.pixelwidth),
                                  m(na(c.dist_to_nearest_cluster),
                                    pro.pixelwidth),
                                  pro.id,
                                  os.path.basename(pro.inputfn),
                                  pro.comment])
        with file_io.FileWriter("simulated.clusters", opt) as f:
            f.writerows(table)

    sys.stdout.write("\nSaving summaries to %s:\n" % opt.output_dir)
    opt.save_result = {'any_saved': False, 'any_err': False}
    # Partition the input files by how their processing went.
    eval_proli = [profile for profile in profileli if not profile.errflag]
    clean_fli = [profile.inputfn for profile in profileli
                 if not (profile.errflag or profile.warnflag)]
    warn_fli = [profile.inputfn for profile in profileli if profile.warnflag]
    err_fli = [profile.inputfn for profile in profileli if profile.errflag]
    nop_fli = [profile.inputfn for profile in eval_proli if not profile.pli]
    write_session_summary()
    write_profile_summary()
    write_point_summary('particle')
    write_point_summary('random')
    write_interpoint_summaries()
    write_cluster_summary()
    write_mc_dist_to_posel()
    write_mc_dist_to_psd('metric')
    write_mc_dist_to_psd('normalized')
    write_mc_ip_dists('shortest')
    write_mc_ip_dists('lateral')
    write_mc_cluster_summary()
    if opt.save_result['any_err']:
        sys.stdout.write("Note: One or more summaries could not be saved.\n")
    if opt.save_result['any_saved']:
        sys.stdout.write("Done.\n")
    else:
        sys.stdout.write("No summaries saved.\n")
def reset_options(opt):
    """Delete options that should always be set anew for each run
    (each time the "Start" button is pressed).
    """
    for name in ('metric_unit', 'use_random'):
        try:
            delattr(opt, name)
        except AttributeError:
            # Attribute was never set this session; nothing to reset.
            pass
def show_options(opt):
    """Print the active session options to stdout."""
    w = sys.stdout.write
    yes_no = stringconv.yes_or_no
    w("{} version: {} (Last modified {} {}, {})\n".format(
        version.title, version.version, *version.date))
    w("Output file format: %s\n" % opt.output_file_format)
    w("Suffix of output files: %s\n" % opt.output_filename_suffix)
    w("Output directory: %s\n" % opt.output_dir)
    w("Spatial resolution: %d\n" % opt.spatial_resolution)
    w("Shell width: %d metric units\n" % opt.shell_width)
    w("Interpoint distances calculated: %s\n"
      % yes_no(opt.determine_interpoint_dists))
    if opt.determine_interpoint_dists:
        w("Interpoint distance mode: %s\n" % opt.interpoint_dist_mode.capitalize())
        w("Shortest interpoint distances: %s\n"
          % yes_no(opt.interpoint_shortest_dist))
        w("Lateral interpoint distances: %s\n"
          % yes_no(opt.interpoint_lateral_dist))
    w("Monte Carlo simulations performed: %s\n"
      % yes_no(opt.run_monte_carlo))
    if opt.run_monte_carlo:
        w("Number of Monte Carlo runs: %d\n"
          % opt.monte_carlo_runs)
        w("Monte Carlo simulation window: %s\n"
          % opt.monte_carlo_simulation_window)
        w("Strict localization in simulation window: %s\n"
          % yes_no(opt.monte_carlo_strict_location))
    w("Clusters determined: %s\n" % yes_no(opt.determine_clusters))
    if opt.determine_clusters:
        w("Within-cluster distance: %d\n" % opt.within_cluster_dist)
def get_output_format(opt):
    """Resolve output-format options on *opt*.

    Falls back from Excel to csv when openpyxl is unavailable, configures
    the csv dialect, and builds the output filename suffix from the date
    and/or user-supplied suffix.
    """
    if opt.output_file_format == 'excel':
        try:
            import openpyxl  # availability check only
        except ImportError:
            sys.stdout.write("Unable to write Excel files: resorting to csv format.\n")
            opt.output_file_format = 'csv'
    if opt.output_file_format == 'csv':
        csv_format = {'dialect': 'excel', 'lineterminator': '\n'}
        if opt.csv_delimiter == 'tab':
            csv_format['delimiter'] = '\t'
        opt.csv_format = csv_format
        opt.output_filename_ext = '.csv'
    if opt.output_filename_date_suffix:
        from datetime import date
        opt.output_filename_suffix = "." + date.today().isoformat()
    if opt.output_filename_other_suffix != '':
        opt.output_filename_suffix += "." + opt.output_filename_other_suffix
def main_proc(parent):
    """Process all profile data files listed in ``parent.opt``.

    Returns:
        0 if there were no input files or any file produced errors,
        1 on a fully clean run, 2 if warnings were generated,
        3 if the user aborted the session.
    """
    opt = parent.opt
    if not opt.input_file_list:
        sys.stdout.write("No input files.\n")
        return 0
    i, n = 0, 0
    profileli = []
    sys.stdout.write("--- Session started %s local time ---\n" % time.ctime())
    # Drop duplicate filenames.  BUGFIX: iterate over a copy -- the original
    # removed elements from the very list it was iterating, which makes the
    # iterator skip the element following each removal.
    for f in opt.input_file_list[:]:
        if opt.input_file_list.count(f) > 1:
            sys.stdout.write("Duplicate input filename %s:\n => removing first occurrence in "
                             "list\n" % f)
            opt.input_file_list.remove(f)
    get_output_format(opt)
    reset_options(opt)
    show_options(opt)
    while True:
        if i < len(opt.input_file_list):
            inputfn = opt.input_file_list[i]
            i += 1
        else:
            sys.stdout.write("\nNo more input files...\n")
            break
        parent.process_queue.put(("new_file", inputfn))
        profileli.append(core.ProfileData(inputfn, opt))
        profileli[-1].process()
        if opt.stop_requested:
            sys.stdout.write("\n--- Session aborted by user %s local time ---\n" % time.ctime())
            return 3
        if not profileli[-1].errflag:
            n += 1
            if profileli[-1].warnflag:
                sys.stdout.write("Warning(s) found while processing input file.\n")
            continue
        else:
            sys.stdout.write("Error(s) found while processing input file =>\n"
                             "  => No distances could be determined.\n")
            continue
    # no more input files
    errfli = [pro.inputfn for pro in profileli if pro.errflag]
    warnfli = [pro.inputfn for pro in profileli if pro.warnflag]
    if errfli:
        sys.stdout.write("\n%s input %s generated one or more errors:\n"
                         % (stringconv.plurality("This", len(errfli)),
                            stringconv.plurality("file", len(errfli))))
        sys.stdout.write("%s\n" % "\n".join([fn for fn in errfli]))
    if warnfli:
        sys.stdout.write("\n%s input %s generated one or more warnings:\n"
                         % (stringconv.plurality("This", len(warnfli)),
                            stringconv.plurality("file", len(warnfli))))
        sys.stdout.write("%s\n" % "\n".join([fn for fn in warnfli]))
    if n > 0:
        parent.process_queue.put(("saving_summaries", ""))
        save_output(profileli, opt)
    else:
        sys.stdout.write("\nNo files processed.\n")
    sys.stdout.write("--- Session ended %s local time ---\n" % time.ctime())
    parent.process_queue.put(("done", ""))
    if errfli:
        return 0
    elif warnfli:
        return 2
    else:
        return 1
|
import os
import re
import subprocess
from six.moves.urllib.parse import urlparse, quote_plus
from subprocess import CalledProcessError, PIPE, STDOUT
from conans.client.tools.env import no_op, environment_append
from conans.client.tools.files import chdir
from conans.errors import ConanException
from conans.util.files import decode_text, to_file_bytes
class Git(object):
    """Thin wrapper around the ``git`` command-line client.

    All operations shell out to ``git`` inside ``self.folder``; an optional
    custom ``runner`` callable may be supplied to execute the commands
    instead of ``subprocess``.
    """

    def __init__(self, folder=None, verify_ssl=True, username=None, password=None,
                 force_english=True, runner=None):
        # Default to the current working directory; create the folder if needed.
        self.folder = folder or os.getcwd()
        if not os.path.exists(self.folder):
            os.makedirs(self.folder)
        self._verify_ssl = verify_ssl
        self._force_eng = force_english
        self._username = username
        self._password = password
        self._runner = runner

    def run(self, command):
        """Run ``git <command>`` in self.folder and return its stripped output.

        When force_english is set, LC_ALL is pinned to en_US.UTF-8 so git's
        messages can be parsed reliably regardless of the user's locale.
        """
        command = "git %s" % command
        with chdir(self.folder) if self.folder else no_op():
            with environment_append({"LC_ALL": "en_US.UTF-8"}) if self._force_eng else no_op():
                if not self._runner:
                    return subprocess.check_output(command, shell=True).decode().strip()
                else:
                    return self._runner(command)

    def get_repo_root(self):
        """Return the absolute path of the repository's top-level directory."""
        return self.run("rev-parse --show-toplevel")

    def get_url_with_credentials(self, url):
        """Return *url* with the stored username/password embedded.

        The URL is returned unchanged when no credentials were configured or
        when it already carries a password.
        """
        if not self._username or not self._password:
            return url
        if urlparse(url).password:
            return url

        user_enc = quote_plus(self._username)
        pwd_enc = quote_plus(self._password)
        url = url.replace("://", "://" + user_enc + ":" + pwd_enc + "@", 1)
        return url

    def _configure_ssl_verify(self):
        # Apply the verify_ssl flag to the local repo's http.sslVerify setting.
        return self.run("config http.sslVerify %s" % ("true" if self._verify_ssl else "false"))

    def clone(self, url, branch=None):
        """Clone *url* into self.folder, returning git's combined output.

        If the destination folder is non-empty, a *branch* is required and
        the repo is assembled via init/remote add/fetch/checkout -t instead
        of a plain clone.
        """
        url = self.get_url_with_credentials(url)
        if os.path.exists(url):
            url = url.replace("\\", "/")  # Windows local directory
        if os.path.exists(self.folder) and os.listdir(self.folder):
            if not branch:
                raise ConanException("The destination folder '%s' is not empty, "
                                     "specify a branch to checkout (not a tag or commit) "
                                     "or specify a 'subfolder' "
                                     "attribute in the 'scm'" % self.folder)
            output = self.run("init")
            output += self._configure_ssl_verify()
            output += self.run('remote add origin "%s"' % url)
            output += self.run("fetch ")
            output += self.run("checkout -t origin/%s" % branch)
        else:
            branch_cmd = "--branch %s" % branch if branch else ""
            output = self.run('clone "%s" . %s' % (url, branch_cmd))
            output += self._configure_ssl_verify()

        return output

    def checkout(self, element, submodule=None):
        """Check out *element* and optionally update submodules.

        *submodule* may be "shallow" (direct submodules only) or "recursive".
        """
        self._check_git_repo()
        output = self.run('checkout "%s"' % element)

        if submodule:
            if submodule == "shallow":
                output += self.run("submodule sync")
                output += self.run("submodule update --init")
            elif submodule == "recursive":
                output += self.run("submodule sync --recursive")
                output += self.run("submodule update --init --recursive")
            else:
                raise ConanException("Invalid 'submodule' attribute value in the 'scm'. "
                                     "Unknown value '%s'. Allowed values: ['shallow', 'recursive']" % submodule)
        # Element can be a tag, branch or commit
        return output

    def excluded_files(self):
        """Return the paths under self.folder that git would ignore.

        Feeds every path in the tree to ``git check-ignore --stdin``.
        # NOTE(review): Popen.communicate does not raise CalledProcessError,
        # so this except clause looks unreachable -- confirm intent.
        """
        try:

            file_paths = [os.path.normpath(os.path.join(os.path.relpath(folder, self.folder), el)).replace("\\", "/")
                          for folder, dirpaths, fs in os.walk(self.folder)
                          for el in fs + dirpaths]
            p = subprocess.Popen(['git', 'check-ignore', '--stdin'],
                                 stdout=PIPE, stdin=PIPE, stderr=STDOUT, cwd=self.folder)
            paths = to_file_bytes("\n".join(file_paths))
            grep_stdout = decode_text(p.communicate(input=paths)[0])
            tmp = grep_stdout.splitlines()
        except CalledProcessError:
            tmp = []
        return tmp

    def get_remote_url(self, remote_name=None):
        """Return the fetch/push URL of *remote_name* (default "origin"),
        or None if it cannot be determined."""
        self._check_git_repo()
        remote_name = remote_name or "origin"
        try:
            remotes = self.run("remote -v")
            for remote in remotes.splitlines():
                try:
                    # Each line looks like "<name>\t<url> (fetch|push)".
                    name, url = remote.split(None, 1)
                    url, _ = url.rsplit(None, 1)
                    if name == remote_name:
                        return url
                except Exception:
                    pass
        except subprocess.CalledProcessError:
            pass
        return None

    def get_commit(self):
        """Return the SHA of HEAD; raise ConanException on failure."""
        self._check_git_repo()
        try:
            commit = self.run("rev-parse HEAD")
            commit = commit.strip()
            return commit
        except Exception as e:
            raise ConanException("Unable to get git commit from %s\n%s" % (self.folder, str(e)))

    get_revision = get_commit

    def _check_git_repo(self):
        # Cheap sanity check that self.folder is inside a git work tree.
        try:
            self.run("status")
        except Exception:
            raise ConanException("Not a valid git repository")

    def get_branch(self):
        """Return the current branch name (parsed from ``status -bs``)."""
        self._check_git_repo()
        try:
            status = self.run("status -bs --porcelain")
            # ## feature/scm_branch...myorigin/feature/scm_branch
            branch = status.splitlines()[0].split("...")[0].strip("#").strip()
            return branch
        except Exception as e:
            raise ConanException("Unable to get git branch from %s\n%s" % (self.folder, str(e)))
|
import pycmds.project.classes as pc
import pycmds.hardware.hardware as hw
import pathlib
import appdirs
import toml
import yaqc
class Driver(hw.Driver):
    """Driver for a yaq-controlled monochromator/spectrometer.

    Talks to a yaqd daemon on the port given by the ``yaqd_port`` kwarg.
    """

    def __init__(self, *args, **kwargs):
        # Pop yaqd_port before delegating so the base class never sees it.
        self._yaqd_port = kwargs.pop("yaqd_port")
        super().__init__(*args, **kwargs)
        # User-selectable grating turret position (1-based in the UI).
        self.grating_index = pc.Combo(
            name="Grating",
            allowed_values=[1, 2],
            section=self.name,
            option="grating_index",
            display=True,
            set_method="set_turret",
        )
        self.exposed.append(self.grating_index)

    def get_position(self):
        """Read the current position from the daemon and return it."""
        native_position = self.ctrl.get_position()
        self.position.write(native_position, self.native_units)
        return self.position.read()

    def initialize(self, *args, **kwargs):
        """Connect to the yaq daemon and populate initial state."""
        # open control
        self.ctrl = yaqc.Client(self._yaqd_port)
        # import some information from control
        id_dict = self.ctrl.id()
        self.serial_number = id_dict["serial"]
        self.position.write(self.ctrl.get_position())
        # recorded
        self.recorded[self.name] = [self.position, self.native_units, 1.0, "m", False]
        self.wait_until_still()
        # finish
        self.initialized.write(True)
        self.initialized_signal.emit()

    def is_busy(self):
        """Return whether the daemon reports the hardware as busy."""
        return self.ctrl.busy()

    def set_position(self, destination):
        """Move to *destination* (native units) and block until still."""
        self.ctrl.set_position(float(destination))
        self.wait_until_still()

    def set_turret(self, destination_index):
        """Switch to grating *destination_index* (1-based) and refresh limits."""
        if type(destination_index) == list:
            destination_index = destination_index[0]
        # turret index on ActiveX call starts from zero
        destination_index_zero_based = int(destination_index) - 1
        self.ctrl.set_turret(destination_index_zero_based)
        self.grating_index.write(destination_index)
        self.wait_until_still()
        # Limits can differ per grating, so re-read them after switching.
        self.limits.write(*self.ctrl.get_limits(), self.native_units)
class GUI(hw.GUI):
    """Spectrometer GUI; no specialization beyond the generic hardware GUI."""
    pass
class Hardware(hw.Hardware):
    """Hardware wrapper that tags itself as a spectrometer."""

    def __init__(self, *args, **kwargs):
        # Set kind before base-class init so it is available during setup.
        self.kind = "spectrometer"
        hw.Hardware.__init__(self, *args, **kwargs)
# Module-level wiring: load the user's pycmds config and instantiate all
# configured spectrometer hardwares with the Driver/GUI/Hardware classes
# defined above.
conf = pathlib.Path(appdirs.user_config_dir("pycmds", "pycmds")) / "config.toml"
conf = toml.load(conf)
hardwares, gui, advanced_gui = hw.import_hardwares(
    conf.get("hardware", {}).get("spectrometers", {}),
    name="Spectrometers",
    Driver=Driver,
    GUI=GUI,
    Hardware=Hardware,
)
|
u"""
Fixer for Python 3 function parameter syntax
This fixer is rather sensitive to incorrect py3k syntax.
"""
from lib2to3 import fixer_base
from lib2to3.fixer_util import token, String, Newline, Comma, Name
from libfuturize.fixer_util import indentation, suitify, DoubleStar
# Code templates emitted into the rewritten function body:
# _assign_template pops a keyword-only argument out of the kwargs dict;
# _if_template guards that pop on the key's presence; _else_template applies
# the declared default.  _kwargs_default_name is the synthetic **kwargs name
# used when the original signature had none.
_assign_template = u"%(name)s = %(kwargs)s['%(name)s']; del %(kwargs)s['%(name)s']"
_if_template = u"if '%(name)s' in %(kwargs)s: %(assign)s"
_else_template = u"else: %(name)s = %(default)s"
_kwargs_default_name = u"_3to2kwargs"
def gen_params(raw_params):
    u"""
    Generator that yields tuples of (name, default_value) for each
    keyword-only parameter in the list.

    If no default is given, default_value is None (the Python value, not
    Leaf(token.NAME, 'None')).
    """
    assert raw_params[0].type == token.STAR and len(raw_params) > 2
    curr_idx = 2  # the first place a keyword-only parameter name can be is index 2
    max_idx = len(raw_params)
    while curr_idx < max_idx:
        curr_item = raw_params[curr_idx]
        prev_item = curr_item.prev_sibling
        if curr_item.type != token.NAME:
            # Skip commas, '=' signs and default-value leaves.
            curr_idx += 1
            continue
        if prev_item is not None and prev_item.type == token.DOUBLESTAR:
            # Reached **kwargs; everything after is not keyword-only.
            break
        name = curr_item.value
        nxt = curr_item.next_sibling
        if nxt is not None and nxt.type == token.EQUAL:
            # "name = default": the default leaf follows the '='.
            default_value = nxt.next_sibling
            curr_idx += 2
        else:
            default_value = None
        yield (name, default_value)
        curr_idx += 1
def remove_params(raw_params, kwargs_default=_kwargs_default_name):
    u"""
    Removes all keyword-only args from the params list and a bare star, if any.
    Does not add the kwargs dict if needed.
    Returns True if more action is needed, False if not
    (more action is needed if no kwargs dict exists)
    """
    assert raw_params[0].type == token.STAR
    if raw_params[1].type == token.COMMA:
        # Bare star: "*, name, ..." -- drop the star and its comma.
        raw_params[0].remove()
        raw_params[1].remove()
        kw_params = raw_params[2:]
    else:
        # "*args, name, ..." -- keep *args, skip past it and its comma.
        kw_params = raw_params[3:]
    for param in kw_params:
        if param.type != token.DOUBLESTAR:
            param.remove()
        else:
            # Hit **kwargs: a kwargs dict already exists, nothing more to do.
            return False
    else:
        # Loop ran to completion without finding **kwargs: caller must add one.
        return True
def needs_fixing(raw_params, kwargs_default=_kwargs_default_name):
    u"""
    Returns string with the name of the kwargs dict if the params after the first star need fixing
    Otherwise returns empty string.

    # NOTE(review): if the loop over raw_params[2:] completes without taking
    # a return branch, the function implicitly returns None, which callers
    # treat the same as the empty string (falsy) -- confirm this is intended.
    """
    found_kwargs = False
    needs_fix = False

    for t in raw_params[2:]:
        if t.type == token.COMMA:
            # Commas are irrelevant at this stage.
            continue
        elif t.type == token.NAME and not found_kwargs:
            # Keyword-only argument: definitely need to fix.
            needs_fix = True
        elif t.type == token.NAME and found_kwargs:
            # Return 'foobar' of **foobar, if needed.
            return t.value if needs_fix else u''
        elif t.type == token.DOUBLESTAR:
            # Found the '**' of **foobar; the next NAME is the dict's name.
            found_kwargs = True
        else:
            # Never found **foobar. Return a synthetic name, if needed.
            return kwargs_default if needs_fix else u''
class FixKwargs(fixer_base.BaseFix):
    """Fixer that rewrites keyword-only arguments (PEP 3102) into explicit
    **kwargs unpacking statements at the top of the function body.

    Relies on module-level helpers and templates (_assign_template,
    _if_template, _else_template, suitify, indentation, gen_params) defined
    earlier in this file.
    """

    run_order = 7  # Run after function annotations are removed

    PATTERN = u"funcdef< 'def' NAME parameters< '(' arglist=typedargslist< params=any* > ')' > ':' suite=any >"

    def transform(self, node, results):
        """Insert kwargs-unpacking assignments and strip keyword-only params."""
        params_rawlist = results[u"params"]
        # Locate the first bare/vararg star; everything after it is the
        # keyword-only section we care about.
        for i, item in enumerate(params_rawlist):
            if item.type == token.STAR:
                params_rawlist = params_rawlist[i:]
                break
        else:
            # No star at all: nothing to fix.
            return
        # params is guaranteed to be a list starting with *.
        # if fixing is needed, there will be at least 3 items in this list:
        # [STAR, COMMA, NAME] is the minimum that we need to worry about.
        new_kwargs = needs_fixing(params_rawlist)
        # new_kwargs is the name of the kwargs dictionary.
        if not new_kwargs:
            return
        suitify(node)
        # At this point, params_rawlist is guaranteed to be a list
        # beginning with a star that includes at least one keyword-only param
        # e.g., [STAR, NAME, COMMA, NAME, COMMA, DOUBLESTAR, NAME] or
        # [STAR, COMMA, NAME], or [STAR, COMMA, NAME, COMMA, DOUBLESTAR, NAME]
        # Anatomy of a funcdef: ['def', 'name', parameters, ':', suite]
        # Anatomy of that suite: [NEWLINE, INDENT, first_stmt, all_other_stmts]
        # We need to insert our new stuff before the first_stmt and change the
        # first_stmt's prefix.
        suite = node.children[4]
        first_stmt = suite.children[2]
        ident = indentation(first_stmt)
        for name, default_value in gen_params(params_rawlist):
            # insert_child(2, ...) prepends, so each param's statements are
            # emitted in reverse order to end up in source order.
            if default_value is None:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_assign_template % {u'name': name, u'kwargs': new_kwargs}, prefix=ident))
            else:
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(_else_template % {u'name': name, u'default': default_value}, prefix=ident))
                suite.insert_child(2, Newline())
                suite.insert_child(2, String(
                    _if_template % {u'assign': _assign_template % {u'name': name, u'kwargs': new_kwargs}, u'name': name,
                                    u'kwargs': new_kwargs}, prefix=ident))
        first_stmt.prefix = ident
        suite.children[2].prefix = u""
        # Now, we need to fix up the list of params.
        must_add_kwargs = remove_params(params_rawlist)
        if must_add_kwargs:
            arglist = results[u'arglist']
            if len(arglist.children) > 0 and arglist.children[-1].type != token.COMMA:
                arglist.append_child(Comma())
            arglist.append_child(DoubleStar(prefix=u" "))
            arglist.append_child(Name(new_kwargs))
|
__author__ = 'Radoslaw Matusiak'
__copyright__ = 'Copyright (c) 2016 Radoslaw Matusiak'
__license__ = 'MIT'
__version__ = '0.5'
import cmd
import functools
import os
import sys
from polar import Device
from polar.pb import device_pb2 as pb_device
__INTRO = """
_| _| _|
_| _|_| _|_| _|_|_| _|_|_| _|_| _| _|_|
_| _| _| _| _| _| _| _| _| _| _| _| _|_|_|_|
_| _| _| _| _| _| _| _| _| _| _| _| _|
_| _|_| _|_| _|_|_| _| _| _|_| _| _|_|_|
_|
_|
ver. {}
"""
def check_if_device_is_connected(f):
    """
    Decorator. Checks if device is connected before invoking function.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        # args[0] is the LoopholeCli instance (the decorated callables are
        # methods), so this checks its ``device`` attribute.
        if args[0].device is not None:
            return f(*args, **kwargs)
        else:
            print '[!] Device disconnected.'
            print
    return wrapper
class LoopholeCli(cmd.Cmd):
    """ Loophole command line interface class.

    Interactive shell (Python 2) around ``polar.Device``: lists, connects
    to and reads/writes files on Polar USB devices.  Each ``do_*`` method
    is a shell command; most are guarded by ``check_if_device_is_connected``.
    """
    # Prompt template; '{}' is replaced with the connected serial number.
    __PROMPT = 'loophole({})>'
    def __init__(self):
        """Constructor.
        """
        cmd.Cmd.__init__(self)
        self.prompt = LoopholeCli.__PROMPT.format('no device')
        # Currently connected polar.Device, or None when disconnected.
        self.device = None
    # end-of-method __init__
    def do_exit(self, _):
        """Quit.
        Usage: exit
        """
        if self.device is not None:
            self.device.close()
        sys.exit(0)
    # end-of-method do_exit
    def do_EOF(self, _):
        """Quit. handles EOF"""
        self.do_exit(_)
    # end-of-method do_EOF
    def do_list(self, _):
        """List available Polar devices.
        Usage: list
        """
        devs = Device.list()
        if len(devs) > 0:
            for i, dev in enumerate(devs):
                try:
                    info = Device.get_info(dev)
                except ValueError as err:
                    print "Device no: %i" % i
                    print "Device info:"
                    print dev
                    print "-"*79
                    # A 'langid' error usually means missing USB permissions.
                    if 'langid' in err.message:
                        raise ValueError(
                            (
                                "Can't get device info. Origin Error: %s\n"
                                "Maybe this is a permission issue.\n"
                                "Please read section 'permission' in README ;)"
                            ) % err
                        )
                    raise # raise origin error
                print '{} - {} ({})'.format(i, info['product_name'], info['serial_number'])
        else:
            print '[!] No Polar devices found!'
        print
    # end-of-method do_list
    def do_connect(self, dev_no):
        """Connect Polar device. Run 'list' to see available devices.
        Usage: connect <device_no>
        """
        try:
            dev_no = int(dev_no)
        except ValueError:
            print '[!] You need to specify the device number. Run \'list\' to see available devices.'
            print
            return
        try:
            devs = Device.list()
            dev = devs[dev_no]
            serial = Device.get_info(dev)['serial_number']
            self.prompt = LoopholeCli.__PROMPT.format(serial)
            self.device = Device(dev)
            self.device.open()
            print '[+] Device connected.'
            print
        except IndexError:
            print '[!] Device not found or failed to open it. Run \'list\' to see available devices.'
            print
    # end-of-method do_connect
    @check_if_device_is_connected
    def do_disconnect(self, _):
        """Disconnect Polar device.
        """
        self.device.close()
        self.device = None
        self.prompt = LoopholeCli.__PROMPT.format('no device')
        print '[+] Device disconnected.'
        print
    # end-of-method do_disconnect
    @check_if_device_is_connected
    def do_get(self, line):
        """Read file from device and store in under local_path.
        Usage: get <device_path> <local_path>
        """
        try:
            src, dest = line.strip().split()
            data = self.device.read_file(src)
            # read_file returns a sequence of byte values; bytearray packs
            # them for binary writing.
            with open(dest, 'wb') as outfile:
                outfile.write(bytearray(data))
            print '[+] File \'{}\' saved to \'{}\''.format(src, dest)
            print
        except ValueError:
            print '[!] Invalid command usage.'
            print '[!] Usage: get <source> <destination>'
            print
    # end-of-method do_get
    @check_if_device_is_connected
    def do_delete(self, line):
        """Delete file from device.
        Usage: delete <device_path>
        """
        path = line.strip()
        _ = self.device.delete(path)
    # end-of-method do_delete
    @check_if_device_is_connected
    def do_dump(self, path):
        """Dump device memory. Path is local folder to store dump.
        Usage: dump <local_path>
        """
        print '[+] Reading files tree...'
        dev_map = self.device.walk(self.device.SEP)
        for directory in dev_map.keys():
            # Translate device path separators into local OS separators;
            # strip the leading separator so join() stays under `path`.
            fixed_directory = directory.replace(self.device.SEP, os.sep)
            full_path = os.path.abspath(os.path.join(path, fixed_directory[1:]))
            if not os.path.exists(full_path):
                os.makedirs(full_path)
            d = dev_map[directory]
            # Entries ending in '/' are sub-directories, not files.
            files = [e for e in d.entries if not e.name.endswith('/')]
            for file in files:
                with open(os.path.join(full_path, file.name), 'wb') as fh:
                    print '[+] Dumping {}{}'.format(directory, file.name)
                    data = self.device.read_file('{}{}'.format(directory, file.name))
                    fh.write(bytearray(data))
        print '[+] Device memory dumped.'
        print
    # end-of-method do_dump
    @check_if_device_is_connected
    def do_info(self, _):
        """Print connected device info.
        Usage: info
        """
        info = Device.get_info(self.device.usb_device)
        print '{:>20s} - {}'.format('Manufacturer', info['manufacturer'])
        print '{:>20s} - {}'.format('Product name', info['product_name'])
        print '{:>20s} - {}'.format('Vendor ID', info['vendor_id'])
        print '{:>20s} - {}'.format('Product ID', info['product_id'])
        print '{:>20s} - {}'.format('Serial number', info['serial_number'])
        try:
            # Extended info lives in a protobuf file on the device.
            data = self.device.read_file('/DEVICE.BPB')
            resp = ''.join(chr(c) for c in data)
            d = pb_device.PbDeviceInfo()
            d.ParseFromString(resp)
            bootloader_version = '{}.{}.{}'.format(d.bootloader_version.major, d.bootloader_version.minor, d.bootloader_version.patch)
            print '{:>20s} - {}'.format('Bootloader version', bootloader_version)
            platform_version = '{}.{}.{}'.format(d.platform_version.major, d.platform_version.minor, d.platform_version.patch)
            print '{:>20s} - {}'.format('Platform version', platform_version)
            device_version = '{}.{}.{}'.format(d.device_version.major, d.device_version.minor, d.device_version.patch)
            print '{:>20s} - {}'.format('Device version', device_version)
            print '{:>20s} - {}'.format('SVN revision', d.svn_rev)
            print '{:>20s} - {}'.format('Hardware code', d.hardware_code)
            print '{:>20s} - {}'.format('Color', d.product_color)
            print '{:>20s} - {}'.format('Product design', d.product_design)
        except:
            # NOTE(review): bare except deliberately treats extended info as
            # best-effort, but it also hides real bugs -- consider narrowing.
            print '[!] Failed to get extended info.'
        print ' '
    # end-of-method do_info
    @check_if_device_is_connected
    def do_fuzz(self, _):
        """Send raw PFTP request frames to probe device responses.
        Usage: fuzz [request_no]   (no argument sweeps 0..255)
        """
        import polar
        num = _.strip()
        if len(num) > 0:
            num = int(num)
            resp = self.device.send_raw([0x01, num] + [0x00] * 62)
            print 'req: {} '.format(num),
            if resp:
                print 'err code: {}'.format(polar.PFTP_ERROR[resp[0]])
            return
        for i in xrange(256):
            #raw_input('Sending [{}]...<press enter>'.format(i))
            # NOTE(review): these skips presumably avoid request codes that
            # hang or reset the device -- confirm before changing.
            if (i & 0x03) == 2:
                continue
            if i in [3, 251, 252]:
                continue
            resp = self.device.send_raw([0x01, i] + [0x00] * 62)
            print 'resp: {} '.format(i),
            if resp:
                print 'err code: {}'.format(polar.PFTP_ERROR[resp[0]])
            else:
                print
    # end-of-method do_fuzz
    @check_if_device_is_connected
    def do_put_file(self, line):
        """Write a local file to the device.
        Usage: put_file <device_path> <local_filename>
        """
        path, filename = line.split()
        self.device.put_file(path.strip(), filename.strip())
    # end-of-method do_put_file
    @check_if_device_is_connected
    def do_walk(self, path):
        """Walk file system. Default device_path is device root folder.
        Usage: walk [device_path]
        """
        if not path.endswith('/'):
            path += '/'
        fs = self.device.walk(path)
        # Python 2: dict.keys() returns a sortable list.
        keyz = fs.keys()
        keyz.sort()
        for k in keyz:
            print k
            d = fs[k]
            files = [e for e in d.entries if not e.name.endswith('/')]
            files.sort()
            for f in files:
                print '{}{} ({} bytes)'.format(k, f.name, f.size)
        print
    # end-of-method do_walk
    pass
def main():
    """Entry point: start the interactive loophole shell with the banner."""
    cli = LoopholeCli()
    cli.cmdloop(__INTRO.format(__version__))
# Run the CLI only when executed as a script.
if __name__ == '__main__':
    main()
|
import json
import sublime
import sublime_plugin
from .edit import Edit
class PawnBuildPathCommand(sublime_plugin.TextCommand):
    """Prompt for the directory containing pawncc.exe and generate a
    ``Pawn.sublime-build`` build-system file pointing at it."""

    def run(self, edit):
        # Ask the user for the compiler directory; the reply is delivered
        # asynchronously to onPawnPathDone.
        window = self.view.window()
        window.show_input_panel(
            "Working directory that contains pawncc.exe",
            "C:\\Pawno\\",
            self.onPawnPathDone,
            None,
            None
        )

    def onPawnPathDone(self, path):
        # Build the Sublime build-system definition and save it in a new tab.
        target = self.view.window().new_file()
        normalized = path.replace("\\", "/")
        build_config = {
            "cmd": [
                "pawncc.exe",
                "$file",
                "-o$file_path/$file_base_name",
                "-;+",
                "-(+",
                "-d3"
            ],
            "file_regex": r"(.*?)\(([0-9]*)[- 0-9]*\)",
            "selector": "source.pwn",
            "working_dir": normalized
        }
        with Edit(target) as edit:
            edit.insert(0, json.dumps(build_config, indent=4))
        target.set_name("Pawn.sublime-build")
        target.run_command("save")
|
class node:
    """Pass-through element: forwards set/clear to every registered output."""

    def __init__(self):
        self.outputs = []

    def set(self):
        """Assert the signal on all downstream outputs."""
        for downstream in self.outputs:
            downstream.set()

    def clear(self):
        """Drop the signal on all downstream outputs."""
        for downstream in self.outputs:
            downstream.clear()
class switch:
    """A gate in the signal chain: conducts set() only while closed.

    ``input`` remembers whether the upstream signal is currently asserted;
    ``state`` is True while the switch is closed (conducting).
    """

    def __init__(self):
        self.outputs = []    # downstream elements to forward signals to
        self.state = False   # True when the switch is closed (conducting)
        self.input = False   # True while the upstream signal is asserted

    def set(self):
        # Remember the upstream signal; forward only if we are closed.
        self.input = True
        if self.state:
            for out in self.outputs:
                out.set()

    def clear(self):
        # Upstream signal dropped; always propagate the clear downstream.
        self.input = False
        for out in self.outputs:
            out.clear()

    def open(self):
        # Opening cuts the circuit: downstream must be cleared.
        self.state = False
        for out in self.outputs:
            out.clear()

    def close(self):
        # BUG FIX: close() previously forced self.input = True and never
        # updated self.state, so a later set() still would not conduct and
        # close() wrongly re-asserted the signal even after clear().
        # Closing must record the conducting state and forward the signal
        # only if the upstream input is currently asserted.
        self.state = True
        if self.input:
            for out in self.outputs:
                out.set()
class light:
    """Terminal element: reports signal changes and forwards them onward."""

    def __init__(self):
        self.outputs = []

    def set(self):
        """Report and forward an asserted signal."""
        print('light set')
        for downstream in self.outputs:
            downstream.set()

    def clear(self):
        """Report and forward a dropped signal."""
        print('light cleared')
        for downstream in self.outputs:
            downstream.clear()
if __name__ == '__main__':
    # Demo: wire a chain a -> s(witch) -> b -> l(ight) and drive it.
    a=node()
    s=switch()
    b=node()
    l=light()
    a.outputs.append(s)
    s.outputs.append(b)
    b.outputs.append(l)
    a.set()            # switch is open, so nothing reaches the light yet
    s.close()          # closing the switch lets the pending signal through
    print('switch close')
    s.open()           # opening clears everything downstream
|
from math import sqrt
def is_prime(x):
    """Return True if x is prime, using trial division up to sqrt(x).

    BUG FIX: the original returned True for x < 2 (0, 1 and negatives were
    reported as prime, because the loop body never executed).  Also uses
    range() instead of xrange() -- equivalent here and valid on both
    Python 2 and 3.
    """
    if x < 2:
        return False
    for i in range(2, int(sqrt(x) + 1)):
        if x % i == 0:
            return False
    return True
def rotate(v):
    """Return every digit rotation of v except v itself, in rotation order.

    Rotations with leading zeros collapse numerically (e.g. 10 -> [1]).
    """
    rotations = []
    current = str(v)
    while True:
        # Move the leading digit to the end.
        current = current[1:] + current[0]
        candidate = int(current)
        if candidate == v:
            break
        rotations.append(candidate)
    return rotations
# Count circular primes below one million (Project Euler 35): a prime is
# circular if every rotation of its digits is also prime.
MILLION = 1000000
# Python 2: filter() returns a list here, so `primes` can be consumed by
# set() below and then iterated again.
primes = filter(is_prime, range(2, MILLION))
s = set(primes)
ans = 0
for item in primes:
    flag = True
    print item
    # A single non-prime rotation disqualifies the candidate.
    for y in rotate(item):
        if y not in s:
            flag = False
    if flag:
        ans += 1
print ans
|
"""
Copyright (c) 2016 Genome Research Ltd.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import time
import sys
import json
try:
import urllib.request as request
from urllib.error import HTTPError
except ImportError:
import urllib2 as request
from urllib2 import HTTPError
import pandas
PREV_TIME = time.time()
IS_PYTHON3 = sys.version[0] == "3"
def fix_missing_gene_symbols(de_novos, build='grch37'):
    """ adds gene symbols to variants lacking them.
    Args:
        de_novos: dataframe of de novo variants
        build: whether to use the 'grch37' or 'grch38' build (default=GRCh37)
    Returns:
        pandas Series of HGNC symbols, with additional annotations for many
        variants previously lacking a HGNC symbol.
    """
    symbols = de_novos["symbol"].copy()
    # get the variants with no gene annotation, ensure chrom, start and stop
    # positions columns exist
    missing = de_novos[symbols == ""].copy()
    # End coordinate spans the reference allele (for SNVs, end == pos).
    missing['end'] = missing["pos"] + missing["ref"].str.len() - 1
    # find the HGNC symbols (if any) for the variants
    missing = [ get_gene_id(x["chrom"], x["pos"], x['end'], build=build, verbose=True) for i, x in missing.iterrows() ]
    # Assign by boolean mask; the list order matches the iteration order of
    # the masked rows above, so symbols line up with their variants.
    symbols[de_novos["symbol"] == ""] = missing
    # 360 out of 17000 de novos still lack HGNC symbols. Their consequences are:
    #
    # consequence               count
    # ======================== =====
    # downstream_gene_variant     17
    # intergenic_variant         259
    # regulatory_region_variant   63
    # upstream_gene_variant       27
    #
    # In spot checks, these are sufficiently distant from genes that we can't
    # add them to the analysis of their nearest gene. We shall analyse these
    # per site by giving them mock gene symbols.
    missing = de_novos[symbols == ""].copy()
    fake = 'fake_symbol.' + missing['chrom'].map(str) + '_' + missing["pos"].map(str)
    symbols[symbols == ""] = fake
    return symbols
def open_url(url, headers):
    """GET *url* with the given headers via urllib.

    HTTP error responses are not raised: the HTTPError object doubles as a
    response handle (it exposes getcode(), read() and headers), so callers
    can inspect the status code themselves.

    Returns:
        tuple of (response body, HTTP status code, lower-cased header dict)
    """
    req = request.Request(url, headers=headers)
    try:
        handle = request.urlopen(req)
    except HTTPError as err:
        handle = err
    code = handle.getcode()
    body = handle.read()
    if IS_PYTHON3:
        body = body.decode("utf-8")
    # Normalise header names to lower case for case-insensitive lookup.
    lowered = {key.lower(): value for key, value in
               zip(handle.headers.keys(), handle.headers.values())}
    return body, code, lowered
def rate_limit_requests(rate_limit=0.0667):
    """ limit ensembl requests to one per 0.067 s

    Sleeps just long enough that successive calls are spaced at least
    ``rate_limit`` seconds apart, then records the current time in the
    module-level PREV_TIME.
    """
    global PREV_TIME
    remaining = rate_limit - (time.time() - PREV_TIME)
    if remaining > 0:
        time.sleep(remaining)
    PREV_TIME = time.time()
def get_gene_id(chrom, start_pos, end_pos, build="grch37", verbose=False, attempts=0):
    """find the hgnc symbol overlapping a variant position
    Args:
        variant: data frame or list for a variant, containing columns named
            "chrom", "start_pos", and "end_pos" for a single variant
        build: genome build to find consequences on
        verbose: flag indicating whether to print variants as they are checked
        attempts: retry counter, incremented on each recursive retry
    Returns:
        a character string containing the HGNC symbol.
    Raises:
        ValueError: after five failed attempts, or on an unexpected HTTP
            status from Ensembl.
    """
    attempts += 1
    if attempts > 5:
        raise ValueError("too many attempts, figure out why its failing")
    # Stay under the Ensembl REST rate limit before every request.
    rate_limit_requests()
    # define parts of the URL
    ext = "overlap/region/human/{0}:{1}-{2}?feature=gene".format(chrom, start_pos, end_pos)
    # GRCh37 queries go to the dedicated grch37 subdomain; GRCh38 is default.
    server_dict = {"grch37": "grch37.", "grch38": ""}
    base_url = "http://{}rest.ensembl.org".format(server_dict[build])
    url = "{0}/{1}".format(base_url, ext)
    headers = {"Content-Type" : "application/json"}
    if verbose:
        print("chr{0}:{1} {2}".format(chrom, start_pos, ext))
    response, status_code, requested_headers = open_url(url, headers)
    if status_code == 429:
        # Rate limited: honour whichever back-off header the server sent,
        # then retry recursively (attempts caps the recursion).
        if "retry-after" in requested_headers:
            time.sleep(float(requested_headers["retry-after"]))
        elif "x-ratelimit-reset" in requested_headers:
            time.sleep(int(requested_headers["x-ratelimit-reset"]))
        return get_gene_id(chrom, start_pos, end_pos, build, verbose, attempts)
    elif status_code in [503, 504]:
        # Server temporarily unavailable: wait and retry.
        time.sleep(30)
        return get_gene_id(chrom, start_pos, end_pos, build, verbose, attempts)
    elif status_code != 200:
        raise ValueError('Invalid Ensembl response: {0}.\nSubmitted '
            'URL was: {1}{2}\nheaders: {3}\nresponse: {4}'.format(status_code,
            base_url, ext, requested_headers, response))
    json_text = json.loads(response)
    # Use the first overlapping gene, if any; "" means no overlap found.
    if len(json_text) > 0:
        return json_text[0]["external_name"]
    return ""
|
import mock
from tests.compat import unittest
from tests.utils import APITestCase
import evelink.eve as evelink_eve
class EVETestCase(APITestCase):
    """Tests for the evelink EVE endpoint wrappers.

    Each test feeds a canned XML fixture into the mocked API, calls the
    corresponding wrapper and checks the parsed result, the cache
    timestamps (current=12345, expires=67890 come from the fixtures) and
    the exact API call made.
    """
    def setUp(self):
        """Create an EVE wrapper bound to the mocked API client."""
        super(EVETestCase, self).setUp()
        self.eve = evelink_eve.EVE(api=self.api)
    def test_character_names_from_ids(self):
        """eve/CharacterName: bulk ID-to-name lookup."""
        self.api.get.return_value = self.make_api_result("eve/character_name.xml")
        result, current, expires = self.eve.character_names_from_ids(set([1,2]))
        self.assertEqual(result, {1:"EVE System", 2:"EVE Central Bank"})
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/CharacterName', params={'IDs': set([1,2])}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_character_name_from_id(self):
        """eve/CharacterName: single-ID convenience lookup."""
        self.api.get.return_value = self.make_api_result("eve/character_name_single.xml")
        result, current, expires = self.eve.character_name_from_id(1)
        self.assertEqual(result, "EVE System")
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/CharacterName', params={'IDs': [1]}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_character_ids_from_names(self):
        """eve/CharacterID: bulk name-to-ID lookup."""
        self.api.get.return_value = self.make_api_result("eve/character_id.xml")
        result, current, expires = self.eve.character_ids_from_names(set(["EVE System", "EVE Central Bank"]))
        self.assertEqual(result, {"EVE System":1, "EVE Central Bank":2})
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/CharacterID', params={'names': set(["EVE System","EVE Central Bank"])}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_character_id_from_name(self):
        """eve/CharacterID: single-name convenience lookup."""
        self.api.get.return_value = self.make_api_result("eve/character_id_single.xml")
        result, current, expires = self.eve.character_id_from_name("EVE System")
        self.assertEqual(result, 1)
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/CharacterID', params={'names': ["EVE System"]}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_affiliations_for_characters(self):
        """eve/CharacterAffiliation: bulk affiliation parsing (faction,
        alliance and corp sub-dicts are optional per character)."""
        self.api.get.return_value = self.make_api_result("eve/character_affiliation.xml")
        result, current, expires = self.eve.affiliations_for_characters(set([92168909, 401111892, 1979087900]))
        self.assertEqual(result, {
                1979087900: {
                    'id': 1979087900,
                    'name': 'Marcel Devereux',
                    'faction': {
                        'id': 500004,
                        'name': 'Gallente Federation'
                    },
                    'corp': {
                        'id': 1894214152,
                        'name': 'Aideron Robotics'
                    }
                },
                401111892: {
                    'id': 401111892,
                    'name': 'ShadowMaster',
                    'alliance': {
                        'id': 99000652,
                        'name': 'RvB - BLUE Republic'
                    },
                    'corp': {
                        'id': 1741770561,
                        'name': 'Blue Republic'
                    }
                },
                92168909: {
                    'id': 92168909,
                    'name': 'CCP FoxFour',
                    'alliance': {
                        'id': 434243723,
                        'name': 'C C P Alliance'
                    },
                    'corp': {
                        'id': 109299958,
                        'name': 'C C P'
                    }
                }
            })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/CharacterAffiliation', params={'ids': set([92168909, 401111892, 1979087900])})
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_affiliations_for_character(self):
        """eve/CharacterAffiliation: single-character convenience lookup."""
        self.api.get.return_value = self.make_api_result("eve/character_affiliation_single.xml")
        result, current, expires = self.eve.affiliations_for_character(92168909)
        self.assertEqual(result, {
                'id': 92168909,
                'name': 'CCP FoxFour',
                'alliance': {
                    'id': 434243723,
                    'name': 'C C P Alliance'
                },
                'corp': {
                    'id': 109299958,
                    'name': 'C C P'
                }
            })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/CharacterAffiliation', params={'ids': [92168909]})
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_character_info_from_id(self):
        """eve/CharacterInfo: public character sheet, with None for the
        fields only available with an API key."""
        self.api.get.return_value = self.make_api_result("eve/character_info.xml")
        result, current, expires = self.eve.character_info_from_id(1234)
        self.assertEqual(result, {
                'alliance': {'id': None, 'name': None, 'timestamp': None},
                'bloodline': 'Civire',
                'corp': {'id': 2345, 'name': 'Test Corporation', 'timestamp': 1338689400},
                'history': [
                    {'corp_id': 1, 'corp_name': 'test_one', 'start_ts': 1338603000},
                    {'corp_id': 2, 'corp_name': 'test_two', 'start_ts': 1318422896}
                ],
                'id': 1234,
                'isk': None,
                'location': None,
                'name': 'Test Character',
                'race': 'Caldari',
                'sec_status': 2.5,
                'ship': {'name': None, 'type_id': None, 'type_name': None},
                'skillpoints': None,
            })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/CharacterInfo', params={'characterID': 1234}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_alliances(self):
        """eve/AllianceList: alliance roster with member corporations."""
        self.api.get.return_value = self.make_api_result("eve/alliances.xml")
        result, current, expires = self.eve.alliances()
        self.assertEqual(result, {
                1: {
                    'executor_id': 2,
                    'id': 1,
                    'member_corps': {
                        2: {'id': 2, 'timestamp': 1289250660},
                        3: {'id': 3, 'timestamp': 1327728960},
                        4: {'id': 4, 'timestamp': 1292440500},
                    },
                    'member_count': 123,
                    'name': 'Test Alliance',
                    'ticker': 'TEST',
                    'timestamp': 1272717240,
                }
            })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/AllianceList', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_errors(self):
        """eve/ErrorList: error code to message mapping."""
        self.api.get.return_value = self.make_api_result("eve/errors.xml")
        result, current, expires = self.eve.errors()
        self.assertEqual(result, {1:"Foo", 2:"Bar"})
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/ErrorList', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_faction_warfare_stats(self):
        """eve/FacWarStats: global totals, per-faction stats and the war
        pairings between factions."""
        self.api.get.return_value = self.make_api_result("eve/faction_warfare_stats.xml")
        result, current, expires = self.eve.faction_warfare_stats()
        self.assertEqual(result, {
                'kills': {'total': 232772, 'week': 3246, 'yesterday': 677},
                'points': {'total': 44045189, 'week': 414049, 'yesterday': 55087},
                'factions': {
                    500001: {
                        'id': 500001,
                        'kills': {'total': 59239, 'week': 627, 'yesterday': 115},
                        'name': 'Caldari State',
                        'pilots': 5324,
                        'points': {'total': 4506493, 'week': 64548, 'yesterday': 9934},
                        'systems': 61,
                    },
                    500002: {
                        'id': 500002,
                        'kills': {'total': 56736, 'week': 952, 'yesterday': 213},
                        'name': 'Minmatar Republic',
                        'pilots': 4068,
                        'points': {'total': 3627522, 'week': 51211, 'yesterday': 2925},
                        'systems': 0,
                    },
                    500003: {
                        'id': 500003,
                        'kills': {'total': 55717, 'week': 1000, 'yesterday': 225},
                        'name': 'Amarr Empire',
                        'pilots': 3960,
                        'points': {'total': 3670190, 'week': 50518, 'yesterday': 3330},
                        'systems': 11,
                    },
                    500004: {
                        'id': 500004,
                        'kills': {'total': 61080, 'week': 667, 'yesterday': 124},
                        'name': 'Gallente Federation',
                        'pilots': 3663,
                        'points': {'total': 4098366, 'week': 62118, 'yesterday': 10343},
                        'systems': 0,
                    },
                },
                'wars': [
                        {
                            'against': {'id': 500002, 'name': 'Minmatar Republic'},
                            'faction': {'id': 500001, 'name': 'Caldari State'},
                        },
                        {
                            'against': {'id': 500004, 'name': 'Gallente Federation'},
                            'faction': {'id': 500001, 'name': 'Caldari State'},
                        },
                        {
                            'against': {'id': 500001, 'name': 'Caldari State'},
                            'faction': {'id': 500002, 'name': 'Minmatar Republic'},
                        },
                        {
                            'against': {'id': 500003, 'name': 'Amarr Empire'},
                            'faction': {'id': 500002, 'name': 'Minmatar Republic'},
                        },
                        {
                            'against': {'id': 500002, 'name': 'Minmatar Republic'},
                            'faction': {'id': 500003, 'name': 'Amarr Empire'},
                        },
                        {
                            'against': {'id': 500004, 'name': 'Gallente Federation'},
                            'faction': {'id': 500003, 'name': 'Amarr Empire'},
                        },
                        {
                            'against': {'id': 500001, 'name': 'Caldari State'},
                            'faction': {'id': 500004, 'name': 'Gallente Federation'},
                        },
                        {
                            'against': {'id': 500003, 'name': 'Amarr Empire'},
                            'faction': {'id': 500004, 'name': 'Gallente Federation'},
                        }
                    ],
            })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/FacWarStats', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_faction_warfare_leaderboard(self):
        """eve/FacWarTopStats: kill/point leaderboards for characters,
        corporations and factions."""
        self.api.get.return_value = self.make_api_result("eve/faction_warfare_leaderboard.xml")
        result, current, expires = self.eve.faction_warfare_leaderboard()
        self.assertEqual(result, {
                'char': {
                    'kills': {
                        'total': [{'id': 673662188, 'kills': 451, 'name': 'Val Erian'}],
                        'week': [{'id': 187452523, 'kills': 52, 'name': 'Tigrana Blanque'}],
                        'yesterday': [
                            {'id': 1007512845, 'kills': 14, 'name': 'StonedBoy'},
                            {'id': 646053002, 'kills': 11, 'name': 'Erick Voliffe'},
                        ],
                    },
                    'points': {
                        'total': [{'id': 395923478, 'name': 'sasawong', 'points': 197046}],
                        'week': [{'id': 161929388, 'name': 'Ankhesentapemkah', 'points': 20851}],
                        'yesterday': [{'id': 774720050, 'name': 'v3nd3tt4', 'points': 3151}],
                    },
                },
                'corp': {
                    'kills': {
                        'total': [{'id': 673662188, 'kills': 451, 'name': 'Val Erian'}],
                        'week': [{'id': 187452523, 'kills': 52, 'name': 'Tigrana Blanque'}],
                        'yesterday': [
                            {'id': 1007512845, 'kills': 14, 'name': 'StonedBoy'},
                            {'id': 646053002, 'kills': 11, 'name': 'Erick Voliffe'},
                        ],
                    },
                    'points': {
                        'total': [{'id': 395923478, 'name': 'sasawong', 'points': 197046}],
                        'week': [{'id': 161929388, 'name': 'Ankhesentapemkah', 'points': 20851}],
                        'yesterday': [{'id': 774720050, 'name': 'v3nd3tt4', 'points': 3151}],
                    },
                },
                'faction': {
                    'kills': {
                        'total': [{'id': 500004, 'kills': 104, 'name': 'Gallente Federation'}],
                        'week': [{'id': 500004, 'kills': 105, 'name': 'Gallente Federation'}],
                        'yesterday': [{'id': 500004, 'kills': 106, 'name': 'Gallente Federation'}],
                    },
                    'points': {
                        'total': [{'id': 500004, 'points': 101, 'name': 'Gallente Federation'}],
                        'week': [{'id': 500004, 'points': 102, 'name': 'Gallente Federation'}],
                        'yesterday': [{'id': 500004, 'points': 103, 'name': 'Gallente Federation'}],
                    },
                },
            })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/FacWarTopStats', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_conquerable_stations(self):
        """eve/ConquerableStationList: station id/name/type/system/corp."""
        self.api.get.return_value = self.make_api_result("eve/conquerable_stations.xml")
        result, current, expires = self.eve.conquerable_stations()
        self.assertEqual(result, {
                1:{ 'id':1,
                    'name':"Station station station",
                    'type_id':123,
                    'system_id':512,
                    'corp':{
                        'id':444,
                        'name':"Valkyries of Night" }
                    },
                2:{ 'id':2,
                    'name':"Station the station",
                    'type_id':42,
                    'system_id':503,
                    'corp':{
                        'id':400,
                        'name':"Deus Fides Empire"}
                    }
            })
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/ConquerableStationlist', params={}),
            ])
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
    def test_skill_tree(self):
        """eve/SkillTree: skill groups, per-skill attributes, bonuses and
        prerequisite chains."""
        self.api.get.return_value = self.make_api_result("eve/skill_tree.xml")
        result, current, expires = self.eve.skill_tree()
        self.assertEqual(result, {
                255: {
                    'id': 255,
                    'name': 'Gunnery',
                    'skills': {
                        3300: {
                            'attributes': {
                                'primary': 'perception',
                                'secondary': 'willpower',
                            },
                            'bonuses': {
                                'turretSpeeBonus': {
                                    'type': 'turretSpeeBonus',
                                    'value': -2.0,
                                },
                            },
                            'description': "Basic turret operation skill. 2% Bonus to weapon turrets' rate of fire per skill level.",
                            'group_id': 255,
                            'id': 3300,
                            'name': 'Gunnery',
                            'published': True,
                            'rank': 1,
                            'required_skills': {},
                        },
                        3301: {
                            'attributes': {
                                'primary': 'perception',
                                'secondary': 'willpower',
                            },
                            'bonuses': {
                                'damageMultiplierBonus': {
                                    'type': 'damageMultiplierBonus',
                                    'value': 5.0,
                                },
                            },
                            'description': 'Operation of small hybrid turrets. 5% Bonus to small hybrid turret damage per level.',
                            'group_id': 255,
                            'id': 3301,
                            'name': 'Small Hybrid Turret',
                            'published': True,
                            'rank': 1,
                            'required_skills': {
                                3300: {
                                    'id': 3300,
                                    'level': 1,
                                    'name': 'Gunnery',
                                },
                            },
                        },
                    },
                },
                266: {
                    'id': 266,
                    'name': 'Corporation Management',
                    'skills': {
                        11584 : {
                            'id': 11584,
                            'group_id': 266,
                            'name': 'Anchoring',
                            'description': 'Skill at Anchoring Deployables. Can not be trained on Trial Accounts.',
                            'published': True,
                            'rank': 3,
                            'attributes': {
                                'primary': 'memory',
                                'secondary': 'charisma',
                            },
                            'required_skills': {},
                            'bonuses': {
                                'canNotBeTrainedOnTrial': {
                                    'type': 'canNotBeTrainedOnTrial',
                                    'value': 1.0,
                                }
                            }
                        },
                        3369 : {
                            'id': 3369,
                            'group_id': 266,
                            'name': 'CFO Training',
                            'description': 'Skill at managing corp finances. 5% discount on all fees at non-hostile NPC station if acting as CFO of a corp. ',
                            'published': False,
                            'rank': 3,
                            'attributes': {
                                'primary': 'memory',
                                'secondary': 'charisma',
                            },
                            'required_skills': {
                                3363 : { 'id' : 3363, 'level' : 2, 'name' : None },
                                3444 : { 'id' : 3444, 'level' : 3, 'name' : None },
                            },
                            'bonuses': {}
                        }
                    }
                }
            })
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/SkillTree', params={})
            ])
    def test_reference_types(self):
        """eve/RefTypes: reference type id to label mapping."""
        self.api.get.return_value = self.make_api_result("eve/reference_types.xml")
        result, current, expires = self.eve.reference_types()
        self.assertEqual(result, {
                0: 'Undefined',
                1: 'Player Trading',
                2: 'Market Transaction',
                3: 'GM Cash Transfer',
                4: 'ATM Withdraw',
                5: 'ATM Deposit'
            })
        self.assertEqual(current, 12345)
        self.assertEqual(expires, 67890)
        self.assertEqual(self.api.mock_calls, [
                mock.call.get('eve/RefTypes', params={})
            ])
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
# Optional dependency: python-calais.  When it is missing, ``Calais`` is
# None and ``process_calais`` is simply not defined.
try:
    from calais import Calais
except ImportError: # pragma: no cover
    Calais = None # NOQA
if Calais is not None:
    def process_calais(content, key):
        """Run OpenCalais entity extraction on *content* using API *key* and
        return the names of all detected Person entities."""
        calais = Calais(key)
        response = calais.analyze(content)
        # getattr tolerates responses that carry no entities attribute.
        people = [entity["name"] for entity in getattr(response, "entities", []) if entity["_type"] == "Person"]
        return {"people": people}
|
import datetime

# datetime.weekday(): Monday == 0 ... Sunday == 6.
day = datetime.datetime.now().weekday()


def get_sunday():
    return "Today it's Sunday"


def get_monday():
    return "Today it's Monday"


def get_tuesday():
    return "Today it's Tuesday"


def get_wednesday():
    return "Today it's Wednesday"


def get_thursday():
    return "Today it's Thursday"


def get_friday():
    return "Today it's Friday"


def get_saturday():
    return "Today it's Saturday"


def get_default():
    return "Looking forward to the Weekend"


# Dispatch table keyed by datetime.weekday() (Monday == 0 ... Sunday == 6).
# BUG FIX: the original table started at Sunday for key 0 (off by one day
# for Python's weekday numbering) and never used get_saturday -- Saturday
# fell through to get_default via key 6 mapping to the wrong function.
switcher = {
    0: get_monday,
    1: get_tuesday,
    2: get_wednesday,
    3: get_thursday,
    4: get_friday,
    5: get_saturday,
    6: get_sunday,
}

# get_default remains the fallback for any unexpected key.
dayName = switcher.get(day, get_default)()
print(dayName)
|
from django.conf.urls import url
from timeline import views
# URL configuration for the timeline app: the root path maps to the
# ``timelines`` view.
urlpatterns = [
    url(r'^$', views.timelines, name='timelines'),
]
|
from contextlib import contextmanager
from functools import wraps
from werkzeug.local import LocalProxy, LocalStack
_additional_ctx_stack = LocalStack()
__all__ = ("current_additions", "Additional", "AdditionalManager")
@LocalProxy
def current_additions():
    """
    Proxy to the currently added requirements
    """
    top = _additional_ctx_stack.top
    # The stack stores (manager, additional) tuples; expose the additional.
    return None if top is None else top[1]
def _isinstance(f):
    """Decorator for binary operators: return NotImplemented when the other
    operand is not an Additional, so Python's reflected-operator protocol can
    take over instead of raising TypeError."""
    @wraps(f)
    def check(self, other):
        if not isinstance(other, Additional):
            return NotImplemented
        return f(self, other)

    return check


class Additional(object):
    """
    Container object that allows to run extra requirements on checks. These
    additional requirements will be run at most once per check and will
    occur in no guaranteed order.
    Requirements can be added by passing them into the constructor or
    by calling the ``add`` method. They can be removed from this object
    by calling the ``remove`` method. To check if a requirement has been added
    to the current context, you may call ``is_added`` or use ``in``::
        some_req in additional
        additional.is_added(some_req)
    Additional objects can be iterated and length checked::
        additional = Additional(some_req)
        assert len(additional) == 1
        assert list(additional) == [some_req]
    Additional objects may be combined and compared to each other with the following
    operators:
    ``+`` creates a new additional object by combining two others, the new
    additional supplies all requirements that both parents did.
    ``+=`` similar to ``+`` except it is an inplace update.
    ``-`` creates a new additional instance by removing any requirements from
    the first instance that are contained in the second instance.
    ``-=`` similar to ``-`` except it is an inplace update.
    ``==`` compares two additional instances and returns true if both have
    the same added requirements.
    ``!=`` similar to ``==`` except returns true if both have different
    requirements contained in them.
    """

    def __init__(self, *requirements):
        self._requirements = set(requirements)

    def add(self, requirement, *requirements):
        """Add one or more requirements to this container."""
        self._requirements.update((requirement,) + requirements)

    def remove(self, requirement, *requirements):
        """Remove one or more requirements from this container."""
        self._requirements.difference_update((requirement,) + requirements)

    @_isinstance
    def __add__(self, other):
        requirements = self._requirements | other._requirements
        return Additional(*requirements)

    @_isinstance
    def __iadd__(self, other):
        # BUG FIX: this previously called set.add(*other._requirements),
        # which raises TypeError whenever ``other`` holds more than one
        # requirement (set.add accepts exactly one element).  update()
        # merges any number of requirements, including zero.
        self._requirements.update(other._requirements)
        return self

    @_isinstance
    def __sub__(self, other):
        requirements = self._requirements - other._requirements
        return Additional(*requirements)

    @_isinstance
    def __isub__(self, other):
        # difference_update handles the empty case, so no length guard is
        # needed (unlike remove(), which requires at least one argument).
        self._requirements.difference_update(other._requirements)
        return self

    @_isinstance
    def __eq__(self, other):
        return self._requirements == other._requirements

    @_isinstance
    def __ne__(self, other):
        return not self == other

    def __iter__(self):
        return iter(self._requirements)

    def is_added(self, requirement):
        """Return True if *requirement* is held by this container."""
        return requirement in self._requirements

    def __contains__(self, requirement):
        return self.is_added(requirement)

    def __len__(self):
        return len(self._requirements)

    def __bool__(self):
        return len(self) != 0

    # Python 2 truthiness hook.
    __nonzero__ = __bool__

    def __repr__(self):
        return "Additional({!r})".format(self._requirements)
class AdditionalManager(object):
    """
    Used to manage the process of adding and removing additional requirements
    to be run. This class shouldn't be used directly, instead use
    ``allows.additional`` to access these controls.
    """
    def push(self, additional, use_parent=False):
        """
        Binds an additional to the current context, optionally use the
        current additionals in conjunction with this additional
        If ``use_parent`` is true, a new additional is created from the
        parent and child additionals rather than manipulating either
        directly.
        """
        current = self.current
        if use_parent and current:
            additional = current + additional
        # The manager is stored alongside the additional so pop() can detect
        # a pop by the wrong manager.
        _additional_ctx_stack.push((self, additional))
    def pop(self):
        """
        Pops the latest additional context.
        If the additional context was pushed by a different additional manager,
        a ``RuntimeError`` is raised.
        """
        rv = _additional_ctx_stack.pop()
        if rv is None or rv[0] is not self:
            raise RuntimeError(
                "popped wrong additional context ({} instead of {})".format(rv, self)
            )
    @property
    def current(self):
        """
        Returns the current additional context if set otherwise None
        """
        try:
            return _additional_ctx_stack.top[1]
        except TypeError:
            # top is None when the stack is empty; subscripting raises TypeError.
            return None
    @contextmanager
    def additional(self, additional, use_parent=False):
        """
        Allows temporarily pushing an additional context, yields the new context
        into the following block.
        """
        self.push(additional, use_parent)
        try:
            yield self.current
        finally:
            # BUG FIX: previously pop() was unconditional code after the yield,
            # so an exception raised inside the ``with`` body skipped it and
            # leaked the pushed context onto the stack. try/finally guarantees
            # the context is unwound.
            self.pop()
|
import unittest
"""
Given a binary tree, we need to find maximum value we can get by subtracting
value of node B from value of node A, where A and B are two nodes of the binary tree
and A is ancestor of B. Expected time complexity is O(n).
"""
class Node:
    """A binary-tree node: a value plus optional left/right child links."""

    def __init__(self, data, left=None, right=None):
        self.data = data
        self.left = left
        self.right = right

    def add_left_child(self, data):
        """Attach a fresh node as the left child and return it."""
        child = Node(data)
        self.left = child
        return child

    def add_right_child(self, data):
        """Attach a fresh node as the right child and return it."""
        child = Node(data)
        self.right = child
        return child
class BinaryTree:
    """Computes the maximum of ``A.data - B.data`` over all pairs where A is
    a proper ancestor of B, in a single O(n) post-order traversal."""

    def __init__(self, root):
        self.root = root
        # Best difference seen so far; stays -inf when no ancestor/descendant
        # pair exists (empty or single-node tree).
        self.max_difference = -float('inf')

    def max_difference_node_and_ancestor(self):
        """Run the traversal and return the best ancestor-descendant difference."""
        self.max_min_in_subtree(self.root)
        return self.max_difference

    def max_min_in_subtree(self, node):
        """Return (min, max) of the subtree rooted at ``node``, updating
        ``self.max_difference`` with node.data minus each child subtree's
        extremes along the way."""
        if node is None:
            return float('inf'), -float('inf')
        lo_left, hi_left = self.max_min_in_subtree(node.left)
        lo_right, hi_right = self.max_min_in_subtree(node.right)
        for child, lo, hi in ((node.left, lo_left, hi_left),
                              (node.right, lo_right, hi_right)):
            if child:
                self.max_difference = max(self.max_difference,
                                          node.data - lo,
                                          node.data - hi)
        return (min(node.data, lo_left, lo_right),
                max(node.data, hi_left, hi_right))
class TestBinaryTree(unittest.TestCase):
    """Checks the classic example tree: the best pair is 8 (root) minus 1,
    giving a maximum ancestor-descendant difference of 7."""

    def test_max_difference(self):
        root = Node(8)
        root.right = Node(10)
        root.right.right = Node(14)
        root.right.right.left = Node(13)
        root.left = Node(3)
        root.left.left = Node(1)
        root.left.right = Node(6)
        root.left.right.left = Node(4)
        root.left.right.right = Node(7)
        tree = BinaryTree(root)
        self.assertEqual(tree.max_difference_node_and_ancestor(), 7)
|
import datetime

# Print the current local date/time as "YYYY-MM-DD HH:MM:SS".
# BUG FIX: ``print now.strftime(...)`` is the Python 2 print statement and a
# SyntaxError under Python 3; the print() function works on both.
now = datetime.datetime.now()
print(now.strftime("%Y-%m-%d %H:%M:%S"))
|
import requests
from unittest import skip
from sure import expect
from httpretty import HTTPretty
@skip
def test_http_passthrough():
    # End-to-end check that HTTPretty intercepts only registered URIs and
    # passes every other request through to the real network. Skipped by
    # default because it requires live internet access.
    url = 'http://httpbin.org/status/200'
    response1 = requests.get(url)
    # NOTE(review): the result above is immediately overwritten by the
    # streaming request below — looks like leftover code; confirm whether
    # the non-streaming call is intentional.
    response1 = requests.get(url, stream=True)
    HTTPretty.enable()
    # Only google.com is registered, so it should be served from the mock...
    HTTPretty.register_uri(HTTPretty.GET, 'http://google.com/', body="Not Google")
    response2 = requests.get('http://google.com/')
    expect(response2.content).to.equal(b'Not Google')
    # ...while the unregistered httpbin URL must still reach the real server.
    response3 = requests.get(url, stream=True)
    (response3.content).should.equal(response1.content)
    HTTPretty.disable()
    # With HTTPretty disabled, all traffic goes to the real network again.
    response4 = requests.get(url, stream=True)
    (response4.content).should.equal(response1.content)
@skip
def test_https_passthrough():
    # Same passthrough scenario as the HTTP test, but over TLS: unregistered
    # HTTPS URLs must bypass the mock both while HTTPretty is enabled and
    # after it is disabled. Skipped by default (needs live internet access).
    url = 'https://raw.githubusercontent.com/gabrielfalcao/HTTPretty/master/COPYING'
    response1 = requests.get(url, stream=True)
    HTTPretty.enable()
    # Only google.com is registered; it is answered from the mock...
    HTTPretty.register_uri(HTTPretty.GET, 'https://google.com/', body="Not Google")
    response2 = requests.get('https://google.com/')
    expect(response2.content).to.equal(b'Not Google')
    # ...while the unregistered GitHub URL must still hit the real server.
    response3 = requests.get(url, stream=True)
    (response3.content).should.equal(response1.content)
    HTTPretty.disable()
    # With the mock disabled, everything goes to the real network again.
    response4 = requests.get(url, stream=True)
    (response4.content).should.equal(response1.content)
|
from SPARQLWrapper import SPARQLWrapper, JSON
import requests
import re
import os
import os.path
import time
import sys
# Finto (Finnish thesaurus service) SPARQL endpoint — note: the dev instance.
FINTO_ENDPOINT='http://api.dev.finto.fi/sparql'
# Finna (Finnish library/archive/museum search) REST search API.
FINNA_API_SEARCH='https://api.finna.fi/v1/search'
# Target language is the first command-line argument ('fi', 'sv' or 'en').
lang = sys.argv[1]
# Maps the two-letter codes used above to the three-letter codes Finna's
# language filter expects.
LANGMAP = {
    'fi': 'fin',
    'sv': 'swe',
    'en': 'eng'
}
def row_to_concept(row):
    """Flatten one SPARQL JSON result binding into a plain concept dict."""
    concept = {
        'uri': row['c']['value'],
        'pref': row['pref']['value'],
        'ysapref': row['ysapref']['value'],
        'allarspref': row['allarspref']['value'],
    }
    # Alternative labels are OPTIONAL in the query, so the binding may be missing.
    if 'alts' in row:
        concept['alts'] = row['alts']['value']
    return concept
def get_concepts(lang):
    """Query Finto for all live YSO concepts with labels in ``lang``.

    Returns a list of dicts (see row_to_concept) with the YSO pref label,
    optional alt labels, and the matching YSA and Allars pref labels.
    Deprecated concepts and hierarchy placeholder nodes are excluded.
    """
    sparql = SPARQLWrapper(FINTO_ENDPOINT)
    # %s placeholders are filled with the language code below; GROUP_CONCAT
    # folds all alt labels of a concept into one space-separated string.
    sparql.setQuery("""
    PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
    PREFIX owl: <http://www.w3.org/2002/07/owl#>
    PREFIX ysometa: <http://www.yso.fi/onto/yso-meta/>
    SELECT ?c ?pref (GROUP_CONCAT(?alt) AS ?alts) ?ysapref ?allarspref
    WHERE {
      GRAPH <http://www.yso.fi/onto/yso/> {
        ?c a skos:Concept .
        ?c skos:prefLabel ?pref .
        FILTER(LANG(?pref)='%s')
        OPTIONAL {
          ?c skos:altLabel ?alt .
          FILTER(LANG(?alt)='%s')
        }
        FILTER NOT EXISTS { ?c owl:deprecated true }
        FILTER NOT EXISTS { ?c a ysometa:Hierarchy }
      }
      GRAPH <http://www.yso.fi/onto/ysa/> {
        ?ysac skos:closeMatch|skos:exactMatch ?c .
        ?ysac skos:prefLabel ?ysapref .
      }
      GRAPH <http://www.yso.fi/onto/allars/> {
        ?allarsc skos:closeMatch|skos:exactMatch ?c .
        ?allarsc skos:prefLabel ?allarspref .
      }
    }
    GROUP BY ?c ?pref ?ysapref ?allarspref
    """ % (lang, lang))
    sparql.setReturnFormat(JSON)
    results = sparql.query().convert()
    return [row_to_concept(row) for row in results['results']['bindings']]
# Fetch the full concept list once up front; the main loop at the bottom of
# this script writes one corpus file per concept.
concepts = get_concepts(lang)
def search_finna(params):
    # Perform one Finna search API request and return the parsed JSON body.
    # The custom User-agent identifies this harvester to the API operators.
    r = requests.get(FINNA_API_SEARCH, params=params, headers={'User-agent': 'annif 0.1'})
    return r.json()
def records_to_texts(records):
    """Collect the title and all summary strings from Finna record dicts,
    preserving per-record order (title first, then its summaries)."""
    texts = []
    for record in records:
        if 'title' in record:
            texts.append(record['title'])
        # A record may carry several summaries; keep each one as its own text.
        texts.extend(record.get('summary', []))
    return texts
def generate_text(concept, lang):
    """Build a training-text blob for one concept: its labels plus
    de-duplicated titles/summaries found via three Finna searches."""
    # start with pref- and altlabels
    labels = [concept['pref']]
    if lang == 'fi':
        # we can use the YSA label too
        labels.append(concept['ysapref'])
    if lang == 'sv':
        # we can use the Allars label too
        labels.append(concept['allarspref'])
    if 'alts' in concept:
        labels.append(concept['alts'])
    labels = ' '.join(labels)
    # look for more text in Finna API
    texts = []
    fields = ['title','summary']
    # Finna indexes subjects with YSA/Allars terms, so search with those.
    finnaterms = (concept['ysapref'], concept['allarspref'])
    finnalang = LANGMAP[lang]
    # Search type 1: exact matches using topic facet
    params = {'lookfor': 'topic_facet:"%s" OR topic_facet:"%s"' % finnaterms, 'filter[]': 'language:%s' % finnalang, 'lng':lang, 'limit':100, 'field[]':fields}
    response = search_finna(params)
    if 'records' in response:
        texts += records_to_texts(response['records'])
    # Search type 2: exact matches using Subject search
    params['lookfor'] = '"%s" OR "%s"' % finnaterms
    params['type'] = 'Subject'
    response = search_finna(params)
    if 'records' in response:
        texts += records_to_texts(response['records'])
    # Search type 3: fuzzy matches using Subject search
    params['lookfor'] = '(%s) OR (%s)' % finnaterms
    response = search_finna(params)
    if 'records' in response:
        texts += records_to_texts(response['records'])
    # set() removes duplicate texts found by the overlapping searches
    # (note: this also makes their order non-deterministic).
    return "\n".join([labels] + list(set(texts)))
# Main loop: write one corpus file per concept. Files that already exist are
# skipped, so an interrupted harvest can simply be re-run to resume.
for concept in concepts:
    localname = concept['uri'].split('/')[-1]
    outfile = 'corpus/%s-%s.raw' % (localname, lang)
    if os.path.exists(outfile):
        continue
    text = None
    tries = 0
    while tries < 10:
        try:
            text = generate_text(concept, lang)
            break
        except Exception:
            # BUG FIX: was a bare ``except:``, which also caught
            # KeyboardInterrupt/SystemExit and made the script retry instead
            # of stopping on Ctrl-C. Failure: try again until tries exhausted.
            tries += 1
            print("Error generating text for concept %s, trying again (attempt %d)" % (concept['uri'], tries))
            time.sleep(tries)  # wait progressively longer between attempts
    if text is None:
        print("Failed looking up concept %s, exiting" % concept['uri'])
        sys.exit(1)
    print(localname, lang, concept['pref'], concept['ysapref'], concept['allarspref'], len(text.split()))
    # Context manager guarantees the file is closed even if a write fails
    # (the original left the handle open on error).
    with open(outfile, 'w') as f:
        print(concept['uri'], concept['pref'], file=f)
        print(text, file=f)
|
from django.db import models
class Profil(models.Model):
    """Engineer ("Perekayasa") profile data linked to a Django auth user."""
    # Sentinel stored value for the blank default choice in the jenjang field.
    awal = ''
    # (stored value, human-readable label) pairs for the engineer rank choices.
    PILIHAN_JENJANG = (
        (awal, '----'),
        ('Pertama', 'Perekayasa Pertama'),
        ('Muda', 'Perekayasa Muda'),
        ('Madya', 'Perekayasa Madya'),
        ('Utama', 'Perekayasa Utama'),
    )
    # Civil-servant ID number.
    nip = models.CharField(max_length=50, verbose_name='NIP')
    # Education background.
    pendidikan = models.CharField(max_length=150, verbose_name='Pendidikan')
    # Full unit name and its abbreviation.
    instansi = models.TextField(verbose_name='Nama Lengkap Unit')
    instansi_kode = models.CharField(max_length=20, verbose_name='Singkatan Unit')
    # Work unit and office names; optional.
    satuan = models.TextField(verbose_name='Nama Lengkap Satuan kerja', blank=True)
    kantor = models.TextField(verbose_name='Nama Lengkap Kantor', blank=True)
    # Rank / pay grade / effective date.
    pangkat = models.TextField(verbose_name='Pangkat/Golongan Ruang/TMT')
    jabatan = models.CharField(max_length=150, verbose_name='Jabatan')
    # Engineer rank, restricted to PILIHAN_JENJANG; defaults to the blank choice.
    jenjang = models.CharField(max_length=10, verbose_name='Jenjang Perekayasa', choices=PILIHAN_JENJANG, default=awal)
    # Owning user; profile rows are deleted together with the user.
    user = models.ForeignKey('auth.User', verbose_name='Personil', on_delete=models.CASCADE)
    class Meta:
        verbose_name_plural = 'Profil'
    def __str__(self):
        return self.nip
|
import sys, os, os.path, re, subprocess, datetime, multiprocessing.pool
import dns.reversename, dns.resolver
import dateutil.parser, dateutil.tz
import idna
import psutil
from dns_update import get_dns_zones, build_tlsa_record, get_custom_dns_config, get_secondary_dns, get_custom_dns_record
from web_update import get_web_domains, get_domains_with_a_records
from ssl_certificates import get_ssl_certificates, get_domain_ssl_files, check_certificate
from mailconfig import get_mail_domains, get_mail_aliases
from utils import shell, sort_domains, load_env_vars_from_file, load_settings
def run_checks(rounded_values, env, output, pool):
    """Top-level entry point: run service, system, network and per-domain
    checks, writing results into ``output``. ``pool`` parallelizes the slow
    checks; ``rounded_values`` selects coarser numbers in messages (used by
    the disk/memory checks below)."""
    # run systems checks
    output.add_heading("System")
    # check that services are running
    if not run_services_checks(env, output, pool):
        # If critical services are not running, stop. If bind9 isn't running,
        # all later DNS checks will timeout and that will take forever to
        # go through, and if running over the web will cause a fastcgi timeout.
        return
    # clear bind9's DNS cache so our DNS checks are up to date
    # (ignore errors; if bind9/rndc isn't running we'd already report
    # that in run_services checks.)
    shell('check_call', ["/usr/sbin/rndc", "flush"], trap=True)
    run_system_checks(rounded_values, env, output)
    # perform other checks asynchronously
    run_network_checks(env, output)
    run_domain_checks(rounded_values, env, output, pool)
def get_ssh_port():
    """Return the port sshd listens on, or None when sshd isn't installed
    or no port can be found in its effective configuration."""
    try:
        output = shell('check_output', ['sshd', '-T'])
    except FileNotFoundError:
        # sshd is not installed. That's ok.
        return None
    # 'sshd -T' dumps the effective config as whitespace-separated tokens;
    # the value following the "port" token is the listening port.
    tokens = output.split()
    for pos, token in enumerate(tokens[:-1]):
        if token == "port":
            return int(tokens[pos + 1])
    # Did not find port!
    return None
def run_services_checks(env, output, pool):
    # Check that system services are running.
    # Each entry names a service, the port it must answer on, and whether it
    # should be reachable on the box's public IP ("public") or only locally.
    services = [
        { "name": "Local DNS (bind9)", "port": 53, "public": False, },
        #{ "name": "NSD Control", "port": 8952, "public": False, },
        { "name": "Local DNS Control (bind9/rndc)", "port": 953, "public": False, },
        { "name": "Dovecot LMTP LDA", "port": 10026, "public": False, },
        { "name": "Postgrey", "port": 10023, "public": False, },
        { "name": "Spamassassin", "port": 10025, "public": False, },
        { "name": "OpenDKIM", "port": 8891, "public": False, },
        { "name": "OpenDMARC", "port": 8893, "public": False, },
        { "name": "Memcached", "port": 11211, "public": False, },
        { "name": "Mail-in-a-Box Management Daemon", "port": 10222, "public": False, },
        { "name": "SSH Login (ssh)", "port": get_ssh_port(), "public": True, },
        { "name": "Public DNS (nsd4)", "port": 53, "public": True, },
        { "name": "Incoming Mail (SMTP/postfix)", "port": 25, "public": True, },
        { "name": "Outgoing Mail (SMTP 587/postfix)", "port": 587, "public": True, },
        #{ "name": "Postfix/master", "port": 10587, "public": True, },
        { "name": "IMAPS (dovecot)", "port": 993, "public": True, },
        { "name": "Mail Filters (Sieve/dovecot)", "port": 4190, "public": True, },
        { "name": "HTTP Web (nginx)", "port": 80, "public": True, },
        { "name": "HTTPS Web (nginx)", "port": 443, "public": True, },
    ]
    all_running = True
    fatal = False
    # Probe all services in parallel; each worker returns a buffered output
    # that is replayed in order (sorted by index i) for stable reporting.
    ret = pool.starmap(check_service, ((i, service, env) for i, service in enumerate(services)), chunksize=1)
    for i, running, fatal2, output2 in sorted(ret):
        if output2 is None: continue # skip check (e.g. no port was set, e.g. no sshd)
        all_running = all_running and running
        fatal = fatal or fatal2
        output2.playback(output)
    if all_running:
        output.print_ok("All system services are running.")
    # Returning False tells run_checks to abort the remaining checks.
    return not fatal
def check_service(i, service, env):
    """Probe one service (see run_services_checks) by attempting TCP
    connections; returns (i, running, fatal, BufferedOutput) so results can
    be re-ordered and replayed by the caller. ``fatal`` is set only when
    local DNS is down, which makes further checks pointless."""
    if not service["port"]:
        # Skip check (no port, e.g. no sshd).
        return (i, None, None, None)
    output = BufferedOutput()
    running = False
    fatal = False
    # Helper function to make a connection to the service, since we try
    # up to three ways (localhost, IPv4 address, IPv6 address).
    def try_connect(ip):
        # Connect to the given IP address on the service's port with a one-second timeout.
        import socket
        s = socket.socket(socket.AF_INET if ":" not in ip else socket.AF_INET6, socket.SOCK_STREAM)
        s.settimeout(1)
        try:
            s.connect((ip, service["port"]))
            return True
        except OSError as e:
            # timed out or some other odd error
            return False
        finally:
            s.close()
    if service["public"]:
        # Service should be publicly accessible.
        if try_connect(env["PUBLIC_IP"]):
            # IPv4 ok.
            if not env.get("PUBLIC_IPV6") or service.get("ipv6") is False or try_connect(env["PUBLIC_IPV6"]):
                # No IPv6, or service isn't meant to run on IPv6, or IPv6 is good.
                running = True
            # IPv4 ok but IPv6 failed. Try the PRIVATE_IPV6 address to see if the service is bound to the interface.
            elif service["port"] != 53 and try_connect(env["PRIVATE_IPV6"]):
                output.print_error("%s is running (and available over IPv4 and the local IPv6 address), but it is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
            else:
                output.print_error("%s is running and available over IPv4 but is not accessible over IPv6 at %s port %d." % (service['name'], env['PUBLIC_IPV6'], service['port']))
        # IPv4 failed. Try the private IP to see if the service is running but not accessible (except DNS because a different service runs on the private IP).
        elif service["port"] != 53 and try_connect("127.0.0.1"):
            output.print_error("%s is running but is not publicly accessible at %s:%d." % (service['name'], env['PUBLIC_IP'], service['port']))
        else:
            output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
        # Why is nginx not running?
        if not running and service["port"] in (80, 443):
            # Include nginx's config-test output to hint at the cause.
            output.print_line(shell('check_output', ['nginx', '-t'], capture_stderr=True, trap=True)[1].strip())
    else:
        # Service should be running locally.
        if try_connect("127.0.0.1"):
            running = True
        else:
            output.print_error("%s is not running (port %d)." % (service['name'], service['port']))
    # Flag if local DNS is not running.
    if not running and service["port"] == 53 and service["public"] == False:
        fatal = True
    return (i, running, fatal, output)
def run_system_checks(rounded_values, env, output):
    """Run all local-machine checks: SSH configuration, software updates,
    version, system mail alias, disk space, and memory."""
    check_ssh_password(env, output)
    check_software_updates(env, output)
    check_miab_version(env, output)
    check_system_aliases(env, output)
    check_free_disk_space(rounded_values, env, output)
    check_free_memory(rounded_values, env, output)
def check_ssh_password(env, output):
    """Warn when sshd permits password-based login (public keys are safer)."""
    # Check that SSH login with password is disabled. The openssh-server
    # package may not be installed so check that before trying to access
    # the configuration file.
    if not os.path.exists("/etc/ssh/sshd_config"):
        return
    # Use a context manager so the file handle is always closed (the
    # original leaked it until garbage collection).
    with open("/etc/ssh/sshd_config") as f:
        sshd = f.read()
    # BUG FIX: the patterns were non-raw strings, so "\s" was an invalid
    # escape sequence (a DeprecationWarning in Python 3.6+, an error in
    # later versions); raw strings pass the intended regex to re.search.
    if re.search(r"\nPasswordAuthentication\s+yes", sshd) \
        or not re.search(r"\nPasswordAuthentication\s+no", sshd):
        output.print_error("""The SSH server on this machine permits password-based login. A more secure
            way to log in is using a public key. Add your SSH public key to $HOME/.ssh/authorized_keys, check
            that you can log in without a password, set the option 'PasswordAuthentication no' in
            /etc/ssh/sshd_config, and then restart the openssh via 'sudo service ssh restart'.""")
    else:
        output.print_ok("SSH disallows password-based login.")
def is_reboot_needed_due_to_package_installation():
    """Return True when the reboot-required flag file left by package
    installation exists."""
    flag_file = "/var/run/reboot-required"
    return os.path.exists(flag_file)
def check_software_updates(env, output):
    """Report pending apt package updates and whether a reboot is pending."""
    # Check for any software package updates.
    # apt_update=False: rely on the last 'apt update' run rather than
    # refreshing the package lists here.
    pkgs = list_apt_updates(apt_update=False)
    if is_reboot_needed_due_to_package_installation():
        output.print_error("System updates have been installed and a reboot of the machine is required.")
    elif not pkgs:
        output.print_ok("System software is up to date.")
    else:
        output.print_error("There are %d software packages that can be updated." % len(pkgs))
        for pkg in pkgs:
            output.print_line("%s (%s)" % (pkg["package"], pkg["version"]))
def check_system_aliases(env, output):
    """Ensure mail to administrator@PRIMARY_HOSTNAME is routed somewhere."""
    # Check that the administrator alias exists since that's where all
    # admin email is automatically directed.
    check_alias_exists("System administrator address", "administrator@" + env['PRIMARY_HOSTNAME'], env, output)
def check_free_disk_space(rounded_values, env, output):
    # Check free disk space on the filesystem holding STORAGE_ROOT.
    st = os.statvfs(env['STORAGE_ROOT'])
    bytes_total = st.f_blocks * st.f_frsize
    # f_bavail = blocks available to unprivileged users (excludes root reserve).
    bytes_free = st.f_bavail * st.f_frsize
    if not rounded_values:
        # Exact free space, rounded to one decimal in GB.
        disk_msg = "The disk has %s GB space remaining." % str(round(bytes_free/1024.0/1024.0/1024.0*10.0)/10)
    else:
        # Coarse message for rounded (public) status output.
        # NOTE(review): bytes_free/bytes_total/10 lies in [0, 0.1], so this
        # expression can only yield 0 or 10 — presumably '*10' (i.e. percent
        # rounded to a multiple of 10) was intended; confirm before changing
        # the reported numbers.
        disk_msg = "The disk has less than %s%% space left." % str(round(bytes_free/bytes_total/10 + .5)*10)
    # Severity thresholds: >30% free is ok, 15-30% warns, below 15% errors.
    if bytes_free > .3 * bytes_total:
        output.print_ok(disk_msg)
    elif bytes_free > .15 * bytes_total:
        output.print_warning(disk_msg)
    else:
        output.print_error(disk_msg)
def check_free_memory(rounded_values, env, output):
    """Report free system memory; >=20% is ok, 10-20% warns, below 10% errors.
    With rounded_values, only the coarse bracket is reported, not the exact
    percentage."""
    percent_free = 100 - psutil.virtual_memory().percent
    memory_msg = "System memory is %s%% free." % str(round(percent_free))
    if percent_free >= 20:
        report = output.print_ok
        rounded_msg = "System free memory is at least 20%."
    elif percent_free >= 10:
        report = output.print_warning
        rounded_msg = "System free memory is below 20%."
    else:
        report = output.print_error
        rounded_msg = "System free memory is below 10%."
    report(rounded_msg if rounded_values else memory_msg)
def run_network_checks(env, output):
    """Check network preconditions for running a mail server: outbound
    port 25 must be open and the public IPv4 must not be on the Spamhaus
    ZEN blocklist."""
    # Also see setup/network-checks.sh.
    output.add_heading("Network")
    # Stop if we cannot make an outbound connection on port 25. Many residential
    # networks block outbound port 25 to prevent their network from sending spam.
    # See if we can reach one of Google's MTAs with a 5-second timeout.
    code, ret = shell("check_call", ["/bin/nc", "-z", "-w5", "aspmx.l.google.com", "25"], trap=True)
    if ret == 0:
        output.print_ok("Outbound mail (SMTP port 25) is not blocked.")
    else:
        output.print_error("""Outbound mail (SMTP port 25) seems to be blocked by your network. You
            will not be able to send any mail. Many residential networks block port 25 to prevent hijacked
            machines from being able to send spam. A quick connection test to Google's mail server on port 25
            failed.""")
    # Stop if the IPv4 address is listed in the ZEN Spamhaus Block List.
    # The user might have ended up on an IP address that was previously in use
    # by a spammer, or the user may be deploying on a residential network. We
    # will not be able to reliably send mail in these cases.
    # DNSBL convention: query the reversed IP under the list's zone; NXDOMAIN
    # (None here) means "not listed".
    rev_ip4 = ".".join(reversed(env['PUBLIC_IP'].split('.')))
    zen = query_dns(rev_ip4+'.zen.spamhaus.org', 'A', nxdomain=None)
    if zen is None:
        output.print_ok("IP address is not blacklisted by zen.spamhaus.org.")
    else:
        output.print_error("""The IP address of this machine %s is listed in the Spamhaus Block List (code %s),
            which may prevent recipients from receiving your email. See http://www.spamhaus.org/query/ip/%s."""
            % (env['PUBLIC_IP'], zen, env['PUBLIC_IP']))
def run_domain_checks(rounded_time, env, output, pool):
    """Run per-domain checks (DNS, mail, web) over every domain this box
    handles, in parallel, then replay the buffered results in sorted order."""
    # Get the list of domains we handle mail for.
    mail_domains = get_mail_domains(env)
    # Get the list of domains we serve DNS zones for (i.e. does not include subdomains).
    dns_zonefiles = dict(get_dns_zones(env))
    dns_domains = set(dns_zonefiles)
    # Get the list of domains we serve HTTPS for.
    web_domains = set(get_web_domains(env))
    domains_to_check = mail_domains | dns_domains | web_domains
    # Get the list of domains that we don't serve web for because of a custom CNAME/A record.
    domains_with_a_records = get_domains_with_a_records(env)
    # Serial version:
    #for domain in sort_domains(domains_to_check, env):
    #	run_domain_checks_on_domain(domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains)
    # Parallelize the checks across a worker pool.
    args = ((domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains, domains_with_a_records)
        for domain in domains_to_check)
    ret = pool.starmap(run_domain_checks_on_domain, args, chunksize=1)
    ret = dict(ret) # (domain, output) => { domain: output }
    for domain in sort_domains(ret, env):
        ret[domain].playback(output)
def run_domain_checks_on_domain(domain, rounded_time, env, dns_domains, dns_zonefiles, mail_domains, web_domains, domains_with_a_records):
    """Worker: run every applicable check for one domain and return
    (domain, BufferedOutput) so the parent process can replay results in a
    stable order."""
    output = BufferedOutput()
    # we'd move this up, but this returns non-pickleable values
    ssl_certificates = get_ssl_certificates(env)
    # The domain is IDNA-encoded in the database, but for display use Unicode.
    try:
        domain_display = idna.decode(domain.encode('ascii'))
        output.add_heading(domain_display)
    except (ValueError, UnicodeError, idna.IDNAError) as e:
        # Looks like we have some invalid data in our database.
        output.add_heading(domain)
        output.print_error("Domain name is invalid: " + str(e))
    if domain == env["PRIMARY_HOSTNAME"]:
        check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles)
    if domain in dns_domains:
        check_dns_zone(domain, env, output, dns_zonefiles)
    if domain in mail_domains:
        check_mail_domain(domain, env, output)
    if domain in web_domains:
        check_web_domain(domain, rounded_time, ssl_certificates, env, output)
    if domain in dns_domains:
        # Run after the other checks so related warnings appear last.
        check_dns_zone_suggestions(domain, env, output, dns_zonefiles, domains_with_a_records)
    return (domain, output)
def check_primary_hostname_dns(domain, env, output, dns_domains, dns_zonefiles):
    """Extra DNS checks that apply only to the box's PRIMARY_HOSTNAME:
    DNSSEC, nameserver glue records, A/AAAA resolution, reverse DNS,
    the SMTP TLSA record, and the hostmaster@ alias."""
    # If a DS record is set on the zone containing this domain, check DNSSEC now.
    has_dnssec = False
    for zone in dns_domains:
        if zone == domain or domain.endswith("." + zone):
            if query_dns(zone, "DS", nxdomain=None) is not None:
                has_dnssec = True
                check_dnssec(zone, env, output, dns_zonefiles, is_checking_primary=True)
    ip = query_dns(domain, "A")
    ns_ips = query_dns("ns1." + domain, "A") + '/' + query_dns("ns2." + domain, "A")
    my_ips = env['PUBLIC_IP'] + ((" / "+env['PUBLIC_IPV6']) if env.get("PUBLIC_IPV6") else "")
    # Check that the ns1/ns2 hostnames resolve to A records. This information probably
    # comes from the TLD since the information is set at the registrar as glue records.
    # We're probably not actually checking that here but instead checking that we, as
    # the nameserver, are reporting the right info --- but if the glue is incorrect this
    # will probably fail.
    if ns_ips == env['PUBLIC_IP'] + '/' + env['PUBLIC_IP']:
        output.print_ok("Nameserver glue records are correct at registrar. [ns1/ns2.%s ↦ %s]" % (env['PRIMARY_HOSTNAME'], env['PUBLIC_IP']))
    elif ip == env['PUBLIC_IP']:
        # The NS records are not what we expect, but the domain resolves correctly, so
        # the user may have set up external DNS. List this discrepancy as a warning.
        output.print_warning("""Nameserver glue records (ns1.%s and ns2.%s) should be configured at your domain name
            registrar as having the IP address of this box (%s). They currently report addresses of %s. If you have set up External DNS, this may be OK."""
            % (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'], env['PUBLIC_IP'], ns_ips))
    else:
        output.print_error("""Nameserver glue records are incorrect. The ns1.%s and ns2.%s nameservers must be configured at your domain name
            registrar as having the IP address %s. They currently report addresses of %s. It may take several hours for
            public DNS to update after a change."""
            % (env['PRIMARY_HOSTNAME'], env['PRIMARY_HOSTNAME'], env['PUBLIC_IP'], ns_ips))
    # Check that PRIMARY_HOSTNAME resolves to PUBLIC_IP[V6] in public DNS.
    ipv6 = query_dns(domain, "AAAA") if env.get("PUBLIC_IPV6") else None
    if ip == env['PUBLIC_IP'] and ipv6 in (None, env['PUBLIC_IPV6']):
        output.print_ok("Domain resolves to box's IP address. [%s ↦ %s]" % (env['PRIMARY_HOSTNAME'], my_ips))
    else:
        output.print_error("""This domain must resolve to your box's IP address (%s) in public DNS but it currently resolves
            to %s. It may take several hours for public DNS to update after a change. This problem may result from other
            issues listed above."""
            % (my_ips, ip + ((" / " + ipv6) if ipv6 is not None else "")))
    # Check reverse DNS matches the PRIMARY_HOSTNAME. Note that it might not be
    # a DNS zone if it is a subdomain of another domain we have a zone for.
    existing_rdns_v4 = query_dns(dns.reversename.from_address(env['PUBLIC_IP']), "PTR")
    existing_rdns_v6 = query_dns(dns.reversename.from_address(env['PUBLIC_IPV6']), "PTR") if env.get("PUBLIC_IPV6") else None
    if existing_rdns_v4 == domain and existing_rdns_v6 in (None, domain):
        output.print_ok("Reverse DNS is set correctly at ISP. [%s ↦ %s]" % (my_ips, env['PRIMARY_HOSTNAME']))
    elif existing_rdns_v4 == existing_rdns_v6 or existing_rdns_v6 is None:
        output.print_error("""Your box's reverse DNS is currently %s, but it should be %s. Your ISP or cloud provider will have instructions
            on setting up reverse DNS for your box.""" % (existing_rdns_v4, domain) )
    else:
        output.print_error("""Your box's reverse DNS is currently %s (IPv4) and %s (IPv6), but it should be %s. Your ISP or cloud provider will have instructions
            on setting up reverse DNS for your box.""" % (existing_rdns_v4, existing_rdns_v6, domain) )
    # Check the TLSA record.
    tlsa_qname = "_25._tcp." + domain
    tlsa25 = query_dns(tlsa_qname, "TLSA", nxdomain=None)
    tlsa25_expected = build_tlsa_record(env)
    if tlsa25 == tlsa25_expected:
        output.print_ok("""The DANE TLSA record for incoming mail is correct (%s).""" % tlsa_qname,)
    elif tlsa25 is None:
        if has_dnssec:
            # Omit a warning about it not being set if DNSSEC isn't enabled,
            # since TLSA shouldn't be used without DNSSEC.
            output.print_warning("""The DANE TLSA record for incoming mail is not set. This is optional.""")
    else:
        output.print_error("""The DANE TLSA record for incoming mail (%s) is not correct. It is '%s' but it should be '%s'.
            It may take several hours for public DNS to update after a change."""
            % (tlsa_qname, tlsa25, tlsa25_expected))
    # Check that the hostmaster@ email address exists.
    check_alias_exists("Hostmaster contact address", "hostmaster@" + domain, env, output)
def check_alias_exists(alias_name, alias, env, output):
    """Verify that ``alias`` exists as a mail alias and has a destination.

    ``alias_name`` is the human-readable label used in the status message.
    """
    # get_mail_aliases yields (address, receivers, ...) tuples; only the first
    # two fields matter here. A dict comprehension replaces the original
    # dict([...]) wrapper around a list comprehension (same result, idiomatic).
    mail_aliases = {address: receivers for address, receivers, *_ in get_mail_aliases(env)}
    if alias in mail_aliases:
        if mail_aliases[alias]:
            output.print_ok("%s exists as a mail alias. [%s ↦ %s]" % (alias_name, alias, mail_aliases[alias]))
        else:
            # Alias exists but has an empty destination.
            output.print_error("""You must set the destination of the mail alias for %s to direct email to you or another administrator.""" % alias)
    else:
        output.print_error("""You must add a mail alias for %s which directs email to you or another administrator.""" % alias)
def check_dns_zone(domain, env, output, dns_zonefiles):
    """Check registrar-side DNS for a zone we serve: DNSSEC (if a DS record
    exists), the NS records pointing at this box, and that any custom
    secondary nameservers resolve the domain correctly."""
    # If a DS record is set at the registrar, check DNSSEC first because it will affect the NS query.
    # If it is not set, we suggest it last.
    if query_dns(domain, "DS", nxdomain=None) is not None:
        check_dnssec(domain, env, output, dns_zonefiles)
    # We provide a DNS zone for the domain. It should have NS records set up
    # at the domain name's registrar pointing to this box. The secondary DNS
    # server may be customized.
    # (I'm not sure whether this necessarily tests the TLD's configuration,
    # as it should, or if one successful NS line at the TLD will result in
    # this query being answered by the box, which would mean the test is only
    # half working.)
    custom_dns_records = list(get_custom_dns_config(env)) # generator => list so we can reuse it
    correct_ip = get_custom_dns_record(custom_dns_records, domain, "A") or env['PUBLIC_IP']
    custom_secondary_ns = get_secondary_dns(custom_dns_records, mode="NS")
    secondary_ns = custom_secondary_ns or ["ns2." + env['PRIMARY_HOSTNAME']]
    existing_ns = query_dns(domain, "NS")
    correct_ns = "; ".join(sorted(["ns1." + env['PRIMARY_HOSTNAME']] + secondary_ns))
    ip = query_dns(domain, "A")
    probably_external_dns = False
    if existing_ns.lower() == correct_ns.lower():
        output.print_ok("Nameservers are set correctly at registrar. [%s]" % correct_ns)
    elif ip == correct_ip:
        # The domain resolves correctly, so maybe the user is using External DNS.
        output.print_warning("""The nameservers set on this domain at your domain name registrar should be %s. They are currently %s.
            If you are using External DNS, this may be OK."""
            % (correct_ns, existing_ns) )
        probably_external_dns = True
    else:
        output.print_error("""The nameservers set on this domain are incorrect. They are currently %s. Use your domain name registrar's
            control panel to set the nameservers to %s."""
            % (existing_ns, correct_ns) )
    # Check that each custom secondary nameserver resolves the IP address.
    if custom_secondary_ns and not probably_external_dns:
        for ns in custom_secondary_ns:
            # We must first resolve the nameserver to an IP address so we can query it.
            ns_ip = query_dns(ns, "A")
            if not ns_ip:
                output.print_error("Secondary nameserver %s is not valid (it doesn't resolve to an IP address)." % ns)
                continue
            # Now query it to see what it says about this domain.
            ip = query_dns(domain, "A", at=ns_ip, nxdomain=None)
            if ip == correct_ip:
                output.print_ok("Secondary nameserver %s resolved the domain correctly." % ns)
            elif ip is None:
                output.print_error("Secondary nameserver %s is not configured to resolve this domain." % ns)
            else:
                output.print_error("Secondary nameserver %s is not configured correctly. (It resolved this domain as %s. It should be %s.)" % (ns, ip, correct_ip))
def check_dns_zone_suggestions(domain, env, output, dns_zonefiles, domains_with_a_records):
    """Emit advisory (non-error) notes for a zone: custom records that
    disable local web serving, and a DNSSEC suggestion if no DS record is
    set yet. Run last so suggestions appear after real problems."""
    # Warn if a custom DNS record is preventing this or the automatic www redirect from
    # being served.
    if domain in domains_with_a_records:
        output.print_warning("""Web has been disabled for this domain because you have set a custom DNS record.""")
    if "www." + domain in domains_with_a_records:
        output.print_warning("""A redirect from 'www.%s' has been disabled for this domain because you have set a custom DNS record on the www subdomain.""" % domain)
    # Since DNSSEC is optional, if a DS record is NOT set at the registrar suggest it.
    # (If it was set, we did the check earlier.)
    if query_dns(domain, "DS", nxdomain=None) is None:
        check_dnssec(domain, env, output, dns_zonefiles)
def check_dnssec(domain, env, output, dns_zonefiles, is_checking_primary=False):
    """Check that the domain's DNSSEC 'DS' record at the registrar is correct.

    Compares the DS record visible in public DNS against the pre-generated
    digests stored alongside our zone files. When the record is missing or
    wrong, prints the information (key tag, algorithm, digest, public key)
    the user must give their registrar. With is_checking_primary=True, only
    a short error summary is printed for an incorrect record.
    """
    # See if the domain has a DS record set at the registrar. The DS record may have
    # several forms. We have to be prepared to check for any valid record. We've
    # pre-generated all of the valid digests --- read them in.
    ds_file = '/etc/nsd/zones/' + dns_zonefiles[domain] + '.ds'
    if not os.path.exists(ds_file): return # Domain is in our database but DNS has not yet been updated.
    ds_correct = open(ds_file).read().strip().split("\n")
    digests = { }
    for rr_ds in ds_correct:
        # Each line is a DS resource record; the 5th tab-separated field holds
        # "keytag algorithm digest-algorithm digest".
        ds_keytag, ds_alg, ds_digalg, ds_digest = rr_ds.split("\t")[4].split(" ")
        digests[ds_digalg] = ds_digest
    # NOTE(review): ds_keytag and ds_alg below are the leaked loop variables from
    # the LAST record of the .ds file — this assumes every record shares the same
    # key tag and algorithm (all generated for the same KSK); confirm.

    # Some registrars may want the public key so they can compute the digest. The DS
    # record that we suggest using is for the KSK (and that's how the DS records were generated).
    alg_name_map = { '7': 'RSASHA1-NSEC3-SHA1', '8': 'RSASHA256' }
    dnssec_keys = load_env_vars_from_file(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/%s.conf' % alg_name_map[ds_alg]))
    dnsssec_pubkey = open(os.path.join(env['STORAGE_ROOT'], 'dns/dnssec/' + dnssec_keys['KSK'] + '.key')).read().split("\t")[3].split(" ")[3]

    # Query public DNS for the DS record at the registrar.
    ds = query_dns(domain, "DS", nxdomain=None)
    # A valid answer has four space-separated fields (keytag alg digalg digest).
    ds_looks_valid = ds and len(ds.split(" ")) == 4
    if ds_looks_valid: ds = ds.split(" ")
    if ds_looks_valid and ds[0] == ds_keytag and ds[1] == ds_alg and ds[3] == digests.get(ds[2]):
        if is_checking_primary: return
        output.print_ok("DNSSEC 'DS' record is set correctly at registrar.")
    else:
        if ds == None:
            # No DS record at all: DNSSEC simply isn't activated yet (optional).
            if is_checking_primary: return
            output.print_warning("""This domain's DNSSEC DS record is not set. The DS record is optional. The DS record activates DNSSEC.
                To set a DS record, you must follow the instructions provided by your domain name registrar and provide to them this information:""")
        else:
            if is_checking_primary:
                output.print_error("""The DNSSEC 'DS' record for %s is incorrect. See further details below.""" % domain)
                return
            output.print_error("""This domain's DNSSEC DS record is incorrect. The chain of trust is broken between the public DNS system
                and this machine's DNS server. It may take several hours for public DNS to update after a change. If you did not recently
                make a change, you must resolve this immediately by following the instructions provided by your domain name registrar and
                provide to them this information:""")
        # Print the expected values, annotating each with the mismatched value
        # actually seen in DNS where applicable.
        output.print_line("")
        output.print_line("Key Tag: " + ds_keytag + ("" if not ds_looks_valid or ds[0] == ds_keytag else " (Got '%s')" % ds[0]))
        output.print_line("Key Flags: KSK")
        output.print_line(
            ("Algorithm: %s / %s" % (ds_alg, alg_name_map[ds_alg]))
            + ("" if not ds_looks_valid or ds[1] == ds_alg else " (Got '%s')" % ds[1]))
        # see http://www.iana.org/assignments/dns-sec-alg-numbers/dns-sec-alg-numbers.xhtml
        output.print_line("Digest Type: 2 / SHA-256")
        # http://www.ietf.org/assignments/ds-rr-types/ds-rr-types.xml
        output.print_line("Digest: " + digests['2'])
        if ds_looks_valid and ds[3] != digests.get(ds[2]):
            output.print_line("(Got digest type %s and digest %s which do not match.)" % (ds[2], ds[3]))
        output.print_line("Public Key: ")
        output.print_line(dnsssec_pubkey, monospace=True)
        output.print_line("")
        output.print_line("Bulk/Record Format:")
        output.print_line("" + ds_correct[0])
        output.print_line("")
def check_mail_domain(domain, env, output):
    """Check that a domain's DNS is set up to deliver mail to this box.

    Verifies the MX record (or the A-record fallback), that a postmaster@
    address exists unless the domain has a catch-all/alias, and that the
    domain is not listed on the Spamhaus Domain Block List.

    Fix: use `is None` / `is not None` identity checks instead of `== None`
    comparisons (PEP 8; `==` invokes user-defined equality).
    """
    # Check the MX record.
    recommended_mx = "10 " + env['PRIMARY_HOSTNAME']
    mx = query_dns(domain, "MX", nxdomain=None)

    if mx is None:
        mxhost = None
    else:
        # query_dns returns a semicolon-delimited list
        # of priority-host pairs.
        mxhost = mx.split('; ')[0].split(' ')[1]

    if mxhost is None:
        # A missing MX record is okay on the primary hostname because
        # the primary hostname's A record (the MX fallback) is... itself,
        # which is what we want the MX to be.
        if domain == env['PRIMARY_HOSTNAME']:
            output.print_ok("Domain's email is directed to this domain. [%s has no MX record, which is ok]" % (domain,))

        # And a missing MX record is okay on other domains if the A record
        # matches the A record of the PRIMARY_HOSTNAME. Actually this will
        # probably confuse DANE TLSA, but we'll let that slide for now.
        else:
            domain_a = query_dns(domain, "A", nxdomain=None)
            primary_a = query_dns(env['PRIMARY_HOSTNAME'], "A", nxdomain=None)
            if domain_a is not None and domain_a == primary_a:
                output.print_ok("Domain's email is directed to this domain. [%s has no MX record but its A record is OK]" % (domain,))
            else:
                output.print_error("""This domain's DNS MX record is not set. It should be '%s'. Mail will not
                    be delivered to this box. It may take several hours for public DNS to update after a
                    change. This problem may result from other issues listed here.""" % (recommended_mx,))

    elif mxhost == env['PRIMARY_HOSTNAME']:
        good_news = "Domain's email is directed to this domain. [%s ↦ %s]" % (domain, mx)
        if mx != recommended_mx:
            good_news += " This configuration is non-standard. The recommended configuration is '%s'." % (recommended_mx,)
        output.print_ok(good_news)

    else:
        output.print_error("""This domain's DNS MX record is incorrect. It is currently set to '%s' but should be '%s'. Mail will not
            be delivered to this box. It may take several hours for public DNS to update after a change. This problem may result from
            other issues listed here.""" % (mx, recommended_mx))

    # Check that the postmaster@ email address exists. Not required if the domain has a
    # catch-all address or domain alias.
    if "@" + domain not in [address for address, *_ in get_mail_aliases(env)]:
        check_alias_exists("Postmaster contact address", "postmaster@" + domain, env, output)

    # Stop if the domain is listed in the Spamhaus Domain Block List.
    # The user might have chosen a domain that was previously in use by a spammer
    # and will not be able to reliably send mail.
    dbl = query_dns(domain+'.dbl.spamhaus.org', "A", nxdomain=None)
    if dbl is None:
        output.print_ok("Domain is not blacklisted by dbl.spamhaus.org.")
    else:
        output.print_error("""This domain is listed in the Spamhaus Domain Block List (code %s),
            which may prevent recipients from receiving your mail.
            See http://www.spamhaus.org/dbl/ and http://www.spamhaus.org/query/domain/%s.""" % (dbl, domain))
def check_web_domain(domain, rounded_time, ssl_certificates, env, output):
    """Check that a domain's A/AAAA records point at this box and its TLS cert is OK."""
    # See if the domain's A record resolves to our PUBLIC_IP. This is already checked
    # for PRIMARY_HOSTNAME, for which it is required for mail specifically. For it and
    # other domains, it is required to access its website.
    if domain != env['PRIMARY_HOSTNAME']:
        ok_values = []
        for (rtype, expected) in (("A", env['PUBLIC_IP']), ("AAAA", env.get('PUBLIC_IPV6'))):
            if not expected: continue # IPv6 is not configured
            value = query_dns(domain, rtype)
            if value == expected:
                ok_values.append(value)
            else:
                output.print_error("""This domain should resolve to your box's IP address (%s %s) if you would like the box to serve
                    webmail or a website on this domain. The domain currently resolves to %s in public DNS. It may take several hours for
                    public DNS to update after a change. This problem may result from other issues listed here.""" % (rtype, expected, value))
                # Bail on the first wrong record; the TLS check below is skipped too.
                return

        # If both A and AAAA are correct...
        output.print_ok("Domain resolves to this box's IP address. [%s ↦ %s]" % (domain, '; '.join(ok_values)))

    # We need a TLS certificate for PRIMARY_HOSTNAME because that's where the
    # user will log in with IMAP or webmail. Any other domain we serve a
    # website for also needs a signed certificate.
    check_ssl_cert(domain, rounded_time, ssl_certificates, env, output)
def query_dns(qname, rtype, nxdomain='[Not Set]', at=None):
    """Resolve `qname`/`rtype` and return a normalized answer string.

    Returns `nxdomain` when the name does not resolve, "[timeout]" on a
    timeout, and otherwise a sorted, semicolon-joined list of the answers
    with trailing periods stripped.
    """
    # Make the qname absolute by appending a period. Without this, dns.resolver.query
    # will fall back a failed lookup to a second query with this machine's hostname
    # appended. This has been causing some false-positive Spamhaus reports. The
    # reverse DNS lookup will pass a dns.name.Name instance which is already
    # absolute so we should not modify that.
    if isinstance(qname, str):
        qname += "."

    # Use the default nameservers (as defined by the system, which is our locally
    # running bind server), or if the 'at' argument is specified, use that host
    # as the nameserver.
    resolver = dns.resolver.get_default_resolver()
    if at:
        resolver = dns.resolver.Resolver()
        resolver.nameservers = [at]

    # Set a timeout so that a non-responsive server doesn't hold us back.
    resolver.timeout = 5

    # Do the query.
    try:
        response = resolver.query(qname, rtype)
    except dns.exception.Timeout:
        return "[timeout]"
    except (dns.resolver.NoNameservers, dns.resolver.NXDOMAIN, dns.resolver.NoAnswer):
        # Host did not have an answer for this query.
        return nxdomain

    # There may be multiple answers; concatenate the response. Remove trailing
    # periods from responses since that's how qnames are encoded in DNS but is
    # confusing for us. The order of the answers doesn't matter, so sort so we
    # can compare to a well known order.
    answers = [str(record).rstrip('.') for record in response]
    answers.sort()
    return "; ".join(answers)
def check_ssl_cert(domain, rounded_time, ssl_certificates, env, output):
    """Check that the domain has an installed, signed, and valid TLS certificate."""
    # Check that TLS certificate is signed.

    # Skip the check if the A record is not pointed here.
    if query_dns(domain, "A", None) not in (env['PUBLIC_IP'], None): return

    # Where is the certificate file stored?
    tls_cert = get_domain_ssl_files(domain, ssl_certificates, env, allow_missing_cert=True)
    if tls_cert is None:
        output.print_warning("""No TLS (SSL) certificate is installed for this domain. Visitors to a website on
            this domain will get a security warning. If you are not serving a website on this domain, you do
            not need to take any action. Use the TLS Certificates page in the control panel to install a
            TLS certificate.""")
        return

    # Check that the certificate is good.
    cert_status, cert_status_details = check_certificate(domain, tls_cert["certificate"], tls_cert["private-key"], rounded_time=rounded_time)

    if cert_status == "OK":
        # The certificate is ok. The details has expiry info.
        output.print_ok("TLS (SSL) certificate is signed & valid. " + cert_status_details)

    elif cert_status == "SELF-SIGNED":
        # Offer instructions for purchasing a signed certificate.
        if domain == env['PRIMARY_HOSTNAME']:
            output.print_error("""The TLS (SSL) certificate for this domain is currently self-signed. You will get a security
                warning when you check or send email and when visiting this domain in a web browser (for webmail or
                static site hosting).""")
        else:
            output.print_error("""The TLS (SSL) certificate for this domain is self-signed.""")

    else:
        # Any other status string describes a concrete problem with the cert.
        output.print_error("The TLS (SSL) certificate has a problem: " + cert_status)
        if cert_status_details:
            output.print_line("")
            output.print_line(cert_status_details)
            output.print_line("")
# Module-level cache: (timestamp, package list) of the last apt query.
_apt_updates = None

def list_apt_updates(apt_update=True):
    """Return the list of pending apt package upgrades.

    Results are cached for 8 hours. When apt_update is True the package
    index is refreshed first (slow); the status-checks web page passes
    False because a daily cron refresh is assumed.
    """
    global _apt_updates

    # Serve from the cache when it is fresh enough.
    cache_cutoff = datetime.datetime.now() - datetime.timedelta(hours=8)
    if _apt_updates is not None and _apt_updates[0] > cache_cutoff:
        return _apt_updates[1]

    # Refresh the package list if requested.
    if apt_update:
        shell("check_call", ["/usr/bin/apt-get", "-qq", "update"])

    # Simulate an upgrade to learn what apt would install.
    simulated_install = shell("check_output", ["/usr/bin/apt-get", "-qq", "-s", "upgrade"])
    pkgs = []
    for line in simulated_install.split('\n'):
        if line.strip() == "":
            continue
        if re.match(r'^Conf .*', line):
            # remove these lines, not informative
            continue
        m = re.match(r'^Inst (.*) \[(.*)\] \((\S*)', line)
        if m is None:
            # Unrecognized line; surface it verbatim so nothing is hidden.
            pkgs.append({ "package": "[" + line + "]", "version": "", "current_version": "" })
        else:
            pkgs.append({ "package": m.group(1), "version": m.group(3), "current_version": m.group(2) })

    # Cache for future requests.
    _apt_updates = (datetime.datetime.now(), pkgs)

    return pkgs
def what_version_is_this(env):
    """Return the version tag of this Mail-in-a-Box installation.

    Runs `git describe --abbrev=0` on the installation directory. Git may
    not be installed and Mail-in-a-Box may not have been cloned from github,
    so this function may raise all sorts of exceptions.
    """
    miab_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    git_env = {"GIT_DIR": os.path.join(miab_dir, '.git')}
    describe_output = shell("check_output", ["/usr/bin/git", "describe", "--abbrev=0"], env=git_env)
    return describe_output.strip()
def get_latest_miab_version():
    """Return the latest released Mail-in-a-Box version string.

    Pings https://mailinabox.email/setup.sh and extracts the TAG= value
    named in the script to determine the current product version.
    """
    import urllib.request
    setup_script = urllib.request.urlopen("https://mailinabox.email/setup.sh?ping=1").read()
    tag_match = re.search(b'TAG=(.*)', setup_script)
    return tag_match.group(1).decode("utf8")
def check_miab_version(env, output):
    """Report whether this Mail-in-a-Box installation is up to date.

    Skipped (with a warning) when the privacy setting disables the remote
    version check.

    Fix: replace the bare `except:` with `except Exception:` so that
    KeyboardInterrupt/SystemExit are not swallowed while determining the
    local version.
    """
    config = load_settings(env)

    if config.get("privacy", True):
        output.print_warning("Mail-in-a-Box version check disabled by privacy setting.")
    else:
        try:
            this_ver = what_version_is_this(env)
        except Exception:
            # Git may be missing or the install may not be a git clone.
            this_ver = "Unknown"

        latest_ver = get_latest_miab_version()

        if this_ver == latest_ver:
            output.print_ok("Mail-in-a-Box is up to date. You are running version %s." % this_ver)
        else:
            output.print_error("A new version of Mail-in-a-Box is available. You are running version %s. The latest version is %s. For upgrade instructions, see https://mailinabox.email. "
                % (this_ver, latest_ver))
def run_and_output_changes(env, pool):
    """Run the status checks and print only what changed since the last run.

    Compares the current check output against the cached output from the
    previous run (grouped by heading) and prints added/removed/changed
    lines, then refreshes the cache.

    Fixes: `prev` was only assigned when the cache file existed, causing a
    NameError on the first run; it now defaults to an empty history. File
    handles are also closed via context managers.
    """
    import json
    from difflib import SequenceMatcher

    out = ConsoleOutput()

    # Run status checks.
    cur = BufferedOutput()
    run_checks(True, env, cur, pool)

    # Load previously saved status checks. Default to an empty history so a
    # missing cache file (first run) compares cleanly as "everything added".
    prev = []
    cache_fn = "/var/cache/mailinabox/status_checks.json"
    if os.path.exists(cache_fn):
        with open(cache_fn) as f:
            prev = json.load(f)

    # Group the serial output into categories by the headings.
    def group_by_heading(lines):
        from collections import OrderedDict
        ret = OrderedDict()
        k = []
        ret["No Category"] = k
        for line_type, line_args, line_kwargs in lines:
            if line_type == "add_heading":
                k = []
                ret[line_args[0]] = k
            else:
                k.append((line_type, line_args, line_kwargs))
        return ret
    prev_status = group_by_heading(prev)
    cur_status = group_by_heading(cur.buf)

    # Compare the previous to the current status checks
    # category by category.
    for category, cur_lines in cur_status.items():
        if category not in prev_status:
            out.add_heading(category + " -- Added")
            BufferedOutput(with_lines=cur_lines).playback(out)
        else:
            # Diff the previous and current lines within this category.
            prev_lines = prev_status[category]
            def stringify(lines):
                # SequenceMatcher needs hashable elements; JSON-encode each line.
                return [json.dumps(line) for line in lines]
            diff = SequenceMatcher(None, stringify(prev_lines), stringify(cur_lines)).get_opcodes()
            for op, i1, i2, j1, j2 in diff:
                if op == "replace":
                    out.add_heading(category + " -- Previously:")
                elif op == "delete":
                    out.add_heading(category + " -- Removed")
                if op in ("replace", "delete"):
                    BufferedOutput(with_lines=prev_lines[i1:i2]).playback(out)

                if op == "replace":
                    out.add_heading(category + " -- Currently:")
                elif op == "insert":
                    out.add_heading(category + " -- Added")
                if op in ("replace", "insert"):
                    BufferedOutput(with_lines=cur_lines[j1:j2]).playback(out)

    # Note categories that disappeared entirely.
    for category, prev_lines in prev_status.items():
        if category not in cur_status:
            out.add_heading(category)
            out.print_warning("This section was removed.")

    # Store the current status checks output for next time.
    os.makedirs(os.path.dirname(cache_fn), exist_ok=True)
    with open(cache_fn, "w") as f:
        json.dump(cur.buf, f, indent=True)
class FileOutput:
    """Writes status-check results to a file-like object, word-wrapped.

    Fix: regex patterns are now raw strings (`r"\n\s*"`, `r"(\s+)"`) —
    the unescaped forms emit DeprecationWarning (invalid escape sequence)
    on modern Python.
    """
    def __init__(self, buf, width):
        # buf: a writable file-like object.
        # width: maximum output line width in characters, or None to disable wrapping.
        self.buf = buf
        self.width = width

    def add_heading(self, heading):
        # Blank line, the heading, then an '=' underline of matching length.
        print(file=self.buf)
        print(heading, file=self.buf)
        print("=" * len(heading), file=self.buf)

    def print_ok(self, message):
        self.print_block(message, first_line="✓ ")

    def print_error(self, message):
        self.print_block(message, first_line="✖ ")

    def print_warning(self, message):
        self.print_block(message, first_line="? ")

    def print_block(self, message, first_line=" "):
        # Emit `message` prefixed by `first_line`, re-flowing triple-quoted
        # multi-line messages into wrapped paragraphs.
        print(first_line, end='', file=self.buf)
        # Collapse newlines plus their following indentation into single spaces.
        message = re.sub(r"\n\s*", " ", message)
        words = re.split(r"(\s+)", message)
        linelen = 0
        for w in words:
            if self.width and (linelen + len(w) > self.width-1-len(first_line)):
                # Wrap: start a new, slightly indented continuation line.
                print(file=self.buf)
                print(" ", end="", file=self.buf)
                linelen = 0
            if linelen == 0 and w.strip() == "": continue  # don't start a line with whitespace
            print(w, end="", file=self.buf)
            linelen += len(w)
        print(file=self.buf)

    def print_line(self, message, monospace=False):
        # monospace is accepted for interface compatibility with other output
        # implementations; plain file output renders all text the same way.
        for line in message.split("\n"):
            self.print_block(line)
class ConsoleOutput(FileOutput):
    """FileOutput that writes to stdout, wrapped to the terminal width.

    Fix: the bare `except:` around the stty call is narrowed to
    `except Exception:` so KeyboardInterrupt/SystemExit are not swallowed.
    """
    def __init__(self):
        self.buf = sys.stdout

        # Do nice line-wrapping according to the size of the terminal.
        # The 'stty' program queries standard input for terminal information.
        if sys.stdin.isatty():
            try:
                self.width = int(shell('check_output', ['stty', 'size']).split()[1])
            except Exception:
                # stty may be unavailable or return unparseable output;
                # fall back to a conservative fixed width.
                self.width = 76
        else:
            # However if standard input is not a terminal, we would get
            # "stty: standard input: Inappropriate ioctl for device". So
            # we test with sys.stdin.isatty first, and if it is not a
            # terminal don't do any line wrapping. When this script is
            # run from cron, or if stdin has been redirected, this happens.
            self.width = None
class BufferedOutput:
    """Records output-method calls so they can be replayed later on a real output."""
    def __init__(self, with_lines=None):
        # Start from an existing recorded buffer, or a fresh one.
        self.buf = with_lines if with_lines else []

    def __getattr__(self, attr):
        # Only the known output methods may be recorded; anything else is a
        # genuine missing attribute.
        recordable = ("add_heading", "print_ok", "print_error", "print_warning", "print_block", "print_line")
        if attr not in recordable:
            raise AttributeError
        # Return a function that just records the call & arguments to our buffer.
        def recorder(*args, **kwargs):
            self.buf.append((attr, args, kwargs))
        return recorder

    def playback(self, output):
        # Re-issue every recorded call against the given output object.
        for attr, args, kwargs in self.buf:
            getattr(output, attr)(*args, **kwargs)
if __name__ == "__main__":
    from utils import load_environment

    env = load_environment()
    # Checks are parallelized across domains with a process pool.
    pool = multiprocessing.pool.Pool(processes=10)

    if len(sys.argv) == 1:
        # No arguments: run all checks and print everything to the console.
        run_checks(False, env, ConsoleOutput(), pool)
    elif sys.argv[1] == "--show-changes":
        # Print only the differences since the previous (cached) run.
        run_and_output_changes(env, pool)
    elif sys.argv[1] == "--check-primary-hostname":
        # See if the primary hostname appears resolvable and has a signed certificate.
        # Exits 0 on success and 1 on any failure (intended for scripting).
        domain = env['PRIMARY_HOSTNAME']
        if query_dns(domain, "A") != env['PUBLIC_IP']:
            sys.exit(1)
        ssl_certificates = get_ssl_certificates(env)
        tls_cert = get_domain_ssl_files(domain, ssl_certificates, env)
        if not os.path.exists(tls_cert["certificate"]):
            sys.exit(1)
        cert_status, cert_status_details = check_certificate(domain, tls_cert["certificate"], tls_cert["private-key"], warn_if_expiring_soon=False)
        if cert_status != "OK":
            sys.exit(1)
        sys.exit(0)
    elif sys.argv[1] == "--version":
        # Print the installed Mail-in-a-Box version tag.
        print(what_version_is_this(env))
|
from colab.plugins.utils.proxy_data_api import ProxyDataAPI
class JenkinsDataAPI(ProxyDataAPI):
    """Colab proxy data API for Jenkins."""

    def fetch_data(self):
        # Not implemented yet; the ProxyDataAPI interface requires this hook.
        pass
|
"""Library for performing speech recognition with the Google Speech Recognition API."""
__author__ = 'Anthony Zhang (Uberi)'
__version__ = '1.0.4'
__license__ = 'BSD'
import io, subprocess, wave, shutil
import math, audioop, collections
import json, urllib.request
class AudioSource(object):
    """Abstract base class for audio sources.

    Concrete subclasses implement the context-manager protocol; entering
    the context prepares the source and exposes a readable ``stream``
    attribute along with SAMPLE_WIDTH/RATE/CHANNELS/CHUNK parameters.
    """
    def __init__(self):
        raise NotImplementedError("this is an abstract class")

    def __enter__(self):
        raise NotImplementedError("this is an abstract class")

    def __exit__(self, exc_type, exc_value, traceback):
        raise NotImplementedError("this is an abstract class")
try:
    import pyaudio

    class Microphone(AudioSource):
        """AudioSource that records from a system microphone via PyAudio."""
        def __init__(self, device_index = None):
            # device_index of None selects the system default input device.
            self.device_index = device_index
            self.format = pyaudio.paInt16 # 16-bit int sampling
            self.SAMPLE_WIDTH = pyaudio.get_sample_size(self.format)
            self.RATE = 16000 # sampling rate in Hertz
            self.CHANNELS = 1 # mono audio
            self.CHUNK = 1024 # number of frames stored in each buffer
            # Set lazily in __enter__ / torn down in __exit__.
            self.audio = None
            self.stream = None

        def __enter__(self):
            # Open the PyAudio input stream with the parameters above.
            self.audio = pyaudio.PyAudio()
            self.stream = self.audio.open(
                input_device_index = self.device_index,
                format = self.format, rate = self.RATE, channels = self.CHANNELS, frames_per_buffer = self.CHUNK,
                input = True, # stream is an input stream
            )
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            # Stop and release the stream, then shut PyAudio down.
            self.stream.stop_stream()
            self.stream.close()
            self.stream = None
            self.audio.terminate()
except ImportError:
    # PyAudio is optional; without it, microphone input is simply unavailable.
    pass
class WavFile(AudioSource):
    """AudioSource that reads audio from a mono WAV file (path or file-like object)."""
    def __init__(self, filename_or_fileobject):
        if isinstance(filename_or_fileobject, str):
            # A path was given; the file is opened lazily in __enter__.
            self.filename = filename_or_fileobject
        else:
            # An already-open file-like object was given; we never close it.
            self.filename = None
            self.wav_file = filename_or_fileobject
        self.stream = None

    def __enter__(self):
        if self.filename: self.wav_file = open(self.filename, "rb")
        self.wav_reader = wave.open(self.wav_file, "rb")
        # Expose the audio parameters expected by Recognizer.
        self.SAMPLE_WIDTH = self.wav_reader.getsampwidth()
        self.RATE = self.wav_reader.getframerate()
        self.CHANNELS = self.wav_reader.getnchannels()
        assert self.CHANNELS == 1 # audio must be mono
        self.CHUNK = 4096
        self.stream = WavFile.WavStream(self.wav_reader)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Only close the file if we opened it ourselves (a path was given).
        if self.filename: self.wav_file.close()
        self.stream = None

    class WavStream(object):
        # Minimal adapter giving wave.Wave_read a file-like read() interface.
        def __init__(self, wav_reader):
            self.wav_reader = wav_reader

        def read(self, size = -1):
            # size == -1 mirrors file.read(): return all remaining frames.
            if size == -1:
                return self.wav_reader.readframes(self.wav_reader.getnframes())
            return self.wav_reader.readframes(size)
class AudioData(object):
    """Container pairing a sampling rate (Hz) with encoded audio bytes."""
    def __init__(self, rate, data):
        # Keep both values as given; no copying or validation is performed.
        self.data = data
        self.rate = rate
class Recognizer(AudioSource):
    """Records audio from an AudioSource and recognizes speech via the
    Google Speech Recognition v2 HTTP API.
    """
    def __init__(self, language = "fr-FR", key = "AIzaSyBOti4mM-6x9WDnZIjIeyEU21OpBXqWBgw"):
        # NOTE(review): this default is a hard-coded shared API key; consider
        # requiring callers to supply their own key.
        self.key = key
        self.language = language

        self.energy_threshold = 1500 # minimum audio energy to consider for recording
        self.pause_threshold = 0.8 # seconds of quiet time before a phrase is considered complete
        self.quiet_duration = 0.5 # amount of quiet time to keep on both sides of the recording

    def samples_to_flac(self, source, frame_data):
        """Convert raw PCM frames captured from `source` into FLAC-encoded bytes."""
        import platform, os
        # First wrap the raw frames into an in-memory WAV file.
        with io.BytesIO() as wav_file:
            with wave.open(wav_file, "wb") as wav_writer:
                wav_writer.setsampwidth(source.SAMPLE_WIDTH)
                wav_writer.setnchannels(source.CHANNELS)
                wav_writer.setframerate(source.RATE)
                wav_writer.writeframes(frame_data)
            wav_data = wav_file.getvalue()
        # determine which converter executable to use
        system = platform.system()
        path = os.path.dirname(os.path.abspath(__file__)) # directory of the current module file, where all the FLAC bundled binaries are stored
        if shutil.which("flac") is not None: # check for installed version first
            flac_converter = shutil.which("flac")
        elif system == "Windows" and platform.machine() in {"i386", "x86", "x86_64", "AMD64"}: # Windows NT, use the bundled FLAC conversion utility
            flac_converter = os.path.join(path, "flac-win32.exe")
        elif system == "Linux" and platform.machine() in {"i386", "x86", "x86_64", "AMD64"}:
            flac_converter = os.path.join(path, "flac-linux-i386")
        else:
            raise ChildProcessError("FLAC conversion utility not available - consider installing the FLAC utility")
        # NOTE(review): shell=True with an interpolated executable path; the
        # bundled paths are fixed, but a user-controlled PATH entry for "flac"
        # runs through the shell — confirm this is acceptable.
        process = subprocess.Popen("\"%s\" --stdout --totally-silent --best -" % flac_converter, stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
        flac_data, stderr = process.communicate(wav_data)
        return flac_data

    def record(self, source, duration = None):
        """Record up to `duration` seconds (or until the stream ends) from
        `source` and return the audio as FLAC-encoded AudioData."""
        assert isinstance(source, AudioSource) and source.stream
        frames = io.BytesIO()
        seconds_per_buffer = source.CHUNK / source.RATE
        elapsed_time = 0
        while True: # loop for the total number of chunks needed
            elapsed_time += seconds_per_buffer
            if duration and elapsed_time > duration: break
            buffer = source.stream.read(source.CHUNK)
            if len(buffer) == 0: break
            frames.write(buffer)
        frame_data = frames.getvalue()
        frames.close()
        return AudioData(source.RATE, self.samples_to_flac(source, frame_data))

    def listen(self, source, timeout = None):
        """Wait for a phrase on `source` (energy above the threshold), record it
        until a pause, and return it as FLAC-encoded AudioData.

        Raises TimeoutError if no phrase starts within `timeout` seconds.
        """
        assert isinstance(source, AudioSource) and source.stream

        # record audio data as raw samples
        frames = collections.deque()
        assert self.pause_threshold >= self.quiet_duration >= 0
        seconds_per_buffer = source.CHUNK / source.RATE
        pause_buffer_count = math.ceil(self.pause_threshold / seconds_per_buffer) # number of buffers of quiet audio before the phrase is complete
        quiet_buffer_count = math.ceil(self.quiet_duration / seconds_per_buffer) # maximum number of buffers of quiet audio to retain before and after
        elapsed_time = 0

        # store audio input until the phrase starts
        while True:
            # handle timeout if specified
            elapsed_time += seconds_per_buffer
            if timeout and elapsed_time > timeout:
                raise TimeoutError("listening timed out")

            buffer = source.stream.read(source.CHUNK)
            if len(buffer) == 0: break # reached end of the stream
            frames.append(buffer)

            # check if the audio input has stopped being quiet
            energy = audioop.rms(buffer, source.SAMPLE_WIDTH) # energy of the audio signal
            if energy > self.energy_threshold:
                break

            if len(frames) > quiet_buffer_count: # ensure we only keep the needed amount of quiet buffers
                frames.popleft()

        # read audio input until the phrase ends
        pause_count = 0
        while True:
            buffer = source.stream.read(source.CHUNK)
            if len(buffer) == 0: break # reached end of the stream
            frames.append(buffer)

            # check if the audio input has gone quiet for longer than the pause threshold
            energy = audioop.rms(buffer, source.SAMPLE_WIDTH) # energy of the audio signal
            if energy > self.energy_threshold:
                pause_count = 0
            else:
                pause_count += 1
            if pause_count > pause_buffer_count: # end of the phrase
                break

        # obtain frame data
        # NOTE(review): this drops (pause_buffer_count - quiet_buffer_count)
        # trailing buffers to trim excess quiet audio from the end; confirm the
        # intended behavior when the stream ended before a full pause elapsed.
        for i in range(quiet_buffer_count, pause_buffer_count): frames.pop() # remove extra quiet frames at the end
        frame_data = b"".join(list(frames))

        return AudioData(source.RATE, self.samples_to_flac(source, frame_data))

    def recognize(self, audio_data, show_all = False):
        """Send `audio_data` to the Google Speech API and return the transcript.

        With show_all=False returns the best transcript string; with
        show_all=True returns a list of {"text", "confidence"} dicts.
        Raises KeyError if the server does not respond and LookupError if
        the speech is unintelligible.
        """
        assert isinstance(audio_data, AudioData)

        url = "http://www.google.com/speech-api/v2/recognize?client=chromium&lang=%s&key=%s" % (self.language, self.key)
        self.request = urllib.request.Request(url, data = audio_data.data, headers = {"Content-Type": "audio/x-flac; rate=%s" % audio_data.rate})

        # check for invalid key response from the server
        # NOTE(review): the bare except hides the real network error; consider
        # narrowing to urllib.error.URLError.
        try:
            response = urllib.request.urlopen(self.request)
        except:
            raise KeyError("Server wouldn't respond (invalid key or quota has been maxed out)")
        response_text = response.read().decode("utf-8")

        # ignore any blank blocks
        actual_result = []
        for line in response_text.split("\n"):
            if not line: continue
            result = json.loads(line)["result"]
            if len(result) != 0:
                actual_result = result[0]

        # make sure we have a list of transcriptions
        if "alternative" not in actual_result:
            raise LookupError("Speech is unintelligible")

        # return the best guess unless told to do otherwise
        if not show_all:
            for prediction in actual_result["alternative"]:
                if "confidence" in prediction:
                    return prediction["transcript"]
            raise LookupError("Speech is unintelligible")

        spoken_text = []

        # check to see if Google thinks it's 100% correct
        default_confidence = 0
        if len(actual_result["alternative"])==1: default_confidence = 1

        # return all the possibilities
        for prediction in actual_result["alternative"]:
            if "confidence" in prediction:
                spoken_text.append({"text":prediction["transcript"],"confidence":prediction["confidence"]})
            else:
                spoken_text.append({"text":prediction["transcript"],"confidence":default_confidence})
        return spoken_text
if __name__ == "__main__":
    # Simple demo loop: record a phrase from the default microphone and print
    # the recognized text, repeating until interrupted.
    r = Recognizer()
    m = Microphone()
    while True:
        print("Say something!")
        with m as source:
            audio = r.listen(source)
        print("Got it! Now to recognize it...")
        try:
            print("You said " + r.recognize(audio))
        except LookupError:
            # Raised when the speech was unintelligible.
            print("Oops! Didn't catch that")
|
from ftw.upgrade import UpgradeStep
from plone import api
class AddLanguageIndex(UpgradeStep):
    """Add Language index."""

    def __call__(self):
        # Apply the upgrade profile (which registers the new index), then
        # reindex Language on every survey object in the catalog.
        self.install_upgrade_profile()
        catalog = api.portal.get_tool("portal_catalog")
        for brain in catalog(portal_type="euphorie.survey"):
            brain.getObject().reindexObject(idxs=["Language"])
|
"""Test API for Zenodo and GitHub integration."""
from __future__ import absolute_import, print_function
from contextlib import contextmanager
from copy import deepcopy
import pytest
from flask import current_app
from invenio_accounts.models import User
from invenio_github.models import Release, ReleaseStatus, Repository
from invenio_pidrelations.contrib.versioning import PIDVersioning
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from invenio_sipstore.models import SIP
from mock import MagicMock, Mock, patch
from six import BytesIO
from zenodo.modules.deposit.tasks import datacite_register
from zenodo.modules.github.api import ZenodoGitHubRelease
from zenodo.modules.github.utils import is_github_owner, is_github_versioned
from zenodo.modules.records.api import ZenodoRecord
from zenodo.modules.records.minters import zenodo_record_minter
from zenodo.modules.records.permissions import has_newversion_permission, \
has_update_permission
# Parameters for test_github_creators_metadata below. Each case is:
# (defaults, GitHub contributors, GitHub owner, expected 'creators' metadata).
creators_params = (
    # No creators default: fall back to the repository contributors.
    (dict(),
     [dict(name='Contributor', affiliation='X'), ],
     [dict(name='Owner', affiliation='Y'), ],
     [dict(name='Contributor', affiliation='X'), ]),
    # Empty creators list: fall back to the repository owner.
    (dict(creators=[]),  # List of creators provided as empty
     [dict(name='Contributor', affiliation='X'), ],
     [dict(name='Owner', affiliation='Y'), ],
     [dict(name='Owner', affiliation='Y'), ]),
    # creators is None and the owner lookup failed: fall back to 'Unknown'.
    (dict(creators=None),
     [dict(name='Contributor', affiliation='X'), ],
     None,  # Failed to get GH owner
     [dict(name='Unknown', affiliation=''), ]),
)
@pytest.mark.parametrize('defaults,contribs,owner,output', creators_params)
@patch('zenodo.modules.github.api.get_owner')
@patch('zenodo.modules.github.api.get_contributors')
@patch('zenodo.modules.github.api.legacyjson_v1_translator')
def test_github_creators_metadata(m_ljv1t, m_get_contributors, m_get_owner,
                                  defaults, contribs, owner, output):
    """Test 'creators' metadata fetching from GitHub."""
    # patch decorators apply bottom-up, so m_ljv1t is the translator mock and
    # m_get_owner the owner mock. Stub the GitHub lookups with the
    # parametrized values.
    m_get_contributors.return_value = contribs
    m_get_owner.return_value = owner
    release = MagicMock()
    release.event.user_id = 1
    release.event.payload['repository']['id'] = 1
    zgh = ZenodoGitHubRelease(release)
    zgh.defaults = defaults
    zgh.gh.api = None
    zgh.extra_metadata = {}
    # Accessing the property triggers metadata assembly (value not needed).
    zgh.metadata
    # The legacy-JSON translator must receive the resolved creators list.
    m_ljv1t.assert_called_with({'metadata': {'creators': output}})
@patch('zenodo.modules.github.api.ZenodoGitHubRelease.metadata')
@patch('invenio_pidstore.providers.datacite.DataCiteMDSClient')
def test_github_publish(datacite_mock, zgh_meta, db, users, location,
                        deposit_metadata):
    """Test basic GitHub payload."""
    # Fake the HTTP response used to download the release file.
    data = b'foobar'
    resp = Mock()
    resp.headers = {'Content-Length': len(data)}
    resp.raw = BytesIO(b'foobar')
    resp.status_code = 200
    # Fake github3 client returning the response above.
    gh3mock = MagicMock()
    gh3mock.api.session.get = Mock(return_value=resp)
    gh3mock.account.user.email = 'foo@baz.bar'
    release = MagicMock()
    release.event.user_id = 1
    release.event.payload['release']['author']['id'] = 1
    release.event.payload['foo']['bar']['baz'] = 1
    release.event.payload['repository']['id'] = 1

    zgh = ZenodoGitHubRelease(release)
    zgh.gh = gh3mock
    zgh.release = dict(author=dict(id=1))
    zgh.metadata = deposit_metadata
    zgh.files = (('foobar.txt', None), )
    # Pretend this repository has no previously published releases.
    zgh.model.repository.releases.filter_by().count.return_value = 0

    datacite_task_mock = MagicMock()
    # We have to make the call to the task synchronous
    datacite_task_mock.delay = datacite_register.apply
    with patch('zenodo.modules.deposit.tasks.datacite_register',
               new=datacite_task_mock):
        zgh.publish()

    # datacite should be called twice - for regular DOI and Concept DOI
    assert datacite_mock().metadata_post.call_count == 2
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.1', 'https://zenodo.org/record/1')
    datacite_mock().doi_post.assert_any_call(
        '10.5072/zenodo.2', 'https://zenodo.org/record/2')

    # The archived SIP must carry the GitHub-client agent information.
    expected_sip_agent = {
        'email': 'foo@baz.bar',
        '$schema': 'http://zenodo.org/schemas/sipstore/'
                   'agent-githubclient-v1.0.0.json',
        'user_id': 1,
        'github_id': 1,
    }
    gh_sip = SIP.query.one()
    assert gh_sip.agent == expected_sip_agent
@patch('invenio_github.api.GitHubAPI.check_sync', new=lambda *_, **__: False)
def test_github_newversion_permissions(app, db, minimal_record, users, g_users,
                                       g_remoteaccounts):
    """Test new version creation permissions for GitHub records.

    Walks three scenarios -- a release owned by ``old_owner``, the same
    repository after ownership moves to ``new_owner``, and a manual
    (non-GitHub) record -- asserting ``is_github_owner``,
    ``has_update_permission`` and ``has_newversion_permission`` for both
    users at every step.  ``check_sync`` is patched out so no live GitHub
    calls are made.
    """
    old_owner, new_owner = [User.query.get(u['id']) for u in g_users]
    # Create repository, and set owner to `old_owner`
    repo = Repository.create(
        name='foo/bar', github_id=8000, user_id=old_owner.id, hook=1234)
    # Create conceptrecid for the GitHub records
    conceptrecid = PersistentIdentifier.create(
        'recid', '100', status=PIDStatus.RESERVED)

    def create_deposit_and_record(pid_value, owner):
        """Utility function for creating records and deposits."""
        recid = PersistentIdentifier.create(
            'recid', pid_value, status=PIDStatus.RESERVED)
        pv = PIDVersioning(parent=conceptrecid)
        pv.insert_draft_child(recid)
        depid = PersistentIdentifier.create(
            'depid', pid_value, status=PIDStatus.REGISTERED)
        deposit = ZenodoRecord.create({'_deposit': {'id': depid.pid_value},
                                       'conceptrecid': conceptrecid.pid_value,
                                       'recid': recid.pid_value})
        deposit.commit()
        depid.assign('rec', deposit.id)
        record_metadata = deepcopy(minimal_record)
        record_metadata['_deposit'] = {'id': depid.pid_value}
        record_metadata['conceptrecid'] = conceptrecid.pid_value
        record_metadata['recid'] = int(recid.pid_value)
        record_metadata['owners'] = [owner.id]
        record = ZenodoRecord.create(record_metadata)
        zenodo_record_minter(record.id, record)
        record.commit()
        return (depid, deposit, recid, record)

    # Create first GitHub record (by `old_owner`)
    depid1, d1, recid1, r1 = create_deposit_and_record('101', old_owner)
    rel1 = Release(release_id=111, repository_id=repo.id, record_id=d1.id,
                   status=ReleaseStatus.PUBLISHED)
    db.session.add(rel1)
    db.session.commit()
    assert is_github_versioned(recid1)

    @contextmanager
    def set_identity(user):
        """Temporarily run the request as `user`; revert to anonymous on exit."""
        from flask_principal import AnonymousIdentity, Identity
        principal = current_app.extensions['security'].principal
        principal.set_identity(Identity(user))
        yield
        principal.set_identity(AnonymousIdentity())

    with app.test_request_context():
        with set_identity(old_owner):
            assert is_github_owner(old_owner, recid1)
            assert has_update_permission(old_owner, r1)
            assert has_newversion_permission(old_owner, r1)
        with set_identity(new_owner):
            assert not is_github_owner(new_owner, recid1)
            assert not has_update_permission(new_owner, r1)
            assert not has_newversion_permission(new_owner, r1)
    # Change the repository owner
    repo.user_id = new_owner.id
    db.session.add(repo)
    db.session.commit()
    with app.test_request_context():
        with set_identity(old_owner):
            assert not is_github_owner(old_owner, recid1)
            # `old_owner` can edit his record of course
            assert has_update_permission(old_owner, r1)
            assert has_newversion_permission(old_owner, r1)
        with set_identity(new_owner):
            assert is_github_owner(new_owner, recid1)
            # `new_owner` can't edit the `old_owner`'s record
            assert not has_update_permission(new_owner, r1)
            assert not has_newversion_permission(new_owner, r1)
    # Create second GitHub record (by `new_owner`)
    depid2, d2, recid2, r2 = create_deposit_and_record('102', new_owner)
    rel2 = Release(release_id=222, repository_id=repo.id, record_id=d2.id,
                   status=ReleaseStatus.PUBLISHED)
    db.session.add(rel2)
    db.session.commit()
    with app.test_request_context():
        with set_identity(old_owner):
            assert not is_github_owner(old_owner, recid1)
            assert not is_github_owner(old_owner, recid2)
            assert has_update_permission(old_owner, r1)
            # `old_owner` can't edit the `new_owner`'s record
            assert not has_update_permission(old_owner, r2)
            # NOTE(review): once the repo changed hands, `old_owner` loses
            # new-version rights even on his own earlier release.
            assert not has_newversion_permission(old_owner, r1)
            assert not has_newversion_permission(old_owner, r2)
        with set_identity(new_owner):
            assert is_github_owner(new_owner, recid1)
            assert is_github_owner(new_owner, recid2)
            assert not has_update_permission(new_owner, r1)
            # `new_owner` can edit his newly released record
            assert has_update_permission(new_owner, r2)
            assert has_newversion_permission(new_owner, r1)
            assert has_newversion_permission(new_owner, r2)
    # Create a manual record (by `new_owner`)
    depid3, d3, recid3, r3 = create_deposit_and_record('103', new_owner)
    db.session.commit()
    with app.test_request_context():
        with set_identity(old_owner):
            assert not is_github_owner(old_owner, recid3)
            assert not has_update_permission(old_owner, r3)
            assert not has_newversion_permission(old_owner, r3)
        with set_identity(new_owner):
            assert is_github_owner(new_owner, recid3)
            assert has_update_permission(new_owner, r3)
            assert has_newversion_permission(new_owner, r3)
|
"""sdist tests"""
import os
import shutil
import sys
import tempfile
import unittest
import urllib
import unicodedata
import posixpath
from StringIO import StringIO
from setuptools.command.sdist import sdist
from setuptools.command.egg_info import manifest_maker
from setuptools.dist import Distribution
# Minimal project metadata shared by every test; `sdist_test` ships *.txt
# files through package_data (and only *.txt -- see the c.rst assertions).
SETUP_ATTRS = {
    'name': 'sdist_test',
    'version': '0.0',
    'packages': ['sdist_test'],
    'package_data': {'sdist_test': ['*.txt']}
}
# setup.py body written into each temporary test project and executed there.
SETUP_PY = """\
from setuptools import setup
setup(**%r)
""" % SETUP_ATTRS
# A Latin-1 byte filename used by the encoding tests; on Python 3 we must
# encode the text literal, on Python 2 the escaped str literal is bytes.
if sys.version_info >= (3,):
    LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
else:
    LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
def quiet():
    """Silence stdout/stderr, remembering the real streams for unquiet()."""
    global old_stdout, old_stderr
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    sys.stdout = StringIO()
    sys.stderr = StringIO()
def unquiet():
    """Undo quiet(): put the saved stdout/stderr streams back."""
    sys.stdout = old_stdout
    sys.stderr = old_stderr
def b(s, encoding='utf-8'):
    """Return *s* encoded to bytes on Python 3; pass it through on Python 2."""
    return s.encode(encoding) if sys.version_info >= (3,) else s
def decompose(path):
    """Return *path* in NFD (decomposed) Unicode form, as HFS Plus stores it.

    Unicode input is normalized directly.  Byte input is round-tripped
    through UTF-8 and returned unchanged if it is not valid UTF-8.
    NOTE: Python 2 only -- relies on the `unicode` builtin.
    """
    if isinstance(path, unicode):
        return unicodedata.normalize('NFD', path)
    try:
        path = path.decode('utf-8')
        path = unicodedata.normalize('NFD', path)
        path = path.encode('utf-8')
    except UnicodeError:
        pass # Not UTF-8
    return path
def hfs_quote(path):
    """Return *path* as HFS Plus exposes it: %XX-quote bytes that are not
    valid UTF-8; on Python 3 valid UTF-8 is returned as text instead.

    :raises TypeError: if given a `unicode` object -- raw bytes are required.
    NOTE: Python 2 only -- relies on `unicode` and `urllib.quote`.
    """
    if isinstance(path, unicode):
        raise TypeError('bytes are required')
    try:
        u = path.decode('utf-8')
    except UnicodeDecodeError:
        path = urllib.quote(path) # Not UTF-8
    else:
        if sys.version_info >= (3,):
            path = u
    return path
class TestSdistTest(unittest.TestCase):
    """sdist/manifest behavior tests.

    Each test runs inside a throwaway project directory created in setUp();
    the working directory is switched there and restored in tearDown().
    NOTE: written for Python 2 (``except E, e`` syntax, ``'rbU'`` mode).
    """
    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
        f.write(SETUP_PY)
        f.close()
        # Set up the rest of the test package
        test_pkg = os.path.join(self.temp_dir, 'sdist_test')
        os.mkdir(test_pkg)
        # *.rst was not included in package_data, so c.rst should not be
        # automatically added to the manifest when not under version control
        for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
            # Just touch the files; their contents are irrelevant
            open(os.path.join(test_pkg, fname), 'w').close()
        self.old_cwd = os.getcwd()
        os.chdir(self.temp_dir)
    def tearDown(self):
        os.chdir(self.old_cwd)
        shutil.rmtree(self.temp_dir)
    def test_package_data_in_sdist(self):
        """Regression test for pull request #4: ensures that files listed in
        package_data are included in the manifest even if they're not added to
        version control.
        """
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # squelch output
        quiet()
        try:
            cmd.run()
        finally:
            unquiet()
        manifest = cmd.filelist.files
        self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
        self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)
        self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest)
    def test_manifest_is_written_with_utf8_encoding(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')
        # UTF-8 filename
        filename = posixpath.join('sdist_test', 'smörbröd.py')
        # Add UTF-8 filename and write manifest
        quiet()
        try:
            mm.run()
            mm.filelist.files.append(filename)
            mm.write_manifest()
        finally:
            unquiet()
        # Read the manifest back as raw bytes to inspect its encoding.
        manifest = open(mm.manifest, 'rbU')
        contents = manifest.read()
        manifest.close()
        # The manifest should be UTF-8 encoded
        try:
            u = contents.decode('UTF-8')
        except UnicodeDecodeError, e:
            self.fail(e)
        # The manifest should contain the UTF-8 filename
        if sys.version_info >= (3,):
            self.assertTrue(filename in u)
        else:
            self.assertTrue(filename in contents)
    def test_manifest_is_written_with_surrogateescape_error_handler(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        mm = manifest_maker(dist)
        mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
        os.mkdir('sdist_test.egg-info')
        # Latin-1 filename
        filename = posixpath.join(b('sdist_test'), LATIN1_FILENAME)
        # Add filename with surrogates and write manifest
        quiet()
        try:
            mm.run()
            if sys.version_info >= (3,):
                # On Python 3 undecodable bytes survive as surrogates.
                u = filename.decode('utf-8', 'surrogateescape')
                mm.filelist.files.append(u)
            else:
                mm.filelist.files.append(filename)
            mm.write_manifest()
        finally:
            unquiet()
        manifest = open(mm.manifest, 'rbU')
        contents = manifest.read()
        manifest.close()
        # The manifest should contain the Latin-1 filename
        self.assertTrue(filename in contents)
    def test_manifest_is_read_with_utf8_encoding(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # UTF-8 filename
        filename = os.path.join('sdist_test', 'smörbröd.py')
        open(filename, 'w').close()
        quiet()
        try:
            cmd.run()
        finally:
            unquiet()
        # The filelist should contain the UTF-8 filename
        if sys.platform == 'darwin':
            # HFS Plus stores filenames in NFD form.
            filename = decompose(filename)
        self.assertTrue(filename in cmd.filelist.files)
    def test_manifest_is_read_with_surrogateescape_error_handler(self):
        # Test for #303.
        # This is hard to test on HFS Plus because it quotes unknown
        # bytes (see previous test). Furthermore, egg_info.FileList
        # only appends filenames that os.path.exist.
        # We therefore write the manifest file by hand and check whether
        # read_manifest produces a UnicodeDecodeError.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
        quiet()
        try:
            cmd.run()
            # Add Latin-1 filename to manifest
            cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
            manifest = open(cmd.manifest, 'ab')
            manifest.write(filename+b('\n'))
            manifest.close()
            # Re-read manifest
            try:
                cmd.read_manifest()
            except UnicodeDecodeError, e:
                self.fail(e)
        finally:
            unquiet()
    def test_sdist_with_utf8_encoded_filename(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # UTF-8 filename
        filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
        open(filename, 'w').close()
        quiet()
        try:
            cmd.run()
        finally:
            unquiet()
        # The filelist should contain the UTF-8 filename
        # (in one representation or other)
        if sys.version_info >= (3,):
            filename = filename.decode(sys.getfilesystemencoding(), 'surrogateescape')
        if sys.platform == 'darwin':
            filename = decompose(filename)
        self.assertTrue(filename in cmd.filelist.files)
    def test_sdist_with_latin1_encoded_filename(self):
        # Test for #303.
        dist = Distribution(SETUP_ATTRS)
        dist.script_name = 'setup.py'
        cmd = sdist(dist)
        cmd.ensure_finalized()
        # Latin-1 filename
        filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
        open(filename, 'w').close()
        quiet()
        try:
            cmd.run()
        finally:
            unquiet()
        # The filelist should contain the Latin-1 filename
        # (in one representation or other)
        if sys.platform == 'darwin':
            filename = hfs_quote(filename)
        elif sys.version_info >= (3,):
            filename = filename.decode(sys.getfilesystemencoding(), 'surrogateescape')
        self.assertTrue(filename in cmd.filelist.files)
    def test_decompose(self):
        # NFD splits each o-umlaut into base letter + combining diaeresis,
        # hence the length grows by 2.
        self.assertNotEqual('smörbröd.py', decompose('smörbröd.py'))
        if sys.version_info >= (3,):
            self.assertEqual(len('smörbröd.py'), 11)
            self.assertEqual(len(decompose('smörbröd.py')), 13)
        else:
            self.assertEqual(len('smörbröd.py'), 13)
            self.assertEqual(len(decompose('smörbröd.py')), 15)
    def test_hfs_quote(self):
        self.assertEqual(hfs_quote(LATIN1_FILENAME), 'sm%F6rbr%F6d.py')
        # Bytes are required
        if sys.version_info >= (3,):
            self.assertRaises(TypeError, hfs_quote, 'smörbröd.py')
        else:
            self.assertRaises(TypeError, hfs_quote, 'smörbröd.py'.decode('utf-8'))
def test_suite():
    """Collect this module's tests with the default unittest loader."""
    loader = unittest.defaultTestLoader
    return loader.loadTestsFromName(__name__)
|
import os
from setuptools import setup
from subprocess import call
from sys import platform, argv
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    :param fname: path relative to the directory containing this setup script
    :return: file contents as a string
    """
    # Fix: the original `open(...).read()` leaked the file handle, leaving
    # closing to the garbage collector; the context manager closes promptly.
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
SCRIPTS = ["src/bg_daemon/background_daemon.py"]

# Build and ship the `quack` helper only on macOS, and only for commands that
# actually build/install (skip metadata-only commands: egg_info, sdist,
# register).
if (all([e not in argv for e in ['egg_info', 'sdist', 'register']]) and
        platform == 'darwin'):
    try:
        call(['make', '-C', 'src/bg_daemon/'])
        SCRIPTS.append("src/bg_daemon/quack")
    except OSError as e:
        # Fix: `print "..."` is a Python-2-only statement and a syntax error
        # on Python 3; the parenthesized single-argument form prints
        # identically on both.
        print("Can't compile quack, reason {}".format(str(e)))
# Package metadata; scripts and data files live under src/bg_daemon/.
setup(
    name="bg_daemon",
    version="0.0.1",
    author="Santiago Torres",
    author_email="torresariass@gmail.com",
    description=("An extensible set of classes that can programmatically "
                 "update the desktop wallpaper"),
    license="GPLv2",
    keywords="imgur desktop wallpaper background",
    url="https://github.com/santiagotorres/bg_daemon",
    packages=["bg_daemon", "bg_daemon.fetchers"],
    package_dir={"bg_daemon": "src/bg_daemon",
                 "bg_daemon.fetchers": "src/bg_daemon/fetchers"},
    scripts=SCRIPTS,
    include_package_data=True,
    data_files=[('bg_daemon', ['src/bg_daemon/settings.json',
                               'src/bg_daemon/mac-update.sh'])],
    long_description=read("README.md"),
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "Topic :: Utilities",
        # Fix: removed the bare "License :: " entry -- it is not a valid
        # trove classifier (PyPI rejects unknown classifiers) and the full
        # GPLv2 classifier below already declares the license.
        "Environment :: No Input/Output (Daemon)",
        "Intended Audience :: End Users/Desktop",
        "License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
        "Operating System :: Unix",
        "Topic :: Multimedia",
    ],
    install_requires=[
        "imgurpython",
        "requests",
        "python-crontab",
        "mock",
    ],
)
|
"""
Stateful module base class and interface description.
All stateful Python modules
- Get Skype4Py Skype instance on init - have full control over Skype and
thus are not limited to !command handlers
- Reside in the same modules/ folder as UNIX script modules
- Have .py extension and be valid Python 2.7 modules
- Have #!/sevabot magic string at the head of the file
- Exports Python attribute *sevabot_handler* which is an instance of the class
as described below
Please note that in the future we might have different chat backends (GTalk)
and thus have a same-same-but-different stateful handlers.
"""
class StatefulSkypeHandler:
    """
    Base class for stateful handlers.

    All exceptions that slip through are caught and logged by the caller.
    """

    def init(self, sevabot):
        """
        Set-up our state. This is called every time module is (re)loaded.

        You can get Skype4Py instance via ``sevabot.getSkype()``.

        :param sevabot: Handle to Sevabot instance
        """

    def handle_message(self, msg, status):
        """Override this method to have a customized handler for each Skype message.

        :param msg: ChatMessage instance https://github.com/awahlig/skype4py/blob/master/Skype4Py/chat.py#L409

        :param status: -

        :return: True if the message was handled and should not be further processed
        """

    def shutdown(self):
        """Called when the module is reloaded.

        Fix: the original declared ``def shutdown():`` without ``self``, so
        ``handler.shutdown()`` on an instance raised TypeError.

        In ``shutdown()`` you must

        * Stop all created threads

        * Unregister all event handlers

        .. note ::

            We are *not* guaranteed to be called when the Sevabot process
            shuts down, as the process may terminate with SIGKILL.
        """

    def register_callback(self, skype, event, callback):
        """
        Register any callable as a callback for a skype event.

        Thin wrapper for RegisterEventHandler https://github.com/awahlig/skype4py/blob/master/Skype4Py/utils.py

        :param skype: Skype4Py instance

        :param event: Same as Event

        :param callback: Same as Target

        :return: Same as RegisterEventHandler
        """
        return skype.RegisterEventHandler(event, callback)

    def unregister_callback(self, skype, event, callback):
        """
        Unregister a callback previously registered with register_callback.

        Thin wrapper for UnregisterEventHandler https://github.com/awahlig/skype4py/blob/master/Skype4Py/utils.py

        :param skype: Skype4Py instance

        :param event: Same as Event

        :param callback: Same as Target

        :return: Same as UnregisterEventHandler
        """
        return skype.UnregisterEventHandler(event, callback)
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
    scoped_session,
    sessionmaker,
)
from zope.sqlalchemy import ZopeTransactionExtension
import tornado.web
from handlers.index import IndexHandler
from handlers.sensors import SensorsHandler
import logging
# Emit everything from DEBUG upward on the root logger.
logging.getLogger().setLevel(logging.DEBUG)
# URL routing table for the Tornado web application.
app = tornado.web.Application([
    (r'/', IndexHandler),
    (r'/sensors', SensorsHandler)
])
# Thread-local SQLAlchemy session factory; ZopeTransactionExtension ties the
# session lifetime to the zope transaction manager.
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
# Declarative base class for ORM models defined elsewhere in the project.
Base = declarative_base()
|
"""
***************************************************************************
v_univar.py
---------------------
Date : December 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'December 2012'
__copyright__ = '(C) 2012, Victor Olaya'
__revision__ = '$Format:%H$'
def postProcessResults(alg):
    """Write an HTML report for v.univar from the algorithm's console output.

    Console lines that appear *after* the line containing 'v.univar' are
    copied into the report; the marker line itself and any line ending in
    'exit' are skipped.

    :param alg: algorithm instance providing ``getOutputFromName()`` and a
        ``consoleOutput`` list of strings
    """
    htmlFile = alg.getOutputFromName('html').value
    found = False
    # Fix: a context manager guarantees the report file is closed even if an
    # exception occurs while iterating the console output.
    with open(htmlFile, "w") as f:
        f.write("<h2>v.univar</h2>\n")
        for line in alg.consoleOutput:
            if found and not line.strip().endswith('exit'):
                f.write(line + "<br>\n")
            if 'v.univar' in line:
                found = True
|
import sys, re, os
import logging
import GrubConf
class LiloImage(object):
    """One boot entry (image section) parsed from a LILO config file.

    Directive lines are mapped onto attributes via the `commands` table;
    kernel/initrd paths are made absolute against `path`.
    NOTE: Python 2 only (`has_key`, eager `map`).
    """
    def __init__(self, lines, path):
        self.reset(lines, path)
    def __repr__(self):
        return ("title: %s\n"
                "  root: %s\n"
                "  kernel: %s\n"
                "  args: %s\n"
                "  initrd: %s\n" %(self.title, self.root, self.kernel,
                                   self.args, self.initrd))
    def reset(self, lines, path):
        # Clear all parsed state, then re-apply every directive line.
        self._initrd = self._kernel = self._readonly = None
        self._args = ""
        self.title = ""
        self.lines = []
        self.path = path
        self.root = ""
        map(self.set_from_line, lines)
    def set_from_line(self, line, replace = None):
        """Apply one directive line; optionally replace a stored line at
        index `replace` instead of appending."""
        (com, arg) = GrubConf.grub_exact_split(line, 2)
        if self.commands.has_key(com):
            if self.commands[com] is not None:
                # Strip surrounding double quotes from the argument value.
                setattr(self, self.commands[com], re.sub('^"(.+)"$', r"\1", arg.strip()))
            else:
                logging.info("Ignored image directive %s" %(com,))
        else:
            logging.warning("Unknown image directive %s" %(com,))
        # now put the line in the list of lines
        if replace is None:
            self.lines.append(line)
        else:
            self.lines.pop(replace)
            self.lines.insert(replace, line)
    def set_kernel(self, val):
        # Stored as (partition, absolute-path); partition is unknown here.
        self._kernel = (None, self.path + "/" + val)
    def get_kernel(self):
        return self._kernel
    kernel = property(get_kernel, set_kernel)
    def set_initrd(self, val):
        self._initrd = (None, self.path + "/" + val)
    def get_initrd(self):
        return self._initrd
    initrd = property(get_initrd, set_initrd)
    def set_args(self, val):
        self._args = val
    def get_args(self):
        # Kernel command line: stored args plus root= and ro as configured.
        args = self._args
        if self.root:
            args += " root=" + self.root
        if self.readonly:
            args += " ro"
        return args
    args = property(get_args, set_args)
    def set_readonly(self, val):
        # Any "read-only" directive marks the image read-only; the argument
        # value itself is ignored.
        self._readonly = 1
    def get_readonly(self):
        return self._readonly
    readonly = property(get_readonly, set_readonly)
    # set up command handlers: LILO directive -> attribute (None = ignored)
    commands = { "label": "title",
                 "root": "root",
                 "rootnoverify": "root",
                 "image": "kernel",
                 "initrd": "initrd",
                 "append": "args",
                 "read-only": "readonly",
                 "chainloader": None,
                 "module": None}
class LiloConfigFile(object):
    """Parser for a LILO configuration file.

    Per-image sections are collected into LiloImage objects; global
    directives (default, timeout) are mapped through the `commands` table.
    """
    def __init__(self, fn = None):
        self.filename = fn
        self.images = []
        self.timeout = -1
        self._default = 0
        if fn is not None:
            self.parse()
    def parse(self, buf = None):
        """Parse self.filename, or the string `buf` when given.

        :raises ValueError: if neither a buffer nor a filename is available.
        """
        if buf is None:
            if self.filename is None:
                # Modernized raise syntax; identical behavior on Py2 and Py3.
                raise ValueError("No config file defined to parse!")
            f = open(self.filename, 'r')
            lines = f.readlines()
            f.close()
        else:
            lines = buf.split("\n")
        # Fix: the original unconditionally called os.path.dirname(self.filename)
        # and crashed when parsing from a buffer with no filename set; image
        # paths are then resolved relative to ''.
        if self.filename is not None:
            path = os.path.dirname(self.filename)
        else:
            path = ''
        img = []
        for l in lines:
            l = l.strip()
            # skip blank lines
            if len(l) == 0:
                continue
            # skip comments
            if l.startswith('#'):
                continue
            # new image section starts; flush the previous one
            if l.startswith("image"):
                if len(img) > 0:
                    self.add_image(LiloImage(img, path))
                img = [l]
                continue
            # inside an image section: accumulate its lines
            if len(img) > 0:
                img.append(l)
                continue
            (com, arg) = GrubConf.grub_exact_split(l, 2)
            # `com in dict` replaces Python-2-only dict.has_key(); identical.
            if com in self.commands:
                if self.commands[com] is not None:
                    # NOTE(review): values such as "self.default" are passed
                    # to setattr verbatim, creating an attribute literally
                    # named "self.default" that the `default` property never
                    # reads -- confirm intended behavior before changing.
                    setattr(self, self.commands[com], arg.strip())
                else:
                    logging.info("Ignored directive %s" %(com,))
            else:
                logging.warning("Unknown directive %s" %(com,))
        if len(img) > 0:
            self.add_image(LiloImage(img, path))
    def add_image(self, image):
        """Append a parsed LiloImage to the boot-entry list."""
        self.images.append(image)
    def _get_default(self):
        """Index of the image whose title matches the configured default
        label; 0 when no title matches."""
        # Fix: the original iterated range(0, len(self.images) - 1), which
        # skipped the last image so it could never be selected as default.
        for i in range(len(self.images)):
            if self.images[i].title == self._default:
                return i
        return 0
    def _set_default(self, val):
        self._default = val
    default = property(_get_default, _set_default)
    # global directive -> attribute name (None = ignored directive)
    commands = { "default": "self.default",
                 "timeout": "self.timeout",
                 "prompt": None,
                 "relocatable": None,
                 }
if __name__ == "__main__":
    # Fix: the original tested `sys.argv < 2`, comparing the argv *list*
    # itself to an int -- that is never true on Python 2 (and a TypeError on
    # Python 3), so the missing-argument error was never raised.
    if len(sys.argv) < 2:
        raise RuntimeError("Need a grub.conf to read")
    g = LiloConfigFile(sys.argv[1])
    for i in g.images:
        # Parenthesized single-argument print works on Python 2 and 3 alike.
        print(i)  # , i.title, i.root, i.kernel, i.args, i.initrd
    print(g.default)
|
import os
import sys
# Make the project root (one level above this script) importable.
this_dir = os.path.dirname(os.path.abspath(__file__))
trunk_dir = os.path.split(this_dir)[0]
sys.path.insert(0,trunk_dir)
from ikol.dbregister import DataBase
from ikol import var
# Smoke script: rebuild the database from scratch each run.
# NOTE: Python 2 only (`print` statements below).
if os.path.exists(var.DB_PATH):
    os.remove(var.DB_PATH)
DB = DataBase(var.DB_PATH)
# Two playlists, two videos attached to the first playlist.
DB.insertPlaylist("loLWOCl7nlk","test")
DB.insertPlaylist("loLWO357nlk","testb")
DB.insertVideo("KDk2341oEQQ","loLWOCl7nlk","test")
DB.insertVideo("KDktIWeoE23","loLWOCl7nlk","testb")
print DB.getAllVideosByPlaylist("loLWOCl7nlk")
print DB.getVideoById("KDk2341oEQQ")
|
# Sphinx configuration for the TrinityX documentation.
# No Sphinx extensions are needed to build these docs.
extensions = [
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
# Project identity shown throughout the rendered output.
project = 'TrinityX'
copyright = '2020, ClusterVision Solutions BV'
author = 'ClusterVision Solutions BV'
version = '12'
release = '12.0'
# NOTE(review): recent Sphinx releases warn on `language = None`; 'en' is
# the modern equivalent -- confirm the Sphinx version before changing.
language = None
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'README.rst']
pygments_style = 'sphinx'
highlight_language = 'none'
todo_include_todos = False
# -- HTML output (Read the Docs theme) -------------------------------------
html_theme = 'sphinx_rtd_theme'
html_title = 'TrinityX r12'
html_logo = 'trinityxlogo.png'
html_use_smartypants = True
html_show_sourcelink = False
html_copy_source = False
htmlhelp_basename = 'TrinityXdoc'
# -- LaTeX / PDF output ----------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    'papersize': 'a4paper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
latex_documents = [
    (master_doc, 'TrinityX.tex', 'TrinityX Documentation',
     'ClusterVision Solutions BV', 'manual'),
]
# -- Man page output -------------------------------------------------------
man_pages = [
    (master_doc, 'trinityx', 'TrinityX Documentation',
     [author], 1)
]
# -- Texinfo output --------------------------------------------------------
texinfo_documents = [
    (master_doc, 'TrinityX', 'TrinityX Documentation',
     author, 'TrinityX', 'One line description of project.',
     'Miscellaneous'),
]
|
# Test fixtures: each case maps an "input" list of numbers to the expected
# "answer" (the spread max(input) - min(input); 0 for lists with fewer than
# two elements) plus a short human-readable "explanation".
TESTS = {
    "Level_1": [
        {
            "input": [1, 2, 3],
            "answer": 2,
            "explanation": "3-1=2"
        },
        {
            "input": [5, -5],
            "answer": 10,
            "explanation": "5-(-5)=10"
        },
        {
            "input": [10.2, -2.2, 0, 1.1, 0.5],
            "answer": 12.4,
            "explanation": "10.2-(-2.2)=12.4"
        },
        {
            "input": [],
            "answer": 0,
            "explanation": "Empty"
        },
        {"input": [-99.9, 99.9],
         "answer": 199.8,
         "explanation": "99.9-(-99.9)"},
        {"input": [1, 1],
         "answer": 0,
         "explanation": "1-1"},
        {"input": [0, 0, 0, 0],
         "answer": 0,
         "explanation": "0-0"},
        {"input": [36.0, -26.0, -7.5, 0.9, 0.53, -6.6, -71.0, 0.53, -48.0, 57.0, 69.0, 0.063, -4.7, 0.01, 9.2],
         "answer": 140.0,
         "explanation": "69.0-(-71.0)"},
        {"input": [-0.035, 0.0, -0.1, 83.0, 0.28, 60.0],
         "answer": 83.1,
         "explanation": "83.0-(-0.1)"},
        {"input": [0.02, 0.93, 0.066, -94.0, -0.91, -21.0, -7.2, -0.018, 26.0],
         "answer": 120.0,
         "explanation": "26.0-(-94.0)"},
        {"input": [89.0, 0.014, 2.9, -1.2, 5.8],
         "answer": 90.2,
         "explanation": "89.0-(-1.2)"},
        {"input": [-69.0, 0.0, 0.0, -0.051, -0.021, -0.81],
         "answer": 69.0,
         "explanation": "0.0-(-69.0)"},
        {"input": [-0.07],
         "answer": 0.0,
         "explanation": "-0.07-(-0.07)"},
        {"input": [0.074, 0.12, -0.4, 4.0, -1.7, 3.0, -5.1, 0.57, -54.0, -41.0, -5.2, -5.6, 3.8, 0.054, -35.0, -5.0,
                   -0.005, 0.034],
         "answer": 58.0,
         "explanation": "4.0-(-54.0)"},
        {"input": [29.0, 0.47, -4.5, -6.7, -0.051, -0.82, -0.074, -4.0, -0.015, -0.015, -8.0, -0.43],
         "answer": 37.0,
         "explanation": "29.0-(-8.0)"},
        {"input": [-0.036, -0.11, -0.55, -64.0],
         "answer": 63.964,
         "explanation": "-0.036-(-64.0)"},
        {"input": [-0.092, -0.079, -0.31, -0.87, -28.0, -6.2, -0.097, -5.8, -0.025, -28.0, -4.7, -2.9, -8.0, -0.093,
                   -13.0, -73.0],
         "answer": 72.975,
         "explanation": "-0.025-(-73.0)"},
        {"input": [-0.015, 7.6],
         "answer": 7.615,
         "explanation": "7.6-(-0.015)"},
        {"input": [-46.0, 0.19, -0.08, -4.0, 4.4, 0.071, -0.029, -0.034, 28.0, 0.043, -97.0],
         "answer": 125.0,
         "explanation": "28.0-(-97.0)"},
        {"input": [32.0, -0.07, -0.056, -6.4, 0.084],
         "answer": 38.4,
         "explanation": "32.0-(-6.4)"},
        {"input": [0.017, 0.015, 0.69, 0.78],
         "answer": 0.765,
         "explanation": "0.78-0.015"},
    ]
}
|
from socket import *
from ftplib import FTP
import ftplib
import socket
import thread
import time
import sys
import codecs
import os
# Python 2-only hack: reload(sys) re-exposes setdefaultencoding (removed by
# site.py at startup) so implicit str<->unicode conversions use UTF-8.
reload(sys)
sys.setdefaultencoding( "utf-8" )
class ClientMessage():
    """UDP chat client: login, peer selection, messaging, and FTP-based file
    transfer against a fixed server.

    Protocol: '##'-separated UDP datagrams whose first field is an opcode
    (0 login, 1 logout, 2 chat, 3 file, 4/5 friend requests).
    NOTE: Python 2 only (`print` statements, `thread`, `raw_input`);
    `Tkinter` widgets (self.chatText/self.inputText) are referenced but not
    set up in this chunk -- presumably attached by a GUI elsewhere.
    """
    # Set the user name and password
    def setUsrANDPwd(self,usr,pwd):
        self.usr=usr
        self.pwd=pwd
    # Set the chat peer (target user)
    def setToUsr(self,toUsr):
        self.toUsr=toUsr
        self.ChatFormTitle=toUsr
    # Set the server IP address and port number
    def setLocalANDPort(self,local,port):
        self.local = local
        self.port = port
    def check_info(self):
        # Log in ("0##usr##pwd") and return True/False for the server's
        # single Y/N reply.
        self.buffer = 1024
        self.ADDR=(self.local,self.port)
        self.udpCliSock = socket.socket(AF_INET, SOCK_DGRAM)
        self.udpCliSock.sendto('0##'+self.usr+'##'+self.pwd,self.ADDR)
        self.serverMsg ,self.ADDR = self.udpCliSock.recvfrom(self.buffer)
        s=self.serverMsg.split('##')
        if s[0]=='Y':
            return True
        elif s[0]== 'N':
            return False
    # Receive messages (runs forever on its own thread; see startNewThread)
    def receiveMessage(self):
        self.buffer = 1024
        self.ADDR=(self.local,self.port)
        self.udpCliSock = socket.socket(AF_INET, SOCK_DGRAM)
        self.udpCliSock.sendto('0##'+self.usr+'##'+self.pwd,self.ADDR)
        while True:
            # Connection established; receive server messages in a loop
            self.serverMsg ,self.ADDR = self.udpCliSock.recvfrom(self.buffer)
            s=self.serverMsg.split('##')
            if s[0]=='Y':
                #self.chatText.insert(Tkinter.END,'客户端已经与服务器端建立连接......')
                return True
            elif s[0]== 'N':
                #self.chatText.insert(Tkinter.END,'客户端与服务器端建立连接失败......')
                return False
            elif s[0]=='CLOSE':
                # Account logged in elsewhere: count down 5s, then exit hard
                i=5
                while i>0:
                    self.chatText.insert(Tkinter.END,'你的账号在另一端登录,该客户端'+str(i)+'秒后退出......')
                    time.sleep(1)
                    i=i-1
                self.chatText.delete(Tkinter.END)
                os._exit(0)
            # Friends list
            elif s[0]=='F':
                for eachFriend in s[1:len(s)]:
                    print eachFriend
            # A friend came online
            elif s[0]=='0':
                theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                self.chatText.insert(Tkinter.END, theTime+' ' +'你的好友' + s[1]+'上线了')
            # A friend went offline
            elif s[0]=='1':
                theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                self.chatText.insert(Tkinter.END, theTime+' ' +'你的好友' + s[1]+'下线了')
            # Incoming chat message from a friend
            elif s[0]=='2':
                theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                self.chatText.insert(Tkinter.END, theTime +' '+s[1] +' 说:\n')
                self.chatText.insert(Tkinter.END, ' ' + s[3])
            # Incoming file from a friend (fetched from the FTP server)
            elif s[0]=='3':
                filename=s[2]
                f=FTP('192.168.1.105')
                f.login('Coder', 'xianjian')
                f.cwd(self.usr)
                # cp936 (GBK) encoding for Chinese Windows filenames
                filenameD=filename[:-1].encode("cp936")
                try:
                    f.retrbinary('RETR '+filenameD,open('..\\'+self.usr+'\\'+filenameD,'wb').write)
                except ftplib.error_perm:
                    print 'ERROR:cannot read file "%s"' %file
                self.chatText.insert(Tkinter.END,filename[:-1]+' 传输完成')
            elif s[0]=='4':
                # Friend request received; prompt for Y/N and reply opcode 5
                agreement=raw_input(s[1]+'请求加你为好友,验证消息:'+s[3]+'你愿意加'+s[1]+'为好友吗(Y/N)')
                if agreement=='Y':
                    self.udpCliSock.sendto('5##'+s[1]+'##'+s[2]+'##Y',self.ADDR)
                elif agreement=='N':
                    self.udpCliSock.sendto('5##'+s[1]+'##'+s[2]+'##N',self.ADDR)
            elif s[0]=='5':
                # Reply to our own earlier friend request
                if s[3]=='Y':
                    print s[2]+'接受了你的好友请求'
                elif s[3]=='N':
                    print s[2]+'拒绝了你的好友请求'
    # Send a chat message
    def sendMessage(self):
        # Get the message the user typed into the Text widget
        message = self.inputText.get('1.0',Tkinter.END)
        # Format the current time
        theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        self.chatText.insert(Tkinter.END, theTime +' 我 说:\n')
        self.chatText.insert(Tkinter.END,' ' + message + '\n')
        self.udpCliSock.sendto('2##'+self.usr+'##'+self.toUsr+'##'+message,self.ADDR);
        # Clear the message the user typed into the Text widget
        self.inputText.delete(0.0,message.__len__()-1.0)
    # Send a file
    def sendFile(self):
        filename = self.inputText.get('1.0',Tkinter.END)
        theTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        self.chatText.insert(Tkinter.END, theTime +'我' + ' 传文件:\n')
        self.chatText.insert(Tkinter.END,' ' + filename[:-1] + '\n')
        f=FTP('192.168.1.105')
        f.login('Coder', 'xianjian')
        f.cwd(self.toUsr)
        filenameU=filename[:-1].encode("cp936")
        try:
            #f.retrbinary('RETR '+filename,open(filename,'wb').write)
            # Upload the file into the peer's folder on the server
            f.storbinary('STOR ' + filenameU, open('..\\'+self.usr+'\\'+filenameU, 'rb'))
        except ftplib.error_perm:
            print 'ERROR:cannot read file "%s"' %file
        self.udpCliSock.sendto('3##'+self.usr+'##'+self.toUsr+'##'+filename,self.ADDR);
    # Add a friend: input is "username##verification-message"
    def addFriends(self):
        message= self.inputText.get('1.0',Tkinter.END)
        s=message.split('##')
        self.udpCliSock.sendto('4##'+self.usr+'##'+s[0]+'##'+s[1],self.ADDR);
    # Close the message window and exit (opcode 1 = logout)
    def close(self):
        self.udpCliSock.sendto('1##'+self.usr,self.ADDR);
        sys.exit()
    # Start a thread that receives messages from the server
    def startNewThread(self):
        thread.start_new_thread(self.receiveMessage,())
def main():
    # Smoke-test entry point: hard-coded server address, credentials and
    # peer; starts the background receive thread and returns immediately
    # (the client object itself is discarded).
    client = ClientMessage()
    client.setLocalANDPort('192.168.1.105', 8808)
    client.setUsrANDPwd('12073127', '12073127')
    client.setToUsr('12073128')
    client.startNewThread()
if __name__=='__main__':
    main()
|
import SocketServer
import re, binascii
import os, stat, os.path, mimetypes, urllib, time
from pygopherd import handlers, protocols, GopherExceptions
from pygopherd.protocols.base import BaseGopherProtocol
import pygopherd.version
import cgi
class HTTPProtocol(BaseGopherProtocol):
    def canhandlerequest(self):
        """Return true when the request line looks like HTTP:
        'GET|HEAD <selector> HTTP/x'.  Also caches the split request line in
        self.requestparts for handle()."""
        self.requestparts = map(lambda arg: arg.strip(), self.request.split(" "))
        return len(self.requestparts) == 3 and \
               (self.requestparts[0] == 'GET' or self.requestparts[0] == 'HEAD') and \
               self.requestparts[2][0:5] == 'HTTP/'
    def headerslurp(self):
        """Read and cache the remaining HTTP request headers.

        Headers are parsed into a lower-cased-key dict and memoized on the
        request handler so repeated calls don't re-read the socket.
        """
        if hasattr(self.requesthandler, 'pygopherd_http_slurped'):
            # Already slurped.
            self.httpheaders = self.requesthandler.pygopherd_http_slurped
            return
        # Slurp up remaining lines.
        self.httpheaders = {}
        while 1:
            line = self.rfile.readline()
            # EOF or the blank line ending the header block stops the loop.
            if not len(line):
                break
            line = line.strip()
            if not len(line):
                break
            splitline = line.split(':', 1)
            if len(splitline) == 2:
                self.httpheaders[splitline[0].lower()] = splitline[1]
        self.requesthandler.pygopherd_http_slurped = self.httpheaders
    def handle(self):
        """Serve one HTTP GET/HEAD request by mapping it onto a gopher handler.

        Serves built-in icon images directly; otherwise resolves the selector
        to a handler, emits HTTP headers, and writes either a rendered
        directory listing or the file body.
        NOTE: Python 2 only (`has_key`, `except E, e`).
        """
        self.canhandlerequest() # To get self.requestparts
        # NOTE(review): eval() of a config value -- the config file is
        # assumed trusted; do not expose it to untrusted input.
        self.iconmapping = eval(self.config.get("protocols.http.HTTPProtocol",
                                                "iconmapping"))
        self.headerslurp()
        # Split the selector from any query string.
        splitted = self.requestparts[1].split('?')
        self.selector = splitted[0]
        self.selector = urllib.unquote(self.selector)
        self.selector = self.slashnormalize(self.selector)
        self.formvals = {}
        if len(splitted) >= 2:
            self.formvals = cgi.parse_qs(splitted[1])
        if self.formvals.has_key('searchrequest'):
            self.searchrequest = self.formvals['searchrequest'][0]
        # Built-in icons are served from memory (hex data in `icons`,
        # defined elsewhere in this module).
        icon = re.match('/PYGOPHERD-HTTPPROTO-ICONS/(.+)$', self.selector)
        if icon:
            iconname = icon.group(1)
            if icons.has_key(iconname):
                self.wfile.write("HTTP/1.0 200 OK\r\n")
                self.wfile.write("Last-Modified: Fri, 14 Dec 2001 21:19:47 GMT\r\n")
                self.wfile.write("Content-Type: image/gif\r\n\r\n")
                if self.requestparts[0] == 'HEAD':
                    return
                self.wfile.write(binascii.unhexlify(icons[iconname]))
                return
        try:
            handler = self.gethandler()
            self.log(handler)
            self.entry = handler.getentry()
            handler.prepare()
            self.wfile.write("HTTP/1.0 200 OK\r\n")
            if self.entry.getmtime() != None:
                gmtime = time.gmtime(self.entry.getmtime())
                mtime = time.strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime)
                self.wfile.write("Last-Modified: " + mtime + "\r\n")
            mimetype = self.entry.getmimetype()
            mimetype = self.adjustmimetype(mimetype)
            self.wfile.write("Content-Type: " + mimetype + "\r\n\r\n")
            # HEAD requests stop after the headers.
            if self.requestparts[0] == 'GET':
                if handler.isdir():
                    self.writedir(self.entry, handler.getdirlist())
                else:
                    self.handlerwrite(self.wfile)
        except GopherExceptions.FileNotFound, e:
            self.filenotfound(str(e))
        except IOError, e:
            GopherExceptions.log(e, self, None)
            self.filenotfound(e[1])
    def handlerwrite(self, wfile):
        """Delegate body output to the resolved gopher handler."""
        self.handler.write(wfile)
    def adjustmimetype(self, mimetype):
        """Map gopher-internal MIME types onto ones browsers understand.

        None becomes text/plain; gopher menus are rendered as HTML."""
        if mimetype == None:
            return 'text/plain'
        if mimetype == 'application/gopher-menu':
            return 'text/html'
        return mimetype
    def renderobjinfo(self, entry):
        """Render one directory entry as an HTML table row, choosing the
        right URL form for the entry's target."""
        url = None
        # Decision time....
        if re.match('(/|)URL:', entry.getselector()):
            # It's a plain URL.  Make it that.
            url = re.match('(/|)URL:(.+)$', entry.getselector()).group(2)
        elif (not entry.gethost()) and (not entry.getport()):
            # It's a link to our own server.  Make it as such.  (relative)
            url = urllib.quote(entry.getselector())
        else:
            # Link to a different server.  Make it a gopher URL.
            url = entry.geturl(self.server.server_name, 70)
        # OK.  Render.
        return self.getrenderstr(entry, url)
    def getrenderstr(self, entry, url):
        """Render one directory entry as an HTML table row.

        Layout: icon cell, name/link cell (with an inline search form for
        type '7' entries), and a small-print MIME subtype cell.
        """
        retstr = '<TR><TD>'
        retstr += self.getimgtag(entry)
        retstr += "</TD>\n<TD> "
        # Informational ('i') and search ('7') entries are not clickable.
        if entry.gettype() != 'i' and entry.gettype() != '7':
            retstr += '<A HREF="%s">' % url
        retstr += "<TT>"
        # NOTE(review): the .replace() below appears to substitute a space
        # with another space character; it looks like a garbled "&nbsp;"
        # replacement -- confirm against the upstream source.
        if entry.getname() != None:
            retstr += cgi.escape(entry.getname()).replace(" ", " ")
        else:
            retstr += cgi.escape(entry.getselector()).replace(" ", " ")
        retstr += "</TT>"
        if entry.gettype() != 'i' and entry.gettype() != '7':
            retstr += '</A>'
        if (entry.gettype() == '7'):
            # Search entries get a query form instead of a plain link.
            retstr += '<BR><FORM METHOD="GET" ACTION="%s">' % url
            retstr += '<INPUT TYPE="text" NAME="searchrequest" SIZE="30">'
            retstr += '<INPUT TYPE="submit" NAME="Submit" VALUE="Submit">'
            retstr += '</FORM>'
        retstr += '</TD><TD><FONT SIZE="-2">'
        if entry.getmimetype():
            # Show only the subtype part, e.g. text/plain -> plain.
            subtype = re.search('/.+$', entry.getmimetype())
            if subtype:
                retstr += cgi.escape(subtype.group()[1:])
        retstr += '</FONT></TD></TR>\n'
        return retstr
def renderdirstart(self, entry):
retstr ='<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">'
retstr += "\n<HTML><HEAD><TITLE>Gopher"
if self.entry.getname():
retstr += ": " + cgi.escape(self.entry.getname())
retstr += "</TITLE></HEAD><BODY>"
if self.config.has_option("protocols.http.HTTPProtocol", "pagetopper"):
retstr += re.sub('GOPHERURL',
self.entry.geturl(self.server.server_name,
self.server.server_port),
self.config.get("protocols.http.HTTPProtocol",
"pagetopper"))
retstr += "<H1>Gopher"
if self.entry.getname():
retstr += ": " + cgi.escape(self.entry.getname())
retstr += '</H1><TABLE WIDTH="100%" CELLSPACING="1" CELLPADDING="0">'
return retstr
def renderdirend(self, entry):
retstr = "</TABLE><HR>\n[<A HREF=\"/\">server top</A>]"
retstr += " [<A HREF=\"%s\">view with gopher</A>]" % \
entry.geturl(self.server.server_name,
self.server.server_port)
retstr += '<BR>Generated by <A HREF="%s">%s</A>' % (
pygopherd.version.homepage, pygopherd.version.productname)
return retstr + "\n</BODY></HTML>\n"
    def filenotfound(self, msg):
        """Send a complete HTTP 404 response page carrying `msg`."""
        self.wfile.write("HTTP/1.0 404 Not Found\r\n")
        self.wfile.write("Content-Type: text/html\r\n\r\n")
        self.wfile.write('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">')
        self.wfile.write("""\n<HTML><HEAD><TITLE>Selector Not Found</TITLE>
<H1>Selector Not Found</H1>
<TT>""")
        # Escape the message so selectors can't inject HTML into the page.
        self.wfile.write(cgi.escape(msg))
        self.wfile.write("</TT><HR>Pygopherd</BODY></HTML>\n")
def getimgtag(self, entry):
name = 'generic.gif'
if self.iconmapping.has_key(entry.gettype()):
name = self.iconmapping[entry.gettype()]
return '<IMG ALT=" * " SRC="%s" WIDTH="20" HEIGHT="22" BORDER="0">' % \
('/PYGOPHERD-HTTPPROTO-ICONS/' + name)
# Hex-encoded GIF icon images, decoded with binascii.unhexlify() and served
# under /PYGOPHERD-HTTPPROTO-ICONS/ (see the icon branch earlier in this
# module).  Per the embedded GIF comment blocks, the art is public domain,
# by Kevin Hughes ([email protected]), September 1995.
icons = {
    'binary.gif':
    '47494638396114001600c20000ffffffccffffcccccc99999933333300000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000036948babcf1301040ab9d24be590a105d210013a9715e07a8a509a16beab5ae14df6a41e8fc76839d5168e8b3182983e4a0e0038a6e1525d396931d97be2ad482a55a55c6eec429f484a7b4e339eb215fd138ebda1b7fb3eb73983bafee8b094a8182493b114387885309003b',
    'binhex.gif':
    '47494638396114001600c20000ffffffccffff99999966666633333300000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000036948babcf1301040ab9d24be59baefc0146adce78555068914985e2b609e0551df9b3c17ba995b408a602828e48a2681856894f44cc1628e07a42e9b985d14ab1b7c9440a9131c0c733b229bb5222ecdb6bfd6da3cd5d29d688a1aee2c97db044482834336113b884d09003b',
    'folder.gif':
    '47494638396114001600c20000ffffffffcc99ccffff99663333333300000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000002002c000000001400160000035428badcfe30ca4959b9f8ce12baef45c47d64a629c5407a6a8906432cc72b1c8ef51a13579e0f3c9c8f05ec0d4945e171673cb2824e2234da495261569856c5ddc27882d46c3c2680c3e6b47acd232c4cf08c3b01003b',
    'image3.gif':
    '47494638396114001600e30000ffffffff3333ccffff9999996600003333330099cc00993300336600000000000000000000000000000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000002002c0000000014001600000479b0c849a7b85814c0bbdf45766d5e49861959762a3a76442c132ae0aa44a0ef49d1ff2f4e6ea74b188f892020c70c3007d04152b3aa46a7adcaa42355160ee0f041d5a572bee23017cb1abbbf6476d52a0720ee78fc5a8930f8ff06087b66768080832a7d8a81818873744a8f8805519596503e19489b9c5311003b',
    'sound1.gif':
    '47494638396114001600c20000ffffffff3333ccffffcccccc99999966000033333300000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000002002c000000001400160000036b28badcfe3036c34290ea1c61558f07b171170985c0687e0d9a729e77693401dc5bd7154148fcb6db6b77e1b984c20d4fb03406913866717a842aa7d22af22acd120cdf6fd2d49cd10e034354871518de06b43a17334de42a36243e187d4a7b1a762c7b140b8418898a0b09003b',
    'text.gif':
    '47494638396114001600c20000ffffffccffff99999933333300000000000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000035838babcf1300c40ab9d23be693bcf11d75522b88dd7057144eb52c410cf270abb6e8db796e00b849aadf20b4a6ebb1705281c128daca412c03c3a7b50a4f4d9bc5645dae9f78aed6e975932baebfc0e7ef0b84f1691da8d09003b',
    'generic.gif':
    '47494638396114001600c20000ffffffccffff99999933333300000000000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000035038babcf1300c40ab9d23be693bcf11d75522b88dd705892831b8f08952446d13f24c09bc804b3a4befc70a027c39e391a8ac2081cd65d2f82c06ab5129b4898d76b94c2f71d02b9b79afc86dcdfe2500003b',
    'blank.gif':
    '47494638396114001600a10000ffffffccffff00000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c00000000140016000002138c8fa9cbed0fa39cb4da8bb3debcfb0f864901003b'}
|
"""
End-to-end tests for mypmirun
"""
import os
import logging
logging.basicConfig(level=logging.DEBUG)
from pmi_utils import PMITest
from vsc.utils.affinity import sched_getaffinity, sched_setaffinity
class TaskPrologEnd2End(PMITest):
    """End-to-end test for the mytaskprolog.py helper."""

    def setUp(self):
        """Prepare to run test."""
        super(TaskPrologEnd2End, self).setUp()
        # Run mytaskprolog.py from the same directory as the default script.
        self.script = os.path.join(os.path.dirname(self.script), 'mytaskprolog.py')

    def test_simple(self):
        """Pin this process to core 1 and check the prolog output."""
        origaff = sched_getaffinity()
        aff = sched_getaffinity()
        aff.set_bits([1])  # only use first core (we can always assume there is one core)
        sched_setaffinity(aff)
        self.pmirun([], pattern='export CUDA_VISIBLE_DEVICES=0')
        # restore the original affinity so later tests are unaffected
        sched_setaffinity(origaff)
|
from fabric.api import local
def html():
    """Build the HTML slideshow from formation_flask.rst with hovercraft."""
    local('hovercraft -t ./sixfeetup_hovercraft formation_flask.rst ./build/')
|
import datetime
import os
import sys

import xbmc
import xbmcaddon
import xbmcgui
import xbmcvfs

from libs.vpnproviders import removeGeneratedFiles, cleanPassFiles, providers, usesUserKeys, usesMultipleKeys, getUserKeys
from libs.vpnproviders import getUserCerts, getVPNDisplay, getVPNLocation, removeDownloadedFiles, isAlternative, resetAlternative
from libs.utility import debugTrace, errorTrace, infoTrace, newPrint, getID, getName
from libs.vpnplatform import getLogPath, getUserDataPath, writeVPNLog, copySystemdFiles, addSystemd, removeSystemd, generateVPNs
from libs.common import resetVPNConnections, isVPNConnected, disconnectVPN, suspendConfigUpdate, resumeConfigUpdate, dnsFix, getVPNRequestedProfile
from libs.common import resetVPNProvider, setAPICommand
from libs.ipinfo import resetIPServices

try:
    from libs.generation import generateAll
except:
    pass
# Entry point: the required first argument selects which file-management
# action to perform (ovpn / generate / downloads / log / user / dns).
action = sys.argv[1]
debugTrace("-- Entered managefiles.py with parameter " + action + " --")

# Only proceed once the service has published the addon ID.
if not getID() == "":
    addon = xbmcaddon.Addon(getID())
    addon_name = getName()

    # Reset the ovpn files
    if action == "ovpn":
        if getVPNRequestedProfile() == "":
            if xbmcgui.Dialog().yesno(addon_name, "Resetting the VPN provider will disconnect and reset all VPN connections, and then remove any files that have been created. Continue?"):
                suspendConfigUpdate()
                # Disconnect so that live files are not being modified
                resetVPNConnections(addon)
                infoTrace("managefiles.py", "Resetting the VPN provider")
                # Delete the generated files, and reset the locations so it can be selected again
                removeGeneratedFiles()
                # Delete any values that have previously been validated
                vpn_provider = getVPNLocation(addon.getSetting("vpn_provider"))
                if isAlternative(vpn_provider): resetAlternative(vpn_provider)
                # Reset the IP service error counts, etc
                resetIPServices()
                # Re-fetch the addon handle so freshly written settings are seen
                addon = xbmcaddon.Addon(getID())
                resetVPNProvider(addon)
                addon = xbmcaddon.Addon(getID())
                resumeConfigUpdate()
                xbmcgui.Dialog().ok(addon_name, "Reset the VPN provider. Validate a connection to start using a VPN again.")
        else:
            # A connection attempt is in flight; abort it rather than reset.
            xbmcgui.Dialog().ok(addon_name, "Connection to VPN being attempted and has been aborted. Try again in a few seconds.")
            setAPICommand("Disconnect")

    # Generate the VPN provider files
    if action == "generate":
        # Only used during development to create location files
        generateAll()
        xbmcgui.Dialog().ok(addon_name, "Regenerated some or all of the VPN location files.")

    # Delete all of the downloaded VPN files
    if action == "downloads":
        debugTrace("Deleting all downloaded VPN files")
        removeDownloadedFiles()
        xbmcgui.Dialog().ok(addon_name, "Deleted all of the downloaded VPN files. They'll be downloaded again if required.")
elif action == "log":
log_path = ""
dest_path = ""
try:
log_path = getLogPath()
start_dir = ""
dest_folder = xbmcgui.Dialog().browse(0, "Select folder to copy log file into", "files", "", False, False, start_dir, False)
dest_path = "kodi " + datetime.datetime.now().strftime("%y-%m-%d %H-%M-%S") + ".log"
dest_path = dest_folder + dest_path.replace(" ", "_")
# Write VPN log to log before copying
writeVPNLog()
debugTrace("Copying " + log_path + " to " + dest_path)
addon = xbmcaddon.Addon(getID())
infoTrace("managefiles.py", "Copying log file to " + dest_path + ". Using version " + addon.getSetting("version_number"))
xbmcvfs.copy(log_path, dest_path)
if not xbmcvfs.exists(dest_path): raise IOError('Failed to copy log ' + log_path + " to " + dest_path)
dialog_message = "Copied log file to: " + dest_path
except:
errorTrace("managefiles.py", "Failed to copy log from " + log_path + " to " + dest_path)
if xbmcvfs.exists(log_path):
dialog_message = "Error copying log, try copying it to a different location."
else:
dialog_messsage = "Could not find the kodi.log file."
errorTrace("managefiles.py", dialog_message + " " + log_path + ", " + dest_path)
xbmcgui.Dialog().ok("Log Copy", dialog_message)
    # Delete the user key and cert files
    elif action == "user":
        # Only ask for confirmation when a validated connection exists.
        if addon.getSetting("1_vpn_validated") == "" or xbmcgui.Dialog().yesno(addon_name, "Deleting key and certificate files will disconnect and reset all VPN connections. Connections must be re-validated before use. Continue?"):
            # Disconnect so that live files are not being modified
            if isVPNConnected(): resetVPNConnections(addon)
            # Select the provider
            provider_list = []
            for provider in providers:
                if usesUserKeys(provider):
                    provider_list.append(getVPNDisplay(provider))
            provider_list.sort()
            index = xbmcgui.Dialog().select("Select VPN provider", provider_list)
            provider_display = provider_list[index]
            provider = getVPNLocation(provider_display)
            # Get the key/cert pairs for that provider and offer up for deletion
            user_keys = getUserKeys(provider)
            user_certs = getUserCerts(provider)
            if len(user_keys) > 0 or len(user_certs) > 0:
                still_deleting = True
                while still_deleting:
                    if len(user_keys) > 0 or len(user_certs) > 0:
                        # Build a list of things to display. We should always have pairs, but if
                        # something didn't copy or the user has messed with the dir this will cope
                        all_user = []
                        single_pair = "user [I](Same key and certificate used for all connections)[/I]"
                        for key in user_keys:
                            list_item = os.path.basename(key)
                            list_item = list_item.replace(".key", "")
                            if list_item == "user": list_item = single_pair
                            all_user.append(list_item)
                        for cert in user_certs:
                            list_item = os.path.basename(cert)
                            list_item = list_item.replace(".crt", "")
                            if list_item == "user": list_item = single_pair
                            if not list_item in all_user: all_user.append(list_item)
                        all_user.sort()
                        # Offer a delete all option if there are multiple keys
                        all_item = "[I]Delete all key and certificate files[/I]"
                        if usesMultipleKeys(provider):
                            all_user.append(all_item)
                        # Add in a finished option
                        finished_item = "[I]Finished[/I]"
                        all_user.append(finished_item)
                        # Get the pair to delete
                        index = xbmcgui.Dialog().select("Select key and certificate to delete, or [I]Finished[/I]", all_user)
                        if all_user[index] == finished_item:
                            still_deleting = False
                        else:
                            # Map the friendly single-pair label back to the real stem.
                            if all_user[index] == single_pair : all_user[index] = "user"
                            if all_user[index] == all_item:
                                if xbmcgui.Dialog().yesno(addon_name, "Are you sure you want to delete all key and certificate files for " + provider_display + "?"):
                                    for item in all_user:
                                        if not item == all_item and not item == finished_item:
                                            path = getUserDataPath(provider + "/" + item)
                                            try:
                                                if xbmcvfs.exists(path + ".key"):
                                                    xbmcvfs.delete(path + ".key")
                                                if xbmcvfs.exists(path + ".txt"):
                                                    xbmcvfs.delete(path + ".txt")
                                                if xbmcvfs.exists(path + ".crt"):
                                                    xbmcvfs.delete(path + ".crt")
                                            except:
                                                xbmcgui.Dialog().ok(addon_name, "Couldn't delete one of the key or certificate files: " + path)
                            else:
                                path = getUserDataPath(provider + "/" + all_user[index])
                                try:
                                    if xbmcvfs.exists(path+".key"):
                                        xbmcvfs.delete(path + ".key")
                                    if xbmcvfs.exists(path + ".txt"):
                                        xbmcvfs.delete(path + ".txt")
                                    if xbmcvfs.exists(path + ".crt"):
                                        xbmcvfs.delete(path + ".crt")
                                except:
                                    xbmcgui.Dialog().ok(addon_name, "Couldn't delete one of the key or certificate files: " + path)
                            # Fetch the directory list again
                            user_keys = getUserKeys(provider)
                            user_certs = getUserCerts(provider)
                            if len(user_keys) == 0 and len(user_certs) == 0:
                                xbmcgui.Dialog().ok(addon_name, "All key and certificate files for " + provider_display + " have been deleted.")
                    else:
                        still_deleting = False
            else:
                xbmcgui.Dialog().ok(addon_name, "No key and certificate files exist for " + provider_display + ".")

    # Fix the user defined files with DNS goodness
    if action == "dns":
        dnsFix()

    # Return to the addon settings page when done.
    command = "Addon.OpenSettings(" + getID() + ")"
    xbmc.executebuiltin(command)
else:
    errorTrace("managefiles.py", "VPN service is not ready")

debugTrace("-- Exit managefiles.py --")
|
"""
uds.warnings
~~~~~~~~~~~~
:copyright: Copyright (c) 2015, National Institute of Information and Communications Technology. All rights reserved.
:license: GPL2, see LICENSE for more details.
"""
import warnings
def deprecated(func):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used.
    :param func: the function (or method) to mark as deprecated
    :return: new_func, a wrapper emitting a DeprecationWarning on each call
    """
    def new_func(*args, **kwargs):
        # stacklevel=2 attributes the warning to the *caller* of the
        # deprecated function instead of to this wrapper line.
        warnings.warn("Call to deprecated function {}.".format(func.__name__),
                      category=DeprecationWarning,
                      stacklevel=2)
        return func(*args, **kwargs)
    # Preserve the wrapped function's metadata so introspection and
    # documentation tools still report the original function.
    new_func.__name__ = func.__name__
    new_func.__doc__ = func.__doc__
    new_func.__dict__.update(func.__dict__)
    return new_func
@deprecated
def some_old_function(x, y):
    """Example deprecated function: returns x + y while warning callers."""
    return x + y
class SomeClass:
    """Example class demonstrating @deprecated applied to a method."""
    @deprecated
    def some_old_method(self, x, y):
        """Example deprecated method: returns x + y while warning callers."""
        return x + y
|
__author__ = 'dako'
class SessionHelper:
    """Helpers for logging in and out of the application under test."""

    def __init__(self, app):
        self.app = app

    def login(self, username, password):
        """Open the home page and submit the login form."""
        wd = self.app.wd
        self.app.open_home_page()
        wd.find_element_by_name("user").click()
        wd.find_element_by_name("user").clear()
        wd.find_element_by_name("user").send_keys(username)
        wd.find_element_by_name("pass").click()
        wd.find_element_by_name("pass").clear()
        wd.find_element_by_name("pass").send_keys(password)
        wd.find_element_by_css_selector('input[type="submit"]').click()

    def logout(self):
        """Click the Logout link."""
        wd = self.app.wd
        wd.find_element_by_link_text("Logout").click()

    def is_logged_in(self):
        """Return True when a Logout link is present on the page."""
        wd = self.app.wd
        return len(wd.find_elements_by_link_text("Logout")) > 0

    def is_logged_in_as(self, username):
        """Return True when the currently logged-in user is `username`."""
        # Removed an unused `wd` local that served no purpose here.
        return self.get_logged_user() == username

    def get_logged_user(self):
        """Read the logged-in username shown in the page header.

        The [1:-1] slice strips one surrounding character on each side of
        the element text (presumably bracketing punctuation -- verify
        against the page markup).
        """
        wd = self.app.wd
        return wd.find_element_by_xpath("//div/div[1]/form/b").text[1:-1]

    def ensure_logout(self):
        """Log out only if a session is currently active."""
        if self.is_logged_in():
            self.logout()

    def ensure_login(self, username, password):
        """Make sure `username` is logged in, switching sessions if needed."""
        if self.is_logged_in():
            if self.is_logged_in_as(username):
                return
            self.logout()
        self.login(username, password)
|
from time import gmtime, strftime
import ephem
import wx.calendar
class App(wx.App):
    """wx application bootstrap: creates and shows the main Lunacy frame."""
    def OnInit(self):
        self.frame = MyFrame("Lunacy", (50, 60), (640, 220))
        self.frame.Show()
        self.SetTopWindow(self.frame)
        return True
class MyFrame(wx.Frame):
    """Main Lunacy window: a calendar on the left and lunar/solar event
    read-outs (rise/set times, phase, azimuths/altitude) on the right."""

    def __init__(self, title, pos, size):
        """Build the static widget layout; the value labels are
        placeholders until OnDateSelect() fills them in."""
        wx.Frame.__init__(self, None, -1, title, pos, size)
        # Window icon borrowed from pidgin's default emote set.
        path = "/usr/share/pixmaps/pidgin/emotes/default/moon.png"
        icon = wx.Icon(path, wx.BITMAP_TYPE_PNG)
        self.SetIcon(icon)
        self.SetSizeHintsSz(wx.Size(640, 220), wx.DefaultSize)
        gSizer1 = wx.GridSizer(1, 2, 0, 0)
        # Left half: the calendar control.
        fgSizer1 = wx.FlexGridSizer(1, 1, 0, 0)
        fgSizer1.SetFlexibleDirection(wx.BOTH)
        fgSizer1.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_SPECIFIED)
        cal = wx.calendar.CalendarCtrl(self, wx.ID_ANY, wx.DefaultDateTime, wx.DefaultPosition, wx.DefaultSize,
                                       wx.calendar.CAL_SHOW_HOLIDAYS |
                                       wx.calendar.CAL_SHOW_SURROUNDING_WEEKS |
                                       wx.calendar.CAL_SUNDAY_FIRST |
                                       wx.SUNKEN_BORDER, u"Date of Lunacy")
        self.cal = cal
        self.cal.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        self.cal.SetToolTipString(u"Date for Next Event")
        self.cal.SetHelpText(u"Renders Lunar/Solar events for the date.")
        self.Bind(wx.calendar.EVT_CALENDAR_SEL_CHANGED, self.OnDateSelect, id=cal.GetId())
        fgSizer1.Add(self.cal, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT | wx.ALL, 5)
        fgSizer1.AddSpacer(( 0, 5), 1, wx.EXPAND, 5)
        gSizer1.Add(fgSizer1, 1, 0, 0)
        # Right half: an 8x3 grid of "label / value / extra" rows.
        fgSizer2 = wx.FlexGridSizer(8, 3, 3, 0)
        fgSizer2.SetFlexibleDirection(wx.HORIZONTAL)
        fgSizer2.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_SPECIFIED)
        fgSizer2.SetMinSize(wx.Size(-1, 220))
        # Moonrise row
        self.staticText_Moonrise = wx.StaticText(self, wx.ID_ANY, u"Moonrise", wx.DefaultPosition, wx.DefaultSize, 0)
        self.staticText_Moonrise.Wrap(-1)
        self.staticText_Moonrise.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_Moonrise, 0, 0, 5)
        self.mrtime = wx.StaticText(self, wx.ID_ANY, u"next rise", wx.DefaultPosition, wx.DefaultSize, 0)
        self.mrtime.Wrap(-1)
        fgSizer2.Add(self.mrtime, 0, 0, 5)
        self.mraz = wx.StaticText(self, wx.ID_ANY, u"azimuth", wx.DefaultPosition, wx.DefaultSize, 0)
        self.mraz.Wrap(-1)
        fgSizer2.Add(self.mraz, 0, 0, 5)
        # Moonset row
        self.staticText_Moonset = wx.StaticText(self, wx.ID_ANY, u"Moonset", wx.DefaultPosition, wx.DefaultSize, 0)
        self.staticText_Moonset.Wrap(-1)
        self.staticText_Moonset.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_Moonset, 0, 0, 10)
        self.mstime = wx.StaticText(self, wx.ID_ANY, u"next set", wx.DefaultPosition, wx.DefaultSize, 0)
        self.mstime.Wrap(-1)
        fgSizer2.Add(self.mstime, 0, 0, 5)
        self.msaz = wx.StaticText(self, wx.ID_ANY, u"azimuth", wx.DefaultPosition, wx.DefaultSize, 0)
        self.msaz.Wrap(-1)
        fgSizer2.Add(self.msaz, 0, 0, 5)
        # Phase row
        self.staticText_Phase = wx.StaticText(self, wx.ID_ANY, u"Phase", wx.DefaultPosition, wx.DefaultSize, 0)
        self.staticText_Phase.Wrap(-1)
        self.staticText_Phase.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_Phase, 0, 0, 10)
        self.moonphase = wx.StaticText(self, wx.ID_ANY, u"moonphase", wx.DefaultPosition, wx.DefaultSize, 0)
        self.moonphase.Wrap(-1)
        fgSizer2.Add(self.moonphase, 0, 0, 5)
        self.phasepercent = wx.StaticText(self, wx.ID_ANY, u"% illuminated", wx.DefaultPosition, wx.DefaultSize, 0)
        self.phasepercent.Wrap(-1)
        fgSizer2.Add(self.phasepercent, 0, 0, 5)
        # New moon row
        self.staticText_NewMoon = wx.StaticText(self, wx.ID_ANY, u"New Moon ", wx.DefaultPosition, wx.DefaultSize,
                                                wx.ST_NO_AUTORESIZE)
        self.staticText_NewMoon.Wrap(-1)
        self.staticText_NewMoon.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_NewMoon, 0, 0, 10)
        self.newmoondate = wx.StaticText(self, wx.ID_ANY, u"next new moon", wx.DefaultPosition, wx.DefaultSize, 0)
        self.newmoondate.Wrap(-1)
        fgSizer2.Add(self.newmoondate, 0, 0, 10)
        self.newmoonhour = wx.StaticText(self, wx.ID_ANY, u"hour", wx.DefaultPosition, wx.DefaultSize, 0)
        self.newmoonhour.Wrap(-1)
        fgSizer2.Add(self.newmoonhour, 0, 0, 10)
        # Full moon row
        self.staticText_FullMoon = wx.StaticText(self, wx.ID_ANY, u"Full Moon", wx.DefaultPosition, wx.DefaultSize, 0)
        self.staticText_FullMoon.Wrap(-1)
        self.staticText_FullMoon.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_FullMoon, 0, 0, 10)
        self.fullmoondate = wx.StaticText(self, wx.ID_ANY, u"next full moon", wx.DefaultPosition, wx.DefaultSize, 0)
        self.fullmoondate.Wrap(-1)
        fgSizer2.Add(self.fullmoondate, 0, 0, 5)
        self.fullmoonhour = wx.StaticText(self, wx.ID_ANY, u"hour", wx.DefaultPosition, wx.DefaultSize, 0)
        self.fullmoonhour.Wrap(-1)
        fgSizer2.Add(self.fullmoonhour, 0, 0, 5)
        # Sunrise row
        self.staticText_Sunrise = wx.StaticText(self, wx.ID_ANY, u"Sunrise", wx.DefaultPosition, wx.DefaultSize, 0)
        self.staticText_Sunrise.Wrap(-1)
        self.staticText_Sunrise.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_Sunrise, 0, 0, 10)
        self.srtime = wx.StaticText(self, wx.ID_ANY, u"next rise", wx.DefaultPosition, wx.DefaultSize, 0)
        self.srtime.Wrap(-1)
        fgSizer2.Add(self.srtime, 0, 0, 5)
        self.sraz = wx.StaticText(self, wx.ID_ANY, u"azimuth", wx.DefaultPosition, wx.DefaultSize, 0)
        self.sraz.Wrap(-1)
        fgSizer2.Add(self.sraz, 0, 0, 5)
        # Solar noon row
        self.staticText_SolarNoon = wx.StaticText(self, wx.ID_ANY, u"High Noon", wx.DefaultPosition, wx.DefaultSize, 0)
        self.staticText_SolarNoon.Wrap(-1)
        self.staticText_SolarNoon.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_SolarNoon, 0, 0, 10)
        self.sntime = wx.StaticText(self, wx.ID_ANY, u"solar noon", wx.DefaultPosition, wx.DefaultSize, 0)
        self.sntime.Wrap(-1)
        fgSizer2.Add(self.sntime, 0, 0, 5)
        self.snaltitude = wx.StaticText(self, wx.ID_ANY, u"altitude", wx.DefaultPosition, wx.DefaultSize, 0)
        self.snaltitude.Wrap(-1)
        fgSizer2.Add(self.snaltitude, 0, 0, 5)
        # Sunset row
        self.staticText_Sunset = wx.StaticText(self, wx.ID_ANY, u"Sunset", wx.DefaultPosition, wx.DefaultSize, 0)
        self.staticText_Sunset.Wrap(-1)
        self.staticText_Sunset.SetFont(wx.Font(12, 74, 90, 90, False, "Sans"))
        fgSizer2.Add(self.staticText_Sunset, 0, 0, 10)
        self.sstime = wx.StaticText(self, wx.ID_ANY, u"next set", wx.DefaultPosition, wx.DefaultSize, 0)
        self.sstime.Wrap(-1)
        fgSizer2.Add(self.sstime, 0, 0, 5)
        self.ssaz = wx.StaticText(self, wx.ID_ANY, u"azimuth", wx.DefaultPosition, wx.DefaultSize, 0)
        self.ssaz.Wrap(-1)
        fgSizer2.Add(self.ssaz, 0, 0, 5)
        gSizer1.Add(fgSizer2, 1, wx.TOP, 5)
        self.SetSizer(gSizer1)
        self.Layout()
        self.Centre(wx.BOTH)

    def __del__(self):
        pass

    def OnDateSelect(self, evt):
        """Recompute all lunar/solar figures for the selected calendar
        date, observing from the position in the Navigatrix lat/lon files."""
        f = open(r'/etc/nx.lat')  # Lat/lon files for Navigatrix
        lat = f.readline(12)
        f.close()
        f = open(r'/etc/nx.lon')
        lon = f.readline(12)
        f.close()
        # Convert decimal degrees into a degrees/minutes/seconds string.
        # NOTE(review): the components are concatenated without ':'
        # separators; pyephem documents 'deg:min:sec' strings -- confirm
        # this produces the intended observer position.
        lat = float(lat)
        lon = float(lon)
        degrees = int(lat)
        mnn = (lat - degrees) * 60
        minutes = int(mnn)
        seconds = round(((mnn - minutes) * 60), 3)
        lat = str(degrees) + str(minutes) + str(seconds)
        degrees = int(lon)
        mnn = (lon - degrees) * 60
        minutes = int(mnn)
        seconds = round(((mnn - minutes) * 60), 3)
        lon = str(degrees) + str(minutes) + str(seconds)
        here = ephem.Observer()
        here.lat = lat
        here.lon = lon
        here.pressure = 0  # barometric pressure not factored
        here.horizon = '-0:34'  # fudge factor from the US Navel Observatory
        here.elevation = 2.0  # 2 Meters elevation
        here.temp = 25.0  # and a balmy 25 degrees
        # Combine the picked calendar date with the current UTC time.
        cal = evt.GetEventObject()
        year = (str(self.cal.GetDate().GetYear()))
        month = (str(self.cal.GetDate().GetMonth() + 1))
        day = (str(self.cal.GetDate().GetDay()))
        hour = strftime("%H:%M:%S", gmtime())
        datefig = year + '/' + month + '/' + day + ' ' + hour
        here.date = datefig
        sun = ephem.Sun(here)
        moon = ephem.Moon(here)
        moon.compute(here)
        #
        # Moon Rise: local time (HH:MM:SS) and azimuth whole degrees.
        #
        mrtime = here.next_rising(moon)
        lt = ephem.localtime(mrtime)
        mrtime = str(lt).split()
        mrtime = mrtime[1].split(".")
        self.mrtime.SetLabel(str(mrtime[0]))
        mraz = str(moon.az).partition(':')
        self.mraz.SetLabel(str(mraz[0]) + u'\u00B0 from North')
        #
        # Moonset
        #
        mstime = here.next_setting(moon)
        lt = ephem.localtime(mstime)
        mstime = str(lt).split()
        mstime = mstime[1].split(".")
        self.mstime.SetLabel(mstime[0])
        msaz = str(moon.az).partition(':')
        self.msaz.SetLabel(str(msaz[0]) + u'\u00B0 from North')
        #
        # Moon Phase
        # TODO Clearly these numbers are pulled out of a hat.
        # they are a very rough approximation of the phases and
        # do not account for waxing and waning
        #
        phasepercent = int(moon.moon_phase * 100)
        self.phasepercent.SetLabel(str(phasepercent) + " %")
        if phasepercent <= 2.0:
            moonphase = "New Moon"
        if 2.1 < phasepercent <= 20.0:
            moonphase = "Crescent"
        if 20.1 < phasepercent <= 60.0:
            moonphase = "Quarter Moon"
        if 60.1 < phasepercent <= 95.0:
            moonphase = "Gibbous"
        if phasepercent > 95.1:
            moonphase = "Full Moon"
        self.moonphase.SetLabel(moonphase)
        #
        # New Moon Date
        #
        newmoondate = ephem.next_new_moon(datefig)
        lt = ephem.localtime(newmoondate)
        newmoondate = str(lt).split()
        newmoonhour = newmoondate[1].split(".")
        self.newmoondate.SetLabel(str(newmoondate[0]))
        self.newmoonhour.SetLabel(str(newmoonhour[0]))
        #
        # Full Moon Date
        #
        fullmoondate = ephem.next_full_moon(datefig)
        lt = ephem.localtime(fullmoondate)
        fullmoondate = str(lt).split()
        fullmoonhour = fullmoondate[1].split(".")
        self.fullmoondate.SetLabel(str(fullmoondate[0]))
        self.fullmoonhour.SetLabel(str(fullmoonhour[0]))
        #
        # Sun Rise
        #
        sun.compute(here)
        srtime = here.next_rising(sun)
        lt = ephem.localtime(srtime)
        srtime = str(lt).split()
        srtime = srtime[1].split(".")
        self.srtime.SetLabel(srtime[0])
        sraz = str(sun.az).partition(':')
        self.sraz.SetLabel(str(sraz[0]) + u'\u00B0 from North')
        #
        # High Noon (solar transit)
        #
        sntime = here.next_transit(sun)
        lt = ephem.localtime(sntime)
        sntime = str(lt).split()
        sntime = sntime[1].split(".")
        self.sntime.SetLabel(sntime[0])
        snaltitude = str(sun.alt).partition(':')
        self.snaltitude.SetLabel(str(snaltitude[0]) + u'\u00B0 above Horizon')
        #
        # Sun Set
        #
        sstime = here.next_setting(sun)
        lt = ephem.localtime(sstime)
        sstime = str(lt).split()
        sstime = sstime[1].split(".")
        self.sstime.SetLabel(sstime[0])
        ssaz = str(sun.az).partition(':')
        self.ssaz.SetLabel(str(ssaz[0]) + u'\u00B0 from North')
if __name__ == '__main__':
    # Start the wx application and enter the GUI event loop.
    app = App()
    app.MainLoop()
|
from pluginInterfaces import PluginFit, Parameter,leastsqFit
import numpy as np
class PluginFitThreeBodyBeta(PluginFit):
    """Sigmoidal fit plugin: y = a / (1 + exp(-(x - xc) / k)) + y0.

    NOTE(review): the class name says "ThreeBodyBeta" but the fitted model
    and getFitModelStr() are sigmoidal -- confirm the intended model.
    """
    def __init__(self):
        pass

    def fit(self, array, errarray, param, xmin=0, xmax=0, fitAxes=[]):
        """Fit the sigmoid and return the data needed for plotting.

        Parameter order in `param`: 0...a, 1...xc, 2...k, 3...y0.
        NOTE(review): `fitAxes=[]` is a mutable default argument; it is
        only read here, but confirm the base class never mutates it.
        """
        self.params = [Parameter(v) for v in param]
        def f(x): return self.params[0]()/(1+np.exp(-(x-self.params[1]())/self.params[2]()))+self.params[3]()
        self.simpleFitAllAxes(f, array, errarray, xmin, xmax, fitAxes)
        return self.generateDataFromParameters(f, [np.amin(array[0,:]), np.amax(array[0,:])], np.size(fitAxes)+1, xmin, xmax, fitAxes)

    def getInitialParameters(self, data):
        """Estimate starting values [a, xc, dx, y0] from the data.

        Amplitude = data range, centre = midpoint of the x range,
        width = twice the x span, offset = data minimum.
        """
        dx = np.abs(data[0,0] - data[0,-1])
        mi = np.amin(data[1,:])
        ma = np.amax(data[1,:])
        xc = (np.amax(data[0,:])-np.amin(data[0,:]))/2+np.amin(data[0,:])
        return [ma-mi, xc, dx*2, mi]

    def getParameters(self):
        """Return the names of the fit parameters."""
        return np.array(["a","xc","dx","y0"])

    def getFitModelStr(self):
        """Return a short label for the implemented fitting model."""
        return "Sigmoidal"

    def getResultStr(self):
        """Return a special result string, i.e. 'Frequency = blabla'."""
        return "nothing fitted"
|
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
# Platform sanity check: five network-order uint32s must pack to 20 bytes.
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
    """ SHA-1 tests from FIPS180-1 Appendix A, B and C """

    def testFIPS180_1_Appendix_A(self):
        """ APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
        hashAlg = SHA1()
        message = 'abc'
        # Expected digest as five 32-bit words, straight from FIPS 180-1.
        message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
        md_string = _toBString(message_digest)
        assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'

    def testFIPS180_1_Appendix_B(self):
        """ APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
        hashAlg = SHA1()
        message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
        message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
        md_string = _toBString(message_digest)
        assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed'

    def testFIPS180_1_Appendix_C(self):
        """ APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
        Let the message be the binary-coded form of the ASCII string which consists
        of 1,000,000 repetitions of "a". """
        hashAlg = SHA1()
        message = 1000000*'a'
        message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
        md_string = _toBString(message_digest)
        assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
    """ Convert binary string to blocks of 5 words of uint32() """
    # NOTE(review): uint32 is neither defined nor imported in this module;
    # calling this helper would raise NameError -- confirm where uint32 is
    # supposed to come from.
    return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
    """ Convert block (5 words of 32 bits) to a binary string """
    return ''.join([struct.pack('!I',word) for word in block])
if __name__ == '__main__':
    # Run the tests from the command line
    unittest.main()
|
# Console-script shim generated by setuptools: resolve and invoke the
# `kartograph` entry point pinned to kartograph.py==0.6.8, exiting with
# the entry point's return value.
__requires__ = 'kartograph.py==0.6.8'
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.exit(
        load_entry_point('kartograph.py==0.6.8', 'console_scripts', 'kartograph')()
    )
|
from wxPython.wx import *
import os
import datetime
from objects.emprunt import Emprunt
from popups.members import AddMemberPanel, ShowMembersPanel
class CheckoutPopup(wxDialog):
    """Dialog for checking out the items currently in the order box,
    either to a newly created member or to an existing one."""

    def __init__(self, parent):
        self.parent = parent
        wxDialog.__init__(self, parent, -1, "Check out items")
        self.mastersizer = wxBoxSizer(wxVERTICAL)
        self.static1 = wxStaticText(self, -1, "Check out to :")
        self.mastersizer.Add(self.static1)
        # Two tabs: create a new member or pick an existing one.  Both
        # call back into Borrow() with the chosen member id.
        self.notebook = wxNotebook(self, -1, style=wxNB_TOP)
        self.new_member_panel = AddMemberPanel(parent=self.notebook, main_window=parent,
                                               on_successful_add=self.Borrow, cancel=self.Close)
        self.notebook.AddPage(self.new_member_panel, "New member")
        self.show_member_panel = ShowMembersPanel(parent=self.notebook, main_window=parent, motherDialog=self, on_select=self.Borrow)
        self.notebook.AddPage(self.show_member_panel, "Existing member")
        self.mastersizer.Add(self.notebook)
        self.SetSizer(self.mastersizer)
        # Debug dump of the items about to be checked out.
        for i in self.parent.orderbox.items:
            print i.database_id, "... ", i.id
        #self.b = wxButton(self, -1, "Checkout", (15, 80))
        #EVT_BUTTON(self, self.b.GetId(), self.Checkout)
        #self.b.SetDefault()
        self.mastersizer.SetSizeHints(self)

    def Borrow(self, id):
        """Record an Emprunt (loan) for every item in the order box, mark
        the box borrowed, and close the dialog."""
        borrower = self.parent.membersList.get(id)
        print borrower
        for i in self.parent.orderbox.items:
            # Check if this work on sqlobject 0.7... I got
            # lots of problem on 0.6.1, and itemID __isn't__
            # defined in emprunt, which is plain weirdness
            e = Emprunt(borrower = id, itemID=i.database_id)
            print i.database_id
        self.parent.orderbox.setBorrowed()
        self.parent.orderbox.void()
        self.Close()

    def OnCancel(self, event):
        self.EndModal(1)

    def Checkout(self, event):
        """Legacy path: tag the order box with today's date plus the
        typed-in borrower name, then empty it."""
        borrower = self.borrower.GetValue()
        if len(borrower) > 0:
            today = "%s" % datetime.date.today()
            self.parent.orderbox.change_status(today + "-" + borrower)
            self.parent.orderbox.void()
            self.Close()
|
from datetime import *
from Tweetstream import *
from UserAnalyser import *
from TimeAnalyser import *
import math
import sys
import pickle
def load_list(filein):
    """Parse one Python literal per line of *filein* into a dict.

    Each line must be a literal 2-item sequence ``(key, value)``.
    Uses ast.literal_eval instead of the original eval(): the input is a
    data file, and eval() would execute arbitrary expressions embedded in
    it (code-injection risk). literal_eval accepts the same literal
    tuples/lists/dicts/strings/numbers, so well-formed data is unaffected.

    :param filein: iterable of text lines (an open file or list of str)
    :return: dict mapping first element -> second element of each line
    """
    import ast  # local import keeps the module's import block untouched
    result = dict()
    for line in filein:
        record = ast.literal_eval(line)
        result[record[0]] = record[1]
    return result
if __name__ == "__main__":
    # argv layout (positional):
    #   1 = topic stream json file, 2 = keywords file (one CSV line),
    #   3 = user stream json file, 4 = analyser prefix / output tag,
    #   5 = followers list file (one literal (user, followers) per line)
    follow = load_list(open(sys.argv[5], 'r'))
    keywords = open(sys.argv[2], 'r').readline().strip("\n").split(",")
    userstream = Tweetstream(jsonfilee=sys.argv[3], jsonformat=False, keywords=keywords)
    # NOTE(review): topicstream is built but never used below — confirm intent.
    topicstream = Tweetstream(jsonfilee=sys.argv[1], jsonformat=False, keywords=keywords)
    ua = UserAnalyser (sys.argv[4], keywords = keywords)
    ua.load_usersVectors()
    ua.load_idf()
    ua.load_usersScore()
    rank = dict()
    # normalize by the number of keywords in the topic vector
    c = 0
    for t in userstream:
        # Score each tweet as the mean usersScore of the author's followees
        # (0 when the author is unknown or no followee has a score).
        rank[t['id']] = 0
        n = 0
        if t['user_id'] in follow:
            c += 1
            for fuser in follow[t['user_id']]:
                if fuser in ua.usersScore:
                    rank[t['id']] += ua.usersScore[fuser]
                    n += 1
            if n > 0: rank[t['id']] /= n
    print c
    # print score, nwindow
    pickle.dump(rank, open(sys.argv[4]+"_rank_USER_followers.pick", 'w'), pickle.HIGHEST_PROTOCOL)
|
class Solution(object):
    """LeetCode 73 — Set Matrix Zeroes."""

    def setZeroes(self, matrix):
        """
        Zero out every row and column that contains a 0, in place.

        :type matrix: List[List[int]]
        :rtype: void Do not return anything, modify matrix in-place instead.

        Fixes vs. the original: the float("inf") in-place sentinel corrupted
        matrices that legitimately contain infinity, and an empty matrix
        raised IndexError on len(matrix[0]). This version records the zero
        rows/columns in sets first (O(m + n) extra space), then clears them.
        """
        if not matrix or not matrix[0]:
            return
        zero_rows = set()
        zero_cols = set()
        for i, row in enumerate(matrix):
            for j, val in enumerate(row):
                if val == 0:
                    zero_rows.add(i)
                    zero_cols.add(j)
        width = len(matrix[0])
        for i in zero_rows:
            matrix[i][:] = [0] * width
        for j in zero_cols:
            for row in matrix:
                row[j] = 0
|
"""
core/api/serializers.py is the module for core model api data serializers
"""
from django.contrib.auth.models import User, Permission
from rest_framework import serializers
from core.models import (Product, ProductCategory, UnitOfMeasurement, UOMCategory, CompanyCategory, Company,
Currency, Rate, Contact, Address, EmployeeCategory, Employee, ProductPresentation,
ModeOfAdministration, ProductItem, ProductFormulation)
class UserSerializer(serializers.ModelSerializer):
    """
    REST API serializer for User model
    """
    class Meta:
        model = User
        # No `fields`/`exclude`: serializes every model field
        # (pre-DRF-3.3 implicit default — confirm installed DRF version).
class BaseModelSerializer(serializers.ModelSerializer):
    """
    Base Model Serializer for models

    Adds the audit fields created_by/modified_by, rendered with the
    nested UserSerializer; concrete core serializers subclass this.
    """
    # Audit fields are populated server-side, hence read_only.
    created_by = UserSerializer(required=False, read_only=True)
    modified_by = UserSerializer(required=False, read_only=True)
class ProductCategorySerializer(BaseModelSerializer):
    """
    REST API Serializer for ProductCategory model
    """
    class Meta:
        model = ProductCategory
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class ProductSerializer(BaseModelSerializer):
    """
    REST API Serializer for Product models
    """
    class Meta:
        model = Product
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class UOMCategorySerializer(BaseModelSerializer):
    """
    REST API Serializer for UOMCategory model
    """
    class Meta:
        model = UOMCategory
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class UnitOfMeasurementSerializer(BaseModelSerializer):
    """
    REST API Serializer for UnitOfMeasurement model
    """
    class Meta:
        model = UnitOfMeasurement
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class CompanyCategorySerializer(BaseModelSerializer):
    """
    REST API serializer for CompanyCategory model
    """
    class Meta:
        model = CompanyCategory
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class CompanySerializer(BaseModelSerializer):
    """
    REST API serializer for Company model
    """
    class Meta:
        model = Company
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class CurrencySerializer(BaseModelSerializer):
    """
    REST API serializer for Currency model
    """
    class Meta:
        model = Currency
        # Explicit field list (the only serializer here that has one);
        # 'rates' presumably exposes the reverse relation to Rate — verify.
        fields = ('code', 'name', 'symbol', 'symbol_position', 'rates',)
class RateSerializer(BaseModelSerializer):
    """
    REST API serializer for Rate model
    """
    class Meta:
        model = Rate
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class ContactSerializer(BaseModelSerializer):
    """
    REST API serializer for Contact model
    """
    class Meta:
        model = Contact
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class AddressSerializer(BaseModelSerializer):
    """
    REST API serializer for Address model
    """
    class Meta:
        model = Address
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class EmployeeCategorySerializer(BaseModelSerializer):
    """
    REST API serializer for EmployeeCategory
    """
    class Meta:
        model = EmployeeCategory
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class EmployeeSerializer(BaseModelSerializer):
    """
    REST API serializer for Employee
    """
    class Meta:
        model = Employee
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class PermissionSerializer(serializers.ModelSerializer):
    """
    REST API serializer for Permission model
    """
    # Permission is a stock django.contrib.auth model without the audit
    # columns (created_by/modified_by) that BaseModelSerializer declares,
    # so subclass ModelSerializer directly — exactly as UserSerializer
    # (the other auth-model serializer in this module) already does.
    class Meta:
        model = Permission
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class ProductPresentationSerializer(BaseModelSerializer):
    """
    REST API serializer for ProductPresentation model
    """
    class Meta:
        model = ProductPresentation
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class ModeOfAdministrationSerializer(BaseModelSerializer):
    """
    REST API serializer for ModeOfAdministration model
    """
    class Meta:
        model = ModeOfAdministration
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class ProductItemSerializer(BaseModelSerializer):
    """
    REST API serializer for ProductItem model
    """
    class Meta:
        model = ProductItem
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
class ProductFormulationSerializer(BaseModelSerializer):
    """
    REST API serializer for ProductFormulation model, it can be Lyophilized, Liquid or Not Applicable
    """
    class Meta:
        model = ProductFormulation
        # No `fields`/`exclude`: all model fields (pre-DRF-3.3 default).
|
def hamming(s, t):
    """Return the Hamming distance between equal-length sequences s and t.

    Counts positions where the elements differ. Iterates over the indices
    of *s*, so an IndexError is raised if *t* is shorter than *s*.
    """
    return sum(s[i] != t[i] for i in range(len(s)))
|
import os
import sys
import time
import difflib
from os import path
from subprocess import Popen, PIPE, STDOUT
already_compiled = set()
def run_test(testname, testcode, compiled):
    """Run a single test case and diff its stdout against the .chk file.

    testname: test path without extension; <testname>.tst (optional)
        supplies stdin, <testname>.chk holds the expected stdout.
    testcode: path of the source file to run (and optionally compile).
    compiled: when true, additionally build a native binary once per
        source file and check its output too.
    Raises RuntimeError on any output mismatch or compilation failure.
    """
    stdin = b''
    if path.isfile(testname + '.tst'):
        with open(testname + '.tst', 'rb') as stdinfile:
            stdin = stdinfile.read()
    with open(testname + '.chk', 'r') as stdoutfile:
        stdout = stdoutfile.read()
    def check(proc, remove_cargo):
        # Feed stdin, capture combined stdout/stderr, and compare with the
        # expected output; print a unified diff and raise on mismatch.
        real_stdout, _ = proc.communicate(stdin)
        real_stdout = real_stdout.decode()
        # remove cargo's "Running" line
        if remove_cargo:
            errindex = real_stdout.find('An unknown error occurred')
            if errindex == -1:
                errindex = real_stdout.find('error: Process didn\'t exit successfully')
            if errindex > -1:
                real_stdout = real_stdout[:errindex]
        if real_stdout != stdout:
            print('*** ERROR: standard output does not match check file')
            print(''.join(difflib.unified_diff(stdout.splitlines(True),
                                               real_stdout.splitlines(True))))
            raise RuntimeError
    print('')
    print('>>> Test: ' + testname)
    print('  > Step 1: interpreted')
    check(Popen(['cargo', 'run', '--release', '-q', '--', '-Rbi', testcode],
                stdin=PIPE, stdout=PIPE, stderr=STDOUT), True)
    print('  > Step 2: interpreted + optimized')
    check(Popen(['cargo', 'run', '--release', '-q', '--', '-Rbio', testcode],
                stdin=PIPE, stdout=PIPE, stderr=STDOUT), True)
    if compiled:
        print('  > Step 3: compiled + optimized')
        # Compile each source file at most once per session.
        if testcode not in already_compiled:
            if os.system('cargo run --release -q -- -RFbo %s > /dev/null' % testcode) != 0:
                print('*** ERROR: compilation failed')
                raise RuntimeError
            already_compiled.add(testcode)
        # The produced binary drops the source extension ('.i').
        check(Popen([testcode[:-2]], stdin=PIPE, stdout=PIPE, stderr=STDOUT),
              False)
def main():
    """Build the project, discover tests under code/, run them, and return
    a shell exit code (0 = all passed, 1 = some failed, 2 = build failure).

    Command-line flags: --nocompile skips the compiled step, --all also
    runs the slow tests; any other non-flag argument selects specific
    tests by name.
    """
    start = time.time()
    compile_flag = '--nocompile' not in sys.argv
    skip_flag = '--all' not in sys.argv
    # Normalize user-supplied test paths to OS separators, no extension.
    tests = [path.splitext(test.replace('/', os.sep))[0]
             for test in sys.argv[1:] if not test.startswith('-')]
    print('Building...')
    if os.system('cargo build --release') != 0:
        return 2
    print('Running tests, please wait...')
    passed = 0
    total = 0
    failed = []
    for root, dirs, files in os.walk('code'):
        dirs.sort()
        for fn in sorted(files):
            if not fn.endswith('.chk'):
                continue
            # Slow tests are skipped unless --all was given.
            if skip_flag and fn.startswith(('fft-', 'flonck', 'unlambda')):
                continue
            testname = path.join(root, fn)[:-4]
            if tests and testname not in tests:
                continue
            testcode = testname + '.i'
            # special case: several .chk files share one source file
            if fn.startswith('fft-'):
                testcode = path.join(root, 'fft.i')
            elif fn.startswith('life-'):
                testcode = path.join(root, 'life2.i')
            if not path.isfile(testcode):
                print('')
                print('*** WARNING: found %s.chk, but not %s' % (testname, testcode))
                continue
            total += 1
            try:
                t1 = time.time()
                run_test(testname, testcode, compile_flag)
                t2 = time.time()
                passed += 1
                print('--- passed (%5.2f sec)' % (t2 - t1))
            except RuntimeError:
                failed.append(testname)
    end = time.time()
    print('')
    print('RESULT: %d/%d tests passed (%6.2f sec)' % (passed, total, end - start))
    if failed:
        print('Failed:')
        for testname in failed:
            print('    ' + testname)
    return 0 if passed == total else 1
if __name__ == '__main__':
    # Propagate main()'s result as the process exit status
    # (0 = all passed, 1 = failures, 2 = build error).
    sys.exit(main())
|
"""
Windows Process Control
winprocess.run launches a child process and returns the exit code.
Optionally, it can:
redirect stdin, stdout & stderr to files
run the command as another user
limit the process's running time
control the process window (location, size, window state, desktop)
Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32
extensions.
This code is free for any purpose, with no warranty of any kind.
-- John B. Dell'Aquila <jbd@alum.mit.edu>
"""
import win32api, win32process, win32security
import win32event, win32con, msvcrt, win32gui
def logonUser(loginString):
    """
    Login as specified user and return a handle.

    loginString: 'Domain\nUser\nPassword'; for local
    login use . or empty string as domain
    e.g. '.\nadministrator\nsecret_password'
    """
    domain, user, passwd = loginString.split('\n')
    logon_type = win32con.LOGON32_LOGON_INTERACTIVE
    provider = win32con.LOGON32_PROVIDER_DEFAULT
    return win32security.LogonUser(user, domain, passwd, logon_type, provider)
class Process:
    """
    A Windows process.
    """
    def __init__(self, cmd, login=None,
                 hStdin=None, hStdout=None, hStderr=None,
                 show=1, xy=None, xySize=None,
                 desktop=None):
        """
        Create a Windows process.
        cmd:     command to run
        login:   run as user 'Domain\nUser\nPassword'
        hStdin, hStdout, hStderr:
                 handles for process I/O; default is caller's stdin,
                 stdout & stderr
        show:    wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...)
        xy:      window offset (x, y) of upper left corner in pixels
        xySize:  window size (width, height) in pixels
        desktop: lpDesktop - name of desktop e.g. 'winsta0\\default'
                 None = inherit current desktop
                 '' = create new desktop if necessary

        User calling login requires additional privileges:
          Act as part of the operating system [not needed on Windows XP]
          Increase quotas
          Replace a process level token
        Login string must EITHER be an administrator's account
        (ordinary user can't access current desktop - see Microsoft
        Q165194) OR use desktop='' to run another desktop invisibly
        (may be very slow to startup & finalize).
        """
        si = win32process.STARTUPINFO()
        # Combine STARTF_* flags with bitwise OR.  The original used XOR
        # (^ / ^=), which happens to give the same result for disjoint
        # bits but would *clear* a flag that was already set.
        si.dwFlags = (win32con.STARTF_USESTDHANDLES |
                      win32con.STARTF_USESHOWWINDOW)
        # Fall back to the caller's standard handles where none are given.
        if hStdin is None:
            si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE)
        else:
            si.hStdInput = hStdin
        if hStdout is None:
            si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE)
        else:
            si.hStdOutput = hStdout
        if hStderr is None:
            si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE)
        else:
            si.hStdError = hStderr
        si.wShowWindow = show
        if xy is not None:
            si.dwX, si.dwY = xy
            si.dwFlags |= win32con.STARTF_USEPOSITION
        if xySize is not None:
            si.dwXSize, si.dwYSize = xySize
            si.dwFlags |= win32con.STARTF_USESIZE
        if desktop is not None:
            si.lpDesktop = desktop
        procArgs = (None,                            # appName
                    cmd,                             # commandLine
                    None,                            # processAttributes
                    None,                            # threadAttributes
                    1,                               # bInheritHandles
                    win32process.CREATE_NEW_CONSOLE, # dwCreationFlags
                    None,                            # newEnvironment
                    None,                            # currentDirectory
                    si)                              # startupinfo
        if login is not None:
            # Impersonate only for the duration of process creation.
            hUser = logonUser(login)
            win32security.ImpersonateLoggedOnUser(hUser)
            procHandles = win32process.CreateProcessAsUser(hUser, *procArgs)
            win32security.RevertToSelf()
        else:
            procHandles = win32process.CreateProcess(*procArgs)
        self.hProcess, self.hThread, self.PId, self.TId = procHandles
    def wait(self, mSec=None):
        """
        Wait for process to finish or for specified number of
        milliseconds to elapse.
        """
        if mSec is None:
            mSec = win32event.INFINITE
        return win32event.WaitForSingleObject(self.hProcess, mSec)
    def kill(self, gracePeriod=5000):
        """
        Kill process. Try for an orderly shutdown via WM_CLOSE. If
        still running after gracePeriod (5 sec. default), terminate.
        """
        win32gui.EnumWindows(self.__close__, 0)
        if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0:
            win32process.TerminateProcess(self.hProcess, 0)
        win32api.Sleep(100) # wait for resources to be released
    def __close__(self, hwnd, dummy):
        """
        EnumWindows callback - sends WM_CLOSE to any window
        owned by this process.
        """
        TId, PId = win32process.GetWindowThreadProcessId(hwnd)
        if PId == self.PId:
            win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0)
    def exitCode(self):
        """
        Return process exit code.
        """
        return win32process.GetExitCodeProcess(self.hProcess)
def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw):
    """
    Run cmd as a child process and return exit code.
    mSec:  terminate cmd after specified number of milliseconds
    stdin, stdout, stderr:
           file objects for child I/O (use hStdin etc. to attach
           handles instead of files); default is caller's stdin,
           stdout & stderr;
    kw:    see Process.__init__ for more keyword options
    """
    # Translate Python file objects into inheritable OS handles.
    if stdin is not None:
        kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno())
    if stdout is not None:
        kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno())
    if stderr is not None:
        kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno())
    child = Process(cmd, **kw)
    if child.wait(mSec) != win32event.WAIT_OBJECT_0:
        # Timed out: kill the child before reporting failure.
        # (Python 2 raise syntax, matching the rest of this module.)
        child.kill()
        raise WindowsError, 'process timeout exceeded'
    return child.exitCode()
if __name__ == '__main__':
    # Pipe commands to a shell and display the output in notepad
    print 'Testing winprocess.py...'
    import tempfile
    timeoutSeconds = 15
    # Batch script fed to cmd.exe via a temp file; the last line is an
    # unknown command, exercising the stderr redirection.
    cmdString = """\
REM Test of winprocess.py piping commands to a shell.\r
REM This window will close in %d seconds.\r
vol\r
net user\r
_this_is_a_test_of_stderr_\r
""" % timeoutSeconds
    cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile()
    cmd.write(cmdString)
    cmd.seek(0)
    print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd,
                                    stdout=out, stderr=out)
    cmd.close()
    print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name,
                                    show=win32con.SW_MAXIMIZE,
                                    mSec=timeoutSeconds*1000)
    out.close()
|
from bs4 import BeautifulSoup
from urllib2 import urlopen
import pandas as pd
# fantasydata.com position index used in the stats URL's `p=` parameter.
pos_idx_map = {
    'qb': 2,
    'rb': 3,
    'wr': 4,
    'te': 5,
}
def make_url(pos, wk):
    """Return the fantasydata.com PPR stats URL for position *pos*, week *wk*."""
    base = "http://fantasydata.com/nfl-stats/nfl-fantasy-football-stats.aspx"
    query = ("fs=1&stype=0&sn=1&w=%s&s=&t=0&p=%s"
             "&st=FantasyPointsPPR&d=1&ls=&live=false") % (wk, pos_idx_map[pos])
    return base + "?" + query
def html2df(soup):
    """Convert the first <table> in *soup* into a pandas DataFrame.

    Column names come from the <th> cells (lower-cased); each non-empty
    <tr> contributes one row of utf-8 encoded cell texts.
    """
    table = soup.find('table')
    headers = [th.text.lower() for th in table.find_all('th')]
    rows = [[td.text.encode('utf8') for td in tr.find_all('td')]
            for tr in table.find_all('tr')]
    rows = [r for r in rows if len(r) > 0]
    df = pd.DataFrame.from_records(rows)
    df.columns = headers
    return df
def position_html_local(posn):
    """Build one season's DataFrame for *posn* from locally saved HTML.

    Expects files named '<posn><week>.html' for weeks 1-16 in the current
    directory; tags each week's rows with a 'wk' column and cleans the
    headers before concatenating.
    """
    dflist = []
    for ii in range(1, 17):
        fname = '%s%s.html' % (posn, ii)
        with open(fname) as f:
            df = html2df(BeautifulSoup(f))
        df['wk'] = ii
        df.columns = header_clean(df.columns, posn)
        dflist.append(df)
    return pd.concat(dflist)
def position_html(posn):
    """Build one season's DataFrame for *posn* by fetching weeks 1-16
    directly from fantasydata.com (see make_url); otherwise identical to
    position_html_local.
    """
    dflist = []
    for ii in range(1, 17):
        fname = make_url(posn, ii)
        df = html2df(BeautifulSoup(urlopen(fname)))
        df['wk'] = ii
        df.columns = header_clean(df.columns, posn)
        dflist.append(df)
    return pd.concat(dflist)
# Suffixes appended to duplicated stat columns, in order of appearance
# (e.g. a QB table repeats 'yds' for passing then rushing).
pos_header_suffixes = {
    'qb': ['_pass', '_rush'],
    'rb': ['_rush', '_recv'],
    'wr': ['_recv'],
    'te': ['_recv'],
}
# Columns that are never disambiguated.
exclude_cols = ['rk', 'player', 'team', 'pos', 'fantasy points',
                'wk', 'fum', 'lost', 'qb rating']
def header_clean(header, posn):
    """Disambiguate duplicate stat column names for position *posn*.

    The n-th occurrence of a non-excluded column gets the n-th suffix for
    that position; unknown positions return *header* unchanged.
    """
    if posn not in pos_header_suffixes:
        return header
    suffixes = pos_header_suffixes[posn]
    counts = dict.fromkeys(header, 0)
    cleaned = []
    for name in header:
        if name in exclude_cols:
            cleaned.append(name)
        else:
            cleaned.append(name + suffixes[counts[name]])
            counts[name] += 1
    return cleaned
if __name__ == '__main__':
    # Build each position's season DataFrame from local HTML dumps and
    # pickle it as '<pos>.pkl'.
    data_all = {}
    for pp in ['qb', 'wr', 'rb', 'te']:
        data_all[pp] = position_html_local(pp)
        data_all[pp].to_pickle('%s.pkl' % pp)
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class JsonErrorResponse(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        Swagger model

        :param dict swaggerTypes: The key is attribute name and the value is attribute type.
        :param dict attributeMap: The key is attribute name and the value is json key in definition.
        """
        self.swagger_types = {
            'status': 'str',
            'message': 'str'
        }
        self.attribute_map = {
            'status': 'status',
            'message': 'message'
        }
        # Status: "ok" or "error"
        self.status = None  # str
        # Error message
        self.message = None  # str
    def __repr__(self):
        """Return '<module_name attr=value ...>' for the data attributes.

        Fix: the original filtered out the camelCase names 'swaggerTypes'
        and 'attributeMap', which never exist on this class (the actual
        attributes are snake_case), so the serializer-metadata dicts
        leaked into every repr.
        """
        properties = []
        for p in self.__dict__:
            if p != 'swagger_types' and p != 'attribute_map':
                properties.append('{prop}={val!r}'.format(prop=p, val=self.__dict__[p]))
        return '<{name} {props}>'.format(name=__name__, props=' '.join(properties))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.